stmt.c revision 107590
/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
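
/* As a rough sketch (the exact call sites live in the language front
   ends, not in this file), a C statement `if (c) s;' is expanded by
   calls along the lines of:

     expand_start_cond (c, 0);
     ... emit the RTL for `s' ...
     expand_end_cond ();

   with `expand_start_else' called between the clauses when an else
   is present.  */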

#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

struct case_node
{
  struct case_node	*left;	/* Left son in binary tree */
  struct case_node	*right;	/* Right son in binary tree; also node chain */
  struct case_node	*parent; /* Parent of node in binary tree */
  tree			low;	/* Lowest index value for this label */
  tree			high;	/* Highest index value for this label */
  tree			code_label; /* Label to jump to when node matches */
  int			balance;
};

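/* As an illustration (a sketch only), the GNU C switch

     switch (i) { case 1: ... case 2: ... case 4 ... 6: ... }

   gives rise to three case_nodes with (low, high) = (1,1), (2,2)
   and (4,6).  When no branch table is emitted, they are chained
   through RIGHT in increasing order and then rebalanced into a
   binary tree for the compare-and-jump form described above.  */
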
typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]

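/* Thus COST_TABLE (-1) refers to cost_table_[0] and COST_TABLE (127)
   to cost_table_[128], which is why cost_table_ has 129 entries: one
   for each index in the range -1 .. 127.  */
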
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

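/* As a rough example, while expanding `while (c) { if (d) break; }'
   the chains would be, innermost first:

     nesting_stack:  cond (the if) -> block (the {...}) -> loop (the while)
     cond_stack:     cond (the if)
     block_stack:    block (the {...})
     loop_stack:     loop (the while)

   so that `break' can locate the innermost visible loop by scanning
   loop_stack alone.  */
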
struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label before a jump that branches to the end of the whole
	     construct.  This is where destructors go if any.  */
	  rtx alt_end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int n_function_calls;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block; if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int block_target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	  /* When in a conditional context, this is the specific
	     cleanup list associated with last_unconditional_cleanup,
	     where we place the conditionalized cleanups.  */
	  tree *cleanup_ptr;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Name of this kind of statement, for warnings.  */
	  const char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} case_stmt;
    } data;
};

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
 (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (this != target); } while (0)

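/* For example, `POPSTACK (loop_stack);' pops not just the innermost
   loop but every construct nested within it, keeping the per-type
   stacks and `nesting_stack' consistent with one another.  */
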
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
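
/* A typical case (sketched in GNU C) is a forward jump out of a
   contour that must restore the stack level:

     {
       char buf[n];		-- variable size: contour resets the stack
       if (e) goto done;	-- target not seen yet: needs a fixup
       ...
     }
     done: ;

   Only when `done' is finally defined do we know what stack level to
   restore and which cleanups to run before the jump.  */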

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};

struct stmt_status
{
  /* Chain of all pending binding contours.  */
  struct nesting *x_block_stack;

  /* If any new stacks are added here, add them to POPSTACK too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting *x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting *x_cond_stack;

  /* Chain of all pending loops.  */
  struct nesting *x_loop_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting *x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting *x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Each time we expand an expression-statement,
     record the expr's type and its RTL value here.  */
  tree x_last_expr_type;
  rtx x_last_expr_value;

  /* Nonzero if within a ({...}) grouping, in which case we must
     always compute a value for each expr-stmt in case it is the last one.  */
  int x_expr_stmts_for_value;

  /* Filename and line number of last line-number note,
     whether we actually emitted it or not.  */
  const char *x_emit_filename;
  int x_emit_lineno;

  struct goto_fixup *x_goto_fixup_chain;
};

#define block_stack (cfun->stmt->x_block_stack)
#define stack_block_stack (cfun->stmt->x_stack_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define loop_stack (cfun->stmt->x_loop_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define last_expr_type (cfun->stmt->x_last_expr_type)
#define last_expr_value (cfun->stmt->x_last_expr_value)
#define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
#define emit_filename (cfun->stmt->x_emit_filename)
#define emit_lineno (cfun->stmt->x_emit_lineno)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)

/* Non-zero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

static int n_occurrences		PARAMS ((int, const char *));
static bool parse_input_constraint	PARAMS ((const char **, int, int, int,
						 int, const char * const *,
						 bool *, bool *));
static void expand_goto_internal	PARAMS ((tree, rtx, rtx));
static int expand_fixup			PARAMS ((tree, rtx, rtx));
static rtx expand_nl_handler_label	PARAMS ((rtx, rtx));
static void expand_nl_goto_receiver	PARAMS ((void));
static void expand_nl_goto_receivers	PARAMS ((struct nesting *));
static void fixup_gotos			PARAMS ((struct nesting *, rtx, tree,
					       rtx, int));
static bool check_operand_nalternatives	PARAMS ((tree, tree));
static bool check_unique_operand_names	PARAMS ((tree, tree));
static tree resolve_operand_names	PARAMS ((tree, tree, tree,
						 const char **));
static char *resolve_operand_name_1	PARAMS ((char *, tree, tree));
static void expand_null_return_1	PARAMS ((rtx));
static void expand_value_return		PARAMS ((rtx));
static int tail_recursion_args		PARAMS ((tree, tree));
static void expand_cleanups		PARAMS ((tree, tree, int, int));
static void check_seenlabel		PARAMS ((void));
static void do_jump_if_equal		PARAMS ((rtx, rtx, rtx, int));
static int estimate_case_costs		PARAMS ((case_node_ptr));
static void group_case_nodes		PARAMS ((case_node_ptr));
static void balance_case_nodes		PARAMS ((case_node_ptr *,
					       case_node_ptr));
static int node_has_low_bound		PARAMS ((case_node_ptr, tree));
static int node_has_high_bound		PARAMS ((case_node_ptr, tree));
static int node_is_bounded		PARAMS ((case_node_ptr, tree));
static void emit_jump_if_reachable	PARAMS ((rtx));
static void emit_case_nodes		PARAMS ((rtx, case_node_ptr, rtx, tree));
static struct case_node *case_tree2list	PARAMS ((case_node *, case_node *));
static void mark_cond_nesting           PARAMS ((struct nesting *));
static void mark_loop_nesting           PARAMS ((struct nesting *));
static void mark_block_nesting          PARAMS ((struct nesting *));
static void mark_case_nesting           PARAMS ((struct nesting *));
static void mark_case_node		PARAMS ((struct case_node *));
static void mark_goto_fixup             PARAMS ((struct goto_fixup *));
static void free_case_nodes             PARAMS ((case_node_ptr));

void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

/* Mark N (known to be a cond-nesting) for GC.  */

static void
mark_cond_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.cond.endif_label);
      ggc_mark_rtx (n->data.cond.next_label);

      n = n->next;
    }
}

/* Mark N (known to be a loop-nesting) for GC.  */

static void
mark_loop_nesting (n)
     struct nesting *n;
{

  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.loop.start_label);
      ggc_mark_rtx (n->data.loop.end_label);
      ggc_mark_rtx (n->data.loop.alt_end_label);
      ggc_mark_rtx (n->data.loop.continue_label);

      n = n->next;
    }
}

/* Mark N (known to be a block-nesting) for GC.  */

static void
mark_block_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      struct label_chain *l;

      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.block.stack_level);
      ggc_mark_rtx (n->data.block.first_insn);
      ggc_mark_tree (n->data.block.cleanups);
      ggc_mark_tree (n->data.block.outer_cleanups);

      for (l = n->data.block.label_chain; l != NULL; l = l->next)
	{
	  ggc_mark (l);
	  ggc_mark_tree (l->label);
	}

      ggc_mark_rtx (n->data.block.last_unconditional_cleanup);

      /* ??? cleanup_ptr never points outside the stack, does it?  */

      n = n->next;
    }
}

/* Mark N (known to be a case-nesting) for GC.  */

static void
mark_case_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.case_stmt.start);

      ggc_mark_tree (n->data.case_stmt.default_label);
      ggc_mark_tree (n->data.case_stmt.index_expr);
      ggc_mark_tree (n->data.case_stmt.nominal_type);

      mark_case_node (n->data.case_stmt.case_list);
      n = n->next;
    }
}

/* Mark C for GC.  */

static void
mark_case_node (c)
     struct case_node *c;
{
  if (c != 0)
    {
      ggc_mark_tree (c->low);
      ggc_mark_tree (c->high);
      ggc_mark_tree (c->code_label);

      mark_case_node (c->right);
      mark_case_node (c->left);
    }
}

/* Mark G for GC.  */

static void
mark_goto_fixup (g)
     struct goto_fixup *g;
{
  while (g)
    {
      ggc_mark (g);
      ggc_mark_rtx (g->before_jump);
      ggc_mark_tree (g->target);
      ggc_mark_tree (g->context);
      ggc_mark_rtx (g->target_rtl);
      ggc_mark_rtx (g->stack_level);
      ggc_mark_tree (g->cleanup_list_list);

      g = g->next;
    }
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_stmt_status (f)
     struct function *f;
{
  /* We're about to free the function obstack.  If we hold pointers to
     things allocated there, then we'll try to mark them when we do
     GC.  So, we clear them out here explicitly.  */
  if (f->stmt)
    free (f->stmt);
  f->stmt = NULL;
}

/* Mark P for GC.  */

void
mark_stmt_status (p)
     struct stmt_status *p;
{
  if (p == 0)
    return;

  mark_block_nesting (p->x_block_stack);
  mark_cond_nesting (p->x_cond_stack);
  mark_loop_nesting (p->x_loop_stack);
  mark_case_nesting (p->x_case_stack);

  ggc_mark_tree (p->x_last_expr_type);
  /* last_expr_value is only valid if last_expr_type is nonzero.  */
  if (p->x_last_expr_type)
    ggc_mark_rtx (p->x_last_expr_value);

  mark_goto_fixup (p->x_goto_fixup_chain);
}

void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
}

void
init_stmt_for_function ()
{
  cfun->stmt = (struct stmt_status *) xmalloc (sizeof (struct stmt_status));

  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  current_block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
  last_expr_value = NULL_RTX;
}

/* Return nonzero if anything is pushed on the loop, condition, or case
   stack.  */
int
in_control_zone_p ()
{
  return cond_stack || loop_stack || case_stack;
}

/* Record the current file and line.  Called from emit_line_note.  */
void
set_file_and_line_for_stmt (file, line)
     const char *file;
     int line;
{
  /* If we're outputting an inline function, and we add a line note,
     there may be no CFUN->STMT information.  So, there's no need to
     update it.  */
  if (cfun->stmt)
    {
      emit_filename = file;
      emit_lineno = line;
    }
}

/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}

/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (!DECL_RTL_SET_P (label))
    SET_DECL_RTL (label, gen_label_rtx ());

  return DECL_RTL (label);
}


/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (x) != Pmode)
    x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();
  do_pending_stack_adjust ();
  emit_indirect_jump (x);

  current_function_has_computed_jump = 1;
}
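
/* GNU C's computed goto, e.g. `goto *fn_table[i];', arrives here with
   EXP being the pointer expression `fn_table[i]' (fn_table being just
   an illustrative array of label addresses).  */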

/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) ggc_alloc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx handler_slot, static_chain, save_area, insn;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->x_nonlocal_goto_handler_slots;
      for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
	   link = TREE_CHAIN (link))
	handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

      static_chain = copy_to_reg (lookup_static_chain (label));

      /* Get addr of containing function's current nonlocal goto handler,
	 which will do any cleanups and then jump to the label.  */
      handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
					       virtual_stack_vars_rtx,
					       static_chain));

      /* Get addr of containing function's nonlocal save area.  */
      save_area = p->x_nonlocal_goto_stack_level;
      if (save_area)
	save_area = replace_rtx (copy_rtx (save_area),
				 virtual_stack_vars_rtx, static_chain);

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
				      save_area, label_ref));
      else
#endif
	{
	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, static_chain);
	  emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);

	  /* USE of hard_frame_pointer_rtx added for consistency;
	     not clear if really needed.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (handler_slot);
	}

      /* Search backwards to the jump insn and mark it as a
	 non-local goto.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
	{
	  if (GET_CODE (insn) == JUMP_INSN)
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
						  const0_rtx, REG_NOTES (insn));
	      break;
	    }
	  else if (GET_CODE (insn) == CALL_INSN)
	      break;
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be the CODE_LABEL rtx to jump to (the function aborts
   if it is anything else).
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();

	  /* Don't do this adjust if it's to the end label and this function
	     is to return with a depressed stack pointer.  */
	  if (label == return_label
	      && (((TREE_CODE (TREE_TYPE (current_function_decl))
		   == FUNCTION_TYPE)
		   && (TYPE_RETURNS_STACK_DEPRESSED
		       (TREE_TYPE (current_function_decl))))))
	    ;
	  else
	    emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}

/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
      && (rtl_label == loop_stack->data.loop.start_label
	  || rtl_label == loop_stack->data.loop.end_label
	  || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.

	 Note that optimization passes (including expand_end_loop)
	 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
	 as a placeholder.  */

      {
	rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();
	rtx start;
	rtx end;
	tree block;

	block = make_node (BLOCK);
	TREE_USED (block) = 1;

	if (!cfun->x_whole_function_mode_p)
	  insert_block (block);
	else
	  {
	    BLOCK_CHAIN (block)
	      = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
	    BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
	      = block;
	  }

	start_sequence ();
	start = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (start) = block;
	fixup->before_jump = emit_note (NULL, NOTE_INSN_DELETED);
	end = emit_note (NULL, NOTE_INSN_BLOCK_END);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (end) = block;
	fixup->context = block;
	end_sequence ();
	emit_insns_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}

/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  rtx cleanup_insns;

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  This code used to use
	     the first non-label insn after f->target_rtl, but that's
	     wrong since such insns can be added by things like
	     put_var_into_stack and can have INSN_UIDs that are out
	     of the range of the block.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level
	      && ! (f->target_rtl == return_label
		    && ((TREE_CODE (TREE_TYPE (current_function_decl))
			 == FUNCTION_TYPE)
			&& (TYPE_RETURNS_STACK_DEPRESSED
			    (TREE_TYPE (current_function_decl))))))
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point insures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      pushlevel (0);
	      set_block (f->context);
	      expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      poplevel (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insns_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}

/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (c, s)
     int c;
     const char *s;
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
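
/* For example, n_occurrences (',', "=g,r") is 1; this is how the
   number of comma-separated constraint alternatives is counted
   below.  */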

/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
				TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
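
/* This handles only the simple form, e.g. `asm ("nop");' in C: the
   entire template becomes one ASM_INPUT rtx with no operands.  */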

/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */

bool
parse_output_constraint (constraint_p, operand_num, ninputs, noutputs,
			 allows_mem, allows_reg, is_inout)
     const char **constraint_p;
     int operand_num;
     int ninputs;
     int noutputs;
     bool *allows_mem;
     bool *allows_reg;
     bool *is_inout;
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks `='");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || *is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
	warning ("output constraint `%c' for operand %d is not at the beginning",
		 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = alloca (c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
	 it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string.  */
  for (p = constraint + 1; *p; ++p)
    switch (*p)
      {
      case '+':
      case '=':
	error ("operand constraint contains incorrectly positioned '+' or '='");
	return false;

      case '%':
	if (operand_num + 1 == ninputs + noutputs)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
      case '[':
	error ("matching constraint not valid in output operand");
	return false;

      case '<':  case '>':
	/* ??? Before flow, auto inc/dec insns are not supposed to exist,
	   excepting those that expand_call created.  So match memory
	   and hope.  */
	*allows_mem = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      case 'p': case 'r':
	*allows_reg = true;
	break;

      default:
	if (!ISALPHA (*p))
	  break;
	if (REG_CLASS_FROM_LETTER (*p) != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  return true;
}
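
/* For example, given the constraint "+rm" this returns with the
   constraint canonicalized to "=rm", *IS_INOUT true, and both
   *ALLOWS_REG and *ALLOWS_MEM true; a plain "=r" sets only
   *ALLOWS_REG.  */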

/* Similar, but for input constraints.  */

static bool
parse_input_constraint (constraint_p, input_num, ninputs, noutputs, ninout,
			constraints, allows_mem, allows_reg)
     const char **constraint_p;
     int input_num;
     int ninputs;
     int noutputs;
     int ninout;
     const char * const * constraints;
     bool *allows_mem;
     bool *allows_reg;
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j++)
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
	if (constraint == orig_constraint)
	  {
	    error ("input operand constraint contains `%c'", constraint[j]);
	    return false;
	  }
	break;

      case '%':
	if (constraint == orig_constraint
	    && input_num + 1 == ninputs - ninout)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

	/* Whether or not a numeric constraint allows a register is
	   decided by the matching constraint, and so there is no need
	   to do anything special with them.  We must handle them in
	   the default case, so that we don't unnecessarily force
	   operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
	{
	  char *end;
	  unsigned long match;

	  match = strtoul (constraint + j, &end, 10);
	  if (match >= (unsigned long) noutputs)
	    {
	      error ("matching constraint references invalid operand number");
	      return false;
	    }

	  /* Try and find the real constraint for this dup.  Only do this
	     if the matching constraint is the only alternative.  */
	  if (*end == '\0'
	      && (j == 0 || (j == 1 && constraint[0] == '%')))
	    {
	      constraint = constraints[match];
	      *constraint_p = constraint;
	      c_len = strlen (constraint);
	      j = 0;
	      break;
	    }
	  else
	    j = end - constraint;
	}
	/* Fall through.  */

      case 'p':  case 'r':
	*allows_reg = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      default:
	if (! ISALPHA (constraint[j]))
	  {
	    error ("invalid punctuation `%c' in constraint", constraint[j]);
	    return false;
	  }
	if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  return true;
}
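
/* For example, an input constraint "0" (match output operand 0) is
   resolved by substituting output 0's constraint string, so the input
   inherits that operand's register/memory properties.  */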

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
1575   name in TREE_VALUE (or NULL_TREE) and a constraint string
1576   in TREE_PURPOSE.
1577   CLOBBERS is a list of STRING_CST nodes each naming a hard register
1578   that is clobbered by this insn.
1579
1580   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1581   Some elements of OUTPUTS may be replaced with trees representing temporary
1582   values.  The caller should copy those temporary values to the originally
1583   specified lvalues.
1584
1585   VOL nonzero means the insn is volatile; don't optimize it.  */
1586
1587void
1588expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1589     tree string, outputs, inputs, clobbers;
1590     int vol;
1591     const char *filename;
1592     int line;
1593{
1594  rtvec argvec, constraintvec;
1595  rtx body;
1596  int ninputs = list_length (inputs);
1597  int noutputs = list_length (outputs);
1598  int ninout;
1599  int nclobbers;
1600  tree tail;
1601  int i;
1602  /* Vector of RTX's of evaluated output operands.  */
1603  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1604  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
1605  rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1606  enum machine_mode *inout_mode
1607    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
1608  const char **constraints
1609    = (const char **) alloca ((noutputs + ninputs) * sizeof (const char *));
1610  /* The insn we have emitted.  */
1611  rtx insn;
1612  int old_generating_concat_p = generating_concat_p;
1613
1614  /* An ASM with no outputs needs to be treated as volatile, for now.  */
1615  if (noutputs == 0)
1616    vol = 1;
1617
1618  if (! check_operand_nalternatives (outputs, inputs))
1619    return;
1620
1621  if (! check_unique_operand_names (outputs, inputs))
1622    return;
1623
1624  string = resolve_operand_names (string, outputs, inputs, constraints);
1625
1626#ifdef MD_ASM_CLOBBERS
1627  /* Sometimes we wish to automatically clobber registers across an asm.
1628     Case in point is when the i386 backend moved from cc0 to a hard reg --
1629     maintaining source-level compatibility means automatically clobbering
1630     the flags register.  */
1631  MD_ASM_CLOBBERS (clobbers);
1632#endif
1633
1634  /* Count the number of meaningful clobbered registers, ignoring what
1635     we would ignore later.  */
1636  nclobbers = 0;
1637  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1638    {
1639      const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1640
1641      i = decode_reg_name (regname);
1642      if (i >= 0 || i == -4)
1643	++nclobbers;
1644      else if (i == -2)
1645	error ("unknown register name `%s' in `asm'", regname);
1646    }
1647
1648  last_expr_type = 0;
1649
1650  /* First pass over inputs and outputs checks validity and calls
1651     mark_addressable where needed.  */
1652
1653  ninout = 0;
1654  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1655    {
1656      tree val = TREE_VALUE (tail);
1657      tree type = TREE_TYPE (val);
1658      const char *constraint;
1659      bool is_inout;
1660      bool allows_reg;
1661      bool allows_mem;
1662
1663      /* If there's an erroneous arg, emit no insn.  */
1664      if (type == error_mark_node)
1665	return;
1666
1667      /* Try to parse the output constraint.  If that fails, there's
1668	 no point in going further.  */
1669      constraint = constraints[i];
1670      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
1671				    &allows_mem, &allows_reg, &is_inout))
1672	return;
1673
1674      if (! allows_reg
1675	  && (allows_mem
1676	      || is_inout
1677	      || (DECL_P (val)
1678		  && GET_CODE (DECL_RTL (val)) == REG
1679		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
1680	mark_addressable (val);
1681
1682      if (is_inout)
1683	ninout++;
1684    }
1685
1686  ninputs += ninout;
1687  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1688    {
1689      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1690      return;
1691    }
1692
1693  for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
1694    {
1695      bool allows_reg, allows_mem;
1696      const char *constraint;
1697
1698      /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
1699	 would get VOIDmode and that could cause a crash in reload.  */
1700      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1701	return;
1702
1703      constraint = constraints[i + noutputs];
1704      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1705				    constraints, &allows_mem, &allows_reg))
1706	return;
1707
1708      if (! allows_reg && allows_mem)
1709	mark_addressable (TREE_VALUE (tail));
1710    }
1711
1712  /* Second pass evaluates arguments.  */
1713
1714  ninout = 0;
1715  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1716    {
1717      tree val = TREE_VALUE (tail);
1718      tree type = TREE_TYPE (val);
1719      bool is_inout;
1720      bool allows_reg;
1721      bool allows_mem;
1722
1723      if (!parse_output_constraint (&constraints[i], i, ninputs,
1724				    noutputs, &allows_mem, &allows_reg,
1725				    &is_inout))
1726	abort ();
1727
1728      /* If an output operand is not a decl or indirect ref and our constraint
1729	 allows a register, make a temporary to act as an intermediate.
1730	 Make the asm insn write into that, then our caller will copy it to
1731	 the real output operand.  Likewise for promoted variables.  */
1732
1733      generating_concat_p = 0;
1734
1735      real_output_rtx[i] = NULL_RTX;
1736      if ((TREE_CODE (val) == INDIRECT_REF
1737	   && allows_mem)
1738	  || (DECL_P (val)
1739	      && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
1740	      && ! (GET_CODE (DECL_RTL (val)) == REG
1741		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1742	  || ! allows_reg
1743	  || is_inout)
1744	{
1745	  output_rtx[i] = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
1746
1747	  if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
1748	    error ("output number %d not directly addressable", i);
1749	  if ((! allows_mem && GET_CODE (output_rtx[i]) == MEM)
1750	      || GET_CODE (output_rtx[i]) == CONCAT)
1751	    {
1752    	      real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1753	      output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
1754	      if (is_inout)
1755		emit_move_insn (output_rtx[i], real_output_rtx[i]);
1756	    }
1757	}
1758      else
1759	{
1760	  output_rtx[i] = assign_temp (type, 0, 0, 1);
1761	  TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
1762	}
1763
1764      generating_concat_p = old_generating_concat_p;
1765
1766      if (is_inout)
1767	{
1768	  inout_mode[ninout] = TYPE_MODE (type);
1769	  inout_opnum[ninout++] = i;
1770	}
1771    }
1772
1773  /* Make vectors for the expression-rtx, constraint strings,
1774     and named operands.  */
1775
1776  argvec = rtvec_alloc (ninputs);
1777  constraintvec = rtvec_alloc (ninputs);
1778
1779  body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1780				: GET_MODE (output_rtx[0])),
1781			       TREE_STRING_POINTER (string),
1782			       empty_string, 0, argvec, constraintvec,
1783			       filename, line);
1784
1785  MEM_VOLATILE_P (body) = vol;
1786
1787  /* Eval the inputs and put them into ARGVEC.
1788     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */
1789
1790  for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1791    {
1792      bool allows_reg, allows_mem;
1793      const char *constraint;
1794      tree val, type;
1795      rtx op;
1796
1797      constraint = constraints[i + noutputs];
1798      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1799				    constraints, &allows_mem, &allows_reg))
1800	abort ();
1801
1802      generating_concat_p = 0;
1803
1804      val = TREE_VALUE (tail);
1805      type = TREE_TYPE (val);
1806      op = expand_expr (val, NULL_RTX, VOIDmode, 0);
1807
1808      /* Never pass a CONCAT to an ASM.  */
1809      if (GET_CODE (op) == CONCAT)
1810	op = force_reg (GET_MODE (op), op);
1811
1812      if (asm_operand_ok (op, constraint) <= 0)
1813	{
1814	  if (allows_reg)
1815	    op = force_reg (TYPE_MODE (type), op);
1816	  else if (!allows_mem)
1817	    warning ("asm operand %d probably doesn't match constraints",
1818		     i + noutputs);
1819	  else if (CONSTANT_P (op))
1820	    op = force_const_mem (TYPE_MODE (type), op);
1821	  else if (GET_CODE (op) == REG
1822		   || GET_CODE (op) == SUBREG
1823		   || GET_CODE (op) == ADDRESSOF
1824		   || GET_CODE (op) == CONCAT)
1825	    {
1826	      tree qual_type = build_qualified_type (type,
1827						     (TYPE_QUALS (type)
1828						      | TYPE_QUAL_CONST));
1829	      rtx memloc = assign_temp (qual_type, 1, 1, 1);
1830
1831	      emit_move_insn (memloc, op);
1832	      op = memloc;
1833	    }
1834
1835	  else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
1836	    {
1837	      /* We won't recognize volatile memory as a
1838		 memory_operand at this point.  Ignore it.  */
1839	    }
1840	  else if (queued_subexp_p (op))
1841	    ;
1842	  else
1843	    /* ??? Leave this only until we have experience with what
1844	       happens in combine and elsewhere when constraints are
1845	       not satisfied.  */
1846	    warning ("asm operand %d probably doesn't match constraints",
1847		     i + noutputs);
1848	}
1849
1850      generating_concat_p = old_generating_concat_p;
1851      ASM_OPERANDS_INPUT (body, i) = op;
1852
1853      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1854	= gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1855    }
1856
1857  /* Protect all the operands from the queue now that they have all been
1858     evaluated.  */
1859
1860  generating_concat_p = 0;
1861
1862  for (i = 0; i < ninputs - ninout; i++)
1863    ASM_OPERANDS_INPUT (body, i)
1864      = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1865
1866  for (i = 0; i < noutputs; i++)
1867    output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1868
1869  /* For in-out operands, copy output rtx to input rtx.  */
1870  for (i = 0; i < ninout; i++)
1871    {
1872      int j = inout_opnum[i];
1873      char buffer[16];
1874
1875      ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1876	= output_rtx[j];
1877
1878      sprintf (buffer, "%d", j);
1879      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1880	= gen_rtx_ASM_INPUT (inout_mode[i], ggc_alloc_string (buffer, -1));
1881    }
1882
1883  generating_concat_p = old_generating_concat_p;
1884
1885  /* Now, for each output, construct an rtx
1886     (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1887			       ARGVEC CONSTRAINTS OPNAMES))
1888     If there is more than one, put them inside a PARALLEL.  */
1889
1890  if (noutputs == 1 && nclobbers == 0)
1891    {
1892      ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1893      insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1894    }
1895
1896  else if (noutputs == 0 && nclobbers == 0)
1897    {
1898      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
1899      insn = emit_insn (body);
1900    }
1901
1902  else
1903    {
1904      rtx obody = body;
1905      int num = noutputs;
1906
1907      if (num == 0)
1908	num = 1;
1909
1910      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1911
1912      /* For each output operand, store a SET.  */
1913      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1914	{
1915	  XVECEXP (body, 0, i)
1916	    = gen_rtx_SET (VOIDmode,
1917			   output_rtx[i],
1918			   gen_rtx_ASM_OPERANDS
1919			   (GET_MODE (output_rtx[i]),
1920			    TREE_STRING_POINTER (string),
1921			    constraints[i], i, argvec, constraintvec,
1922			    filename, line));
1923
1924	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1925	}
1926
1927      /* If there are no outputs (but there are some clobbers)
1928	 store the bare ASM_OPERANDS into the PARALLEL.  */
1929
1930      if (i == 0)
1931	XVECEXP (body, 0, i++) = obody;
1932
1933      /* Store (clobber REG) for each clobbered register specified.  */
1934
1935      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1936	{
1937	  const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1938	  int j = decode_reg_name (regname);
1939
1940	  if (j < 0)
1941	    {
1942	      if (j == -3)	/* `cc', which is not a register */
1943		continue;
1944
1945	      if (j == -4)	/* `memory', don't cache memory across asm */
1946		{
1947		  XVECEXP (body, 0, i++)
1948		    = gen_rtx_CLOBBER (VOIDmode,
1949				       gen_rtx_MEM
1950				       (BLKmode,
1951					gen_rtx_SCRATCH (VOIDmode)));
1952		  continue;
1953		}
1954
1955	      /* Ignore unknown register, error already signaled.  */
1956	      continue;
1957	    }
1958
1959	  /* Use QImode since that's guaranteed to clobber just one reg.  */
1960	  XVECEXP (body, 0, i++)
1961	    = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
1962	}
1963
1964      insn = emit_insn (body);
1965    }
1966
1967  /* For any outputs that needed reloading into registers, spill them
1968     back to where they belong.  */
1969  for (i = 0; i < noutputs; ++i)
1970    if (real_output_rtx[i])
1971      emit_move_insn (real_output_rtx[i], output_rtx[i]);
1972
1973  free_temp_slots ();
1974}
1975
1976/* A subroutine of expand_asm_operands.  Check that all operands have
1977   the same number of alternatives.  Return true if so.  */
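
/* For example,

     asm ("..." : "=r,m" (x) : "r,m" (y), "g" (z));

   would be rejected here: the first two constraints have two
   alternatives each, while "g" has only one.  */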
1978
1979static bool
1980check_operand_nalternatives (outputs, inputs)
1981     tree outputs, inputs;
1982{
1983  if (outputs || inputs)
1984    {
1985      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1986      int nalternatives
1987	= n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1988      tree next = inputs;
1989
1990      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1991	{
1992	  error ("too many alternatives in `asm'");
1993	  return false;
1994	}
1995
1996      tmp = outputs;
1997      while (tmp)
1998	{
1999	  const char *constraint
2000	    = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
2001
2002	  if (n_occurrences (',', constraint) != nalternatives)
2003	    {
2004	      error ("operand constraints for `asm' differ in number of alternatives");
2005	      return false;
2006	    }
2007
2008	  if (TREE_CHAIN (tmp))
2009	    tmp = TREE_CHAIN (tmp);
2010	  else
2011	    tmp = next, next = 0;
2012	}
2013    }
2014
2015  return true;
2016}
2017
2018/* A subroutine of expand_asm_operands.  Check that all operand names
2019   are unique.  Return true if so.  We rely on the fact that these names
2020   are identifiers, and so have been canonicalized by get_identifier,
2021   so all we need are pointer comparisons.  */
2022
2023static bool
2024check_unique_operand_names (outputs, inputs)
2025     tree outputs, inputs;
2026{
2027  tree i, j;
2028
2029  for (i = outputs; i ; i = TREE_CHAIN (i))
2030    {
2031      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
2032      if (! i_name)
2033	continue;
2034
2035      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
2036	if (i_name == TREE_PURPOSE (TREE_PURPOSE (j)))
2037	  goto failure;
2038    }
2039
2040  for (i = inputs; i ; i = TREE_CHAIN (i))
2041    {
2042      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
2043      if (! i_name)
2044	continue;
2045
2046      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
2047	if (i_name == TREE_PURPOSE (TREE_PURPOSE (j)))
2048	  goto failure;
2049      for (j = outputs; j ; j = TREE_CHAIN (j))
2050	if (i_name == TREE_PURPOSE (TREE_PURPOSE (j)))
2051	  goto failure;
2052    }
2053
2054  return true;
2055
2056 failure:
2057  error ("duplicate asm operand name '%s'",
2058	 IDENTIFIER_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
2059  return false;
2060}
2061
2062/* A subroutine of expand_asm_operands.  Resolve the names of the operands
2063   in OUTPUTS and INPUTS to numbers, and replace the name expansions in
2064   STRING and in the constraints to those numbers.  */
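
/* For example,

     asm ("mov %[src], %[dst]" : [dst] "=rm" (x) : [src] "ri" (y));

   has its template rewritten to "mov %1, %0", and a "[dst]" appearing
   in an input constraint string would likewise become "0".  */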
2065
2066static tree
2067resolve_operand_names (string, outputs, inputs, pconstraints)
2068     tree string;
2069     tree outputs, inputs;
2070     const char **pconstraints;
2071{
2072  char *buffer = xstrdup (TREE_STRING_POINTER (string));
2073  char *p;
2074  tree t;
2075
2076  /* Assume that we will not need extra space to perform the substitution.
2077     This is because we get to remove '[' and ']', which means we cannot have
2078     a problem until we have more than 999 operands.  */
2079
2080  p = buffer;
2081  while ((p = strchr (p, '%')) != NULL)
2082    {
2083      if (p[1] == '[')
2084	p += 1;
2085      else if (ISALPHA (p[1]) && p[2] == '[')
2086	p += 2;
2087      else
2088	{
2089	  p += 1;
2090	  continue;
2091	}
2092
2093      p = resolve_operand_name_1 (p, outputs, inputs);
2094    }
2095
2096  string = build_string (strlen (buffer), buffer);
2097  free (buffer);
2098
2099  /* Collect output constraints here because it's convenient.
2100     There should be no named operands here; this is verified
2101     in expand_asm_operands.  */
2102  for (t = outputs; t ; t = TREE_CHAIN (t), pconstraints++)
2103    *pconstraints = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2104
2105  /* Substitute [<name>] in input constraint strings.  */
2106  for (t = inputs; t ; t = TREE_CHAIN (t), pconstraints++)
2107    {
2108      const char *c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2109      if (strchr (c, '[') == NULL)
2110	*pconstraints = c;
2111      else
2112	{
2113	  p = buffer = xstrdup (c);
2114	  while ((p = strchr (p, '[')) != NULL)
2115	    p = resolve_operand_name_1 (p, outputs, inputs);
2116
2117	  *pconstraints = ggc_alloc_string (buffer, -1);
2118	  free (buffer);
2119	}
2120    }
2121
2122  return string;
2123}
2124
2125/* A subroutine of resolve_operand_names.  P points to the '[' for a
2126   potential named operand of the form [<name>].  In place, replace
2127   the name and brackets with a number.  Return a pointer to the
2128   balance of the string after substitution.  */
2129
2130static char *
2131resolve_operand_name_1 (p, outputs, inputs)
2132     char *p;
2133     tree outputs, inputs;
2134{
2135  char *q;
2136  int op;
2137  tree t;
2138  size_t len;
2139
2140  /* Collect the operand name.  */
2141  q = strchr (p, ']');
2142  if (!q)
2143    {
2144      error ("missing close brace for named operand");
2145      return strchr (p, '\0');
2146    }
2147  len = q - p - 1;
2148
2149  /* Resolve the name to a number.  */
2150  for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
2151    {
2152      tree id = TREE_PURPOSE (TREE_PURPOSE (t));
2153      if (id)
2154	{
2155	  const char *c = IDENTIFIER_POINTER (id);
2156	  if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2157	    goto found;
2158	}
2159    }
2160  for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2161    {
2162      tree id = TREE_PURPOSE (TREE_PURPOSE (t));
2163      if (id)
2164	{
2165	  const char *c = IDENTIFIER_POINTER (id);
2166	  if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2167	    goto found;
2168	}
2169    }
2170
2171  *q = '\0';
2172  error ("undefined named operand '%s'", p + 1);
2173  op = 0;
2174 found:
2175
2176  /* Replace the name with the number.  Unfortunately, not all libraries
2177     get the return value of sprintf correct, so search for the end of the
2178     generated string by hand.  */
2179  sprintf (p, "%d", op);
2180  p = strchr (p, '\0');
2181
2182  /* Verify the assumption that no extra buffer space is needed.  */
2183  if (p > q)
2184    abort ();
2185
2186  /* Shift the rest of the buffer down to fill the gap.  */
2187  memmove (p, q + 1, strlen (q + 1) + 1);
2188
2189  return p;
2190}
2191
2192/* Generate RTL to evaluate the expression EXP
2193   and remember it in case this is the VALUE in a ({... VALUE; }) construct.
2194   Provided just for backward-compatibility.  expand_expr_stmt_value()
2195   should be used for new code.  */
2196
2197void
2198expand_expr_stmt (exp)
2199     tree exp;
2200{
2201  expand_expr_stmt_value (exp, -1, 1);
2202}
2203
2204/* Generate RTL to evaluate the expression EXP.  WANT_VALUE tells
2205   whether to save the value of the expression (1), discard it (0),
2206   or let expr_stmts_for_value decide (-1).  The use of -1 is
2207   deprecated, and retained only for backward compatibility.  */
2208
2209void
2210expand_expr_stmt_value (exp, want_value, maybe_last)
2211     tree exp;
2212     int want_value, maybe_last;
2213{
2214  rtx value;
2215  tree type;
2216
2217  if (want_value == -1)
2218    want_value = expr_stmts_for_value != 0;
2219
2220  /* If -W, warn about statements with no side effects,
2221     except for an explicit cast to void (e.g. for assert()), and
2222     except for the last statement in ({...}), where they may be useful.  */
2223  if (! want_value
2224      && (expr_stmts_for_value == 0 || ! maybe_last)
2225      && exp != error_mark_node)
2226    {
2227      if (! TREE_SIDE_EFFECTS (exp))
2228	{
2229	  if ((extra_warnings || warn_unused_value)
2230	      && !(TREE_CODE (exp) == CONVERT_EXPR
2231		   && VOID_TYPE_P (TREE_TYPE (exp))))
2232	    warning_with_file_and_line (emit_filename, emit_lineno,
2233				        "statement with no effect");
2234	}
2235      else if (warn_unused_value)
2236	warn_if_unused_value (exp);
2237    }
2238
2239  /* If EXP is of function type and we are expanding statements for
2240     value, convert it to pointer-to-function.  */
2241  if (want_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2242    exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2243
2244  /* The call to `expand_expr' could cause last_expr_type and
2245     last_expr_value to get reset.  Therefore, we set last_expr_value
2246     and last_expr_type *after* calling expand_expr.  */
2247  value = expand_expr (exp, want_value ? NULL_RTX : const0_rtx,
2248		       VOIDmode, 0);
2249  type = TREE_TYPE (exp);
2250
2251  /* If all we do is reference a volatile value in memory,
2252     copy it to a register to be sure it is actually touched.  */
2253  if (value && GET_CODE (value) == MEM && TREE_THIS_VOLATILE (exp))
2254    {
2255      if (TYPE_MODE (type) == VOIDmode)
2256	;
2257      else if (TYPE_MODE (type) != BLKmode)
2258	value = copy_to_reg (value);
2259      else
2260	{
2261	  rtx lab = gen_label_rtx ();
2262
2263	  /* Compare the value with itself to reference it.  */
2264	  emit_cmp_and_jump_insns (value, value, EQ,
2265				   expand_expr (TYPE_SIZE (type),
2266						NULL_RTX, VOIDmode, 0),
2267				   BLKmode, 0, lab);
2268	  emit_label (lab);
2269	}
2270    }
2271
2272  /* If this expression is part of a ({...}) and is in memory, we may have
2273     to preserve temporaries.  */
2274  preserve_temp_slots (value);
2275
2276  /* Free any temporaries used to evaluate this expression.  Any temporary
2277     used as a result of this expression will already have been preserved
2278     above.  */
2279  free_temp_slots ();
2280
2281  if (want_value)
2282    {
2283      last_expr_value = value;
2284      last_expr_type = type;
2285    }
2286
2287  emit_queue ();
2288}
2289
2290/* Warn if EXP contains any computations whose results are not used.
2291   Return 1 if a warning is printed; 0 otherwise.  */
2292
2293int
2294warn_if_unused_value (exp)
2295     tree exp;
2296{
2297  if (TREE_USED (exp))
2298    return 0;
2299
2300  /* Don't warn about void constructs.  This includes casting to void,
2301     void function calls, and statement expressions with a final cast
2302     to void.  */
2303  if (VOID_TYPE_P (TREE_TYPE (exp)))
2304    return 0;
2305
2306  switch (TREE_CODE (exp))
2307    {
2308    case PREINCREMENT_EXPR:
2309    case POSTINCREMENT_EXPR:
2310    case PREDECREMENT_EXPR:
2311    case POSTDECREMENT_EXPR:
2312    case MODIFY_EXPR:
2313    case INIT_EXPR:
2314    case TARGET_EXPR:
2315    case CALL_EXPR:
2316    case METHOD_CALL_EXPR:
2317    case RTL_EXPR:
2318    case TRY_CATCH_EXPR:
2319    case WITH_CLEANUP_EXPR:
2320    case EXIT_EXPR:
2321      return 0;
2322
2323    case BIND_EXPR:
2324      /* For a binding, warn if no side effect within it.  */
2325      return warn_if_unused_value (TREE_OPERAND (exp, 1));
2326
2327    case SAVE_EXPR:
2328      return warn_if_unused_value (TREE_OPERAND (exp, 1));
2329
2330    case TRUTH_ORIF_EXPR:
2331    case TRUTH_ANDIF_EXPR:
2332      /* In && or ||, warn if 2nd operand has no side effect.  */
2333      return warn_if_unused_value (TREE_OPERAND (exp, 1));
2334
2335    case COMPOUND_EXPR:
2336      if (TREE_NO_UNUSED_WARNING (exp))
2337	return 0;
2338      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2339	return 1;
2340      /* Let people do `(foo (), 0)' without a warning.  */
2341      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2342	return 0;
2343      return warn_if_unused_value (TREE_OPERAND (exp, 1));
2344
2345    case NOP_EXPR:
2346    case CONVERT_EXPR:
2347    case NON_LVALUE_EXPR:
2348      /* Don't warn about conversions not explicit in the user's program.  */
2349      if (TREE_NO_UNUSED_WARNING (exp))
2350	return 0;
2351      /* Assignment to a cast usually results in a cast of a modify.
2352	 Don't complain about that.  There can be an arbitrary number of
2353	 casts before the modify, so we must loop until we find the first
2354	 non-cast expression and then test to see if that is a modify.  */
2355      {
2356	tree tem = TREE_OPERAND (exp, 0);
2357
2358	while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2359	  tem = TREE_OPERAND (tem, 0);
2360
2361	if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2362	    || TREE_CODE (tem) == CALL_EXPR)
2363	  return 0;
2364      }
2365      goto maybe_warn;
2366
2367    case INDIRECT_REF:
2368      /* Don't warn about automatic dereferencing of references, since
2369	 the user cannot control it.  */
2370      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2371	return warn_if_unused_value (TREE_OPERAND (exp, 0));
2372      /* Fall through.  */
2373
2374    default:
2375      /* Referencing a volatile value is a side effect, so don't warn.  */
2376      if ((DECL_P (exp)
2377	   || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2378	  && TREE_THIS_VOLATILE (exp))
2379	return 0;
2380
2381      /* If this is an expression which has no operands, there is no value
2382	 to be unused.  There are no such language-independent codes,
2383	 but front ends may define such.  */
2384      if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2385	  && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2386	return 0;
2387
2388    maybe_warn:
2389      /* If this is an expression with side effects, don't warn.  */
2390      if (TREE_SIDE_EFFECTS (exp))
2391	return 0;
2392
2393      warning_with_file_and_line (emit_filename, emit_lineno,
2394				  "value computed is not used");
2395      return 1;
2396    }
2397}
2398
2399/* Clear out the memory of the last expression evaluated.  */
2400
2401void
2402clear_last_expr ()
2403{
2404  last_expr_type = 0;
2405}
2406
2407/* Begin a statement-expression, i.e., a series of statements which
2408   may return a value.  Return the RTL_EXPR for this statement expr.
2409   The caller must save that value and pass it to
2410   expand_end_stmt_expr.  If HAS_SCOPE is nonzero, temporaries created
2411   in the statement-expression are deallocated at the end of the
2412   expression.  */
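
/* For example, the GNU C statement expression in

     int i = ({ int t = foo (); t + 1; });

   is bracketed by a call to this function and a matching call to
   expand_end_stmt_expr; the value of `t + 1', the last statement,
   becomes the value of the whole construct.  */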
2413
2414tree
2415expand_start_stmt_expr (has_scope)
2416     int has_scope;
2417{
2418  tree t;
2419
2420  /* Make the RTL_EXPR node temporary, not momentary,
2421     so that rtl_expr_chain doesn't become garbage.  */
2422  t = make_node (RTL_EXPR);
2423  do_pending_stack_adjust ();
2424  if (has_scope)
2425    start_sequence_for_rtl_expr (t);
2426  else
2427    start_sequence ();
2428  NO_DEFER_POP;
2429  expr_stmts_for_value++;
2430  last_expr_value = NULL_RTX;
2431  return t;
2432}
2433
2434/* Restore the previous state at the end of a statement that returns a value.
2435   Returns a tree node representing the statement's value and the
2436   insns to compute the value.
2437
2438   The nodes of that expression have been freed by now, so we cannot use them.
2439   But we don't want to do that anyway; the expression has already been
2440   evaluated and now we just want to use the value.  So generate an RTL_EXPR
2441   with the proper type and RTL value.
2442
2443   If the last substatement was not an expression,
2444   return something with type `void'.  */
2445
2446tree
2447expand_end_stmt_expr (t)
2448     tree t;
2449{
2450  OK_DEFER_POP;
2451
2452  if (! last_expr_value || ! last_expr_type)
2453    {
2454      last_expr_value = const0_rtx;
2455      last_expr_type = void_type_node;
2456    }
2457  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2458    /* Remove any possible QUEUED.  */
2459    last_expr_value = protect_from_queue (last_expr_value, 0);
2460
2461  emit_queue ();
2462
2463  TREE_TYPE (t) = last_expr_type;
2464  RTL_EXPR_RTL (t) = last_expr_value;
2465  RTL_EXPR_SEQUENCE (t) = get_insns ();
2466
2467  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2468
2469  end_sequence ();
2470
2471  /* Don't consider deleting this expr or containing exprs at tree level.  */
2472  TREE_SIDE_EFFECTS (t) = 1;
2473  /* Propagate volatility of the actual RTL expr.  */
2474  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2475
2476  last_expr_type = 0;
2477  expr_stmts_for_value--;
2478
2479  return t;
2480}
2481
2482/* Generate RTL for the start of an if-then.  COND is the expression
2483   whose truth should be tested.
2484
2485   If EXITFLAG is nonzero, this conditional is visible to
2486   `exit_something'.  */
2487
2488void
2489expand_start_cond (cond, exitflag)
2490     tree cond;
2491     int exitflag;
2492{
2493  struct nesting *thiscond = ALLOC_NESTING ();
2494
2495  /* Make an entry on cond_stack for the cond we are entering.  */
2496
2497  thiscond->next = cond_stack;
2498  thiscond->all = nesting_stack;
2499  thiscond->depth = ++nesting_depth;
2500  thiscond->data.cond.next_label = gen_label_rtx ();
2501  /* Before we encounter an `else', we don't need a separate exit label
2502     unless there are supposed to be exit statements
2503     to exit this conditional.  */
2504  thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2505  thiscond->data.cond.endif_label = thiscond->exit_label;
2506  cond_stack = thiscond;
2507  nesting_stack = thiscond;
2508
2509  do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2510}
2511
2512/* Generate RTL between then-clause and the elseif-clause
2513   of an if-then-elseif-....  */
2514
2515void
2516expand_start_elseif (cond)
2517     tree cond;
2518{
2519  if (cond_stack->data.cond.endif_label == 0)
2520    cond_stack->data.cond.endif_label = gen_label_rtx ();
2521  emit_jump (cond_stack->data.cond.endif_label);
2522  emit_label (cond_stack->data.cond.next_label);
2523  cond_stack->data.cond.next_label = gen_label_rtx ();
2524  do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2525}
2526
2527/* Generate RTL between the then-clause and the else-clause
2528   of an if-then-else.  */
2529
2530void
2531expand_start_else ()
2532{
2533  if (cond_stack->data.cond.endif_label == 0)
2534    cond_stack->data.cond.endif_label = gen_label_rtx ();
2535
2536  emit_jump (cond_stack->data.cond.endif_label);
2537  emit_label (cond_stack->data.cond.next_label);
2538  cond_stack->data.cond.next_label = 0;  /* No more _else or _elseif calls.  */
2539}
2540
2541/* After calling expand_start_else, turn this "else" into an "else if"
2542   by providing another condition.  */
2543
2544void
2545expand_elseif (cond)
2546     tree cond;
2547{
2548  cond_stack->data.cond.next_label = gen_label_rtx ();
2549  do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2550}
2551
2552/* Generate RTL for the end of an if-then.
2553   Pop the record for it off of cond_stack.  */
2554
2555void
2556expand_end_cond ()
2557{
2558  struct nesting *thiscond = cond_stack;
2559
2560  do_pending_stack_adjust ();
2561  if (thiscond->data.cond.next_label)
2562    emit_label (thiscond->data.cond.next_label);
2563  if (thiscond->data.cond.endif_label)
2564    emit_label (thiscond->data.cond.endif_label);
2565
2566  POPSTACK (cond_stack);
2567  last_expr_type = 0;
2568}
2569
2570/* Generate RTL for the start of a loop.  EXIT_FLAG is nonzero if this
2571   loop should be exited by `exit_something'.  This is a loop for which
2572   `expand_continue_loop' will jump to the top of the loop.
2573
2574   Make an entry on loop_stack to record the labels associated with
2575   this loop.  */
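
/* As a sketch of the intended use (the exact sequence is up to the
   language front end), a `while (cond) body;' loop expands roughly as

     expand_start_loop (1);
     expand_exit_loop_top_cond (0, cond);
     ... expand body ...
     expand_end_loop ();

   with `break' becoming expand_exit_loop and `continue' becoming
   expand_continue_loop.  */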
2576
2577struct nesting *
2578expand_start_loop (exit_flag)
2579     int exit_flag;
2580{
2581  struct nesting *thisloop = ALLOC_NESTING ();
2582
2583  /* Make an entry on loop_stack for the loop we are entering.  */
2584
2585  thisloop->next = loop_stack;
2586  thisloop->all = nesting_stack;
2587  thisloop->depth = ++nesting_depth;
2588  thisloop->data.loop.start_label = gen_label_rtx ();
2589  thisloop->data.loop.end_label = gen_label_rtx ();
2590  thisloop->data.loop.alt_end_label = 0;
2591  thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2592  thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2593  loop_stack = thisloop;
2594  nesting_stack = thisloop;
2595
2596  do_pending_stack_adjust ();
2597  emit_queue ();
2598  emit_note (NULL, NOTE_INSN_LOOP_BEG);
2599  emit_label (thisloop->data.loop.start_label);
2600
2601  return thisloop;
2602}
2603
2604/* Like expand_start_loop but for a loop where the continuation point
2605   (for expand_continue_loop) will be specified explicitly.  */
2606
2607struct nesting *
2608expand_start_loop_continue_elsewhere (exit_flag)
2609     int exit_flag;
2610{
2611  struct nesting *thisloop = expand_start_loop (exit_flag);
2612  loop_stack->data.loop.continue_label = gen_label_rtx ();
2613  return thisloop;
2614}
2615
2616/* Begin a null, aka do { } while (0) "loop".  But since the contents
2617   of said loop can still contain a break, we must frob the loop nest.  */
2618
2619struct nesting *
2620expand_start_null_loop ()
2621{
2622  struct nesting *thisloop = ALLOC_NESTING ();
2623
2624  /* Make an entry on loop_stack for the loop we are entering.  */
2625
2626  thisloop->next = loop_stack;
2627  thisloop->all = nesting_stack;
2628  thisloop->depth = ++nesting_depth;
2629  thisloop->data.loop.start_label = emit_note (NULL, NOTE_INSN_DELETED);
2630  thisloop->data.loop.end_label = gen_label_rtx ();
2631  thisloop->data.loop.alt_end_label = NULL_RTX;
2632  thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2633  thisloop->exit_label = thisloop->data.loop.end_label;
2634  loop_stack = thisloop;
2635  nesting_stack = thisloop;
2636
2637  return thisloop;
2638}
2639
2640/* Specify the continuation point for a loop started with
2641   expand_start_loop_continue_elsewhere.
2642   Use this at the point in the code to which a continue statement
2643   should jump.  */
2644
2645void
2646expand_loop_continue_here ()
2647{
2648  do_pending_stack_adjust ();
2649  emit_note (NULL, NOTE_INSN_LOOP_CONT);
2650  emit_label (loop_stack->data.loop.continue_label);
2651}
2652
2653/* Finish a loop.  Generate a jump back to the top and the loop-exit label.
2654   Pop the block off of loop_stack.  */
2655
2656void
2657expand_end_loop ()
2658{
2659  rtx start_label = loop_stack->data.loop.start_label;
2660  rtx etc_note;
2661  int eh_regions, debug_blocks;
2662
2663  /* Mark the continue-point at the top of the loop if none elsewhere.  */
2664  if (start_label == loop_stack->data.loop.continue_label)
2665    emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2666
2667  do_pending_stack_adjust ();
2668
2669  /* If the loop starts with a loop exit, roll that to the end where
2670     it will optimize together with the jump back.
2671
2672     If the loop presently looks like this (in pseudo-C):
2673
2674	LOOP_BEG
2675	start_label:
2676	  if (test) goto end_label;
2677	LOOP_END_TOP_COND
2678	  body;
2679	  goto start_label;
2680	end_label:
2681
2682     transform it to look like:
2683
2684	LOOP_BEG
2685	  goto start_label;
2686	top_label:
2687	  body;
2688	start_label:
2689	  if (test) goto end_label;
2690	  goto top_label;
2691	end_label:
2692
2693     We rely on the presence of NOTE_INSN_LOOP_END_TOP_COND to mark
2694     the end of the entry conditional.  Without this, our lexical scan
2695     can't tell the difference between an entry conditional and a
2696     body conditional that exits the loop.  Mistaking the two means
2697     that we can misplace the NOTE_INSN_LOOP_CONT note, which can
2698     screw up loop unrolling.
2699
2700     Things will be oh so much better when loop optimization is done
2701     off of a proper control flow graph...  */
2702
2703  /* Scan insns from the top of the loop looking for the END_TOP_COND note.  */
2704
2705  eh_regions = debug_blocks = 0;
2706  for (etc_note = start_label; etc_note ; etc_note = NEXT_INSN (etc_note))
2707    if (GET_CODE (etc_note) == NOTE)
2708      {
2709	if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_END_TOP_COND)
2710	  break;
2711
2712	/* We must not walk into a nested loop.  */
2713	else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_BEG)
2714	  {
2715	    etc_note = NULL_RTX;
2716	    break;
2717	  }
2718
2719	/* At the same time, scan for EH region notes, as we don't want
2720	   to scrog region nesting.  This shouldn't happen, but...  */
2721	else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_BEG)
2722	  eh_regions++;
2723	else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_END)
2724	  {
2725	    if (--eh_regions < 0)
2726	      /* We've come to the end of an EH region, but never saw the
2727		 beginning of that region.  That means that an EH region
2728		 begins before the top of the loop, and ends in the middle
2729		 of it.  The existence of such a situation violates a basic
2730		 assumption in this code, since that would imply that even
2731		 when EH_REGIONS is zero, we might move code out of an
2732		 exception region.  */
2733	      abort ();
2734	  }
2735
2736	/* Likewise for debug scopes.  In this case we'll either (1) move
2737	   all of the notes if they are properly nested or (2) leave the
2738	   notes alone and only rotate the loop at high optimization
2739	   levels when we expect to scrog debug info.  */
2740	else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_BEG)
2741	  debug_blocks++;
2742	else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_END)
2743	  debug_blocks--;
2744      }
2745
2746  if (etc_note
2747      && optimize
2748      && eh_regions == 0
2749      && (debug_blocks == 0 || optimize >= 2)
2750      && NEXT_INSN (etc_note) != NULL_RTX
2751      && ! any_condjump_p (get_last_insn ()))
2752    {
2753      /* We found one.  Move everything from START to ETC to the end
2754	 of the loop, and add a jump from the top of the loop.  */
2755      rtx top_label = gen_label_rtx ();
2756      rtx start_move = start_label;
2757
2758      /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2759	 then we want to move this note also.  */
2760      if (GET_CODE (PREV_INSN (start_move)) == NOTE
2761	  && NOTE_LINE_NUMBER (PREV_INSN (start_move)) == NOTE_INSN_LOOP_CONT)
2762	start_move = PREV_INSN (start_move);
2763
2764      emit_label_before (top_label, start_move);
2765
2766      /* Actually move the insns.  If the debug scopes are nested, we
2767	 can move everything at once.  Otherwise we have to move them
2768	 one by one and squeeze out the block notes.  */
2769      if (debug_blocks == 0)
2770	reorder_insns (start_move, etc_note, get_last_insn ());
2771      else
2772	{
2773	  rtx insn, next_insn;
2774	  for (insn = start_move; insn; insn = next_insn)
2775	    {
2776	      /* Figure out which insn comes after this one.  We have
2777		 to do this before we move INSN.  */
2778	      next_insn = (insn == etc_note ? NULL : NEXT_INSN (insn));
2779
2780	      if (GET_CODE (insn) == NOTE
2781		  && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2782		      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2783		continue;
2784
2785	      reorder_insns (insn, insn, get_last_insn ());
2786	    }
2787	}
2788
2789      /* Add the jump from the top of the loop.  */
2790      emit_jump_insn_before (gen_jump (start_label), top_label);
2791      emit_barrier_before (top_label);
2792      start_label = top_label;
2793    }
2794
2795  emit_jump (start_label);
2796  emit_note (NULL, NOTE_INSN_LOOP_END);
2797  emit_label (loop_stack->data.loop.end_label);
2798
2799  POPSTACK (loop_stack);
2800
2801  last_expr_type = 0;
2802}
2803
2804/* Finish a null loop, aka do { } while (0).  */
2805
2806void
2807expand_end_null_loop ()
2808{
2809  do_pending_stack_adjust ();
2810  emit_label (loop_stack->data.loop.end_label);
2811
2812  POPSTACK (loop_stack);
2813
2814  last_expr_type = 0;
2815}
2816
2817/* Generate a jump to the current loop's continue-point.
2818   This is usually the top of the loop, but may be specified
2819   explicitly elsewhere.  If not currently inside a loop,
2820   return 0 and do nothing; caller will print an error message.  */
2821
2822int
2823expand_continue_loop (whichloop)
2824     struct nesting *whichloop;
2825{
2826  last_expr_type = 0;
2827  if (whichloop == 0)
2828    whichloop = loop_stack;
2829  if (whichloop == 0)
2830    return 0;
2831  expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2832			NULL_RTX);
2833  return 1;
2834}
2835
2836/* Generate a jump to exit the current loop.  If not currently inside a loop,
2837   return 0 and do nothing; caller will print an error message.  */
2838
2839int
2840expand_exit_loop (whichloop)
2841     struct nesting *whichloop;
2842{
2843  last_expr_type = 0;
2844  if (whichloop == 0)
2845    whichloop = loop_stack;
2846  if (whichloop == 0)
2847    return 0;
2848  expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2849  return 1;
2850}
2851
2852/* Generate a conditional jump to exit the current loop if COND
2853   evaluates to zero.  If not currently inside a loop,
2854   return 0 and do nothing; caller will print an error message.  */
2855
2856int
2857expand_exit_loop_if_false (whichloop, cond)
2858     struct nesting *whichloop;
2859     tree cond;
2860{
2861  rtx label = gen_label_rtx ();
2862  rtx last_insn;
2863  last_expr_type = 0;
2864
2865  if (whichloop == 0)
2866    whichloop = loop_stack;
2867  if (whichloop == 0)
2868    return 0;
2869  /* In order to handle fixups, we actually create a conditional jump
2870     around an unconditional branch to exit the loop.  If fixups are
2871     necessary, they go before the unconditional branch.  */
2872
2873  do_jump (cond, NULL_RTX, label);
2874  last_insn = get_last_insn ();
2875  if (GET_CODE (last_insn) == CODE_LABEL)
2876    whichloop->data.loop.alt_end_label = last_insn;
2877  expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2878			NULL_RTX);
2879  emit_label (label);
2880
2881  return 1;
2882}
2883
2884/* Like expand_exit_loop_if_false except also emit a note marking
2885   the end of the conditional.  Should only be used immediately
2886   after expand_start_loop.  */
2887
2888int
2889expand_exit_loop_top_cond (whichloop, cond)
2890     struct nesting *whichloop;
2891     tree cond;
2892{
2893  if (! expand_exit_loop_if_false (whichloop, cond))
2894    return 0;
2895
2896  emit_note (NULL, NOTE_INSN_LOOP_END_TOP_COND);
2897  return 1;
2898}
2899
2900/* Return nonzero if the loop nest is empty.  Else return zero.  */
2901
2902int
2903stmt_loop_nest_empty ()
2904{
2905  /* cfun->stmt can be NULL if we are building a call to get the
2906     EH context for a setjmp/longjmp EH target and the current
2907     function was a deferred inline function.  */
2908  return (cfun->stmt == NULL || loop_stack == NULL);
2909}
2910
2911/* Return nonzero if we should preserve sub-expressions as separate
2912   pseudos.  We never do so if we aren't optimizing.  We always do so
2913   if -fexpensive-optimizations.
2914
2915   Otherwise, we only do so if we are in the "early" part of a loop.  I.e.,
2916   the loop may still be a small one.  */
2917
2918int
2919preserve_subexpressions_p ()
2920{
2921  rtx insn;
2922
2923  if (flag_expensive_optimizations)
2924    return 1;
2925
2926  if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2927    return 0;
2928
2929  insn = get_last_insn_anywhere ();
2930
2931  return (insn
2932	  && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2933	      < n_non_fixed_regs * 3));
2934
2935}
2936
2937/* Generate a jump to exit the current loop, conditional, binding contour
2938   or case statement.  Not all such constructs are visible to this function,
2939   only those started with EXIT_FLAG nonzero.  Individual languages use
2940   the EXIT_FLAG parameter to control which kinds of constructs you can
2941   exit this way.
2942
2943   If not currently inside anything that can be exited,
2944   return 0 and do nothing; caller will print an error message.  */
2945
2946int
2947expand_exit_something ()
2948{
2949  struct nesting *n;
2950  last_expr_type = 0;
2951  for (n = nesting_stack; n; n = n->all)
2952    if (n->exit_label != 0)
2953      {
2954	expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2955	return 1;
2956      }
2957
2958  return 0;
2959}
2960
2961/* Generate RTL to return from the current function, with no value.
2962   (That is, we do not do anything about returning any value.)  */
2963
2964void
2965expand_null_return ()
2966{
2967  rtx last_insn = get_last_insn ();
2968
2969  /* If this function was declared to return a value, but we
2970     didn't, clobber the return registers so that they are not
2971     propagated live to the rest of the function.  */
2972  clobber_return_register ();
2973
2974  expand_null_return_1 (last_insn);
2975}
2976
2977/* Generate RTL to return from the current function, with value VAL.  */
2978
2979static void
2980expand_value_return (val)
2981     rtx val;
2982{
2983  rtx last_insn = get_last_insn ();
2984  rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2985
2986  /* Copy the value to the return location
2987     unless it's already there.  */
2988
2989  if (return_reg != val)
2990    {
2991      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2992#ifdef PROMOTE_FUNCTION_RETURN
2993      int unsignedp = TREE_UNSIGNED (type);
2994      enum machine_mode old_mode
2995	= DECL_MODE (DECL_RESULT (current_function_decl));
2996      enum machine_mode mode
2997	= promote_mode (type, old_mode, &unsignedp, 1);
2998
2999      if (mode != old_mode)
3000	val = convert_modes (mode, old_mode, val, unsignedp);
3001#endif
3002      if (GET_CODE (return_reg) == PARALLEL)
3003	emit_group_load (return_reg, val, int_size_in_bytes (type));
3004      else
3005	emit_move_insn (return_reg, val);
3006    }
3007
3008  expand_null_return_1 (last_insn);
3009}
3010
3011/* Output a return with no value.  If LAST_INSN is nonzero,
3012   pretend that the return takes place after LAST_INSN.  */
3013
3014static void
3015expand_null_return_1 (last_insn)
3016     rtx last_insn;
3017{
3018  rtx end_label = cleanup_label ? cleanup_label : return_label;
3019
3020  clear_pending_stack_adjust ();
3021  do_pending_stack_adjust ();
3022  last_expr_type = 0;
3023
3024  if (end_label == 0)
3025     end_label = return_label = gen_label_rtx ();
3026  expand_goto_internal (NULL_TREE, end_label, last_insn);
3027}
3028
3029/* Generate RTL to evaluate the expression RETVAL and return it
3030   from the current function.  */
3031
3032void
3033expand_return (retval)
3034     tree retval;
3035{
3036  /* If there are any cleanups to be performed, then they will
3037     be inserted following LAST_INSN.  It is desirable
3038     that the last_insn, for such purposes, should be the
3039     last insn before computing the return value.  Otherwise, cleanups
3040     which call functions can clobber the return value.  */
3041  /* ??? rms: I think that is erroneous, because in C++ it would
3042     run destructors on variables that might be used in the subsequent
3043     computation of the return value.  */
3044  rtx last_insn = 0;
3045  rtx result_rtl;
3046  rtx val = 0;
3047  tree retval_rhs;
3048
3049  /* If function wants no value, give it none.  */
3050  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3051    {
3052      expand_expr (retval, NULL_RTX, VOIDmode, 0);
3053      emit_queue ();
3054      expand_null_return ();
3055      return;
3056    }
3057
3058  if (retval == error_mark_node)
3059    {
3060      /* Treat this like a return of no value from a function that
3061	 returns a value.  */
3062      expand_null_return ();
3063      return;
3064    }
3065  else if (TREE_CODE (retval) == RESULT_DECL)
3066    retval_rhs = retval;
3067  else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
3068	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3069    retval_rhs = TREE_OPERAND (retval, 1);
3070  else if (VOID_TYPE_P (TREE_TYPE (retval)))
3071    /* Recognize tail-recursive call to void function.  */
3072    retval_rhs = retval;
3073  else
3074    retval_rhs = NULL_TREE;
3075
3076  last_insn = get_last_insn ();
3077
3078  /* Distribute return down conditional expr if either of the sides
3079     may involve tail recursion (see test below).  This increases the number
3080     of tail recursions we see.  Don't always do this since it can produce
3081     sub-optimal code in some cases and we distribute assignments into
3082     conditional expressions when it would help.  */
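
  /* For example,

       return p ? f (x) : 0;

     is expanded below as if it were

       if (p) return f (x); else return 0;

     exposing each call as a potential tail recursion.  */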
3083
3084  if (optimize && retval_rhs != 0
3085      && frame_offset == 0
3086      && TREE_CODE (retval_rhs) == COND_EXPR
3087      && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
3088	  || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
3089    {
3090      rtx label = gen_label_rtx ();
3091      tree expr;
3092
3093      do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
3094      start_cleanup_deferral ();
3095      expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3096		    DECL_RESULT (current_function_decl),
3097		    TREE_OPERAND (retval_rhs, 1));
3098      TREE_SIDE_EFFECTS (expr) = 1;
3099      expand_return (expr);
3100      emit_label (label);
3101
3102      expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3103		    DECL_RESULT (current_function_decl),
3104		    TREE_OPERAND (retval_rhs, 2));
3105      TREE_SIDE_EFFECTS (expr) = 1;
3106      expand_return (expr);
3107      end_cleanup_deferral ();
3108      return;
3109    }
3110
3111  result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3112
3113  /* If the result is an aggregate that is being returned in one (or more)
3114     registers, load the registers here.  The compiler currently can't handle
3115     copying a BLKmode value into registers.  We could put this code in a
3116     more general area (for use by everyone instead of just function
3117     call/return), but until this feature is generally usable it is kept here
3118     (and in expand_call).  The value must go into a pseudo in case there
3119     are cleanups that will clobber the real return register.  */
3120
3121  if (retval_rhs != 0
3122      && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3123      && GET_CODE (result_rtl) == REG)
3124    {
3125      int i;
3126      unsigned HOST_WIDE_INT bitpos, xbitpos;
3127      unsigned HOST_WIDE_INT big_endian_correction = 0;
3128      unsigned HOST_WIDE_INT bytes
3129	= int_size_in_bytes (TREE_TYPE (retval_rhs));
3130      int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3131      unsigned int bitsize
3132	= MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3133      rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
3134      rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3135      rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3136      enum machine_mode tmpmode, result_reg_mode;
3137
3138      if (bytes == 0)
3139	{
3140	  expand_null_return ();
3141	  return;
3142	}
3143
3144      /* Structures whose size is not a multiple of a word are aligned
3145	 to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
3146	 machine, this means we must skip the empty high order bytes when
3147	 calculating the bit offset.  */
3148      if (BYTES_BIG_ENDIAN
3149	  && !FUNCTION_ARG_REG_LITTLE_ENDIAN
3150	  && bytes % UNITS_PER_WORD)
3151	big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3152						  * BITS_PER_UNIT));
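
      /* For example, on a 32-bit big-endian target (BITS_PER_WORD == 32,
	 UNITS_PER_WORD == 4), a 6-byte structure gives
	 bytes % UNITS_PER_WORD == 2, so the correction is
	 32 - 2 * 8 == 16 and xbitpos below starts at 16 instead of 0.  */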
3153
3154      /* Copy the structure BITSIZE bits at a time.  */
3155      for (bitpos = 0, xbitpos = big_endian_correction;
3156	   bitpos < bytes * BITS_PER_UNIT;
3157	   bitpos += bitsize, xbitpos += bitsize)
3158	{
3159	  /* We need a new destination pseudo each time xbitpos is
3160	     on a word boundary or when xbitpos == big_endian_correction
3161	     (the first time through).  */
3162	  if (xbitpos % BITS_PER_WORD == 0
3163	      || xbitpos == big_endian_correction)
3164	    {
3165	      /* Generate an appropriate register.  */
3166	      dst = gen_reg_rtx (word_mode);
3167	      result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3168
3169	      /* Clear the destination before we move anything into it.  */
3170	      emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
3171	    }
3172
3173	  /* We need a new source operand each time bitpos is on a word
3174	     boundary.  */
3175	  if (bitpos % BITS_PER_WORD == 0)
3176	    src = operand_subword_force (result_val,
3177					 bitpos / BITS_PER_WORD,
3178					 BLKmode);
3179
3180	  /* Use bitpos for the source extraction (left justified) and
3181	     xbitpos for the destination store (right justified).  */
3182	  store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3183			   extract_bit_field (src, bitsize,
3184					      bitpos % BITS_PER_WORD, 1,
3185					      NULL_RTX, word_mode, word_mode,
3186					      BITS_PER_WORD),
3187			   BITS_PER_WORD);
3188	}
3189
3190      /* Find the smallest integer mode large enough to hold the
3191	 entire structure and use that mode instead of BLKmode
3192	 on the USE insn for the return register.  */
3193      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3194	   tmpmode != VOIDmode;
3195	   tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3196	/* Have we found a large enough mode?  */
3197	if (GET_MODE_SIZE (tmpmode) >= bytes)
3198	  break;
3199
3200      /* No suitable mode found.  */
3201      if (tmpmode == VOIDmode)
3202	abort ();
3203
3204      PUT_MODE (result_rtl, tmpmode);
3205
3206      if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3207	result_reg_mode = word_mode;
3208      else
3209	result_reg_mode = tmpmode;
3210      result_reg = gen_reg_rtx (result_reg_mode);
3211
3212      emit_queue ();
3213      for (i = 0; i < n_regs; i++)
3214	emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3215			result_pseudos[i]);
3216
3217      if (tmpmode != result_reg_mode)
3218	result_reg = gen_lowpart (tmpmode, result_reg);
3219
3220      expand_value_return (result_reg);
3221    }
3222  else if (retval_rhs != 0
3223	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3224	   && (GET_CODE (result_rtl) == REG
3225	       || (GET_CODE (result_rtl) == PARALLEL)))
3226    {
3227      /* Calculate the return value into a temporary (usually a pseudo
3228         reg).  */
3229      tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3230      tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3231
3232      val = assign_temp (nt, 0, 0, 1);
3233      val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3234      val = force_not_mem (val);
3235      emit_queue ();
3236      /* Return the calculated value, doing cleanups first.  */
3237      expand_value_return (val);
3238    }
3239  else
3240    {
3241      /* No cleanups or no hard reg used;
3242	 calculate value into hard return reg.  */
3243      expand_expr (retval, const0_rtx, VOIDmode, 0);
3244      emit_queue ();
3245      expand_value_return (result_rtl);
3246    }
3247}
3248
3249/* Return 1 if the end of the generated RTX is not a barrier.
3250   This means code already compiled can drop through.  */
3251
3252int
3253drop_through_at_end_p ()
3254{
3255  rtx insn = get_last_insn ();
3256  while (insn && GET_CODE (insn) == NOTE)
3257    insn = PREV_INSN (insn);
3258  return insn && GET_CODE (insn) != BARRIER;
3259}
3260
3261/* Attempt to optimize a potential tail recursion call into a goto.
3262   ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3263   where to place the jump to the tail recursion label.
3264
3265   Return TRUE if the call was optimized into a goto.  */
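
/* For example, given

     int sum (int n, int acc) { return n ? sum (n - 1, acc + n) : acc; }

   the recursive call can be replaced by assignments to `n' and `acc'
   followed by a jump back to tail_recursion_label, turning the
   recursion into a loop.  */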
3266
3267int
3268optimize_tail_recursion (arguments, last_insn)
3269     tree arguments;
3270     rtx last_insn;
3271{
3272  /* Finish checking validity, and if valid emit code to set the
3273     argument variables for the new call.  */
3274  if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3275    {
3276      if (tail_recursion_label == 0)
3277	{
3278	  tail_recursion_label = gen_label_rtx ();
3279	  emit_label_after (tail_recursion_label,
3280			    tail_recursion_reentry);
3281	}
3282      emit_queue ();
3283      expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3284      emit_barrier ();
3285      return 1;
3286    }
3287  return 0;
3288}
3289
3290/* Emit code to alter this function's formal parms for a tail-recursive call.
3291   ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3292   FORMALS is the chain of decls of formals.
3293   Return 1 if this can be done;
3294   otherwise return 0 and do not emit any code.  */
3295
3296static int
3297tail_recursion_args (actuals, formals)
3298     tree actuals, formals;
3299{
3300  tree a = actuals, f = formals;
3301  int i;
3302  rtx *argvec;
3303
3304  /* Check that number and types of actuals are compatible
3305     with the formals.  This is not always true in valid C code.
3306     Also check that no formal needs to be addressable
3307     and that all formals are scalars.  */
3308
3309  /* Also count the args.  */
3310
3311  for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3312    {
3313      if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3314	  != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3315	return 0;
3316      if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3317	return 0;
3318    }
3319  if (a != 0 || f != 0)
3320    return 0;
3321
3322  /* Compute all the actuals.  */
3323
3324  argvec = (rtx *) alloca (i * sizeof (rtx));
3325
3326  for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3327    argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3328
3329  /* Find which actual values refer to current values of previous formals.
3330     Copy each of them now, before any formal is changed.  */
3331
3332  for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3333    {
3334      int copy = 0;
3335      int j;
3336      for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3337	if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3338	  {
3339	    copy = 1;
3340	    break;
3341	  }
3342      if (copy)
3343	argvec[i] = copy_to_reg (argvec[i]);
3344    }
3345
3346  /* Store the values of the actuals into the formals.  */
3347
3348  for (f = formals, a = actuals, i = 0; f;
3349       f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3350    {
3351      if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3352	emit_move_insn (DECL_RTL (f), argvec[i]);
3353      else
3354	{
3355	  rtx tmp = argvec[i];
3356
3357	  if (DECL_MODE (f) != GET_MODE (DECL_RTL (f)))
3358	    {
3359	      tmp = gen_reg_rtx (DECL_MODE (f));
3360	      convert_move (tmp, argvec[i],
3361			    TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3362	    }
3363	  convert_move (DECL_RTL (f), tmp,
3364			TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3365	}
3366    }
3367
3368  free_temp_slots ();
3369  return 1;
3370}
3371
3372/* Generate the RTL code for entering a binding contour.
3373   The variables are declared one by one, by calls to `expand_decl'.
3374
3375   FLAGS is a bitwise or of the following flags:
3376
3377     1 - Nonzero if this construct should be visible to
3378         `exit_something'.
3379
3380     2 - Nonzero if this contour does not require a
3381	 NOTE_INSN_BLOCK_BEG note.  Virtually all calls from
3382	 language-independent code should set this flag because they
3383	 will not create corresponding BLOCK nodes.  (There should be
3384	 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3385	 and BLOCKs.)  If this flag is set, MARK_ENDS should be zero
3386	 when expand_end_bindings is called.
3387
3388    If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3389    optionally be supplied.  If so, it becomes the NOTE_BLOCK for the
3390    note.  */
3391
3392void
3393expand_start_bindings_and_block (flags, block)
3394     int flags;
3395     tree block;
3396{
3397  struct nesting *thisblock = ALLOC_NESTING ();
3398  rtx note;
3399  int exit_flag = ((flags & 1) != 0);
3400  int block_flag = ((flags & 2) == 0);
3401
3402  /* If a BLOCK is supplied, then the caller should be requesting a
3403     NOTE_INSN_BLOCK_BEG note.  */
3404  if (!block_flag && block)
3405    abort ();
3406
3407  /* Create a note to mark the beginning of the block.  */
3408  if (block_flag)
3409    {
3410      note = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
3411      NOTE_BLOCK (note) = block;
3412    }
3413  else
3414    note = emit_note (NULL, NOTE_INSN_DELETED);
3415
3416  /* Make an entry on block_stack for the block we are entering.  */
3417
3418  thisblock->next = block_stack;
3419  thisblock->all = nesting_stack;
3420  thisblock->depth = ++nesting_depth;
3421  thisblock->data.block.stack_level = 0;
3422  thisblock->data.block.cleanups = 0;
3423  thisblock->data.block.n_function_calls = 0;
3424  thisblock->data.block.exception_region = 0;
3425  thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3426
3427  thisblock->data.block.conditional_code = 0;
3428  thisblock->data.block.last_unconditional_cleanup = note;
3429  /* When we insert instructions after the last unconditional cleanup,
3430     we don't adjust last_insn.  That means that a later add_insn will
3431     clobber the instructions we've just added.  The easiest way to
3432     fix this is to just insert another instruction here, so that the
3433     instructions inserted after the last unconditional cleanup are
3434     never the last instruction.  */
3435  emit_note (NULL, NOTE_INSN_DELETED);
3436  thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3437
3438  if (block_stack
3439      && !(block_stack->data.block.cleanups == NULL_TREE
3440	   && block_stack->data.block.outer_cleanups == NULL_TREE))
3441    thisblock->data.block.outer_cleanups
3442      = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3443		   block_stack->data.block.outer_cleanups);
3444  else
3445    thisblock->data.block.outer_cleanups = 0;
3446  thisblock->data.block.label_chain = 0;
3447  thisblock->data.block.innermost_stack_block = stack_block_stack;
3448  thisblock->data.block.first_insn = note;
3449  thisblock->data.block.block_start_count = ++current_block_start_count;
3450  thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3451  block_stack = thisblock;
3452  nesting_stack = thisblock;
3453
3454  /* Make a new level for allocating stack slots.  */
3455  push_temp_slots ();
3456}
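
/* Usage sketch (hypothetical caller, not from this file): a front end
   expanding a scope with a real BLOCK node might write

       expand_start_bindings_and_block (0, block);
       for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
         expand_decl (decl);
       ...
       expand_end_bindings (BLOCK_VARS (block), 1, 0);

   whereas language-independent code, which creates no BLOCK, passes
   flag bit 2 and later calls expand_end_bindings with MARK_ENDS == 0,
   as the expand_start_bindings (2) call in expand_start_target_temps
   below does.  */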
3457
3458/* Specify the scope of temporaries created by TARGET_EXPRs.  Similar
3459   to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3460   expand_expr is made.  After we end the region, we know that all
3461   space for all temporaries that were created by TARGET_EXPRs will be
3462   destroyed and their space freed for reuse.  */
3463
3464void
3465expand_start_target_temps ()
3466{
3467  /* This is so that even if the result is preserved, the space
3468     allocated will be freed, as we know that it is no longer in use.  */
3469  push_temp_slots ();
3470
3471  /* Start a new binding layer that will keep track of all cleanup
3472     actions to be performed.  */
3473  expand_start_bindings (2);
3474
3475  target_temp_slot_level = temp_slot_level;
3476}
3477
3478void
3479expand_end_target_temps ()
3480{
3481  expand_end_bindings (NULL_TREE, 0, 0);
3482
3483  /* This is so that even if the result is preserved, the space
3484     allocated will be freed, as we know that it is no longer in use.  */
3485  pop_temp_slots ();
3486}
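
/* Pairing sketch (editorial example): callers bracket the expansion
   of a full statement,

       expand_start_target_temps ();
       expand_expr_stmt (stmt);
       expand_end_target_temps ();

   so that temporaries made for TARGET_EXPRs within STMT are cleaned
   up and their slots freed when the statement ends.  */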
3487
3488/* Given a pointer to a BLOCK node return non-zero if (and only if) the node
3489   in question represents the outermost pair of curly braces (i.e. the "body
3490   block") of a function or method.
3491
3492   For any BLOCK node representing a "body block" of a function or method, the
3493   BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3494   represents the outermost (function) scope for the function or method (i.e.
3495   the one which includes the formal parameters).  The BLOCK_SUPERCONTEXT of
3496   *that* node in turn will point to the relevant FUNCTION_DECL node.  */
3497
3498int
3499is_body_block (stmt)
3500     tree stmt;
3501{
3502  if (TREE_CODE (stmt) == BLOCK)
3503    {
3504      tree parent = BLOCK_SUPERCONTEXT (stmt);
3505
3506      if (parent && TREE_CODE (parent) == BLOCK)
3507	{
3508	  tree grandparent = BLOCK_SUPERCONTEXT (parent);
3509
3510	  if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3511	    return 1;
3512	}
3513    }
3514
3515  return 0;
3516}
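
/* Sketch of the tree shape this predicate recognizes (editorial
   diagram):

       FUNCTION_DECL `f'
         `-- BLOCK  (outermost function scope, includes the formals)
               `-- BLOCK  (the "body block": the outermost `{ ... }')

   is_body_block returns 1 only for the innermost BLOCK shown here.  */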
3517
3518/* True if we are currently emitting insns in an area of output code
3519   that is controlled by a conditional expression.  This is used by
3520   the cleanup handling code to generate conditional cleanup actions.  */
3521
3522int
3523conditional_context ()
3524{
3525  return block_stack && block_stack->data.block.conditional_code;
3526}
3527
3528/* Return an opaque pointer to the current nesting level, so frontend code
3529   can check its own sanity.  */
3530
3531struct nesting *
3532current_nesting_level ()
3533{
3534  return cfun ? block_stack : 0;
3535}
3536
3537/* Emit a handler label for a nonlocal goto handler.
3538   Also emit code to store the handler label in SLOT before BEFORE_INSN.  */
3539
3540static rtx
3541expand_nl_handler_label (slot, before_insn)
3542     rtx slot, before_insn;
3543{
3544  rtx insns;
3545  rtx handler_label = gen_label_rtx ();
3546
3547  /* Don't let cleanup_cfg delete the handler.  */
3548  LABEL_PRESERVE_P (handler_label) = 1;
3549
3550  start_sequence ();
3551  emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3552  insns = get_insns ();
3553  end_sequence ();
3554  emit_insns_before (insns, before_insn);
3555
3556  emit_label (handler_label);
3557
3558  return handler_label;
3559}
3560
3561/* Emit code to restore vital registers at the beginning of a nonlocal goto
3562   handler.  */
3563static void
3564expand_nl_goto_receiver ()
3565{
3566#ifdef HAVE_nonlocal_goto
3567  if (! HAVE_nonlocal_goto)
3568#endif
3569    /* First adjust our frame pointer to its actual value.  It was
3570       previously set to the start of the virtual area corresponding to
3571       the stacked variables when we branched here and now needs to be
3572       the stacked variables when we branched here, and now needs to be
3573
3574       Assignments to virtual registers are converted by
3575       instantiate_virtual_regs into the corresponding assignment
3576       to the underlying register (fp in this case) that makes
3577       the original assignment true.
3578       So the following insn will actually be
3579       decrementing fp by STARTING_FRAME_OFFSET.  */
3580    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3581
3582#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3583  if (fixed_regs[ARG_POINTER_REGNUM])
3584    {
3585#ifdef ELIMINABLE_REGS
3586      /* If the argument pointer can be eliminated in favor of the
3587	 frame pointer, we don't need to restore it.  We assume here
3588	 that if such an elimination is present, it can always be used.
3589	 This is the case on all known machines; if we don't make this
3590	 assumption, we do unnecessary saving on many machines.  */
3591      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
3592      size_t i;
3593
3594      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3595	if (elim_regs[i].from == ARG_POINTER_REGNUM
3596	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3597	  break;
3598
3599      if (i == ARRAY_SIZE (elim_regs))
3600#endif
3601	{
3602	  /* Now restore our arg pointer from the address at which it
3603	     was saved in our stack frame.  */
3604	  emit_move_insn (virtual_incoming_args_rtx,
3605			  copy_to_reg (get_arg_pointer_save_area (cfun)));
3606	}
3607    }
3608#endif
3609
3610#ifdef HAVE_nonlocal_goto_receiver
3611  if (HAVE_nonlocal_goto_receiver)
3612    emit_insn (gen_nonlocal_goto_receiver ());
3613#endif
3614}
3615
3616/* Make handlers for nonlocal gotos taking place in the function calls in
3617   block THISBLOCK.  */
3618
3619static void
3620expand_nl_goto_receivers (thisblock)
3621     struct nesting *thisblock;
3622{
3623  tree link;
3624  rtx afterward = gen_label_rtx ();
3625  rtx insns, slot;
3626  rtx label_list;
3627  int any_invalid;
3628
3629  /* Record the handler address in the stack slot for that purpose,
3630     during this block, saving and restoring the outer value.  */
3631  if (thisblock->next != 0)
3632    for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3633      {
3634	rtx save_receiver = gen_reg_rtx (Pmode);
3635	emit_move_insn (XEXP (slot, 0), save_receiver);
3636
3637	start_sequence ();
3638	emit_move_insn (save_receiver, XEXP (slot, 0));
3639	insns = get_insns ();
3640	end_sequence ();
3641	emit_insns_before (insns, thisblock->data.block.first_insn);
3642      }
3643
3644  /* Jump around the handlers; they run only when specially invoked.  */
3645  emit_jump (afterward);
3646
3647  /* Make a separate handler for each label.  */
3648  link = nonlocal_labels;
3649  slot = nonlocal_goto_handler_slots;
3650  label_list = NULL_RTX;
3651  for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3652    /* Skip any labels we shouldn't be able to jump to from here;
3653       we generate one special handler for all of them below, which
3654       just emits a trap.  */
3655    if (! DECL_TOO_LATE (TREE_VALUE (link)))
3656      {
3657	rtx lab;
3658	lab = expand_nl_handler_label (XEXP (slot, 0),
3659				       thisblock->data.block.first_insn);
3660	label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3661
3662	expand_nl_goto_receiver ();
3663
3664	/* Jump to the "real" nonlocal label.  */
3665	expand_goto (TREE_VALUE (link));
3666      }
3667
3668  /* A second pass over all nonlocal labels; this time we handle those
3669     we should not be able to jump to at this point.  */
3670  link = nonlocal_labels;
3671  slot = nonlocal_goto_handler_slots;
3672  any_invalid = 0;
3673  for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3674    if (DECL_TOO_LATE (TREE_VALUE (link)))
3675      {
3676	rtx lab;
3677	lab = expand_nl_handler_label (XEXP (slot, 0),
3678				       thisblock->data.block.first_insn);
3679	label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3680	any_invalid = 1;
3681      }
3682
3683  if (any_invalid)
3684    {
3685      expand_nl_goto_receiver ();
3686      expand_builtin_trap ();
3687    }
3688
3689  nonlocal_goto_handler_labels = label_list;
3690  emit_label (afterward);
3691}
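
/* Source-level sketch (GNU C, editorial example) of what these
   receivers implement:

       void f (void)
       {
         __label__ done;
         void inner (void) { goto done; }
         g (inner);
         ...
       done:
         ...
       }

   The nonlocal jump from `inner' lands on a handler emitted above,
   which restores the frame pointer (and, if needed, the arg pointer)
   and then jumps to the real label `done'.  */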
3692
3693/* Warn about any unused VARS (which may contain nodes other than
3694   VAR_DECLs, but such nodes are ignored).  The nodes are connected
3695   via the TREE_CHAIN field.  */
3696
3697void
3698warn_about_unused_variables (vars)
3699     tree vars;
3700{
3701  tree decl;
3702
3703  if (warn_unused_variable)
3704    for (decl = vars; decl; decl = TREE_CHAIN (decl))
3705      if (TREE_CODE (decl) == VAR_DECL
3706	  && ! TREE_USED (decl)
3707	  && ! DECL_IN_SYSTEM_HEADER (decl)
3708	  && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3709	warning_with_decl (decl, "unused variable `%s'");
3710}
3711
3712/* Generate RTL code to terminate a binding contour.
3713
3714   VARS is the chain of VAR_DECL nodes for the variables bound in this
3715   contour.  There may actually be other nodes in this chain, but any
3716   nodes other than VAR_DECLs are ignored.
3717
3718   MARK_ENDS is nonzero if we should put a note at the beginning
3719   and end of this binding contour.
3720
3721   DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3722   (That is true automatically if the contour has a saved stack level.)  */
3723
3724void
3725expand_end_bindings (vars, mark_ends, dont_jump_in)
3726     tree vars;
3727     int mark_ends;
3728     int dont_jump_in;
3729{
3730  struct nesting *thisblock = block_stack;
3731
3732  /* If any of the variables in this scope were not used, warn the
3733     user.  */
3734  warn_about_unused_variables (vars);
3735
3736  if (thisblock->exit_label)
3737    {
3738      do_pending_stack_adjust ();
3739      emit_label (thisblock->exit_label);
3740    }
3741
3742  /* If necessary, make handlers for nonlocal gotos taking
3743     place in the function calls in this block.  */
3744  if (function_call_count != thisblock->data.block.n_function_calls
3745      && nonlocal_labels
3746      /* Make handler for outermost block
3747	 if there were any nonlocal gotos to this function.  */
3748      && (thisblock->next == 0 ? current_function_has_nonlocal_label
3749	  /* Make handler for inner block if it has something
3750	     special to do when you jump out of it.  */
3751	  : (thisblock->data.block.cleanups != 0
3752	     || thisblock->data.block.stack_level != 0)))
3753    expand_nl_goto_receivers (thisblock);
3754
3755  /* Don't allow jumping into a block that has a stack level.
3756     Cleanups are allowed, though.  */
3757  if (dont_jump_in
3758      || thisblock->data.block.stack_level != 0)
3759    {
3760      struct label_chain *chain;
3761
3762      /* Any labels in this block are no longer valid to go to.
3763	 Mark them to cause an error message.  */
3764      for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3765	{
3766	  DECL_TOO_LATE (chain->label) = 1;
3767	  /* If any goto without a fixup came to this label,
3768	     that must be an error, because gotos without fixups
3769	     come from outside all saved stack-levels.  */
3770	  if (TREE_ADDRESSABLE (chain->label))
3771	    error_with_decl (chain->label,
3772			     "label `%s' used before containing binding contour");
3773	}
3774    }
3775
3776  /* Restore stack level in effect before the block
3777     (only if variable-size objects allocated).  */
3778  /* Perform any cleanups associated with the block.  */
3779
3780  if (thisblock->data.block.stack_level != 0
3781      || thisblock->data.block.cleanups != 0)
3782    {
3783      int reachable;
3784      rtx insn;
3785
3786      /* Don't let cleanups affect ({...}) constructs.  */
3787      int old_expr_stmts_for_value = expr_stmts_for_value;
3788      rtx old_last_expr_value = last_expr_value;
3789      tree old_last_expr_type = last_expr_type;
3790      expr_stmts_for_value = 0;
3791
3792      /* Only clean up here if this point can actually be reached.  */
3793      insn = get_last_insn ();
3794      if (GET_CODE (insn) == NOTE)
3795	insn = prev_nonnote_insn (insn);
3796      reachable = (! insn || GET_CODE (insn) != BARRIER);
3797
3798      /* Do the cleanups.  */
3799      expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3800      if (reachable)
3801	do_pending_stack_adjust ();
3802
3803      expr_stmts_for_value = old_expr_stmts_for_value;
3804      last_expr_value = old_last_expr_value;
3805      last_expr_type = old_last_expr_type;
3806
3807      /* Restore the stack level.  */
3808
3809      if (reachable && thisblock->data.block.stack_level != 0)
3810	{
3811	  emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3812			      thisblock->data.block.stack_level, NULL_RTX);
3813	  if (nonlocal_goto_handler_slots != 0)
3814	    emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3815			     NULL_RTX);
3816	}
3817
3818      /* Any gotos out of this block must also do these things.
3819	 Also report any gotos with fixups that came to labels in this
3820	 level.  */
3821      fixup_gotos (thisblock,
3822		   thisblock->data.block.stack_level,
3823		   thisblock->data.block.cleanups,
3824		   thisblock->data.block.first_insn,
3825		   dont_jump_in);
3826    }
3827
3828  /* Mark the beginning and end of the scope if requested.
3829     We do this now, after running cleanups on the variables
3830     just going out of scope, so they are in scope for their cleanups.  */
3831
3832  if (mark_ends)
3833    {
3834      rtx note = emit_note (NULL, NOTE_INSN_BLOCK_END);
3835      NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3836    }
3837  else
3838    /* Get rid of the beginning-mark if we don't make an end-mark.  */
3839    NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3840
3841  /* Restore the temporary level of TARGET_EXPRs.  */
3842  target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3843
3844  /* Restore block_stack level for containing block.  */
3845
3846  stack_block_stack = thisblock->data.block.innermost_stack_block;
3847  POPSTACK (block_stack);
3848
3849  /* Pop the stack slot nesting and free any slots at this level.  */
3850  pop_temp_slots ();
3851}
3852
3853/* Generate code to save the stack pointer at the start of the current block
3854   and set up to restore it on exit.  */
3855
3856void
3857save_stack_pointer ()
3858{
3859  struct nesting *thisblock = block_stack;
3860
3861  if (thisblock->data.block.stack_level == 0)
3862    {
3863      emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3864		       &thisblock->data.block.stack_level,
3865		       thisblock->data.block.first_insn);
3866      stack_block_stack = thisblock;
3867    }
3868}
3869
3870/* Generate RTL for the automatic variable declaration DECL.
3871   (Other kinds of declarations are simply ignored if seen here.)  */
3872
3873void
3874expand_decl (decl)
3875     tree decl;
3876{
3877  struct nesting *thisblock;
3878  tree type;
3879
3880  type = TREE_TYPE (decl);
3881
3882  /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3883     type in case this node is used in a reference.  */
3884  if (TREE_CODE (decl) == CONST_DECL)
3885    {
3886      DECL_MODE (decl) = TYPE_MODE (type);
3887      DECL_ALIGN (decl) = TYPE_ALIGN (type);
3888      DECL_SIZE (decl) = TYPE_SIZE (type);
3889      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3890      return;
3891    }
3892
3893  /* Otherwise, only automatic variables need any expansion done.  Static and
3894     external variables, and external functions, will be handled by
3895     `assemble_variable' (called from finish_decl).  TYPE_DECL requires
3896     nothing.  PARM_DECLs are handled in `assign_parms'.  */
3897  if (TREE_CODE (decl) != VAR_DECL)
3898    return;
3899
3900  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3901    return;
3902
3903  thisblock = block_stack;
3904
3905  /* Create the RTL representation for the variable.  */
3906
3907  if (type == error_mark_node)
3908    SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3909
3910  else if (DECL_SIZE (decl) == 0)
3911    /* Variable with incomplete type.  */
3912    {
3913      rtx x;
3914      if (DECL_INITIAL (decl) == 0)
3915	/* Error message was already done; now avoid a crash.  */
3916	x = gen_rtx_MEM (BLKmode, const0_rtx);
3917      else
3918	/* An initializer is going to decide the size of this array.
3919	   Until we know the size, represent its address with a reg.  */
3920	x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3921
3922      set_mem_attributes (x, decl, 1);
3923      SET_DECL_RTL (decl, x);
3924    }
3925  else if (DECL_MODE (decl) != BLKmode
3926	   /* If -ffloat-store, don't put explicit float vars
3927	      into regs.  */
3928	   && !(flag_float_store
3929		&& TREE_CODE (type) == REAL_TYPE)
3930	   && ! TREE_THIS_VOLATILE (decl)
3931	   && (DECL_REGISTER (decl) || optimize))
3932    {
3933      /* Automatic variable that can go in a register.  */
3934      int unsignedp = TREE_UNSIGNED (type);
3935      enum machine_mode reg_mode
3936	= promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3937
3938      SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
3939
3940      if (GET_CODE (DECL_RTL (decl)) == REG)
3941	REGNO_DECL (REGNO (DECL_RTL (decl))) = decl;
3942      else if (GET_CODE (DECL_RTL (decl)) == CONCAT)
3943	{
3944	  REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 0))) = decl;
3945	  REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 1))) = decl;
3946	}
3947
3948      mark_user_reg (DECL_RTL (decl));
3949
3950      if (POINTER_TYPE_P (type))
3951	mark_reg_pointer (DECL_RTL (decl),
3952			  TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3953
3954      maybe_set_unchanging (DECL_RTL (decl), decl);
3955
3956      /* If something wants our address, try to use ADDRESSOF.  */
3957      if (TREE_ADDRESSABLE (decl))
3958	put_var_into_stack (decl);
3959    }
3960
3961  else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3962	   && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3963		 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3964					  STACK_CHECK_MAX_VAR_SIZE)))
3965    {
3966      /* Variable of fixed size that goes on the stack.  */
3967      rtx oldaddr = 0;
3968      rtx addr;
3969      rtx x;
3970
3971      /* If we previously made RTL for this decl, it must be an array
3972	 whose size was determined by the initializer.
3973	 The old address was a register; set that register now
3974	 to the proper address.  */
3975      if (DECL_RTL_SET_P (decl))
3976	{
3977	  if (GET_CODE (DECL_RTL (decl)) != MEM
3978	      || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3979	    abort ();
3980	  oldaddr = XEXP (DECL_RTL (decl), 0);
3981	}
3982
3983      /* Set alignment we actually gave this decl.  */
3984      DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3985			   : GET_MODE_BITSIZE (DECL_MODE (decl)));
3986      DECL_USER_ALIGN (decl) = 0;
3987
3988      x = assign_temp (decl, 1, 1, 1);
3989      set_mem_attributes (x, decl, 1);
3990      SET_DECL_RTL (decl, x);
3991
3992      if (oldaddr)
3993	{
3994	  addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3995	  if (addr != oldaddr)
3996	    emit_move_insn (oldaddr, addr);
3997	}
3998    }
3999  else
4000    /* Dynamic-size object: must push space on the stack.  */
4001    {
4002      rtx address, size, x;
4003
4004      /* Record the stack pointer on entry to the block, if we have
4005	 not already done so.  */
4006      do_pending_stack_adjust ();
4007      save_stack_pointer ();
4008
4009      /* In function-at-a-time mode, variable_size doesn't expand this,
4010	 so do it now.  */
4011      if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
4012	expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
4013		     const0_rtx, VOIDmode, 0);
4014
4015      /* Compute the variable's size, in bytes.  */
4016      size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
4017      free_temp_slots ();
4018
4019      /* Allocate space on the stack for the variable.  Note that
4020	 DECL_ALIGN says how the variable is to be aligned and we
4021	 cannot use it to conclude anything about the alignment of
4022	 the size.  */
4023      address = allocate_dynamic_stack_space (size, NULL_RTX,
4024					      TYPE_ALIGN (TREE_TYPE (decl)));
4025
4026      /* Reference the variable indirectly through that rtx.  */
4027      x = gen_rtx_MEM (DECL_MODE (decl), address);
4028      set_mem_attributes (x, decl, 1);
4029      SET_DECL_RTL (decl, x);
4030
4031
4032      /* Indicate the alignment we actually gave this variable.  */
4033#ifdef STACK_BOUNDARY
4034      DECL_ALIGN (decl) = STACK_BOUNDARY;
4035#else
4036      DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
4037#endif
4038      DECL_USER_ALIGN (decl) = 0;
4039    }
4040}
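
/* Illustrative C declarations (editorial example) for the three
   storage cases handled above:

       register int i;          a pseudo REG, possibly promoted
       char buf[32];            fixed size, stack slot via assign_temp
       char vla[n];             dynamic size, via allocate_dynamic_stack_space

   with the stack pointer saved and restored around the enclosing
   block in the dynamic case.  */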
4041
4042/* Emit code to perform the initialization of a declaration DECL.  */
4043
4044void
4045expand_decl_init (decl)
4046     tree decl;
4047{
4048  int was_used = TREE_USED (decl);
4049
4050  /* If this is a CONST_DECL, we don't have to generate any code.  Likewise
4051     for static decls.  */
4052  if (TREE_CODE (decl) == CONST_DECL
4053      || TREE_STATIC (decl))
4054    return;
4055
4056  /* Compute and store the initial value now.  */
4057
4058  if (DECL_INITIAL (decl) == error_mark_node)
4059    {
4060      enum tree_code code = TREE_CODE (TREE_TYPE (decl));
4061
4062      if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
4063	  || code == POINTER_TYPE || code == REFERENCE_TYPE)
4064	expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
4065			   0, 0);
4066      emit_queue ();
4067    }
4068  else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
4069    {
4070      emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
4071      expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
4072      emit_queue ();
4073    }
4074
4075  /* Don't let the initialization count as "using" the variable.  */
4076  TREE_USED (decl) = was_used;
4077
4078  /* Free any temporaries we made while initializing the decl.  */
4079  preserve_temp_slots (NULL_RTX);
4080  free_temp_slots ();
4081}
4082
4083/* CLEANUP is an expression to be executed at exit from this binding contour;
4084   for example, in C++, it might call the destructor for this variable.
4085
4086   We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4087   CLEANUP multiple times, and have the correct semantics.  This
4088   happens in exception handling, and for gotos, returns, and breaks
4089   that leave the current scope.
4090
4091   If CLEANUP is nonzero and DECL is zero, we record a cleanup
4092   that is not associated with any particular variable.  */
4093
4094int
4095expand_decl_cleanup (decl, cleanup)
4096     tree decl, cleanup;
4097{
4098  struct nesting *thisblock;
4099
4100  /* Error if we are not in any block.  */
4101  if (cfun == 0 || block_stack == 0)
4102    return 0;
4103
4104  thisblock = block_stack;
4105
4106  /* Record the cleanup if there is one.  */
4107
4108  if (cleanup != 0)
4109    {
4110      tree t;
4111      rtx seq;
4112      tree *cleanups = &thisblock->data.block.cleanups;
4113      int cond_context = conditional_context ();
4114
4115      if (cond_context)
4116	{
4117	  rtx flag = gen_reg_rtx (word_mode);
4118	  rtx set_flag_0;
4119	  tree cond;
4120
4121	  start_sequence ();
4122	  emit_move_insn (flag, const0_rtx);
4123	  set_flag_0 = get_insns ();
4124	  end_sequence ();
4125
4126	  thisblock->data.block.last_unconditional_cleanup
4127	    = emit_insns_after (set_flag_0,
4128				thisblock->data.block.last_unconditional_cleanup);
4129
4130	  emit_move_insn (flag, const1_rtx);
4131
4132	  cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
4133	  SET_DECL_RTL (cond, flag);
4134
4135	  /* Conditionalize the cleanup.  */
4136	  cleanup = build (COND_EXPR, void_type_node,
4137			   truthvalue_conversion (cond),
4138			   cleanup, integer_zero_node);
4139	  cleanup = fold (cleanup);
4140
4141	  cleanups = thisblock->data.block.cleanup_ptr;
4142	}
4143
4144      cleanup = unsave_expr (cleanup);
4145
4146      t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4147
4148      if (! cond_context)
4149	/* If this block has a cleanup, it belongs in stack_block_stack.  */
4150	stack_block_stack = thisblock;
4151
4152      if (cond_context)
4153	{
4154	  start_sequence ();
4155	}
4156
4157      if (! using_eh_for_cleanups_p)
4158	TREE_ADDRESSABLE (t) = 1;
4159      else
4160	expand_eh_region_start ();
4161
4162      if (cond_context)
4163	{
4164	  seq = get_insns ();
4165	  end_sequence ();
4166	  if (seq)
4167	    thisblock->data.block.last_unconditional_cleanup
4168	      = emit_insns_after (seq,
4169				  thisblock->data.block.last_unconditional_cleanup);
4170	}
4171      else
4172	{
4173	  thisblock->data.block.last_unconditional_cleanup
4174	    = get_last_insn ();
4175	  /* When we insert instructions after the last unconditional cleanup,
4176	     we don't adjust last_insn.  That means that a later add_insn will
4177	     clobber the instructions we've just added.  The easiest way to
4178	     fix this is to just insert another instruction here, so that the
4179	     instructions inserted after the last unconditional cleanup are
4180	     never the last instruction.  */
4181	  emit_note (NULL, NOTE_INSN_DELETED);
4182	  thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
4183	}
4184    }
4185  return 1;
4186}
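
/* Usage sketch (hypothetical front-end code; build_cleanup here is an
   assumed helper, not a function of this file): for a declaration
   whose type has a destructor, a C++ front end might write

       expand_decl (decl);
       expand_decl_init (decl);
       expand_decl_cleanup (decl, build_cleanup (decl));

   so the cleanup runs on every exit from the binding contour,
   including gotos, returns, and exceptions.  */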
4187
4188/* Like expand_decl_cleanup, but maybe only run the cleanup if an exception
4189   is thrown.  */
4190
4191int
4192expand_decl_cleanup_eh (decl, cleanup, eh_only)
4193     tree decl, cleanup;
4194     int eh_only;
4195{
4196  int ret = expand_decl_cleanup (decl, cleanup);
4197  if (cleanup && ret)
4198    {
4199      tree node = block_stack->data.block.cleanups;
4200      CLEANUP_EH_ONLY (node) = eh_only;
4201    }
4202  return ret;
4203}
4204
4205/* DECL is an anonymous union.  CLEANUP is a cleanup for DECL.
4206   DECL_ELTS is the list of elements that belong to DECL's type.
4207   In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup.  */
4208
4209void
4210expand_anon_union_decl (decl, cleanup, decl_elts)
4211     tree decl, cleanup, decl_elts;
4212{
4213  struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4214  rtx x;
4215  tree t;
4216
4217  /* If any of the elements are addressable, so is the entire union.  */
4218  for (t = decl_elts; t; t = TREE_CHAIN (t))
4219    if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4220      {
4221	TREE_ADDRESSABLE (decl) = 1;
4222	break;
4223      }
4224
4225  expand_decl (decl);
4226  expand_decl_cleanup (decl, cleanup);
4227  x = DECL_RTL (decl);
4228
4229  /* Go through the elements, assigning RTL to each.  */
4230  for (t = decl_elts; t; t = TREE_CHAIN (t))
4231    {
4232      tree decl_elt = TREE_VALUE (t);
4233      tree cleanup_elt = TREE_PURPOSE (t);
4234      enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4235
4236      /* If any of the elements are used, so is the entire
4237	 union.  */
4238      if (TREE_USED (decl_elt))
4239	TREE_USED (decl) = 1;
4240
4241      /* Propagate the union's alignment to the elements.  */
4242      DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4243      DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4244
4245      /* If the element has BLKmode and the union doesn't, the union is
4246         aligned such that the element doesn't need to have BLKmode, so
4247         change the element's mode to the appropriate one for its size.  */
4248      if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4249	DECL_MODE (decl_elt) = mode
4250	  = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4251
4252      /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4253         instead create a new MEM rtx with the proper mode.  */
4254      if (GET_CODE (x) == MEM)
4255	{
4256	  if (mode == GET_MODE (x))
4257	    SET_DECL_RTL (decl_elt, x);
4258	  else
4259	    SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
4260	}
4261      else if (GET_CODE (x) == REG)
4262	{
4263	  if (mode == GET_MODE (x))
4264	    SET_DECL_RTL (decl_elt, x);
4265	  else
4266	    SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
4267	}
4268      else
4269	abort ();
4270
4271      /* Record the cleanup if there is one.  */
4272
4273      if (cleanup != 0)
4274	thisblock->data.block.cleanups
4275	  = tree_cons (decl_elt, cleanup_elt,
4276		       thisblock->data.block.cleanups);
4277    }
4278}
4279
4280/* Expand a list of cleanups LIST.
4281   Elements may be expressions or may be nested lists.
4282
4283   If DONT_DO is nonnull, then any list-element
4284   whose TREE_PURPOSE matches DONT_DO is omitted.
4285   This is sometimes used to avoid a cleanup associated with
4286   a value that is being returned out of the scope.
4287
4288   If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
4289   goto and handle protection regions specially in that case.
4290
4291   If REACHABLE, we emit code, otherwise just inform the exception handling
4292   code about this finalization.  */
4293
4294static void
4295expand_cleanups (list, dont_do, in_fixup, reachable)
4296     tree list;
4297     tree dont_do;
4298     int in_fixup;
4299     int reachable;
4300{
4301  tree tail;
4302  for (tail = list; tail; tail = TREE_CHAIN (tail))
4303    if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4304      {
4305	if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4306	  expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4307	else
4308	  {
4309	    if (! in_fixup && using_eh_for_cleanups_p)
4310	      expand_eh_region_end_cleanup (TREE_VALUE (tail));
4311
4312	    if (reachable && !CLEANUP_EH_ONLY (tail))
4313	      {
4314		/* Cleanups may be run multiple times.  For example,
4315		   when exiting a binding contour, we expand the
4316		   cleanups associated with that contour.  When a goto
4317		   within that binding contour has a target outside that
4318		   contour, it will expand all cleanups from its scope to
4319		   the target.  Though the cleanups are expanded multiple
4320		   times, the control paths are non-overlapping so the
4321		   cleanups will not be executed twice.  */
4322
4323		/* We may need to protect from outer cleanups.  */
4324		if (in_fixup && using_eh_for_cleanups_p)
4325		  {
4326		    expand_eh_region_start ();
4327
4328		    expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4329
4330		    expand_eh_region_end_fixup (TREE_VALUE (tail));
4331		  }
4332		else
4333		  expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4334
4335		free_temp_slots ();
4336	      }
4337	  }
4338      }
4339}
4340
4341/* Mark the context we are emitting RTL for as a conditional
4342   context, so that any cleanup actions we register with
4343   expand_decl_cleanup will be properly conditionalized when those
4344   cleanup actions are later performed.  Must be called before any
4345   expression (tree) is expanded that is within a conditional context.  */
4346
4347void
4348start_cleanup_deferral ()
4349{
4350  /* block_stack can be NULL if we are inside the parameter list.  It is
4351     OK to do nothing, because cleanups aren't possible here.  */
4352  if (block_stack)
4353    ++block_stack->data.block.conditional_code;
4354}
4355
4356/* Mark the end of a conditional region of code.  Because cleanup
4357   deferrals may be nested, we may still be in a conditional region
4358   after we end the currently deferred cleanups; only after we end all
4359   deferred cleanups are we back in unconditional code.  */
4360
4361void
4362end_cleanup_deferral ()
4363{
4364  /* block_stack can be NULL if we are inside the parameter list.  It is
4365     OK to do nothing, because cleanups aren't possible here.  */
4366  if (block_stack)
4367    --block_stack->data.block.conditional_code;
4368}
4369
4370/* Move all cleanups from the current block_stack
4371   to the containing block_stack, where they are assumed to
4372   have been created.  If anything can cause a temporary to
4373   be created, but not expanded for more than one level of
4374   block_stacks, then this code will have to change.  */
4375
4376void
4377move_cleanups_up ()
4378{
4379  struct nesting *block = block_stack;
4380  struct nesting *outer = block->next;
4381
4382  outer->data.block.cleanups
4383    = chainon (block->data.block.cleanups,
4384	       outer->data.block.cleanups);
4385  block->data.block.cleanups = 0;
4386}
4387
4388tree
4389last_cleanup_this_contour ()
4390{
4391  if (block_stack == 0)
4392    return 0;
4393
4394  return block_stack->data.block.cleanups;
4395}
4396
4397/* Return 1 if there are any pending cleanups at this point.
4398   If THIS_CONTOUR is nonzero, check the current contour as well.
4399   Otherwise, look only at the contours that enclose this one.  */
4400
4401int
4402any_pending_cleanups (this_contour)
4403     int this_contour;
4404{
4405  struct nesting *block;
4406
4407  if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4408    return 0;
4409
4410  if (this_contour && block_stack->data.block.cleanups != NULL)
4411    return 1;
4412  if (block_stack->data.block.cleanups == 0
4413      && block_stack->data.block.outer_cleanups == 0)
4414    return 0;
4415
4416  for (block = block_stack->next; block; block = block->next)
4417    if (block->data.block.cleanups != 0)
4418      return 1;
4419
4420  return 0;
4421}
4422
4423/* Enter a case (Pascal) or switch (C) statement.
4424   Push a block onto case_stack and nesting_stack
4425   to accumulate the case-labels that are seen
4426   and to record the labels generated for the statement.
4427
4428   EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4429   Otherwise, this construct is transparent for `exit_something'.
4430
4431   EXPR is the index-expression to be dispatched on.
4432   TYPE is its nominal type.  We could simply convert EXPR to this type,
4433   but instead we take short cuts.  */
4434
4435void
4436expand_start_case (exit_flag, expr, type, printname)
4437     int exit_flag;
4438     tree expr;
4439     tree type;
4440     const char *printname;
4441{
4442  struct nesting *thiscase = ALLOC_NESTING ();
4443
4444  /* Make an entry on case_stack for the case we are entering.  */
4445
4446  thiscase->next = case_stack;
4447  thiscase->all = nesting_stack;
4448  thiscase->depth = ++nesting_depth;
4449  thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4450  thiscase->data.case_stmt.case_list = 0;
4451  thiscase->data.case_stmt.index_expr = expr;
4452  thiscase->data.case_stmt.nominal_type = type;
4453  thiscase->data.case_stmt.default_label = 0;
4454  thiscase->data.case_stmt.printname = printname;
4455  thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4456  case_stack = thiscase;
4457  nesting_stack = thiscase;
4458
4459  do_pending_stack_adjust ();
4460
4461  /* Make sure case_stmt.start points to something that won't
4462     need any transformation before expand_end_case.  */
4463  if (GET_CODE (get_last_insn ()) != NOTE)
4464    emit_note (NULL, NOTE_INSN_DELETED);
4465
4466  thiscase->data.case_stmt.start = get_last_insn ();
4467
4468  start_cleanup_deferral ();
4469}
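
/* Protocol sketch (editorial example): a front end expanding a C
   switch statement makes this sequence of calls:

       expand_start_case (1, index_expr, type, "switch statement");
       ... one pushcase or pushcase_range call per label seen ...
       expand_end_case (index_expr);

   with the case-label calls happening as the body is parsed or
   walked.  */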
4470
4471/* Start a "dummy case statement" within which case labels are invalid
4472   and are not connected to any larger real case statement.
4473   This can be used if you don't want to let a case statement jump
4474   into the middle of certain kinds of constructs.  */
4475
4476void
4477expand_start_case_dummy ()
4478{
4479  struct nesting *thiscase = ALLOC_NESTING ();
4480
4481  /* Make an entry on case_stack for the dummy.  */
4482
4483  thiscase->next = case_stack;
4484  thiscase->all = nesting_stack;
4485  thiscase->depth = ++nesting_depth;
4486  thiscase->exit_label = 0;
4487  thiscase->data.case_stmt.case_list = 0;
4488  thiscase->data.case_stmt.start = 0;
4489  thiscase->data.case_stmt.nominal_type = 0;
4490  thiscase->data.case_stmt.default_label = 0;
4491  case_stack = thiscase;
4492  nesting_stack = thiscase;
4493  start_cleanup_deferral ();
4494}
4495
4496/* End a dummy case statement.  */
4497
4498void
4499expand_end_case_dummy ()
4500{
4501  end_cleanup_deferral ();
4502  POPSTACK (case_stack);
4503}
4504
4505/* Return the data type of the index-expression
4506   of the innermost case statement, or null if none.  */
4507
4508tree
4509case_index_expr_type ()
4510{
4511  if (case_stack)
4512    return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4513  return 0;
4514}
4515
4516static void
4517check_seenlabel ()
4518{
4519  /* If this is the first label, warn if any insns have been emitted.  */
4520  if (case_stack->data.case_stmt.line_number_status >= 0)
4521    {
4522      rtx insn;
4523
4524      restore_line_number_status
4525	(case_stack->data.case_stmt.line_number_status);
4526      case_stack->data.case_stmt.line_number_status = -1;
4527
4528      for (insn = case_stack->data.case_stmt.start;
4529	   insn;
4530	   insn = NEXT_INSN (insn))
4531	{
4532	  if (GET_CODE (insn) == CODE_LABEL)
4533	    break;
4534	  if (GET_CODE (insn) != NOTE
4535	      && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4536	    {
4537	      do
4538		insn = PREV_INSN (insn);
4539	      while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4540
4541	      /* If insn is zero, then there must have been a syntax error.  */
4542	      if (insn)
4543		warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4544					    NOTE_LINE_NUMBER (insn),
4545					    "unreachable code at beginning of %s",
4546					    case_stack->data.case_stmt.printname);
4547	      break;
4548	    }
4549	}
4550    }
4551}
4552
4553/* Accumulate one case or default label inside a case or switch statement.
4554   VALUE is the value of the case (a null pointer, for a default label).
4555   The function CONVERTER, when applied to arguments T and V,
4556   converts the value V to the type T.
4557
4558   If not currently inside a case or switch statement, return 1 and do
4559   nothing.  The caller will print a language-specific error message.
4560   If VALUE is a duplicate or overlaps, return 2 and do nothing
4561   except store the (first) duplicate node in *DUPLICATE.
4562   If VALUE is out of range, return 3 and do nothing.
4563   If we are jumping into the scope of a cleanup or var-sized array, return 5.
4564   Return 0 on success.
4565
4566   Extended to handle range statements.  */
4567
4568int
4569pushcase (value, converter, label, duplicate)
4570     tree value;
4571     tree (*converter) PARAMS ((tree, tree));
4572     tree label;
4573     tree *duplicate;
4574{
4575  tree index_type;
4576  tree nominal_type;
4577
4578  /* Fail if not inside a real case statement.  */
4579  if (! (case_stack && case_stack->data.case_stmt.start))
4580    return 1;
4581
4582  if (stack_block_stack
4583      && stack_block_stack->depth > case_stack->depth)
4584    return 5;
4585
4586  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4587  nominal_type = case_stack->data.case_stmt.nominal_type;
4588
4589  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
4590  if (index_type == error_mark_node)
4591    return 0;
4592
4593  /* Convert VALUE to the type in which the comparisons are nominally done.  */
4594  if (value != 0)
4595    value = (*converter) (nominal_type, value);
4596
4597  check_seenlabel ();
4598
4599  /* Fail if this value is out of range for the actual type of the index
4600     (which may be narrower than NOMINAL_TYPE).  */
4601  if (value != 0
4602      && (TREE_CONSTANT_OVERFLOW (value)
4603	  || ! int_fits_type_p (value, index_type)))
4604    return 3;
4605
4606  return add_case_node (value, value, label, duplicate);
4607}
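
/* Caller sketch (hypothetical front-end diagnostics, not from this
   file) showing how the return codes are typically consumed:

       switch (pushcase (value, convert, label, &duplicate))
         {
         case 1:  error ("case label not within a switch statement");  break;
         case 2:  error_with_decl (duplicate, "duplicate case value");  break;
         case 3:  error ("case value out of range");  break;
         case 5:  error ("case label within scope of cleanup or variable array");  break;
         }
*/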
4608
4609/* Like pushcase but this case applies to all values between VALUE1 and
4610   VALUE2 (inclusive).  If VALUE1 is NULL, the range starts at the lowest
4611   value of the index type and ends at VALUE2.  If VALUE2 is NULL, the range
4612   starts at VALUE1 and ends at the highest value of the index type.
4613   If both are NULL, this case applies to all values.
4614
4615   The return value is the same as that of pushcase but there is one
4616   additional error code: 4 means the specified range was empty.  */
4617
4618int
4619pushcase_range (value1, value2, converter, label, duplicate)
4620     tree value1, value2;
4621     tree (*converter) PARAMS ((tree, tree));
4622     tree label;
4623     tree *duplicate;
4624{
4625  tree index_type;
4626  tree nominal_type;
4627
4628  /* Fail if not inside a real case statement.  */
4629  if (! (case_stack && case_stack->data.case_stmt.start))
4630    return 1;
4631
4632  if (stack_block_stack
4633      && stack_block_stack->depth > case_stack->depth)
4634    return 5;
4635
4636  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4637  nominal_type = case_stack->data.case_stmt.nominal_type;
4638
4639  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
4640  if (index_type == error_mark_node)
4641    return 0;
4642
4643  check_seenlabel ();
4644
4645  /* Convert VALUEs to type in which the comparisons are nominally done
4646     and replace any unspecified value with the corresponding bound.  */
4647  if (value1 == 0)
4648    value1 = TYPE_MIN_VALUE (index_type);
4649  if (value2 == 0)
4650    value2 = TYPE_MAX_VALUE (index_type);
4651
4652  /* Fail if the range is empty.  Do this before any conversion since
4653     we want to allow out-of-range empty ranges.  */
4654  if (value2 != 0 && tree_int_cst_lt (value2, value1))
4655    return 4;
4656
4657  /* If the max was unbounded, use the max of the nominal_type we are
4658     converting to.  Do this after the < check above to suppress false
4659     positives.  */
4660  if (value2 == 0)
4661    value2 = TYPE_MAX_VALUE (nominal_type);
4662
4663  value1 = (*converter) (nominal_type, value1);
4664  value2 = (*converter) (nominal_type, value2);
4665
4666  /* Fail if these values are out of range.  */
4667  if (TREE_CONSTANT_OVERFLOW (value1)
4668      || ! int_fits_type_p (value1, index_type))
4669    return 3;
4670
4671  if (TREE_CONSTANT_OVERFLOW (value2)
4672      || ! int_fits_type_p (value2, index_type))
4673    return 3;
4674
4675  return add_case_node (value1, value2, label, duplicate);
4676}
4677
4678/* Do the actual insertion of a case label for pushcase and pushcase_range
4679   into case_stack->data.case_stmt.case_list.  Use an AVL tree to avoid
4680   slowdown for large switch statements.  */
4681
4682int
4683add_case_node (low, high, label, duplicate)
4684     tree low, high;
4685     tree label;
4686     tree *duplicate;
4687{
4688  struct case_node *p, **q, *r;
4689
4690  /* If there's no HIGH value, then this is not a case range; it's
4691     just a simple case label.  But that's just a degenerate case
4692     range.  */
4693  if (!high)
4694    high = low;
4695
4696  /* Handle default labels specially.  */
4697  if (!high && !low)
4698    {
4699      if (case_stack->data.case_stmt.default_label != 0)
4700	{
4701	  *duplicate = case_stack->data.case_stmt.default_label;
4702	  return 2;
4703	}
4704      case_stack->data.case_stmt.default_label = label;
4705      expand_label (label);
4706      return 0;
4707    }
4708
4709  q = &case_stack->data.case_stmt.case_list;
4710  p = *q;
4711
4712  while ((r = *q))
4713    {
4714      p = r;
4715
4716      /* Keep going past elements distinctly greater than HIGH.  */
4717      if (tree_int_cst_lt (high, p->low))
4718	q = &p->left;
4719
4720      /* or distinctly less than LOW.  */
4721      else if (tree_int_cst_lt (p->high, low))
4722	q = &p->right;
4723
4724      else
4725	{
4726	  /* We have an overlap; this is an error.  */
4727	  *duplicate = p->code_label;
4728	  return 2;
4729	}
4730    }
4731
4732  /* Add this label to the chain, and succeed.  */
4733
4734  r = (struct case_node *) xmalloc (sizeof (struct case_node));
4735  r->low = low;
4736
4737  /* If the bounds are equal, turn this into the one-value case.  */
4738  if (tree_int_cst_equal (low, high))
4739    r->high = r->low;
4740  else
4741    r->high = high;
4742
4743  r->code_label = label;
4744  expand_label (label);
4745
4746  *q = r;
4747  r->parent = p;
4748  r->left = 0;
4749  r->right = 0;
4750  r->balance = 0;
4751
4752  while (p)
4753    {
4754      struct case_node *s;
4755
4756      if (r == p->left)
4757	{
4758	  int b;
4759
4760	  if (! (b = p->balance))
4761	    /* Growth propagation from left side.  */
4762	    p->balance = -1;
4763	  else if (b < 0)
4764	    {
4765	      if (r->balance < 0)
4766		{
4767		  /* R-Rotation */
4768		  if ((p->left = s = r->right))
4769		    s->parent = p;
4770
4771		  r->right = p;
4772		  p->balance = 0;
4773		  r->balance = 0;
4774		  s = p->parent;
4775		  p->parent = r;
4776
4777		  if ((r->parent = s))
4778		    {
4779		      if (s->left == p)
4780			s->left = r;
4781		      else
4782			s->right = r;
4783		    }
4784		  else
4785		    case_stack->data.case_stmt.case_list = r;
4786		}
4787	      else
4788		/* r->balance == +1 */
4789		{
4790		  /* LR-Rotation */
4791
4792		  int b2;
4793		  struct case_node *t = r->right;
4794
4795		  if ((p->left = s = t->right))
4796		    s->parent = p;
4797
4798		  t->right = p;
4799		  if ((r->right = s = t->left))
4800		    s->parent = r;
4801
4802		  t->left = r;
4803		  b = t->balance;
4804		  b2 = b < 0;
4805		  p->balance = b2;
4806		  b2 = -b2 - b;
4807		  r->balance = b2;
4808		  t->balance = 0;
4809		  s = p->parent;
4810		  p->parent = t;
4811		  r->parent = t;
4812
4813		  if ((t->parent = s))
4814		    {
4815		      if (s->left == p)
4816			s->left = t;
4817		      else
4818			s->right = t;
4819		    }
4820		  else
4821		    case_stack->data.case_stmt.case_list = t;
4822		}
4823	      break;
4824	    }
4825
4826	  else
4827	    {
4828	      /* p->balance == +1; growth of left side balances the node.  */
4829	      p->balance = 0;
4830	      break;
4831	    }
4832	}
4833      else
4834	/* r == p->right */
4835	{
4836	  int b;
4837
4838	  if (! (b = p->balance))
4839	    /* Growth propagation from right side.  */
4840	    p->balance++;
4841	  else if (b > 0)
4842	    {
4843	      if (r->balance > 0)
4844		{
4845		  /* L-Rotation */
4846
4847		  if ((p->right = s = r->left))
4848		    s->parent = p;
4849
4850		  r->left = p;
4851		  p->balance = 0;
4852		  r->balance = 0;
4853		  s = p->parent;
4854		  p->parent = r;
4855		  if ((r->parent = s))
4856		    {
4857		      if (s->left == p)
4858			s->left = r;
4859		      else
4860			s->right = r;
4861		    }
4862
4863		  else
4864		    case_stack->data.case_stmt.case_list = r;
4865		}
4866
4867	      else
4868		/* r->balance == -1 */
4869		{
4870		  /* RL-Rotation */
4871		  int b2;
4872		  struct case_node *t = r->left;
4873
4874		  if ((p->right = s = t->left))
4875		    s->parent = p;
4876
4877		  t->left = p;
4878
4879		  if ((r->left = s = t->right))
4880		    s->parent = r;
4881
4882		  t->right = r;
4883		  b = t->balance;
4884		  b2 = b < 0;
4885		  r->balance = b2;
4886		  b2 = -b2 - b;
4887		  p->balance = b2;
4888		  t->balance = 0;
4889		  s = p->parent;
4890		  p->parent = t;
4891		  r->parent = t;
4892
4893		  if ((t->parent = s))
4894		    {
4895		      if (s->left == p)
4896			s->left = t;
4897		      else
4898			s->right = t;
4899		    }
4900
4901		  else
4902		    case_stack->data.case_stmt.case_list = t;
4903		}
4904	      break;
4905	    }
4906	  else
4907	    {
4908	      /* p->balance == -1; growth of right side balances the node.  */
4909	      p->balance = 0;
4910	      break;
4911	    }
4912	}
4913
4914      r = p;
4915      p = p->parent;
4916    }
4917
4918  return 0;
4919}
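
/* Rebalancing sketch (editorial diagram): the R-rotation performed
   above, with P the node whose balance dropped to -2 and R its left
   child:

           P                  R
          / \                / \
         R   c     ==>      a   P
        / \                    / \
       a   b                  b   c

   The LR and RL double rotations handle the cases where the heavy
   grandchild lies on the inner side; afterward all the affected nodes
   have balance factors of -1, 0, or +1 again.  */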
4920
4921/* Returns the number of possible values of TYPE.
4922   Returns -1 if the number is unknown, variable, or if the number does not
4923   fit in a HOST_WIDE_INT.
4924   Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4925   do not increase monotonically (there may be duplicates);
4926   to 1 if the values increase monotonically, but not always by 1;
4927   otherwise sets it to 0.  */
4928
4929HOST_WIDE_INT
4930all_cases_count (type, sparseness)
4931     tree type;
4932     int *sparseness;
4933{
4934  tree t;
4935  HOST_WIDE_INT count, minval, lastval;
4936
4937  *sparseness = 0;
4938
4939  switch (TREE_CODE (type))
4940    {
4941    case BOOLEAN_TYPE:
4942      count = 2;
4943      break;
4944
4945    case CHAR_TYPE:
4946      count = 1 << BITS_PER_UNIT;
4947      break;
4948
4949    default:
4950    case INTEGER_TYPE:
4951      if (TYPE_MAX_VALUE (type) != 0
4952	  && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4953				    TYPE_MIN_VALUE (type))))
4954	  && 0 != (t = fold (build (PLUS_EXPR, type, t,
4955				    convert (type, integer_zero_node))))
4956	  && host_integerp (t, 1))
4957	count = tree_low_cst (t, 1);
4958      else
4959	return -1;
4960      break;
4961
4962    case ENUMERAL_TYPE:
4963      /* Don't waste time with enumeral types with huge values.  */
4964      if (! host_integerp (TYPE_MIN_VALUE (type), 0)
4965	  || TYPE_MAX_VALUE (type) == 0
4966	  || ! host_integerp (TYPE_MAX_VALUE (type), 0))
4967	return -1;
4968
4969      lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
4970      count = 0;
4971
4972      for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4973	{
4974	  HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
4975
4976	  if (*sparseness == 2 || thisval <= lastval)
4977	    *sparseness = 2;
4978	  else if (thisval != minval + count)
4979	    *sparseness = 1;
4980
4981	  lastval = thisval;
4982	  count++;
4983	}
4984    }
4985
4986  return count;
4987}
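
/* Illustrative enumerations (editorial example) for the SPARSENESS
   classification:

       enum a { X, Y, Z };          values 0,1,2   count 3, sparseness 0
       enum b { P = 1, Q = 4 };     gap            count 2, sparseness 1
       enum c { M = 1, N = 1 };     duplicate      count 2, sparseness 2
*/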
4988
4989#define BITARRAY_TEST(ARRAY, INDEX) \
4990  ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4991			  & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4992#define BITARRAY_SET(ARRAY, INDEX) \
4993  ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4994			  |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
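
/* Usage sketch (a minimal sketch of how a caller such as
   check_for_full_enumeration_handling below might use these macros,
   assuming a COUNT returned by all_cases_count):

       bytes_needed = (count + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
       cases_seen = (unsigned char *) xcalloc (bytes_needed, 1);
       mark_seen_cases (type, cases_seen, count, sparseness);
       if (! BITARRAY_TEST (cases_seen, i))
         ...
*/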
4995
4996/* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4997   with the case values we have seen, assuming the case expression
4998   has the given TYPE.
4999   SPARSENESS is as determined by all_cases_count.
5000
5001   The time needed is proportional to COUNT, unless
5002   SPARSENESS is 2, in which case quadratic time is needed.  */
5003
5004void
5005mark_seen_cases (type, cases_seen, count, sparseness)
5006     tree type;
5007     unsigned char *cases_seen;
5008     HOST_WIDE_INT count;
5009     int sparseness;
5010{
5011  tree next_node_to_try = NULL_TREE;
5012  HOST_WIDE_INT next_node_offset = 0;
5013
5014  struct case_node *n, *root = case_stack->data.case_stmt.case_list;
5015  tree val = make_node (INTEGER_CST);
5016
5017  TREE_TYPE (val) = type;
5018  if (! root)
5019    /* Do nothing.  */
5020    ;
5021  else if (sparseness == 2)
5022    {
5023      tree t;
5024      unsigned HOST_WIDE_INT xlo;
5025
5026      /* This less efficient loop is only needed to handle
5027	 duplicate case values (multiple enum constants
5028	 with the same value).  */
5029      TREE_TYPE (val) = TREE_TYPE (root->low);
5030      for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
5031	   t = TREE_CHAIN (t), xlo++)
5032	{
5033	  TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
5034	  TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
5035	  n = root;
5036	  do
5037	    {
5038	      /* Keep going past elements distinctly greater than VAL.  */
5039	      if (tree_int_cst_lt (val, n->low))
5040		n = n->left;
5041
5042	      /* or distinctly less than VAL.  */
5043	      else if (tree_int_cst_lt (n->high, val))
5044		n = n->right;
5045
5046	      else
5047		{
5048		  /* We have found a matching range.  */
5049		  BITARRAY_SET (cases_seen, xlo);
5050		  break;
5051		}
5052	    }
5053	  while (n);
5054	}
5055    }
5056  else
5057    {
5058      if (root->left)
5059	case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
5060
5061      for (n = root; n; n = n->right)
5062	{
5063	  TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5064	  TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
5065	  while (! tree_int_cst_lt (n->high, val))
5066	    {
5067	      /* Calculate (into xlo) the "offset" of the integer (val).
5068		 The element with lowest value has offset 0, the next smallest
5069		 element has offset 1, etc.  */
5070
5071	      unsigned HOST_WIDE_INT xlo;
5072	      HOST_WIDE_INT xhi;
5073	      tree t;
5074
5075	      if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5076		{
5077		  /* The TYPE_VALUES will be in increasing order, so
5078		     start searching where we last ended.  */
5079		  t = next_node_to_try;
5080		  xlo = next_node_offset;
5081		  xhi = 0;
5082		  for (;;)
5083		    {
5084		      if (t == NULL_TREE)
5085			{
5086			  t = TYPE_VALUES (type);
5087			  xlo = 0;
5088			}
5089		      if (tree_int_cst_equal (val, TREE_VALUE (t)))
5090			{
5091			  next_node_to_try = TREE_CHAIN (t);
5092			  next_node_offset = xlo + 1;
5093			  break;
5094			}
5095		      xlo++;
5096		      t = TREE_CHAIN (t);
5097		      if (t == next_node_to_try)
5098			{
5099			  xlo = -1;
5100			  break;
5101			}
5102		    }
5103		}
5104	      else
5105		{
5106		  t = TYPE_MIN_VALUE (type);
5107		  if (t)
5108		    neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5109				&xlo, &xhi);
5110		  else
5111		    xlo = xhi = 0;
5112		  add_double (xlo, xhi,
5113			      TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5114			      &xlo, &xhi);
5115		}
5116
5117	      if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
5118		BITARRAY_SET (cases_seen, xlo);
5119
5120	      add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5121			  1, 0,
5122			  &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5123	    }
5124	}
5125    }
5126}
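
/* In the non-enumeral path above, the "offset" is simply
   VAL - TYPE_MIN_VALUE (type), computed with neg_double and add_double so
   that it is exact even for double-word constants; e.g. with a minimum
   value of -2, the case value 1 lands on bit 3 of CASES_SEEN.  */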
5127
5128/* Called when the index of a switch statement is an enumerated type
5129   and there is no default label.
5130
5131   Checks that all enumeration literals are covered by the case
5132   expressions of a switch.  Also, warn if there are any extra
5133   switch cases that are *not* elements of the enumerated type.
5134
5135   If all enumeration literals were covered by the case expressions,
5136   turn one of the expressions into the default expression since it should
5137   not be possible to fall through such a switch.  */
5138
5139void
5140check_for_full_enumeration_handling (type)
5141     tree type;
5142{
5143  struct case_node *n;
5144  tree chain;
5145
5146  /* True iff the selector type is a numbered set mode.  */
5147  int sparseness = 0;
5148
5149  /* The number of possible selector values.  */
5150  HOST_WIDE_INT size;
5151
5152  /* For each possible selector value, a one iff it has been matched
5153     by a case value alternative.  */
5154  unsigned char *cases_seen;
5155
5156  /* The allocated size of cases_seen, in chars.  */
5157  HOST_WIDE_INT bytes_needed;
5158
5159  if (! warn_switch)
5160    return;
5161
5162  size = all_cases_count (type, &sparseness);
5163  bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5164
5165  if (size > 0 && size < 600000
5166      /* We deliberately use calloc here, not xcalloc, so that we can suppress
5167	 this optimization if we don't have enough memory rather than
5168	 aborting, as xmalloc would do.  */
5169      && (cases_seen =
5170	  (unsigned char *) really_call_calloc (bytes_needed, 1)) != NULL)
5171    {
5172      HOST_WIDE_INT i;
5173      tree v = TYPE_VALUES (type);
5174
5175      /* The time complexity of this code is normally O(N), where
5176	 N is the number of members in the enumerated type.
5177	 However, if type is an ENUMERAL_TYPE whose values do not
5178	 increase monotonically, O(N*log(N)) time may be needed.  */
5179
5180      mark_seen_cases (type, cases_seen, size, sparseness);
5181
5182      for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5183	if (BITARRAY_TEST (cases_seen, i) == 0)
5184	  warning ("enumeration value `%s' not handled in switch",
5185		   IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5186
5187      free (cases_seen);
5188    }
5189
5190  /* Now we go the other way around; we warn if there are case
5191     expressions that don't correspond to enumerators.  This can
5192     occur since C and C++ don't enforce type-checking of
5193     assignments to enumeration variables.  */
5194
5195  if (case_stack->data.case_stmt.case_list
5196      && case_stack->data.case_stmt.case_list->left)
5197    case_stack->data.case_stmt.case_list
5198      = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5199  if (warn_switch)
5200    for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5201      {
5202	for (chain = TYPE_VALUES (type);
5203	     chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5204	     chain = TREE_CHAIN (chain))
5205	  ;
5206
5207	if (!chain)
5208	  {
5209	    if (TYPE_NAME (type) == 0)
5210	      warning ("case value `%ld' not in enumerated type",
5211		       (long) TREE_INT_CST_LOW (n->low));
5212	    else
5213	      warning ("case value `%ld' not in enumerated type `%s'",
5214		       (long) TREE_INT_CST_LOW (n->low),
5215		       IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5216					    == IDENTIFIER_NODE)
5217					   ? TYPE_NAME (type)
5218					   : DECL_NAME (TYPE_NAME (type))));
5219	  }
5220	if (!tree_int_cst_equal (n->low, n->high))
5221	  {
5222	    for (chain = TYPE_VALUES (type);
5223		 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5224		 chain = TREE_CHAIN (chain))
5225	      ;
5226
5227	    if (!chain)
5228	      {
5229		if (TYPE_NAME (type) == 0)
5230		  warning ("case value `%ld' not in enumerated type",
5231			   (long) TREE_INT_CST_LOW (n->high));
5232		else
5233		  warning ("case value `%ld' not in enumerated type `%s'",
5234			   (long) TREE_INT_CST_LOW (n->high),
5235			   IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5236						== IDENTIFIER_NODE)
5237					       ? TYPE_NAME (type)
5238					       : DECL_NAME (TYPE_NAME (type))));
5239	      }
5240	  }
5241      }
5242}
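
/* For instance, given `enum e { a, b, c };', a switch that handles only
   `a' and `b' draws "enumeration value `c' not handled in switch", while
   an extra `case 7:' draws "case value `7' not in enumerated type `e'".  */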
5243
5244/* Free CN, and its children.  */
5245
5246static void
5247free_case_nodes (cn)
5248     case_node_ptr cn;
5249{
5250  if (cn)
5251    {
5252      free_case_nodes (cn->left);
5253      free_case_nodes (cn->right);
5254      free (cn);
5255    }
5256}
5257
5258
5259
5260/* Terminate a case (Pascal) or switch (C) statement
5261   in which ORIG_INDEX is the expression to be tested.
5262   If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
5263   type as given in the source before any compiler conversions.
5264   Generate the code to test it and jump to the right place.  */
5265
5266void
5267expand_end_case_type (orig_index, orig_type)
5268     tree orig_index, orig_type;
5269{
5270  tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
5271  rtx default_label = 0;
5272  struct case_node *n;
5273  unsigned int count;
5274  rtx index;
5275  rtx table_label;
5276  int ncases;
5277  rtx *labelvec;
5278  int i;
5279  rtx before_case, end;
5280  struct nesting *thiscase = case_stack;
5281  tree index_expr, index_type;
5282  int unsignedp;
5283
5284  /* Don't crash due to previous errors.  */
5285  if (thiscase == NULL)
5286    return;
5287
5288  table_label = gen_label_rtx ();
5289  index_expr = thiscase->data.case_stmt.index_expr;
5290  index_type = TREE_TYPE (index_expr);
5291  unsignedp = TREE_UNSIGNED (index_type);
5292  if (orig_type == NULL)
5293    orig_type = TREE_TYPE (orig_index);
5294
5295  do_pending_stack_adjust ();
5296
5297  /* This might get a spurious warning in the presence of a syntax error;
5298     it could be fixed by moving the call to check_seenlabel after the
5299     check for error_mark_node, and copying the code of check_seenlabel that
5300     deals with case_stack->data.case_stmt.line_number_status /
5301     restore_line_number_status in front of the call to end_cleanup_deferral.
5302     However, this might miss some useful warnings in the presence of
5303     non-syntax errors.  */
5304  check_seenlabel ();
5305
5306  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
5307  if (index_type != error_mark_node)
5308    {
5309      /* If switch expression was an enumerated type, check that all
5310	 enumeration literals are covered by the cases.
5311	 No sense trying this if there's a default case, however.  */
5312
5313      if (!thiscase->data.case_stmt.default_label
5314	  && TREE_CODE (orig_type) == ENUMERAL_TYPE
5315	  && TREE_CODE (index_expr) != INTEGER_CST)
5316	check_for_full_enumeration_handling (orig_type);
5317
5318      /* If we don't have a default-label, create one here,
5319	 after the body of the switch.  */
5320      if (thiscase->data.case_stmt.default_label == 0)
5321	{
5322	  thiscase->data.case_stmt.default_label
5323	    = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5324	  expand_label (thiscase->data.case_stmt.default_label);
5325	}
5326      default_label = label_rtx (thiscase->data.case_stmt.default_label);
5327
5328      before_case = get_last_insn ();
5329
5330      if (thiscase->data.case_stmt.case_list
5331	  && thiscase->data.case_stmt.case_list->left)
5332	thiscase->data.case_stmt.case_list
5333	  = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5334
5335      /* Simplify the case-list before we count it.  */
5336      group_case_nodes (thiscase->data.case_stmt.case_list);
5337
5338      /* Get upper and lower bounds of case values.
5339	 Also convert all the case values to the index expr's data type.  */
5340
5341      count = 0;
5342      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5343	{
5344	  /* Check low and high label values are integers.  */
5345	  if (TREE_CODE (n->low) != INTEGER_CST)
5346	    abort ();
5347	  if (TREE_CODE (n->high) != INTEGER_CST)
5348	    abort ();
5349
5350	  n->low = convert (index_type, n->low);
5351	  n->high = convert (index_type, n->high);
5352
5353	  /* Count the elements and track the largest and smallest
5354	     of them (treating them as signed even if they are not).  */
5355	  if (count++ == 0)
5356	    {
5357	      minval = n->low;
5358	      maxval = n->high;
5359	    }
5360	  else
5361	    {
5362	      if (INT_CST_LT (n->low, minval))
5363		minval = n->low;
5364	      if (INT_CST_LT (maxval, n->high))
5365		maxval = n->high;
5366	    }
5367	  /* A range counts double, since it requires two compares.  */
5368	  if (! tree_int_cst_equal (n->low, n->high))
5369	    count++;
5370	}
5371
5372      /* Compute span of values.  */
5373      if (count != 0)
5374	range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5375
5376      end_cleanup_deferral ();
5377
5378      if (count == 0)
5379	{
5380	  expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5381	  emit_queue ();
5382	  emit_jump (default_label);
5383	}
5384
5385      /* If range of values is much bigger than number of values,
5386	 make a sequence of conditional branches instead of a dispatch.
5387	 If the switch-index is a constant, do it this way
5388	 because we can optimize it.  */
5389
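      /* For example, the case labels 1, 2, 3 and 100 give a count of 4
	 against a range of 99, so range > 10 * count holds and a
	 compare-and-jump tree is chosen over a branch table.  */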
5390      else if (count < case_values_threshold ()
5391	       || compare_tree_int (range, 10 * count) > 0
5392	       /* RANGE may be signed, and really large ranges will show up
5393		  as negative numbers.  */
5394	       || compare_tree_int (range, 0) < 0
5395#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5396	       || flag_pic
5397#endif
5398	       || TREE_CODE (index_expr) == INTEGER_CST
5399	       || (TREE_CODE (index_expr) == COMPOUND_EXPR
5400		   && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5401	{
5402	  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5403
5404	  /* If the index is a short or char for which we do not have
5405	     an insn to handle comparisons directly, convert it to
5406	     a full integer now, rather than letting each comparison
5407	     generate the conversion.  */
5408
5409	  if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5410	      && ! have_insn_for (COMPARE, GET_MODE (index)))
5411	    {
5412	      enum machine_mode wider_mode;
5413	      for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5414		   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5415		if (have_insn_for (COMPARE, wider_mode))
5416		  {
5417		    index = convert_to_mode (wider_mode, index, unsignedp);
5418		    break;
5419		  }
5420	    }
5421
5422	  emit_queue ();
5423	  do_pending_stack_adjust ();
5424
5425	  index = protect_from_queue (index, 0);
5426	  if (GET_CODE (index) == MEM)
5427	    index = copy_to_reg (index);
5428	  if (GET_CODE (index) == CONST_INT
5429	      || TREE_CODE (index_expr) == INTEGER_CST)
5430	    {
5431	      /* Make a tree node with the proper constant value
5432		 if we don't already have one.  */
5433	      if (TREE_CODE (index_expr) != INTEGER_CST)
5434		{
5435		  index_expr
5436		    = build_int_2 (INTVAL (index),
5437				   unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5438		  index_expr = convert (index_type, index_expr);
5439		}
5440
5441	      /* For constant index expressions we need only
5442		 issue an unconditional branch to the appropriate
5443		 target code.  The job of removing any unreachable
5444		 code is left to the optimization phase if the
5445		 "-O" option is specified.  */
5446	      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5447		if (! tree_int_cst_lt (index_expr, n->low)
5448		    && ! tree_int_cst_lt (n->high, index_expr))
5449		  break;
5450
5451	      if (n)
5452		emit_jump (label_rtx (n->code_label));
5453	      else
5454		emit_jump (default_label);
5455	    }
5456	  else
5457	    {
5458	      /* If the index expression is not constant we generate
5459		 a binary decision tree to select the appropriate
5460		 target code.  This is done as follows:
5461
5462		 The list of cases is rearranged into a binary tree,
5463		 nearly optimal assuming equal probability for each case.
5464
5465		 The tree is transformed into RTL, eliminating
5466		 redundant test conditions at the same time.
5467
5468		 If program flow could reach the end of the
5469		 decision tree an unconditional jump to the
5470		 default code is emitted.  */
5471
5472	      use_cost_table
5473		= (TREE_CODE (orig_type) != ENUMERAL_TYPE
5474		   && estimate_case_costs (thiscase->data.case_stmt.case_list));
5475	      balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
5476	      emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5477			       default_label, index_type);
5478	      emit_jump_if_reachable (default_label);
5479	    }
5480	}
5481      else
5482	{
5483	  if (! try_casesi (index_type, index_expr, minval, range,
5484			    table_label, default_label))
5485	    {
5486	      index_type = thiscase->data.case_stmt.nominal_type;
5487
5488              /* Index jumptables from zero for suitable values of
5489                 minval to avoid a subtraction.  */
5490              if (! optimize_size
5491                  && compare_tree_int (minval, 0) > 0
5492                  && compare_tree_int (minval, 3) < 0)
5493                {
5494                  minval = integer_zero_node;
5495                  range = maxval;
5496                }
5497
5498	      if (! try_tablejump (index_type, index_expr, minval, range,
5499				   table_label, default_label))
5500		abort ();
5501	    }
5502
5503	  /* Get table of labels to jump to, in order of case index.  */
5504
5505	  ncases = tree_low_cst (range, 0) + 1;
5506	  labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5507	  memset ((char *) labelvec, 0, ncases * sizeof (rtx));
5508
5509	  for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5510	    {
5511	      /* Compute the low and high bounds relative to the minimum
5512		 value since that should fit in a HOST_WIDE_INT while the
5513		 actual values may not.  */
5514	      HOST_WIDE_INT i_low
5515		= tree_low_cst (fold (build (MINUS_EXPR, index_type,
5516                                             n->low, minval)), 1);
5517	      HOST_WIDE_INT i_high
5518		= tree_low_cst (fold (build (MINUS_EXPR, index_type,
5519                                             n->high, minval)), 1);
5520	      HOST_WIDE_INT i;
5521
5522	      for (i = i_low; i <= i_high; i ++)
5523		labelvec[i]
5524		  = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5525	    }
5526
5527	  /* Fill in the gaps with the default.  */
5528	  for (i = 0; i < ncases; i++)
5529	    if (labelvec[i] == 0)
5530	      labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5531
5532	  /* Output the table.  */
5533	  emit_label (table_label);
5534
5535	  if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5536	    emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5537						   gen_rtx_LABEL_REF (Pmode, table_label),
5538						   gen_rtvec_v (ncases, labelvec),
5539						   const0_rtx, const0_rtx));
5540	  else
5541	    emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5542					      gen_rtvec_v (ncases, labelvec)));
5543
5544	  /* If the case insn drops through the table,
5545	     after the table we must jump to the default-label.
5546	     Otherwise record no drop-through after the table.  */
5547#ifdef CASE_DROPS_THROUGH
5548	  emit_jump (default_label);
5549#else
5550	  emit_barrier ();
5551#endif
5552	}
5553
5554      before_case = NEXT_INSN (before_case);
5555      end = get_last_insn ();
5556      if (squeeze_notes (&before_case, &end))
5557	abort ();
5558      reorder_insns (before_case, end,
5559		     thiscase->data.case_stmt.start);
5560    }
5561  else
5562    end_cleanup_deferral ();
5563
5564  if (thiscase->exit_label)
5565    emit_label (thiscase->exit_label);
5566
5567  free_case_nodes (case_stack->data.case_stmt.case_list);
5568  POPSTACK (case_stack);
5569
5570  free_temp_slots ();
5571}
5572
5573/* Convert the tree NODE into a list linked by the right field, with the left
5574   field zeroed.  RIGHT is used for recursion; it is a list to be placed
5575   rightmost in the resulting list.  */
5576
5577static struct case_node *
5578case_tree2list (node, right)
5579     struct case_node *node, *right;
5580{
5581  struct case_node *left;
5582
5583  if (node->right)
5584    right = case_tree2list (node->right, right);
5585
5586  node->right = right;
5587  if ((left = node->left))
5588    {
5589      node->left = 0;
5590      return case_tree2list (left, node);
5591    }
5592
5593  return node;
5594}
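
/* The flattening is an in-order walk: a tree with root 4, left child 2
   and right child 6 becomes the chain 2 -> 4 -> 6 linked through the
   right fields, so case values still appear in increasing order.  */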
5595
5596/* Generate code to jump to LABEL if OP1 and OP2 are equal.  */
5597
5598static void
5599do_jump_if_equal (op1, op2, label, unsignedp)
5600     rtx op1, op2, label;
5601     int unsignedp;
5602{
5603  if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
5604    {
5605      if (INTVAL (op1) == INTVAL (op2))
5606	emit_jump (label);
5607    }
5608  else
5609    emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
5610			     (GET_MODE (op1) == VOIDmode
5611			     ? GET_MODE (op2) : GET_MODE (op1)),
5612			     unsignedp, label);
5613}
5614
5615/* Not all case values are encountered equally.  This function
5616   uses a heuristic to weight case labels, in cases where that
5617   looks like a reasonable thing to do.
5618
5619   Right now, all we try to guess is text, and we establish the
5620   following weights:
5621
5622	chars above space:	16
5623	digits:			16
5624	default:		12
5625	space, punct:		8
5626	tab:			4
5627	newline:		2
5628	other "\" chars:	1
5629	remaining chars:	0
5630
5631   If we find any cases in the switch that are not either -1 or in the range
5632   of valid ASCII characters, or are control characters other than those
5633   commonly used with "\", don't treat this switch as scanning text.
5634
5635   Return 1 if these nodes are suitable for cost estimation, otherwise
5636   return 0.  */
5637
5638static int
5639estimate_case_costs (node)
5640     case_node_ptr node;
5641{
5642  tree min_ascii = integer_minus_one_node;
5643  tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5644  case_node_ptr n;
5645  int i;
5646
5647  /* If we haven't already made the cost table, make it now.  Note that the
5648     lower bound of the table is -1, not zero.  */
5649
5650  if (! cost_table_initialized)
5651    {
5652      cost_table_initialized = 1;
5653
5654      for (i = 0; i < 128; i++)
5655	{
5656	  if (ISALNUM (i))
5657	    COST_TABLE (i) = 16;
5658	  else if (ISPUNCT (i))
5659	    COST_TABLE (i) = 8;
5660	  else if (ISCNTRL (i))
5661	    COST_TABLE (i) = -1;
5662	}
5663
5664      COST_TABLE (' ') = 8;
5665      COST_TABLE ('\t') = 4;
5666      COST_TABLE ('\0') = 4;
5667      COST_TABLE ('\n') = 2;
5668      COST_TABLE ('\f') = 1;
5669      COST_TABLE ('\v') = 1;
5670      COST_TABLE ('\b') = 1;
5671    }
5672
5673  /* See if all the case expressions look like text.  It is text if the
5674     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
5675     as signed arithmetic since we don't want to ever access cost_table with a
5676     value less than -1.  Also check that none of the constants in a range
5677     are strange control characters.  */
5678
5679  for (n = node; n; n = n->right)
5680    {
5681      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5682	return 0;
5683
5684      for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5685	   i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
5686	if (COST_TABLE (i) < 0)
5687	  return 0;
5688    }
5689
5690  /* All interesting values are within the range of interesting
5691     ASCII characters.  */
5692  return 1;
5693}
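
/* Thus a switch over ordinary text, e.g. one with cases for 'a', 'e'
   and ' ', qualifies for the cost table, whereas a case for an unusual
   control character such as '\001' makes this return 0.  */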
5694
5695/* Scan an ordered list of case nodes
5696   combining those with consecutive values or ranges.
5697
5698   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */
5699
5700static void
5701group_case_nodes (head)
5702     case_node_ptr head;
5703{
5704  case_node_ptr node = head;
5705
5706  while (node)
5707    {
5708      rtx lb = next_real_insn (label_rtx (node->code_label));
5709      rtx lb2;
5710      case_node_ptr np = node;
5711
5712      /* Try to group the successors of NODE with NODE.  */
5713      while (((np = np->right) != 0)
5714	     /* Do they jump to the same place?  */
5715	     && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5716		 || (lb != 0 && lb2 != 0
5717		     && simplejump_p (lb)
5718		     && simplejump_p (lb2)
5719		     && rtx_equal_p (SET_SRC (PATTERN (lb)),
5720				     SET_SRC (PATTERN (lb2)))))
5721	     /* Are their ranges consecutive?  */
5722	     && tree_int_cst_equal (np->low,
5723				    fold (build (PLUS_EXPR,
5724						 TREE_TYPE (node->high),
5725						 node->high,
5726						 integer_one_node)))
5727	     /* An overflow is not consecutive.  */
5728	     && tree_int_cst_lt (node->high,
5729				 fold (build (PLUS_EXPR,
5730					      TREE_TYPE (node->high),
5731					      node->high,
5732					      integer_one_node))))
5733	{
5734	  node->high = np->high;
5735	}
5736      /* NP is the first node after NODE which can't be grouped with it.
5737	 Delete the nodes in between, and move on to that node.  */
5738      node->right = np;
5739      node = np;
5740    }
5741}
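
/* Note that grouping also merges distinct labels whose code is shared:
   if `case 1:' and `case 2:' both jump straight to the same insn, they
   collapse into the single range 1..2.  */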
5742
5743/* Take an ordered list of case nodes
5744   and transform them into a near optimal binary tree,
5745   on the assumption that any target code selection value is as
5746   likely as any other.
5747
5748   The transformation is performed by splitting the ordered
5749   list into two equal sections plus a pivot.  The parts are
5750   then attached to the pivot as left and right branches.  Each
5751   branch is then transformed recursively.  */
5752
5753static void
5754balance_case_nodes (head, parent)
5755     case_node_ptr *head;
5756     case_node_ptr parent;
5757{
5758  case_node_ptr np;
5759
5760  np = *head;
5761  if (np)
5762    {
5763      int cost = 0;
5764      int i = 0;
5765      int ranges = 0;
5766      case_node_ptr *npp;
5767      case_node_ptr left;
5768
5769      /* Count the number of entries on the branch.  Also count the ranges.  */
5770
5771      while (np)
5772	{
5773	  if (!tree_int_cst_equal (np->low, np->high))
5774	    {
5775	      ranges++;
5776	      if (use_cost_table)
5777		cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
5778	    }
5779
5780	  if (use_cost_table)
5781	    cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
5782
5783	  i++;
5784	  np = np->right;
5785	}
5786
5787      if (i > 2)
5788	{
5789	  /* Split this list if it is long enough for that to help.  */
5790	  npp = head;
5791	  left = *npp;
5792	  if (use_cost_table)
5793	    {
5794	      /* Find the place in the list that bisects the list's total cost.
5795		 Here I gets half the total cost.  */
5796	      int n_moved = 0;
5797	      i = (cost + 1) / 2;
5798	      while (1)
5799		{
5800		  /* Skip nodes while their cost does not reach that amount.  */
5801		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5802		    i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
5803		  i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
5804		  if (i <= 0)
5805		    break;
5806		  npp = &(*npp)->right;
5807		  n_moved += 1;
5808		}
5809	      if (n_moved == 0)
5810		{
5811		  /* Leave this branch lopsided, but optimize left-hand
5812		     side and fill in `parent' fields for right-hand side.  */
5813		  np = *head;
5814		  np->parent = parent;
5815		  balance_case_nodes (&np->left, np);
5816		  for (; np->right; np = np->right)
5817		    np->right->parent = np;
5818		  return;
5819		}
5820	    }
5821	  /* If there are just three nodes, split at the middle one.  */
5822	  else if (i == 3)
5823	    npp = &(*npp)->right;
5824	  else
5825	    {
5826	      /* Find the place in the list that bisects the list's total cost,
5827		 where ranges count as 2.
5828		 Here I gets half the total cost.  */
5829	      i = (i + ranges + 1) / 2;
5830	      while (1)
5831		{
5832		  /* Skip nodes while their cost does not reach that amount.  */
5833		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5834		    i--;
5835		  i--;
5836		  if (i <= 0)
5837		    break;
5838		  npp = &(*npp)->right;
5839		}
5840	    }
5841	  *head = np = *npp;
5842	  *npp = 0;
5843	  np->parent = parent;
5844	  np->left = left;
5845
5846	  /* Optimize each of the two split parts.  */
5847	  balance_case_nodes (&np->left, np);
5848	  balance_case_nodes (&np->right, np);
5849	}
5850      else
5851	{
5852	  /* Else leave this branch as one level,
5853	     but fill in `parent' fields.  */
5854	  np = *head;
5855	  np->parent = parent;
5856	  for (; np->right; np = np->right)
5857	    np->right->parent = np;
5858	}
5859    }
5860}
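
/* With equal weights, seven consecutive cases 1..7 split at 4: the root
   compares against 4, its subtrees handle 1..3 and 5..7, and each half
   is then balanced the same way recursively.  */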
5861
5862/* Search the parent sections of the case node tree
5863   to see if a test for the lower bound of NODE would be redundant.
5864   INDEX_TYPE is the type of the index expression.
5865
5866   The instructions to generate the case decision tree are
5867   output in the same order as nodes are processed, so it is
5868   known that if a parent node checks the range of the current
5869   node minus one, the current node is bounded at its lower
5870   span.  Thus the test would be redundant.  */
5871
5872static int
5873node_has_low_bound (node, index_type)
5874     case_node_ptr node;
5875     tree index_type;
5876{
5877  tree low_minus_one;
5878  case_node_ptr pnode;
5879
5880  /* If the lower bound of this node is the lowest value in the index type,
5881     we need not test it.  */
5882
5883  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5884    return 1;
5885
5886  /* If this node has a left branch, the value at the left must be less
5887     than that at this node, so it cannot be bounded at the bottom and
5888     we need not bother testing any further.  */
5889
5890  if (node->left)
5891    return 0;
5892
5893  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5894			       node->low, integer_one_node));
5895
5896  /* If the subtraction above overflowed, we can't verify anything.
5897     Otherwise, look for a parent that tests our value - 1.  */
5898
5899  if (! tree_int_cst_lt (low_minus_one, node->low))
5900    return 0;
5901
5902  for (pnode = node->parent; pnode; pnode = pnode->parent)
5903    if (tree_int_cst_equal (low_minus_one, pnode->high))
5904      return 1;
5905
5906  return 0;
5907}
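
/* Example: when a parent tested `<= 50' and branched here for the larger
   values, a node with low bound 51 finds 50 as that parent's high value,
   so its own lower-bound test can be omitted.  */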
5908
5909/* Search the parent sections of the case node tree
5910   to see if a test for the upper bound of NODE would be redundant.
5911   INDEX_TYPE is the type of the index expression.
5912
5913   The instructions to generate the case decision tree are
5914   output in the same order as nodes are processed, so it is
5915   known that if a parent node checks the range of the current
5916   node plus one, the current node is bounded at its upper
5917   span.  Thus the test would be redundant.  */
5918
5919static int
5920node_has_high_bound (node, index_type)
5921     case_node_ptr node;
5922     tree index_type;
5923{
5924  tree high_plus_one;
5925  case_node_ptr pnode;
5926
5927  /* If there is no upper bound, obviously no test is needed.  */
5928
5929  if (TYPE_MAX_VALUE (index_type) == NULL)
5930    return 1;
5931
5932  /* If the upper bound of this node is the highest value in the type
5933     of the index expression, we need not test against it.  */
5934
5935  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5936    return 1;
5937
5938  /* If this node has a right branch, the value at the right must be greater
5939     than that at this node, so it cannot be bounded at the top and
5940     we need not bother testing any further.  */
5941
5942  if (node->right)
5943    return 0;
5944
5945  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5946			       node->high, integer_one_node));
5947
5948  /* If the addition above overflowed, we can't verify anything.
5949     Otherwise, look for a parent that tests our value + 1.  */
5950
5951  if (! tree_int_cst_lt (node->high, high_plus_one))
5952    return 0;
5953
5954  for (pnode = node->parent; pnode; pnode = pnode->parent)
5955    if (tree_int_cst_equal (high_plus_one, pnode->low))
5956      return 1;
5957
5958  return 0;
5959}
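
/* This is the mirror image of node_has_low_bound: a parent whose low
   value equals this node's high value + 1 has already ruled out anything
   above this node's span.  */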
5960
5961/* Search the parent sections of the
5962   case node tree to see if both tests for the upper and lower
5963   bounds of NODE would be redundant.  */
5964
5965static int
5966node_is_bounded (node, index_type)
5967     case_node_ptr node;
5968     tree index_type;
5969{
5970  return (node_has_low_bound (node, index_type)
5971	  && node_has_high_bound (node, index_type));
5972}
5973
5974/*  Emit an unconditional jump to LABEL unless it would be dead code.  */
5975
5976static void
5977emit_jump_if_reachable (label)
5978     rtx label;
5979{
5980  if (GET_CODE (get_last_insn ()) != BARRIER)
5981    emit_jump (label);
5982}
5983
5984/* Emit step-by-step code to select a case for the value of INDEX.
5985   The thus generated decision tree follows the form of the
5986   case-node binary tree NODE, whose nodes represent test conditions.
5987   INDEX_TYPE is the type of the index of the switch.
5988
5989   Care is taken to prune redundant tests from the decision tree
5990   by detecting any boundary conditions already checked by
5991   emitted rtx.  (See node_has_high_bound, node_has_low_bound
5992   and node_is_bounded, above.)
5993
5994   Where the test conditions can be shown to be redundant we emit
5995   an unconditional jump to the target code.  As a further
5996   optimization, the subordinates of a tree node are examined to
5997   check for bounded nodes.  In this case conditional and/or
5998   unconditional jumps as a result of the boundary check for the
5999   current node are arranged to target the subordinates' associated
6000   code for out-of-bound conditions on the current node.
6001
6002   We can assume that when control reaches the code generated here,
6003   the index value has already been compared with the parents
6004   of this node, and determined to be on the same side of each parent
6005   as this node is.  Thus, if this node tests for the value 51,
6006   and a parent tested for 52, we don't need to consider
6007   the possibility of a value greater than 51.  If another parent
6008   tests for the value 50, then this node need not test anything.  */
6009
6010static void
6011emit_case_nodes (index, node, default_label, index_type)
6012     rtx index;
6013     case_node_ptr node;
6014     rtx default_label;
6015     tree index_type;
6016{
6017  /* If INDEX has an unsigned type, we must make unsigned branches.  */
6018  int unsignedp = TREE_UNSIGNED (index_type);
6019  enum machine_mode mode = GET_MODE (index);
6020  enum machine_mode imode = TYPE_MODE (index_type);
6021
6022  /* See if our parents have already tested everything for us.
6023     If they have, emit an unconditional jump for this node.  */
6024  if (node_is_bounded (node, index_type))
6025    emit_jump (label_rtx (node->code_label));
6026
6027  else if (tree_int_cst_equal (node->low, node->high))
6028    {
6029      /* Node is single valued.  First see if the index expression matches
6030	 this node and then check our children, if any.  */
6031
6032      do_jump_if_equal (index,
6033			convert_modes (mode, imode,
6034				       expand_expr (node->low, NULL_RTX,
6035						    VOIDmode, 0),
6036				       unsignedp),
6037			label_rtx (node->code_label), unsignedp);
6038
6039      if (node->right != 0 && node->left != 0)
6040	{
6041	  /* This node has children on both sides.
6042	     Dispatch to one side or the other
6043	     by comparing the index value with this node's value.
6044	     If one subtree is bounded, check that one first,
6045	     so we can avoid real branches in the tree.  */
6046
6047	  if (node_is_bounded (node->right, index_type))
6048	    {
6049	      emit_cmp_and_jump_insns (index,
6050				       convert_modes
6051				       (mode, imode,
6052					expand_expr (node->high, NULL_RTX,
6053						     VOIDmode, 0),
6054					unsignedp),
6055				       GT, NULL_RTX, mode, unsignedp,
6056				       label_rtx (node->right->code_label));
6057	      emit_case_nodes (index, node->left, default_label, index_type);
6058	    }
6059
6060	  else if (node_is_bounded (node->left, index_type))
6061	    {
6062	      emit_cmp_and_jump_insns (index,
6063				       convert_modes
6064				       (mode, imode,
6065					expand_expr (node->high, NULL_RTX,
6066						     VOIDmode, 0),
6067					unsignedp),
6068				       LT, NULL_RTX, mode, unsignedp,
6069				       label_rtx (node->left->code_label));
6070	      emit_case_nodes (index, node->right, default_label, index_type);
6071	    }
6072
6073	  else
6074	    {
6075	      /* Neither node is bounded.  First distinguish the two sides;
6076		 then emit the code for one side at a time.  */
6077
6078	      tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6079
6080	      /* See if the value is on the right.  */
6081	      emit_cmp_and_jump_insns (index,
6082				       convert_modes
6083				       (mode, imode,
6084					expand_expr (node->high, NULL_RTX,
6085						     VOIDmode, 0),
6086					unsignedp),
6087				       GT, NULL_RTX, mode, unsignedp,
6088				       label_rtx (test_label));
6089
6090	      /* Value must be on the left.
6091		 Handle the left-hand subtree.  */
6092	      emit_case_nodes (index, node->left, default_label, index_type);
6093	      /* If left-hand subtree does nothing,
6094		 go to default.  */
6095	      emit_jump_if_reachable (default_label);
6096
6097	      /* Code branches here for the right-hand subtree.  */
6098	      expand_label (test_label);
6099	      emit_case_nodes (index, node->right, default_label, index_type);
6100	    }
6101	}
6102
6103      else if (node->right != 0 && node->left == 0)
6104	{
6105	  /* Here we have a right child but no left, so we issue a conditional
6106	     branch to default and process the right child.
6107
6108	     Omit the conditional branch to default if it would avoid only one
6109	     right child; it costs too much space to save so little time.  */
6110
6111	  if (node->right->right || node->right->left
6112	      || !tree_int_cst_equal (node->right->low, node->right->high))
6113	    {
6114	      if (!node_has_low_bound (node, index_type))
6115		{
6116		  emit_cmp_and_jump_insns (index,
6117					   convert_modes
6118					   (mode, imode,
6119					    expand_expr (node->high, NULL_RTX,
6120							 VOIDmode, 0),
6121					    unsignedp),
6122					   LT, NULL_RTX, mode, unsignedp,
6123					   default_label);
6124		}
6125
6126	      emit_case_nodes (index, node->right, default_label, index_type);
6127	    }
6128	  else
6129	    /* We cannot process node->right normally
6130	       since we haven't ruled out the numbers less than
6131	       this node's value.  So handle node->right explicitly.  */
6132	    do_jump_if_equal (index,
6133			      convert_modes
6134			      (mode, imode,
6135			       expand_expr (node->right->low, NULL_RTX,
6136					    VOIDmode, 0),
6137			       unsignedp),
6138			      label_rtx (node->right->code_label), unsignedp);
6139	}
6140
6141      else if (node->right == 0 && node->left != 0)
6142	{
6143	  /* Just one subtree, on the left.  */
6144	  if (node->left->left || node->left->right
6145	      || !tree_int_cst_equal (node->left->low, node->left->high))
6146	    {
6147	      if (!node_has_high_bound (node, index_type))
6148		{
6149		  emit_cmp_and_jump_insns (index,
6150					   convert_modes
6151					   (mode, imode,
6152					    expand_expr (node->high, NULL_RTX,
6153							 VOIDmode, 0),
6154					    unsignedp),
6155					   GT, NULL_RTX, mode, unsignedp,
6156					   default_label);
6157		}
6158
6159	      emit_case_nodes (index, node->left, default_label, index_type);
6160	    }
6161	  else
6162	    /* We cannot process node->left normally
6163	       since we haven't ruled out the numbers greater than
6164	       this node's value.  So handle node->left explicitly.  */
6165	    do_jump_if_equal (index,
6166			      convert_modes
6167			      (mode, imode,
6168			       expand_expr (node->left->low, NULL_RTX,
6169					    VOIDmode, 0),
6170			       unsignedp),
6171			      label_rtx (node->left->code_label), unsignedp);
6172	}
6173    }
6174  else
6175    {
6176      /* Node is a range.  These cases are very similar to those for a single
6177	 value, except that we do not start by testing whether this node
6178	 is the one to branch to.  */
6179
6180      if (node->right != 0 && node->left != 0)
6181	{
6182	  /* Node has subtrees on both sides.
6183	     If the right-hand subtree is bounded,
6184	     test for it first, since we can go straight there.
6185	     Otherwise, we need to make a branch in the control structure,
6186	     then handle the two subtrees.  */
6187	  tree test_label = 0;
6188
6189	  if (node_is_bounded (node->right, index_type))
6190	    /* Right hand node is fully bounded so we can eliminate any
6191	       testing and branch directly to the target code.  */
6192	    emit_cmp_and_jump_insns (index,
6193				     convert_modes
6194				     (mode, imode,
6195				      expand_expr (node->high, NULL_RTX,
6196						   VOIDmode, 0),
6197				      unsignedp),
6198				     GT, NULL_RTX, mode, unsignedp,
6199				     label_rtx (node->right->code_label));
6200	  else
6201	    {
6202	      /* Right hand node requires testing.
6203		 Branch to a label where we will handle it later.  */
6204
6205	      test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6206	      emit_cmp_and_jump_insns (index,
6207				       convert_modes
6208				       (mode, imode,
6209					expand_expr (node->high, NULL_RTX,
6210						     VOIDmode, 0),
6211					unsignedp),
6212				       GT, NULL_RTX, mode, unsignedp,
6213				       label_rtx (test_label));
6214	    }
6215
6216	  /* Value belongs to this node or to the left-hand subtree.  */
6217
6218	  emit_cmp_and_jump_insns (index,
6219				   convert_modes
6220				   (mode, imode,
6221				    expand_expr (node->low, NULL_RTX,
6222						 VOIDmode, 0),
6223				    unsignedp),
6224				   GE, NULL_RTX, mode, unsignedp,
6225				   label_rtx (node->code_label));
6226
6227	  /* Handle the left-hand subtree.  */
6228	  emit_case_nodes (index, node->left, default_label, index_type);
6229
6230	  /* If right node had to be handled later, do that now.  */
6231
6232	  if (test_label)
6233	    {
6234	      /* If the left-hand subtree fell through,
6235		 don't let it fall into the right-hand subtree.  */
6236	      emit_jump_if_reachable (default_label);
6237
6238	      expand_label (test_label);
6239	      emit_case_nodes (index, node->right, default_label, index_type);
6240	    }
6241	}
6242
6243      else if (node->right != 0 && node->left == 0)
6244	{
6245	  /* Deal with values to the left of this node,
6246	     if they are possible.  */
6247	  if (!node_has_low_bound (node, index_type))
6248	    {
6249	      emit_cmp_and_jump_insns (index,
6250				       convert_modes
6251				       (mode, imode,
6252					expand_expr (node->low, NULL_RTX,
6253						     VOIDmode, 0),
6254					unsignedp),
6255				       LT, NULL_RTX, mode, unsignedp,
6256				       default_label);
6257	    }
6258
6259	  /* Value belongs to this node or to the right-hand subtree.  */
6260
6261	  emit_cmp_and_jump_insns (index,
6262				   convert_modes
6263				   (mode, imode,
6264				    expand_expr (node->high, NULL_RTX,
6265						 VOIDmode, 0),
6266				    unsignedp),
6267				   LE, NULL_RTX, mode, unsignedp,
6268				   label_rtx (node->code_label));
6269
6270	  emit_case_nodes (index, node->right, default_label, index_type);
6271	}
6272
6273      else if (node->right == 0 && node->left != 0)
6274	{
6275	  /* Deal with values to the right of this node,
6276	     if they are possible.  */
6277	  if (!node_has_high_bound (node, index_type))
6278	    {
6279	      emit_cmp_and_jump_insns (index,
6280				       convert_modes
6281				       (mode, imode,
6282					expand_expr (node->high, NULL_RTX,
6283						     VOIDmode, 0),
6284					unsignedp),
6285				       GT, NULL_RTX, mode, unsignedp,
6286				       default_label);
6287	    }
6288
6289	  /* Value belongs to this node or to the left-hand subtree.  */
6290
6291	  emit_cmp_and_jump_insns (index,
6292				   convert_modes
6293				   (mode, imode,
6294				    expand_expr (node->low, NULL_RTX,
6295						 VOIDmode, 0),
6296				    unsignedp),
6297				   GE, NULL_RTX, mode, unsignedp,
6298				   label_rtx (node->code_label));
6299
6300	  emit_case_nodes (index, node->left, default_label, index_type);
6301	}
6302
6303      else
6304	{
6305	  /* Node has no children so we check low and high bounds to remove
6306	     redundant tests.  Only one of the bounds can exist,
6307	     since otherwise this node is bounded--a case tested already.  */
6308	  int high_bound = node_has_high_bound (node, index_type);
6309	  int low_bound = node_has_low_bound (node, index_type);
6310
6311	  if (!high_bound && low_bound)
6312	    {
6313	      emit_cmp_and_jump_insns (index,
6314				       convert_modes
6315				       (mode, imode,
6316					expand_expr (node->high, NULL_RTX,
6317						     VOIDmode, 0),
6318					unsignedp),
6319				       GT, NULL_RTX, mode, unsignedp,
6320				       default_label);
6321	    }
6322
6323	  else if (!low_bound && high_bound)
6324	    {
6325	      emit_cmp_and_jump_insns (index,
6326				       convert_modes
6327				       (mode, imode,
6328					expand_expr (node->low, NULL_RTX,
6329						     VOIDmode, 0),
6330					unsignedp),
6331				       LT, NULL_RTX, mode, unsignedp,
6332				       default_label);
6333	    }
6334	  else if (!low_bound && !high_bound)
6335	    {
6336	      /* Widen LOW and HIGH to the same width as INDEX.  */
6337	      tree type = type_for_mode (mode, unsignedp);
6338	      tree low = build1 (CONVERT_EXPR, type, node->low);
6339	      tree high = build1 (CONVERT_EXPR, type, node->high);
6340	      rtx low_rtx, new_index, new_bound;
6341
6342	      /* Instead of doing two branches, emit one unsigned branch for
6343		 (index-low) > (high-low).  */
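	      /* E.g. for the range 10..13, index 12 gives 12-10 = 2 <= 3,
		 while index 7 gives 7-10, which wraps to a huge unsigned
		 value, exceeds 3, and so falls through to DEFAULT_LABEL.  */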
6344	      low_rtx = expand_expr (low, NULL_RTX, mode, 0);
6345	      new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
6346					       NULL_RTX, unsignedp,
6347					       OPTAB_WIDEN);
6348	      new_bound = expand_expr (fold (build (MINUS_EXPR, type,
6349						    high, low)),
6350				       NULL_RTX, mode, 0);
6351
6352	      emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
6353				       mode, 1, default_label);
6354	    }
6355
6356	  emit_jump (label_rtx (node->code_label));
6357	}
6358    }
6359}
6360