/* protector.c revision 1.14 */
/* RTL buffer overflow protection function for GNU C compiler
   Copyright (C) 1987, 88, 89, 92-7, 1998 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
20
#include "config.h"
#include "system.h"
#include "machmode.h"

#include "rtl.h"
#include "tree.h"
#include "regs.h"
#include "flags.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "expr.h"
#include "output.h"
#include "recog.h"
#include "hard-reg-set.h"
#include "real.h"
#include "except.h"
#include "function.h"
#include "toplev.h"
#include "conditions.h"
#include "insn-attr.h"
#include "c-tree.h"
#include "optabs.h"
#include "reload.h"
#include "protector.h"
45
46
47/* Warn when not issuing stack smashing protection for some reason */
48int warn_stack_protector;
49
50/* Round a value to the lowest integer less than it that is a multiple of
51   the required alignment.  Avoid using division in case the value is
52   negative.  Assume the alignment is a power of two.  */
53#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
54
55/* Similar, but round to the next highest integer that meets the
56   alignment.  */
57#define CEIL_ROUND(VALUE,ALIGN)	(((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
58
59
60/* Nonzero means use propolice as a stack protection method */
61extern int flag_propolice_protection;
62
63/* This file contains several memory arrangement functions to protect
64   the return address and the frame pointer of the stack
65   from a stack-smashing attack. It also
66   provides the function that protects pointer variables. */
67
68/* Nonzero if function being compiled can define string buffers that may be
69   damaged by the stack-smash attack */
70static int current_function_defines_vulnerable_string;
71static int current_function_defines_short_string;
72static int current_function_has_variable_string;
73static int current_function_defines_vsized_array;
74static int current_function_is_inlinable;
75static int is_array;
76
77static rtx guard_area, _guard;
78static rtx function_first_insn, prologue_insert_point;
79
80/*  */
81static HOST_WIDE_INT sweep_frame_offset;
82static HOST_WIDE_INT push_allocated_offset = 0;
83static HOST_WIDE_INT push_frame_offset = 0;
84static int saved_cse_not_expected = 0;
85
86static int search_string_from_argsandvars PARAMS ((int caller));
87static int search_string_from_local_vars PARAMS ((tree block));
88static int search_pointer_def PARAMS ((tree names));
89static int search_func_pointer PARAMS ((tree type));
90static int check_used_flag PARAMS ((rtx x));
91static void reset_used_flags_for_insns PARAMS ((rtx insn));
92static void reset_used_flags_for_decls PARAMS ((tree block));
93static void reset_used_flags_of_plus PARAMS ((rtx x));
94static void rtl_prologue PARAMS ((rtx insn));
95static void rtl_epilogue PARAMS ((rtx fnlastinsn));
96static void arrange_var_order PARAMS ((tree blocks));
97static void copy_args_for_protection PARAMS ((void));
98static void sweep_string_variable
99	PARAMS ((rtx sweep_var, HOST_WIDE_INT var_size));
100static void sweep_string_in_decls
101	PARAMS ((tree block, HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size));
102static void sweep_string_in_args
103	PARAMS ((tree parms, HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size));
104static void sweep_string_use_of_insns
105	PARAMS ((rtx insn, HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size));
106static void sweep_string_in_operand
107	PARAMS ((rtx insn, rtx *loc,
108		 HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size));
109static void move_arg_location
110	PARAMS ((rtx insn, rtx orig, rtx new, HOST_WIDE_INT var_size));
111static void change_arg_use_of_insns
112	PARAMS ((rtx insn, rtx orig, rtx *new, HOST_WIDE_INT size));
113static void change_arg_use_of_insns_2
114	PARAMS ((rtx insn, rtx orig, rtx *new, HOST_WIDE_INT size));
115static void change_arg_use_in_operand
116	PARAMS ((rtx insn, rtx x, rtx orig, rtx *new, HOST_WIDE_INT size));
117static void validate_insns_of_varrefs PARAMS ((rtx insn));
118static void validate_operand_of_varrefs PARAMS ((rtx insn, rtx *loc));
119
120#define SUSPICIOUS_BUF_SIZE 8
121
122#define AUTO_BASEPTR(X) \
123  (GET_CODE (X) == PLUS ? XEXP (X, 0) : X)
124#define AUTO_OFFSET(X) \
125  (GET_CODE (X) == PLUS ? INTVAL (XEXP (X, 1)) : 0)
126#undef PARM_PASSED_IN_MEMORY
127#define PARM_PASSED_IN_MEMORY(PARM) \
128 (GET_CODE (DECL_INCOMING_RTL (PARM)) == MEM)
129#define VIRTUAL_STACK_VARS_P(X) \
130 ((X) == virtual_stack_vars_rtx || (GET_CODE (X) == REG && (X)->used))
131#define TREE_VISITED(NODE) ((NODE)->common.unused_0)
132
133
134
/* Entry point of the stack protector, called once per function after
   RTL generation.  INLINABLE is nonzero when the function may be
   inlined; inlinable functions and functions with no BLOCK tree are
   left unprotected.  Allocates the guard slot, sweeps vulnerable
   buffers next to it, and inserts the guard set/check code.  */
void
prepare_stack_protection (inlinable)
     int inlinable;
{
  tree blocks = DECL_INITIAL (current_function_decl);
  current_function_is_inlinable = inlinable && !flag_no_inline;
  push_frame_offset = push_allocated_offset = 0;
  saved_cse_not_expected = 0;

  /*
    skip the protection if the function has no block
    or it is an inline function
  */
  if (current_function_is_inlinable) validate_insns_of_varrefs (get_insns ());
  if (! blocks || current_function_is_inlinable) return;

  current_function_defines_vulnerable_string
    = search_string_from_argsandvars (0);

  if (current_function_defines_vulnerable_string
      || flag_stack_protection)
    {
      HOST_WIDE_INT offset;
      function_first_insn = get_insns ();

      /* Give up on functions containing nested functions — presumably
	 because the sweep below would invalidate the nested functions'
	 references into this frame; TODO confirm.  */
      if (current_function_contains_functions) {
	  if (warn_stack_protector)
             warning ("not protecting function: it contains functions");
	  return;
      }

      /* Initialize recognition, indicating that volatile is OK.  */
      init_recog ();

      sweep_frame_offset = 0;

#ifdef STACK_GROWS_DOWNWARD
      /*
	frame_offset: offset to end of allocated area of stack frame.
	 It is defined in the function.c
      */

      /* the location must be before buffers */
      guard_area = assign_stack_local (BLKmode, UNITS_PER_GUARD, -1);
      PUT_MODE (guard_area, GUARD_m);
      MEM_VOLATILE_P (guard_area) = 1;

#ifndef FRAME_GROWS_DOWNWARD
      sweep_frame_offset = frame_offset;
#endif

      /* For making room for guard value, scan all insns and fix the offset
	 address of the variable that is based on frame pointer.
	 Scan all declarations of variables and fix the offset address
	 of the variable that is based on the frame pointer */
      sweep_string_variable (guard_area, UNITS_PER_GUARD);


      /* the location of guard area moves to the beginning of stack frame */
      if ((offset = AUTO_OFFSET(XEXP (guard_area, 0))))
	XEXP (XEXP (guard_area, 0), 1)
	  = gen_rtx_CONST_INT (VOIDmode, sweep_frame_offset);


      /* Insert prologue rtl instructions */
      rtl_prologue (function_first_insn);

      if (! current_function_has_variable_string)
	{
	  /* Generate argument saving instruction */
	  copy_args_for_protection ();

#ifndef FRAME_GROWS_DOWNWARD
	  /* If frame grows upward, character string copied from an arg
	     stays top of the guard variable.
	     So sweep the guard variable again */
	  sweep_frame_offset = CEIL_ROUND (frame_offset,
					   BIGGEST_ALIGNMENT / BITS_PER_UNIT);
	  sweep_string_variable (guard_area, UNITS_PER_GUARD);
#endif
	}
      else if (warn_stack_protector)
	warning ("not protecting variables: it has a variable length buffer");
#endif
#ifndef FRAME_GROWS_DOWNWARD
      if (STARTING_FRAME_OFFSET == 0)
	{
	  /* this may be only for alpha */
	  push_allocated_offset = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
	  assign_stack_local (BLKmode, push_allocated_offset, -1);
	  sweep_frame_offset = frame_offset;
	  sweep_string_variable (const0_rtx, -push_allocated_offset);
	  sweep_frame_offset = AUTO_OFFSET (XEXP (guard_area, 0));
	}
#endif

      /* Arrange the order of local variables */
      arrange_var_order (blocks);

#ifdef STACK_GROWS_DOWNWARD
      /* Insert epilogue rtl instructions */
      rtl_epilogue (get_last_insn ());
#endif
      init_recog_no_volatile ();
    }
  else if (current_function_defines_short_string
	   && warn_stack_protector)
    warning ("not protecting function: buffer is less than %d bytes long",
	     SUSPICIOUS_BUF_SIZE);
}
245
246/*
247  search string from arguments and local variables
248  caller: 0 means call from protector_stack_protection
249          1 means call from push_frame
250*/
251static int
252search_string_from_argsandvars (caller)
253     int caller;
254{
255  tree blocks, parms;
256  int string_p;
257
258  /* saves a latest search result as a cached information */
259  static tree __latest_search_decl = 0;
260  static int  __latest_search_result = FALSE;
261
262  if (__latest_search_decl == current_function_decl)
263    return __latest_search_result;
264  else if (caller) return FALSE;
265  __latest_search_decl = current_function_decl;
266  __latest_search_result = TRUE;
267
268  current_function_defines_short_string = FALSE;
269  current_function_has_variable_string = FALSE;
270  current_function_defines_vsized_array = FALSE;
271
272  /*
273    search a string variable from local variables
274  */
275  blocks = DECL_INITIAL (current_function_decl);
276  string_p = search_string_from_local_vars (blocks);
277
278  if (!current_function_defines_vsized_array && current_function_calls_alloca)
279    {
280      current_function_has_variable_string = TRUE;
281      return TRUE;
282    }
283
284  if (string_p) return TRUE;
285
286#ifdef STACK_GROWS_DOWNWARD
287  /*
288    search a string variable from arguments
289  */
290  parms = DECL_ARGUMENTS (current_function_decl);
291
292  for (; parms; parms = TREE_CHAIN (parms))
293    if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
294      {
295	if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
296	  {
297	    string_p = search_string_def (TREE_TYPE(parms));
298	    if (string_p) return TRUE;
299	  }
300      }
301#endif
302
303  __latest_search_result = FALSE;
304  return FALSE;
305}
306
307
/* Walk BLOCK and all its sub-blocks looking for local variables whose
   type contains a character array (as judged by search_string_def).
   Returns TRUE if any is found.  Side effect: sets
   current_function_has_variable_string when a found string lives in
   an object addressed indirectly (variable-sized).  */
static int
search_string_from_local_vars (block)
     tree block;
{
  tree types;
  int found = FALSE;

  while (block && TREE_CODE(block)==BLOCK)
    {
      types = BLOCK_VARS(block);

      while (types)
	{
	  /* skip the declaration that refers an external variable */
	  /* name: types.decl.name.identifier.id                   */
	  if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)
	      && TREE_CODE (types) == VAR_DECL
	      && ! DECL_ARTIFICIAL (types)
	      && DECL_RTL_SET_P (types)
	      && GET_CODE (DECL_RTL (types)) == MEM

	      && search_string_def (TREE_TYPE (types)))
	    {
	      rtx home = DECL_RTL (types);

	      if (GET_CODE (home) == MEM
		  && (GET_CODE (XEXP (home, 0)) == MEM
		      ||
		      (GET_CODE (XEXP (home, 0)) == REG
		       && XEXP (home, 0) != virtual_stack_vars_rtx
		       && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM
		       && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM
#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
		       && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM
#endif
		       )))
		/* If the value is indirect by memory or by a register
		   that isn't the frame pointer then it means the object is
		   variable-sized and address through
		   that register or stack slot.
		   The protection has no way to hide pointer variables
		   behind the array, so all we can do is staying
		   the order of variables and arguments. */
		{
		  current_function_has_variable_string = TRUE;
		}

	      /* found character array */
	      found = TRUE;
	    }

	  types = TREE_CHAIN(types);
	}

      /* recurse into nested scopes */
      if (search_string_from_local_vars (BLOCK_SUBBLOCKS (block)))
	{
	  found = TRUE;
	}

      block = BLOCK_CHAIN (block);
    }

  return found;
}
372
373
/*
 * search a character array from the specified type tree
 *
 * Returns TRUE when TYPE contains a character array worth protecting.
 * Side effects: may set current_function_defines_vsized_array,
 * current_function_defines_short_string, and the file-scope flag
 * is_array (consumed by arrange_var_order).
 */
int
search_string_def (type)
     tree type;
{
  tree tem;

  if (! type)
    return FALSE;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      /* Check if the array is a variable-sized array */
      if (TYPE_DOMAIN (type) == 0
	  || (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
	      && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR))
	current_function_defines_vsized_array = TRUE;

      /* TREE_CODE( TREE_TYPE(type) ) == INTEGER_TYPE */
      if (TYPE_MAIN_VARIANT (TREE_TYPE(type)) == char_type_node
	  || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == signed_char_type_node
	  || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == unsigned_char_type_node)
	{
	  /* Check if the string is a variable string */
	  if (TYPE_DOMAIN (type) == 0
	      ||
	      (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
	       && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR))
	    return TRUE;

	  /* Check if the string size is greater than SUSPICIOUS_BUF_SIZE */
#if SUSPICIOUS_BUF_SIZE > 0
	  if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
	      &&
	      TREE_INT_CST_LOW(TYPE_MAX_VALUE(TYPE_DOMAIN(type)))+1
	      >= SUSPICIOUS_BUF_SIZE)
	    return TRUE;

	  /* too short to be a target by itself; remember it so the
	     caller can warn */
	  current_function_defines_short_string = TRUE;
#else
	  return TRUE;
#endif
	}

      /* to protect every functions, sweep any arrays to the frame top */
      is_array = TRUE;

      /* a non-char array may still have char-array elements */
      return search_string_def(TREE_TYPE(type));

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      /* TREE_VISITED guards against infinite recursion on
	 self-referential aggregates.  */
      if (! TREE_VISITED (type))
	{
	  /* mark the type as having been visited already */
	  TREE_VISITED (type) = 1;

	  /* Examine every field of the aggregate.  */
	  for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
	    {
	      /* Omit here local type decls until we know how to support
		 them. */
	      if ((TREE_CODE (tem) == TYPE_DECL)
		  || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem)))
	        continue;

	      if (search_string_def(TREE_TYPE(tem)))
		{
		  TREE_VISITED (type) = 0;
		  return TRUE;
		}
	    }

	  TREE_VISITED (type) = 0;
	}
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* A pointer is not itself a character array; fall through and
	 return FALSE.  (Pointer protection is decided separately by
	 search_pointer_def.)  */
    case OFFSET_TYPE:
    default:
      break;
    }

  return FALSE;
}
466
/*
 * examine whether the input contains frame pointer addressing,
 * i.e. an rtx of the form (plus virtual_stack_vars_rtx CONST).
 * Returns TRUE if such a subexpression occurs anywhere in OP.
 */
int
contains_fp (op)
     rtx op;
{
  register enum rtx_code code;
  rtx x;
  int i, j;
  const char *fmt;

  x = op;
  if (x == 0)
    return FALSE;

  code = GET_CODE (x);

  switch (code)
    {
    /* leaf codes cannot contain a frame-pointer PLUS */
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case REG:
    case ADDRESSOF:
      return FALSE;

    case PLUS:
      if (XEXP (x, 0) == virtual_stack_vars_rtx
	  && CONSTANT_P (XEXP (x, 1)))
	return TRUE;
      /* FALLTHROUGH — a PLUS with a different base still has its
	 operands scanned below */

    default:
      break;
    }

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    if (*fmt == 'e')
      {
	if (contains_fp (XEXP (x, i))) return TRUE;
      }
    else if (*fmt == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	if (contains_fp (XVECEXP (x, i, j))) return TRUE;

  return FALSE;
}
518
519
/* Return TRUE when TYPE contains a pointer that should be protected
   (moved out of reach of a buffer overflow).  A pointer to read-only
   data is exempt unless it can reach a function pointer.  TREE_VISITED
   guards against infinite recursion on self-referential aggregates.  */
static int
search_pointer_def (type)
     tree type;
{
  tree tem;

  if (! type)
    return FALSE;

  switch (TREE_CODE (type))
    {
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      if (! TREE_VISITED (type))
	{
	  /* mark the type as having been visited already */
	  TREE_VISITED (type) = 1;

	  /* Examine every field of the aggregate.  */
	  for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
	    {
	      /* Omit here local type decls until we know how to support
		 them. */
	      if ((TREE_CODE (tem) == TYPE_DECL)
		  || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem)))
	        continue;

	      if (search_pointer_def(TREE_TYPE(tem)))
		{
		  TREE_VISITED (type) = 0;
		  return TRUE;
		}
	    }

	  TREE_VISITED (type) = 0;
	}
      break;

    case ARRAY_TYPE:
      /* look at the element type */
      return search_pointer_def (TREE_TYPE(type));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TYPE_READONLY (TREE_TYPE (type)))
	{
	  /* unless this pointer contains function pointer,
	     it should be protected */
	  return search_func_pointer (TREE_TYPE (type));
	}
      return TRUE;

    default:
      break;
    }

  return FALSE;
}
580
581
582static int
583search_func_pointer (type)
584     tree type;
585{
586  tree tem;
587
588  if (! type)
589    return FALSE;
590
591  switch (TREE_CODE (type))
592    {
593    case UNION_TYPE:
594    case QUAL_UNION_TYPE:
595    case RECORD_TYPE:
596	if (! TREE_VISITED (type))
597	  {
598	    /* mark the type as having been visited already */
599	    TREE_VISITED (type) = 1;
600
601	    /* Output the name, type, position (in bits), size (in bits) of
602	       each field.  */
603	    for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
604	      {
605		if (TREE_CODE (tem) == FIELD_DECL
606		    && search_func_pointer (TREE_TYPE(tem))) {
607		  TREE_VISITED (type) = 0;
608		  return TRUE;
609		}
610	      }
611
612	    TREE_VISITED (type) = 0;
613	  }
614	break;
615
616    case ARRAY_TYPE:
617      return search_func_pointer (TREE_TYPE(type));
618
619    case POINTER_TYPE:
620    case REFERENCE_TYPE:
621      /* I'm not sure whether OFFSET_TYPE needs this treatment,
622	 so I'll play safe and return 1.  */
623    case OFFSET_TYPE:
624      if (TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
625	return TRUE;
626      return search_func_pointer (TREE_TYPE(type));
627
628    default:
629      break;
630    }
631
632  return FALSE;
633}
634
635
636/*
637 * check whether the specified rtx contains PLUS rtx with used flag.
638 */
639static int
640check_used_flag (x)
641     rtx x;
642{
643  register int i, j;
644  register enum rtx_code code;
645  register const char *format_ptr;
646
647  if (x == 0)
648    return FALSE;
649
650  code = GET_CODE (x);
651
652  switch (code)
653    {
654    case REG:
655    case QUEUED:
656    case CONST_INT:
657    case CONST_DOUBLE:
658    case SYMBOL_REF:
659    case CODE_LABEL:
660    case PC:
661    case CC0:
662      return FALSE;
663
664    case PLUS:
665      if (x->used)
666	return TRUE;
667
668    default:
669      break;
670    }
671
672  format_ptr = GET_RTX_FORMAT (code);
673  for (i = 0; i < GET_RTX_LENGTH (code); i++)
674    {
675      switch (*format_ptr++)
676	{
677	case 'e':
678	  if (check_used_flag (XEXP (x, i)))
679	    return TRUE;
680	  break;
681
682	case 'E':
683	  for (j = 0; j < XVECLEN (x, i); j++)
684	    if (check_used_flag (XVECEXP (x, i, j)))
685	      return TRUE;
686	  break;
687	}
688    }
689
690  return FALSE;
691}
692
693
/* Walk the insn chain starting at INSN and clear the `used' flag of
   every PLUS inside the bodies of INSN, JUMP_INSN and CALL_INSN rtxes
   (via reset_used_flags_of_plus).  The flag apparently marks PLUS
   addresses already processed by the sweep — see check_used_flag.  */
static void
reset_used_flags_for_insns (insn)
     rtx insn;
{
  register int i, j;
  register enum rtx_code code;
  register const char *format_ptr;

  for (; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
	|| GET_CODE (insn) == CALL_INSN)
      {
	code = GET_CODE (insn);
	insn->used = 0;
	format_ptr = GET_RTX_FORMAT (code);

	for (i = 0; i < GET_RTX_LENGTH (code); i++)
	  {
	    switch (*format_ptr++) {
	    case 'e':
	      reset_used_flags_of_plus (XEXP (insn, i));
	      break;

	    case 'E':
	      for (j = 0; j < XVECLEN (insn, i); j++)
		reset_used_flags_of_plus (XVECEXP (insn, i, j));
	      break;
	    }
	  }
      }
}
725
726static void
727reset_used_flags_for_decls (block)
728     tree block;
729{
730  tree types;
731  rtx home;
732
733  while (block && TREE_CODE(block)==BLOCK)
734    {
735      types = BLOCK_VARS(block);
736
737      while (types)
738	{
739	  /* skip the declaration that refers an external variable and
740	     also skip an global variable */
741	  if (! DECL_EXTERNAL (types))
742	    {
743	      if (!DECL_RTL_SET_P (types)) goto next;
744	      home = DECL_RTL (types);
745
746	      if (GET_CODE (home) == MEM
747		  && GET_CODE (XEXP (home, 0)) == PLUS
748		  && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
749		{
750		  XEXP (home, 0)->used = 0;
751		}
752	    }
753	next:
754	  types = TREE_CHAIN(types);
755	}
756
757      reset_used_flags_for_decls (BLOCK_SUBBLOCKS (block));
758
759      block = BLOCK_CHAIN (block);
760    }
761}
762
/* Clear the USED bits only of type PLUS in X, recursing through all
   subexpressions.  Shared leaf rtxes and insn-chain codes are left
   untouched; CALL_PLACEHOLDERs have their three insn chains walked
   with reset_used_flags_for_insns.  */

static void
reset_used_flags_of_plus (x)
     rtx x;
{
  register int i, j;
  register enum rtx_code code;
  register const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return;

    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    case PLUS:
      /* this is the one code whose flag we actually clear */
      x->used = 0;
      break;

    case CALL_PLACEHOLDER:
      reset_used_flags_for_insns (XEXP (x, 0));
      reset_used_flags_for_insns (XEXP (x, 1));
      reset_used_flags_for_insns (XEXP (x, 2));
      break;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  reset_used_flags_of_plus (XEXP (x, i));
	  break;

	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    reset_used_flags_of_plus (XVECEXP (x, i, j));
	  break;
	}
    }
}
832
833
/* Emit the guard-setup code: copy the global `__guard' value into the
   guard area of the frame.  The code is inserted just after the
   NOTE_INSN_FUNCTION_BEG note found at or after INSN; for `main'
   without an init section, insertion is delayed past the block
   beginning so it follows the `__main' call.  Also records the
   insertion point in prologue_insert_point for later passes.  */
static void
rtl_prologue (insn)
     rtx insn;
{
#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
#undef HAS_INIT_SECTION
#define HAS_INIT_SECTION
#endif

  rtx _val;

  /* find the note marking the beginning of the function body */
  for (; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE
	&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
      break;

#if !defined (HAS_INIT_SECTION)
  /* If this function is `main', skip a call to `__main'
     to run guard instruments after global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_CONTEXT (current_function_decl) == NULL_TREE)
    {
      rtx fbinsn = insn;
      for (; insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == NOTE
	    && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	  break;
      /* fall back to the FUNCTION_BEG note if no block note exists */
      if (insn == 0) insn = fbinsn;
    }
#endif

  /* mark the next insn of FUNCTION_BEG insn */
  prologue_insert_point = NEXT_INSN (insn);

  start_sequence ();

  /* guard_area = __guard */
  _guard = gen_rtx_MEM (GUARD_m, gen_rtx_SYMBOL_REF (Pmode, "__guard"));
  emit_move_insn ( guard_area, _guard);

  _val = get_insns ();
  end_sequence ();

  emit_insn_before (_val, prologue_insert_point);
}
879
/* Emit the guard-check code after INSN (the function's last insn):
   compare the guard area against `__guard' and call
   `__stack_smash_handler' with the function's name when they differ.
   Bare RETURN jumps are first redirected to a fresh label placed in
   front of the check, so every exit path is verified.  */
static void
rtl_epilogue (insn)
     rtx insn;
{
  rtx if_false_label;
  rtx _val;
  rtx funcname;
  tree funcstr;
  int  flag_have_return = FALSE;

  start_sequence ();

#ifdef HAVE_return
  if (HAVE_return)
    {
      rtx insn;
      return_label = gen_label_rtx ();

      /* redirect each bare RETURN jump to return_label so the guard
	 check runs before the function actually returns */
      for (insn = prologue_insert_point; insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == JUMP_INSN
	    && GET_CODE (PATTERN (insn)) == RETURN
	    && GET_MODE (PATTERN (insn)) == VOIDmode)
	  {
	    rtx pat = gen_rtx_SET (VOIDmode,
				   pc_rtx,
				   gen_rtx_LABEL_REF (VOIDmode,
						      return_label));
	    PATTERN (insn) = pat;
	    flag_have_return = TRUE;
	  }


      emit_label (return_label);
    }
#endif

  /*                                          if (guard_area != _guard) */
  compare_from_rtx (guard_area, _guard, NE, 0, GUARD_m, NULL_RTX);

  if_false_label = gen_label_rtx ();		/* { */
  emit_jump_insn ( gen_beq(if_false_label));

  /* generate string for the current function name */
  funcstr = build_string (strlen(current_function_name)+1,
			  current_function_name);
  TREE_TYPE (funcstr) = build_array_type (char_type_node, 0);
  funcname = output_constant_def (funcstr, 1);

  emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__stack_smash_handler"),
		     0, VOIDmode, 2,
                     XEXP (funcname, 0), Pmode, guard_area, GUARD_m);

  /* control does not drop through after the handler call */

  emit_barrier ();				/* } */
  emit_label (if_false_label);

  /* generate RTL to return from the current function */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    use_return_register ();

#ifdef HAVE_return
  if (HAVE_return && flag_have_return)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
    }
#endif

  _val = get_insns ();
  end_sequence ();

  emit_insn_after (_val, insn);
}
954
955
/* Walk BLOCK and its sub-blocks (depth first) and sweep every
   character-array variable up to sweep_frame_offset, so that arrays
   end up adjacent to the guard, in front of the other locals.  */
static void
arrange_var_order (block)
     tree block;
{
  tree types;
  HOST_WIDE_INT offset;

  while (block && TREE_CODE(block)==BLOCK)
    {
      /* arrange the location of character arrays in depth first.  */
      arrange_var_order (BLOCK_SUBBLOCKS (block));

      types = BLOCK_VARS (block);

      while (types)
	{
	  /* skip the declaration that refers an external variable */
	  /* name: types.decl.assembler_name.id			   */
	  if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)
	      && TREE_CODE (types) == VAR_DECL
	      && ! DECL_ARTIFICIAL (types)
	      && ! DECL_VAR_INLINE (types)	/* don't sweep inlined string */
	      && DECL_RTL_SET_P (types)
	      && GET_CODE (DECL_RTL (types)) == MEM
	      && GET_MODE (DECL_RTL (types)) == BLKmode

	      /* is_array is set by search_string_def when the type
		 contains any array; when no vulnerable string was found
		 (protection forced), plain arrays are swept too */
	      && (is_array=0, search_string_def (TREE_TYPE (types))
		  || (! current_function_defines_vulnerable_string
		      && is_array)))
	    {
	      rtx home = DECL_RTL (types);

	      /* skip variable-sized objects addressed indirectly */
	      if (!(GET_CODE (home) == MEM
		    && (GET_CODE (XEXP (home, 0)) == MEM
			||
			(GET_CODE (XEXP (home, 0)) == REG
			 && XEXP (home, 0) != virtual_stack_vars_rtx
			 && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM
			 && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM
#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
			 && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM
#endif
			 ))))
		{
		  /* found a string variable */
		  HOST_WIDE_INT var_size =
		    ((TREE_INT_CST_LOW (DECL_SIZE (types)) + BITS_PER_UNIT - 1)
		     / BITS_PER_UNIT);

		  /* confirmed it is BLKmode.  */
		  int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
		  var_size = CEIL_ROUND (var_size, alignment);

		  /* skip the variable if it is top of the region
		     specified by sweep_frame_offset */
		  offset = AUTO_OFFSET (XEXP (DECL_RTL (types), 0));
		  if (offset == sweep_frame_offset - var_size)
		    sweep_frame_offset -= var_size;

		  else if (offset < sweep_frame_offset - var_size)
		    sweep_string_variable (DECL_RTL (types), var_size);
		}
	    }

	  types = TREE_CHAIN(types);
	}

      block = BLOCK_CHAIN (block);
    }
}
1026
1027
1028static void
1029copy_args_for_protection ()
1030{
1031  tree parms = DECL_ARGUMENTS (current_function_decl);
1032  rtx temp_rtx;
1033
1034  parms = DECL_ARGUMENTS (current_function_decl);
1035  for (; parms; parms = TREE_CHAIN (parms))
1036    if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
1037      {
1038	if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
1039	  {
1040	    int string_p;
1041
1042	    /*
1043	      skip argument protection if the last argument is used
1044	      for the variable argument
1045	    */
1046	    /*
1047	      tree fntype;
1048	      if (TREE_CHAIN (parms) == 0)
1049	      {
1050	        fntype = TREE_TYPE (current_function_decl);
1051
1052	        if ((TYPE_ARG_TYPES (fntype) != 0
1053		     && TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1054		          != void_type_node)
1055	             || current_function_varargs)
1056	          continue;
1057	      }
1058	    */
1059
1060	    string_p = search_string_def (TREE_TYPE(parms));
1061
1062	    /* check if it is a candidate to move */
1063	    if (string_p || search_pointer_def (TREE_TYPE (parms)))
1064	      {
1065		int arg_size
1066		  = ((TREE_INT_CST_LOW (DECL_SIZE (parms)) + BITS_PER_UNIT - 1)
1067		     / BITS_PER_UNIT);
1068		tree passed_type = DECL_ARG_TYPE (parms);
1069		tree nominal_type = TREE_TYPE (parms);
1070
1071		start_sequence ();
1072
1073		if (GET_CODE (DECL_RTL (parms)) == REG)
1074		  {
1075		    rtx safe = 0;
1076
1077		    change_arg_use_of_insns (prologue_insert_point,
1078					     DECL_RTL (parms), &safe, 0);
1079		    if (safe)
1080		      {
1081			/* generate codes for copying the content */
1082			rtx movinsn = emit_move_insn (safe, DECL_RTL (parms));
1083
1084			/* avoid register elimination in gcse.c (COPY-PROP)*/
1085			PATTERN (movinsn)->volatil = 1;
1086
1087			/* save debugger info */
1088			DECL_INCOMING_RTL (parms) = safe;
1089		      }
1090		  }
1091		else if (GET_CODE (DECL_RTL (parms)) == MEM
1092			 && GET_CODE (XEXP (DECL_RTL (parms), 0)) == ADDRESSOF)
1093		  {
1094		    rtx movinsn;
1095		    rtx safe = gen_reg_rtx (GET_MODE (DECL_RTL (parms)));
1096
1097		    /* generate codes for copying the content */
1098		    movinsn = emit_move_insn (safe, DECL_INCOMING_RTL (parms));
1099		    /* avoid register elimination in gcse.c (COPY-PROP)*/
1100		    PATTERN (movinsn)->volatil = 1;
1101
1102		    /* change the addressof information to the newly
1103		       allocated pseudo register */
1104		    emit_move_insn (DECL_RTL (parms), safe);
1105
1106		    /* save debugger info */
1107		    DECL_INCOMING_RTL (parms) = safe;
1108		  }
1109
1110		/* See if the frontend wants to pass this by invisible
1111		   reference.  */
1112		else if (passed_type != nominal_type
1113			 && POINTER_TYPE_P (passed_type)
1114			 && TREE_TYPE (passed_type) == nominal_type)
1115		  {
1116		    rtx safe = 0, orig = XEXP (DECL_RTL (parms), 0);
1117
1118		    change_arg_use_of_insns (prologue_insert_point,
1119					     orig, &safe, 0);
1120		    if (safe)
1121		      {
1122			/* generate codes for copying the content */
1123			rtx movinsn = emit_move_insn (safe, orig);
1124
1125			/* avoid register elimination in gcse.c (COPY-PROP)*/
1126			PATTERN (movinsn)->volatil = 1;
1127
1128			/* save debugger info */
1129			DECL_INCOMING_RTL (parms) = safe;
1130		      }
1131		  }
1132
1133		else
1134		  {
1135		    /* declare temporary local variable DECL_NAME (parms) */
1136		    temp_rtx
1137		      = assign_stack_local (DECL_MODE (parms), arg_size,
1138					    DECL_MODE (parms) == BLKmode ?
1139					    -1 : 0);
1140
1141		    MEM_IN_STRUCT_P (temp_rtx)
1142		      = AGGREGATE_TYPE_P (TREE_TYPE (parms));
1143		    set_mem_alias_set (temp_rtx, get_alias_set (parms));
1144
1145		    /* move_arg_location may change the contents of
1146		       DECL_RTL (parms). to avoid this, copies the contents */
1147		    /* SET_DECL_RTL (parms, copy_rtx (DECL_RTL (parms))); */
1148
1149		    /* generate codes for copying the content */
1150		    store_expr (parms, temp_rtx, 0);
1151
1152		    /* change the reference for each instructions */
1153		    move_arg_location (prologue_insert_point, DECL_RTL (parms),
1154				       temp_rtx, arg_size);
1155
1156		    /* change the location of parms variable */
1157		    SET_DECL_RTL (parms, temp_rtx);
1158
1159		    /* change debugger info */
1160		    DECL_INCOMING_RTL (parms) = temp_rtx;
1161		  }
1162
1163		emit_insn_before (get_insns (), prologue_insert_point);
1164		end_sequence ();
1165
1166#ifdef FRAME_GROWS_DOWNWARD
1167		/* process the string argument */
1168		if (string_p && DECL_MODE (parms) == BLKmode)
1169		  {
1170		    int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1171		    arg_size = CEIL_ROUND (arg_size, alignment);
1172
1173		    /* change the reference for each instructions */
1174		    sweep_string_variable (DECL_RTL (parms), arg_size);
1175		  }
1176#endif
1177	      }
1178	  }
1179      }
1180}
1181
1182
1183/*
1184  sweep a string variable to the local variable addressed
1185  by sweep_frame_offset, that is a last position of string variables.
1186*/
1187static void
1188sweep_string_variable (sweep_var, var_size)
1189     rtx sweep_var;
1190     HOST_WIDE_INT var_size;
1191{
1192  HOST_WIDE_INT sweep_offset;
1193
1194  switch (GET_CODE (sweep_var))
1195    {
1196    case MEM:
1197      if (GET_CODE (XEXP (sweep_var, 0)) == ADDRESSOF
1198	  && GET_CODE (XEXP (XEXP (sweep_var, 0), 0)) == REG)
1199	return;
1200      sweep_offset = AUTO_OFFSET(XEXP (sweep_var, 0));
1201      break;
1202    case CONST_INT:
1203      sweep_offset = INTVAL (sweep_var);
1204      break;
1205    default:
1206      abort ();
1207    }
1208
1209  /* scan all declarations of variables and fix the offset address of
1210     the variable based on the frame pointer */
1211  sweep_string_in_decls (DECL_INITIAL (current_function_decl),
1212			 sweep_offset, var_size);
1213
1214  /* scan all argument variable and fix the offset address based on
1215     the frame pointer */
1216  sweep_string_in_args (DECL_ARGUMENTS (current_function_decl),
1217			sweep_offset, var_size);
1218
1219  /* For making room for sweep variable, scan all insns and
1220     fix the offset address of the variable that is based on frame pointer */
1221  sweep_string_use_of_insns (function_first_insn, sweep_offset, var_size);
1222
1223
1224  /* Clear all the USED bits in operands of all insns and declarations of
1225     local vars */
1226  reset_used_flags_for_decls (DECL_INITIAL (current_function_decl));
1227  reset_used_flags_for_insns (function_first_insn);
1228
1229  sweep_frame_offset -= var_size;
1230}
1231
1232
1233
1234/*
1235  move an argument to the local variable addressed by frame_offset
1236*/
1237static void
1238move_arg_location (insn, orig, new, var_size)
1239     rtx  insn, orig, new;
1240     HOST_WIDE_INT var_size;
1241{
1242  /* For making room for sweep variable, scan all insns and
1243     fix the offset address of the variable that is based on frame pointer */
1244  change_arg_use_of_insns (insn, orig, &new, var_size);
1245
1246
1247  /* Clear all the USED bits in operands of all insns and declarations
1248     of local vars */
1249  reset_used_flags_for_insns (insn);
1250}
1251
1252
/* Walk the BLOCK tree and relocate the frame slot of every local
   variable: a slot inside the swept region [SWEEP_OFFSET,
   SWEEP_OFFSET + SWEEP_SIZE) is moved to the slot ending at
   sweep_frame_offset, and slots between the region and
   sweep_frame_offset are shifted down by SWEEP_SIZE to fill the
   vacated space.  Rewritten addresses are marked `used' so they are
   not adjusted twice.  */
static void
sweep_string_in_decls (block, sweep_offset, sweep_size)
     tree block;
     HOST_WIDE_INT sweep_offset, sweep_size;
{
  tree types;
  HOST_WIDE_INT offset;
  rtx home;

  while (block && TREE_CODE(block)==BLOCK)
    {
      types = BLOCK_VARS(block);

      while (types)
	{
	  /* skip the declaration that refers an external variable and
	     also skip a global variable */
	  if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)) {

	    if (!DECL_RTL_SET_P (types)) goto next;
	    home = DECL_RTL (types);

	    /* process for static local variable */
	    if (GET_CODE (home) == MEM
		&& GET_CODE (XEXP (home, 0)) == SYMBOL_REF)
	      goto next;

	    /* Variable at frame offset 0 (address is the bare frame
	       pointer, no PLUS).  */
	    if (GET_CODE (home) == MEM
		&& XEXP (home, 0) == virtual_stack_vars_rtx)
	      {
		offset = 0;

		/* the operand related to the sweep variable */
		if (sweep_offset <= offset
		    && offset < sweep_offset + sweep_size)
		  {
		    offset = sweep_frame_offset - sweep_size - sweep_offset;

		    XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx,
						    offset);
		    XEXP (home, 0)->used = 1;
		  }
		else if (sweep_offset <= offset
			 && offset < sweep_frame_offset)
		  {
		    /* the rest of variables under sweep_frame_offset,
		       shift the location */
		    XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx,
						    -sweep_size);
		    XEXP (home, 0)->used = 1;
		  }
	      }

	    if (GET_CODE (home) == MEM
		&& GET_CODE (XEXP (home, 0)) == MEM)
	      {
		/* process for dynamically allocated array */
		home = XEXP (home, 0);
	      }

	    if (GET_CODE (home) == MEM
		&& GET_CODE (XEXP (home, 0)) == PLUS
		&& XEXP (XEXP (home, 0), 0) == virtual_stack_vars_rtx
		&& GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
	      {
		if (! XEXP (home, 0)->used)
		  {
		    offset = AUTO_OFFSET(XEXP (home, 0));

		    /* the operand related to the sweep variable */
		    if (sweep_offset <= offset
			&& offset < sweep_offset + sweep_size)
		      {

			offset
			  += sweep_frame_offset - sweep_size - sweep_offset;
			XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
								      offset);

			/* mark */
			XEXP (home, 0)->used = 1;
		      }
		    else if (sweep_offset <= offset
			     && offset < sweep_frame_offset)
		      {	/* the rest of variables under sweep_frame_offset,
			   so shift the location */

			XEXP (XEXP (home, 0), 1)
			  = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);

			/* mark */
			XEXP (home, 0)->used = 1;
		      }
		  }
	      }

	  }
	next:
	  types = TREE_CHAIN(types);
	}

      sweep_string_in_decls (BLOCK_SUBBLOCKS (block),
			     sweep_offset, sweep_size);
      block = BLOCK_CHAIN (block);
    }
}
1359
1360
/* Relocate incoming argument homes (DECL_INCOMING_RTL) that live in
   the frame: a slot inside the swept region [SWEEP_OFFSET,
   SWEEP_OFFSET + SWEEP_SIZE) is moved by (sweep_frame_offset -
   sweep_size - sweep_offset), and slots between the region and
   sweep_frame_offset are shifted down by SWEEP_SIZE.  */
static void
sweep_string_in_args (parms, sweep_offset, sweep_size)
     tree parms;
     HOST_WIDE_INT sweep_offset, sweep_size;
{
  rtx home;
  HOST_WIDE_INT offset;

  for (; parms; parms = TREE_CHAIN (parms))
    if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
      {
	if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
	  {
	    home = DECL_INCOMING_RTL (parms);

	    /* Skip an address already rewritten by this pass.  */
	    if (XEXP (home, 0)->used) continue;

	    offset = AUTO_OFFSET(XEXP (home, 0));

	    /* the operand related to the sweep variable */
	    if (AUTO_BASEPTR (XEXP (home, 0)) == virtual_stack_vars_rtx)
	      {
		if (sweep_offset <= offset
		    && offset < sweep_offset + sweep_size)
		  {
		    offset += sweep_frame_offset - sweep_size - sweep_offset;
		    XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
								  offset);

		    /* mark */
		    XEXP (home, 0)->used = 1;
		  }
		else if (sweep_offset <= offset
			 && offset < sweep_frame_offset)
		  {
		    /* the rest of variables under sweep_frame_offset,
		       shift the location */
		    XEXP (XEXP (home, 0), 1)
		      = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);

		    /* mark */
		    XEXP (home, 0)->used = 1;
		  }
	      }
	  }
      }
}
1408
1409
/* Set by sweep_string_in_operand when the insn being scanned references
   one of the virtual frame registers; reset per insn by
   sweep_string_use_of_insns and consulted to decide whether a rewritten
   insn must be revalidated.  */
static int has_virtual_reg;
1411
1412static void
1413sweep_string_use_of_insns (insn, sweep_offset, sweep_size)
1414     rtx insn;
1415     HOST_WIDE_INT sweep_offset, sweep_size;
1416{
1417  for (; insn; insn = NEXT_INSN (insn))
1418    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1419	|| GET_CODE (insn) == CALL_INSN)
1420      {
1421	has_virtual_reg = FALSE;
1422	sweep_string_in_operand (insn, &PATTERN (insn),
1423				 sweep_offset, sweep_size);
1424	sweep_string_in_operand (insn, &REG_NOTES (insn),
1425				 sweep_offset, sweep_size);
1426      }
1427}
1428
1429
/* Recursively scan *LOC, an operand location within INSN, and rewrite
   frame addresses of the form (plus virtual_stack_vars_rtx constant):
   addresses inside the swept region [SWEEP_OFFSET, SWEEP_OFFSET +
   SWEEP_SIZE) are moved by (sweep_frame_offset - sweep_size -
   sweep_offset), and addresses between the region and
   sweep_frame_offset are shifted down by SWEEP_SIZE.  Sets
   has_virtual_reg when a virtual frame register is seen; if the
   rewritten insn no longer matches a pattern, the address is
   re-expanded with force_operand.  */
static void
sweep_string_in_operand (insn, loc, sweep_offset, sweep_size)
     rtx insn, *loc;
     HOST_WIDE_INT sweep_offset, sweep_size;
{
  register rtx x = *loc;
  register enum rtx_code code;
  int i, j, k = 0;
  HOST_WIDE_INT offset;
  const char *fmt;

  if (x == 0)
    return;

  code = GET_CODE (x);

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
    case ADDRESSOF:
      return;

    case REG:
      if (x == virtual_incoming_args_rtx
	  || x == virtual_stack_vars_rtx
	  || x == virtual_stack_dynamic_rtx
	  || x == virtual_outgoing_args_rtx
	  || x == virtual_cfa_rtx)
	has_virtual_reg = TRUE;
      return;

    case SET:
      /*
	skip setjmp setup insn and setjmp restore insn
	Example:
	(set (MEM (reg:SI xx)) (virtual_stack_vars_rtx)))
	(set (virtual_stack_vars_rtx) (REG))
      */
      if (GET_CODE (XEXP (x, 0)) == MEM
	  && XEXP (x, 1) == virtual_stack_vars_rtx)
	return;
      if (XEXP (x, 0) == virtual_stack_vars_rtx
	  && GET_CODE (XEXP (x, 1)) == REG)
	return;
      break;

    case PLUS:
      /* Handle typical case of frame register plus constant.  */
      if (XEXP (x, 0) == virtual_stack_vars_rtx
	  && CONSTANT_P (XEXP (x, 1)))
	{
	  /* Already rewritten by this pass; only revalidate below.  */
	  if (x->used) goto single_use_of_virtual_reg;

	  offset = AUTO_OFFSET(x);
	  if (RTX_INTEGRATED_P (x)) k = -1; /* for inline base ptr */

	  /* the operand related to the sweep variable */
	  if (sweep_offset <= offset + k
	      && offset + k < sweep_offset + sweep_size)
	    {
	      offset += sweep_frame_offset - sweep_size - sweep_offset;

	      XEXP (x, 0) = virtual_stack_vars_rtx;
	      XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
	      x->used = 1;
	    }
	  else if (sweep_offset <= offset + k
		   && offset + k < sweep_frame_offset)
	    {
	      /* the rest of variables under sweep_frame_offset,
		 shift the location */
	      XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
	      x->used = 1;
	    }

	single_use_of_virtual_reg:
	  if (has_virtual_reg) {
	    /* excerpt from insn_invalid_p in recog.c */
	    int icode = recog_memoized (insn);

	    if (icode < 0 && asm_noperands (PATTERN (insn)) < 0)
	      {
		rtx temp, seq;

		/* Recompute the address into a pseudo and substitute
		   it for the now-unrecognizable operand.  */
		start_sequence ();
		temp = force_operand (x, NULL_RTX);
		seq = get_insns ();
		end_sequence ();

		emit_insn_before (seq, insn);
		if (! validate_change (insn, loc, temp, 0)
		    && ! validate_replace_rtx (x, temp, insn))
		  fatal_insn ("sweep_string_in_operand", insn);
	      }
	  }

	  has_virtual_reg = TRUE;
	  return;
	}

#ifdef FRAME_GROWS_DOWNWARD
      /*
	alert the case of frame register plus constant given by reg.
	*/
      else if (XEXP (x, 0) == virtual_stack_vars_rtx
	       && GET_CODE (XEXP (x, 1)) == REG)
	fatal_insn ("sweep_string_in_operand: unknown addressing", insn);
#endif

      /*
	process further subtree:
	Example:  (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
	(const_int 5))
      */
      break;

    case CALL_PLACEHOLDER:
      /* Recurse into each of the alternative insn sequences.  */
      for (i = 0; i < 3; i++)
	{
	  rtx seq = XEXP (x, i);
	  if (seq)
	    {
	      push_to_sequence (seq);
	      sweep_string_use_of_insns (XEXP (x, i),
					 sweep_offset, sweep_size);
	      XEXP (x, i) = get_insns ();
	      end_sequence ();
	    }
	}
      break;

    default:
      break;
    }

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    if (*fmt == 'e')
      {
	/*
	  virtual_stack_vars_rtx without offset
	  Example:
	    (set (reg:SI xx) (reg:SI 78))
	    (set (reg:SI xx) (MEM (reg:SI 78)))
	*/
	if (XEXP (x, i) == virtual_stack_vars_rtx)
	  fatal_insn ("sweep_string_in_operand: unknown fp usage", insn);
	sweep_string_in_operand (insn, &XEXP (x, i), sweep_offset, sweep_size);
      }
    else if (*fmt == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	sweep_string_in_operand (insn, &XVECEXP (x, i, j), sweep_offset, sweep_size);
}
1594
1595
1596/*
1597  change a argument variable to the local variable addressed
1598  by the "new" variable.
1599*/
1600static void
1601change_arg_use_of_insns (insn, orig, new, size)
1602     rtx insn, orig, *new;
1603     HOST_WIDE_INT size;
1604{
1605  change_arg_use_of_insns_2 (insn, orig, new, size);
1606}
1607
/* Worker for change_arg_use_of_insns: walk the insn chain and rewrite
   each INSN/JUMP_INSN/CALL_INSN.  Address-computation insns emitted
   while rewriting an insn are placed just before it.  */
static void
change_arg_use_of_insns_2 (insn, orig, new, size)
     rtx insn, orig, *new;
     HOST_WIDE_INT size;
{
  for (; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
	|| GET_CODE (insn) == CALL_INSN)
      {
	rtx seq;

	start_sequence ();
	change_arg_use_in_operand (insn, PATTERN (insn), orig, new, size);

	seq = get_insns ();
	end_sequence ();
	emit_insn_before (seq, insn);

	/* A load_multiple insn from virtual_incoming_args_rtx has several
	   load insns.  If every insn changes the load address of the arg
	   to the frame region, those insns are moved before the PARALLEL
	   insn and the now-empty PARALLEL insn is removed.  */
	if (GET_CODE (PATTERN (insn)) == PARALLEL
	    && XVECLEN (PATTERN (insn), 0) == 0)
	  delete_insn (insn);
      }
}
1635
1636
1637
/* Recursively rewrite references to the argument ORIG inside the rtx X
   of INSN so that they refer to *NEW instead.  When ORIG is a MEM,
   addresses based on virtual_incoming_args_rtx that fall inside ORIG's
   SIZE bytes are redirected to the frame slot of *NEW.  When a direct
   use of ORIG is found, *NEW is allocated lazily with gen_reg_rtx on
   the first replacement.  */
static void
change_arg_use_in_operand (insn, x, orig, new, size)
     rtx insn, x, orig, *new;
     HOST_WIDE_INT size;
{
  register enum rtx_code code;
  int i, j;
  HOST_WIDE_INT offset;
  const char *fmt;

  if (x == 0)
    return;

  code = GET_CODE (x);

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
    case REG:
    case ADDRESSOF:
      return;

    case MEM:
      /* Handle special case of MEM (incoming_args)  */
      if (GET_CODE (orig) == MEM
	  && XEXP (x, 0) == virtual_incoming_args_rtx)
	{
	  offset = 0;

	  /* the operand related to the sweep variable */
	  if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
	      offset < AUTO_OFFSET(XEXP (orig, 0)) + size) {

	    /* Translate the offset within ORIG to the same offset
	       within the new frame slot.  */
	    offset = AUTO_OFFSET(XEXP (*new, 0))
	      + (offset - AUTO_OFFSET(XEXP (orig, 0)));

	    XEXP (x, 0) = plus_constant (virtual_stack_vars_rtx, offset);
	    XEXP (x, 0)->used = 1;

	    return;
	  }
	}
      break;

    case PLUS:
      /* Handle special case of frame register plus constant.  */
      if (GET_CODE (orig) == MEM
	  && XEXP (x, 0) == virtual_incoming_args_rtx
	  && CONSTANT_P (XEXP (x, 1))
	  && ! x->used)
	{
	  offset = AUTO_OFFSET(x);

	  /* the operand related to the sweep variable */
	  if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
	      offset < AUTO_OFFSET(XEXP (orig, 0)) + size) {

	    offset = AUTO_OFFSET(XEXP (*new, 0))
	      + (offset - AUTO_OFFSET(XEXP (orig, 0)));

	    XEXP (x, 0) = virtual_stack_vars_rtx;
	    XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
	    x->used = 1;

	    return;
	  }

	  /*
	    process further subtree:
	    Example:  (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
	    (const_int 5))
	  */
	}
      break;

    case SET:
      /* Handle special case of "set (REG or MEM) (incoming_args)".
	 It means that the address of the 1st argument is stored. */
      if (GET_CODE (orig) == MEM
	  && XEXP (x, 1) == virtual_incoming_args_rtx)
	{
	  offset = 0;

	  /* the operand related to the sweep variable */
	  if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
	      offset < AUTO_OFFSET(XEXP (orig, 0)) + size) {

	    offset = AUTO_OFFSET(XEXP (*new, 0))
	      + (offset - AUTO_OFFSET(XEXP (orig, 0)));

	    XEXP (x, 1) = force_operand (plus_constant (virtual_stack_vars_rtx,
							offset), NULL_RTX);
	    XEXP (x, 1)->used = 1;

	    return;
	  }
	}
      break;

    case CALL_PLACEHOLDER:
      /* Recurse into each of the alternative insn sequences.  */
      for (i = 0; i < 3; i++)
	{
	  rtx seq = XEXP (x, i);
	  if (seq)
	    {
	      push_to_sequence (seq);
	      change_arg_use_of_insns_2 (XEXP (x, i), orig, new, size);
	      XEXP (x, i) = get_insns ();
	      end_sequence ();
	    }
	}
      break;

    case PARALLEL:
      for (j = 0; j < XVECLEN (x, 0); j++)
  	{
	  change_arg_use_in_operand (insn, XVECEXP (x, 0, j), orig, new, size);
	}
      if (recog_memoized (insn) < 0)
	{
	  for (i = 0, j = 0; j < XVECLEN (x, 0); j++)
	    {
	      /* if the parallel insn has an insn that used
		 virtual_incoming_args_rtx, that insn is removed from
		 this PARALLEL insn.  */
	      if (check_used_flag (XVECEXP (x, 0, j)))
		{
		  emit_insn (XVECEXP (x, 0, j));
		  XVECEXP (x, 0, j) = NULL;
		}
	      else
		XVECEXP (x, 0, i++) = XVECEXP (x, 0, j);
	    }
	  PUT_NUM_ELEM (XVEC (x, 0), i);
	}
      return;

    default:
      break;
    }

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    if (*fmt == 'e')
      {
	/* Direct use of ORIG: allocate the replacement lazily.  */
	if (XEXP (x, i) == orig)
	  {
	    if (*new == 0) *new = gen_reg_rtx (GET_MODE (orig));
	    XEXP (x, i) = *new;
	    continue;
	  }
	change_arg_use_in_operand (insn, XEXP (x, i), orig, new, size);
      }
    else if (*fmt == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	{

	  if (XVECEXP (x, i, j) == orig)
	    {
	      if (*new == 0) *new = gen_reg_rtx (GET_MODE (orig));
	      XVECEXP (x, i, j) = *new;
	      continue;
	    }
	  change_arg_use_in_operand (insn, XVECEXP (x, i, j), orig, new, size);
	}
}
1814
1815
1816static void
1817validate_insns_of_varrefs (insn)
1818     rtx insn;
1819{
1820  rtx next;
1821
1822  /* Initialize recognition, indicating that volatile is OK.  */
1823  init_recog ();
1824
1825  for (; insn; insn = next)
1826    {
1827      next = NEXT_INSN (insn);
1828      if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1829	  || GET_CODE (insn) == CALL_INSN)
1830	{
1831	  /* excerpt from insn_invalid_p in recog.c */
1832	  int icode = recog_memoized (insn);
1833
1834	  if (icode < 0 && asm_noperands (PATTERN (insn)) < 0)
1835	    validate_operand_of_varrefs (insn, &PATTERN (insn));
1836	}
1837    }
1838
1839  init_recog_no_volatile ();
1840}
1841
1842
/* Legitimize the operand at *LOC of INSN that recog rejected: a
   (plus virtual_stack_vars_rtx constant) operand is recomputed into a
   fresh pseudo register through the add_optab insn pattern, and the
   pseudo then replaces the operand in INSN.  */
static void
validate_operand_of_varrefs (insn, loc)
     rtx insn, *loc;
{
  register enum rtx_code code;
  rtx x, temp, seq;
  int i, j;
  const char *fmt;

  x = *loc;
  if (x == 0)
    return;

  code = GET_CODE (x);

  switch (code)
    {
    case USE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
    case REG:
    case ADDRESSOF:
      return;

    case PLUS:
      /* validate insn of frame register plus constant.  */
      if (GET_CODE (x) == PLUS
	  && XEXP (x, 0) == virtual_stack_vars_rtx
	  && CONSTANT_P (XEXP (x, 1)))
	{
	  start_sequence ();

	  { /* excerpt from expand_binop in optabs.c */
	    optab binoptab = add_optab;
	    enum machine_mode mode = GET_MODE (x);
	    int icode = (int) binoptab->handlers[(int) mode].insn_code;
	    enum machine_mode mode1 = insn_data[icode].operand[2].mode;
	    rtx pat;
	    rtx xop0 = XEXP (x, 0), xop1 = XEXP (x, 1);
	    temp = gen_reg_rtx (mode);

	    /* Now, if insn's predicates don't allow offset operands,
	       put them into pseudo regs.  */

	    if (! (*insn_data[icode].operand[2].predicate) (xop1, mode1)
		&& mode1 != VOIDmode)
	      xop1 = copy_to_mode_reg (mode1, xop1);

	    pat = GEN_FCN (icode) (temp, xop0, xop1);
	    if (pat)
	      emit_insn (pat);
	  }
	  seq = get_insns ();
	  end_sequence ();

	  /* Place the address computation before INSN and substitute
	     the pseudo for the invalid operand.  */
	  emit_insn_before (seq, insn);
	  if (! validate_change (insn, loc, temp, 0))
	    abort ();
	  return;
	}
	break;


    case CALL_PLACEHOLDER:
      /* Recurse into each of the alternative insn sequences.  */
      for (i = 0; i < 3; i++)
	{
	  rtx seq = XEXP (x, i);
	  if (seq)
	    {
	      push_to_sequence (seq);
	      validate_insns_of_varrefs (XEXP (x, i));
	      XEXP (x, i) = get_insns ();
	      end_sequence ();
	    }
	}
      break;

    default:
      break;
    }

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    if (*fmt == 'e')
      validate_operand_of_varrefs (insn, &XEXP (x, i));
    else if (*fmt == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	validate_operand_of_varrefs (insn, &XVECEXP (x, i, j));
}
1942
1943
1944
1945/* Return size that is not allocated for stack frame. It will be allocated
1946   to modify the home of pseudo registers called from global_alloc.  */
1947
1948HOST_WIDE_INT
1949get_frame_free_size ()
1950{
1951  if (! flag_propolice_protection)
1952    return 0;
1953
1954  return push_allocated_offset - push_frame_offset;
1955}
1956
1957
1958/*
1959  The following codes are invoked after the instantiation of pseuso registers.
1960
1961  Reorder local variables to place a peudo register after buffers to avoid
1962  the corruption of local variables that could be used to further corrupt
1963  arbitrary memory locations.
1964*/
1965#if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD)
1966static void push_frame
1967	PARAMS ((HOST_WIDE_INT var_size, HOST_WIDE_INT boundary));
1968static void push_frame_in_decls
1969	PARAMS ((tree block, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
1970static void push_frame_in_args
1971	PARAMS ((tree parms, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
1972static void push_frame_of_insns
1973	PARAMS ((rtx insn, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
1974static void push_frame_in_operand
1975	PARAMS ((rtx insn, rtx orig,
1976		 HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
1977static void push_frame_of_reg_equiv_memory_loc
1978	PARAMS ((HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
1979static void push_frame_of_reg_equiv_constant
1980	PARAMS ((HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
1981static void reset_used_flags_for_push_frame PARAMS ((void));
1982static int check_out_of_frame_access
1983	PARAMS ((rtx insn, HOST_WIDE_INT boundary));
1984static int check_out_of_frame_access_in_operand
1985	PARAMS ((rtx, HOST_WIDE_INT boundary));
1986#endif
1987
/* Allocate a stack slot for the home of a pseudo register.  Called
   both via purge_addressof and via global_alloc (or reload).  With
   stack protection on a frame-grows-upward target, the slot is
   allocated in the "push frame" region tracked by push_frame_offset /
   push_allocated_offset, pushing the rest of the frame up as the
   region grows; otherwise it is a plain assign_stack_local.  */
rtx
assign_stack_local_for_pseudo_reg (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
#if defined(FRAME_GROWS_DOWNWARD) || !defined(STACK_GROWS_DOWNWARD)
  return assign_stack_local (mode, size, align);
#else
  tree blocks = DECL_INITIAL (current_function_decl);
  rtx new;
  HOST_WIDE_INT saved_frame_offset, units_per_push, starting_frame;
  int first_call_from_purge_addressof, first_call_from_global_alloc;

  /* Fall back to the ordinary allocator when the function is not
     protected (no strings/pointers in args and vars, inlinable,
     contains nested functions, ...).  */
  if (! flag_propolice_protection
      || size == 0
      || ! blocks
      || current_function_is_inlinable
      || ! search_string_from_argsandvars (1)
      || current_function_contains_functions)
    return assign_stack_local (mode, size, align);

  /* Distinguish the calling pass by watching transitions of the
     cse_not_expected flag (purge_addressof runs with it clear,
     global_alloc with it set).  */
  first_call_from_purge_addressof = !push_frame_offset && !cse_not_expected;
  first_call_from_global_alloc = !saved_cse_not_expected && cse_not_expected;
  saved_cse_not_expected = cse_not_expected;

  starting_frame = (STARTING_FRAME_OFFSET)?
    STARTING_FRAME_OFFSET:BIGGEST_ALIGNMENT / BITS_PER_UNIT;
  units_per_push = MAX(BIGGEST_ALIGNMENT / BITS_PER_UNIT,
		       GET_MODE_SIZE (mode));

  if (first_call_from_purge_addressof)
    {
      push_frame_offset = push_allocated_offset;
      if (check_out_of_frame_access (get_insns (), starting_frame))
	{
	  /* if there is an access beyond frame, push dummy region to separate
	     the address of instantiated variables */
	  push_frame (GET_MODE_SIZE (DImode), 0);
	  assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1);
	}
    }

  if (first_call_from_global_alloc)
    {
      push_frame_offset = push_allocated_offset = 0;
      if (check_out_of_frame_access (get_insns (), starting_frame))
	{
	  if (STARTING_FRAME_OFFSET)
	    {
	      /* if there is an access beyond frame, push dummy region
		 to separate the address of instantiated variables */
	      push_frame (GET_MODE_SIZE (DImode), 0);
	      assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1);
	    }
	  else
	    push_allocated_offset = starting_frame;
	}
    }

  /* Allocate the slot at the current top of the push-frame region by
     temporarily redirecting frame_offset.  */
  saved_frame_offset = frame_offset;
  frame_offset = push_frame_offset;

  new = assign_stack_local (mode, size, align);

  push_frame_offset = frame_offset;
  frame_offset = saved_frame_offset;

  /* Grow the reserved region when the new slot overran it, shifting
     the whole frame by units_per_push.  */
  if (push_frame_offset > push_allocated_offset)
    {
      push_frame (units_per_push,
		  push_allocated_offset + STARTING_FRAME_OFFSET);

      assign_stack_local (BLKmode, units_per_push, -1);
      push_allocated_offset += units_per_push;
    }

  /* At the second call from global alloc, alpha push frame and assign
     a local variable to the top of the stack */
  if (first_call_from_global_alloc && STARTING_FRAME_OFFSET == 0)
    push_frame_offset = push_allocated_offset = 0;

  return new;
#endif
}
2073
2074
2075#if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD)
2076/*
2077  push frame information for instantiating pseudo register at the top of stack.
2078  This is only for the "frame grows upward", it means FRAME_GROWS_DOWNWARD is
2079  not defined.
2080
2081  It is called by purge_addressof function and global_alloc (or reload)
2082  function.
2083*/
2084static void
2085push_frame (var_size, boundary)
2086     HOST_WIDE_INT var_size, boundary;
2087{
2088  reset_used_flags_for_push_frame();
2089
2090  /* scan all declarations of variables and fix the offset address of
2091     the variable based on the frame pointer */
2092  push_frame_in_decls (DECL_INITIAL (current_function_decl),
2093		       var_size, boundary);
2094
2095  /* scan all argument variable and fix the offset address based on
2096     the frame pointer */
2097  push_frame_in_args (DECL_ARGUMENTS (current_function_decl),
2098		      var_size, boundary);
2099
2100  /* scan all operands of all insns and fix the offset address
2101     based on the frame pointer */
2102  push_frame_of_insns (get_insns (), var_size, boundary);
2103
2104  /* scan all reg_equiv_memory_loc and reg_equiv_constant*/
2105  push_frame_of_reg_equiv_memory_loc (var_size, boundary);
2106  push_frame_of_reg_equiv_constant (var_size, boundary);
2107
2108  reset_used_flags_for_push_frame();
2109}
2110
/* Clear the `used' marks on frame addresses visited by push_frame:
   in local variable declarations, in all insns, and — when called
   during global_alloc (or reload) — in the reg_equiv_memory_loc and
   reg_equiv_constant tables.  */
static void
reset_used_flags_for_push_frame()
{
  int i;
  extern rtx *reg_equiv_memory_loc;
  extern rtx *reg_equiv_constant;

  /* Clear all the USED bits in operands of all insns and declarations of
     local vars */
  reset_used_flags_for_decls (DECL_INITIAL (current_function_decl));
  reset_used_flags_for_insns (get_insns ());


  /* The following code is processed only when push_frame is called from
     the global_alloc (or reload) function; otherwise the table does
     not exist yet.  */
  if (reg_equiv_memory_loc == 0) return;

  for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
    if (reg_equiv_memory_loc[i])
      {
	rtx x = reg_equiv_memory_loc[i];

	if (GET_CODE (x) == MEM
	    && GET_CODE (XEXP (x, 0)) == PLUS
	    && AUTO_BASEPTR (XEXP (x, 0)) == frame_pointer_rtx)
	  {
	    /* reset */
	    XEXP (x, 0)->used = 0;
	  }
      }


  if (reg_equiv_constant == 0) return;

  for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
    if (reg_equiv_constant[i])
      {
	rtx x = reg_equiv_constant[i];

	if (GET_CODE (x) == PLUS
	    && AUTO_BASEPTR (x) == frame_pointer_rtx)
	  {
	    /* reset */
	    x->used = 0;
	  }
      }
}
2158
/* Push the frame slot of every local variable whose offset is at or
   above BOUNDARY up by PUSH_SIZE bytes, marking each rewritten address
   `used' so it is adjusted only once.  */
static void
push_frame_in_decls (block, push_size, boundary)
     tree block;
     HOST_WIDE_INT push_size, boundary;
{
  tree types;
  HOST_WIDE_INT offset;
  rtx home;

  while (block && TREE_CODE(block)==BLOCK)
    {
      types = BLOCK_VARS(block);

      while (types)
	{
	  /* skip the declaration that refers an external variable and
	     also skip a global variable */
	  if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types))
	    {

	      if (!DECL_RTL_SET_P (types)) goto next;
	      home = DECL_RTL (types);

	      /* process for static local variable */
	      if (GET_CODE (home) == MEM
		  && GET_CODE (XEXP (home, 0)) == SYMBOL_REF)
		goto next;

	      /* Variable at frame offset 0 (address is a bare
		 register, no PLUS).  */
	      if (GET_CODE (home) == MEM
		  && GET_CODE (XEXP (home, 0)) == REG)
		{
		  if (XEXP (home, 0) != frame_pointer_rtx
		      || boundary != 0)
		    goto next;

		  XEXP (home, 0) = plus_constant (frame_pointer_rtx,
						  push_size);

		  /* mark */
		  XEXP (home, 0)->used = 1;
		}

	      if (GET_CODE (home) == MEM
		  && GET_CODE (XEXP (home, 0)) == MEM)
		{

		  /* process for dynamically allocated array */
		  home = XEXP (home, 0);
		}

	      if (GET_CODE (home) == MEM
		  && GET_CODE (XEXP (home, 0)) == PLUS
		  && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
		{
		  offset = AUTO_OFFSET(XEXP (home, 0));

		  if (! XEXP (home, 0)->used
		      && offset >= boundary)
		    {
		      offset += push_size;
		      XEXP (XEXP (home, 0), 1)
			= gen_rtx_CONST_INT (VOIDmode, offset);

		      /* mark */
		      XEXP (home, 0)->used = 1;
		    }
		}

	    }
	next:
	  types = TREE_CHAIN(types);
	}

      push_frame_in_decls (BLOCK_SUBBLOCKS (block), push_size, boundary);
      block = BLOCK_CHAIN (block);
    }
}
2236
2237
2238static void
2239push_frame_in_args (parms, push_size, boundary)
2240     tree parms;
2241     HOST_WIDE_INT push_size, boundary;
2242{
2243  rtx home;
2244  HOST_WIDE_INT offset;
2245
2246  for (; parms; parms = TREE_CHAIN (parms))
2247    if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
2248      {
2249	if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
2250	  {
2251	    home = DECL_INCOMING_RTL (parms);
2252	    offset = AUTO_OFFSET(XEXP (home, 0));
2253
2254	    if (XEXP (home, 0)->used || offset < boundary) continue;
2255
2256	    /* the operand related to the sweep variable */
2257	    if (AUTO_BASEPTR (XEXP (home, 0)) == frame_pointer_rtx)
2258	      {
2259		if (XEXP (home, 0) == frame_pointer_rtx)
2260		  XEXP (home, 0) = plus_constant (frame_pointer_rtx,
2261						  push_size);
2262		else {
2263		  offset += push_size;
2264		  XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
2265								offset);
2266		}
2267
2268		/* mark */
2269		XEXP (home, 0)->used = 1;
2270	      }
2271	  }
2272      }
2273}
2274
2275
/* Nonzero when push_frame_in_operand rewrote part of the current insn,
   meaning the insn may need to be re-recognized or split.  */
static int insn_pushed;
/* Per-register table, indexed by register number: a nonzero entry is the
   constant C for a register currently known to hold frame_pointer + C.
   Allocated per insn chain by push_frame_of_insns.  */
static int *fp_equiv = 0;
2278
2279static void
2280push_frame_of_insns (insn, push_size, boundary)
2281     rtx insn;
2282     HOST_WIDE_INT push_size, boundary;
2283{
2284  /* init fp_equiv */
2285  fp_equiv = (int *) xcalloc (max_reg_num (), sizeof (int));
2286
2287  for (; insn; insn = NEXT_INSN (insn))
2288    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2289	|| GET_CODE (insn) == CALL_INSN)
2290      {
2291	rtx last;
2292
2293	insn_pushed = FALSE;
2294
2295	/* push frame in INSN operation */
2296	push_frame_in_operand (insn, PATTERN (insn), push_size, boundary);
2297
2298	/* push frame in NOTE */
2299	push_frame_in_operand (insn, REG_NOTES (insn), push_size, boundary);
2300
2301	/* push frame in CALL EXPR_LIST */
2302	if (GET_CODE (insn) == CALL_INSN)
2303	  push_frame_in_operand (insn, CALL_INSN_FUNCTION_USAGE (insn),
2304				 push_size, boundary);
2305
2306	if (insn_pushed
2307	    && (last = try_split (PATTERN (insn), insn, 1)) != insn)
2308	  {
2309	    rtx first = NEXT_INSN (insn);
2310	    rtx trial = NEXT_INSN (first);
2311	    rtx pattern = PATTERN (trial);
2312	    rtx set;
2313
2314	    /* update REG_EQUIV info to the first splitted insn */
2315	    if ((set = single_set (insn))
2316		&& find_reg_note (insn, REG_EQUIV, SET_SRC (set))
2317		&& GET_CODE (PATTERN (first)) == SET)
2318	      {
2319		REG_NOTES (first)
2320		  = gen_rtx_EXPR_LIST (REG_EQUIV,
2321				       SET_SRC (PATTERN (first)),
2322				       REG_NOTES (first));
2323	      }
2324
2325	    /* copy the first insn of splitted insns to the original insn and
2326	       delete the first insn,
2327	       because the original insn is pointed from records:
2328	       insn_chain, reg_equiv_init, used for global_alloc.  */
2329	    if (cse_not_expected)
2330	      {
2331		add_insn_before (insn, first);
2332
2333		/* Copy the various flags, and other information.  */
2334		memcpy (insn, first, sizeof (struct rtx_def) - sizeof (rtunion));
2335		PATTERN (insn) = PATTERN (first);
2336		INSN_CODE (insn) = INSN_CODE (first);
2337		LOG_LINKS (insn) = LOG_LINKS (first);
2338		REG_NOTES (insn) = REG_NOTES (first);
2339
2340		/* then remove the first insn of splitted insns.  */
2341		remove_insn (first);
2342		INSN_DELETED_P (first) = 1;
2343	      }
2344
2345	    if (GET_CODE (pattern) == SET
2346		&& GET_CODE (XEXP (pattern, 0)) == REG
2347		&& GET_CODE (XEXP (pattern, 1)) == PLUS
2348		&& XEXP (pattern, 0) == XEXP (XEXP (pattern, 1), 0)
2349		&& CONSTANT_P (XEXP (XEXP (pattern, 1), 1)))
2350	      {
2351		rtx offset = XEXP (XEXP (pattern, 1), 1);
2352		fp_equiv[REGNO (XEXP (pattern, 0))] = INTVAL (offset);
2353
2354		delete_insn (trial);
2355	      }
2356
2357	    insn = last;
2358	  }
2359      }
2360
2361  /* Clean up.  */
2362  free (fp_equiv);
2363}
2364
2365
2366static void
2367push_frame_in_operand (insn, orig, push_size, boundary)
2368     rtx insn, orig;
2369     HOST_WIDE_INT push_size, boundary;
2370{
2371  register rtx x = orig, prev_insn;
2372  register enum rtx_code code;
2373  int i, j;
2374  HOST_WIDE_INT offset;
2375  const char *fmt;
2376
2377  if (x == 0)
2378    return;
2379
2380  code = GET_CODE (x);
2381
2382  switch (code)
2383    {
2384    case CONST_INT:
2385    case CONST_DOUBLE:
2386    case CONST:
2387    case SYMBOL_REF:
2388    case CODE_LABEL:
2389    case PC:
2390    case CC0:
2391    case ASM_INPUT:
2392    case ADDR_VEC:
2393    case ADDR_DIFF_VEC:
2394    case RETURN:
2395    case REG:
2396    case ADDRESSOF:
2397    case USE:
2398      return;
2399
2400    case SET:
2401      /*
2402	skip setjmp setup insn and setjmp restore insn
2403	alpha case:
2404	(set (MEM (reg:SI xx)) (frame_pointer_rtx)))
2405	(set (frame_pointer_rtx) (REG))
2406      */
2407      if (GET_CODE (XEXP (x, 0)) == MEM
2408	  && XEXP (x, 1) == frame_pointer_rtx)
2409	return;
2410      if (XEXP (x, 0) == frame_pointer_rtx
2411	  && GET_CODE (XEXP (x, 1)) == REG)
2412	return;
2413
2414      /*
2415	powerpc case: restores setjmp address
2416	(set (frame_pointer_rtx) (plus frame_pointer_rtx const_int -n))
2417	or
2418	(set (reg) (plus frame_pointer_rtx const_int -n))
2419	(set (frame_pointer_rtx) (reg))
2420      */
2421      if (GET_CODE (XEXP (x, 0)) == REG
2422	  && GET_CODE (XEXP (x, 1)) == PLUS
2423	  && XEXP (XEXP (x, 1), 0) == frame_pointer_rtx
2424	  && CONSTANT_P (XEXP (XEXP (x, 1), 1))
2425	  && INTVAL (XEXP (XEXP (x, 1), 1)) < 0)
2426	{
2427	  x = XEXP (x, 1);
2428	  offset = AUTO_OFFSET(x);
2429	  if (x->used || abs (offset) < boundary)
2430	    return;
2431
2432	  XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - push_size);
2433	  x->used = 1; insn_pushed = TRUE;
2434	  return;
2435	}
2436
2437      /* reset fp_equiv register */
2438      else if (GET_CODE (XEXP (x, 0)) == REG
2439	  && fp_equiv[REGNO (XEXP (x, 0))])
2440	fp_equiv[REGNO (XEXP (x, 0))] = 0;
2441
2442      /* propagate fp_equiv register */
2443      else if (GET_CODE (XEXP (x, 0)) == REG
2444	       && GET_CODE (XEXP (x, 1)) == REG
2445	       && fp_equiv[REGNO (XEXP (x, 1))])
2446	if (REGNO (XEXP (x, 0)) <= LAST_VIRTUAL_REGISTER
2447	    || reg_renumber != 0 && reg_renumber[REGNO (XEXP (x, 0))] >= 0)
2448	  fp_equiv[REGNO (XEXP (x, 0))] = fp_equiv[REGNO (XEXP (x, 1))];
2449      break;
2450
2451    case MEM:
2452      if (XEXP (x, 0) == frame_pointer_rtx
2453	  && boundary == 0)
2454	{
2455	  XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size);
2456	  XEXP (x, 0)->used = 1; insn_pushed = TRUE;
2457	  return;
2458	}
2459      break;
2460
2461    case PLUS:
2462      offset = AUTO_OFFSET(x);
2463      prev_insn = prev_nonnote_insn (insn);
2464
2465      /* Handle special case of frame register plus constant.  */
2466      if (CONSTANT_P (XEXP (x, 1))
2467	  && XEXP (x, 0) == frame_pointer_rtx)
2468	{
2469	  if (x->used || offset < boundary)
2470	    return;
2471
2472	  XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
2473	  x->used = 1; insn_pushed = TRUE;
2474
2475	  return;
2476	}
2477      /*
2478	Handle alpha case:
2479	 (plus:SI (subreg:SI (reg:DI 63 FP) 0) (const_int 64 [0x40]))
2480      */
2481      if (CONSTANT_P (XEXP (x, 1))
2482	  && GET_CODE (XEXP (x, 0)) == SUBREG
2483	  && SUBREG_REG (XEXP (x, 0)) == frame_pointer_rtx)
2484	{
2485	  if (x->used || offset < boundary)
2486	    return;
2487
2488	  XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
2489	  x->used = 1; insn_pushed = TRUE;
2490
2491	  return;
2492	}
2493      /*
2494	Handle powerpc case:
2495	 (set (reg x) (plus fp const))
2496	 (set (.....) (... (plus (reg x) (const B))))
2497      */
2498      else if (CONSTANT_P (XEXP (x, 1))
2499	       && GET_CODE (XEXP (x, 0)) == REG
2500	       && fp_equiv[REGNO (XEXP (x, 0))])
2501	{
2502	  if (x->used) return;
2503
2504	  offset += fp_equiv[REGNO (XEXP (x, 0))];
2505
2506	  XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2507	  x->used = 1; insn_pushed = TRUE;
2508
2509	  return;
2510	}
2511      /*
2512	Handle special case of frame register plus reg (constant).
2513	 (set (reg x) (const B))
2514	 (set (....) (...(plus fp (reg x))))
2515      */
2516      else if (XEXP (x, 0) == frame_pointer_rtx
2517	       && GET_CODE (XEXP (x, 1)) == REG
2518	       && prev_insn
2519	       && PATTERN (prev_insn)
2520	       && SET_DEST (PATTERN (prev_insn)) == XEXP (x, 1)
2521	       && CONSTANT_P (SET_SRC (PATTERN (prev_insn))))
2522	{
2523	  HOST_WIDE_INT offset = INTVAL (SET_SRC (PATTERN (prev_insn)));
2524
2525	  if (x->used || offset < boundary)
2526	    return;
2527
2528	  SET_SRC (PATTERN (prev_insn))
2529	    = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
2530	  x->used = 1;
2531	  XEXP (x, 1)->used = 1;
2532
2533	  return;
2534	}
2535      /* Handle special case of frame register plus reg (used).  */
2536      else if (XEXP (x, 0) == frame_pointer_rtx
2537	       && XEXP (x, 1)->used)
2538	{
2539	  x->used = 1;
2540	  return;
2541	}
2542      /*
2543	process further subtree:
2544	Example:  (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
2545	(const_int 5))
2546      */
2547      break;
2548
2549    case CALL_PLACEHOLDER:
2550      push_frame_of_insns (XEXP (x, 0), push_size, boundary);
2551      push_frame_of_insns (XEXP (x, 1), push_size, boundary);
2552      push_frame_of_insns (XEXP (x, 2), push_size, boundary);
2553      break;
2554
2555    default:
2556      break;
2557    }
2558
2559  /* Scan all subexpressions.  */
2560  fmt = GET_RTX_FORMAT (code);
2561  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2562    if (*fmt == 'e')
2563      {
2564	if (XEXP (x, i) == frame_pointer_rtx && boundary == 0)
2565	  fatal_insn ("push_frame_in_operand", insn);
2566	push_frame_in_operand (insn, XEXP (x, i), push_size, boundary);
2567      }
2568    else if (*fmt == 'E')
2569      for (j = 0; j < XVECLEN (x, i); j++)
2570	push_frame_in_operand (insn, XVECEXP (x, i, j), push_size, boundary);
2571}
2572
2573static void
2574push_frame_of_reg_equiv_memory_loc (push_size, boundary)
2575     HOST_WIDE_INT push_size, boundary;
2576{
2577  int i;
2578  extern rtx *reg_equiv_memory_loc;
2579
2580  /* This function is processed if the push_frame is called from
2581     global_alloc (or reload) function */
2582  if (reg_equiv_memory_loc == 0) return;
2583
2584  for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
2585    if (reg_equiv_memory_loc[i])
2586      {
2587	rtx x = reg_equiv_memory_loc[i];
2588	int offset;
2589
2590	if (GET_CODE (x) == MEM
2591	    && GET_CODE (XEXP (x, 0)) == PLUS
2592	    && XEXP (XEXP (x, 0), 0) == frame_pointer_rtx)
2593	  {
2594	    offset = AUTO_OFFSET(XEXP (x, 0));
2595
2596	    if (! XEXP (x, 0)->used
2597		&& offset >= boundary)
2598	      {
2599		offset += push_size;
2600		XEXP (XEXP (x, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2601
2602		/* mark */
2603		XEXP (x, 0)->used = 1;
2604	      }
2605	  }
2606	else if (GET_CODE (x) == MEM
2607		 && XEXP (x, 0) == frame_pointer_rtx
2608		 && boundary == 0)
2609	  {
2610	    XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size);
2611	    XEXP (x, 0)->used = 1; insn_pushed = TRUE;
2612	  }
2613      }
2614}
2615
2616static void
2617push_frame_of_reg_equiv_constant (push_size, boundary)
2618     HOST_WIDE_INT push_size, boundary;
2619{
2620  int i;
2621  extern rtx *reg_equiv_constant;
2622
2623  /* This function is processed if the push_frame is called from
2624     global_alloc (or reload) function */
2625  if (reg_equiv_constant == 0) return;
2626
2627  for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
2628    if (reg_equiv_constant[i])
2629      {
2630	rtx x = reg_equiv_constant[i];
2631	int offset;
2632
2633	if (GET_CODE (x) == PLUS
2634	    && XEXP (x, 0) == frame_pointer_rtx)
2635	  {
2636	    offset = AUTO_OFFSET(x);
2637
2638	    if (! x->used
2639		&& offset >= boundary)
2640	      {
2641		offset += push_size;
2642		XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2643
2644		/* mark */
2645		x->used = 1;
2646	      }
2647	  }
2648	else if (x == frame_pointer_rtx
2649		 && boundary == 0)
2650	  {
2651	    reg_equiv_constant[i]
2652	      = plus_constant (frame_pointer_rtx, push_size);
2653	    reg_equiv_constant[i]->used = 1; insn_pushed = TRUE;
2654	  }
2655      }
2656}
2657
2658static int
2659check_out_of_frame_access (insn, boundary)
2660     rtx insn;
2661     HOST_WIDE_INT boundary;
2662{
2663  for (; insn; insn = NEXT_INSN (insn))
2664    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2665	|| GET_CODE (insn) == CALL_INSN)
2666      {
2667	if (check_out_of_frame_access_in_operand (PATTERN (insn), boundary))
2668	  return TRUE;
2669      }
2670  return FALSE;
2671}
2672
2673
2674static int
2675check_out_of_frame_access_in_operand (orig, boundary)
2676     rtx orig;
2677     HOST_WIDE_INT boundary;
2678{
2679  register rtx x = orig;
2680  register enum rtx_code code;
2681  int i, j;
2682  const char *fmt;
2683
2684  if (x == 0)
2685    return FALSE;
2686
2687  code = GET_CODE (x);
2688
2689  switch (code)
2690    {
2691    case CONST_INT:
2692    case CONST_DOUBLE:
2693    case CONST:
2694    case SYMBOL_REF:
2695    case CODE_LABEL:
2696    case PC:
2697    case CC0:
2698    case ASM_INPUT:
2699    case ADDR_VEC:
2700    case ADDR_DIFF_VEC:
2701    case RETURN:
2702    case REG:
2703    case ADDRESSOF:
2704      return FALSE;
2705
2706    case MEM:
2707      if (XEXP (x, 0) == frame_pointer_rtx)
2708	if (0 < boundary) return TRUE;
2709      break;
2710
2711    case PLUS:
2712      /* Handle special case of frame register plus constant.  */
2713      if (CONSTANT_P (XEXP (x, 1))
2714	  && XEXP (x, 0) == frame_pointer_rtx)
2715	{
2716	  if (0 <= AUTO_OFFSET(x)
2717	      && AUTO_OFFSET(x) < boundary) return TRUE;
2718	  return FALSE;
2719	}
2720      /*
2721	process further subtree:
2722	Example:  (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
2723	(const_int 5))
2724      */
2725      break;
2726
2727    case CALL_PLACEHOLDER:
2728      if (check_out_of_frame_access (XEXP (x, 0), boundary)) return TRUE;
2729      if (check_out_of_frame_access (XEXP (x, 1), boundary)) return TRUE;
2730      if (check_out_of_frame_access (XEXP (x, 2), boundary)) return TRUE;
2731      break;
2732
2733    default:
2734      break;
2735    }
2736
2737  /* Scan all subexpressions.  */
2738  fmt = GET_RTX_FORMAT (code);
2739  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2740    if (*fmt == 'e')
2741      {
2742	if (check_out_of_frame_access_in_operand (XEXP (x, i), boundary))
2743	  return TRUE;
2744      }
2745    else if (*fmt == 'E')
2746      for (j = 0; j < XVECLEN (x, i); j++)
2747	if (check_out_of_frame_access_in_operand (XVECEXP (x, i, j), boundary))
2748	  return TRUE;
2749
2750  return FALSE;
2751}
2752#endif
2753