/* Exported functions from emit-rtl.c
   Copyright (C) 2004-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#ifndef GCC_EMIT_RTL_H
#define GCC_EMIT_RTL_H

class temp_slot;
typedef class temp_slot *temp_slot_p;
class predefined_function_abi;

/* Information maintained about the RTL representation of incoming
   arguments.  */
struct GTY(()) incoming_args {
  /* Number of bytes of args popped by function being compiled on its return.
     Zero if no bytes are to be popped.
     May affect compilation of return insn or of function epilogue.  */
  poly_int64_pod pops_args;

  /* If function's args have a fixed size, this is that size, in bytes.
     Otherwise, it is -1.
     May affect compilation of return insn or of function epilogue.  */
  poly_int64_pod size;

  /* # bytes the prologue should push and pretend that the caller pushed them.
     The prologue must do this, but only if parms can be passed in
     registers.  */
  int pretend_args_size;

  /* This is the offset from the arg pointer to the place where the first
     anonymous arg can be found, if there is one.  */
  rtx arg_offset_rtx;

  /* Quantities of various kinds of registers
     used for the current function's args.  */
  CUMULATIVE_ARGS info;

  /* The arg pointer hard register, or the pseudo into which it was copied.  */
  rtx internal_arg_pointer;
};


/* Data structures maintained for the currently processed function in
   RTL form.  */
struct GTY(()) rtl_data {
  void init_stack_alignment ();

  struct expr_status expr;
  struct emit_status emit;
  struct varasm_status varasm;
  struct incoming_args args;
  struct function_subsections subsections;
  struct rtl_eh eh;

  /* The ABI of the function, i.e. the interface it presents to its callers.
     This is the ABI that should be queried to see which registers the
     function needs to save before it uses them.

     Other functions (including those called by this function) might use
     different ABIs.  */
  const predefined_function_abi *GTY((skip)) abi;

  /* For function.c  */

  /* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
     defined, the needed space is pushed by the prologue.  */
  poly_int64_pod outgoing_args_size;

  /* If nonzero, an RTL expression for the location at which the current
     function returns its result.  If the current function returns its
     result in a register, current_function_return_rtx will always be
     the hard register containing the result.  */
  rtx return_rtx;

  /* Vector of initial-value pairs.  Each pair consists of a pseudo
     register of appropriate mode that stores the initial value of hard
     register REGNO, and that hard register itself.  */
  /* ??? This could be a VEC but there is currently no way to define an
     opaque VEC type.  */
  struct initial_value_struct *hard_reg_initial_vals;

  /* A variable living at the top of the frame that holds a known value.
     Used for detecting stack clobbers.  */
  tree stack_protect_guard;

  /* The __stack_chk_guard variable or expression holding the stack
     protector canary value.  */
  tree stack_protect_guard_decl;

  /* List (chain of INSN_LIST) of labels heading the current handlers for
     nonlocal gotos.  */
  rtx_insn_list *x_nonlocal_goto_handler_labels;

  /* Label that will go on function epilogue.
     Jumping to this label serves as a "return" instruction
     on machines which require execution of the epilogue on all returns.  */
  rtx_code_label *x_return_label;

  /* Label that will go on the end of function epilogue.
     Jumping to this label serves as a "naked return" instruction
     on machines which require execution of the epilogue on all returns.  */
  rtx_code_label *x_naked_return_label;

  /* List (chain of EXPR_LISTs) of all stack slots in this function.
     Made for the sake of unshare_all_rtl.  */
  vec<rtx, va_gc> *x_stack_slot_list;

  /* List of empty areas in the stack frame.  */
  class frame_space *frame_space_list;

  /* Note after which the stack checking probe should be inserted, if one
     is needed.  */
  rtx_note *x_stack_check_probe_note;

  /* Location at which to save the argument pointer if it will need to be
     referenced.  There are two cases where this is done: if nonlocal gotos
     exist, or if vars stored at an offset from the argument pointer will be
     needed by inner routines.  */
  rtx x_arg_pointer_save_area;

  /* Dynamic Realign Argument Pointer used for realigning stack.  */
  rtx drap_reg;

  /* Offset to end of allocated area of stack frame.
     If stack grows down, this is the address of the last stack slot allocated.
     If stack grows up, this is the address for the next slot.  */
  poly_int64_pod x_frame_offset;

  /* Insn after which register parms and SAVE_EXPRs are born, if not
     optimizing.  */
  rtx_insn *x_parm_birth_insn;

  /* List of all used temporaries allocated, by level.  */
  vec<temp_slot_p, va_gc> *x_used_temp_slots;

  /* List of available temp slots.  */
  class temp_slot *x_avail_temp_slots;

  /* Current nesting level for temporaries.  */
  int x_temp_slot_level;

  /* The largest alignment needed on the stack, including requirement
     for outgoing stack alignment.  */
  unsigned int stack_alignment_needed;

  /* Preferred alignment of the end of the stack frame, used when calling
     other functions.  */
  unsigned int preferred_stack_boundary;

  /* The minimum alignment of the parameter stack.  */
  unsigned int parm_stack_boundary;

  /* The largest alignment of any slot allocated on the stack.  */
  unsigned int max_used_stack_slot_alignment;

  /* The stack alignment estimated before reload, with consideration of the
     following factors:
     1. Alignment of local stack variables (max_used_stack_slot_alignment)
     2. Alignment requirement to call other functions
        (preferred_stack_boundary)
     3. Alignment of non-local stack variables that might be spilled to the
        local stack.  */
  unsigned int stack_alignment_estimated;

  /* For reorg.  */

  /* Nonzero if function being compiled called builtin_return_addr or
     builtin_frame_address with nonzero count.  */
  bool accesses_prior_frames;

  /* Nonzero if the function calls __builtin_eh_return.  */
  bool calls_eh_return;

  /* Nonzero if function saves all registers, e.g. if it has a nonlocal
     label that can reach the exit block via non-exceptional paths.  */
  bool saves_all_registers;

  /* Nonzero if function being compiled has nonlocal gotos to parent
     function.  */
  bool has_nonlocal_goto;

  /* Nonzero if function being compiled has an asm statement.  */
  bool has_asm_statement;

  /* This bit is used by the exception handling logic.  It is set if all
     calls (if any) are sibling calls.  Such functions do not have to
     have EH tables generated, as they cannot throw.  A call to such a
     function, however, should be treated as throwing if any of its callees
     can throw.  */
  bool all_throwers_are_sibcalls;

  /* Nonzero if stack limit checking should be enabled in the current
     function.  */
  bool limit_stack;

  /* Nonzero if profiling code should be generated.  */
  bool profile;

  /* Nonzero if the current function uses the constant pool.  */
  bool uses_const_pool;

  /* Nonzero if the current function uses pic_offset_table_rtx.  */
  bool uses_pic_offset_table;

  /* Nonzero if the current function needs an lsda for exception handling.  */
  bool uses_eh_lsda;

  /* Set when the tail call has been produced.  */
  bool tail_call_emit;

  /* Nonzero if code to initialize arg_pointer_save_area has been emitted.  */
  bool arg_pointer_save_area_init;

  /* Nonzero if current function must be given a frame pointer.
     Set in reload1.c or lra-eliminations.c if anything is allocated
     on the stack there.  */
  bool frame_pointer_needed;

  /* When set, expand should optimize for speed.  */
  bool maybe_hot_insn_p;

  /* Nonzero if function stack realignment is needed.  This flag may be
     set twice: before and after reload.  Before reload it is set based
     on the estimated stack alignment; it may be changed after reload if
     the criteria for stack realignment are different by then.  The value
     set after reload is the accurate one and is final.  */
  bool stack_realign_needed;

  /* Nonzero if function stack realignment is tried.  This flag is set
     only once before reload.  It affects register elimination.  This
     is used to generate DWARF debug info for stack variables.  */
  bool stack_realign_tried;

  /* Nonzero if function being compiled needs dynamic realigned
     argument pointer (drap) if stack needs realigning.  */
  bool need_drap;

  /* Nonzero if function stack realignment estimation is done, i.e. the
     stack_realign_needed flag has been set before reload based on the
     estimated stack alignment info.  */
  bool stack_realign_processed;

  /* Nonzero if function stack realignment has been finalized, i.e. the
     stack_realign_needed flag has been set and finalized after reload.  */
  bool stack_realign_finalized;

  /* True if dbr_schedule has already been called for this function.  */
  bool dbr_scheduled_p;

  /* True if the current function cannot throw.  Unlike
     TREE_NOTHROW (current_function_decl) it is set even for overwritable
     functions whose currently compiled version is nothrow.  */
  bool nothrow;

  /* True if we performed shrink-wrapping for the current function.  */
  bool shrink_wrapped;

  /* True if we performed shrink-wrapping for separate components for
     the current function.  */
  bool shrink_wrapped_separate;

  /* Nonzero if function being compiled doesn't modify the stack pointer
     (ignoring the prologue and epilogue).  This is only valid after
     pass_stack_ptr_mod has run.  */
  bool sp_is_unchanging;

  /* True if the stack pointer is clobbered by an asm statement.  */
  bool sp_is_clobbered_by_asm;

  /* Nonzero if function being compiled doesn't contain any calls
     (ignoring the prologue and epilogue).  This is set prior to
     register allocation in IRA and is valid for the remaining
     compiler passes.  */
  bool is_leaf;

  /* Nonzero if the function being compiled is a leaf function which only
     uses leaf registers.  This is valid after reload (specifically after
     sched2) and is useful only if the port defines LEAF_REGISTERS.  */
  bool uses_only_leaf_regs;

  /* Nonzero if the function being compiled has undergone hot/cold partitioning
     (under flag_reorder_blocks_and_partition) and has at least one cold
     block.  */
  bool has_bb_partition;

  /* Nonzero if the function being compiled has completed the bb reordering
     pass.  */
  bool bb_reorder_complete;

  /* Like regs_ever_live, but 1 if a reg is set or clobbered from an
     asm.  Unlike regs_ever_live, elements of this array corresponding
     to eliminable regs (like the frame pointer) are set if an asm
     sets them.  */
  HARD_REG_SET asm_clobbers;

  /* The highest address seen during shorten_branches.  */
  int max_insn_address;
};

#define return_label (crtl->x_return_label)
#define naked_return_label (crtl->x_naked_return_label)
#define stack_slot_list (crtl->x_stack_slot_list)
#define parm_birth_insn (crtl->x_parm_birth_insn)
#define frame_offset (crtl->x_frame_offset)
#define stack_check_probe_note (crtl->x_stack_check_probe_note)
#define arg_pointer_save_area (crtl->x_arg_pointer_save_area)
#define used_temp_slots (crtl->x_used_temp_slots)
#define avail_temp_slots (crtl->x_avail_temp_slots)
#define temp_slot_level (crtl->x_temp_slot_level)
#define nonlocal_goto_handler_labels (crtl->x_nonlocal_goto_handler_labels)
#define frame_pointer_needed (crtl->frame_pointer_needed)
#define stack_realign_fp (crtl->stack_realign_needed && !crtl->need_drap)
#define stack_realign_drap (crtl->stack_realign_needed && crtl->need_drap)

extern GTY(()) struct rtl_data x_rtl;

/* Accessor to RTL data structures.  We keep them statically allocated now
   since we never keep multiple functions.  For a threaded compiler we might,
   however, want to do this differently.  */
#define crtl (&x_rtl)
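
/* Illustrative sketch, not taken from this file: passes normally read
   per-function RTL state through the crtl accessor rather than through
   x_rtl directly, e.g.

     bool leaf_without_eh_return = crtl->is_leaf && !crtl->calls_eh_return;

   Only the field names come from struct rtl_data above; the local variable
   is made up for the example.  */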

/* Return whether two MEM_ATTRs are equal.  */
bool mem_attrs_eq_p (const class mem_attrs *, const class mem_attrs *);

/* Set the alias set of MEM to SET.  */
extern void set_mem_alias_set (rtx, alias_set_type);

/* Set the alignment of MEM to ALIGN bits.  */
extern void set_mem_align (rtx, unsigned int);

/* Set the address space of MEM to ADDRSPACE.  */
extern void set_mem_addr_space (rtx, addr_space_t);

/* Set the expr for MEM to EXPR.  */
extern void set_mem_expr (rtx, tree);

/* Set the offset for MEM to OFFSET.  */
extern void set_mem_offset (rtx, poly_int64);

/* Clear the offset recorded for MEM.  */
extern void clear_mem_offset (rtx);

/* Set the size for MEM to SIZE.  */
extern void set_mem_size (rtx, poly_int64);

/* Clear the size recorded for MEM.  */
extern void clear_mem_size (rtx);

/* Set the attributes for MEM appropriate for a spill slot.  */
extern void set_mem_attrs_for_spill (rtx);
extern tree get_spill_slot_decl (bool);
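
/* Illustrative sketch, not taken from this file: code that builds a MEM by
   hand usually records what is known about the access so alias analysis and
   scheduling can rely on it, e.g.

     rtx mem = gen_rtx_MEM (SImode, addr);
     set_mem_align (mem, 32);
     set_mem_alias_set (mem, get_alias_set (type));
     set_mem_size (mem, GET_MODE_SIZE (SImode));

   Here ADDR and TYPE are placeholders for a valid address rtx and a tree
   type; the alignment is given in bits, as documented above.  */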

/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  */
extern rtx replace_equiv_address (rtx, rtx, bool = false);

/* Likewise, but the reference is not required to be valid.  */
extern rtx replace_equiv_address_nv (rtx, rtx, bool = false);
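
/* Illustrative sketch, not taken from this file: forcing a MEM's address
   into a register without changing which memory is referenced, e.g.

     rtx reg = force_reg (Pmode, XEXP (mem, 0));
     mem = replace_equiv_address (mem, reg);

   The _nv variant returns the new MEM even when the address is not valid
   for the target.  */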

extern rtx gen_blockage (void);
extern rtvec gen_rtvec (int, ...);
extern rtx copy_insn_1 (rtx);
extern rtx copy_insn (rtx);
extern rtx_insn *copy_delay_slot_insn (rtx_insn *);
extern rtx gen_int_mode (poly_int64, machine_mode);
extern rtx_insn *emit_copy_of_insn_after (rtx_insn *, rtx_insn *);
extern void set_reg_attrs_from_value (rtx, rtx);
extern void set_reg_attrs_for_parm (rtx, rtx);
extern void set_reg_attrs_for_decl_rtl (tree t, rtx x);
extern void adjust_reg_mode (rtx, machine_mode);
extern int mem_expr_equal_p (const_tree, const_tree);
extern rtx gen_int_shift_amount (machine_mode, poly_int64);

extern bool need_atomic_barrier_p (enum memmodel, bool);

/* Return the current sequence.  */

static inline struct sequence_stack *
get_current_sequence (void)
{
  return &crtl->emit.seq;
}

/* Return the outermost sequence.  */

static inline struct sequence_stack *
get_topmost_sequence (void)
{
  struct sequence_stack *seq, *top;

  seq = get_current_sequence ();
  do
    {
      top = seq;
      seq = seq->next;
    } while (seq);
  return top;
}

/* Return the first insn of the current sequence or current function.  */

static inline rtx_insn *
get_insns (void)
{
  return get_current_sequence ()->first;
}

/* Specify a new insn as the first in the chain.  */

static inline void
set_first_insn (rtx_insn *insn)
{
  gcc_checking_assert (!insn || !PREV_INSN (insn));
  get_current_sequence ()->first = insn;
}

/* Return the last insn emitted in current sequence or current function.  */

static inline rtx_insn *
get_last_insn (void)
{
  return get_current_sequence ()->last;
}

/* Specify a new insn as the last in the chain.  */

static inline void
set_last_insn (rtx_insn *insn)
{
  gcc_checking_assert (!insn || !NEXT_INSN (insn));
  get_current_sequence ()->last = insn;
}

/* Return a number larger than any instruction's uid in this function.  */

static inline int
get_max_uid (void)
{
  return crtl->emit.x_cur_insn_uid;
}
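
/* Illustrative sketch, not taken from this file: a typical pass walks the
   current insn chain and sizes per-insn tables with get_max_uid, e.g.

     int *mark = XCNEWVEC (int, get_max_uid ());
     for (rtx_insn *insn = get_insns (); insn; insn = NEXT_INSN (insn))
       if (INSN_P (insn))
         mark[INSN_UID (insn)] = 1;
     free (mark);

   get_max_uid returns a value larger than any uid currently in use, so it
   is a safe table size.  */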

extern bool valid_for_const_vector_p (machine_mode, rtx);
extern rtx gen_const_vec_duplicate (machine_mode, rtx);
extern rtx gen_vec_duplicate (machine_mode, rtx);

extern rtx gen_const_vec_series (machine_mode, rtx, rtx);
extern rtx gen_vec_series (machine_mode, rtx, rtx);
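
/* Illustrative sketch, not taken from this file, assuming the target
   provides V4SImode: building the series vector {0, 1, 2, 3} and a
   broadcast of a scalar X:

     rtx series = gen_const_vec_series (V4SImode, const0_rtx, const1_rtx);
     rtx dup = gen_vec_duplicate (V4SImode, x);

   The gen_const_vec_* routines require constant inputs; the gen_vec_*
   forms fall back to a non-constant rtx when the inputs are not valid
   constant-vector elements.  */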

extern void set_decl_incoming_rtl (tree, rtx, bool);

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR.
   (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  */
extern rtx change_address (rtx, machine_mode, rtx);

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  */
#define adjust_address(MEMREF, MODE, OFFSET) \
  adjust_address_1 (MEMREF, MODE, OFFSET, 1, 1, 0, 0)

/* Likewise, but the reference is not required to be valid.  */
#define adjust_address_nv(MEMREF, MODE, OFFSET) \
  adjust_address_1 (MEMREF, MODE, OFFSET, 0, 1, 0, 0)

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  Assume that it's
   for a bitfield and conservatively drop the underlying object if we
   cannot be sure to stay within its bounds.  */
#define adjust_bitfield_address(MEMREF, MODE, OFFSET) \
  adjust_address_1 (MEMREF, MODE, OFFSET, 1, 1, 1, 0)

/* As for adjust_bitfield_address, but specify that the width of
   BLKmode accesses is SIZE bytes.  */
#define adjust_bitfield_address_size(MEMREF, MODE, OFFSET, SIZE) \
  adjust_address_1 (MEMREF, MODE, OFFSET, 1, 1, 1, SIZE)

/* Likewise, but the reference is not required to be valid.  */
#define adjust_bitfield_address_nv(MEMREF, MODE, OFFSET) \
  adjust_address_1 (MEMREF, MODE, OFFSET, 0, 1, 1, 0)

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   increased by OFFSET bytes from MEMREF.  */
#define adjust_automodify_address(MEMREF, MODE, ADDR, OFFSET) \
  adjust_automodify_address_1 (MEMREF, MODE, ADDR, OFFSET, 1)

/* Likewise, but the reference is not required to be valid.  */
#define adjust_automodify_address_nv(MEMREF, MODE, ADDR, OFFSET) \
  adjust_automodify_address_1 (MEMREF, MODE, ADDR, OFFSET, 0)

extern rtx adjust_address_1 (rtx, machine_mode, poly_int64, int, int,
			     int, poly_int64);
extern rtx adjust_automodify_address_1 (rtx, machine_mode, rtx,
					poly_int64, int);
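
/* Illustrative sketch, not taken from this file: splitting a DImode MEM
   into its two SImode halves while keeping the recorded MEM attributes
   (alias set, alignment, offset) consistent with each new access:

     rtx lo = adjust_address (mem, SImode, 0);
     rtx hi = adjust_address (mem, SImode, 4);

   The _nv variants are for addresses that need not be valid yet, e.g.
   before they have been legitimized.  */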

/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */
extern rtx offset_address (rtx, rtx, unsigned HOST_WIDE_INT);

/* Given REF, a MEM, and T, either the type of REF or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  */
extern void set_mem_attributes (rtx, tree, int);

/* Similar, except that BITPOS has not yet been applied to REF, so if
   we alter MEM_OFFSET according to T then we should subtract BITPOS
   expecting that it'll be added back in later.  */
extern void set_mem_attributes_minus_bitpos (rtx, tree, int, poly_int64);
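
/* Illustrative sketch, not taken from this file: after giving a declaration
   DECL a stack slot, record what the MEM refers to so later passes can use
   that information:

     rtx mem = assign_stack_local (DECL_MODE (decl), size, align);
     set_mem_attributes (mem, decl, 1);

   SIZE and ALIGN are placeholders; OBJECTP = 1 says the MEM is a new object
   of DECL's type rather than a reference into some larger object.  */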

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */
extern int get_mem_align_offset (rtx, unsigned int);

/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and adjusted by OFFSET.  */
extern rtx widen_memory_access (rtx, machine_mode, poly_int64);

extern void maybe_set_max_label_num (rtx_code_label *x);

#endif /* GCC_EMIT_RTL_H */