/* Language independent return value optimizations
   Copyright (C) 2004-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-pretty-print.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "internal-fn.h"

/* This file implements return value optimizations for functions which
   return aggregate types.

   The pass searches the function for return statements which return a
   local aggregate.  When converted to RTL, such statements generate a
   copy from the local aggregate to the final return value destination
   mandated by the target's ABI.

   That copy can often be avoided by constructing the return value
   directly in the final destination mandated by the target's ABI.

   This is essentially a generic equivalent of the C++ front-end's
   Named Return Value optimization.  */
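
/* As a purely illustrative sketch (hypothetical names, not code from
   this pass), consider a C function such as

     struct big { int data[128]; };

     struct big
     make_big (void)
     {
       struct big tmp;
       tmp.data[0] = 42;
       return tmp;
     }

   Without the optimization, TMP is built in its own stack slot and then
   copied into the ABI-mandated return slot when the function returns.
   After this pass, references to TMP are rewritten to use the
   RESULT_DECL, so the object is constructed in place and the copy
   disappears.  */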

struct nrv_data_t
{
  /* This is the temporary (a VAR_DECL) which appears in all of
     this function's RETURN_EXPR statements.  */
  tree var;

  /* This is the function's RESULT_DECL.  We will replace all occurrences
     of VAR with RESULT_DECL when we apply this optimization.  */
  tree result;
  int modified;
};

static tree finalize_nrv_r (tree *, int *, void *);

/* Callback for the tree walker.

   If TP refers to nrv_data->var, then replace nrv_data->var with
   nrv_data->result.

   If we reach a node where we know all the subtrees are uninteresting,
   then set *WALK_SUBTREES to zero.  */

static tree
finalize_nrv_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nrv_data_t *dp = (struct nrv_data_t *) wi->info;

  /* No need to walk into types.  */
  if (TYPE_P (*tp))
    *walk_subtrees = 0;

  /* Otherwise replace all occurrences of VAR with RESULT.  */
  else if (*tp == dp->var)
    {
      *tp = dp->result;
      dp->modified = 1;
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Main entry point for return value optimizations.

   If this function always returns the same local variable, and that
   local variable is of aggregate type, then replace the variable with
   the function's DECL_RESULT.

   This is the equivalent of the C++ named return value optimization
   applied to optimized trees in a language independent form.  If we
   ever encounter languages which prevent this kind of optimization,
   then we could either have the languages register the optimization or
   we could change the gating function to check the current language.  */

namespace {

const pass_data pass_data_nrv =
{
  GIMPLE_PASS, /* type */
  "nrv", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_NRV, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_nrv : public gimple_opt_pass
{
public:
  pass_nrv (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nrv, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return optimize > 0; }

  virtual unsigned int execute (function *);

}; // class pass_nrv

unsigned int
pass_nrv::execute (function *fun)
{
  tree result = DECL_RESULT (current_function_decl);
  tree result_type = TREE_TYPE (result);
  tree found = NULL;
  basic_block bb;
  gimple_stmt_iterator gsi;
  struct nrv_data_t data;

  /* If this function does not return an aggregate type in memory, then
     there is nothing to do.  */
  if (!aggregate_value_p (result, current_function_decl))
    return 0;

  /* If a GIMPLE type is returned in memory, finalize_nrv_r might create
     non-GIMPLE.  */
  if (is_gimple_reg_type (result_type))
    return 0;

  /* If the front end already did something like this, don't do it here.  */
  if (DECL_NAME (result))
    return 0;

  /* If the result has its address taken then it might be modified
     by means not detected in the following loop.  Bail out in this
     case.  */
  if (TREE_ADDRESSABLE (result))
    return 0;

  /* Look through each block for assignments to the RESULT_DECL.  */
  FOR_EACH_BB_FN (bb, fun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  tree ret_val;

	  if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
	    {
	      /* In a function with an aggregate return value, the
		 gimplifier has changed all non-empty RETURN_EXPRs to
		 return the RESULT_DECL.  */
	      ret_val = gimple_return_retval (return_stmt);
	      if (ret_val)
		gcc_assert (ret_val == result);
	    }
	  else if (gimple_has_lhs (stmt)
		   && gimple_get_lhs (stmt) == result)
	    {
	      tree rhs;

	      if (!gimple_assign_copy_p (stmt))
		return 0;

	      rhs = gimple_assign_rhs1 (stmt);

	      /* Now verify that this return statement uses the same value
		 as any previously encountered return statement.  */
	      if (found != NULL)
		{
		  /* If we found a return statement using a different variable
		     than previous return statements, then we cannot perform
		     NRV optimizations.  */
		  if (found != rhs)
		    return 0;
		}
	      else
		found = rhs;

	      /* The returned value must be a local automatic variable of the
		 same type and alignment as the function's result.  */
	      if (!VAR_P (found)
		  || TREE_THIS_VOLATILE (found)
		  || !auto_var_in_fn_p (found, current_function_decl)
		  || TREE_ADDRESSABLE (found)
		  || DECL_ALIGN (found) > DECL_ALIGN (result)
		  || !useless_type_conversion_p (result_type,
						 TREE_TYPE (found)))
		return 0;
	    }
	  else if (gimple_has_lhs (stmt))
	    {
	      tree addr = get_base_address (gimple_get_lhs (stmt));
	      /* If there is any MODIFY of a component of RESULT,
		 then bail out.  */
	      if (addr && addr == result)
		return 0;
	    }
	}
    }

  if (!found)
    return 0;

  /* If dumping details, then note once and only once the NRV replacement.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "NRV Replaced: ");
      print_generic_expr (dump_file, found, dump_flags);
      fprintf (dump_file, "  with: ");
      print_generic_expr (dump_file, result, dump_flags);
      fprintf (dump_file, "\n");
    }

  TREE_ADDRESSABLE (result) |= TREE_ADDRESSABLE (found);

  /* Now walk through the function changing all references to VAR to be
     RESULT.  */
  data.var = found;
  data.result = result;
  FOR_EACH_BB_FN (bb, fun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
	{
	  gimple *stmt = gsi_stmt (gsi);
	  /* If this is a copy from VAR to RESULT, remove it.  */
	  if (gimple_assign_copy_p (stmt)
	      && gimple_assign_lhs (stmt) == result
	      && gimple_assign_rhs1 (stmt) == found)
	    {
	      unlink_stmt_vdef (stmt);
	      gsi_remove (&gsi, true);
	      release_defs (stmt);
	    }
	  else
	    {
	      struct walk_stmt_info wi;
	      memset (&wi, 0, sizeof (wi));
	      wi.info = &data;
	      data.modified = 0;
	      walk_gimple_op (stmt, finalize_nrv_r, &wi);
	      if (data.modified)
		update_stmt (stmt);
	      gsi_next (&gsi);
	    }
	}
    }

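  /* FOUND no longer needs storage of its own: give it a DECL_VALUE_EXPR
     so that any remaining references to it (for example in debug
     information) are resolved to RESULT.  */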
  SET_DECL_VALUE_EXPR (found, result);
  DECL_HAS_VALUE_EXPR_P (found) = 1;

  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_nrv (gcc::context *ctxt)
{
  return new pass_nrv (ctxt);
}

/* Determine (pessimistically) whether DEST is available for NRV
   optimization, where DEST is expected to be the LHS of a modify
   expression whose RHS is a call to a function returning an aggregate.

   DEST is available if it is not clobbered or used by the call.  */
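
/* For instance (an illustrative, hypothetical fragment), in

     d = foo (&d);

   the call receives the address of D, so it may both read and clobber
   D while constructing its return value; D is then not available as
   the return slot.  */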

static bool
dest_safe_for_nrv_p (gcall *call)
{
  tree dest = gimple_call_lhs (call);

  dest = get_base_address (dest);
  if (! dest)
    return false;

  if (TREE_CODE (dest) == SSA_NAME)
    return true;

  if (call_may_clobber_ref_p (call, dest, false)
      || ref_maybe_used_by_stmt_p (call, dest, false))
    return false;

  return true;
}

/* Walk through the function looking for calls that return their value
   in memory and have their result assigned to an LHS.  For each of
   these, determine whether it is safe to pass the address of the LHS
   as the return slot, and mark the call appropriately if so.

   The NRV shares the return slot with a local variable in the callee; this
   optimization shares the return slot with the target of the call within
   the caller.  If the NRV is performed (which we can't know in general),
   this optimization is safe if the address of the target has not
   escaped prior to the call.  If it has, modifications to the local
   variable will produce visible changes elsewhere, as in PR c++/19317.  */
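
/* A purely illustrative sketch (hypothetical code, not from GCC):

     struct big { int data[128]; };
     struct big make_big (void);
     struct big *escaped;

     void
     caller (void)
     {
       struct big d;
       escaped = &d;
       d = make_big ();
     }

   Here the address of D escapes before the call, so MAKE_BIG (or
   anything it calls) might read or modify D through ESCAPED while it
   is still constructing its return value.  Passing &D as the return
   slot would make those intermediate modifications visible in the
   caller, which is the situation described in PR c++/19317, so the
   return slot optimization is not applied.  Had the address not
   escaped before the call, the callee could have constructed its
   result directly in D.  */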

namespace {

const pass_data pass_data_return_slot =
{
  GIMPLE_PASS, /* type */
  "retslot", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_return_slot : public gimple_opt_pass
{
public:
  pass_return_slot (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_return_slot, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_return_slot

unsigned int
pass_return_slot::execute (function *fun)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gcall *stmt;
	  bool slot_opt_p;

	  stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
	  if (stmt
	      && gimple_call_lhs (stmt)
	      && !gimple_call_return_slot_opt_p (stmt)
	      /* Ignore internal functions; those are expanded specially,
		 and aggregate_value_p on their result might trigger
		 undesirable warnings with some backends.  */
	      && !gimple_call_internal_p (stmt)
	      && aggregate_value_p (TREE_TYPE (gimple_call_lhs (stmt)),
				    gimple_call_fndecl (stmt)))
	    {
	      /* Check whether the location being assigned to is
		 clobbered or used by the call.  */
	      slot_opt_p = dest_safe_for_nrv_p (stmt);
	      gimple_call_set_return_slot_opt (stmt, slot_opt_p);
	    }
	}
    }
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_return_slot (gcc::context *ctxt)
{
  return new pass_return_slot (ctxt);
}
