1/* Gimple IR support functions.
2
3   Copyright (C) 2007-2020 Free Software Foundation, Inc.
4   Contributed by Aldy Hernandez <aldyh@redhat.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3.  If not see
20<http://www.gnu.org/licenses/>.  */
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "backend.h"
26#include "tree.h"
27#include "gimple.h"
28#include "ssa.h"
29#include "cgraph.h"
30#include "diagnostic.h"
31#include "alias.h"
32#include "fold-const.h"
33#include "calls.h"
34#include "stor-layout.h"
35#include "internal-fn.h"
36#include "tree-eh.h"
37#include "gimple-iterator.h"
38#include "gimple-walk.h"
39#include "gimplify.h"
40#include "target.h"
41#include "builtins.h"
42#include "selftest.h"
43#include "gimple-pretty-print.h"
44#include "stringpool.h"
45#include "attribs.h"
46#include "asan.h"
47#include "langhooks.h"
48
49
/* All the tuples have their operand vector (if present) at the very bottom
   of the structure.  Therefore, the offset required to find the
   operands vector is the size of the structure minus the size of the
   1-element tree array at the end (see gimple_ops).  */
54#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
55	(HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
56EXPORTED_CONST size_t gimple_ops_offset_[] = {
57#include "gsstruct.def"
58};
59#undef DEFGSSTRUCT
60
61#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof (struct STRUCT),
62static const size_t gsstruct_code_size[] = {
63#include "gsstruct.def"
64};
65#undef DEFGSSTRUCT
66
67#define DEFGSCODE(SYM, NAME, GSSCODE)	NAME,
68const char *const gimple_code_name[] = {
69#include "gimple.def"
70};
71#undef DEFGSCODE
72
73#define DEFGSCODE(SYM, NAME, GSSCODE)	GSSCODE,
74EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
75#include "gimple.def"
76};
77#undef DEFGSCODE
78
79/* Gimple stats.  */
80
81uint64_t gimple_alloc_counts[(int) gimple_alloc_kind_all];
82uint64_t gimple_alloc_sizes[(int) gimple_alloc_kind_all];
83
84/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
85static const char * const gimple_alloc_kind_names[] = {
86    "assignments",
87    "phi nodes",
88    "conditionals",
89    "everything else"
90};
91
92/* Static gimple tuple members.  */
93const enum gimple_code gassign::code_;
94const enum gimple_code gcall::code_;
95const enum gimple_code gcond::code_;
96
97
98/* Gimple tuple constructors.
99   Note: Any constructor taking a ``gimple_seq'' as a parameter, can
100   be passed a NULL to start with an empty sequence.  */
101
/* Set the code for statement G to CODE.  Writes the code field
   directly; callers are responsible for passing a valid code.  */

static inline void
gimple_set_code (gimple *g, enum gimple_code code)
{
  g->code = code;
}
109
110/* Return the number of bytes needed to hold a GIMPLE statement with
111   code CODE.  */
112
113size_t
114gimple_size (enum gimple_code code, unsigned num_ops)
115{
116  size_t size = gsstruct_code_size[gss_for_code (code)];
117  if (num_ops > 0)
118    size += (sizeof (tree) * (num_ops - 1));
119  return size;
120}
121
/* Initialize GIMPLE statement G with CODE and NUM_OPS.  G must already
   be allocated with enough room for NUM_OPS operands (see gimple_size).  */

void
gimple_init (gimple *g, enum gimple_code code, unsigned num_ops)
{
  gimple_set_code (g, code);
  gimple_set_num_ops (g, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  g->modified = 1;
  /* Make the statement a singleton sequence (its own prev/next links).  */
  gimple_init_singleton (g);
}
135
/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
   operands.  The statement is allocated from GC memory, zeroed, and
   initialized via gimple_init.  */

gimple *
gimple_alloc (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
  size_t size;
  gimple *stmt;

  size = gimple_size (code, num_ops);
  /* Account this allocation in the per-kind statistics counters.  */
  if (GATHER_STATISTICS)
    {
      enum gimple_alloc_kind kind = gimple_alloc_kind (code);
      gimple_alloc_counts[(int) kind]++;
      gimple_alloc_sizes[(int) kind] += size;
    }

  /* The cleared allocation zero-initializes the whole tuple,
     including the trailing operand vector.  */
  stmt = ggc_alloc_cleared_gimple_statement_stat (size PASS_MEM_STAT);
  gimple_init (stmt, code, num_ops);
  return stmt;
}
157
/* Set SUBCODE to be the code of the expression computed by statement G.
   Depending on the statement, SUBCODE holds either a tree_code (e.g. for
   assignments and conditions) or GF_* flag bits — see gimple.h.  */

static inline void
gimple_set_subcode (gimple *g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->subcode = subcode;
}
168
169
170
/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the subcode
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

/* Convenience wrapper that supplies the memory-statistics location.  */
#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

static gimple *
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
		            unsigned num_ops MEM_STAT_DECL)
{
  gimple *s = gimple_alloc (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}
187
188
/* Build a GIMPLE_RETURN statement returning RETVAL.  RETVAL may be
   NULL_TREE for a return without a value.  */

greturn *
gimple_build_return (tree retval)
{
  /* Two operand slots are allocated; see gimple.h for their layout.  */
  greturn *s
    = as_a <greturn *> (gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK,
					       2));
  if (retval)
    gimple_return_set_retval (s, retval);
  return s;
}
201
202/* Reset alias information on call S.  */
203
204void
205gimple_call_reset_alias_info (gcall *s)
206{
207  if (gimple_call_flags (s) & ECF_CONST)
208    memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
209  else
210    pt_solution_reset (gimple_call_use_set (s));
211  if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
212    memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
213  else
214    pt_solution_reset (gimple_call_clobber_set (s));
215}
216
/* Helper for gimple_build_call, gimple_build_call_valist,
   gimple_build_call_vec and gimple_build_call_from_tree.  Build the basic
   components of a GIMPLE_CALL statement to function FN with NARGS
   arguments.  The arguments themselves are filled in by the callers.  */

static inline gcall *
gimple_build_call_1 (tree fn, unsigned nargs)
{
  /* NARGS + 3 operand slots: the extra slots are for the non-argument
     operands (LHS, callee, ...) — see gimple.h for the exact layout.  */
  gcall *s
    = as_a <gcall *> (gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK,
					     nargs + 3));
  /* A bare FUNCTION_DECL must be wrapped in an ADDR_EXPR to be a valid
     callee operand.  */
  if (TREE_CODE (fn) == FUNCTION_DECL)
    fn = build_fold_addr_expr (fn);
  /* Operand 1 holds the callee.  */
  gimple_set_op (s, 1, fn);
  gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
  gimple_call_reset_alias_info (s);
  return s;
}
235
236
237/* Build a GIMPLE_CALL statement to function FN with the arguments
238   specified in vector ARGS.  */
239
240gcall *
241gimple_build_call_vec (tree fn, vec<tree> args)
242{
243  unsigned i;
244  unsigned nargs = args.length ();
245  gcall *call = gimple_build_call_1 (fn, nargs);
246
247  for (i = 0; i < nargs; i++)
248    gimple_call_set_arg (call, i, args[i]);
249
250  return call;
251}
252
253
/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  The ... are the arguments, each a tree.  */

gcall *
gimple_build_call (tree fn, unsigned nargs, ...)
{
  va_list ap;
  gcall *call;
  unsigned i;

  /* FN must be a declaration or something already valid as a GIMPLE
     call address.  */
  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}
275
276
/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  AP contains the arguments; the caller owns AP and must
   va_end it.  */

gcall *
gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
{
  gcall *call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));

  return call;
}
295
296
/* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
   Build the basic components of a GIMPLE_CALL statement to internal
   function FN with NARGS arguments.  Internal calls have no callee tree;
   they are identified by the GF_CALL_INTERNAL subcode flag instead.  */

static inline gcall *
gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
{
  gcall *s
    = as_a <gcall *> (gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK,
					     nargs + 3));
  /* Mark the call as internal before storing the internal-fn code.  */
  s->subcode |= GF_CALL_INTERNAL;
  gimple_call_set_internal_fn (s, fn);
  gimple_call_reset_alias_info (s);
  return s;
}
312
313
/* Build a GIMPLE_CALL statement to internal function FN.  NARGS is
   the number of arguments.  The ... are the arguments, each a tree.  */

gcall *
gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
{
  va_list ap;
  gcall *call;
  unsigned i;

  call = gimple_build_call_internal_1 (fn, nargs);
  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}
332
333
334/* Build a GIMPLE_CALL statement to internal function FN with the arguments
335   specified in vector ARGS.  */
336
337gcall *
338gimple_build_call_internal_vec (enum internal_fn fn, vec<tree> args)
339{
340  unsigned i, nargs;
341  gcall *call;
342
343  nargs = args.length ();
344  call = gimple_build_call_internal_1 (fn, nargs);
345  for (i = 0; i < nargs; i++)
346    gimple_call_set_arg (call, i, args[i]);
347
348  return call;
349}
350
351
/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  FNPTRTYPE, if non-NULL, is the type of the function
   pointer and is used to set the call's fntype.  */

gcall *
gimple_build_call_from_tree (tree t, tree fnptrtype)
{
  unsigned i, nargs;
  gcall *call;

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);

  tree fndecl = NULL_TREE;
  /* A CALL_EXPR with no CALL_EXPR_FN is a call to an internal function.  */
  if (CALL_EXPR_FN (t) == NULL_TREE)
    call = gimple_build_call_internal_1 (CALL_EXPR_IFN (t), nargs);
  else
    {
      fndecl = get_callee_fndecl (t);
      call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);
    }

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));
  gimple_set_location (call, EXPR_LOCATION (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_must_tail (call, CALL_EXPR_MUST_TAIL_CALL (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  /* Only one of alloca-for-var and from-thunk is copied, keyed on the
     callee; NOTE(review): this pairing suggests the two CALL_EXPR flags
     share storage — confirm against tree.h.  */
  if (fndecl
      && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
      && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
    gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
  else
    gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_call_set_nothrow (call, TREE_NOTHROW (t));
  gimple_call_set_by_descriptor (call, CALL_EXPR_BY_DESCRIPTOR (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));

  if (fnptrtype)
    {
      gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));

      /* Check if it's an indirect CALL and the type has the
	 nocf_check attribute.  In that case propagate the information
	 to the gimple CALL insn.  */
      if (!fndecl)
	{
	  gcc_assert (POINTER_TYPE_P (fnptrtype));
	  tree fntype = TREE_TYPE (fnptrtype);

	  if (lookup_attribute ("nocf_check", TYPE_ATTRIBUTES (fntype)))
	    gimple_call_set_nocf_check (call, TRUE);
	}
    }

  return call;
}
416
417
/* Build a GIMPLE_ASSIGN statement.

   LHS of the assignment.
   RHS of the assignment which can be unary or binary.  The RHS tree is
   decomposed into a subcode and up to three operands.  */

gassign *
gimple_build_assign (tree lhs, tree rhs MEM_STAT_DECL)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  extract_ops_from_tree (rhs, &subcode, &op1, &op2, &op3);
  return gimple_build_assign (lhs, subcode, op1, op2, op3 PASS_MEM_STAT);
}
432
433
/* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
   OP1, OP2 and OP3.  OP2 and OP3 may be NULL_TREE when SUBCODE does
   not use them.  */

static inline gassign *
gimple_build_assign_1 (tree lhs, enum tree_code subcode, tree op1,
		       tree op2, tree op3 MEM_STAT_DECL)
{
  unsigned num_ops;
  gassign *p;

  /* Need 1 operand for LHS and 1 to 3 for the RHS (depending on the
     code).  */
  num_ops = get_gimple_rhs_num_ops (subcode) + 1;

  p = as_a <gassign *> (
        gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
				    PASS_MEM_STAT));
  gimple_assign_set_lhs (p, lhs);
  gimple_assign_set_rhs1 (p, op1);
  if (op2)
    {
      gcc_assert (num_ops > 2);
      gimple_assign_set_rhs2 (p, op2);
    }

  if (op3)
    {
      gcc_assert (num_ops > 3);
      gimple_assign_set_rhs3 (p, op3);
    }

  return p;
}
467
/* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
   OP1, OP2 and OP3 (ternary RHS).  */

gassign *
gimple_build_assign (tree lhs, enum tree_code subcode, tree op1,
		     tree op2, tree op3 MEM_STAT_DECL)
{
  return gimple_build_assign_1 (lhs, subcode, op1, op2, op3 PASS_MEM_STAT);
}
477
/* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
   OP1 and OP2 (binary RHS).  */

gassign *
gimple_build_assign (tree lhs, enum tree_code subcode, tree op1,
		     tree op2 MEM_STAT_DECL)
{
  return gimple_build_assign_1 (lhs, subcode, op1, op2, NULL_TREE
				PASS_MEM_STAT);
}
488
/* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operand OP1
   (unary or single RHS).  */

gassign *
gimple_build_assign (tree lhs, enum tree_code subcode, tree op1 MEM_STAT_DECL)
{
  return gimple_build_assign_1 (lhs, subcode, op1, NULL_TREE, NULL_TREE
				PASS_MEM_STAT);
}
497
498
/* Build a GIMPLE_COND statement.

   PRED_CODE is the comparison code used to compare LHS and RHS.
   T_LABEL is the label to jump to if the condition is true.
   F_LABEL is the label to jump to otherwise.  */

gcond *
gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
		   tree t_label, tree f_label)
{
  gcond *p;

  gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
  /* Four operands: LHS, RHS, true label, false label; the comparison
     code is stored in the subcode.  */
  p = as_a <gcond *> (gimple_build_with_ops (GIMPLE_COND, pred_code, 4));
  gimple_cond_set_lhs (p, lhs);
  gimple_cond_set_rhs (p, rhs);
  gimple_cond_set_true_label (p, t_label);
  gimple_cond_set_false_label (p, f_label);
  return p;
}
519
/* Build a GIMPLE_COND statement from the conditional expression tree
   COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */

gcond *
gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
{
  enum tree_code code;
  tree lhs, rhs;

  /* Decompose COND into a comparison code and its two operands.  */
  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  return gimple_build_cond (code, lhs, rhs, t_label, f_label);
}
532
/* Set code, lhs, and rhs of a GIMPLE_COND STMT from a suitable
   boolean expression tree COND.  The labels of STMT are unchanged.  */

void
gimple_cond_set_condition_from_tree (gcond *stmt, tree cond)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  gimple_cond_set_condition (stmt, code, lhs, rhs);
}
545
/* Build a GIMPLE_LABEL statement for LABEL (a LABEL_DECL stored as the
   statement's single operand).  */

glabel *
gimple_build_label (tree label)
{
  glabel *p
    = as_a <glabel *> (gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1));
  gimple_label_set_label (p, label);
  return p;
}
556
/* Build a GIMPLE_GOTO statement to label DEST (stored as the
   statement's single operand).  */

ggoto *
gimple_build_goto (tree dest)
{
  ggoto *p
    = as_a <ggoto *> (gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1));
  gimple_goto_set_dest (p, dest);
  return p;
}
567
568
/* Build a GIMPLE_NOP statement (a no-operation placeholder with no
   operands).  */

gimple *
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}
576
577
/* Build a GIMPLE_BIND statement.
   VARS are the variables in BODY.
   BODY may be NULL to start with an empty sequence.
   BLOCK is the containing block; may be NULL.  */

gbind *
gimple_build_bind (tree vars, gimple_seq body, tree block)
{
  gbind *p = as_a <gbind *> (gimple_alloc (GIMPLE_BIND, 0));
  gimple_bind_set_vars (p, vars);
  if (body)
    gimple_bind_set_body (p, body);
  if (block)
    gimple_bind_set_block (p, block);
  return p;
}
593
/* Helper function to set the simple fields of an asm stmt.

   STRING is a pointer to a string that is the asm blocks assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   NLABELS is the number of destination labels (asm goto).
   */

static inline gasm *
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
                    unsigned nclobbers, unsigned nlabels)
{
  gasm *p;
  int size = strlen (string);

  /* ASMs with labels cannot have outputs.  This should have been
     enforced by the front end.  */
  gcc_assert (nlabels == 0 || noutputs == 0);

  /* One operand slot per input, output, clobber and label.  */
  p = as_a <gasm *> (
        gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
			       ninputs + noutputs + nclobbers + nlabels));

  p->ni = ninputs;
  p->no = noutputs;
  p->nc = nclobbers;
  p->nl = nlabels;
  /* Copy the assembly string into GC memory.  */
  p->string = ggc_alloc_string (string, size);

  /* Account the string copy to the ASM allocation kind.  */
  if (GATHER_STATISTICS)
    gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;

  return p;
}
628
629/* Build a GIMPLE_ASM statement.
630
631   STRING is the assembly code.
632   NINPUT is the number of register inputs.
633   NOUTPUT is the number of register outputs.
634   NCLOBBERS is the number of clobbered registers.
635   INPUTS is a vector of the input register parameters.
636   OUTPUTS is a vector of the output register parameters.
637   CLOBBERS is a vector of the clobbered register parameters.
638   LABELS is a vector of destination labels.  */
639
640gasm *
641gimple_build_asm_vec (const char *string, vec<tree, va_gc> *inputs,
642                      vec<tree, va_gc> *outputs, vec<tree, va_gc> *clobbers,
643		      vec<tree, va_gc> *labels)
644{
645  gasm *p;
646  unsigned i;
647
648  p = gimple_build_asm_1 (string,
649                          vec_safe_length (inputs),
650                          vec_safe_length (outputs),
651                          vec_safe_length (clobbers),
652			  vec_safe_length (labels));
653
654  for (i = 0; i < vec_safe_length (inputs); i++)
655    gimple_asm_set_input_op (p, i, (*inputs)[i]);
656
657  for (i = 0; i < vec_safe_length (outputs); i++)
658    gimple_asm_set_output_op (p, i, (*outputs)[i]);
659
660  for (i = 0; i < vec_safe_length (clobbers); i++)
661    gimple_asm_set_clobber_op (p, i, (*clobbers)[i]);
662
663  for (i = 0; i < vec_safe_length (labels); i++)
664    gimple_asm_set_label_op (p, i, (*labels)[i]);
665
666  return p;
667}
668
/* Build a GIMPLE_CATCH statement.

  TYPES are the catch types.
  HANDLER is the exception handler; may be NULL for an empty sequence.  */

gcatch *
gimple_build_catch (tree types, gimple_seq handler)
{
  gcatch *p = as_a <gcatch *> (gimple_alloc (GIMPLE_CATCH, 0));
  gimple_catch_set_types (p, types);
  if (handler)
    gimple_catch_set_handler (p, handler);

  return p;
}
684
/* Build a GIMPLE_EH_FILTER statement.

   TYPES are the filter's types.
   FAILURE is the filter's failure action; may be NULL for an empty
   sequence.  */

geh_filter *
gimple_build_eh_filter (tree types, gimple_seq failure)
{
  geh_filter *p = as_a <geh_filter *> (gimple_alloc (GIMPLE_EH_FILTER, 0));
  gimple_eh_filter_set_types (p, types);
  if (failure)
    gimple_eh_filter_set_failure (p, failure);

  return p;
}
700
/* Build a GIMPLE_EH_MUST_NOT_THROW statement.  DECL is the noreturn
   function to call if an exception escapes (e.g. std::terminate).  */

geh_mnt *
gimple_build_eh_must_not_throw (tree decl)
{
  geh_mnt *p = as_a <geh_mnt *> (gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0));

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
  gimple_eh_must_not_throw_set_fndecl (p, decl);

  return p;
}
714
/* Build a GIMPLE_EH_ELSE statement.  N_BODY runs on the normal path,
   E_BODY on the exceptional path.  */

geh_else *
gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
{
  geh_else *p = as_a <geh_else *> (gimple_alloc (GIMPLE_EH_ELSE, 0));
  gimple_eh_else_set_n_body (p, n_body);
  gimple_eh_else_set_e_body (p, e_body);
  return p;
}
725
/* Build a GIMPLE_TRY statement.

   EVAL is the expression to evaluate; may be NULL for an empty sequence.
   CLEANUP is the cleanup expression; may be NULL for an empty sequence.
   KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
   whether this is a try/catch or a try/finally respectively.  */

gtry *
gimple_build_try (gimple_seq eval, gimple_seq cleanup,
    		  enum gimple_try_flags kind)
{
  gtry *p;

  gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
  p = as_a <gtry *> (gimple_alloc (GIMPLE_TRY, 0));
  /* The try kind is stored in the subcode.  */
  gimple_set_subcode (p, kind);
  if (eval)
    gimple_try_set_eval (p, eval);
  if (cleanup)
    gimple_try_set_cleanup (p, cleanup);

  return p;
}
749
/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.

   CLEANUP is the cleanup expression; may be NULL for an empty
   sequence.  */

gimple *
gimple_build_wce (gimple_seq cleanup)
{
  gimple *p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
  if (cleanup)
    gimple_wce_set_cleanup (p, cleanup);

  return p;
}
763
764
/* Build a GIMPLE_RESX statement re-raising from EH REGION.  */

gresx *
gimple_build_resx (int region)
{
  gresx *p
    = as_a <gresx *> (gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0));
  p->region = region;
  return p;
}
775
776
/* The helper for constructing a gimple switch statement.
   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the default.
   DEFAULT_LABEL is the default label for the switch statement; it must
   be non-NULL.  The case labels are left for the caller to fill in.  */

gswitch *
gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
{
  /* nlabels + 1 default label + 1 index.  */
  gcc_checking_assert (default_label);
  gswitch *p = as_a <gswitch *> (gimple_build_with_ops (GIMPLE_SWITCH,
							ERROR_MARK,
							1 + 1 + nlabels));
  gimple_switch_set_index (p, index);
  gimple_switch_set_default_label (p, default_label);
  return p;
}
794
795/* Build a GIMPLE_SWITCH statement.
796
797   INDEX is the switch's index.
798   DEFAULT_LABEL is the default label
799   ARGS is a vector of labels excluding the default.  */
800
801gswitch *
802gimple_build_switch (tree index, tree default_label, vec<tree> args)
803{
804  unsigned i, nlabels = args.length ();
805
806  gswitch *p = gimple_build_switch_nlabels (nlabels, index, default_label);
807
808  /* Copy the labels from the vector to the switch statement.  */
809  for (i = 0; i < nlabels; i++)
810    gimple_switch_set_label (p, i + 1, args[i]);
811
812  return p;
813}
814
/* Build a GIMPLE_EH_DISPATCH statement for EH REGION.  */

geh_dispatch *
gimple_build_eh_dispatch (int region)
{
  geh_dispatch *p
    = as_a <geh_dispatch *> (
	gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0));
  p->region = region;
  return p;
}
826
/* Build a new GIMPLE_DEBUG_BIND statement.

   VAR is bound to VALUE; the location is taken from STMT when STMT is
   non-NULL.  */

gdebug *
gimple_build_debug_bind (tree var, tree value, gimple *stmt MEM_STAT_DECL)
{
  /* Two operands: the variable and the bound value; the debug kind is
     stored in the subcode.  */
  gdebug *p
    = as_a <gdebug *> (gimple_build_with_ops_stat (GIMPLE_DEBUG,
						   (unsigned)GIMPLE_DEBUG_BIND, 2
						   PASS_MEM_STAT));
  gimple_debug_bind_set_var (p, var);
  gimple_debug_bind_set_value (p, value);
  if (stmt)
    gimple_set_location (p, gimple_location (stmt));

  return p;
}
845
846
/* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.

   VAR is bound to VALUE; the location is taken from STMT when STMT is
   non-NULL.  */

gdebug *
gimple_build_debug_source_bind (tree var, tree value,
				     gimple *stmt MEM_STAT_DECL)
{
  gdebug *p
    = as_a <gdebug *> (
        gimple_build_with_ops_stat (GIMPLE_DEBUG,
				    (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
				    PASS_MEM_STAT));

  gimple_debug_source_bind_set_var (p, var);
  gimple_debug_source_bind_set_value (p, value);
  if (stmt)
    gimple_set_location (p, gimple_location (stmt));

  return p;
}
868
869
/* Build a new GIMPLE_DEBUG_BEGIN_STMT statement in BLOCK at
   LOCATION.  Bumps cfun->debug_marker_count, so cfun must be set.  */

gdebug *
gimple_build_debug_begin_stmt (tree block, location_t location
				    MEM_STAT_DECL)
{
  gdebug *p
    = as_a <gdebug *> (
        gimple_build_with_ops_stat (GIMPLE_DEBUG,
				    (unsigned)GIMPLE_DEBUG_BEGIN_STMT, 0
				    PASS_MEM_STAT));

  gimple_set_location (p, location);
  gimple_set_block (p, block);
  /* Track how many debug markers the current function carries.  */
  cfun->debug_marker_count++;

  return p;
}
889
890
/* Build a new GIMPLE_DEBUG_INLINE_ENTRY statement in BLOCK at
   LOCATION.  The BLOCK links to the inlined function.  Bumps
   cfun->debug_marker_count, so cfun must be set.  */

gdebug *
gimple_build_debug_inline_entry (tree block, location_t location
				      MEM_STAT_DECL)
{
  gdebug *p
    = as_a <gdebug *> (
        gimple_build_with_ops_stat (GIMPLE_DEBUG,
				    (unsigned)GIMPLE_DEBUG_INLINE_ENTRY, 0
				    PASS_MEM_STAT));

  gimple_set_location (p, location);
  gimple_set_block (p, block);
  /* Track how many debug markers the current function carries.  */
  cfun->debug_marker_count++;

  return p;
}
910
911
/* Build a GIMPLE_OMP_CRITICAL statement.

   BODY is the sequence of statements for which only one thread can execute;
   may be NULL for an empty sequence.
   NAME is optional identifier for this critical block.
   CLAUSES are clauses for this critical block.  */

gomp_critical *
gimple_build_omp_critical (gimple_seq body, tree name, tree clauses)
{
  gomp_critical *p
    = as_a <gomp_critical *> (gimple_alloc (GIMPLE_OMP_CRITICAL, 0));
  gimple_omp_critical_set_name (p, name);
  gimple_omp_critical_set_clauses (p, clauses);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}
930
/* Build a GIMPLE_OMP_FOR statement.

   BODY is sequence of statements inside the for loop; may be NULL.
   KIND is the `for' variant (GF_OMP_FOR_KIND_*).
   CLAUSES are any of the construct's clauses.
   COLLAPSE is the collapse count; one iteration descriptor is
   allocated per collapsed loop.
   PRE_BODY is the sequence of statements that are loop invariant.  */

gomp_for *
gimple_build_omp_for (gimple_seq body, int kind, tree clauses, size_t collapse,
		      gimple_seq pre_body)
{
  gomp_for *p = as_a <gomp_for *> (gimple_alloc (GIMPLE_OMP_FOR, 0));
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  gimple_omp_for_set_kind (p, kind);
  p->collapse = collapse;
  /* Zero-initialized iteration descriptors, one per collapsed loop.  */
  p->iter =  ggc_cleared_vec_alloc<gimple_omp_for_iter> (collapse);

  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}
956
957
/* Build a GIMPLE_OMP_PARALLEL statement.

   BODY is sequence of statements which are executed in parallel; may be
   NULL for an empty sequence.
   CLAUSES are the OMP parallel construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG are the shared data argument(s).  */

gomp_parallel *
gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
			   tree data_arg)
{
  gomp_parallel *p
    = as_a <gomp_parallel *> (gimple_alloc (GIMPLE_OMP_PARALLEL, 0));
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_parallel_set_clauses (p, clauses);
  gimple_omp_parallel_set_child_fn (p, child_fn);
  gimple_omp_parallel_set_data_arg (p, data_arg);

  return p;
}
979
980
/* Build a GIMPLE_OMP_TASK statement.

   BODY is sequence of statements which are executed by the explicit task;
   may be NULL for an empty sequence.
   CLAUSES are the OMP task construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG are the shared data argument(s).
   COPY_FN is the optional function for firstprivate initialization.
   ARG_SIZE and ARG_ALIGN are size and alignment of the data block.  */

gomp_task *
gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
		       tree data_arg, tree copy_fn, tree arg_size,
		       tree arg_align)
{
  gomp_task *p = as_a <gomp_task *> (gimple_alloc (GIMPLE_OMP_TASK, 0));
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_task_set_clauses (p, clauses);
  gimple_omp_task_set_child_fn (p, child_fn);
  gimple_omp_task_set_data_arg (p, data_arg);
  gimple_omp_task_set_copy_fn (p, copy_fn);
  gimple_omp_task_set_arg_size (p, arg_size);
  gimple_omp_task_set_arg_align (p, arg_align);

  return p;
}
1007
1008
/* Build a GIMPLE_OMP_SECTION statement for a sections statement.

   BODY is the sequence of statements in the section; may be NULL for an
   empty sequence.  */

gimple *
gimple_build_omp_section (gimple_seq body)
{
  gimple *p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}
1022
1023
/* Build a GIMPLE_OMP_MASTER statement.

   BODY is the sequence of statements to be executed by just the master;
   may be NULL for an empty sequence.  */

gimple *
gimple_build_omp_master (gimple_seq body)
{
  gimple *p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}
1037
/* Build a GIMPLE_OMP_GRID_BODY statement.

   BODY is the sequence of statements to be executed by the kernel;
   may be NULL for an empty sequence.  */

gimple *
gimple_build_omp_grid_body (gimple_seq body)
{
  gimple *p = gimple_alloc (GIMPLE_OMP_GRID_BODY, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}
1051
/* Build a GIMPLE_OMP_TASKGROUP statement.

   BODY is the sequence of statements to be executed by the taskgroup
   construct; may be NULL for an empty sequence.
   CLAUSES are any of the construct's clauses.  */

gimple *
gimple_build_omp_taskgroup (gimple_seq body, tree clauses)
{
  gimple *p = gimple_alloc (GIMPLE_OMP_TASKGROUP, 0);
  gimple_omp_taskgroup_set_clauses (p, clauses);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}
1068
1069
/* Build a GIMPLE_OMP_CONTINUE statement.

   CONTROL_DEF is the definition of the control variable.
   CONTROL_USE is the use of the control variable.  */

gomp_continue *
gimple_build_omp_continue (tree control_def, tree control_use)
{
  gomp_continue *p
    = as_a <gomp_continue *> (gimple_alloc (GIMPLE_OMP_CONTINUE, 0));
  gimple_omp_continue_set_control_def (p, control_def);
  gimple_omp_continue_set_control_use (p, control_use);
  return p;
}
1084
/* Build a GIMPLE_OMP_ORDERED statement.

   BODY is the sequence of statements inside a loop that will executed in
   sequence; may be NULL for an empty sequence.
   CLAUSES are clauses for this statement.  */

gomp_ordered *
gimple_build_omp_ordered (gimple_seq body, tree clauses)
{
  gomp_ordered *p
    = as_a <gomp_ordered *> (gimple_alloc (GIMPLE_OMP_ORDERED, 0));
  gimple_omp_ordered_set_clauses (p, clauses);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}
1102
1103
1104/* Build a GIMPLE_OMP_RETURN statement.
1105   WAIT_P is true if this is a non-waiting return.  */
1106
1107gimple *
1108gimple_build_omp_return (bool wait_p)
1109{
1110  gimple *p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
1111  if (wait_p)
1112    gimple_omp_return_set_nowait (p);
1113
1114  return p;
1115}
1116
1117
1118/* Build a GIMPLE_OMP_SCAN statement.
1119
1120   BODY is the sequence of statements to be executed by the scan
1121   construct.
1122   CLAUSES are any of the construct's clauses.  */
1123
1124gomp_scan *
1125gimple_build_omp_scan (gimple_seq body, tree clauses)
1126{
1127  gomp_scan *p
1128    = as_a <gomp_scan *> (gimple_alloc (GIMPLE_OMP_SCAN, 0));
1129  gimple_omp_scan_set_clauses (p, clauses);
1130  if (body)
1131    gimple_omp_set_body (p, body);
1132
1133  return p;
1134}
1135
1136
1137/* Build a GIMPLE_OMP_SECTIONS statement.
1138
1139   BODY is a sequence of section statements.
1140   CLAUSES are any of the OMP sections contsruct's clauses: private,
1141   firstprivate, lastprivate, reduction, and nowait.  */
1142
1143gomp_sections *
1144gimple_build_omp_sections (gimple_seq body, tree clauses)
1145{
1146  gomp_sections *p
1147    = as_a <gomp_sections *> (gimple_alloc (GIMPLE_OMP_SECTIONS, 0));
1148  if (body)
1149    gimple_omp_set_body (p, body);
1150  gimple_omp_sections_set_clauses (p, clauses);
1151
1152  return p;
1153}
1154
1155
1156/* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */
1157
1158gimple *
1159gimple_build_omp_sections_switch (void)
1160{
1161  return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
1162}
1163
1164
1165/* Build a GIMPLE_OMP_SINGLE statement.
1166
1167   BODY is the sequence of statements that will be executed once.
1168   CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
1169   copyprivate, nowait.  */
1170
1171gomp_single *
1172gimple_build_omp_single (gimple_seq body, tree clauses)
1173{
1174  gomp_single *p
1175    = as_a <gomp_single *> (gimple_alloc (GIMPLE_OMP_SINGLE, 0));
1176  if (body)
1177    gimple_omp_set_body (p, body);
1178  gimple_omp_single_set_clauses (p, clauses);
1179
1180  return p;
1181}
1182
1183
1184/* Build a GIMPLE_OMP_TARGET statement.
1185
1186   BODY is the sequence of statements that will be executed.
1187   KIND is the kind of the region.
1188   CLAUSES are any of the construct's clauses.  */
1189
1190gomp_target *
1191gimple_build_omp_target (gimple_seq body, int kind, tree clauses)
1192{
1193  gomp_target *p
1194    = as_a <gomp_target *> (gimple_alloc (GIMPLE_OMP_TARGET, 0));
1195  if (body)
1196    gimple_omp_set_body (p, body);
1197  gimple_omp_target_set_clauses (p, clauses);
1198  gimple_omp_target_set_kind (p, kind);
1199
1200  return p;
1201}
1202
1203
1204/* Build a GIMPLE_OMP_TEAMS statement.
1205
1206   BODY is the sequence of statements that will be executed.
1207   CLAUSES are any of the OMP teams construct's clauses.  */
1208
1209gomp_teams *
1210gimple_build_omp_teams (gimple_seq body, tree clauses)
1211{
1212  gomp_teams *p = as_a <gomp_teams *> (gimple_alloc (GIMPLE_OMP_TEAMS, 0));
1213  if (body)
1214    gimple_omp_set_body (p, body);
1215  gimple_omp_teams_set_clauses (p, clauses);
1216
1217  return p;
1218}
1219
1220
1221/* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */
1222
1223gomp_atomic_load *
1224gimple_build_omp_atomic_load (tree lhs, tree rhs, enum omp_memory_order mo)
1225{
1226  gomp_atomic_load *p
1227    = as_a <gomp_atomic_load *> (gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0));
1228  gimple_omp_atomic_load_set_lhs (p, lhs);
1229  gimple_omp_atomic_load_set_rhs (p, rhs);
1230  gimple_omp_atomic_set_memory_order (p, mo);
1231  return p;
1232}
1233
1234/* Build a GIMPLE_OMP_ATOMIC_STORE statement.
1235
1236   VAL is the value we are storing.  */
1237
1238gomp_atomic_store *
1239gimple_build_omp_atomic_store (tree val, enum omp_memory_order mo)
1240{
1241  gomp_atomic_store *p
1242    = as_a <gomp_atomic_store *> (gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0));
1243  gimple_omp_atomic_store_set_val (p, val);
1244  gimple_omp_atomic_set_memory_order (p, mo);
1245  return p;
1246}
1247
1248/* Build a GIMPLE_TRANSACTION statement.  */
1249
1250gtransaction *
1251gimple_build_transaction (gimple_seq body)
1252{
1253  gtransaction *p
1254    = as_a <gtransaction *> (gimple_alloc (GIMPLE_TRANSACTION, 0));
1255  gimple_transaction_set_body (p, body);
1256  gimple_transaction_set_label_norm (p, 0);
1257  gimple_transaction_set_label_uninst (p, 0);
1258  gimple_transaction_set_label_over (p, 0);
1259  return p;
1260}
1261
#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.

   GS is the offending statement; FILE, LINE and FUNCTION identify the
   checking call site; CODE and SUBCODE are the statement code and tree
   subcode the checker expected to find.  Does not return.  */

void
gimple_check_failed (const gimple *gs, const char *file, int line,
		     const char *function, enum gimple_code code,
		     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
      		  gimple_code_name[code],
		  get_tree_code_name (subcode),
		  gimple_code_name[gimple_code (gs)],
		  /* Only print GS's subcode when it has one.  */
		  gs->subcode > 0
		    ? get_tree_code_name ((enum tree_code) gs->subcode)
		    : "",
		  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */
1280
1281
1282/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
1283   *SEQ_P is NULL, a new sequence is allocated.  */
1284
1285void
1286gimple_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
1287{
1288  gimple_stmt_iterator si;
1289  if (gs == NULL)
1290    return;
1291
1292  si = gsi_last (*seq_p);
1293  gsi_insert_after (&si, gs, GSI_NEW_STMT);
1294}
1295
1296/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
1297   *SEQ_P is NULL, a new sequence is allocated.  This function is
1298   similar to gimple_seq_add_stmt, but does not scan the operands.
1299   During gimplification, we need to manipulate statement sequences
1300   before the def/use vectors have been constructed.  */
1301
1302void
1303gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple *gs)
1304{
1305  gimple_stmt_iterator si;
1306
1307  if (gs == NULL)
1308    return;
1309
1310  si = gsi_last (*seq_p);
1311  gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
1312}
1313
1314/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
1315   NULL, a new sequence is allocated.  */
1316
1317void
1318gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
1319{
1320  gimple_stmt_iterator si;
1321  if (src == NULL)
1322    return;
1323
1324  si = gsi_last (*dst_p);
1325  gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
1326}
1327
1328/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
1329   NULL, a new sequence is allocated.  This function is
1330   similar to gimple_seq_add_seq, but does not scan the operands.  */
1331
1332void
1333gimple_seq_add_seq_without_update (gimple_seq *dst_p, gimple_seq src)
1334{
1335  gimple_stmt_iterator si;
1336  if (src == NULL)
1337    return;
1338
1339  si = gsi_last (*dst_p);
1340  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
1341}
1342
1343/* Determine whether to assign a location to the statement GS.  */
1344
1345static bool
1346should_carry_location_p (gimple *gs)
1347{
1348  /* Don't emit a line note for a label.  We particularly don't want to
1349     emit one for the break label, since it doesn't actually correspond
1350     to the beginning of the loop/switch.  */
1351  if (gimple_code (gs) == GIMPLE_LABEL)
1352    return false;
1353
1354  return true;
1355}
1356
1357/* Set the location for gimple statement GS to LOCATION.  */
1358
1359static void
1360annotate_one_with_location (gimple *gs, location_t location)
1361{
1362  if (!gimple_has_location (gs)
1363      && !gimple_do_not_emit_location_p (gs)
1364      && should_carry_location_p (gs))
1365    gimple_set_location (gs, location);
1366}
1367
1368/* Set LOCATION for all the statements after iterator GSI in sequence
1369   SEQ.  If GSI is pointing to the end of the sequence, start with the
1370   first statement in SEQ.  */
1371
1372void
1373annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
1374				  location_t location)
1375{
1376  if (gsi_end_p (gsi))
1377    gsi = gsi_start (seq);
1378  else
1379    gsi_next (&gsi);
1380
1381  for (; !gsi_end_p (gsi); gsi_next (&gsi))
1382    annotate_one_with_location (gsi_stmt (gsi), location);
1383}
1384
1385/* Set the location for all the statements in a sequence STMT_P to LOCATION.  */
1386
1387void
1388annotate_all_with_location (gimple_seq stmt_p, location_t location)
1389{
1390  gimple_stmt_iterator i;
1391
1392  if (gimple_seq_empty_p (stmt_p))
1393    return;
1394
1395  for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
1396    {
1397      gimple *gs = gsi_stmt (i);
1398      annotate_one_with_location (gs, location);
1399    }
1400}
1401
1402/* Helper function of empty_body_p.  Return true if STMT is an empty
1403   statement.  */
1404
1405static bool
1406empty_stmt_p (gimple *stmt)
1407{
1408  if (gimple_code (stmt) == GIMPLE_NOP)
1409    return true;
1410  if (gbind *bind_stmt = dyn_cast <gbind *> (stmt))
1411    return empty_body_p (gimple_bind_body (bind_stmt));
1412  return false;
1413}
1414
1415
1416/* Return true if BODY contains nothing but empty statements.  */
1417
1418bool
1419empty_body_p (gimple_seq body)
1420{
1421  gimple_stmt_iterator i;
1422
1423  if (gimple_seq_empty_p (body))
1424    return true;
1425  for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
1426    if (!empty_stmt_p (gsi_stmt (i))
1427	&& !is_gimple_debug (gsi_stmt (i)))
1428      return false;
1429
1430  return true;
1431}
1432
1433
1434/* Perform a deep copy of sequence SRC and return the result.  */
1435
1436gimple_seq
1437gimple_seq_copy (gimple_seq src)
1438{
1439  gimple_stmt_iterator gsi;
1440  gimple_seq new_seq = NULL;
1441  gimple *stmt;
1442
1443  for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
1444    {
1445      stmt = gimple_copy (gsi_stmt (gsi));
1446      gimple_seq_add_stmt (&new_seq, stmt);
1447    }
1448
1449  return new_seq;
1450}
1451
1452
1453
/* Return true if calls C1 and C2 are known to go to the same function.  */

bool
gimple_call_same_target_p (const gimple *c1, const gimple *c2)
{
  if (gimple_call_internal_p (c1))
    /* Internal calls match when they invoke the same internal function;
       "unique" internal functions additionally only match themselves.  */
    return (gimple_call_internal_p (c2)
	    && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2)
	    && (!gimple_call_internal_unique_p (as_a <const gcall *> (c1))
		|| c1 == c2));
  else
    /* Ordinary calls match when the callee expressions are identical,
       or when both resolve to the same function declaration.  */
    return (gimple_call_fn (c1) == gimple_call_fn (c2)
	    || (gimple_call_fndecl (c1)
		&& gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
}
1469
/* Detect flags from a GIMPLE_CALL.  This is just like
   call_expr_flags, but for gimple tuples.  Returns a mask of ECF_*
   flags describing the callee.  */

int
gimple_call_flags (const gimple *stmt)
{
  int flags = 0;

  if (gimple_call_internal_p (stmt))
    /* Internal functions carry their flags in a static table.  */
    flags = internal_fn_flags (gimple_call_internal_fn (stmt));
  else
    {
      /* Start from the callee declaration's flags (if the callee is
	 known), then merge in what the call's function type promises.  */
      tree decl = gimple_call_fndecl (stmt);
      if (decl)
	flags = flags_from_decl_or_type (decl);
      flags |= flags_from_decl_or_type (gimple_call_fntype (stmt));
    }

  /* Some properties are recorded on the call statement itself rather
     than on the callee.  */
  if (stmt->subcode & GF_CALL_NOTHROW)
    flags |= ECF_NOTHROW;

  if (stmt->subcode & GF_CALL_BY_DESCRIPTOR)
    flags |= ECF_BY_DESCRIPTOR;

  return flags;
}
1496
/* Return the "fn spec" string for call STMT, or NULL_TREE when the
   callee has no fnspec information.  */

static const_tree
gimple_call_fnspec (const gcall *stmt)
{
  tree type, attr;

  /* Internal functions have their fnspec recorded in a static table.  */
  if (gimple_call_internal_p (stmt))
    return internal_fn_fnspec (gimple_call_internal_fn (stmt));

  /* Otherwise look for a "fn spec" attribute on the call's function
     type.  */
  type = gimple_call_fntype (stmt);
  if (!type)
    return NULL_TREE;

  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return NULL_TREE;

  /* The attribute's value is a tree list wrapping the spec string.  */
  return TREE_VALUE (TREE_VALUE (attr));
}
1517
/* Detects argument flags for argument number ARG on call STMT.
   Returns a mask of EAF_* flags derived from the callee's fnspec
   string, or 0 when nothing is known.  */

int
gimple_call_arg_flags (const gcall *stmt, unsigned arg)
{
  const_tree attr = gimple_call_fnspec (stmt);

  /* Character 0 of the fnspec string describes the return value, so
     argument ARG is described by character 1 + ARG.  A missing spec or
     one too short to cover this argument yields no flags.  */
  if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
    return 0;

  switch (TREE_STRING_POINTER (attr)[1 + arg])
    {
    case 'x':
    case 'X':
      /* Argument is not used by the callee at all.  */
      return EAF_UNUSED;

    case 'R':
      /* Argument is dereferenced directly only, not clobbered, and
	 does not escape.  */
      return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'r':
      /* Argument is not clobbered and does not escape.  */
      return EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'W':
      /* Argument is dereferenced directly only and does not escape.  */
      return EAF_DIRECT | EAF_NOESCAPE;

    case 'w':
      /* Argument does not escape.  */
      return EAF_NOESCAPE;

    case '.':
    default:
      /* '.' (or any unrecognized character) means nothing is known.  */
      return 0;
    }
}
1551
/* Detects return flags for the call STMT.  Returns a mask of ERF_*
   flags derived from the callee's ECF flags and fnspec string.  */

int
gimple_call_return_flags (const gcall *stmt)
{
  const_tree attr;

  /* Malloc-like functions return memory that aliases nothing else.  */
  if (gimple_call_flags (stmt) & ECF_MALLOC)
    return ERF_NOALIAS;

  attr = gimple_call_fnspec (stmt);
  if (!attr || TREE_STRING_LENGTH (attr) < 1)
    return 0;

  /* Character 0 of the fnspec string describes the return value.  */
  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      /* The call returns argument N - 1 (encoded zero-based in the
	 low bits of the result).  */
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      /* The returned value aliases nothing.  */
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}
1582
1583
/* Return true if call STMT is known to return a non-zero result.  */

bool
gimple_call_nonnull_result_p (gcall *call)
{
  tree fndecl = gimple_call_fndecl (call);
  if (!fndecl)
    return false;
  /* A throwing operator new never returns NULL, provided NULL-pointer
     checks may be elided and -fcheck-new is not in effect.  */
  if (flag_delete_null_pointer_checks && !flag_check_new
      && DECL_IS_OPERATOR_NEW_P (fndecl)
      && !TREE_NOTHROW (fndecl))
    return true;

  /* References are always non-NULL.  */
  /* NOTE(review): TREE_TYPE (fndecl) is the function's type; presumably
     this intended the *return* type (one more TREE_TYPE) — confirm
     against upstream before changing.  */
  if (flag_delete_null_pointer_checks
      && TREE_CODE (TREE_TYPE (fndecl)) == REFERENCE_TYPE)
    return true;

  /* Likewise functions explicitly marked returns_nonnull.  */
  if (flag_delete_null_pointer_checks
      && lookup_attribute ("returns_nonnull",
			   TYPE_ATTRIBUTES (gimple_call_fntype (call))))
    return true;
  /* Finally, alloca-like calls also return non-NULL.  */
  return gimple_alloca_call_p (call);
}
1608
1609
/* If CALL returns a non-null result in an argument, return that arg.
   Returns NULL_TREE when no such argument can be determined.  */

tree
gimple_call_nonnull_arg (gcall *call)
{
  tree fndecl = gimple_call_fndecl (call);
  if (!fndecl)
    return NULL_TREE;

  /* Check whether the fnspec says the call returns one of its
     arguments.  */
  unsigned rf = gimple_call_return_flags (call);
  if (rf & ERF_RETURNS_ARG)
    {
      unsigned argnum = rf & ERF_RETURN_ARG_MASK;
      if (argnum < gimple_call_num_args (call))
	{
	  tree arg = gimple_call_arg (call, argnum);
	  /* Only SSA names / variables qualify, and only when an
	     attribute establishes a non-null range for the argument.  */
	  if (SSA_VAR_P (arg)
	      && infer_nonnull_range_by_attribute (call, arg))
	    return arg;
	}
    }
  return NULL_TREE;
}
1633
1634
1635/* Return true if GS is a copy assignment.  */
1636
1637bool
1638gimple_assign_copy_p (gimple *gs)
1639{
1640  return (gimple_assign_single_p (gs)
1641	  && is_gimple_val (gimple_op (gs, 1)));
1642}
1643
1644
1645/* Return true if GS is a SSA_NAME copy assignment.  */
1646
1647bool
1648gimple_assign_ssa_name_copy_p (gimple *gs)
1649{
1650  return (gimple_assign_single_p (gs)
1651	  && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
1652	  && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
1653}
1654
1655
1656/* Return true if GS is an assignment with a unary RHS, but the
1657   operator has no effect on the assigned value.  The logic is adapted
1658   from STRIP_NOPS.  This predicate is intended to be used in tuplifying
1659   instances in which STRIP_NOPS was previously applied to the RHS of
1660   an assignment.
1661
1662   NOTE: In the use cases that led to the creation of this function
1663   and of gimple_assign_single_p, it is typical to test for either
1664   condition and to proceed in the same manner.  In each case, the
1665   assigned value is represented by the single RHS operand of the
1666   assignment.  I suspect there may be cases where gimple_assign_copy_p,
1667   gimple_assign_single_p, or equivalent logic is used where a similar
1668   treatment of unary NOPs is appropriate.  */
1669
1670bool
1671gimple_assign_unary_nop_p (gimple *gs)
1672{
1673  return (is_gimple_assign (gs)
1674          && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
1675              || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
1676          && gimple_assign_rhs1 (gs) != error_mark_node
1677          && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
1678              == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
1679}
1680
/* Set BB to be the basic block holding G.  For labels this also
   records the label in the per-function block-to-labels map.  */

void
gimple_set_bb (gimple *stmt, basic_block bb)
{
  stmt->bb = bb;

  if (gimple_code (stmt) != GIMPLE_LABEL)
    return;

  /* If the statement is a label, add the label to block-to-labels map
     so that we can speed up edge creation for GIMPLE_GOTOs.  */
  if (cfun->cfg)
    {
      tree t;
      int uid;

      t = gimple_label_label (as_a <glabel *> (stmt));
      uid = LABEL_DECL_UID (t);
      if (uid == -1)
	{
	  /* First time this label is seen: assign it a fresh UID and
	     grow the map, half again as large, so repeated insertions
	     amortize.  */
	  unsigned old_len =
	    vec_safe_length (label_to_block_map_for_fn (cfun));
	  LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
	  if (old_len <= (unsigned) uid)
	    {
	      unsigned new_len = 3 * uid / 2 + 1;

	      vec_safe_grow_cleared (label_to_block_map_for_fn (cfun),
				     new_len);
	    }
	}

      (*label_to_block_map_for_fn (cfun))[uid] = bb;
    }
}
1717
1718
1719/* Modify the RHS of the assignment pointed-to by GSI using the
1720   operands in the expression tree EXPR.
1721
1722   NOTE: The statement pointed-to by GSI may be reallocated if it
1723   did not have enough operand slots.
1724
1725   This function is useful to convert an existing tree expression into
1726   the flat representation used for the RHS of a GIMPLE assignment.
1727   It will reallocate memory as needed to expand or shrink the number
1728   of operand slots needed to represent EXPR.
1729
1730   NOTE: If you find yourself building a tree and then calling this
1731   function, you are most certainly doing it the slow way.  It is much
1732   better to build a new assignment or to use the function
1733   gimple_assign_set_rhs_with_ops, which does not require an
1734   expression tree to be built.  */
1735
1736void
1737gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
1738{
1739  enum tree_code subcode;
1740  tree op1, op2, op3;
1741
1742  extract_ops_from_tree (expr, &subcode, &op1, &op2, &op3);
1743  gimple_assign_set_rhs_with_ops (gsi, subcode, op1, op2, op3);
1744}
1745
1746
/* Set the RHS of assignment statement pointed-to by GSI to CODE with
   operands OP1, OP2 and OP3.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.  */

void
gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *gsi, enum tree_code code,
				tree op1, tree op2, tree op3)
{
  unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
  gimple *stmt = gsi_stmt (*gsi);
  gimple *old_stmt = stmt;

  /* If the new CODE needs more operands, allocate a new statement.  */
  if (gimple_num_ops (stmt) < new_rhs_ops + 1)
    {
      tree lhs = gimple_assign_lhs (old_stmt);
      /* Allocate a tuple with room for the LHS plus NEW_RHS_OPS
	 operands and bit-copy the old statement's fields into it.  */
      stmt = gimple_alloc (gimple_code (old_stmt), new_rhs_ops + 1);
      memcpy (stmt, old_stmt, gimple_size (gimple_code (old_stmt)));
      gimple_init_singleton (stmt);

      /* The LHS needs to be reset as this also changes the SSA name
	 on the LHS.  */
      gimple_assign_set_lhs (stmt, lhs);
    }

  gimple_set_num_ops (stmt, new_rhs_ops + 1);
  gimple_set_subcode (stmt, code);
  gimple_assign_set_rhs1 (stmt, op1);
  /* OP2 and OP3 are ignored when CODE takes fewer operands.  */
  if (new_rhs_ops > 1)
    gimple_assign_set_rhs2 (stmt, op2);
  if (new_rhs_ops > 2)
    gimple_assign_set_rhs3 (stmt, op3);
  /* If the statement was reallocated, splice the new one in place.  */
  if (stmt != old_stmt)
    gsi_replace (gsi, stmt, false);
}
1784
1785
1786/* Return the LHS of a statement that performs an assignment,
1787   either a GIMPLE_ASSIGN or a GIMPLE_CALL.  Returns NULL_TREE
1788   for a call to a function that returns no value, or for a
1789   statement other than an assignment or a call.  */
1790
1791tree
1792gimple_get_lhs (const gimple *stmt)
1793{
1794  enum gimple_code code = gimple_code (stmt);
1795
1796  if (code == GIMPLE_ASSIGN)
1797    return gimple_assign_lhs (stmt);
1798  else if (code == GIMPLE_CALL)
1799    return gimple_call_lhs (stmt);
1800  else if (code == GIMPLE_PHI)
1801    return gimple_phi_result (stmt);
1802  else
1803    return NULL_TREE;
1804}
1805
1806
1807/* Set the LHS of a statement that performs an assignment,
1808   either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
1809
1810void
1811gimple_set_lhs (gimple *stmt, tree lhs)
1812{
1813  enum gimple_code code = gimple_code (stmt);
1814
1815  if (code == GIMPLE_ASSIGN)
1816    gimple_assign_set_lhs (stmt, lhs);
1817  else if (code == GIMPLE_CALL)
1818    gimple_call_set_lhs (stmt, lhs);
1819  else
1820    gcc_unreachable ();
1821}
1822
1823
/* Return a deep copy of statement STMT.  All the operands from STMT
   are reallocated and copied using unshare_expr.  The DEF, USE, VDEF
   and VUSE operand arrays are set to empty in the new copy.  The new
   copy isn't part of any sequence.  */

gimple *
gimple_copy (gimple *stmt)
{
  enum gimple_code code = gimple_code (stmt);
  unsigned num_ops = gimple_num_ops (stmt);
  gimple *copy = gimple_alloc (code, num_ops);
  unsigned i;

  /* Shallow copy all the fields from STMT.  */
  memcpy (copy, stmt, gimple_size (code));
  gimple_init_singleton (copy);

  /* If STMT has sub-statements, deep-copy them as well.  */
  if (gimple_has_substatements (stmt))
    {
      gimple_seq new_seq;
      tree t;

      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  {
	    gbind *bind_stmt = as_a <gbind *> (stmt);
	    gbind *bind_copy = as_a <gbind *> (copy);
	    new_seq = gimple_seq_copy (gimple_bind_body (bind_stmt));
	    gimple_bind_set_body (bind_copy, new_seq);
	    gimple_bind_set_vars (bind_copy,
				  unshare_expr (gimple_bind_vars (bind_stmt)));
	    gimple_bind_set_block (bind_copy, gimple_bind_block (bind_stmt));
	  }
	  break;

	case GIMPLE_CATCH:
	  {
	    gcatch *catch_stmt = as_a <gcatch *> (stmt);
	    gcatch *catch_copy = as_a <gcatch *> (copy);
	    new_seq = gimple_seq_copy (gimple_catch_handler (catch_stmt));
	    gimple_catch_set_handler (catch_copy, new_seq);
	    t = unshare_expr (gimple_catch_types (catch_stmt));
	    gimple_catch_set_types (catch_copy, t);
	  }
	  break;

	case GIMPLE_EH_FILTER:
	  {
	    geh_filter *eh_filter_stmt = as_a <geh_filter *> (stmt);
	    geh_filter *eh_filter_copy = as_a <geh_filter *> (copy);
	    new_seq
	      = gimple_seq_copy (gimple_eh_filter_failure (eh_filter_stmt));
	    gimple_eh_filter_set_failure (eh_filter_copy, new_seq);
	    t = unshare_expr (gimple_eh_filter_types (eh_filter_stmt));
	    gimple_eh_filter_set_types (eh_filter_copy, t);
	  }
	  break;

	case GIMPLE_EH_ELSE:
	  {
	    geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	    geh_else *eh_else_copy = as_a <geh_else *> (copy);
	    new_seq = gimple_seq_copy (gimple_eh_else_n_body (eh_else_stmt));
	    gimple_eh_else_set_n_body (eh_else_copy, new_seq);
	    new_seq = gimple_seq_copy (gimple_eh_else_e_body (eh_else_stmt));
	    gimple_eh_else_set_e_body (eh_else_copy, new_seq);
	  }
	  break;

	case GIMPLE_TRY:
	  {
	    gtry *try_stmt = as_a <gtry *> (stmt);
	    gtry *try_copy = as_a <gtry *> (copy);
	    new_seq = gimple_seq_copy (gimple_try_eval (try_stmt));
	    gimple_try_set_eval (try_copy, new_seq);
	    new_seq = gimple_seq_copy (gimple_try_cleanup (try_stmt));
	    gimple_try_set_cleanup (try_copy, new_seq);
	  }
	  break;

	case GIMPLE_OMP_FOR:
	  new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
	  gimple_omp_for_set_pre_body (copy, new_seq);
	  t = unshare_expr (gimple_omp_for_clauses (stmt));
	  gimple_omp_for_set_clauses (copy, t);
	  {
	    /* Give the copy its own iteration-space vector so the two
	       statements do not share one.  */
	    gomp_for *omp_for_copy = as_a <gomp_for *> (copy);
	    omp_for_copy->iter = ggc_vec_alloc<gimple_omp_for_iter>
	      ( gimple_omp_for_collapse (stmt));
          }
	  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	    {
	      gimple_omp_for_set_cond (copy, i,
				       gimple_omp_for_cond (stmt, i));
	      gimple_omp_for_set_index (copy, i,
					gimple_omp_for_index (stmt, i));
	      t = unshare_expr (gimple_omp_for_initial (stmt, i));
	      gimple_omp_for_set_initial (copy, i, t);
	      t = unshare_expr (gimple_omp_for_final (stmt, i));
	      gimple_omp_for_set_final (copy, i, t);
	      t = unshare_expr (gimple_omp_for_incr (stmt, i));
	      gimple_omp_for_set_incr (copy, i, t);
	    }
	  goto copy_omp_body;

	case GIMPLE_OMP_PARALLEL:
	  {
	    gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
	    gomp_parallel *omp_par_copy = as_a <gomp_parallel *> (copy);
	    t = unshare_expr (gimple_omp_parallel_clauses (omp_par_stmt));
	    gimple_omp_parallel_set_clauses (omp_par_copy, t);
	    t = unshare_expr (gimple_omp_parallel_child_fn (omp_par_stmt));
	    gimple_omp_parallel_set_child_fn (omp_par_copy, t);
	    t = unshare_expr (gimple_omp_parallel_data_arg (omp_par_stmt));
	    gimple_omp_parallel_set_data_arg (omp_par_copy, t);
	  }
	  goto copy_omp_body;

	case GIMPLE_OMP_TASK:
	  t = unshare_expr (gimple_omp_task_clauses (stmt));
	  gimple_omp_task_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_task_child_fn (stmt));
	  gimple_omp_task_set_child_fn (copy, t);
	  t = unshare_expr (gimple_omp_task_data_arg (stmt));
	  gimple_omp_task_set_data_arg (copy, t);
	  t = unshare_expr (gimple_omp_task_copy_fn (stmt));
	  gimple_omp_task_set_copy_fn (copy, t);
	  t = unshare_expr (gimple_omp_task_arg_size (stmt));
	  gimple_omp_task_set_arg_size (copy, t);
	  t = unshare_expr (gimple_omp_task_arg_align (stmt));
	  gimple_omp_task_set_arg_align (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_CRITICAL:
	  t = unshare_expr (gimple_omp_critical_name
				(as_a <gomp_critical *> (stmt)));
	  gimple_omp_critical_set_name (as_a <gomp_critical *> (copy), t);
	  t = unshare_expr (gimple_omp_critical_clauses
				(as_a <gomp_critical *> (stmt)));
	  gimple_omp_critical_set_clauses (as_a <gomp_critical *> (copy), t);
	  goto copy_omp_body;

	case GIMPLE_OMP_ORDERED:
	  t = unshare_expr (gimple_omp_ordered_clauses
				(as_a <gomp_ordered *> (stmt)));
	  gimple_omp_ordered_set_clauses (as_a <gomp_ordered *> (copy), t);
	  goto copy_omp_body;

	case GIMPLE_OMP_SCAN:
	  t = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt));
	  t = unshare_expr (t);
	  gimple_omp_scan_set_clauses (as_a <gomp_scan *> (copy), t);
	  goto copy_omp_body;

	case GIMPLE_OMP_TASKGROUP:
	  t = unshare_expr (gimple_omp_taskgroup_clauses (stmt));
	  gimple_omp_taskgroup_set_clauses (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_SECTIONS:
	  t = unshare_expr (gimple_omp_sections_clauses (stmt));
	  gimple_omp_sections_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_sections_control (stmt));
	  gimple_omp_sections_set_control (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_SINGLE:
	  {
	    gomp_single *omp_single_copy = as_a <gomp_single *> (copy);
	    t = unshare_expr (gimple_omp_single_clauses (stmt));
	    gimple_omp_single_set_clauses (omp_single_copy, t);
	  }
	  goto copy_omp_body;

	case GIMPLE_OMP_TARGET:
	  {
	    gomp_target *omp_target_stmt = as_a <gomp_target *> (stmt);
	    gomp_target *omp_target_copy = as_a <gomp_target *> (copy);
	    t = unshare_expr (gimple_omp_target_clauses (omp_target_stmt));
	    gimple_omp_target_set_clauses (omp_target_copy, t);
	    t = unshare_expr (gimple_omp_target_data_arg (omp_target_stmt));
	    gimple_omp_target_set_data_arg (omp_target_copy, t);
	  }
	  goto copy_omp_body;

	case GIMPLE_OMP_TEAMS:
	  {
	    gomp_teams *omp_teams_copy = as_a <gomp_teams *> (copy);
	    t = unshare_expr (gimple_omp_teams_clauses (stmt));
	    gimple_omp_teams_set_clauses (omp_teams_copy, t);
	  }
	  /* FALLTHRU  */

	case GIMPLE_OMP_SECTION:
	case GIMPLE_OMP_MASTER:
	case GIMPLE_OMP_GRID_BODY:
	copy_omp_body:
	  /* Common tail for all OMP statements: deep-copy the body.  */
	  new_seq = gimple_seq_copy (gimple_omp_body (stmt));
	  gimple_omp_set_body (copy, new_seq);
	  break;

	case GIMPLE_TRANSACTION:
	  new_seq = gimple_seq_copy (gimple_transaction_body (
				       as_a <gtransaction *> (stmt)));
	  gimple_transaction_set_body (as_a <gtransaction *> (copy),
				       new_seq);
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
	  gimple_wce_set_cleanup (copy, new_seq);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Make copy of operands.  */
  for (i = 0; i < num_ops; i++)
    gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));

  if (gimple_has_mem_ops (stmt))
    {
      /* Virtual operands are copied verbatim.  */
      gimple_set_vdef (copy, gimple_vdef (stmt));
      gimple_set_vuse (copy, gimple_vuse (stmt));
    }

  /* Clear out SSA operand vectors on COPY.  */
  if (gimple_has_ops (stmt))
    {
      gimple_set_use_ops (copy, NULL);

      /* SSA operands need to be updated.  */
      gimple_set_modified (copy, true);
    }

  /* Keep the per-function count of debug marker statements accurate.  */
  if (gimple_debug_nonbind_marker_p (stmt))
    cfun->debug_marker_count++;

  return copy;
}
2068
2069/* Move OLD_STMT's vuse and vdef operands to NEW_STMT, on the assumption
2070   that OLD_STMT is about to be removed.  */
2071
2072void
2073gimple_move_vops (gimple *new_stmt, gimple *old_stmt)
2074{
2075  tree vdef = gimple_vdef (old_stmt);
2076  gimple_set_vuse (new_stmt, gimple_vuse (old_stmt));
2077  gimple_set_vdef (new_stmt, vdef);
2078  if (vdef && TREE_CODE (vdef) == SSA_NAME)
2079    SSA_NAME_DEF_STMT (vdef) = new_stmt;
2080}
2081
2082/* Return true if statement S has side-effects.  We consider a
2083   statement to have side effects if:
2084
2085   - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
2086   - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS.  */
2087
2088bool
2089gimple_has_side_effects (const gimple *s)
2090{
2091  if (is_gimple_debug (s))
2092    return false;
2093
2094  /* We don't have to scan the arguments to check for
2095     volatile arguments, though, at present, we still
2096     do a scan to check for TREE_SIDE_EFFECTS.  */
2097  if (gimple_has_volatile_ops (s))
2098    return true;
2099
2100  if (gimple_code (s) == GIMPLE_ASM
2101      && gimple_asm_volatile_p (as_a <const gasm *> (s)))
2102    return true;
2103
2104  if (is_gimple_call (s))
2105    {
2106      int flags = gimple_call_flags (s);
2107
2108      /* An infinite loop is considered a side effect.  */
2109      if (!(flags & (ECF_CONST | ECF_PURE))
2110	  || (flags & ECF_LOOPING_CONST_OR_PURE))
2111	return true;
2112
2113      return false;
2114    }
2115
2116  return false;
2117}
2118
/* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
   Return true if S can trap.  When INCLUDE_MEM is true, check whether
   the memory operations could trap.  When INCLUDE_STORES is true and
   S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked.  */

bool
gimple_could_trap_p_1 (gimple *s, bool include_mem, bool include_stores)
{
  tree t, div = NULL_TREE;
  enum tree_code op;

  if (include_mem)
    {
      /* For an assignment, operand 0 is the LHS (the store); start at
	 operand 1 unless the caller asked to check stores as well.  */
      unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;

      for (i = start; i < gimple_num_ops (s); i++)
	if (tree_could_trap_p (gimple_op (s, i)))
	  return true;
    }

  switch (gimple_code (s))
    {
    case GIMPLE_ASM:
      /* A volatile asm may have arbitrary effects, including trapping.  */
      return gimple_asm_volatile_p (as_a <gasm *> (s));

    case GIMPLE_CALL:
      t = gimple_call_fndecl (s);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
	return true;
      return false;

    case GIMPLE_ASSIGN:
      op = gimple_assign_rhs_code (s);

      /* For COND_EXPR and VEC_COND_EXPR only the condition may trap.  */
      if (op == COND_EXPR || op == VEC_COND_EXPR)
	return tree_could_trap_p (gimple_assign_rhs1 (s));

      /* For comparisons we need to check rhs operand types instead of rhs type
         (which is BOOLEAN_TYPE).  */
      if (TREE_CODE_CLASS (op) == tcc_comparison)
	t = TREE_TYPE (gimple_assign_rhs1 (s));
      else
	t = gimple_expr_type (s);

      /* DIV is the second operand of a binary RHS, used by
	 operation_could_trap_p to detect e.g. division by zero.  */
      if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
	div = gimple_assign_rhs2 (s);

      return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
				      (INTEGRAL_TYPE_P (t)
				       && TYPE_OVERFLOW_TRAPS (t)),
				      div));

    case GIMPLE_COND:
      t = TREE_TYPE (gimple_cond_lhs (s));
      return operation_could_trap_p (gimple_cond_code (s),
				     FLOAT_TYPE_P (t), false, NULL_TREE);

    default:
      break;
    }

  return false;
}
2184
2185/* Return true if statement S can trap.  */
2186
2187bool
2188gimple_could_trap_p (gimple *s)
2189{
2190  return gimple_could_trap_p_1 (s, true, true);
2191}
2192
2193/* Return true if RHS of a GIMPLE_ASSIGN S can trap.  */
2194
2195bool
2196gimple_assign_rhs_could_trap_p (gimple *s)
2197{
2198  gcc_assert (is_gimple_assign (s));
2199  return gimple_could_trap_p_1 (s, true, false);
2200}
2201
2202
2203/* Print debugging information for gimple stmts generated.  */
2204
2205void
2206dump_gimple_statistics (void)
2207{
2208  int i;
2209  uint64_t total_tuples = 0, total_bytes = 0;
2210
2211  if (! GATHER_STATISTICS)
2212    {
2213      fprintf (stderr, "No GIMPLE statistics\n");
2214      return;
2215    }
2216
2217  fprintf (stderr, "\nGIMPLE statements\n");
2218  fprintf (stderr, "Kind                   Stmts      Bytes\n");
2219  fprintf (stderr, "---------------------------------------\n");
2220  for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2221    {
2222      fprintf (stderr, "%-20s %7" PRIu64 "%c %10" PRIu64 "%c\n",
2223	       gimple_alloc_kind_names[i],
2224	       SIZE_AMOUNT (gimple_alloc_counts[i]),
2225	       SIZE_AMOUNT (gimple_alloc_sizes[i]));
2226      total_tuples += gimple_alloc_counts[i];
2227      total_bytes += gimple_alloc_sizes[i];
2228    }
2229  fprintf (stderr, "---------------------------------------\n");
2230  fprintf (stderr, "%-20s %7" PRIu64 "%c %10" PRIu64 "%c\n", "Total",
2231	   SIZE_AMOUNT (total_tuples), SIZE_AMOUNT (total_bytes));
2232  fprintf (stderr, "---------------------------------------\n");
2233}
2234
2235
2236/* Return the number of operands needed on the RHS of a GIMPLE
2237   assignment for an expression with tree code CODE.  */
2238
2239unsigned
2240get_gimple_rhs_num_ops (enum tree_code code)
2241{
2242  switch (get_gimple_rhs_class (code))
2243    {
2244    case GIMPLE_UNARY_RHS:
2245    case GIMPLE_SINGLE_RHS:
2246      return 1;
2247    case GIMPLE_BINARY_RHS:
2248      return 2;
2249    case GIMPLE_TERNARY_RHS:
2250      return 3;
2251    default:
2252      gcc_unreachable ();
2253    }
2254}
2255
/* Table mapping each tree code to the GIMPLE RHS class (unary, binary,
   ternary or single) it belongs to when it appears on the RHS of a
   GIMPLE_ASSIGN, indexed by tree code.  Codes that cannot appear on a
   GIMPLE RHS map to GIMPLE_INVALID_RHS.  The classification is driven
   first by the code's tcc_* class, then by explicit special cases.  */
#define DEFTREECODE(SYM, STRING, TYPE, NARGS)   			    \
  (unsigned char)							    \
  ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS				    \
   : ((TYPE) == tcc_binary						    \
      || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS   		    \
   : ((TYPE) == tcc_constant						    \
      || (TYPE) == tcc_declaration					    \
      || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS			    \
   : ((SYM) == TRUTH_AND_EXPR						    \
      || (SYM) == TRUTH_OR_EXPR						    \
      || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS			    \
   : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS				    \
   : ((SYM) == COND_EXPR						    \
      || (SYM) == WIDEN_MULT_PLUS_EXPR					    \
      || (SYM) == WIDEN_MULT_MINUS_EXPR					    \
      || (SYM) == DOT_PROD_EXPR						    \
      || (SYM) == SAD_EXPR						    \
      || (SYM) == REALIGN_LOAD_EXPR					    \
      || (SYM) == VEC_COND_EXPR						    \
      || (SYM) == VEC_PERM_EXPR                                             \
      || (SYM) == BIT_INSERT_EXPR) ? GIMPLE_TERNARY_RHS			    \
   : ((SYM) == CONSTRUCTOR						    \
      || (SYM) == OBJ_TYPE_REF						    \
      || (SYM) == ASSERT_EXPR						    \
      || (SYM) == ADDR_EXPR						    \
      || (SYM) == WITH_SIZE_EXPR					    \
      || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS				    \
   : GIMPLE_INVALID_RHS),
#define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,

const unsigned char gimple_rhs_class_table[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
2292
2293/* Canonicalize a tree T for use in a COND_EXPR as conditional.  Returns
2294   a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
2295   we failed to create one.  */
2296
2297tree
2298canonicalize_cond_expr_cond (tree t)
2299{
2300  /* Strip conversions around boolean operations.  */
2301  if (CONVERT_EXPR_P (t)
2302      && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
2303          || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
2304	     == BOOLEAN_TYPE))
2305    t = TREE_OPERAND (t, 0);
2306
2307  /* For !x use x == 0.  */
2308  if (TREE_CODE (t) == TRUTH_NOT_EXPR)
2309    {
2310      tree top0 = TREE_OPERAND (t, 0);
2311      t = build2 (EQ_EXPR, TREE_TYPE (t),
2312		  top0, build_int_cst (TREE_TYPE (top0), 0));
2313    }
2314  /* For cmp ? 1 : 0 use cmp.  */
2315  else if (TREE_CODE (t) == COND_EXPR
2316	   && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
2317	   && integer_onep (TREE_OPERAND (t, 1))
2318	   && integer_zerop (TREE_OPERAND (t, 2)))
2319    {
2320      tree top0 = TREE_OPERAND (t, 0);
2321      t = build2 (TREE_CODE (top0), TREE_TYPE (t),
2322		  TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
2323    }
2324  /* For x ^ y use x != y.  */
2325  else if (TREE_CODE (t) == BIT_XOR_EXPR)
2326    t = build2 (NE_EXPR, TREE_TYPE (t),
2327		TREE_OPERAND (t, 0), TREE_OPERAND (t, 1));
2328
2329  if (is_gimple_condexpr (t))
2330    return t;
2331
2332  return NULL_TREE;
2333}
2334
2335/* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
2336   the positions marked by the set ARGS_TO_SKIP.  */
2337
2338gcall *
2339gimple_call_copy_skip_args (gcall *stmt, bitmap args_to_skip)
2340{
2341  int i;
2342  int nargs = gimple_call_num_args (stmt);
2343  auto_vec<tree> vargs (nargs);
2344  gcall *new_stmt;
2345
2346  for (i = 0; i < nargs; i++)
2347    if (!bitmap_bit_p (args_to_skip, i))
2348      vargs.quick_push (gimple_call_arg (stmt, i));
2349
2350  if (gimple_call_internal_p (stmt))
2351    new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
2352					       vargs);
2353  else
2354    new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
2355
2356  if (gimple_call_lhs (stmt))
2357    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2358
2359  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
2360  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
2361
2362  if (gimple_has_location (stmt))
2363    gimple_set_location (new_stmt, gimple_location (stmt));
2364  gimple_call_copy_flags (new_stmt, stmt);
2365  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2366
2367  gimple_set_modified (new_stmt, true);
2368
2369  return new_stmt;
2370}
2371
2372
2373
/* Return true if the field decls F1 and F2 are at the same offset.

   This is intended to be used on GIMPLE types only.  */

bool
gimple_compare_field_offset (tree f1, tree f2)
{
  /* Fast path: with equal DECL_OFFSET_ALIGN the byte and bit offsets
     are directly comparable.  */
  if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
    {
      tree offset1 = DECL_FIELD_OFFSET (f1);
      tree offset2 = DECL_FIELD_OFFSET (f2);
      return ((offset1 == offset2
	       /* Once gimplification is done, self-referential offsets are
		  instantiated as operand #2 of the COMPONENT_REF built for
		  each access and reset.  Therefore, they are not relevant
		  anymore and fields are interchangeable provided that they
		  represent the same access.  */
	       || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
		   && TREE_CODE (offset2) == PLACEHOLDER_EXPR
		   && (DECL_SIZE (f1) == DECL_SIZE (f2)
		       || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
			   && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
		       || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
		   && DECL_ALIGN (f1) == DECL_ALIGN (f2))
	       || operand_equal_p (offset1, offset2, 0))
	      && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
				     DECL_FIELD_BIT_OFFSET (f2)));
    }

  /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
     should be, so handle differing ones specially by decomposing
     the offset into a byte and bit offset manually.  */
  if (tree_fits_shwi_p (DECL_FIELD_OFFSET (f1))
      && tree_fits_shwi_p (DECL_FIELD_OFFSET (f2)))
    {
      unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
      unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
      bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
      byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
		      + bit_offset1 / BITS_PER_UNIT);
      bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
      byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
		      + bit_offset2 / BITS_PER_UNIT);
      if (byte_offset1 != byte_offset2)
	return false;
      /* Equal normalized byte offsets: the fields match if the
	 remaining sub-byte bit positions agree too.  */
      return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
    }

  /* Offsets not representable as shwi: conservatively unequal.  */
  return false;
}
2424
2425
/* Return a type the same as TYPE except unsigned or
   signed according to UNSIGNEDP.  */

static tree
gimple_signed_or_unsigned_type (bool unsignedp, tree type)
{
  tree type1;
  int i;

  /* Work on the main variant so qualified variants map to the
     standard type nodes below.  */
  type1 = TYPE_MAIN_VARIANT (type);
  if (type1 == signed_char_type_node
      || type1 == char_type_node
      || type1 == unsigned_char_type_node)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (type1 == integer_type_node || type1 == unsigned_type_node)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (type1 == long_long_integer_type_node
      || type1 == long_long_unsigned_type_node)
    return unsignedp
           ? long_long_unsigned_type_node
	   : long_long_integer_type_node;

  /* Target-specific __intN types.  */
  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    if (int_n_enabled_p[i]
	&& (type1 == int_n_trees[i].unsigned_type
	    || type1 == int_n_trees[i].signed_type))
	return unsignedp
	  ? int_n_trees[i].unsigned_type
	  : int_n_trees[i].signed_type;

#if HOST_BITS_PER_WIDE_INT >= 64
  if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

/* Helper macros covering the fixed-point type nodes (short/long/
   long long and saturating variants); each expands to identity tests
   like the integer cases above.  */
#define GIMPLE_FIXED_TYPES(NAME)	    \
  if (type1 == short_ ## NAME ## _type_node \
      || type1 == unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_short_ ## NAME ## _type_node \
		     : short_ ## NAME ## _type_node; \
  if (type1 == NAME ## _type_node \
      || type1 == unsigned_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_ ## NAME ## _type_node \
		     : NAME ## _type_node; \
  if (type1 == long_ ## NAME ## _type_node \
      || type1 == unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_ ## NAME ## _type_node \
		     : long_ ## NAME ## _type_node; \
  if (type1 == long_long_ ## NAME ## _type_node \
      || type1 == unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
		     : long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES(NAME) \
  if (type1 == NAME ## _type_node \
      || type1 == u ## NAME ## _type_node) \
    return unsignedp ? u ## NAME ## _type_node \
		     : NAME ## _type_node;

#define GIMPLE_FIXED_TYPES_SAT(NAME) \
  if (type1 == sat_ ## short_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
		     : sat_ ## short_ ## NAME ## _type_node; \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
		     : sat_ ## long_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
		     : sat_ ## long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES_SAT(NAME)	\
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## u ## NAME ## _type_node) \
    return unsignedp ? sat_ ## u ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node;

  GIMPLE_FIXED_TYPES (fract);
  GIMPLE_FIXED_TYPES_SAT (fract);
  GIMPLE_FIXED_TYPES (accum);
  GIMPLE_FIXED_TYPES_SAT (accum);

  GIMPLE_FIXED_MODE_TYPES (qq);
  GIMPLE_FIXED_MODE_TYPES (hq);
  GIMPLE_FIXED_MODE_TYPES (sq);
  GIMPLE_FIXED_MODE_TYPES (dq);
  GIMPLE_FIXED_MODE_TYPES (tq);
  GIMPLE_FIXED_MODE_TYPES_SAT (qq);
  GIMPLE_FIXED_MODE_TYPES_SAT (hq);
  GIMPLE_FIXED_MODE_TYPES_SAT (sq);
  GIMPLE_FIXED_MODE_TYPES_SAT (dq);
  GIMPLE_FIXED_MODE_TYPES_SAT (tq);
  GIMPLE_FIXED_MODE_TYPES (ha);
  GIMPLE_FIXED_MODE_TYPES (sa);
  GIMPLE_FIXED_MODE_TYPES (da);
  GIMPLE_FIXED_MODE_TYPES (ta);
  GIMPLE_FIXED_MODE_TYPES_SAT (ha);
  GIMPLE_FIXED_MODE_TYPES_SAT (sa);
  GIMPLE_FIXED_MODE_TYPES_SAT (da);
  GIMPLE_FIXED_MODE_TYPES_SAT (ta);

  /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
     the precision; they have precision set to match their range, but
     may use a wider mode to match an ABI.  If we change modes, we may
     wind up with bad conversions.  For INTEGER_TYPEs in C, must check
     the precision as well, so as to yield correct results for
     bit-field types.  C++ does not have these separate bit-field
     types, and producing a signed or unsigned variant of an
     ENUMERAL_TYPE may cause other problems as well.  */
  if (!INTEGRAL_TYPE_P (type)
      || TYPE_UNSIGNED (type) == unsignedp)
    return type;

/* True if NODE matches TYPE in both machine mode and precision.  */
#define TYPE_OK(node)							    \
  (TYPE_MODE (type) == TYPE_MODE (node)					    \
   && TYPE_PRECISION (type) == TYPE_PRECISION (node))
  if (TYPE_OK (signed_char_type_node))
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (TYPE_OK (integer_type_node))
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (TYPE_OK (short_integer_type_node))
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (TYPE_OK (long_integer_type_node))
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (TYPE_OK (long_long_integer_type_node))
    return (unsignedp
	    ? long_long_unsigned_type_node
	    : long_long_integer_type_node);

  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    if (int_n_enabled_p[i]
	&& TYPE_MODE (type) == int_n_data[i].m
	&& TYPE_PRECISION (type) == int_n_data[i].bitsize)
	return unsignedp
	  ? int_n_trees[i].unsigned_type
	  : int_n_trees[i].signed_type;

#if HOST_BITS_PER_WIDE_INT >= 64
  if (TYPE_OK (intTI_type_node))
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (TYPE_OK (intDI_type_node))
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (TYPE_OK (intSI_type_node))
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (TYPE_OK (intHI_type_node))
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (TYPE_OK (intQI_type_node))
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

#undef GIMPLE_FIXED_TYPES
#undef GIMPLE_FIXED_MODE_TYPES
#undef GIMPLE_FIXED_TYPES_SAT
#undef GIMPLE_FIXED_MODE_TYPES_SAT
#undef TYPE_OK

  /* Last resort: build a fresh integer type of the right precision
     and signedness.  */
  return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
}
2602
2603
2604/* Return an unsigned type the same as TYPE in other respects.  */
2605
2606tree
2607gimple_unsigned_type (tree type)
2608{
2609  return gimple_signed_or_unsigned_type (true, type);
2610}
2611
2612
2613/* Return a signed type the same as TYPE in other respects.  */
2614
2615tree
2616gimple_signed_type (tree type)
2617{
2618  return gimple_signed_or_unsigned_type (false, type);
2619}
2620
2621
2622/* Return the typed-based alias set for T, which may be an expression
2623   or a type.  Return -1 if we don't do anything special.  */
2624
2625alias_set_type
2626gimple_get_alias_set (tree t)
2627{
2628  /* That's all the expressions we handle specially.  */
2629  if (!TYPE_P (t))
2630    return -1;
2631
2632  /* For convenience, follow the C standard when dealing with
2633     character types.  Any object may be accessed via an lvalue that
2634     has character type.  */
2635  if (t == char_type_node
2636      || t == signed_char_type_node
2637      || t == unsigned_char_type_node)
2638    return 0;
2639
2640  /* Allow aliasing between signed and unsigned variants of the same
2641     type.  We treat the signed variant as canonical.  */
2642  if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
2643    {
2644      tree t1 = gimple_signed_type (t);
2645
2646      /* t1 == t can happen for boolean nodes which are always unsigned.  */
2647      if (t1 != t)
2648	return get_alias_set (t1);
2649    }
2650
2651  /* Allow aliasing between enumeral types and the underlying
2652     integer type.  This is required for C since those are
2653     compatible types.  */
2654  else if (TREE_CODE (t) == ENUMERAL_TYPE)
2655    {
2656      tree t1 = lang_hooks.types.type_for_size (tree_to_uhwi (TYPE_SIZE (t)),
2657						false /* short-cut above */);
2658      return get_alias_set (t1);
2659    }
2660
2661  return -1;
2662}
2663
2664
2665/* Helper for gimple_ior_addresses_taken_1.  */
2666
2667static bool
2668gimple_ior_addresses_taken_1 (gimple *, tree addr, tree, void *data)
2669{
2670  bitmap addresses_taken = (bitmap)data;
2671  addr = get_base_address (addr);
2672  if (addr
2673      && DECL_P (addr))
2674    {
2675      bitmap_set_bit (addresses_taken, DECL_UID (addr));
2676      return true;
2677    }
2678  return false;
2679}
2680
2681/* Set the bit for the uid of all decls that have their address taken
2682   in STMT in the ADDRESSES_TAKEN bitmap.  Returns true if there
2683   were any in this stmt.  */
2684
2685bool
2686gimple_ior_addresses_taken (bitmap addresses_taken, gimple *stmt)
2687{
2688  return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
2689					gimple_ior_addresses_taken_1);
2690}
2691
2692
2693/* Return true when STMTs arguments and return value match those of FNDECL,
2694   a decl of a builtin function.  */
2695
2696bool
2697gimple_builtin_call_types_compatible_p (const gimple *stmt, tree fndecl)
2698{
2699  gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN);
2700
2701  tree ret = gimple_call_lhs (stmt);
2702  if (ret
2703      && !useless_type_conversion_p (TREE_TYPE (ret),
2704				     TREE_TYPE (TREE_TYPE (fndecl))))
2705    return false;
2706
2707  tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2708  unsigned nargs = gimple_call_num_args (stmt);
2709  for (unsigned i = 0; i < nargs; ++i)
2710    {
2711      /* Variadic args follow.  */
2712      if (!targs)
2713	return true;
2714      tree arg = gimple_call_arg (stmt, i);
2715      tree type = TREE_VALUE (targs);
2716      if (!useless_type_conversion_p (type, TREE_TYPE (arg))
2717	  /* char/short integral arguments are promoted to int
2718	     by several frontends if targetm.calls.promote_prototypes
2719	     is true.  Allow such promotion too.  */
2720	  && !(INTEGRAL_TYPE_P (type)
2721	       && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
2722	       && targetm.calls.promote_prototypes (TREE_TYPE (fndecl))
2723	       && useless_type_conversion_p (integer_type_node,
2724					     TREE_TYPE (arg))))
2725	return false;
2726      targs = TREE_CHAIN (targs);
2727    }
2728  if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
2729    return false;
2730  return true;
2731}
2732
2733/* Return true when STMT is operator a replaceable delete call.  */
2734
2735bool
2736gimple_call_replaceable_operator_delete_p (const gcall *stmt)
2737{
2738  tree fndecl;
2739
2740  if ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE)
2741    return DECL_IS_REPLACEABLE_OPERATOR_DELETE_P (fndecl);
2742  return false;
2743}
2744
2745/* Return true when STMT is builtins call.  */
2746
2747bool
2748gimple_call_builtin_p (const gimple *stmt)
2749{
2750  tree fndecl;
2751  if (is_gimple_call (stmt)
2752      && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2753      && DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN)
2754    return gimple_builtin_call_types_compatible_p (stmt, fndecl);
2755  return false;
2756}
2757
2758/* Return true when STMT is builtins call to CLASS.  */
2759
2760bool
2761gimple_call_builtin_p (const gimple *stmt, enum built_in_class klass)
2762{
2763  tree fndecl;
2764  if (is_gimple_call (stmt)
2765      && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2766      && DECL_BUILT_IN_CLASS (fndecl) == klass)
2767    return gimple_builtin_call_types_compatible_p (stmt, fndecl);
2768  return false;
2769}
2770
2771/* Return true when STMT is builtins call to CODE of CLASS.  */
2772
2773bool
2774gimple_call_builtin_p (const gimple *stmt, enum built_in_function code)
2775{
2776  tree fndecl;
2777  if (is_gimple_call (stmt)
2778      && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2779      && fndecl_built_in_p (fndecl, code))
2780    return gimple_builtin_call_types_compatible_p (stmt, fndecl);
2781  return false;
2782}
2783
2784/* If CALL is a call to a combined_fn (i.e. an internal function or
2785   a normal built-in function), return its code, otherwise return
2786   CFN_LAST.  */
2787
2788combined_fn
2789gimple_call_combined_fn (const gimple *stmt)
2790{
2791  if (const gcall *call = dyn_cast <const gcall *> (stmt))
2792    {
2793      if (gimple_call_internal_p (call))
2794	return as_combined_fn (gimple_call_internal_fn (call));
2795
2796      tree fndecl = gimple_call_fndecl (stmt);
2797      if (fndecl
2798	  && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
2799	  && gimple_builtin_call_types_compatible_p (stmt, fndecl))
2800	return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
2801    }
2802  return CFN_LAST;
2803}
2804
2805/* Return true if STMT clobbers memory.  STMT is required to be a
2806   GIMPLE_ASM.  */
2807
2808bool
2809gimple_asm_clobbers_memory_p (const gasm *stmt)
2810{
2811  unsigned i;
2812
2813  for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
2814    {
2815      tree op = gimple_asm_clobber_op (stmt, i);
2816      if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
2817	return true;
2818    }
2819
2820  /* Non-empty basic ASM implicitly clobbers memory.  */
2821  if (gimple_asm_input_p (stmt) && strlen (gimple_asm_string (stmt)) != 0)
2822    return true;
2823
2824  return false;
2825}
2826
2827/* Dump bitmap SET (assumed to contain VAR_DECLs) to FILE.  */
2828
2829void
2830dump_decl_set (FILE *file, bitmap set)
2831{
2832  if (set)
2833    {
2834      bitmap_iterator bi;
2835      unsigned i;
2836
2837      fprintf (file, "{ ");
2838
2839      EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
2840	{
2841	  fprintf (file, "D.%u", i);
2842	  fprintf (file, " ");
2843	}
2844
2845      fprintf (file, "}");
2846    }
2847  else
2848    fprintf (file, "NIL");
2849}
2850
/* Return true when CALL is a call stmt that definitely doesn't
   free any memory or makes it unavailable otherwise.  */
bool
nonfreeing_call_p (gimple *call)
{
  /* Known leaf builtins cannot call back into this TU, so only the
     builtin itself could free.  */
  if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
      && gimple_call_flags (call) & ECF_LEAF)
    switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
      {
	/* Just in case these become ECF_LEAF in the future.  */
	case BUILT_IN_FREE:
	case BUILT_IN_TM_FREE:
	case BUILT_IN_REALLOC:
	case BUILT_IN_STACK_RESTORE:
	  return false;
	default:
	  return true;
      }
  else if (gimple_call_internal_p (call))
    switch (gimple_call_internal_fn (call))
      {
      case IFN_ABNORMAL_DISPATCHER:
        return true;
      case IFN_ASAN_MARK:
	/* Unpoisoning makes memory available again; poisoning
	   effectively makes it unavailable.  */
	return tree_to_uhwi (gimple_call_arg (call, 0)) == ASAN_MARK_UNPOISON;
      default:
	if (gimple_call_flags (call) & ECF_LEAF)
	  return true;
	return false;
      }

  /* For other direct calls, consult the callgraph's nonfreeing_fn
     flag; it is only trustworthy when the final callee cannot be
     interposed at link/run time.  */
  tree fndecl = gimple_call_fndecl (call);
  if (!fndecl)
    return false;
  struct cgraph_node *n = cgraph_node::get (fndecl);
  if (!n)
    return false;
  enum availability availability;
  n = n->function_symbol (&availability);
  if (!n || availability <= AVAIL_INTERPOSABLE)
    return false;
  return n->nonfreeing_fn;
}
2894
2895/* Return true when CALL is a call stmt that definitely need not
2896   be considered to be a memory barrier.  */
2897bool
2898nonbarrier_call_p (gimple *call)
2899{
2900  if (gimple_call_flags (call) & (ECF_PURE | ECF_CONST))
2901    return true;
2902  /* Should extend this to have a nonbarrier_fn flag, just as above in
2903     the nonfreeing case.  */
2904  return false;
2905}
2906
2907/* Callback for walk_stmt_load_store_ops.
2908
2909   Return TRUE if OP will dereference the tree stored in DATA, FALSE
2910   otherwise.
2911
2912   This routine only makes a superficial check for a dereference.  Thus
2913   it must only be used if it is safe to return a false negative.  */
2914static bool
2915check_loadstore (gimple *, tree op, tree, void *data)
2916{
2917  if (TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
2918    {
2919      /* Some address spaces may legitimately dereference zero.  */
2920      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (op));
2921      if (targetm.addr_space.zero_address_valid (as))
2922	return false;
2923
2924      return operand_equal_p (TREE_OPERAND (op, 0), (tree)data, 0);
2925    }
2926  return false;
2927}
2928
2929
2930/* Return true if OP can be inferred to be non-NULL after STMT executes,
2931   either by using a pointer dereference or attributes.  */
2932bool
2933infer_nonnull_range (gimple *stmt, tree op)
2934{
2935  return (infer_nonnull_range_by_dereference (stmt, op)
2936	  || infer_nonnull_range_by_attribute (stmt, op));
2937}
2938
2939/* Return true if OP can be inferred to be non-NULL after STMT
2940   executes by using a pointer dereference.  */
2941bool
2942infer_nonnull_range_by_dereference (gimple *stmt, tree op)
2943{
2944  /* We can only assume that a pointer dereference will yield
2945     non-NULL if -fdelete-null-pointer-checks is enabled.  */
2946  if (!flag_delete_null_pointer_checks
2947      || !POINTER_TYPE_P (TREE_TYPE (op))
2948      || gimple_code (stmt) == GIMPLE_ASM
2949      || gimple_clobber_p (stmt))
2950    return false;
2951
2952  if (walk_stmt_load_store_ops (stmt, (void *)op,
2953				check_loadstore, check_loadstore))
2954    return true;
2955
2956  return false;
2957}
2958
/* Return true if OP can be inferred to be a non-NULL after STMT
   executes by using attributes.  */
bool
infer_nonnull_range_by_attribute (gimple *stmt, tree op)
{
  /* We can only assume that a pointer dereference will yield
     non-NULL if -fdelete-null-pointer-checks is enabled.  */
  if (!flag_delete_null_pointer_checks
      || !POINTER_TYPE_P (TREE_TYPE (op))
      || gimple_code (stmt) == GIMPLE_ASM)
    return false;

  if (is_gimple_call (stmt) && !gimple_call_internal_p (stmt))
    {
      tree fntype = gimple_call_fntype (stmt);
      tree attrs = TYPE_ATTRIBUTES (fntype);
      /* NOTE: the loop variable is re-assigned inside the body; each
	 iteration jumps to the next "nonnull" attribute, so multiple
	 nonnull attribute instances are all inspected.  */
      for (; attrs; attrs = TREE_CHAIN (attrs))
	{
	  attrs = lookup_attribute ("nonnull", attrs);

	  /* If "nonnull" wasn't specified, we know nothing about
	     the argument.  */
	  if (attrs == NULL_TREE)
	    return false;

	  /* If "nonnull" applies to all the arguments, then ARG
	     is non-null if it's in the argument list.  */
	  if (TREE_VALUE (attrs) == NULL_TREE)
	    {
	      for (unsigned int i = 0; i < gimple_call_num_args (stmt); i++)
		{
		  if (POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (stmt, i)))
		      && operand_equal_p (op, gimple_call_arg (stmt, i), 0))
		    return true;
		}
	      return false;
	    }

	  /* Now see if op appears in the nonnull list.  */
	  for (tree t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
	    {
	      /* Attribute argument indices are 1-based.  */
	      unsigned int idx = TREE_INT_CST_LOW (TREE_VALUE (t)) - 1;
	      if (idx < gimple_call_num_args (stmt))
		{
		  tree arg = gimple_call_arg (stmt, idx);
		  if (operand_equal_p (op, arg, 0))
		    return true;
		}
	    }
	}
    }

  /* If this function is marked as returning non-null, then we can
     infer OP is non-null if it is used in the return statement.  */
  if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
    if (gimple_return_retval (return_stmt)
	&& operand_equal_p (gimple_return_retval (return_stmt), op, 0)
	&& lookup_attribute ("returns_nonnull",
			     TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
      return true;

  return false;
}
3022
3023/* Compare two case labels.  Because the front end should already have
3024   made sure that case ranges do not overlap, it is enough to only compare
3025   the CASE_LOW values of each case label.  */
3026
3027static int
3028compare_case_labels (const void *p1, const void *p2)
3029{
3030  const_tree const case1 = *(const_tree const*)p1;
3031  const_tree const case2 = *(const_tree const*)p2;
3032
3033  /* The 'default' case label always goes first.  */
3034  if (!CASE_LOW (case1))
3035    return -1;
3036  else if (!CASE_LOW (case2))
3037    return 1;
3038  else
3039    return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
3040}
3041
/* Sort the case labels in LABEL_VEC in place in ascending order.
   If a 'default' label is present it ends up first (see
   compare_case_labels).  */

void
sort_case_labels (vec<tree> label_vec)
{
  label_vec.qsort (compare_case_labels);
}
3049
3050/* Prepare a vector of case labels to be used in a GIMPLE_SWITCH statement.
3051
3052   LABELS is a vector that contains all case labels to look at.
3053
3054   INDEX_TYPE is the type of the switch index expression.  Case labels
3055   in LABELS are discarded if their values are not in the value range
3056   covered by INDEX_TYPE.  The remaining case label values are folded
3057   to INDEX_TYPE.
3058
3059   If a default case exists in LABELS, it is removed from LABELS and
3060   returned in DEFAULT_CASEP.  If no default case exists, but the
3061   case labels already cover the whole range of INDEX_TYPE, a default
3062   case is returned pointing to one of the existing case labels.
3063   Otherwise DEFAULT_CASEP is set to NULL_TREE.
3064
3065   DEFAULT_CASEP may be NULL, in which case the above comment doesn't
3066   apply and no action is taken regardless of whether a default case is
3067   found or not.  */
3068
3069void
3070preprocess_case_label_vec_for_gimple (vec<tree> labels,
3071				      tree index_type,
3072				      tree *default_casep)
3073{
3074  tree min_value, max_value;
3075  tree default_case = NULL_TREE;
3076  size_t i, len;
3077
3078  i = 0;
3079  min_value = TYPE_MIN_VALUE (index_type);
3080  max_value = TYPE_MAX_VALUE (index_type);
3081  while (i < labels.length ())
3082    {
3083      tree elt = labels[i];
3084      tree low = CASE_LOW (elt);
3085      tree high = CASE_HIGH (elt);
3086      bool remove_element = FALSE;
3087
3088      if (low)
3089	{
3090	  gcc_checking_assert (TREE_CODE (low) == INTEGER_CST);
3091	  gcc_checking_assert (!high || TREE_CODE (high) == INTEGER_CST);
3092
3093	  /* This is a non-default case label, i.e. it has a value.
3094
3095	     See if the case label is reachable within the range of
3096	     the index type.  Remove out-of-range case values.  Turn
3097	     case ranges into a canonical form (high > low strictly)
3098	     and convert the case label values to the index type.
3099
3100	     NB: The type of gimple_switch_index() may be the promoted
3101	     type, but the case labels retain the original type.  */
3102
3103	  if (high)
3104	    {
3105	      /* This is a case range.  Discard empty ranges.
3106		 If the bounds or the range are equal, turn this
3107		 into a simple (one-value) case.  */
3108	      int cmp = tree_int_cst_compare (high, low);
3109	      if (cmp < 0)
3110		remove_element = TRUE;
3111	      else if (cmp == 0)
3112		high = NULL_TREE;
3113	    }
3114
3115	  if (! high)
3116	    {
3117	      /* If the simple case value is unreachable, ignore it.  */
3118	      if ((TREE_CODE (min_value) == INTEGER_CST
3119		   && tree_int_cst_compare (low, min_value) < 0)
3120		  || (TREE_CODE (max_value) == INTEGER_CST
3121		      && tree_int_cst_compare (low, max_value) > 0))
3122		remove_element = TRUE;
3123	      else
3124		low = fold_convert (index_type, low);
3125	    }
3126	  else
3127	    {
3128	      /* If the entire case range is unreachable, ignore it.  */
3129	      if ((TREE_CODE (min_value) == INTEGER_CST
3130		   && tree_int_cst_compare (high, min_value) < 0)
3131		  || (TREE_CODE (max_value) == INTEGER_CST
3132		      && tree_int_cst_compare (low, max_value) > 0))
3133		remove_element = TRUE;
3134	      else
3135		{
3136		  /* If the lower bound is less than the index type's
3137		     minimum value, truncate the range bounds.  */
3138		  if (TREE_CODE (min_value) == INTEGER_CST
3139		      && tree_int_cst_compare (low, min_value) < 0)
3140		    low = min_value;
3141		  low = fold_convert (index_type, low);
3142
3143		  /* If the upper bound is greater than the index type's
3144		     maximum value, truncate the range bounds.  */
3145		  if (TREE_CODE (max_value) == INTEGER_CST
3146		      && tree_int_cst_compare (high, max_value) > 0)
3147		    high = max_value;
3148		  high = fold_convert (index_type, high);
3149
3150		  /* We may have folded a case range to a one-value case.  */
3151		  if (tree_int_cst_equal (low, high))
3152		    high = NULL_TREE;
3153		}
3154	    }
3155
3156	  CASE_LOW (elt) = low;
3157	  CASE_HIGH (elt) = high;
3158	}
3159      else
3160	{
3161	  gcc_assert (!default_case);
3162	  default_case = elt;
3163	  /* The default case must be passed separately to the
3164	     gimple_build_switch routine.  But if DEFAULT_CASEP
3165	     is NULL, we do not remove the default case (it would
3166	     be completely lost).  */
3167	  if (default_casep)
3168	    remove_element = TRUE;
3169	}
3170
3171      if (remove_element)
3172	labels.ordered_remove (i);
3173      else
3174	i++;
3175    }
3176  len = i;
3177
3178  if (!labels.is_empty ())
3179    sort_case_labels (labels);
3180
3181  if (default_casep && !default_case)
3182    {
3183      /* If the switch has no default label, add one, so that we jump
3184	 around the switch body.  If the labels already cover the whole
3185	 range of the switch index_type, add the default label pointing
3186	 to one of the existing labels.  */
3187      if (len
3188	  && TYPE_MIN_VALUE (index_type)
3189	  && TYPE_MAX_VALUE (index_type)
3190	  && tree_int_cst_equal (CASE_LOW (labels[0]),
3191				 TYPE_MIN_VALUE (index_type)))
3192	{
3193	  tree low, high = CASE_HIGH (labels[len - 1]);
3194	  if (!high)
3195	    high = CASE_LOW (labels[len - 1]);
3196	  if (tree_int_cst_equal (high, TYPE_MAX_VALUE (index_type)))
3197	    {
3198	      tree widest_label = labels[0];
3199	      for (i = 1; i < len; i++)
3200		{
3201		  high = CASE_LOW (labels[i]);
3202		  low = CASE_HIGH (labels[i - 1]);
3203		  if (!low)
3204		    low = CASE_LOW (labels[i - 1]);
3205
3206		  if (CASE_HIGH (labels[i]) != NULL_TREE
3207		      && (CASE_HIGH (widest_label) == NULL_TREE
3208			  || (wi::gtu_p
3209			      (wi::to_wide (CASE_HIGH (labels[i]))
3210			       - wi::to_wide (CASE_LOW (labels[i])),
3211			       wi::to_wide (CASE_HIGH (widest_label))
3212			       - wi::to_wide (CASE_LOW (widest_label))))))
3213		    widest_label = labels[i];
3214
3215		  if (wi::to_wide (low) + 1 != wi::to_wide (high))
3216		    break;
3217		}
3218	      if (i == len)
3219		{
3220		  /* Designate the label with the widest range to be the
3221		     default label.  */
3222		  tree label = CASE_LABEL (widest_label);
3223		  default_case = build_case_label (NULL_TREE, NULL_TREE,
3224						   label);
3225		}
3226	    }
3227	}
3228    }
3229
3230  if (default_casep)
3231    *default_casep = default_case;
3232}
3233
3234/* Set the location of all statements in SEQ to LOC.  */
3235
3236void
3237gimple_seq_set_location (gimple_seq seq, location_t loc)
3238{
3239  for (gimple_stmt_iterator i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
3240    gimple_set_location (gsi_stmt (i), loc);
3241}
3242
/* Release SSA_NAMEs in SEQ as well as the GIMPLE statements.  */

void
gimple_seq_discard (gimple_seq seq)
{
  gimple_stmt_iterator gsi;

  /* No increment in the loop header: gsi_remove advances GSI to the
     next statement.  */
  for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
    {
      gimple *stmt = gsi_stmt (gsi);
      gsi_remove (&gsi, true);
      /* Release the SSA names STMT defines before freeing the
	 statement storage itself.  */
      release_defs (stmt);
      ggc_free (stmt);
    }
}
3258
/* See if STMT now calls function that takes no parameters and if so, drop
   call arguments.  This is used when devirtualization machinery redirects
   to __builtin_unreachable or __cxa_pure_virtual.  */

void
maybe_remove_unused_call_args (struct function *fn, gimple *stmt)
{
  /* NOTE(review): assumes STMT has a known callee decl (direct call);
     gimple_call_fndecl would return NULL for an indirect call —
     confirm callers never pass one.  */
  tree decl = gimple_call_fndecl (stmt);
  if (TYPE_ARG_TYPES (TREE_TYPE (decl))
      && TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl))) == void_type_node
      && gimple_call_num_args (stmt))
    {
      /* Shrinking the operand count to 3 keeps only the non-argument
	 slots of a call statement, discarding all arguments.  */
      gimple_set_num_ops (stmt, 3);
      update_stmt_fn (fn, stmt);
    }
}
3275
3276/* Return false if STMT will likely expand to real function call.  */
3277
3278bool
3279gimple_inexpensive_call_p (gcall *stmt)
3280{
3281  if (gimple_call_internal_p (stmt))
3282    return true;
3283  tree decl = gimple_call_fndecl (stmt);
3284  if (decl && is_inexpensive_builtin (decl))
3285    return true;
3286  return false;
3287}
3288
3289/* Return a non-artificial location for STMT.  If STMT does not have
3290   location information, get the location from EXPR.  */
3291
3292location_t
3293gimple_or_expr_nonartificial_location (gimple *stmt, tree expr)
3294{
3295  location_t loc = gimple_nonartificial_location (stmt);
3296  if (loc == UNKNOWN_LOCATION && EXPR_HAS_LOCATION (expr))
3297    loc = tree_nonartificial_location (expr);
3298  return expansion_point_location_if_in_system_header (loc);
3299}
3300
3301
3302#if CHECKING_P
3303
3304namespace selftest {
3305
3306/* Selftests for core gimple structures.  */
3307
3308/* Verify that STMT is pretty-printed as EXPECTED.
3309   Helper function for selftests.  */
3310
3311static void
3312verify_gimple_pp (const char *expected, gimple *stmt)
3313{
3314  pretty_printer pp;
3315  pp_gimple_stmt_1 (&pp, stmt, 0 /* spc */, TDF_NONE /* flags */);
3316  ASSERT_STREQ (expected, pp_formatted_text (&pp));
3317}
3318
3319/* Build a GIMPLE_ASSIGN equivalent to
3320     tmp = 5;
3321   and verify various properties of it.  */
3322
3323static void
3324test_assign_single ()
3325{
3326  tree type = integer_type_node;
3327  tree lhs = build_decl (UNKNOWN_LOCATION, VAR_DECL,
3328			 get_identifier ("tmp"),
3329			 type);
3330  tree rhs = build_int_cst (type, 5);
3331  gassign *stmt = gimple_build_assign (lhs, rhs);
3332  verify_gimple_pp ("tmp = 5;", stmt);
3333
3334  ASSERT_TRUE (is_gimple_assign (stmt));
3335  ASSERT_EQ (lhs, gimple_assign_lhs (stmt));
3336  ASSERT_EQ (lhs, gimple_get_lhs (stmt));
3337  ASSERT_EQ (rhs, gimple_assign_rhs1 (stmt));
3338  ASSERT_EQ (NULL, gimple_assign_rhs2 (stmt));
3339  ASSERT_EQ (NULL, gimple_assign_rhs3 (stmt));
3340  ASSERT_TRUE (gimple_assign_single_p (stmt));
3341  ASSERT_EQ (INTEGER_CST, gimple_assign_rhs_code (stmt));
3342}
3343
3344/* Build a GIMPLE_ASSIGN equivalent to
3345     tmp = a * b;
3346   and verify various properties of it.  */
3347
3348static void
3349test_assign_binop ()
3350{
3351  tree type = integer_type_node;
3352  tree lhs = build_decl (UNKNOWN_LOCATION, VAR_DECL,
3353			 get_identifier ("tmp"),
3354			 type);
3355  tree a = build_decl (UNKNOWN_LOCATION, VAR_DECL,
3356		       get_identifier ("a"),
3357		       type);
3358  tree b = build_decl (UNKNOWN_LOCATION, VAR_DECL,
3359		       get_identifier ("b"),
3360		       type);
3361  gassign *stmt = gimple_build_assign (lhs, MULT_EXPR, a, b);
3362  verify_gimple_pp ("tmp = a * b;", stmt);
3363
3364  ASSERT_TRUE (is_gimple_assign (stmt));
3365  ASSERT_EQ (lhs, gimple_assign_lhs (stmt));
3366  ASSERT_EQ (lhs, gimple_get_lhs (stmt));
3367  ASSERT_EQ (a, gimple_assign_rhs1 (stmt));
3368  ASSERT_EQ (b, gimple_assign_rhs2 (stmt));
3369  ASSERT_EQ (NULL, gimple_assign_rhs3 (stmt));
3370  ASSERT_FALSE (gimple_assign_single_p (stmt));
3371  ASSERT_EQ (MULT_EXPR, gimple_assign_rhs_code (stmt));
3372}
3373
3374/* Build a GIMPLE_NOP and verify various properties of it.  */
3375
3376static void
3377test_nop_stmt ()
3378{
3379  gimple *stmt = gimple_build_nop ();
3380  verify_gimple_pp ("GIMPLE_NOP", stmt);
3381  ASSERT_EQ (GIMPLE_NOP, gimple_code (stmt));
3382  ASSERT_EQ (NULL, gimple_get_lhs (stmt));
3383  ASSERT_FALSE (gimple_assign_single_p (stmt));
3384}
3385
3386/* Build a GIMPLE_RETURN equivalent to
3387     return 7;
3388   and verify various properties of it.  */
3389
3390static void
3391test_return_stmt ()
3392{
3393  tree type = integer_type_node;
3394  tree val = build_int_cst (type, 7);
3395  greturn *stmt = gimple_build_return (val);
3396  verify_gimple_pp ("return 7;", stmt);
3397
3398  ASSERT_EQ (GIMPLE_RETURN, gimple_code (stmt));
3399  ASSERT_EQ (NULL, gimple_get_lhs (stmt));
3400  ASSERT_EQ (val, gimple_return_retval (stmt));
3401  ASSERT_FALSE (gimple_assign_single_p (stmt));
3402}
3403
3404/* Build a GIMPLE_RETURN equivalent to
3405     return;
3406   and verify various properties of it.  */
3407
3408static void
3409test_return_without_value ()
3410{
3411  greturn *stmt = gimple_build_return (NULL);
3412  verify_gimple_pp ("return;", stmt);
3413
3414  ASSERT_EQ (GIMPLE_RETURN, gimple_code (stmt));
3415  ASSERT_EQ (NULL, gimple_get_lhs (stmt));
3416  ASSERT_EQ (NULL, gimple_return_retval (stmt));
3417  ASSERT_FALSE (gimple_assign_single_p (stmt));
3418}
3419
/* Run all of the selftests within this file.  Called from the
   selftest framework when checking is enabled.  */

void
gimple_c_tests ()
{
  test_assign_single ();
  test_assign_binop ();
  test_nop_stmt ();
  test_return_stmt ();
  test_return_without_value ();
}
3431
3432} // namespace selftest
3433
3434
3435#endif /* CHECKING_P */
3436