targhooks.c revision 1.5
1/* Default target hook functions.
2   Copyright (C) 2003-2015 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8Software Foundation; either version 3, or (at your option) any later
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3.  If not see
18<http://www.gnu.org/licenses/>.  */
19
20/* The migration of target macros to target hooks works as follows:
21
22   1. Create a target hook that uses the existing target macros to
23      implement the same functionality.
24
25   2. Convert all the MI files to use the hook instead of the macro.
26
27   3. Repeat for a majority of the remaining target macros.  This will
28      take some time.
29
30   4. Tell target maintainers to start migrating.
31
32   5. Eventually convert the backends to override the hook instead of
33      defining the macros.  This will take some time too.
34
35   6. TBD when, poison the macros.  Unmigrated targets will break at
36      this point.
37
38   Note that we expect steps 1-3 to be done by the people that
39   understand what the MI does with each macro, and step 5 to be done
40   by the target maintainers for their respective targets.
41
42   Note that steps 1 and 2 don't have to be done together, but no
43   target can override the new hook until step 2 is complete for it.
44
45   Once the macros are poisoned, we will revert to the old migration
46   rules - migrate the macro, callers, and targets all at once.  This
47   comment can thus be removed at that point.  */
48
49#include "config.h"
50#include "system.h"
51#include "coretypes.h"
52#include "tm.h"
53#include "machmode.h"
54#include "rtl.h"
55#include "hash-set.h"
56#include "vec.h"
57#include "double-int.h"
58#include "input.h"
59#include "alias.h"
60#include "symtab.h"
61#include "wide-int.h"
62#include "inchash.h"
63#include "tree.h"
64#include "fold-const.h"
65#include "stor-layout.h"
66#include "varasm.h"
67#include "hashtab.h"
68#include "hard-reg-set.h"
69#include "function.h"
70#include "flags.h"
71#include "statistics.h"
72#include "real.h"
73#include "fixed-value.h"
74#include "insn-config.h"
75#include "expmed.h"
76#include "dojump.h"
77#include "explow.h"
78#include "calls.h"
79#include "emit-rtl.h"
80#include "stmt.h"
81#include "expr.h"
82#include "output.h"
83#include "diagnostic-core.h"
84#include "target.h"
85#include "tm_p.h"
86#include "target-def.h"
87#include "regs.h"
88#include "reload.h"
89#include "insn-codes.h"
90#include "optabs.h"
91#include "recog.h"
92#include "intl.h"
93#include "opts.h"
94#include "tree-ssa-alias.h"
95#include "gimple-expr.h"
96#include "gimplify.h"
97#include "stringpool.h"
98#include "tree-ssanames.h"
99
100
101bool
102default_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
103			      rtx addr ATTRIBUTE_UNUSED,
104			      bool strict ATTRIBUTE_UNUSED)
105{
106#ifdef GO_IF_LEGITIMATE_ADDRESS
107  /* Defer to the old implementation using a goto.  */
108  if (strict)
109    return strict_memory_address_p (mode, addr);
110  else
111    return memory_address_p (mode, addr);
112#else
113  gcc_unreachable ();
114#endif
115}
116
/* The default implementation of TARGET_ASM_EXTERNAL_LIBCALL: emit an
   assembler directive declaring libcall symbol FUN external, but only
   if the target defines ASM_OUTPUT_EXTERNAL_LIBCALL; otherwise do
   nothing.  */

void
default_external_libcall (rtx fun ATTRIBUTE_UNUSED)
{
#ifdef ASM_OUTPUT_EXTERNAL_LIBCALL
  ASM_OUTPUT_EXTERNAL_LIBCALL (asm_out_file, fun);
#endif
}
124
125int
126default_unspec_may_trap_p (const_rtx x, unsigned flags)
127{
128  int i;
129
130  /* Any floating arithmetic may trap.  */
131  if ((SCALAR_FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math))
132    return 1;
133
134  for (i = 0; i < XVECLEN (x, 0); ++i)
135    {
136      if (may_trap_p_1 (XVECEXP (x, 0, i), flags))
137	return 1;
138    }
139
140  return 0;
141}
142
/* The default implementation of TARGET_PROMOTE_FUNCTION_MODE: promote
   MODE via promote_mode only when a type is available and FOR_RETURN
   is 2 (the libcall-return case); otherwise leave MODE unchanged.
   *PUNSIGNEDP may be updated by promote_mode.  */

machine_mode
default_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
			       machine_mode mode,
			       int *punsignedp ATTRIBUTE_UNUSED,
			       const_tree funtype ATTRIBUTE_UNUSED,
			       int for_return ATTRIBUTE_UNUSED)
{
  if (type != NULL_TREE && for_return == 2)
    return promote_mode (type, mode, punsignedp);
  return mode;
}
154
/* Variant of TARGET_PROMOTE_FUNCTION_MODE that promotes unconditionally,
   regardless of FOR_RETURN, by deferring straight to promote_mode.  */

machine_mode
default_promote_function_mode_always_promote (const_tree type,
					      machine_mode mode,
					      int *punsignedp,
					      const_tree funtype ATTRIBUTE_UNUSED,
					      int for_return ATTRIBUTE_UNUSED)
{
  return promote_mode (type, mode, punsignedp);
}
164
165machine_mode
166default_cc_modes_compatible (machine_mode m1, machine_mode m2)
167{
168  if (m1 == m2)
169    return m1;
170  return VOIDmode;
171}
172
/* The default implementation of TARGET_RETURN_IN_MEMORY: return TYPE
   in memory exactly when its mode is BLKmode (i.e. it does not fit a
   machine register mode).  */

bool
default_return_in_memory (const_tree type,
			  const_tree fntype ATTRIBUTE_UNUSED)
{
  return (TYPE_MODE (type) == BLKmode);
}
179
/* The default implementation of TARGET_LEGITIMIZE_ADDRESS: perform no
   transformation and return address X unchanged.  */

rtx
default_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
			    machine_mode mode ATTRIBUTE_UNUSED)
{
  return x;
}
186
/* The default implementation of TARGET_LEGITIMIZE_ADDRESS_DISPLACEMENT:
   report that no displacement splitting is possible.  */

bool
default_legitimize_address_displacement (rtx *disp ATTRIBUTE_UNUSED,
					 rtx *offset ATTRIBUTE_UNUSED,
					 machine_mode mode ATTRIBUTE_UNUSED)
{
  return false;
}
194
/* The default implementation of TARGET_EXPAND_BUILTIN_SAVEREGS: the
   builtin is unsupported, so issue an error and return a harmless
   constant so expansion can continue.  */

rtx
default_expand_builtin_saveregs (void)
{
  error ("__builtin_saveregs not supported by this target");
  return const0_rtx;
}
201
/* The default implementation of TARGET_SETUP_INCOMING_VARARGS: do
   nothing.  default_pretend_outgoing_varargs_named below compares
   against this function, so targets that need varargs setup must
   install their own hook rather than wrap this one.  */

void
default_setup_incoming_varargs (cumulative_args_t ca ATTRIBUTE_UNUSED,
				machine_mode mode ATTRIBUTE_UNUSED,
				tree type ATTRIBUTE_UNUSED,
				int *pretend_arg_size ATTRIBUTE_UNUSED,
				int second_time ATTRIBUTE_UNUSED)
{
}
210
/* The default implementation of TARGET_BUILTIN_SETJMP_FRAME_VALUE:
   use the virtual stack-variables register as the frame value saved
   by __builtin_setjmp.  */

rtx
default_builtin_setjmp_frame_value (void)
{
  return virtual_stack_vars_rtx;
}
218
/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns false.  */

bool
hook_bool_CUMULATIVE_ARGS_false (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
  return false;
}
226
/* The default implementation of TARGET_PRETEND_OUTGOING_VARARGS_NAMED:
   treat outgoing anonymous arguments as named exactly when the target
   has installed a non-default setup_incoming_varargs hook (compared
   by function address against default_setup_incoming_varargs).  */

bool
default_pretend_outgoing_varargs_named (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
  return (targetm.calls.setup_incoming_varargs
	  != default_setup_incoming_varargs);
}
233
/* The default implementation of TARGET_EH_RETURN_FILTER_MODE: use the
   same mode the target uses for unwind words.  */

machine_mode
default_eh_return_filter_mode (void)
{
  return targetm.unwind_word_mode ();
}
239
/* The default implementation of TARGET_LIBGCC_CMP_RETURN_MODE:
   libgcc comparison routines return a word.  */

machine_mode
default_libgcc_cmp_return_mode (void)
{
  return word_mode;
}
245
/* The default implementation of TARGET_LIBGCC_SHIFT_COUNT_MODE:
   libgcc shift routines take a word-mode shift count.  */

machine_mode
default_libgcc_shift_count_mode (void)
{
  return word_mode;
}
251
/* The default implementation of TARGET_UNWIND_WORD_MODE: unwind data
   uses the target's ordinary word mode.  */

machine_mode
default_unwind_word_mode (void)
{
  return word_mode;
}
257
/* The default implementation of TARGET_SHIFT_TRUNCATION_MASK: if the
   target truncates shift counts (SHIFT_COUNT_TRUNCATED), the mask is
   the bitsize of MODE minus one; otherwise 0 means "no truncation".  */

unsigned HOST_WIDE_INT
default_shift_truncation_mask (machine_mode mode)
{
  return SHIFT_COUNT_TRUNCATED ? GET_MODE_BITSIZE (mode) - 1 : 0;
}
265
/* The default implementation of TARGET_MIN_DIVISIONS_FOR_RECIP_MUL:
   require 3 divisions before converting to reciprocal-multiply when
   the target has a hardware divide insn, otherwise only 2.  */

unsigned int
default_min_divisions_for_recip_mul (machine_mode mode ATTRIBUTE_UNUSED)
{
  return have_insn_for (DIV, mode) ? 3 : 2;
}
273
/* The default implementation of TARGET_MODE_REP_EXTENDED: report that
   nothing is known about how MODE values are represented in MODE_REP.  */

int
default_mode_rep_extended (machine_mode mode ATTRIBUTE_UNUSED,
			   machine_mode mode_rep ATTRIBUTE_UNUSED)
{
  return UNKNOWN;
}
282
/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns true.  */

bool
hook_bool_CUMULATIVE_ARGS_true (cumulative_args_t a ATTRIBUTE_UNUSED)
{
  return true;
}
290
/* The default implementation of TARGET_C_MODE_FOR_SUFFIX: return the
   machine mode for a non-standard constant suffix, or VOIDmode if
   non-standard suffixes are unsupported (the default).  */

machine_mode
default_mode_for_suffix (char suffix ATTRIBUTE_UNUSED)
{
  return VOIDmode;
}
298
/* The default implementation of TARGET_CXX_GUARD_TYPE.  The generic
   C++ ABI specifies the one-time-construction guard is a 64-bit
   value, hence long long.  */

tree
default_cxx_guard_type (void)
{
  return long_long_integer_type_node;
}
305
306/* Returns the size of the cookie to use when allocating an array
307   whose elements have the indicated TYPE.  Assumes that it is already
308   known that a cookie is needed.  */
309
310tree
311default_cxx_get_cookie_size (tree type)
312{
313  tree cookie_size;
314
315  /* We need to allocate an additional max (sizeof (size_t), alignof
316     (true_type)) bytes.  */
317  tree sizetype_size;
318  tree type_align;
319
320  sizetype_size = size_in_bytes (sizetype);
321  type_align = size_int (TYPE_ALIGN_UNIT (type));
322  if (tree_int_cst_lt (type_align, sizetype_size))
323    cookie_size = sizetype_size;
324  else
325    cookie_size = type_align;
326
327  return cookie_size;
328}
329
/* Return true if a parameter must be passed by reference.  This version
   of the TARGET_PASS_BY_REFERENCE hook uses just MUST_PASS_IN_STACK,
   i.e. anything that must live on the stack is passed by reference.  */

bool
hook_pass_by_reference_must_pass_in_stack (cumulative_args_t c ATTRIBUTE_UNUSED,
	machine_mode mode ATTRIBUTE_UNUSED, const_tree type ATTRIBUTE_UNUSED,
	bool named_arg ATTRIBUTE_UNUSED)
{
  return targetm.calls.must_pass_in_stack (mode, type);
}
340
/* Return true if a parameter follows callee copies conventions.  This
   version of the hook is true for all named arguments and false for
   unnamed (variadic) ones.  */

bool
hook_callee_copies_named (cumulative_args_t ca ATTRIBUTE_UNUSED,
			  machine_mode mode ATTRIBUTE_UNUSED,
			  const_tree type ATTRIBUTE_UNUSED, bool named)
{
  return named;
}
351
/* Emit to STREAM the assembler syntax for insn operand X.  The default
   implementation of TARGET_PRINT_OPERAND defers to the old
   PRINT_OPERAND macro; targets without the macro must override the
   hook, so reaching the fallback is an internal error.  */

void
default_print_operand (FILE *stream ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
		       int code ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND
  PRINT_OPERAND (stream, x, code);
#else
  gcc_unreachable ();
#endif
}
364
/* Emit to STREAM the assembler syntax for an insn operand whose memory
   address is X.  Defers to the old PRINT_OPERAND_ADDRESS macro when it
   is defined; otherwise the target must have overridden the hook.  */

void
default_print_operand_address (FILE *stream ATTRIBUTE_UNUSED,
			       rtx x ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND_ADDRESS
  PRINT_OPERAND_ADDRESS (stream, x);
#else
  gcc_unreachable ();
#endif
}
378
/* Return true if CODE is a valid punctuation character for the
   `print_operand' hook.  Defers to the old
   PRINT_OPERAND_PUNCT_VALID_P macro; with no macro, no punctuation
   characters are valid.  */

bool
default_print_operand_punct_valid_p (unsigned char code ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND_PUNCT_VALID_P
  return PRINT_OPERAND_PUNCT_VALID_P (code);
#else
  return false;
#endif
}
391
/* The default implementation of TARGET_MANGLE_ASSEMBLER_NAME: strip
   any target encoding from NAME, and unless NAME carried a leading
   '*' (which suppresses prefixing), prepend the user label prefix.
   The result is interned as an identifier.  */

tree
default_mangle_assembler_name (const char *name ATTRIBUTE_UNUSED)
{
  /* Skip a leading '*' before stripping the target's encoding.  */
  const char *skipped = name + (*name == '*' ? 1 : 0);
  const char *stripped = targetm.strip_name_encoding (skipped);
  if (*name != '*' && user_label_prefix[0])
    stripped = ACONCAT ((user_label_prefix, stripped, NULL));
  return get_identifier (stripped);
}
402
403/* True if MODE is valid for the target.  By "valid", we mean able to
404   be manipulated in non-trivial ways.  In particular, this means all
405   the arithmetic is supported.
406
407   By default we guess this means that any C type is supported.  If
408   we can't map the mode back to a type that would be available in C,
409   then reject it.  Special case, here, is the double-word arithmetic
410   supported by optabs.c.  */
411
412bool
413default_scalar_mode_supported_p (machine_mode mode)
414{
415  int precision = GET_MODE_PRECISION (mode);
416
417  switch (GET_MODE_CLASS (mode))
418    {
419    case MODE_PARTIAL_INT:
420    case MODE_INT:
421      if (precision == CHAR_TYPE_SIZE)
422	return true;
423      if (precision == SHORT_TYPE_SIZE)
424	return true;
425      if (precision == INT_TYPE_SIZE)
426	return true;
427      if (precision == LONG_TYPE_SIZE)
428	return true;
429      if (precision == LONG_LONG_TYPE_SIZE)
430	return true;
431      if (precision == 2 * BITS_PER_WORD)
432	return true;
433      return false;
434
435    case MODE_FLOAT:
436      if (precision == FLOAT_TYPE_SIZE)
437	return true;
438      if (precision == DOUBLE_TYPE_SIZE)
439	return true;
440      if (precision == LONG_DOUBLE_TYPE_SIZE)
441	return true;
442      return false;
443
444    case MODE_DECIMAL_FLOAT:
445    case MODE_FRACT:
446    case MODE_UFRACT:
447    case MODE_ACCUM:
448    case MODE_UACCUM:
449      return false;
450
451    default:
452      gcc_unreachable ();
453    }
454}
455
/* Return true if libgcc supports floating-point mode MODE (known to
   be supported as a scalar mode).  Each candidate case is guarded by
   an #ifdef because not every target defines all of SF/DF/XF/TFmode.  */

bool
default_libgcc_floating_mode_supported_p (machine_mode mode)
{
  switch (mode)
    {
#ifdef HAVE_SFmode
    case SFmode:
#endif
#ifdef HAVE_DFmode
    case DFmode:
#endif
#ifdef HAVE_XFmode
    case XFmode:
#endif
#ifdef HAVE_TFmode
    case TFmode:
#endif
      return true;

    default:
      return false;
    }
}
482
/* Make some target macros useable by target-independent code.  */

/* Hook wrapper exposing the WORDS_BIG_ENDIAN target macro as a bool.  */
bool
targhook_words_big_endian (void)
{
  return !!WORDS_BIG_ENDIAN;
}
489
/* Hook wrapper exposing the FLOAT_WORDS_BIG_ENDIAN target macro as a
   bool.  */
bool
targhook_float_words_big_endian (void)
{
  return !!FLOAT_WORDS_BIG_ENDIAN;
}
495
/* True if the target supports floating-point exceptions and rounding
   modes.  The default uses the availability of a hardware double-add
   pattern (HAVE_adddf3) as a proxy for hardware floating point.  */

bool
default_float_exceptions_rounding_supported_p (void)
{
#ifdef HAVE_adddf3
  return HAVE_adddf3;
#else
  return false;
#endif
}
508
/* True if the target supports decimal floating point, per the
   configure-time ENABLE_DECIMAL_FLOAT setting.  */

bool
default_decimal_float_supported_p (void)
{
  return ENABLE_DECIMAL_FLOAT;
}
516
/* True if the target supports fixed-point arithmetic, per the
   configure-time ENABLE_FIXED_POINT setting.  */

bool
default_fixed_point_supported_p (void)
{
  return ENABLE_FIXED_POINT;
}
524
/* True if the target supports GNU indirect functions, per the
   configure-time HAVE_GNU_INDIRECT_FUNCTION setting.  */

bool
default_has_ifunc_p (void)
{
  return HAVE_GNU_INDIRECT_FUNCTION;
}
532
/* NULL if INSN insn is valid within a low-overhead loop, otherwise returns
   an error message.

   This function checks whether a given INSN is valid within a low-overhead
   loop.  If INSN is invalid it returns the reason for that, otherwise it
   returns NULL. A called function may clobber any special registers required
   for low-overhead looping. Additionally, some targets (eg, PPC) use the count
   register for branch on table instructions. We reject the doloop pattern in
   these cases.  */

const char *
default_invalid_within_doloop (const rtx_insn *insn)
{
  /* Calls may clobber the loop's special registers.  */
  if (CALL_P (insn))
    return "Function call in loop.";

  /* Tablejumps/computed jumps may use the count register.  */
  if (tablejump_p (insn, NULL, NULL) || computed_jump_p (insn))
    return "Computed branch in the loop.";

  return NULL;
}
554
/* Mapping of builtin functions to vectorized variants.  The default
   TARGET_VECTORIZE_BUILTIN_VECTORIZED_FUNCTION knows no vectorized
   builtins and returns NULL_TREE.  */

tree
default_builtin_vectorized_function (tree fndecl ATTRIBUTE_UNUSED,
				     tree type_out ATTRIBUTE_UNUSED,
				     tree type_in ATTRIBUTE_UNUSED)
{
  return NULL_TREE;
}
564
/* Vectorized conversion.  The default
   TARGET_VECTORIZE_BUILTIN_CONVERSION provides no builtin conversion
   and returns NULL_TREE.  */

tree
default_builtin_vectorized_conversion (unsigned int code ATTRIBUTE_UNUSED,
				       tree dest_type ATTRIBUTE_UNUSED,
				       tree src_type ATTRIBUTE_UNUSED)
{
  return NULL_TREE;
}
574
/* Default vectorizer cost model values: simple statements cost 1,
   unaligned accesses 2, a taken conditional branch 3, and building a
   vector from scalars costs roughly one insn per pair of elements.  */

int
default_builtin_vectorization_cost (enum vect_cost_for_stmt type_of_cost,
                                    tree vectype,
                                    int misalign ATTRIBUTE_UNUSED)
{
  unsigned elements;

  switch (type_of_cost)
    {
      case scalar_stmt:
      case scalar_load:
      case scalar_store:
      case vector_stmt:
      case vector_load:
      case vector_store:
      case vec_to_scalar:
      case scalar_to_vec:
      case cond_branch_not_taken:
      case vec_perm:
      case vec_promote_demote:
        return 1;

      case unaligned_load:
      case unaligned_store:
        return 2;

      case cond_branch_taken:
        return 3;

      case vec_construct:
	/* VECTYPE is only consulted for this case.  */
	elements = TYPE_VECTOR_SUBPARTS (vectype);
	return elements / 2 + 1;

      default:
        gcc_unreachable ();
    }
}
614
/* Reciprocal.  The default TARGET_BUILTIN_RECIPROCAL provides no
   reciprocal builtin and returns NULL_TREE.  */

tree
default_builtin_reciprocal (unsigned int fn ATTRIBUTE_UNUSED,
			    bool md_fn ATTRIBUTE_UNUSED,
			    bool sqrt ATTRIBUTE_UNUSED)
{
  return NULL_TREE;
}
624
/* Generic hook over (CUMULATIVE_ARGS *, mode, tree, bool) that always
   returns false.  */

bool
hook_bool_CUMULATIVE_ARGS_mode_tree_bool_false (
	cumulative_args_t ca ATTRIBUTE_UNUSED,
	machine_mode mode ATTRIBUTE_UNUSED,
	const_tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
  return false;
}
633
/* Generic hook over (CUMULATIVE_ARGS *, mode, tree, bool) that always
   returns true.  */

bool
hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true (
	cumulative_args_t ca ATTRIBUTE_UNUSED,
	machine_mode mode ATTRIBUTE_UNUSED,
	const_tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
  return true;
}
642
/* Generic hook over (CUMULATIVE_ARGS *, mode, tree, bool) that always
   returns 0.  */

int
hook_int_CUMULATIVE_ARGS_mode_tree_bool_0 (
	cumulative_args_t ca ATTRIBUTE_UNUSED,
	machine_mode mode ATTRIBUTE_UNUSED,
	tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
  return 0;
}
651
/* The default implementation of TARGET_FUNCTION_ARG_ADVANCE: every
   target must override this hook, so reaching it is an internal
   error.  */

void
default_function_arg_advance (cumulative_args_t ca ATTRIBUTE_UNUSED,
			      machine_mode mode ATTRIBUTE_UNUSED,
			      const_tree type ATTRIBUTE_UNUSED,
			      bool named ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
660
/* The default implementation of TARGET_FUNCTION_ARG: every target
   must override this hook, so reaching it is an internal error.  */

rtx
default_function_arg (cumulative_args_t ca ATTRIBUTE_UNUSED,
		      machine_mode mode ATTRIBUTE_UNUSED,
		      const_tree type ATTRIBUTE_UNUSED,
		      bool named ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
669
/* The default implementation of TARGET_FUNCTION_INCOMING_ARG: every
   target that uses it must override it, so reaching it is an internal
   error.  */

rtx
default_function_incoming_arg (cumulative_args_t ca ATTRIBUTE_UNUSED,
			       machine_mode mode ATTRIBUTE_UNUSED,
			       const_tree type ATTRIBUTE_UNUSED,
			       bool named ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
678
/* The default implementation of TARGET_FUNCTION_ARG_BOUNDARY: align
   all arguments to PARM_BOUNDARY bits.  */

unsigned int
default_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
			       const_tree type ATTRIBUTE_UNUSED)
{
  return PARM_BOUNDARY;
}
685
/* The default implementation of TARGET_FUNCTION_ARG_ROUND_BOUNDARY:
   round argument sizes to PARM_BOUNDARY bits.  */

unsigned int
default_function_arg_round_boundary (machine_mode mode ATTRIBUTE_UNUSED,
				     const_tree type ATTRIBUTE_UNUSED)
{
  return PARM_BOUNDARY;
}
692
/* Generic hook taking a bitmap and doing nothing with it.  */

void
hook_void_bitmap (bitmap regs ATTRIBUTE_UNUSED)
{
}
697
/* Default TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN hook: no argument is
   considered invalid, so always return NULL (no error message).  */

const char *
hook_invalid_arg_for_unprototyped_fn (
	const_tree typelist ATTRIBUTE_UNUSED,
	const_tree funcdecl ATTRIBUTE_UNUSED,
	const_tree val ATTRIBUTE_UNUSED)
{
  return NULL;
}
706
/* Initialize the stack protection decls.  */

/* Stack protection related decls living in libgcc.  */
static GTY(()) tree stack_chk_guard_decl;

/* The default implementation of TARGET_STACK_PROTECT_GUARD: build (and
   cache in a GC root) the VAR_DECL for the external __stack_chk_guard
   canary provided by libc/libgcc.  */

tree
default_stack_protect_guard (void)
{
  tree t = stack_chk_guard_decl;

  if (t == NULL)
    {
      rtx x;

      t = build_decl (UNKNOWN_LOCATION,
		      VAR_DECL, get_identifier ("__stack_chk_guard"),
		      ptr_type_node);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;

      /* Do not share RTL as the declaration is visible outside of
	 current function.  */
      x = DECL_RTL (t);
      RTX_FLAG (x, used) = 1;

      stack_chk_guard_decl = t;
    }

  return t;
}
742
/* Cached decl for the stack-protector failure routine; shared by the
   external and hidden variants below.  */
static GTY(()) tree stack_chk_fail_decl;

/* The default implementation of TARGET_STACK_PROTECT_FAIL: build (and
   cache) a declaration of the external __stack_chk_fail routine and
   return a call expression invoking it.  */

tree
default_external_stack_protect_fail (void)
{
  tree t = stack_chk_fail_decl;

  if (t == NULL_TREE)
    {
      t = build_function_type_list (void_type_node, NULL_TREE);
      t = build_decl (UNKNOWN_LOCATION,
		      FUNCTION_DECL, get_identifier ("__stack_chk_fail"), t);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      TREE_NOTHROW (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;
      DECL_VISIBILITY (t) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (t) = 1;

      stack_chk_fail_decl = t;
    }

  return build_call_expr (t, 0);
}
771
/* Variant of the stack-protector failure hook that prefers the local
   __stack_chk_fail_local routine when assembling with hidden-symbol
   support and PIC; otherwise falls back to the external variant.
   NOTE(review): this caches into the same stack_chk_fail_decl static
   as the external variant above — presumably a given target only ever
   uses one of the two; confirm before mixing them.  */

tree
default_hidden_stack_protect_fail (void)
{
#ifndef HAVE_GAS_HIDDEN
  return default_external_stack_protect_fail ();
#else
  tree t = stack_chk_fail_decl;

  /* Only PIC code needs the local (PLT-avoiding) entry point.  */
  if (!flag_pic)
    return default_external_stack_protect_fail ();

  if (t == NULL_TREE)
    {
      t = build_function_type_list (void_type_node, NULL_TREE);
      t = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
		      get_identifier ("__stack_chk_fail_local"), t);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      TREE_NOTHROW (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;
      DECL_VISIBILITY_SPECIFIED (t) = 1;
#if 1
      /*
       * This is a hack:
       * It appears that our gas does not generate @PLT for hidden
       * symbols. It could be that we need a newer version, or that
       * this local function is handled differently on linux.
       */
      DECL_VISIBILITY (t) = VISIBILITY_DEFAULT;
#else
      DECL_VISIBILITY (t) = VISIBILITY_HIDDEN;
#endif

      stack_chk_fail_decl = t;
    }

  return build_call_expr (t, 0);
#endif
}
815
/* Hook answering whether rtx X is commutative, ignoring the outer
   code; defers to the COMMUTATIVE_P predicate.  */

bool
hook_bool_const_rtx_commutative_p (const_rtx x,
				   int outer_code ATTRIBUTE_UNUSED)
{
  return COMMUTATIVE_P (x);
}
822
/* The default implementation of TARGET_FUNCTION_VALUE: defer to the
   old FUNCTION_VALUE macro, which only accepts a decl (or NULL), not
   a bare function type.  */

rtx
default_function_value (const_tree ret_type ATTRIBUTE_UNUSED,
			const_tree fn_decl_or_type,
			bool outgoing ATTRIBUTE_UNUSED)
{
  /* The old interface doesn't handle receiving the function type.  */
  if (fn_decl_or_type
      && !DECL_P (fn_decl_or_type))
    fn_decl_or_type = NULL;

#ifdef FUNCTION_VALUE
  return FUNCTION_VALUE (ret_type, fn_decl_or_type);
#else
  gcc_unreachable ();
#endif
}
839
/* The default implementation of TARGET_LIBCALL_VALUE: defer to the
   old LIBCALL_VALUE macro; with no macro the target must override the
   hook.  */

rtx
default_libcall_value (machine_mode mode ATTRIBUTE_UNUSED,
		       const_rtx fun ATTRIBUTE_UNUSED)
{
#ifdef LIBCALL_VALUE
  return LIBCALL_VALUE (mode);
#else
  gcc_unreachable ();
#endif
}
850
/* The default hook for TARGET_FUNCTION_VALUE_REGNO_P: defer to the
   old FUNCTION_VALUE_REGNO_P macro; with no macro the target must
   override the hook.  */

bool
default_function_value_regno_p (const unsigned int regno ATTRIBUTE_UNUSED)
{
#ifdef FUNCTION_VALUE_REGNO_P
  return FUNCTION_VALUE_REGNO_P (regno);
#else
  gcc_unreachable ();
#endif
}
862
/* The default implementation of TARGET_INTERNAL_ARG_POINTER.  */

rtx
default_internal_arg_pointer (void)
{
  /* If the reg that the virtual arg pointer will be translated into is
     not a fixed reg or is the stack pointer, make a copy of the virtual
     arg pointer, and address parms via the copy.  The frame pointer is
     considered fixed even though it is not marked as such.  */
  if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
       || ! (fixed_regs[ARG_POINTER_REGNUM]
	     || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
    return copy_to_reg (virtual_incoming_args_rtx);
  else
    return virtual_incoming_args_rtx;
}
877
/* The default implementation of TARGET_STATIC_CHAIN: return the RTL
   for the static chain, using STATIC_CHAIN_INCOMING_REGNUM for the
   incoming side when defined, else STATIC_CHAIN_REGNUM.  If the
   target defines neither macro, nested functions are unsupported:
   issue a one-time "sorry" and return a dummy MEM so compilation can
   limp on.  */

rtx
default_static_chain (const_tree ARG_UNUSED (fndecl_or_type), bool incoming_p)
{
  if (incoming_p)
    {
#ifdef STATIC_CHAIN_INCOMING_REGNUM
      return gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
#endif
    }

#ifdef STATIC_CHAIN_REGNUM
  return gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
#endif

  {
    /* Latch so the diagnostic is only emitted once per compilation.  */
    static bool issued_error;
    if (!issued_error)
      {
	issued_error = true;
	sorry ("nested functions not supported on this target");
      }

    /* It really doesn't matter what we return here, so long at it
       doesn't cause the rest of the compiler to crash.  */
    return gen_rtx_MEM (Pmode, stack_pointer_rtx);
  }
}
905
/* The default implementation of TARGET_TRAMPOLINE_INIT: trampolines
   (and hence nested functions) are unsupported; issue a "sorry".  */

void
default_trampoline_init (rtx ARG_UNUSED (m_tramp), tree ARG_UNUSED (t_func),
			 rtx ARG_UNUSED (r_chain))
{
  sorry ("nested function trampolines not supported on this target");
}
912
/* The default implementation of TARGET_RETURN_POPS_ARGS: the callee
   pops no argument bytes.  */

int
default_return_pops_args (tree fundecl ATTRIBUTE_UNUSED,
			  tree funtype ATTRIBUTE_UNUSED,
			  int size ATTRIBUTE_UNUSED)
{
  return 0;
}
920
/* The default implementation of TARGET_BRANCH_TARGET_REGISTER_CLASS:
   no branch-target registers, so NO_REGS.  */

reg_class_t
default_branch_target_register_class (void)
{
  return NO_REGS;
}
926
/* The default implementation of TARGET_LRA_P: the local register
   allocator is opt-in, so default to false.  (The `extern' that was
   on this definition was redundant — external linkage is the default
   for function definitions — and inconsistent with every other
   definition in this file.)  */

bool
default_lra_p (void)
{
  return false;
}
932
/* The default implementation of TARGET_REGISTER_PRIORITY: all hard
   registers have equal (zero) allocation priority.  */

int
default_register_priority (int hard_regno ATTRIBUTE_UNUSED)
{
  return 0;
}
938
/* The default implementation of TARGET_REGISTER_USAGE_LEVELING_P:
   register-usage leveling is disabled.  (Dropped the redundant
   `extern' from the definition, for consistency with the rest of the
   file.)  */

bool
default_register_usage_leveling_p (void)
{
  return false;
}
944
/* The default implementation of TARGET_DIFFERENT_ADDR_DISPLACEMENT_P:
   assume displacements do not differ between address registers.
   (Dropped the redundant `extern' from the definition, for
   consistency with the rest of the file.)  */

bool
default_different_addr_displacement_p (void)
{
  return false;
}
950
/* The default implementation of TARGET_SECONDARY_RELOAD: implement
   secondary reloads via the old SECONDARY_INPUT_RELOAD_CLASS /
   SECONDARY_OUTPUT_RELOAD_CLASS macros and the reload_in/reload_out
   optabs.  Returns the class of the needed intermediate register, or
   NO_REGS if none (possibly setting sri->icode to a reload insn
   instead).  */

reg_class_t
default_secondary_reload (bool in_p ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
			  reg_class_t reload_class_i ATTRIBUTE_UNUSED,
			  machine_mode reload_mode ATTRIBUTE_UNUSED,
			  secondary_reload_info *sri)
{
  enum reg_class rclass = NO_REGS;
  enum reg_class reload_class = (enum reg_class) reload_class_i;

  /* A previous round already chose a tertiary reload insn; reuse it.  */
  if (sri->prev_sri && sri->prev_sri->t_icode != CODE_FOR_nothing)
    {
      sri->icode = sri->prev_sri->t_icode;
      return NO_REGS;
    }
#ifdef SECONDARY_INPUT_RELOAD_CLASS
  if (in_p)
    rclass = SECONDARY_INPUT_RELOAD_CLASS (reload_class, reload_mode, x);
#endif
#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
  if (! in_p)
    rclass = SECONDARY_OUTPUT_RELOAD_CLASS (reload_class, reload_mode, x);
#endif
  if (rclass != NO_REGS)
    {
      /* See whether a dedicated reload_in/reload_out pattern can do
	 the move instead of (or in addition to) the scratch class.  */
      enum insn_code icode
	= direct_optab_handler (in_p ? reload_in_optab : reload_out_optab,
				reload_mode);

      if (icode != CODE_FOR_nothing
	  && !insn_operand_matches (icode, in_p, x))
	icode = CODE_FOR_nothing;
      else if (icode != CODE_FOR_nothing)
	{
	  const char *insn_constraint, *scratch_constraint;
	  enum reg_class insn_class, scratch_class;

	  gcc_assert (insn_data[(int) icode].n_operands == 3);
	  insn_constraint = insn_data[(int) icode].operand[!in_p].constraint;
	  if (!*insn_constraint)
	    insn_class = ALL_REGS;
	  else
	    {
	      if (in_p)
		{
		  gcc_assert (*insn_constraint == '=');
		  insn_constraint++;
		}
	      insn_class = (reg_class_for_constraint
			    (lookup_constraint (insn_constraint)));
	      gcc_assert (insn_class != NO_REGS);
	    }

	  scratch_constraint = insn_data[(int) icode].operand[2].constraint;
	  /* The scratch register's constraint must start with "=&",
	     except for an input reload, where only "=" is necessary,
	     and where it might be beneficial to re-use registers from
	     the input.  */
	  gcc_assert (scratch_constraint[0] == '='
		      && (in_p || scratch_constraint[1] == '&'));
	  scratch_constraint++;
	  if (*scratch_constraint == '&')
	    scratch_constraint++;
	  scratch_class = (reg_class_for_constraint
			   (lookup_constraint (scratch_constraint)));

	  if (reg_class_subset_p (reload_class, insn_class))
	    {
	      gcc_assert (scratch_class == rclass);
	      rclass = NO_REGS;
	    }
	  else
	    rclass = insn_class;

        }
      /* Report the pattern either as the whole reload (icode) or as a
	 tertiary step (t_icode) when a scratch class is still needed.  */
      if (rclass == NO_REGS)
	sri->icode = icode;
      else
	sri->t_icode = icode;
    }
  return rclass;
}
1032
/* By default, if flag_pic is true, then neither local nor global relocs
   should be placed in readonly memory.  The returned value is a bit
   mask: 3 forbids both kinds, 0 forbids neither.  */

int
default_reloc_rw_mask (void)
{
  return flag_pic ? 3 : 0;
}
1041
1042/* By default, do no modification. */
1043tree default_mangle_decl_assembler_name (tree decl ATTRIBUTE_UNUSED,
1044					 tree id)
1045{
1046   return id;
1047}
1048
/* Default to natural alignment for vector types: use the vector
   type's own size (TYPE_SIZE) as its alignment.  */
HOST_WIDE_INT
default_vector_alignment (const_tree type)
{
  return tree_to_shwi (TYPE_SIZE (type));
}
1055
/* Default TARGET_VECTORIZE_VECTOR_ALIGNMENT_REACHABLE hook: natural
   alignment is assumed reachable for non-packed types no larger than
   a pointer.  */

bool
default_builtin_vector_alignment_reachable (const_tree type, bool is_packed)
{
  if (is_packed)
    return false;

  /* Assuming that types whose size is > pointer-size are not guaranteed to be
     naturally aligned.  */
  if (tree_int_cst_compare (TYPE_SIZE (type), bitsize_int (POINTER_SIZE)) > 0)
    return false;

  /* Assuming that types whose size is <= pointer-size
     are naturally aligned.  */
  return true;
}
1071
1072/* By default, assume that a target supports any factor of misalignment
1073   memory access if it supports movmisalign patten.
1074   is_packed is true if the memory access is defined in a packed struct.  */
1075bool
1076default_builtin_support_vector_misalignment (machine_mode mode,
1077					     const_tree type
1078					     ATTRIBUTE_UNUSED,
1079					     int misalignment
1080					     ATTRIBUTE_UNUSED,
1081					     bool is_packed
1082					     ATTRIBUTE_UNUSED)
1083{
1084  if (optab_handler (movmisalign_optab, mode) != CODE_FOR_nothing)
1085    return true;
1086  return false;
1087}
1088
/* By default, only attempt to parallelize bitwise operations, and
   possibly adds/subtracts using bit-twiddling — i.e. the preferred
   SIMD mode is just the integer word mode.  */

machine_mode
default_preferred_simd_mode (machine_mode mode ATTRIBUTE_UNUSED)
{
  return word_mode;
}
1097
/* By default only the size derived from the preferred vector mode
   is tried; returning 0 requests no additional sizes.  */

unsigned int
default_autovectorize_vector_sizes (void)
{
  return 0;
}
1106
/* By default, the cost model accumulates three separate costs (prologue,
   loop body, and epilogue) for a vectorized loop or block.  So allocate an
   array of three unsigned ints, set it to zero, and return its address.
   The caller releases it via the destroy_cost_data hook.  */

void *
default_init_cost (struct loop *loop_info ATTRIBUTE_UNUSED)
{
  unsigned *cost = XNEWVEC (unsigned, 3);
  cost[vect_prologue] = cost[vect_body] = cost[vect_epilogue] = 0;
  return cost;
}
1118
/* By default, the cost model looks up the cost of the given statement
   kind and mode, multiplies it by the occurrence count, accumulates
   it into the cost specified by WHERE, and returns the cost added.
   DATA is the three-element array produced by default_init_cost.  */

unsigned
default_add_stmt_cost (void *data, int count, enum vect_cost_for_stmt kind,
		       struct _stmt_vec_info *stmt_info, int misalign,
		       enum vect_cost_model_location where)
{
  unsigned *cost = (unsigned *) data;
  unsigned retval = 0;

  /* STMT_INFO may be null; then no vectype is available.  */
  tree vectype = stmt_info ? stmt_vectype (stmt_info) : NULL_TREE;
  int stmt_cost = targetm.vectorize.builtin_vectorization_cost (kind, vectype,
								misalign);
   /* Statements in an inner loop relative to the loop being
      vectorized are weighted more heavily.  The value here is
      arbitrary and could potentially be improved with analysis.  */
  if (where == vect_body && stmt_info && stmt_in_inner_loop_p (stmt_info))
    count *= 50;  /* FIXME.  */

  retval = (unsigned) (count * stmt_cost);
  cost[where] += retval;

  return retval;
}
1145
/* By default, the cost model just returns the accumulated costs,
   copying the three counters out of DATA into the supplied slots.  */

void
default_finish_cost (void *data, unsigned *prologue_cost,
		     unsigned *body_cost, unsigned *epilogue_cost)
{
  unsigned *cost = (unsigned *) data;
  *prologue_cost = cost[vect_prologue];
  *body_cost     = cost[vect_body];
  *epilogue_cost = cost[vect_epilogue];
}
1157
/* Free the cost data allocated by default_init_cost.  */

void
default_destroy_cost_data (void *data)
{
  free (data);
}
1165
1166/* Determine whether or not a pointer mode is valid. Assume defaults
1167   of ptr_mode or Pmode - can be overridden.  */
bool
default_valid_pointer_mode (machine_mode mode)
{
  /* By default only the two canonical pointer modes are valid.  */
  return (mode == ptr_mode || mode == Pmode);
}
1173
1174/* Determine whether the memory reference specified by REF may alias
1175   the C libraries errno location.  */
bool
default_ref_may_alias_errno (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  /* The default implementation assumes the errno location is
     a declaration of type int or is always accessed via a
     pointer to int.  We assume that accesses to errno are
     not deliberately obfuscated (even in conforming ways).  */
  /* Rule out unsigned types and anything whose mode differs from
     int's mode: such an access cannot be a plain int errno.  */
  if (TYPE_UNSIGNED (TREE_TYPE (base))
      || TYPE_MODE (TREE_TYPE (base)) != TYPE_MODE (integer_type_node))
    return false;
  /* The default implementation assumes an errno location
     declaration is never defined in the current compilation unit.  */
  if (DECL_P (base)
      && !TREE_STATIC (base))
    return true;
  else if (TREE_CODE (base) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      /* An indirect int access may alias errno unless points-to
	 analysis proves the pointer targets only known local memory.  */
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      return !pi || pi->pt.anything || pi->pt.nonlocal;
    }
  return false;
}
1200
1201/* Return the mode for a pointer to a given ADDRSPACE, defaulting to ptr_mode
1202   for the generic address space only.  */
1203
machine_mode
default_addr_space_pointer_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  /* The default supports only the generic address space; a target with
     named address spaces must override this hook.  */
  gcc_assert (ADDR_SPACE_GENERIC_P (addrspace));
  return ptr_mode;
}
1210
1211/* Return the mode for an address in a given ADDRSPACE, defaulting to Pmode
1212   for the generic address space only.  */
1213
machine_mode
default_addr_space_address_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  /* The default supports only the generic address space; a target with
     named address spaces must override this hook.  */
  gcc_assert (ADDR_SPACE_GENERIC_P (addrspace));
  return Pmode;
}
1220
1221/* Named address space version of valid_pointer_mode.  */
1222
1223bool
1224default_addr_space_valid_pointer_mode (machine_mode mode, addr_space_t as)
1225{
1226  if (!ADDR_SPACE_GENERIC_P (as))
1227    return (mode == targetm.addr_space.pointer_mode (as)
1228	    || mode == targetm.addr_space.address_mode (as));
1229
1230  return targetm.valid_pointer_mode (mode);
1231}
1232
1233/* Some places still assume that all pointer or address modes are the
1234   standard Pmode and ptr_mode.  These optimizations become invalid if
1235   the target actually supports multiple different modes.  For now,
1236   we disable such optimizations on such targets, using this function.  */
1237
1238bool
1239target_default_pointer_address_modes_p (void)
1240{
1241  if (targetm.addr_space.address_mode != default_addr_space_address_mode)
1242    return false;
1243  if (targetm.addr_space.pointer_mode != default_addr_space_pointer_mode)
1244    return false;
1245
1246  return true;
1247}
1248
1249/* Named address space version of legitimate_address_p.  */
1250
bool
default_addr_space_legitimate_address_p (machine_mode mode, rtx mem,
					 bool strict, addr_space_t as)
{
  /* The default only handles the generic space; a target that defines
     named address spaces must override this hook.  */
  if (!ADDR_SPACE_GENERIC_P (as))
    gcc_unreachable ();

  return targetm.legitimate_address_p (mode, mem, strict);
}
1260
1261/* Named address space version of LEGITIMIZE_ADDRESS.  */
1262
1263rtx
1264default_addr_space_legitimize_address (rtx x, rtx oldx,
1265				       machine_mode mode, addr_space_t as)
1266{
1267  if (!ADDR_SPACE_GENERIC_P (as))
1268    return x;
1269
1270  return targetm.legitimize_address (x, oldx, mode);
1271}
1272
1273/* The default hook for determining if one named address space is a subset of
1274   another and to return which address space to use as the common address
1275   space.  */
1276
bool
default_addr_space_subset_p (addr_space_t subset, addr_space_t superset)
{
  /* By default an address space is a subset only of itself.  */
  return (subset == superset);
}
1282
1283/* The default hook for TARGET_ADDR_SPACE_CONVERT. This hook should never be
1284   called for targets with only a generic address space.  */
1285
rtx
default_addr_space_convert (rtx op ATTRIBUTE_UNUSED,
			    tree from_type ATTRIBUTE_UNUSED,
			    tree to_type ATTRIBUTE_UNUSED)
{
  /* With only a generic address space there is nothing to convert
     between, so reaching here indicates a target bug.  */
  gcc_unreachable ();
}
1293
/* By default any hard register may be used as a scratch register.  */

bool
default_hard_regno_scratch_ok (unsigned int regno ATTRIBUTE_UNUSED)
{
  return true;
}
1299
1300/* The default implementation of TARGET_MODE_DEPENDENT_ADDRESS_P.  */
1301
bool
default_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED,
				  addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  /* By default no address is mode-dependent.  */
  return false;
}
1308
/* Default for targets without attribute "target" support: warn and
   reject the attribute.  */

bool
default_target_option_valid_attribute_p (tree ARG_UNUSED (fndecl),
					 tree ARG_UNUSED (name),
					 tree ARG_UNUSED (args),
					 int ARG_UNUSED (flags))
{
  warning (OPT_Wattributes,
	   "target attribute is not supported on this machine");

  return false;
}
1320
/* Default for targets without "#pragma GCC target" support: warn (for a
   push) and reject the pragma.  */

bool
default_target_option_pragma_parse (tree ARG_UNUSED (args),
				    tree ARG_UNUSED (pop_target))
{
  /* If args is NULL the caller is handle_pragma_pop_options ().  In that case,
     emit no warning because "#pragma GCC pop_target" is valid on targets that
     do not have the "target" pragma.  */
  if (args)
    warning (OPT_Wpragmas,
	     "#pragma GCC target is not supported for this machine");

  return false;
}
1334
1335bool
1336default_target_can_inline_p (tree caller, tree callee)
1337{
1338  bool ret = false;
1339  tree callee_opts = DECL_FUNCTION_SPECIFIC_TARGET (callee);
1340  tree caller_opts = DECL_FUNCTION_SPECIFIC_TARGET (caller);
1341
1342  /* If callee has no option attributes, then it is ok to inline */
1343  if (!callee_opts)
1344    ret = true;
1345
1346  /* If caller has no option attributes, but callee does then it is not ok to
1347     inline */
1348  else if (!caller_opts)
1349    ret = false;
1350
1351  /* If both caller and callee have attributes, assume that if the
1352     pointer is different, the two functions have different target
1353     options since build_target_option_node uses a hash table for the
1354     options.  */
1355  else
1356    ret = (callee_opts == caller_opts);
1357
1358  return ret;
1359}
1360
1361#ifndef HAVE_casesi
1362# define HAVE_casesi 0
1363#endif
1364
1365/* If the machine does not have a case insn that compares the bounds,
1366   this means extra overhead for dispatch tables, which raises the
1367   threshold for using them.  */
1368
1369unsigned int
1370default_case_values_threshold (void)
1371{
1372  return (HAVE_casesi ? 4 : 5);
1373}
1374
/* By default, conditional execution is available exactly when the
   target's insn patterns define HAVE_conditional_execution.  */

bool
default_have_conditional_execution (void)
{
#ifdef HAVE_conditional_execution
  return HAVE_conditional_execution;
#else
  return false;
#endif
}
1384
1385/* By default we assume that c99 functions are present at the runtime,
1386   but sincos is not.  */
1387bool
1388default_libc_has_function (enum function_class fn_class)
1389{
1390  if (fn_class == function_c94
1391      || fn_class == function_c99_misc
1392      || fn_class == function_c99_math_complex)
1393    return true;
1394
1395  return false;
1396}
1397
/* For targets using the GNU C library: assume every queried function
   class is available.  */

bool
gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return true;
}
1403
/* For targets whose C library lacks C99 support: assume no queried
   function class is available.  */

bool
no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return false;
}
1409
/* By default there is no transactional-memory builtin for loading or
   storing TYPE; returning NULL_TREE makes the caller fall back.  */

tree
default_builtin_tm_load_store (tree ARG_UNUSED (type))
{
  return NULL_TREE;
}
1415
1416/* Compute cost of moving registers to/from memory.  */
1417
int
default_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
			  reg_class_t rclass ATTRIBUTE_UNUSED,
			  bool in ATTRIBUTE_UNUSED)
{
#ifndef MEMORY_MOVE_COST
    /* No target macro: use a flat base cost of 4 plus whatever secondary
       (reload) cost moving via RCLASS incurs.  */
    return (4 + memory_move_secondary_cost (mode, (enum reg_class) rclass, in));
#else
    /* Defer to the target's legacy MEMORY_MOVE_COST macro.  */
    return MEMORY_MOVE_COST (mode, (enum reg_class) rclass, in);
#endif
}
1429
1430/* Compute cost of moving data from a register of class FROM to one of
1431   TO, using MODE.  */
1432
int
default_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
                            reg_class_t from ATTRIBUTE_UNUSED,
                            reg_class_t to ATTRIBUTE_UNUSED)
{
#ifndef REGISTER_MOVE_COST
  /* No target macro: a register-to-register move costs 2 everywhere.  */
  return 2;
#else
  /* Defer to the target's legacy REGISTER_MOVE_COST macro.  */
  return REGISTER_MOVE_COST (mode, (enum reg_class) from, (enum reg_class) to);
#endif
}
1444
1445/* For hooks which use the MOVE_RATIO macro, this gives the legacy default
1446   behaviour.  SPEED_P is true if we are compiling for speed.  */
1447
unsigned int
get_move_ratio (bool speed_p ATTRIBUTE_UNUSED)
{
  unsigned int move_ratio;
#ifdef MOVE_RATIO
  /* The target supplies its own ratio via the legacy macro.  */
  move_ratio = (unsigned int) MOVE_RATIO (speed_p);
#else
#if defined (HAVE_movmemqi) || defined (HAVE_movmemhi) || defined (HAVE_movmemsi) || defined (HAVE_movmemdi) || defined (HAVE_movmemti)
  /* A movmem pattern exists, so block moves are cheap: keep the
     by-pieces threshold low.  */
  move_ratio = 2;
#else /* No movmem patterns, pick a default.  */
  move_ratio = ((speed_p) ? 15 : 3);
#endif
#endif
  return move_ratio;
}
1463
1464/* Return TRUE if the move_by_pieces/set_by_pieces infrastructure should be
1465   used; return FALSE if the movmem/setmem optab should be expanded, or
1466   a call to memcpy emitted.  */
1467
1468bool
1469default_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT size,
1470					unsigned int alignment,
1471					enum by_pieces_operation op,
1472					bool speed_p)
1473{
1474  unsigned int max_size = 0;
1475  unsigned int ratio = 0;
1476
1477  switch (op)
1478    {
1479      case CLEAR_BY_PIECES:
1480	max_size = STORE_MAX_PIECES;
1481	ratio = CLEAR_RATIO (speed_p);
1482	break;
1483      case MOVE_BY_PIECES:
1484	max_size = MOVE_MAX_PIECES;
1485	ratio = get_move_ratio (speed_p);
1486	break;
1487      case SET_BY_PIECES:
1488	max_size = STORE_MAX_PIECES;
1489	ratio = SET_RATIO (speed_p);
1490	break;
1491      case STORE_BY_PIECES:
1492	max_size = STORE_MAX_PIECES;
1493	ratio = get_move_ratio (speed_p);
1494	break;
1495    }
1496
1497  return move_by_pieces_ninsns (size, alignment, max_size + 1) < ratio;
1498}
1499
/* Emit the profiling call before the prologue exactly when the target
   defines PROFILE_BEFORE_PROLOGUE.  */

bool
default_profile_before_prologue (void)
{
#ifdef PROFILE_BEFORE_PROLOGUE
  return true;
#else
  return false;
#endif
}
1509
1510/* The default implementation of TARGET_PREFERRED_RELOAD_CLASS.  */
1511
reg_class_t
default_preferred_reload_class (rtx x ATTRIBUTE_UNUSED,
			        reg_class_t rclass)
{
#ifdef PREFERRED_RELOAD_CLASS
  /* Defer to the target's legacy macro when it exists.  */
  return (reg_class_t) PREFERRED_RELOAD_CLASS (x, (enum reg_class) rclass);
#else
  /* Otherwise the requested class is already the preferred one.  */
  return rclass;
#endif
}
1522
1523/* The default implementation of TARGET_OUTPUT_PREFERRED_RELOAD_CLASS.  */
1524
reg_class_t
default_preferred_output_reload_class (rtx x ATTRIBUTE_UNUSED,
				       reg_class_t rclass)
{
  /* By default output reloads have no class restriction beyond the
     one the caller asked for.  */
  return rclass;
}
1531
1532/* The default implementation of TARGET_PREFERRED_RENAME_CLASS.  */
/* The default implementation of TARGET_PREFERRED_RENAME_CLASS.
   NO_REGS means "no preference" — register renaming keeps RCLASS.  */
reg_class_t
default_preferred_rename_class (reg_class_t rclass ATTRIBUTE_UNUSED)
{
  return NO_REGS;
}
1538
1539/* The default implementation of TARGET_CLASS_LIKELY_SPILLED_P.  */
1540
bool
default_class_likely_spilled_p (reg_class_t rclass)
{
  /* A class with a single register is trivially contended, hence
     likely to need spilling.  */
  return (reg_class_size[(int) rclass] == 1);
}
1546
1547/* The default implementation of TARGET_CLASS_MAX_NREGS.  */
1548
unsigned char
default_class_max_nregs (reg_class_t rclass ATTRIBUTE_UNUSED,
			 machine_mode mode ATTRIBUTE_UNUSED)
{
#ifdef CLASS_MAX_NREGS
  /* Defer to the target's legacy macro when it exists.  */
  return (unsigned char) CLASS_MAX_NREGS ((enum reg_class) rclass, mode);
#else
  /* Otherwise: number of words needed to hold a value of MODE,
     rounded up.  */
  return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
#endif
}
1559
1560/* Determine the debugging unwind mechanism for the target.  */
1561
enum unwind_info_type
default_debug_unwind_info (void)
{
  /* If the target wants to force the use of dwarf2 unwind info, let it.  */
  /* ??? Change all users to the hook, then poison this.  */
#ifdef DWARF2_FRAME_INFO
  if (DWARF2_FRAME_INFO)
    return UI_DWARF2;
#endif

  /* Otherwise, only turn it on if dwarf2 debugging is enabled.  */
#ifdef DWARF2_DEBUGGING_INFO
  if (write_symbols == DWARF2_DEBUG || write_symbols == VMS_AND_DWARF2_DEBUG)
    return UI_DWARF2;
#endif

  /* No frame information for debugging purposes.  */
  return UI_NONE;
}
1580
1581/* Determine the correct mode for a Dwarf frame register that represents
1582   register REGNO.  */
1583
machine_mode
default_dwarf_frame_reg_mode (int regno)
{
  machine_mode save_mode = reg_raw_mode[regno];

  /* If calls clobber part of the register in its raw mode, record only
     the single-register mode that survives a call.  */
  if (HARD_REGNO_CALL_PART_CLOBBERED (regno, save_mode))
    save_mode = choose_hard_reg_mode (regno, 1, true);
  return save_mode;
}
1593
1594/* To be used by targets where reg_raw_mode doesn't return the right
1595   mode for registers used in apply_builtin_return and apply_builtin_arg.  */
1596
machine_mode
default_get_reg_raw_mode (int regno)
{
  /* By default the raw mode table is authoritative.  */
  return reg_raw_mode[regno];
}
1602
1603/* Return true if a leaf function should stay leaf even with profiling
1604   enabled.  */
1605
bool
default_keep_leaf_when_profiled ()
{
  /* By default, profiling code may require a frame, so leaf functions
     lose their leaf status.  */
  return false;
}
1611
1612/* Return true if the state of option OPTION should be stored in PCH files
1613   and checked by default_pch_valid_p.  Store the option's current state
1614   in STATE if so.  */
1615
1616static inline bool
1617option_affects_pch_p (int option, struct cl_option_state *state)
1618{
1619  if ((cl_options[option].flags & CL_TARGET) == 0)
1620    return false;
1621  if ((cl_options[option].flags & CL_PCH_IGNORE) != 0)
1622    return false;
1623  if (option_flag_var (option, &global_options) == &target_flags)
1624    if (targetm.check_pch_target_flags)
1625      return false;
1626  return get_option_state (&global_options, option, state);
1627}
1628
1629/* Default version of get_pch_validity.
1630   By default, every flag difference is fatal; that will be mostly right for
1631   most targets, but completely right for very few.  */
1632
void *
default_get_pch_validity (size_t *sz)
{
  struct cl_option_state state;
  size_t i;
  char *result, *r;

  /* Layout of the validity blob (consumed by default_pch_valid_p):
     byte 0 = flag_pic, byte 1 = flag_pie, then optionally target_flags,
     then the state of every PCH-relevant option.  First compute the
     total size.  */
  *sz = 2;
  if (targetm.check_pch_target_flags)
    *sz += sizeof (target_flags);
  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      *sz += state.size;

  result = r = XNEWVEC (char, *sz);
  r[0] = flag_pic;
  r[1] = flag_pie;
  r += 2;
  if (targetm.check_pch_target_flags)
    {
      memcpy (r, &target_flags, sizeof (target_flags));
      r += sizeof (target_flags);
    }

  /* Serialize each relevant option's current state in option order;
     default_pch_valid_p walks the blob in the same order.  */
  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      {
	memcpy (r, state.data, state.size);
	r += state.size;
      }

  return result;
}
1666
1667/* Return a message which says that a PCH file was created with a different
1668   setting of OPTION.  */
1669
static const char *
pch_option_mismatch (const char *option)
{
  /* xasprintf allocates; callers return the message and the process
     treats it as a diagnostic, so the allocation is not freed here.  */
  return xasprintf (_("created and used with differing settings of '%s'"),
		    option);
}
1676
1677/* Default version of pch_valid_p.  */
1678
const char *
default_pch_valid_p (const void *data_p, size_t len)
{
  struct cl_option_state state;
  const char *data = (const char *)data_p;
  size_t i;

  /* DATA was produced by default_get_pch_validity and is walked here
     in exactly the same order: flag_pic, flag_pie, optional
     target_flags, then per-option states.  Returns NULL when valid,
     or a message describing the first mismatch.  */

  /* -fpic and -fpie also usually make a PCH invalid.  */
  if (data[0] != flag_pic)
    return _("created and used with different settings of -fpic");
  if (data[1] != flag_pie)
    return _("created and used with different settings of -fpie");
  data += 2;

  /* Check target_flags.  */
  if (targetm.check_pch_target_flags)
    {
      int tf;
      const char *r;

      memcpy (&tf, data, sizeof (target_flags));
      data += sizeof (target_flags);
      len -= sizeof (target_flags);
      r = targetm.check_pch_target_flags (tf);
      if (r != NULL)
	return r;
    }

  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      {
	if (memcmp (data, state.data, state.size) != 0)
	  return pch_option_mismatch (cl_options[i].opt_text);
	data += state.size;
	len -= state.size;
      }

  return NULL;
}
1718
1719/* Default version of cstore_mode.  */
1720
machine_mode
default_cstore_mode (enum insn_code icode)
{
  /* By default the result mode is whatever the cstore pattern declares
     for its output operand.  */
  return insn_data[(int) icode].operand[0].mode;
}
1726
1727/* Default version of member_type_forces_blk.  */
1728
bool
default_member_type_forces_blk (const_tree, machine_mode)
{
  /* By default no member type forces its containing record into
     BLKmode.  */
  return false;
}
1734
/* Pointer-bounds (chkp) hook stub: only targets with bounds support
   (which override this hook) should ever reach it.  */

rtx
default_load_bounds_for_arg (rtx addr ATTRIBUTE_UNUSED,
			     rtx ptr ATTRIBUTE_UNUSED,
			     rtx bnd ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
1742
/* Pointer-bounds (chkp) hook stub: only targets with bounds support
   (which override this hook) should ever reach it.  */

void
default_store_bounds_for_arg (rtx val ATTRIBUTE_UNUSED,
			      rtx addr ATTRIBUTE_UNUSED,
			      rtx bounds ATTRIBUTE_UNUSED,
			      rtx to ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
1751
/* Pointer-bounds (chkp) hook stub: only targets with bounds support
   (which override this hook) should ever reach it.  */

rtx
default_load_returned_bounds (rtx slot ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
1757
/* Pointer-bounds (chkp) hook stub: only targets with bounds support
   (which override this hook) should ever reach it.  */

void
default_store_returned_bounds (rtx slot ATTRIBUTE_UNUSED,
			       rtx bounds ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
1764
1765/* Default version of canonicalize_comparison.  */
1766
void
default_canonicalize_comparison (int *, rtx *, rtx *, bool)
{
  /* By default no target-specific comparison canonicalization is
     performed; the operands are left untouched.  */
}
1771
1772/* Default implementation of TARGET_ATOMIC_ASSIGN_EXPAND_FENV.  */
1773
void
default_atomic_assign_expand_fenv (tree *, tree *, tree *)
{
  /* By default the target emits no floating-point environment code
     around atomic compound assignments.  */
}
1778
1779#ifndef PAD_VARARGS_DOWN
1780#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
1781#endif
1782
1783/* Build an indirect-ref expression over the given TREE, which represents a
1784   piece of a va_arg() expansion.  */
1785tree
1786build_va_arg_indirect_ref (tree addr)
1787{
1788  addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
1789  return addr;
1790}
1791
1792/* The "standard" implementation of va_arg: read the value from the
1793   current (padded) address and increment by the (padded) size.  */
1794
tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			  gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

#ifdef ARGS_GROW_DOWNWARD
  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  gcc_unreachable ();
#endif

  /* Arguments passed by reference: read a pointer from the va_list and
     add an extra dereference at the end.  */
  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !integer_zerop (TYPE_SIZE (type)))
    {
      /* valist_tmp = (valist_tmp + boundary - 1) & -boundary.  */
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
      gimplify_and_add (t, pre_p);

      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
			       valist_tmp,
			       build_int_cst (TREE_TYPE (valist), -boundary)));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      TYPE_ALIGN (type) = boundary;
    }

  /* Compute the rounded size of the type.  */
  type_size = size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
		       rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build_pointer_plus (addr, t);
    }

  /* Compute new value for AP.  */
  t = fold_build_pointer_plus (valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  /* For by-reference arguments ADDR holds a pointer to a pointer;
     strip the extra level before the final dereference.  */
  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
1888
/* Default pointer-bounds type: twice the precision of size_t, laid out
   in whatever mode the target's chkp_bound_mode hook reports.  */

tree
default_chkp_bound_type (void)
{
  tree res = make_node (POINTER_BOUNDS_TYPE);
  TYPE_PRECISION (res) = TYPE_PRECISION (size_type_node) * 2;
  TYPE_NAME (res) = get_identifier ("__bounds_type");
  SET_TYPE_MODE (res, targetm.chkp_bound_mode ());
  layout_type (res);
  return res;
}
1899
enum machine_mode
default_chkp_bound_mode (void)
{
  /* VOIDmode: the target has no mode for pointer bounds.  */
  return VOIDmode;
}
1905
tree
default_builtin_chkp_function (unsigned int fcode ATTRIBUTE_UNUSED)
{
  /* No target-specific bounds-checking builtins by default.  */
  return NULL_TREE;
}
1911
/* Pointer-bounds (chkp) hook stub: only targets with bounds support
   (which override this hook) should ever reach it.  */

rtx
default_chkp_function_value_bounds (const_tree ret_type ATTRIBUTE_UNUSED,
				    const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
				    bool outgoing ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
1919
tree
default_chkp_make_bounds_constant (HOST_WIDE_INT lb ATTRIBUTE_UNUSED,
				   HOST_WIDE_INT ub ATTRIBUTE_UNUSED)
{
  /* No constant-bounds representation by default.  */
  return NULL_TREE;
}
1926
int
default_chkp_initialize_bounds (tree var ATTRIBUTE_UNUSED,
				tree lb ATTRIBUTE_UNUSED,
				tree ub ATTRIBUTE_UNUSED,
				tree *stmts ATTRIBUTE_UNUSED)
{
  /* By default no initialization statements are generated.  */
  return 0;
}
1935
void
default_setup_incoming_vararg_bounds (cumulative_args_t ca ATTRIBUTE_UNUSED,
				      enum machine_mode mode ATTRIBUTE_UNUSED,
				      tree type ATTRIBUTE_UNUSED,
				      int *pretend_arg_size ATTRIBUTE_UNUSED,
				      int second_time ATTRIBUTE_UNUSED)
{
  /* By default incoming vararg bounds need no setup.  */
}
1944
1945/* An implementation of TARGET_CAN_USE_DOLOOP_P for targets that do
1946   not support nested low-overhead loops.  */
1947
bool
can_use_doloop_if_innermost (const widest_int &, const widest_int &,
			     unsigned int loop_depth, bool)
{
  /* Allow the low-overhead loop only for the innermost loop
     (depth 1); nested low-overhead loops are unsupported.  */
  return loop_depth == 1;
}
1954
1955#include "gt-targhooks.h"
1956