1/* Subroutines for insn-output.c for ATMEL AVR micro controllers
2   Copyright (C) 1998-2015 Free Software Foundation, Inc.
3   Contributed by Denis Chertykov (chertykov@gmail.com)
4
5   This file is part of GCC.
6
7   GCC is free software; you can redistribute it and/or modify
8   it under the terms of the GNU General Public License as published by
9   the Free Software Foundation; either version 3, or (at your option)
10   any later version.
11
12   GCC is distributed in the hope that it will be useful,
13   but WITHOUT ANY WARRANTY; without even the implied warranty of
14   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15   GNU General Public License for more details.
16
17   You should have received a copy of the GNU General Public License
18   along with GCC; see the file COPYING3.  If not see
19   <http://www.gnu.org/licenses/>.  */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "rtl.h"
26#include "regs.h"
27#include "hard-reg-set.h"
28#include "insn-config.h"
29#include "conditions.h"
30#include "insn-attr.h"
31#include "insn-codes.h"
32#include "flags.h"
33#include "reload.h"
34#include "hash-set.h"
35#include "machmode.h"
36#include "vec.h"
37#include "double-int.h"
38#include "input.h"
39#include "alias.h"
40#include "symtab.h"
41#include "wide-int.h"
42#include "inchash.h"
43#include "tree.h"
44#include "fold-const.h"
45#include "varasm.h"
46#include "print-tree.h"
47#include "calls.h"
48#include "stor-layout.h"
49#include "stringpool.h"
50#include "output.h"
51#include "hashtab.h"
52#include "function.h"
53#include "statistics.h"
54#include "real.h"
55#include "fixed-value.h"
56#include "expmed.h"
57#include "dojump.h"
58#include "explow.h"
59#include "emit-rtl.h"
60#include "stmt.h"
61#include "expr.h"
62#include "c-family/c-common.h"
63#include "diagnostic-core.h"
64#include "obstack.h"
65#include "recog.h"
66#include "optabs.h"
67#include "ggc.h"
68#include "langhooks.h"
69#include "tm_p.h"
70#include "target.h"
71#include "target-def.h"
72#include "params.h"
73#include "dominance.h"
74#include "cfg.h"
75#include "cfgrtl.h"
76#include "cfganal.h"
77#include "lcm.h"
78#include "cfgbuild.h"
79#include "cfgcleanup.h"
80#include "predict.h"
81#include "basic-block.h"
82#include "df.h"
83#include "builtins.h"
84#include "context.h"
85#include "tree-pass.h"
86
87/* Maximal allowed offset for an address in the LD command */
88#define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
89
90/* Return true if STR starts with PREFIX and false, otherwise.  */
91#define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
92
93/* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
94   address space where data is to be located.
95   As the only non-generic address spaces are all located in flash,
96   this can be used to test if data shall go into some .progmem* section.
97   This must be the rightmost field of machine dependent section flags.  */
98#define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
99
100/* Similar 4-bit region for SYMBOL_REF_FLAGS.  */
101#define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
102
103/* Similar 4-bit region in SYMBOL_REF_FLAGS:
104   Set address-space AS in SYMBOL_REF_FLAGS of SYM  */
105#define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS)                       \
106  do {                                                          \
107    SYMBOL_REF_FLAGS (sym) &= ~AVR_SYMBOL_FLAG_PROGMEM;         \
108    SYMBOL_REF_FLAGS (sym) |= (AS) * SYMBOL_FLAG_MACH_DEP;      \
109  } while (0)
110
111/* Read address-space from SYMBOL_REF_FLAGS of SYM  */
112#define AVR_SYMBOL_GET_ADDR_SPACE(SYM)                          \
113  ((SYMBOL_REF_FLAGS (sym) & AVR_SYMBOL_FLAG_PROGMEM)           \
114   / SYMBOL_FLAG_MACH_DEP)
115
116#define TINY_ADIW(REG1, REG2, I)                                \
117    "subi " #REG1 ",lo8(-(" #I "))" CR_TAB                      \
118    "sbci " #REG2 ",hi8(-(" #I "))"
119
120#define TINY_SBIW(REG1, REG2, I)                                \
121    "subi " #REG1 ",lo8((" #I "))" CR_TAB                       \
122    "sbci " #REG2 ",hi8((" #I "))"
123
124#define AVR_TMP_REGNO (AVR_TINY ? TMP_REGNO_TINY : TMP_REGNO)
125#define AVR_ZERO_REGNO (AVR_TINY ? ZERO_REGNO_TINY : ZERO_REGNO)
126
127/* Known address spaces.  The order must be the same as in the respective
128   enum from avr.h (or designated initialized must be used).  */
129const avr_addrspace_t avr_addrspace[ADDR_SPACE_COUNT] =
130{
131  { ADDR_SPACE_RAM,  0, 2, "", 0, NULL },
132  { ADDR_SPACE_FLASH,  1, 2, "__flash",   0, ".progmem.data" },
133  { ADDR_SPACE_FLASH1, 1, 2, "__flash1",  1, ".progmem1.data" },
134  { ADDR_SPACE_FLASH2, 1, 2, "__flash2",  2, ".progmem2.data" },
135  { ADDR_SPACE_FLASH3, 1, 2, "__flash3",  3, ".progmem3.data" },
136  { ADDR_SPACE_FLASH4, 1, 2, "__flash4",  4, ".progmem4.data" },
137  { ADDR_SPACE_FLASH5, 1, 2, "__flash5",  5, ".progmem5.data" },
138  { ADDR_SPACE_MEMX, 1, 3, "__memx",  0, ".progmemx.data" },
139};
140
141
142/* Holding RAM addresses of some SFRs used by the compiler and that
143   are unique over all devices in an architecture like 'avr4'.  */
144
145typedef struct
146{
147  /* SREG: The processor status */
148  int sreg;
149
150  /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
151  int ccp;
152  int rampd;
153  int rampx;
154  int rampy;
155
156  /* RAMPZ: The high byte of 24-bit address used with ELPM */
157  int rampz;
158
159  /* SP: The stack pointer and its low and high byte */
160  int sp_l;
161  int sp_h;
162} avr_addr_t;
163
164static avr_addr_t avr_addr;
165
166
167/* Prototypes for local helper functions.  */
168
169static const char* out_movqi_r_mr (rtx_insn *, rtx[], int*);
170static const char* out_movhi_r_mr (rtx_insn *, rtx[], int*);
171static const char* out_movsi_r_mr (rtx_insn *, rtx[], int*);
172static const char* out_movqi_mr_r (rtx_insn *, rtx[], int*);
173static const char* out_movhi_mr_r (rtx_insn *, rtx[], int*);
174static const char* out_movsi_mr_r (rtx_insn *, rtx[], int*);
175
176static int get_sequence_length (rtx_insn *insns);
177static int sequent_regs_live (void);
178static const char *ptrreg_to_str (int);
179static const char *cond_string (enum rtx_code);
180static int avr_num_arg_regs (machine_mode, const_tree);
181static int avr_operand_rtx_cost (rtx, machine_mode, enum rtx_code,
182                                 int, bool);
183static void output_reload_in_const (rtx*, rtx, int*, bool);
184static struct machine_function * avr_init_machine_status (void);
185
186
187/* Prototypes for hook implementors if needed before their implementation.  */
188
189static bool avr_rtx_costs (rtx, int, int, int, int*, bool);
190
191
192/* Allocate registers from r25 to r8 for parameters for function calls.  */
193#define FIRST_CUM_REG 26
194
195/* Last call saved register */
196#define LAST_CALLEE_SAVED_REG (AVR_TINY ? 19 : 17)
197
198/* Implicit target register of LPM instruction (R0) */
199extern GTY(()) rtx lpm_reg_rtx;
200rtx lpm_reg_rtx;
201
202/* (Implicit) address register of LPM instruction (R31:R30 = Z) */
203extern GTY(()) rtx lpm_addr_reg_rtx;
204rtx lpm_addr_reg_rtx;
205
206/* Temporary register RTX (reg:QI TMP_REGNO) */
207extern GTY(()) rtx tmp_reg_rtx;
208rtx tmp_reg_rtx;
209
210/* Zeroed register RTX (reg:QI ZERO_REGNO) */
211extern GTY(()) rtx zero_reg_rtx;
212rtx zero_reg_rtx;
213
214/* RTXs for all general purpose registers as QImode */
215extern GTY(()) rtx all_regs_rtx[32];
216rtx all_regs_rtx[32];
217
218/* SREG, the processor status */
219extern GTY(()) rtx sreg_rtx;
220rtx sreg_rtx;
221
222/* RAMP* special function registers */
223extern GTY(()) rtx rampd_rtx;
224extern GTY(()) rtx rampx_rtx;
225extern GTY(()) rtx rampy_rtx;
226extern GTY(()) rtx rampz_rtx;
227rtx rampd_rtx;
228rtx rampx_rtx;
229rtx rampy_rtx;
230rtx rampz_rtx;
231
232/* RTX containing the strings "" and "e", respectively */
233static GTY(()) rtx xstring_empty;
234static GTY(()) rtx xstring_e;
235
236/* Current architecture.  */
237const avr_arch_t *avr_arch;
238
239/* Section to put switch tables in.  */
240static GTY(()) section *progmem_swtable_section;
241
242/* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
243   or to address space __flash* or __memx.  Only used as singletons inside
244   avr_asm_select_section, but it must not be local there because of GTY.  */
245static GTY(()) section *progmem_section[ADDR_SPACE_COUNT];
246
247/* Condition for insns/expanders from avr-dimode.md.  */
248bool avr_have_dimode = true;
249
250/* To track if code will use .bss and/or .data.  */
251bool avr_need_clear_bss_p = false;
252bool avr_need_copy_data_p = false;
253
254
255/* Transform UP into lowercase and write the result to LO.
256   You must provide enough space for LO.  Return LO.  */
257
258static char*
259avr_tolower (char *lo, const char *up)
260{
261  char *lo0 = lo;
262
263  for (; *up; up++, lo++)
264    *lo = TOLOWER (*up);
265
266  *lo = '\0';
267
268  return lo0;
269}
270
271
272/* Custom function to count number of set bits.  */
273
274static inline int
275avr_popcount (unsigned int val)
276{
277  int pop = 0;
278
279  while (val)
280    {
281      val &= val-1;
282      pop++;
283    }
284
285  return pop;
286}
287
288
289/* Constraint helper function.  XVAL is a CONST_INT or a CONST_DOUBLE.
290   Return true if the least significant N_BYTES bytes of XVAL all have a
291   popcount in POP_MASK and false, otherwise.  POP_MASK represents a subset
292   of integers which contains an integer N iff bit N of POP_MASK is set.  */
293
294bool
295avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
296{
297  int i;
298
299  machine_mode mode = GET_MODE (xval);
300
301  if (VOIDmode == mode)
302    mode = SImode;
303
304  for (i = 0; i < n_bytes; i++)
305    {
306      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
307      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
308
309      if (0 == (pop_mask & (1 << avr_popcount (val8))))
310        return false;
311    }
312
313  return true;
314}
315
316
317/* Access some RTX as INT_MODE.  If X is a CONST_FIXED we can get
318   the bit representation of X by "casting" it to CONST_INT.  */
319
320rtx
321avr_to_int_mode (rtx x)
322{
323  machine_mode mode = GET_MODE (x);
324
325  return VOIDmode == mode
326    ? x
327    : simplify_gen_subreg (int_mode_for_mode (mode), x, mode, 0);
328}
329
330
331static const pass_data avr_pass_data_recompute_notes =
332{
333  RTL_PASS,      // type
334  "",            // name (will be patched)
335  OPTGROUP_NONE, // optinfo_flags
336  TV_DF_SCAN,    // tv_id
337  0,             // properties_required
338  0,             // properties_provided
339  0,             // properties_destroyed
340  0,             // todo_flags_start
341  TODO_df_finish | TODO_df_verify // todo_flags_finish
342};
343
344
345class avr_pass_recompute_notes : public rtl_opt_pass
346{
347public:
348  avr_pass_recompute_notes (gcc::context *ctxt, const char *name)
349    : rtl_opt_pass (avr_pass_data_recompute_notes, ctxt)
350  {
351    this->name = name;
352  }
353
354  virtual unsigned int execute (function*)
355  {
356    df_note_add_problem ();
357    df_analyze ();
358
359    return 0;
360  }
361}; // avr_pass_recompute_notes
362
363
364static void
365avr_register_passes (void)
366{
367  /* This avr-specific pass (re)computes insn notes, in particular REG_DEAD
368     notes which are used by `avr.c::reg_unused_after' and branch offset
369     computations.  These notes must be correct, i.e. there must be no
370     dangling REG_DEAD notes; otherwise wrong code might result, cf. PR64331.
371
372     DF needs (correct) CFG, hence right before free_cfg is the last
373     opportunity to rectify notes.  */
374
375  register_pass (new avr_pass_recompute_notes (g, "avr-notes-free-cfg"),
376                 PASS_POS_INSERT_BEFORE, "*free_cfg", 1);
377}
378
379
380/* Set `avr_arch' as specified by `-mmcu='.
381   Return true on success.  */
382
383static bool
384avr_set_core_architecture (void)
385{
386  /* Search for mcu core architecture.  */
387
388  if (!avr_mmcu)
389    avr_mmcu = AVR_MMCU_DEFAULT;
390
391  avr_arch = &avr_arch_types[0];
392
393  for (const avr_mcu_t *mcu = avr_mcu_types; ; mcu++)
394    {
395      if (NULL == mcu->name)
396        {
397          /* Reached the end of `avr_mcu_types'.  This should actually never
398             happen as options are provided by device-specs.  It could be a
399             typo in a device-specs or calling the compiler proper directly
400             with -mmcu=<device>. */
401
402          error ("unknown core architecture %qs specified with %qs",
403                 avr_mmcu, "-mmcu=");
404          avr_inform_core_architectures ();
405          break;
406        }
407      else if (0 == strcmp (mcu->name, avr_mmcu)
408               // Is this a proper architecture ?
409               && NULL == mcu->macro)
410        {
411          avr_arch = &avr_arch_types[mcu->arch_id];
412          if (avr_n_flash < 0)
413            avr_n_flash = mcu->n_flash;
414
415          return true;
416        }
417    }
418
419  return false;
420}
421
422
423/* Implement `TARGET_OPTION_OVERRIDE'.  */
424
425static void
426avr_option_override (void)
427{
428  /* Disable -fdelete-null-pointer-checks option for AVR target.
429     This option compiler assumes that dereferencing of a null pointer
430     would halt the program.  For AVR this assumption is not true and
431     programs can safely dereference null pointers.  Changes made by this
432     option may not work properly for AVR.  So disable this option. */
433
434  flag_delete_null_pointer_checks = 0;
435
436  /* caller-save.c looks for call-clobbered hard registers that are assigned
437     to pseudos that cross calls and tries so save-restore them around calls
438     in order to reduce the number of stack slots needed.
439
440     This might lead to situations where reload is no more able to cope
441     with the challenge of AVR's very few address registers and fails to
442     perform the requested spills.  */
443
444  if (avr_strict_X)
445    flag_caller_saves = 0;
446
447  /* Unwind tables currently require a frame pointer for correctness,
448     see toplev.c:process_options().  */
449
450  if ((flag_unwind_tables
451       || flag_non_call_exceptions
452       || flag_asynchronous_unwind_tables)
453      && !ACCUMULATE_OUTGOING_ARGS)
454    {
455      flag_omit_frame_pointer = 0;
456    }
457
458  if (flag_pic == 1)
459    warning (OPT_fpic, "-fpic is not supported");
460  if (flag_pic == 2)
461    warning (OPT_fPIC, "-fPIC is not supported");
462  if (flag_pie == 1)
463    warning (OPT_fpie, "-fpie is not supported");
464  if (flag_pie == 2)
465    warning (OPT_fPIE, "-fPIE is not supported");
466
467  if (!avr_set_core_architecture())
468    return;
469
470  /* RAM addresses of some SFRs common to all devices in respective arch. */
471
472  /* SREG: Status Register containing flags like I (global IRQ) */
473  avr_addr.sreg = 0x3F + avr_arch->sfr_offset;
474
475  /* RAMPZ: Address' high part when loading via ELPM */
476  avr_addr.rampz = 0x3B + avr_arch->sfr_offset;
477
478  avr_addr.rampy = 0x3A + avr_arch->sfr_offset;
479  avr_addr.rampx = 0x39 + avr_arch->sfr_offset;
480  avr_addr.rampd = 0x38 + avr_arch->sfr_offset;
481  avr_addr.ccp = (AVR_TINY ? 0x3C : 0x34) + avr_arch->sfr_offset;
482
483  /* SP: Stack Pointer (SP_H:SP_L) */
484  avr_addr.sp_l = 0x3D + avr_arch->sfr_offset;
485  avr_addr.sp_h = avr_addr.sp_l + 1;
486
487  init_machine_status = avr_init_machine_status;
488
489  avr_log_set_avr_log();
490
491  /* Register some avr-specific pass(es).  There is no canonical place for
492     pass registration.  This function is convenient.  */
493
494  avr_register_passes ();
495}
496
497/* Function to set up the backend function structure.  */
498
499static struct machine_function *
500avr_init_machine_status (void)
501{
502  return ggc_cleared_alloc<machine_function> ();
503}
504
505
506/* Implement `INIT_EXPANDERS'.  */
507/* The function works like a singleton.  */
508
509void
510avr_init_expanders (void)
511{
512  int regno;
513
514  for (regno = 0; regno < 32; regno ++)
515    all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);
516
517  lpm_reg_rtx  = all_regs_rtx[LPM_REGNO];
518  tmp_reg_rtx  = all_regs_rtx[AVR_TMP_REGNO];
519  zero_reg_rtx = all_regs_rtx[AVR_ZERO_REGNO];
520
521  lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);
522
523  sreg_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg));
524  rampd_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampd));
525  rampx_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampx));
526  rampy_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampy));
527  rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampz));
528
529  xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
530  xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
531
532  /* TINY core does not have regs r10-r16, but avr-dimode.md expects them
533     to be present */
534  if (AVR_TINY)
535    avr_have_dimode = false;
536}
537
538
539/* Implement `REGNO_REG_CLASS'.  */
540/* Return register class for register R.  */
541
542enum reg_class
543avr_regno_reg_class (int r)
544{
545  static const enum reg_class reg_class_tab[] =
546    {
547      R0_REG,
548      /* r1 - r15 */
549      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
550      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
551      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
552      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
553      /* r16 - r23 */
554      SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
555      SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
556      /* r24, r25 */
557      ADDW_REGS, ADDW_REGS,
558      /* X: r26, 27 */
559      POINTER_X_REGS, POINTER_X_REGS,
560      /* Y: r28, r29 */
561      POINTER_Y_REGS, POINTER_Y_REGS,
562      /* Z: r30, r31 */
563      POINTER_Z_REGS, POINTER_Z_REGS,
564      /* SP: SPL, SPH */
565      STACK_REG, STACK_REG
566    };
567
568  if (r <= 33)
569    return reg_class_tab[r];
570
571  return ALL_REGS;
572}
573
574
575/* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'.  */
576
577static bool
578avr_scalar_mode_supported_p (machine_mode mode)
579{
580  if (ALL_FIXED_POINT_MODE_P (mode))
581    return true;
582
583  if (PSImode == mode)
584    return true;
585
586  return default_scalar_mode_supported_p (mode);
587}
588
589
590/* Return TRUE if DECL is a VAR_DECL located in flash and FALSE, otherwise.  */
591
592static bool
593avr_decl_flash_p (tree decl)
594{
595  if (TREE_CODE (decl) != VAR_DECL
596      || TREE_TYPE (decl) == error_mark_node)
597    {
598      return false;
599    }
600
601  return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
602}
603
604
605/* Return TRUE if DECL is a VAR_DECL located in the 24-bit flash
606   address space and FALSE, otherwise.  */
607
608static bool
609avr_decl_memx_p (tree decl)
610{
611  if (TREE_CODE (decl) != VAR_DECL
612      || TREE_TYPE (decl) == error_mark_node)
613    {
614      return false;
615    }
616
617  return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
618}
619
620
621/* Return TRUE if X is a MEM rtx located in flash and FALSE, otherwise.  */
622
623bool
624avr_mem_flash_p (rtx x)
625{
626  return (MEM_P (x)
627          && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
628}
629
630
631/* Return TRUE if X is a MEM rtx located in the 24-bit flash
632   address space and FALSE, otherwise.  */
633
634bool
635avr_mem_memx_p (rtx x)
636{
637  return (MEM_P (x)
638          && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
639}
640
641
642/* A helper for the subsequent function attribute used to dig for
643   attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
644
645static inline int
646avr_lookup_function_attribute1 (const_tree func, const char *name)
647{
648  if (FUNCTION_DECL == TREE_CODE (func))
649    {
650      if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
651        {
652          return true;
653        }
654
655      func = TREE_TYPE (func);
656    }
657
658  gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
659              || TREE_CODE (func) == METHOD_TYPE);
660
661  return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
662}
663
664/* Return nonzero if FUNC is a naked function.  */
665
666static int
667avr_naked_function_p (tree func)
668{
669  return avr_lookup_function_attribute1 (func, "naked");
670}
671
672/* Return nonzero if FUNC is an interrupt function as specified
673   by the "interrupt" attribute.  */
674
675static int
676avr_interrupt_function_p (tree func)
677{
678  return avr_lookup_function_attribute1 (func, "interrupt");
679}
680
681/* Return nonzero if FUNC is a signal function as specified
682   by the "signal" attribute.  */
683
684static int
685avr_signal_function_p (tree func)
686{
687  return avr_lookup_function_attribute1 (func, "signal");
688}
689
690/* Return nonzero if FUNC is an OS_task function.  */
691
692static int
693avr_OS_task_function_p (tree func)
694{
695  return avr_lookup_function_attribute1 (func, "OS_task");
696}
697
698/* Return nonzero if FUNC is an OS_main function.  */
699
700static int
701avr_OS_main_function_p (tree func)
702{
703  return avr_lookup_function_attribute1 (func, "OS_main");
704}
705
706
707/* Implement `TARGET_SET_CURRENT_FUNCTION'.  */
708/* Sanity cheching for above function attributes.  */
709
710static void
711avr_set_current_function (tree decl)
712{
713  location_t loc;
714  const char *isr;
715
716  if (decl == NULL_TREE
717      || current_function_decl == NULL_TREE
718      || current_function_decl == error_mark_node
719      || ! cfun->machine
720      || cfun->machine->attributes_checked_p)
721    return;
722
723  loc = DECL_SOURCE_LOCATION (decl);
724
725  cfun->machine->is_naked = avr_naked_function_p (decl);
726  cfun->machine->is_signal = avr_signal_function_p (decl);
727  cfun->machine->is_interrupt = avr_interrupt_function_p (decl);
728  cfun->machine->is_OS_task = avr_OS_task_function_p (decl);
729  cfun->machine->is_OS_main = avr_OS_main_function_p (decl);
730
731  isr = cfun->machine->is_interrupt ? "interrupt" : "signal";
732
733  /* Too much attributes make no sense as they request conflicting features. */
734
735  if (cfun->machine->is_OS_task + cfun->machine->is_OS_main
736      + (cfun->machine->is_signal || cfun->machine->is_interrupt) > 1)
737    error_at (loc, "function attributes %qs, %qs and %qs are mutually"
738               " exclusive", "OS_task", "OS_main", isr);
739
740  /* 'naked' will hide effects of 'OS_task' and 'OS_main'.  */
741
742  if (cfun->machine->is_naked
743      && (cfun->machine->is_OS_task || cfun->machine->is_OS_main))
744    warning_at (loc, OPT_Wattributes, "function attributes %qs and %qs have"
745                " no effect on %qs function", "OS_task", "OS_main", "naked");
746
747  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
748    {
749      tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
750      tree ret = TREE_TYPE (TREE_TYPE (decl));
751      const char *name;
752
753      name = DECL_ASSEMBLER_NAME_SET_P (decl)
754        ? IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))
755        : IDENTIFIER_POINTER (DECL_NAME (decl));
756
757      /* Skip a leading '*' that might still prefix the assembler name,
758         e.g. in non-LTO runs.  */
759
760      name = default_strip_name_encoding (name);
761
762      /* Silently ignore 'signal' if 'interrupt' is present.  AVR-LibC startet
763         using this when it switched from SIGNAL and INTERRUPT to ISR.  */
764
765      if (cfun->machine->is_interrupt)
766        cfun->machine->is_signal = 0;
767
768      /* Interrupt handlers must be  void __vector (void)  functions.  */
769
770      if (args && TREE_CODE (TREE_VALUE (args)) != VOID_TYPE)
771        error_at (loc, "%qs function cannot have arguments", isr);
772
773      if (TREE_CODE (ret) != VOID_TYPE)
774        error_at (loc, "%qs function cannot return a value", isr);
775
776      /* If the function has the 'signal' or 'interrupt' attribute, ensure
777         that the name of the function is "__vector_NN" so as to catch
778         when the user misspells the vector name.  */
779
780      if (!STR_PREFIX_P (name, "__vector"))
781        warning_at (loc, 0, "%qs appears to be a misspelled %s handler",
782                    name, isr);
783    }
784
785  /* Don't print the above diagnostics more than once.  */
786
787  cfun->machine->attributes_checked_p = 1;
788}
789
790
791/* Implement `ACCUMULATE_OUTGOING_ARGS'.  */
792
793int
794avr_accumulate_outgoing_args (void)
795{
796  if (!cfun)
797    return TARGET_ACCUMULATE_OUTGOING_ARGS;
798
799  /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
800        what offset is correct.  In some cases it is relative to
801        virtual_outgoing_args_rtx and in others it is relative to
802        virtual_stack_vars_rtx.  For example code see
803            gcc.c-torture/execute/built-in-setjmp.c
804            gcc.c-torture/execute/builtins/sprintf-chk.c   */
805
806  return (TARGET_ACCUMULATE_OUTGOING_ARGS
807          && !(cfun->calls_setjmp
808               || cfun->has_nonlocal_label));
809}
810
811
812/* Report contribution of accumulated outgoing arguments to stack size.  */
813
814static inline int
815avr_outgoing_args_size (void)
816{
817  return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
818}
819
820
821/* Implement `STARTING_FRAME_OFFSET'.  */
822/* This is the offset from the frame pointer register to the first stack slot
823   that contains a variable living in the frame.  */
824
825int
826avr_starting_frame_offset (void)
827{
828  return 1 + avr_outgoing_args_size ();
829}
830
831
832/* Return the number of hard registers to push/pop in the prologue/epilogue
833   of the current function, and optionally store these registers in SET.  */
834
835static int
836avr_regs_to_save (HARD_REG_SET *set)
837{
838  int reg, count;
839  int int_or_sig_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
840
841  if (set)
842    CLEAR_HARD_REG_SET (*set);
843  count = 0;
844
845  /* No need to save any registers if the function never returns or
846     has the "OS_task" or "OS_main" attribute.  */
847
848  if (TREE_THIS_VOLATILE (current_function_decl)
849      || cfun->machine->is_OS_task
850      || cfun->machine->is_OS_main)
851    return 0;
852
853  for (reg = 0; reg < 32; reg++)
854    {
855      /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
856         any global register variables.  */
857
858      if (fixed_regs[reg])
859        continue;
860
861      if ((int_or_sig_p && !crtl->is_leaf && call_used_regs[reg])
862          || (df_regs_ever_live_p (reg)
863              && (int_or_sig_p || !call_used_regs[reg])
864              /* Don't record frame pointer registers here.  They are treated
865                 indivitually in prologue.  */
866              && !(frame_pointer_needed
867                   && (reg == REG_Y || reg == (REG_Y+1)))))
868        {
869          if (set)
870            SET_HARD_REG_BIT (*set, reg);
871          count++;
872        }
873    }
874  return count;
875}
876
877
878/* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS' */
879
880static bool
881avr_allocate_stack_slots_for_args (void)
882{
883  return !cfun->machine->is_naked;
884}
885
886
887/* Return true if register FROM can be eliminated via register TO.  */
888
889static bool
890avr_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
891{
892  return ((frame_pointer_needed && to == FRAME_POINTER_REGNUM)
893          || !frame_pointer_needed);
894}
895
896
897/* Implement `TARGET_WARN_FUNC_RETURN'.  */
898
899static bool
900avr_warn_func_return (tree decl)
901{
902  /* Naked functions are implemented entirely in assembly, including the
903     return sequence, so suppress warnings about this.  */
904
905  return !avr_naked_function_p (decl);
906}
907
908/* Compute offset between arg_pointer and frame_pointer.  */
909
910int
911avr_initial_elimination_offset (int from, int to)
912{
913  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
914    return 0;
915  else
916    {
917      int offset = frame_pointer_needed ? 2 : 0;
918      int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
919
920      offset += avr_regs_to_save (NULL);
921      return (get_frame_size () + avr_outgoing_args_size()
922              + avr_pc_size + 1 + offset);
923    }
924}
925
926
927/* Helper for the function below.  */
928
929static void
930avr_adjust_type_node (tree *node, machine_mode mode, int sat_p)
931{
932  *node = make_node (FIXED_POINT_TYPE);
933  TYPE_SATURATING (*node) = sat_p;
934  TYPE_UNSIGNED (*node) = UNSIGNED_FIXED_POINT_MODE_P (mode);
935  TYPE_IBIT (*node) = GET_MODE_IBIT (mode);
936  TYPE_FBIT (*node) = GET_MODE_FBIT (mode);
937  TYPE_PRECISION (*node) = GET_MODE_BITSIZE (mode);
938  TYPE_ALIGN (*node) = 8;
939  SET_TYPE_MODE (*node, mode);
940
941  layout_type (*node);
942}
943
944
945/* Implement `TARGET_BUILD_BUILTIN_VA_LIST'.  */
946
947static tree
948avr_build_builtin_va_list (void)
949{
950  /* avr-modes.def adjusts [U]TA to be 64-bit modes with 48 fractional bits.
951     This is more appropriate for the 8-bit machine AVR than 128-bit modes.
952     The ADJUST_IBIT/FBIT are handled in toplev:init_adjust_machine_modes()
953     which is auto-generated by genmodes, but the compiler assigns [U]DAmode
954     to the long long accum modes instead of the desired [U]TAmode.
955
956     Fix this now, right after node setup in tree.c:build_common_tree_nodes().
957     This must run before c-cppbuiltin.c:builtin_define_fixed_point_constants()
958     which built-in defines macros like __ULLACCUM_FBIT__ that are used by
959     libgcc to detect IBIT and FBIT.  */
960
961  avr_adjust_type_node (&ta_type_node, TAmode, 0);
962  avr_adjust_type_node (&uta_type_node, UTAmode, 0);
963  avr_adjust_type_node (&sat_ta_type_node, TAmode, 1);
964  avr_adjust_type_node (&sat_uta_type_node, UTAmode, 1);
965
966  unsigned_long_long_accum_type_node = uta_type_node;
967  long_long_accum_type_node = ta_type_node;
968  sat_unsigned_long_long_accum_type_node = sat_uta_type_node;
969  sat_long_long_accum_type_node = sat_ta_type_node;
970
971  /* Dispatch to the default handler.  */
972
973  return std_build_builtin_va_list ();
974}
975
976
977/* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'.  */
978/* Actual start of frame is virtual_stack_vars_rtx this is offset from
979   frame pointer by +STARTING_FRAME_OFFSET.
980   Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
981   avoids creating add/sub of offset in nonlocal goto and setjmp.  */
982
983static rtx
984avr_builtin_setjmp_frame_value (void)
985{
986  rtx xval = gen_reg_rtx (Pmode);
987  emit_insn (gen_subhi3 (xval, virtual_stack_vars_rtx,
988                         gen_int_mode (STARTING_FRAME_OFFSET, Pmode)));
989  return xval;
990}
991
992
993/* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
994   This is return address of function.  */
995
996rtx
997avr_return_addr_rtx (int count, rtx tem)
998{
999  rtx r;
1000
1001  /* Can only return this function's return address. Others not supported.  */
1002  if (count)
1003     return NULL;
1004
1005  if (AVR_3_BYTE_PC)
1006    {
1007      r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
1008      warning (0, "%<builtin_return_address%> contains only 2 bytes"
1009               " of address");
1010    }
1011  else
1012    r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
1013
1014  r = gen_rtx_PLUS (Pmode, tem, r);
1015  r = gen_frame_mem (Pmode, memory_address (Pmode, r));
1016  r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
1017  return  r;
1018}
1019
1020/* Return 1 if the function epilogue is just a single "ret".  */
1021
1022int
1023avr_simple_epilogue (void)
1024{
1025  return (! frame_pointer_needed
1026          && get_frame_size () == 0
1027          && avr_outgoing_args_size() == 0
1028          && avr_regs_to_save (NULL) == 0
1029          && ! cfun->machine->is_interrupt
1030          && ! cfun->machine->is_signal
1031          && ! cfun->machine->is_naked
1032          && ! TREE_THIS_VOLATILE (current_function_decl));
1033}
1034
1035/* This function checks sequence of live registers.  */
1036
1037static int
1038sequent_regs_live (void)
1039{
1040  int reg;
1041  int live_seq = 0;
1042  int cur_seq = 0;
1043
1044  for (reg = 0; reg <= LAST_CALLEE_SAVED_REG; ++reg)
1045    {
1046      if (fixed_regs[reg])
1047        {
1048          /* Don't recognize sequences that contain global register
1049             variables.  */
1050
1051          if (live_seq != 0)
1052            return 0;
1053          else
1054            continue;
1055        }
1056
1057      if (!call_used_regs[reg])
1058        {
1059          if (df_regs_ever_live_p (reg))
1060            {
1061              ++live_seq;
1062              ++cur_seq;
1063            }
1064          else
1065            cur_seq = 0;
1066        }
1067    }
1068
1069  if (!frame_pointer_needed)
1070    {
1071      if (df_regs_ever_live_p (REG_Y))
1072        {
1073          ++live_seq;
1074          ++cur_seq;
1075        }
1076      else
1077        cur_seq = 0;
1078
1079      if (df_regs_ever_live_p (REG_Y+1))
1080        {
1081          ++live_seq;
1082          ++cur_seq;
1083        }
1084      else
1085        cur_seq = 0;
1086    }
1087  else
1088    {
1089      cur_seq += 2;
1090      live_seq += 2;
1091    }
1092  return (cur_seq == live_seq) ? live_seq : 0;
1093}
1094
1095/* Obtain the length sequence of insns.  */
1096
1097int
1098get_sequence_length (rtx_insn *insns)
1099{
1100  rtx_insn *insn;
1101  int length;
1102
1103  for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
1104    length += get_attr_length (insn);
1105
1106  return length;
1107}
1108
1109
1110/*  Implement `INCOMING_RETURN_ADDR_RTX'.  */
1111
1112rtx
1113avr_incoming_return_addr_rtx (void)
1114{
1115  /* The return address is at the top of the stack.  Note that the push
1116     was via post-decrement, which means the actual address is off by one.  */
1117  return gen_frame_mem (HImode, plus_constant (Pmode, stack_pointer_rtx, 1));
1118}
1119
1120/*  Helper for expand_prologue.  Emit a push of a byte register.  */
1121
1122static void
1123emit_push_byte (unsigned regno, bool frame_related_p)
1124{
1125  rtx mem, reg;
1126  rtx_insn *insn;
1127
1128  mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
1129  mem = gen_frame_mem (QImode, mem);
1130  reg = gen_rtx_REG (QImode, regno);
1131
1132  insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
1133  if (frame_related_p)
1134    RTX_FRAME_RELATED_P (insn) = 1;
1135
1136  cfun->machine->stack_usage++;
1137}
1138
1139
1140/*  Helper for expand_prologue.  Emit a push of a SFR via tmp_reg.
1141    SFR is a MEM representing the memory location of the SFR.
1142    If CLR_P then clear the SFR after the push using zero_reg.  */
1143
1144static void
1145emit_push_sfr (rtx sfr, bool frame_related_p, bool clr_p)
1146{
1147  rtx_insn *insn;
1148
1149  gcc_assert (MEM_P (sfr));
1150
1151  /* IN __tmp_reg__, IO(SFR) */
1152  insn = emit_move_insn (tmp_reg_rtx, sfr);
1153  if (frame_related_p)
1154    RTX_FRAME_RELATED_P (insn) = 1;
1155
1156  /* PUSH __tmp_reg__ */
1157  emit_push_byte (AVR_TMP_REGNO, frame_related_p);
1158
1159  if (clr_p)
1160    {
1161      /* OUT IO(SFR), __zero_reg__ */
1162      insn = emit_move_insn (sfr, const0_rtx);
1163      if (frame_related_p)
1164        RTX_FRAME_RELATED_P (insn) = 1;
1165    }
1166}
1167
1168static void
1169avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
1170{
1171  rtx_insn *insn;
1172  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1173  int live_seq = sequent_regs_live ();
1174
1175  HOST_WIDE_INT size_max
1176    = (HOST_WIDE_INT) GET_MODE_MASK (AVR_HAVE_8BIT_SP ? QImode : Pmode);
1177
1178  bool minimize = (TARGET_CALL_PROLOGUES
1179                   && size < size_max
1180                   && live_seq
1181                   && !isr_p
1182                   && !cfun->machine->is_OS_task
1183                   && !cfun->machine->is_OS_main
1184                   && !AVR_TINY);
1185
1186  if (minimize
1187      && (frame_pointer_needed
1188          || avr_outgoing_args_size() > 8
1189          || (AVR_2_BYTE_PC && live_seq > 6)
1190          || live_seq > 7))
1191    {
1192      rtx pattern;
1193      int first_reg, reg, offset;
1194
1195      emit_move_insn (gen_rtx_REG (HImode, REG_X),
1196                      gen_int_mode (size, HImode));
1197
1198      pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
1199                                         gen_int_mode (live_seq+size, HImode));
1200      insn = emit_insn (pattern);
1201      RTX_FRAME_RELATED_P (insn) = 1;
1202
1203      /* Describe the effect of the unspec_volatile call to prologue_saves.
1204         Note that this formulation assumes that add_reg_note pushes the
1205         notes to the front.  Thus we build them in the reverse order of
1206         how we want dwarf2out to process them.  */
1207
1208      /* The function does always set frame_pointer_rtx, but whether that
1209         is going to be permanent in the function is frame_pointer_needed.  */
1210
1211      add_reg_note (insn, REG_CFA_ADJUST_CFA,
1212                    gen_rtx_SET (VOIDmode, (frame_pointer_needed
1213                                            ? frame_pointer_rtx
1214                                            : stack_pointer_rtx),
1215                                 plus_constant (Pmode, stack_pointer_rtx,
1216                                                -(size + live_seq))));
1217
1218      /* Note that live_seq always contains r28+r29, but the other
1219         registers to be saved are all below 18.  */
1220
1221      first_reg = (LAST_CALLEE_SAVED_REG + 1) - (live_seq - 2);
1222
1223      for (reg = 29, offset = -live_seq + 1;
1224           reg >= first_reg;
1225           reg = (reg == 28 ? LAST_CALLEE_SAVED_REG : reg - 1), ++offset)
1226        {
1227          rtx m, r;
1228
1229          m = gen_rtx_MEM (QImode, plus_constant (Pmode, stack_pointer_rtx,
1230                                                  offset));
1231          r = gen_rtx_REG (QImode, reg);
1232          add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
1233        }
1234
1235      cfun->machine->stack_usage += size + live_seq;
1236    }
1237  else /* !minimize */
1238    {
1239      int reg;
1240
1241      for (reg = 0; reg < 32; ++reg)
1242        if (TEST_HARD_REG_BIT (set, reg))
1243          emit_push_byte (reg, true);
1244
1245      if (frame_pointer_needed
1246          && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
1247        {
1248          /* Push frame pointer.  Always be consistent about the
1249             ordering of pushes -- epilogue_restores expects the
1250             register pair to be pushed low byte first.  */
1251
1252          emit_push_byte (REG_Y, true);
1253          emit_push_byte (REG_Y + 1, true);
1254        }
1255
1256      if (frame_pointer_needed
1257          && size == 0)
1258        {
1259          insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1260          RTX_FRAME_RELATED_P (insn) = 1;
1261        }
1262
1263      if (size != 0)
1264        {
1265          /*  Creating a frame can be done by direct manipulation of the
1266              stack or via the frame pointer. These two methods are:
1267                  fp =  sp
1268                  fp -= size
1269                  sp =  fp
1270              or
1271                  sp -= size
1272                  fp =  sp    (*)
1273              the optimum method depends on function type, stack and
1274              frame size.  To avoid a complex logic, both methods are
1275              tested and shortest is selected.
1276
1277              There is also the case where SIZE != 0 and no frame pointer is
1278              needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
1279              In that case, insn (*) is not needed in that case.
1280              We use the X register as scratch. This is save because in X
1281              is call-clobbered.
1282                 In an interrupt routine, the case of SIZE != 0 together with
1283              !frame_pointer_needed can only occur if the function is not a
1284              leaf function and thus X has already been saved.  */
1285
1286          int irq_state = -1;
1287          HOST_WIDE_INT size_cfa = size, neg_size;
1288          rtx_insn *fp_plus_insns;
1289          rtx fp, my_fp;
1290
1291          gcc_assert (frame_pointer_needed
1292                      || !isr_p
1293                      || !crtl->is_leaf);
1294
1295          fp = my_fp = (frame_pointer_needed
1296                        ? frame_pointer_rtx
1297                        : gen_rtx_REG (Pmode, REG_X));
1298
1299          if (AVR_HAVE_8BIT_SP)
1300            {
1301              /* The high byte (r29) does not change:
1302                 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size).  */
1303
1304              my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1305            }
1306
1307          /* Cut down size and avoid size = 0 so that we don't run
1308             into ICE like PR52488 in the remainder.  */
1309
1310          if (size > size_max)
1311            {
1312              /* Don't error so that insane code from newlib still compiles
1313                 and does not break building newlib.  As PR51345 is implemented
1314                 now, there are multilib variants with -msp8.
1315
1316                 If user wants sanity checks he can use -Wstack-usage=
1317                 or similar options.
1318
1319                 For CFA we emit the original, non-saturated size so that
1320                 the generic machinery is aware of the real stack usage and
1321                 will print the above diagnostic as expected.  */
1322
1323              size = size_max;
1324            }
1325
1326          size = trunc_int_for_mode (size, GET_MODE (my_fp));
1327          neg_size = trunc_int_for_mode (-size, GET_MODE (my_fp));
1328
1329          /************  Method 1: Adjust frame pointer  ************/
1330
1331          start_sequence ();
1332
1333          /* Normally, the dwarf2out frame-related-expr interpreter does
1334             not expect to have the CFA change once the frame pointer is
1335             set up.  Thus, we avoid marking the move insn below and
1336             instead indicate that the entire operation is complete after
1337             the frame pointer subtraction is done.  */
1338
1339          insn = emit_move_insn (fp, stack_pointer_rtx);
1340          if (frame_pointer_needed)
1341            {
1342              RTX_FRAME_RELATED_P (insn) = 1;
1343              add_reg_note (insn, REG_CFA_ADJUST_CFA,
1344                            gen_rtx_SET (VOIDmode, fp, stack_pointer_rtx));
1345            }
1346
1347          insn = emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp),
1348                                                       my_fp, neg_size));
1349
1350          if (frame_pointer_needed)
1351            {
1352              RTX_FRAME_RELATED_P (insn) = 1;
1353              add_reg_note (insn, REG_CFA_ADJUST_CFA,
1354                            gen_rtx_SET (VOIDmode, fp,
1355                                         plus_constant (Pmode, fp,
1356                                                        -size_cfa)));
1357            }
1358
1359          /* Copy to stack pointer.  Note that since we've already
1360             changed the CFA to the frame pointer this operation
1361             need not be annotated if frame pointer is needed.
1362             Always move through unspec, see PR50063.
1363             For meaning of irq_state see movhi_sp_r insn.  */
1364
1365          if (cfun->machine->is_interrupt)
1366            irq_state = 1;
1367
1368          if (TARGET_NO_INTERRUPTS
1369              || cfun->machine->is_signal
1370              || cfun->machine->is_OS_main)
1371            irq_state = 0;
1372
1373          if (AVR_HAVE_8BIT_SP)
1374            irq_state = 2;
1375
1376          insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
1377                                            fp, GEN_INT (irq_state)));
1378          if (!frame_pointer_needed)
1379            {
1380              RTX_FRAME_RELATED_P (insn) = 1;
1381              add_reg_note (insn, REG_CFA_ADJUST_CFA,
1382                            gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1383                                         plus_constant (Pmode,
1384                                                        stack_pointer_rtx,
1385                                                        -size_cfa)));
1386            }
1387
1388          fp_plus_insns = get_insns ();
1389          end_sequence ();
1390
1391          /************  Method 2: Adjust Stack pointer  ************/
1392
1393          /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1394             can only handle specific offsets.  */
1395
1396          if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
1397            {
1398              rtx_insn *sp_plus_insns;
1399
1400              start_sequence ();
1401
1402              insn = emit_move_insn (stack_pointer_rtx,
1403                                     plus_constant (Pmode, stack_pointer_rtx,
1404                                                    -size));
1405              RTX_FRAME_RELATED_P (insn) = 1;
1406              add_reg_note (insn, REG_CFA_ADJUST_CFA,
1407                            gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1408                                         plus_constant (Pmode,
1409                                                        stack_pointer_rtx,
1410                                                        -size_cfa)));
1411              if (frame_pointer_needed)
1412                {
1413                  insn = emit_move_insn (fp, stack_pointer_rtx);
1414                  RTX_FRAME_RELATED_P (insn) = 1;
1415                }
1416
1417              sp_plus_insns = get_insns ();
1418              end_sequence ();
1419
1420              /************ Use shortest method  ************/
1421
1422              emit_insn (get_sequence_length (sp_plus_insns)
1423                         < get_sequence_length (fp_plus_insns)
1424                         ? sp_plus_insns
1425                         : fp_plus_insns);
1426            }
1427          else
1428            {
1429              emit_insn (fp_plus_insns);
1430            }
1431
1432          cfun->machine->stack_usage += size_cfa;
1433        } /* !minimize && size != 0 */
1434    } /* !minimize */
1435}
1436
1437
1438/*  Output function prologue.  */
1439
1440void
1441avr_expand_prologue (void)
1442{
1443  HARD_REG_SET set;
1444  HOST_WIDE_INT size;
1445
1446  size = get_frame_size() + avr_outgoing_args_size();
1447
1448  cfun->machine->stack_usage = 0;
1449
1450  /* Prologue: naked.  */
1451  if (cfun->machine->is_naked)
1452    {
1453      return;
1454    }
1455
1456  avr_regs_to_save (&set);
1457
1458  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1459    {
1460      /* Enable interrupts.  */
1461      if (cfun->machine->is_interrupt)
1462        emit_insn (gen_enable_interrupt ());
1463
1464      /* Push zero reg.  */
1465      emit_push_byte (AVR_ZERO_REGNO, true);
1466
1467      /* Push tmp reg.  */
1468      emit_push_byte (AVR_TMP_REGNO, true);
1469
1470      /* Push SREG.  */
1471      /* ??? There's no dwarf2 column reserved for SREG.  */
1472      emit_push_sfr (sreg_rtx, false, false /* clr */);
1473
1474      /* Clear zero reg.  */
1475      emit_move_insn (zero_reg_rtx, const0_rtx);
1476
1477      /* Prevent any attempt to delete the setting of ZERO_REG!  */
1478      emit_use (zero_reg_rtx);
1479
1480      /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
1481         ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z.  */
1482
1483      if (AVR_HAVE_RAMPD)
1484        emit_push_sfr (rampd_rtx, false /* frame-related */, true /* clr */);
1485
1486      if (AVR_HAVE_RAMPX
1487          && TEST_HARD_REG_BIT (set, REG_X)
1488          && TEST_HARD_REG_BIT (set, REG_X + 1))
1489        {
1490          emit_push_sfr (rampx_rtx, false /* frame-related */, true /* clr */);
1491        }
1492
1493      if (AVR_HAVE_RAMPY
1494          && (frame_pointer_needed
1495              || (TEST_HARD_REG_BIT (set, REG_Y)
1496                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
1497        {
1498          emit_push_sfr (rampy_rtx, false /* frame-related */, true /* clr */);
1499        }
1500
1501      if (AVR_HAVE_RAMPZ
1502          && TEST_HARD_REG_BIT (set, REG_Z)
1503          && TEST_HARD_REG_BIT (set, REG_Z + 1))
1504        {
1505          emit_push_sfr (rampz_rtx, false /* frame-related */, AVR_HAVE_RAMPD);
1506        }
1507    }  /* is_interrupt is_signal */
1508
1509  avr_prologue_setup_frame (size, set);
1510
1511  if (flag_stack_usage_info)
1512    current_function_static_stack_size = cfun->machine->stack_usage;
1513}
1514
1515
1516/* Implement `TARGET_ASM_FUNCTION_END_PROLOGUE'.  */
1517/* Output summary at end of function prologue.  */
1518
1519static void
1520avr_asm_function_end_prologue (FILE *file)
1521{
1522  if (cfun->machine->is_naked)
1523    {
1524      fputs ("/* prologue: naked */\n", file);
1525    }
1526  else
1527    {
1528      if (cfun->machine->is_interrupt)
1529        {
1530          fputs ("/* prologue: Interrupt */\n", file);
1531        }
1532      else if (cfun->machine->is_signal)
1533        {
1534          fputs ("/* prologue: Signal */\n", file);
1535        }
1536      else
1537        fputs ("/* prologue: function */\n", file);
1538    }
1539
1540  if (ACCUMULATE_OUTGOING_ARGS)
1541    fprintf (file, "/* outgoing args size = %d */\n",
1542             avr_outgoing_args_size());
1543
1544  fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1545                 get_frame_size());
1546  fprintf (file, "/* stack size = %d */\n",
1547                 cfun->machine->stack_usage);
1548  /* Create symbol stack offset here so all functions have it. Add 1 to stack
1549     usage for offset so that SP + .L__stack_offset = return address.  */
1550  fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
1551}
1552
1553
1554/* Implement `EPILOGUE_USES'.  */
1555
1556int
1557avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1558{
1559  if (reload_completed
1560      && cfun->machine
1561      && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1562    return 1;
1563  return 0;
1564}
1565
1566/*  Helper for avr_expand_epilogue.  Emit a pop of a byte register.  */
1567
1568static void
1569emit_pop_byte (unsigned regno)
1570{
1571  rtx mem, reg;
1572
1573  mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1574  mem = gen_frame_mem (QImode, mem);
1575  reg = gen_rtx_REG (QImode, regno);
1576
1577  emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1578}
1579
1580/*  Output RTL epilogue.  */
1581
1582void
1583avr_expand_epilogue (bool sibcall_p)
1584{
1585  int reg;
1586  int live_seq;
1587  HARD_REG_SET set;
1588  int minimize;
1589  HOST_WIDE_INT size;
1590  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1591
1592  size = get_frame_size() + avr_outgoing_args_size();
1593
1594  /* epilogue: naked  */
1595  if (cfun->machine->is_naked)
1596    {
1597      gcc_assert (!sibcall_p);
1598
1599      emit_jump_insn (gen_return ());
1600      return;
1601    }
1602
1603  avr_regs_to_save (&set);
1604  live_seq = sequent_regs_live ();
1605
1606  minimize = (TARGET_CALL_PROLOGUES
1607              && live_seq
1608              && !isr_p
1609              && !cfun->machine->is_OS_task
1610              && !cfun->machine->is_OS_main
1611              && !AVR_TINY);
1612
1613  if (minimize
1614      && (live_seq > 4
1615          || frame_pointer_needed
1616          || size))
1617    {
1618      /*  Get rid of frame.  */
1619
1620      if (!frame_pointer_needed)
1621        {
1622          emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1623        }
1624
1625      if (size)
1626        {
1627          emit_move_insn (frame_pointer_rtx,
1628                          plus_constant (Pmode, frame_pointer_rtx, size));
1629        }
1630
1631      emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1632      return;
1633    }
1634
1635  if (size)
1636    {
1637      /* Try two methods to adjust stack and select shortest.  */
1638
1639      int irq_state = -1;
1640      rtx fp, my_fp;
1641      rtx_insn *fp_plus_insns;
1642      HOST_WIDE_INT size_max;
1643
1644      gcc_assert (frame_pointer_needed
1645                  || !isr_p
1646                  || !crtl->is_leaf);
1647
1648      fp = my_fp = (frame_pointer_needed
1649                    ? frame_pointer_rtx
1650                    : gen_rtx_REG (Pmode, REG_X));
1651
1652      if (AVR_HAVE_8BIT_SP)
1653        {
1654          /* The high byte (r29) does not change:
1655             Prefer SUBI (1 cycle) over SBIW (2 cycles).  */
1656
1657          my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1658        }
1659
1660      /* For rationale see comment in prologue generation.  */
1661
1662      size_max = (HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (my_fp));
1663      if (size > size_max)
1664        size = size_max;
1665      size = trunc_int_for_mode (size, GET_MODE (my_fp));
1666
1667      /********** Method 1: Adjust fp register  **********/
1668
1669      start_sequence ();
1670
1671      if (!frame_pointer_needed)
1672        emit_move_insn (fp, stack_pointer_rtx);
1673
1674      emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp), my_fp, size));
1675
1676      /* Copy to stack pointer.  */
1677
1678      if (TARGET_NO_INTERRUPTS)
1679        irq_state = 0;
1680
1681      if (AVR_HAVE_8BIT_SP)
1682        irq_state = 2;
1683
1684      emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp,
1685                                 GEN_INT (irq_state)));
1686
1687      fp_plus_insns = get_insns ();
1688      end_sequence ();
1689
1690      /********** Method 2: Adjust Stack pointer  **********/
1691
1692      if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
1693        {
1694          rtx_insn *sp_plus_insns;
1695
1696          start_sequence ();
1697
1698          emit_move_insn (stack_pointer_rtx,
1699                          plus_constant (Pmode, stack_pointer_rtx, size));
1700
1701          sp_plus_insns = get_insns ();
1702          end_sequence ();
1703
1704          /************ Use shortest method  ************/
1705
1706          emit_insn (get_sequence_length (sp_plus_insns)
1707                     < get_sequence_length (fp_plus_insns)
1708                     ? sp_plus_insns
1709                     : fp_plus_insns);
1710        }
1711      else
1712        emit_insn (fp_plus_insns);
1713    } /* size != 0 */
1714
1715  if (frame_pointer_needed
1716      && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1717    {
1718      /* Restore previous frame_pointer.  See avr_expand_prologue for
1719         rationale for not using pophi.  */
1720
1721      emit_pop_byte (REG_Y + 1);
1722      emit_pop_byte (REG_Y);
1723    }
1724
1725  /* Restore used registers.  */
1726
1727  for (reg = 31; reg >= 0; --reg)
1728    if (TEST_HARD_REG_BIT (set, reg))
1729      emit_pop_byte (reg);
1730
1731  if (isr_p)
1732    {
1733      /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
1734         The conditions to restore them must be tha same as in prologue.  */
1735
1736      if (AVR_HAVE_RAMPZ
1737          && TEST_HARD_REG_BIT (set, REG_Z)
1738          && TEST_HARD_REG_BIT (set, REG_Z + 1))
1739        {
1740          emit_pop_byte (TMP_REGNO);
1741          emit_move_insn (rampz_rtx, tmp_reg_rtx);
1742        }
1743
1744      if (AVR_HAVE_RAMPY
1745          && (frame_pointer_needed
1746              || (TEST_HARD_REG_BIT (set, REG_Y)
1747                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
1748        {
1749          emit_pop_byte (TMP_REGNO);
1750          emit_move_insn (rampy_rtx, tmp_reg_rtx);
1751        }
1752
1753      if (AVR_HAVE_RAMPX
1754          && TEST_HARD_REG_BIT (set, REG_X)
1755          && TEST_HARD_REG_BIT (set, REG_X + 1))
1756        {
1757          emit_pop_byte (TMP_REGNO);
1758          emit_move_insn (rampx_rtx, tmp_reg_rtx);
1759        }
1760
1761      if (AVR_HAVE_RAMPD)
1762        {
1763          emit_pop_byte (TMP_REGNO);
1764          emit_move_insn (rampd_rtx, tmp_reg_rtx);
1765        }
1766
1767      /* Restore SREG using tmp_reg as scratch.  */
1768
1769      emit_pop_byte (AVR_TMP_REGNO);
1770      emit_move_insn (sreg_rtx, tmp_reg_rtx);
1771
1772      /* Restore tmp REG.  */
1773      emit_pop_byte (AVR_TMP_REGNO);
1774
1775      /* Restore zero REG.  */
1776      emit_pop_byte (AVR_ZERO_REGNO);
1777    }
1778
1779  if (!sibcall_p)
1780    emit_jump_insn (gen_return ());
1781}
1782
1783
1784/* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'.  */
1785
1786static void
1787avr_asm_function_begin_epilogue (FILE *file)
1788{
1789  fprintf (file, "/* epilogue start */\n");
1790}
1791
1792
1793/* Implement `TARGET_CANNOT_MODITY_JUMPS_P'.  */
1794
1795static bool
1796avr_cannot_modify_jumps_p (void)
1797{
1798
1799  /* Naked Functions must not have any instructions after
1800     their epilogue, see PR42240 */
1801
1802  if (reload_completed
1803      && cfun->machine
1804      && cfun->machine->is_naked)
1805    {
1806      return true;
1807    }
1808
1809  return false;
1810}
1811
1812
1813/* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'.  */
1814
1815static bool
1816avr_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED, addr_space_t as)
1817{
1818  /* FIXME:  Non-generic addresses are not mode-dependent in themselves.
1819       This hook just serves to hack around PR rtl-optimization/52543 by
1820       claiming that non-generic addresses were mode-dependent so that
1821       lower-subreg.c will skip these addresses.  lower-subreg.c sets up fake
1822       RTXes to probe SET and MEM costs and assumes that MEM is always in the
1823       generic address space which is not true.  */
1824
1825  return !ADDR_SPACE_GENERIC_P (as);
1826}
1827
1828
1829/* Helper function for `avr_legitimate_address_p'.  */
1830
1831static inline bool
1832avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
1833                       RTX_CODE outer_code, bool strict)
1834{
1835  return (REG_P (reg)
1836          && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1837                                                 as, outer_code, UNKNOWN)
1838              || (!strict
1839                  && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1840}
1841
1842
1843/* Return nonzero if X (an RTX) is a legitimate memory address on the target
1844   machine for a memory operand of mode MODE.  */
1845
1846static bool
1847avr_legitimate_address_p (machine_mode mode, rtx x, bool strict)
1848{
1849  bool ok = CONSTANT_ADDRESS_P (x);
1850
1851  switch (GET_CODE (x))
1852    {
1853    case REG:
1854      ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
1855                                  MEM, strict);
1856
1857      if (strict
1858          && GET_MODE_SIZE (mode) > 4
1859          && REG_X == REGNO (x))
1860        {
1861          ok = false;
1862        }
1863      break;
1864
1865    case POST_INC:
1866    case PRE_DEC:
1867      ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
1868                                  GET_CODE (x), strict);
1869      break;
1870
1871    case PLUS:
1872      {
1873        rtx reg = XEXP (x, 0);
1874        rtx op1 = XEXP (x, 1);
1875
1876        if (REG_P (reg)
1877            && CONST_INT_P (op1)
1878            && INTVAL (op1) >= 0)
1879          {
1880            bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));
1881
1882            if (fit)
1883              {
1884                ok = (! strict
1885                      || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
1886                                                PLUS, strict));
1887
1888                if (reg == frame_pointer_rtx
1889                    || reg == arg_pointer_rtx)
1890                  {
1891                    ok = true;
1892                  }
1893              }
1894            else if (frame_pointer_needed
1895                     && reg == frame_pointer_rtx)
1896              {
1897                ok = true;
1898              }
1899          }
1900      }
1901      break;
1902
1903    default:
1904      break;
1905    }
1906
1907  if (AVR_TINY
1908      && CONSTANT_ADDRESS_P (x))
1909    {
1910      /* avrtiny's load / store instructions only cover addresses 0..0xbf:
1911         IN / OUT range is 0..0x3f and LDS / STS can access 0x40..0xbf.  */
1912
1913      ok = (CONST_INT_P (x)
1914            && IN_RANGE (INTVAL (x), 0, 0xc0 - GET_MODE_SIZE (mode)));
1915    }
1916
1917  if (avr_log.legitimate_address_p)
1918    {
1919      avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1920                 "reload_completed=%d reload_in_progress=%d %s:",
1921                 ok, mode, strict, reload_completed, reload_in_progress,
1922                 reg_renumber ? "(reg_renumber)" : "");
1923
1924      if (GET_CODE (x) == PLUS
1925          && REG_P (XEXP (x, 0))
1926          && CONST_INT_P (XEXP (x, 1))
1927          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
1928          && reg_renumber)
1929        {
1930          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1931                     true_regnum (XEXP (x, 0)));
1932        }
1933
1934      avr_edump ("\n%r\n", x);
1935    }
1936
1937  return ok;
1938}
1939
1940
1941/* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1942   now only a helper for avr_addr_space_legitimize_address.  */
1943/* Attempts to replace X with a valid
1944   memory address for an operand of mode MODE  */
1945
1946static rtx
1947avr_legitimize_address (rtx x, rtx oldx, machine_mode mode)
1948{
1949  bool big_offset_p = false;
1950
1951  x = oldx;
1952
1953  if (GET_CODE (oldx) == PLUS
1954      && REG_P (XEXP (oldx, 0)))
1955    {
1956      if (REG_P (XEXP (oldx, 1)))
1957        x = force_reg (GET_MODE (oldx), oldx);
1958      else if (CONST_INT_P (XEXP (oldx, 1)))
1959        {
1960          int offs = INTVAL (XEXP (oldx, 1));
1961          if (frame_pointer_rtx != XEXP (oldx, 0)
1962              && offs > MAX_LD_OFFSET (mode))
1963            {
1964              big_offset_p = true;
1965              x = force_reg (GET_MODE (oldx), oldx);
1966            }
1967        }
1968    }
1969
1970  if (avr_log.legitimize_address)
1971    {
1972      avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1973
1974      if (x != oldx)
1975        avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1976    }
1977
1978  return x;
1979}
1980
1981
1982/* Implement `LEGITIMIZE_RELOAD_ADDRESS'.  */
1983/* This will allow register R26/27 to be used where it is no worse than normal
1984   base pointers R28/29 or R30/31.  For example, if base offset is greater
1985   than 63 bytes or for R++ or --R addressing.  */
1986
1987rtx
1988avr_legitimize_reload_address (rtx *px, machine_mode mode,
1989                               int opnum, int type, int addr_type,
1990                               int ind_levels ATTRIBUTE_UNUSED,
1991                               rtx (*mk_memloc)(rtx,int))
1992{
1993  rtx x = *px;
1994
1995  if (avr_log.legitimize_reload_address)
1996    avr_edump ("\n%?:%m %r\n", mode, x);
1997
1998  if (1 && (GET_CODE (x) == POST_INC
1999            || GET_CODE (x) == PRE_DEC))
2000    {
2001      push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
2002                   POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
2003                   opnum, RELOAD_OTHER);
2004
2005      if (avr_log.legitimize_reload_address)
2006        avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
2007                   POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
2008
2009      return x;
2010    }
2011
2012  if (GET_CODE (x) == PLUS
2013      && REG_P (XEXP (x, 0))
2014      && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
2015      && CONST_INT_P (XEXP (x, 1))
2016      && INTVAL (XEXP (x, 1)) >= 1)
2017    {
2018      bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
2019
2020      if (fit)
2021        {
2022          if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
2023            {
2024              int regno = REGNO (XEXP (x, 0));
2025              rtx mem = mk_memloc (x, regno);
2026
2027              push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
2028                           POINTER_REGS, Pmode, VOIDmode, 0, 0,
2029                           1, (enum reload_type) addr_type);
2030
2031              if (avr_log.legitimize_reload_address)
2032                avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
2033                           POINTER_REGS, XEXP (mem, 0), NULL_RTX);
2034
2035              push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
2036                           BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
2037                           opnum, (enum reload_type) type);
2038
2039              if (avr_log.legitimize_reload_address)
2040                avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
2041                           BASE_POINTER_REGS, mem, NULL_RTX);
2042
2043              return x;
2044            }
2045        }
2046      else if (! (frame_pointer_needed
2047                  && XEXP (x, 0) == frame_pointer_rtx))
2048        {
2049          push_reload (x, NULL_RTX, px, NULL,
2050                       POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
2051                       opnum, (enum reload_type) type);
2052
2053          if (avr_log.legitimize_reload_address)
2054            avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
2055                       POINTER_REGS, x, NULL_RTX);
2056
2057          return x;
2058        }
2059    }
2060
2061  return NULL_RTX;
2062}
2063
2064
2065/* Implement `TARGET_SECONDARY_RELOAD' */
2066
2067static reg_class_t
2068avr_secondary_reload (bool in_p, rtx x,
2069                      reg_class_t reload_class ATTRIBUTE_UNUSED,
2070                      machine_mode mode, secondary_reload_info *sri)
2071{
2072  if (in_p
2073      && MEM_P (x)
2074      && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
2075      && ADDR_SPACE_MEMX != MEM_ADDR_SPACE (x))
2076    {
2077      /* For the non-generic 16-bit spaces we need a d-class scratch.  */
2078
2079      switch (mode)
2080        {
2081        default:
2082          gcc_unreachable();
2083
2084        case QImode:  sri->icode = CODE_FOR_reload_inqi; break;
2085        case QQmode:  sri->icode = CODE_FOR_reload_inqq; break;
2086        case UQQmode: sri->icode = CODE_FOR_reload_inuqq; break;
2087
2088        case HImode:  sri->icode = CODE_FOR_reload_inhi; break;
2089        case HQmode:  sri->icode = CODE_FOR_reload_inhq; break;
2090        case HAmode:  sri->icode = CODE_FOR_reload_inha; break;
2091        case UHQmode: sri->icode = CODE_FOR_reload_inuhq; break;
2092        case UHAmode: sri->icode = CODE_FOR_reload_inuha; break;
2093
2094        case PSImode: sri->icode = CODE_FOR_reload_inpsi; break;
2095
2096        case SImode:  sri->icode = CODE_FOR_reload_insi; break;
2097        case SFmode:  sri->icode = CODE_FOR_reload_insf; break;
2098        case SQmode:  sri->icode = CODE_FOR_reload_insq; break;
2099        case SAmode:  sri->icode = CODE_FOR_reload_insa; break;
2100        case USQmode: sri->icode = CODE_FOR_reload_inusq; break;
2101        case USAmode: sri->icode = CODE_FOR_reload_inusa; break;
2102        }
2103    }
2104
2105  return NO_REGS;
2106}
2107
2108
2109/* Helper function to print assembler resp. track instruction
2110   sequence lengths.  Always return "".
2111
2112   If PLEN == NULL:
2113       Output assembler code from template TPL with operands supplied
2114       by OPERANDS.  This is just forwarding to output_asm_insn.
2115
2116   If PLEN != NULL:
2117       If N_WORDS >= 0  Add N_WORDS to *PLEN.
2118       If N_WORDS < 0   Set *PLEN to -N_WORDS.
2119       Don't output anything.
2120*/
2121
2122static const char*
2123avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
2124{
2125  if (NULL == plen)
2126    {
2127      output_asm_insn (tpl, operands);
2128    }
2129  else
2130    {
2131      if (n_words < 0)
2132        *plen = -n_words;
2133      else
2134        *plen += n_words;
2135    }
2136
2137  return "";
2138}
2139
2140
2141/* Return a pointer register name as a string.  */
2142
2143static const char*
2144ptrreg_to_str (int regno)
2145{
2146  switch (regno)
2147    {
2148    case REG_X: return "X";
2149    case REG_Y: return "Y";
2150    case REG_Z: return "Z";
2151    default:
2152      output_operand_lossage ("address operand requires constraint for"
2153                              " X, Y, or Z register");
2154    }
2155  return NULL;
2156}
2157
2158/* Return the condition name as a string.
2159   Used in conditional jump constructing  */
2160
2161static const char*
2162cond_string (enum rtx_code code)
2163{
2164  switch (code)
2165    {
2166    case NE:
2167      return "ne";
2168    case EQ:
2169      return "eq";
2170    case GE:
2171      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2172        return "pl";
2173      else
2174        return "ge";
2175    case LT:
2176      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2177        return "mi";
2178      else
2179        return "lt";
2180    case GEU:
2181      return "sh";
2182    case LTU:
2183      return "lo";
2184    default:
2185      gcc_unreachable ();
2186    }
2187
2188  return "";
2189}
2190
2191
2192/* Implement `TARGET_PRINT_OPERAND_ADDRESS'.  */
2193/* Output ADDR to FILE as address.  */
2194
2195static void
2196avr_print_operand_address (FILE *file, rtx addr)
2197{
2198  switch (GET_CODE (addr))
2199    {
2200    case REG:
2201      fprintf (file, ptrreg_to_str (REGNO (addr)));
2202      break;
2203
2204    case PRE_DEC:
2205      fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
2206      break;
2207
2208    case POST_INC:
2209      fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
2210      break;
2211
2212    default:
2213      if (CONSTANT_ADDRESS_P (addr)
2214          && text_segment_operand (addr, VOIDmode))
2215        {
2216          rtx x = addr;
2217          if (GET_CODE (x) == CONST)
2218            x = XEXP (x, 0);
2219          if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
2220            {
2221              /* Assembler gs() will implant word address.  Make offset
2222                 a byte offset inside gs() for assembler.  This is
2223                 needed because the more logical (constant+gs(sym)) is not
2224                 accepted by gas.  For 128K and smaller devices this is ok.
2225                 For large devices it will create a trampoline to offset
2226                 from symbol which may not be what the user really wanted.  */
2227
2228              fprintf (file, "gs(");
2229              output_addr_const (file, XEXP (x,0));
2230              fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
2231                       2 * INTVAL (XEXP (x, 1)));
2232              if (AVR_3_BYTE_PC)
2233                if (warning (0, "pointer offset from symbol maybe incorrect"))
2234                  {
2235                    output_addr_const (stderr, addr);
2236                    fprintf(stderr,"\n");
2237                  }
2238            }
2239          else
2240            {
2241              fprintf (file, "gs(");
2242              output_addr_const (file, addr);
2243              fprintf (file, ")");
2244            }
2245        }
2246      else
2247        output_addr_const (file, addr);
2248    }
2249}
2250
2251
2252/* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'.  */
2253
2254static bool
2255avr_print_operand_punct_valid_p (unsigned char code)
2256{
2257  return code == '~' || code == '!';
2258}
2259
2260
2261/* Implement `TARGET_PRINT_OPERAND'.  */
2262/* Output X as assembler operand to file FILE.
2263   For a description of supported %-codes, see top of avr.md.  */
2264
2265static void
2266avr_print_operand (FILE *file, rtx x, int code)
2267{
2268  int abcd = 0, ef = 0, ij = 0;
2269
2270  if (code >= 'A' && code <= 'D')
2271    abcd = code - 'A';
2272  else if (code == 'E' || code == 'F')
2273    ef = code - 'E';
2274  else if (code == 'I' || code == 'J')
2275    ij = code - 'I';
2276
2277  if (code == '~')
2278    {
2279      if (!AVR_HAVE_JMP_CALL)
2280        fputc ('r', file);
2281    }
2282  else if (code == '!')
2283    {
2284      if (AVR_HAVE_EIJMP_EICALL)
2285        fputc ('e', file);
2286    }
2287  else if (code == 't'
2288           || code == 'T')
2289    {
2290      static int t_regno = -1;
2291      static int t_nbits = -1;
2292
2293      if (REG_P (x) && t_regno < 0 && code == 'T')
2294        {
2295          t_regno = REGNO (x);
2296          t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
2297        }
2298      else if (CONST_INT_P (x) && t_regno >= 0
2299               && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
2300        {
2301          int bpos = INTVAL (x);
2302
2303          fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
2304          if (code == 'T')
2305            fprintf (file, ",%d", bpos % 8);
2306
2307          t_regno = -1;
2308        }
2309      else
2310        fatal_insn ("operands to %T/%t must be reg + const_int:", x);
2311    }
2312  else if (code == 'E' || code == 'F')
2313    {
2314      rtx op = XEXP(x, 0);
2315      fprintf (file, reg_names[REGNO (op) + ef]);
2316    }
2317  else if (code == 'I' || code == 'J')
2318    {
2319      rtx op = XEXP(XEXP(x, 0), 0);
2320      fprintf (file, reg_names[REGNO (op) + ij]);
2321    }
2322  else if (REG_P (x))
2323    {
2324      if (x == zero_reg_rtx)
2325        fprintf (file, "__zero_reg__");
2326      else if (code == 'r' && REGNO (x) < 32)
2327        fprintf (file, "%d", (int) REGNO (x));
2328      else
2329        fprintf (file, reg_names[REGNO (x) + abcd]);
2330    }
2331  else if (CONST_INT_P (x))
2332    {
2333      HOST_WIDE_INT ival = INTVAL (x);
2334
2335      if ('i' != code)
2336        fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
2337      else if (low_io_address_operand (x, VOIDmode)
2338               || high_io_address_operand (x, VOIDmode))
2339        {
2340          if (AVR_HAVE_RAMPZ && ival == avr_addr.rampz)
2341            fprintf (file, "__RAMPZ__");
2342          else if (AVR_HAVE_RAMPY && ival == avr_addr.rampy)
2343            fprintf (file, "__RAMPY__");
2344          else if (AVR_HAVE_RAMPX && ival == avr_addr.rampx)
2345            fprintf (file, "__RAMPX__");
2346          else if (AVR_HAVE_RAMPD && ival == avr_addr.rampd)
2347            fprintf (file, "__RAMPD__");
2348          else if ((AVR_XMEGA || AVR_TINY) && ival == avr_addr.ccp)
2349            fprintf (file, "__CCP__");
2350          else if (ival == avr_addr.sreg)   fprintf (file, "__SREG__");
2351          else if (ival == avr_addr.sp_l)   fprintf (file, "__SP_L__");
2352          else if (ival == avr_addr.sp_h)   fprintf (file, "__SP_H__");
2353          else
2354            {
2355              fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2356                       ival - avr_arch->sfr_offset);
2357            }
2358        }
2359      else
2360        fatal_insn ("bad address, not an I/O address:", x);
2361    }
2362  else if (MEM_P (x))
2363    {
2364      rtx addr = XEXP (x, 0);
2365
2366      if (code == 'm')
2367        {
2368          if (!CONSTANT_P (addr))
2369            fatal_insn ("bad address, not a constant:", addr);
2370          /* Assembler template with m-code is data - not progmem section */
2371          if (text_segment_operand (addr, VOIDmode))
2372            if (warning (0, "accessing data memory with"
2373                         " program memory address"))
2374              {
2375                output_addr_const (stderr, addr);
2376                fprintf(stderr,"\n");
2377              }
2378          output_addr_const (file, addr);
2379        }
2380      else if (code == 'i')
2381        {
2382          avr_print_operand (file, addr, 'i');
2383        }
2384      else if (code == 'o')
2385        {
2386          if (GET_CODE (addr) != PLUS)
2387            fatal_insn ("bad address, not (reg+disp):", addr);
2388
2389          avr_print_operand (file, XEXP (addr, 1), 0);
2390        }
2391      else if (code == 'b')
2392        {
2393          if (GET_CODE (addr) != PLUS)
2394               fatal_insn ("bad address, not (reg+disp):", addr);
2395
2396          avr_print_operand_address (file, XEXP (addr, 0));
2397        }
2398      else if (code == 'p' || code == 'r')
2399        {
2400          if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
2401            fatal_insn ("bad address, not post_inc or pre_dec:", addr);
2402
2403          if (code == 'p')
2404            avr_print_operand_address (file, XEXP (addr, 0));  /* X, Y, Z */
2405          else
2406            avr_print_operand (file, XEXP (addr, 0), 0);  /* r26, r28, r30 */
2407        }
2408      else if (GET_CODE (addr) == PLUS)
2409        {
2410          avr_print_operand_address (file, XEXP (addr,0));
2411          if (REGNO (XEXP (addr, 0)) == REG_X)
2412            fatal_insn ("internal compiler error.  Bad address:"
2413                        ,addr);
2414          fputc ('+', file);
2415          avr_print_operand (file, XEXP (addr,1), code);
2416        }
2417      else
2418        avr_print_operand_address (file, addr);
2419    }
2420  else if (code == 'i')
2421    {
2422      if (GET_CODE (x) == SYMBOL_REF && (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_IO))
2423	avr_print_operand_address
2424	  (file, plus_constant (HImode, x, -avr_arch->sfr_offset));
2425      else
2426	fatal_insn ("bad address, not an I/O address:", x);
2427    }
2428  else if (code == 'x')
2429    {
2430      /* Constant progmem address - like used in jmp or call */
2431      if (0 == text_segment_operand (x, VOIDmode))
2432        if (warning (0, "accessing program memory"
2433                     " with data memory address"))
2434          {
2435            output_addr_const (stderr, x);
2436            fprintf(stderr,"\n");
2437          }
2438      /* Use normal symbol for direct address no linker trampoline needed */
2439      output_addr_const (file, x);
2440    }
2441  else if (CONST_FIXED_P (x))
2442    {
2443      HOST_WIDE_INT ival = INTVAL (avr_to_int_mode (x));
2444      if (code != 0)
2445        output_operand_lossage ("Unsupported code '%c' for fixed-point:",
2446                                code);
2447      fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival);
2448    }
2449  else if (GET_CODE (x) == CONST_DOUBLE)
2450    {
2451      long val;
2452      REAL_VALUE_TYPE rv;
2453      if (GET_MODE (x) != SFmode)
2454        fatal_insn ("internal compiler error.  Unknown mode:", x);
2455      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
2456      REAL_VALUE_TO_TARGET_SINGLE (rv, val);
2457      fprintf (file, "0x%lx", val);
2458    }
2459  else if (GET_CODE (x) == CONST_STRING)
2460    fputs (XSTR (x, 0), file);
2461  else if (code == 'j')
2462    fputs (cond_string (GET_CODE (x)), file);
2463  else if (code == 'k')
2464    fputs (cond_string (reverse_condition (GET_CODE (x))), file);
2465  else
2466    avr_print_operand_address (file, x);
2467}
2468
2469
2470/* Worker function for `NOTICE_UPDATE_CC'.  */
2471/* Update the condition code in the INSN.  */
2472
2473void
2474avr_notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx_insn *insn)
2475{
2476  rtx set;
2477  enum attr_cc cc = get_attr_cc (insn);
2478
2479  switch (cc)
2480    {
2481    default:
2482      break;
2483
2484    case CC_PLUS:
2485    case CC_LDI:
2486      {
2487        rtx *op = recog_data.operand;
2488        int len_dummy, icc;
2489
2490        /* Extract insn's operands.  */
2491        extract_constrain_insn_cached (insn);
2492
2493        switch (cc)
2494          {
2495          default:
2496            gcc_unreachable();
2497
2498          case CC_PLUS:
2499            avr_out_plus (insn, op, &len_dummy, &icc);
2500            cc = (enum attr_cc) icc;
2501            break;
2502
2503          case CC_LDI:
2504
2505            cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
2506                  && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
2507              /* Loading zero-reg with 0 uses CLR and thus clobbers cc0.  */
2508              ? CC_CLOBBER
2509              /* Any other "r,rL" combination does not alter cc0.  */
2510              : CC_NONE;
2511
2512            break;
2513          } /* inner switch */
2514
2515        break;
2516      }
2517    } /* outer swicth */
2518
2519  switch (cc)
2520    {
2521    default:
2522      /* Special values like CC_OUT_PLUS from above have been
2523         mapped to "standard" CC_* values so we never come here.  */
2524
2525      gcc_unreachable();
2526      break;
2527
2528    case CC_NONE:
2529      /* Insn does not affect CC at all.  */
2530      break;
2531
2532    case CC_SET_N:
2533      CC_STATUS_INIT;
2534      break;
2535
2536    case CC_SET_ZN:
2537      set = single_set (insn);
2538      CC_STATUS_INIT;
2539      if (set)
2540        {
2541          cc_status.flags |= CC_NO_OVERFLOW;
2542          cc_status.value1 = SET_DEST (set);
2543        }
2544      break;
2545
2546    case CC_SET_VZN:
2547      /* Insn like INC, DEC, NEG that set Z,N,V.  We currently don't make use
2548         of this combination, cf. also PR61055.  */
2549      CC_STATUS_INIT;
2550      break;
2551
2552    case CC_SET_CZN:
2553      /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2554         The V flag may or may not be known but that's ok because
2555         alter_cond will change tests to use EQ/NE.  */
2556      set = single_set (insn);
2557      CC_STATUS_INIT;
2558      if (set)
2559        {
2560          cc_status.value1 = SET_DEST (set);
2561          cc_status.flags |= CC_OVERFLOW_UNUSABLE;
2562        }
2563      break;
2564
2565    case CC_COMPARE:
2566      set = single_set (insn);
2567      CC_STATUS_INIT;
2568      if (set)
2569        cc_status.value1 = SET_SRC (set);
2570      break;
2571
2572    case CC_CLOBBER:
2573      /* Insn doesn't leave CC in a usable state.  */
2574      CC_STATUS_INIT;
2575      break;
2576    }
2577}
2578
2579/* Choose mode for jump insn:
2580   1 - relative jump in range -63 <= x <= 62 ;
2581   2 - relative jump in range -2046 <= x <= 2045 ;
2582   3 - absolute jump (only for ATmega[16]03).  */
2583
2584int
2585avr_jump_mode (rtx x, rtx_insn *insn)
2586{
2587  int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
2588                                            ? XEXP (x, 0) : x));
2589  int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
2590  int jump_distance = cur_addr - dest_addr;
2591
2592  if (-63 <= jump_distance && jump_distance <= 62)
2593    return 1;
2594  else if (-2046 <= jump_distance && jump_distance <= 2045)
2595    return 2;
2596  else if (AVR_HAVE_JMP_CALL)
2597    return 3;
2598
2599  return 2;
2600}
2601
2602/* Return an AVR condition jump commands.
2603   X is a comparison RTX.
2604   LEN is a number returned by avr_jump_mode function.
2605   If REVERSE nonzero then condition code in X must be reversed.  */
2606
2607const char*
2608ret_cond_branch (rtx x, int len, int reverse)
2609{
2610  RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
2611
2612  switch (cond)
2613    {
2614    case GT:
2615      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2616	return (len == 1 ? ("breq .+2" CR_TAB
2617			    "brpl %0") :
2618		len == 2 ? ("breq .+4" CR_TAB
2619			    "brmi .+2" CR_TAB
2620			    "rjmp %0") :
2621		("breq .+6" CR_TAB
2622		 "brmi .+4" CR_TAB
2623		 "jmp %0"));
2624
2625      else
2626	return (len == 1 ? ("breq .+2" CR_TAB
2627			    "brge %0") :
2628		len == 2 ? ("breq .+4" CR_TAB
2629			    "brlt .+2" CR_TAB
2630			    "rjmp %0") :
2631		("breq .+6" CR_TAB
2632		 "brlt .+4" CR_TAB
2633		 "jmp %0"));
2634    case GTU:
2635      return (len == 1 ? ("breq .+2" CR_TAB
2636                          "brsh %0") :
2637              len == 2 ? ("breq .+4" CR_TAB
2638                          "brlo .+2" CR_TAB
2639                          "rjmp %0") :
2640              ("breq .+6" CR_TAB
2641               "brlo .+4" CR_TAB
2642               "jmp %0"));
2643    case LE:
2644      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2645	return (len == 1 ? ("breq %0" CR_TAB
2646			    "brmi %0") :
2647		len == 2 ? ("breq .+2" CR_TAB
2648			    "brpl .+2" CR_TAB
2649			    "rjmp %0") :
2650		("breq .+2" CR_TAB
2651		 "brpl .+4" CR_TAB
2652		 "jmp %0"));
2653      else
2654	return (len == 1 ? ("breq %0" CR_TAB
2655			    "brlt %0") :
2656		len == 2 ? ("breq .+2" CR_TAB
2657			    "brge .+2" CR_TAB
2658			    "rjmp %0") :
2659		("breq .+2" CR_TAB
2660		 "brge .+4" CR_TAB
2661		 "jmp %0"));
2662    case LEU:
2663      return (len == 1 ? ("breq %0" CR_TAB
2664                          "brlo %0") :
2665              len == 2 ? ("breq .+2" CR_TAB
2666                          "brsh .+2" CR_TAB
2667			  "rjmp %0") :
2668              ("breq .+2" CR_TAB
2669               "brsh .+4" CR_TAB
2670	       "jmp %0"));
2671    default:
2672      if (reverse)
2673	{
2674	  switch (len)
2675	    {
2676	    case 1:
2677	      return "br%k1 %0";
2678	    case 2:
2679	      return ("br%j1 .+2" CR_TAB
2680		      "rjmp %0");
2681	    default:
2682	      return ("br%j1 .+4" CR_TAB
2683		      "jmp %0");
2684	    }
2685	}
2686      else
2687        {
2688          switch (len)
2689            {
2690            case 1:
2691              return "br%j1 %0";
2692            case 2:
2693              return ("br%k1 .+2" CR_TAB
2694                      "rjmp %0");
2695            default:
2696              return ("br%k1 .+4" CR_TAB
2697                      "jmp %0");
2698            }
2699        }
2700    }
2701  return "";
2702}
2703
2704
2705/* Worker function for `FINAL_PRESCAN_INSN'.  */
2706/* Output insn cost for next insn.  */
2707
2708void
2709avr_final_prescan_insn (rtx_insn *insn, rtx *operand ATTRIBUTE_UNUSED,
2710                        int num_operands ATTRIBUTE_UNUSED)
2711{
2712  if (avr_log.rtx_costs)
2713    {
2714      rtx set = single_set (insn);
2715
2716      if (set)
2717        fprintf (asm_out_file, "/* DEBUG: cost = %d.  */\n",
2718                 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2719      else
2720        fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d.  */\n",
2721                 rtx_cost (PATTERN (insn), INSN, 0,
2722                           optimize_insn_for_speed_p()));
2723    }
2724}
2725
2726/* Return 0 if undefined, 1 if always true or always false.  */
2727
2728int
2729avr_simplify_comparison_p (machine_mode mode, RTX_CODE op, rtx x)
2730{
2731  unsigned int max = (mode == QImode ? 0xff :
2732                      mode == HImode ? 0xffff :
2733                      mode == PSImode ? 0xffffff :
2734                      mode == SImode ? 0xffffffff : 0);
2735  if (max && op && CONST_INT_P (x))
2736    {
2737      if (unsigned_condition (op) != op)
2738        max >>= 1;
2739
2740      if (max != (INTVAL (x) & max)
2741          && INTVAL (x) != 0xff)
2742        return 1;
2743    }
2744  return 0;
2745}
2746
2747
2748/* Worker function for `FUNCTION_ARG_REGNO_P'.  */
2749/* Returns nonzero if REGNO is the number of a hard
2750   register in which function arguments are sometimes passed.  */
2751
2752int
2753avr_function_arg_regno_p(int r)
2754{
2755  return (AVR_TINY ? r >= 20 && r <= 25 : r >= 8 && r <= 25);
2756}
2757
2758
2759/* Worker function for `INIT_CUMULATIVE_ARGS'.  */
2760/* Initializing the variable cum for the state at the beginning
2761   of the argument list.  */
2762
2763void
2764avr_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
2765                          tree fndecl ATTRIBUTE_UNUSED)
2766{
2767  cum->nregs = AVR_TINY ? 6 : 18;
2768  cum->regno = FIRST_CUM_REG;
2769  if (!libname && stdarg_p (fntype))
2770    cum->nregs = 0;
2771
2772  /* Assume the calle may be tail called */
2773
2774  cfun->machine->sibcall_fails = 0;
2775}
2776
2777/* Returns the number of registers to allocate for a function argument.  */
2778
2779static int
2780avr_num_arg_regs (machine_mode mode, const_tree type)
2781{
2782  int size;
2783
2784  if (mode == BLKmode)
2785    size = int_size_in_bytes (type);
2786  else
2787    size = GET_MODE_SIZE (mode);
2788
2789  /* Align all function arguments to start in even-numbered registers.
2790     Odd-sized arguments leave holes above them.  */
2791
2792  return (size + 1) & ~1;
2793}
2794
2795
2796/* Implement `TARGET_FUNCTION_ARG'.  */
2797/* Controls whether a function argument is passed
2798   in a register, and which register.  */
2799
2800static rtx
2801avr_function_arg (cumulative_args_t cum_v, machine_mode mode,
2802                  const_tree type, bool named ATTRIBUTE_UNUSED)
2803{
2804  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2805  int bytes = avr_num_arg_regs (mode, type);
2806
2807  if (cum->nregs && bytes <= cum->nregs)
2808    return gen_rtx_REG (mode, cum->regno - bytes);
2809
2810  return NULL_RTX;
2811}
2812
2813
2814/* Implement `TARGET_FUNCTION_ARG_ADVANCE'.  */
2815/* Update the summarizer variable CUM to advance past an argument
2816   in the argument list.  */
2817
2818static void
2819avr_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
2820                          const_tree type, bool named ATTRIBUTE_UNUSED)
2821{
2822  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2823  int bytes = avr_num_arg_regs (mode, type);
2824
2825  cum->nregs -= bytes;
2826  cum->regno -= bytes;
2827
2828  /* A parameter is being passed in a call-saved register.  As the original
2829     contents of these regs has to be restored before leaving the function,
2830     a function must not pass arguments in call-saved regs in order to get
2831     tail-called.  */
2832
2833  if (cum->regno >= 8
2834      && cum->nregs >= 0
2835      && !call_used_regs[cum->regno])
2836    {
2837      /* FIXME: We ship info on failing tail-call in struct machine_function.
2838         This uses internals of calls.c:expand_call() and the way args_so_far
2839         is used.  targetm.function_ok_for_sibcall() needs to be extended to
2840         pass &args_so_far, too.  At present, CUMULATIVE_ARGS is target
2841         dependent so that such an extension is not wanted.  */
2842
2843      cfun->machine->sibcall_fails = 1;
2844    }
2845
2846  /* Test if all registers needed by the ABI are actually available.  If the
2847     user has fixed a GPR needed to pass an argument, an (implicit) function
2848     call will clobber that fixed register.  See PR45099 for an example.  */
2849
2850  if (cum->regno >= 8
2851      && cum->nregs >= 0)
2852    {
2853      int regno;
2854
2855      for (regno = cum->regno; regno < cum->regno + bytes; regno++)
2856        if (fixed_regs[regno])
2857          warning (0, "fixed register %s used to pass parameter to function",
2858                   reg_names[regno]);
2859    }
2860
2861  if (cum->nregs <= 0)
2862    {
2863      cum->nregs = 0;
2864      cum->regno = FIRST_CUM_REG;
2865    }
2866}
2867
2868/* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2869/* Decide whether we can make a sibling call to a function.  DECL is the
2870   declaration of the function being targeted by the call and EXP is the
2871   CALL_EXPR representing the call.  */
2872
2873static bool
2874avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
2875{
2876  tree fntype_callee;
2877
2878  /* Tail-calling must fail if callee-saved regs are used to pass
2879     function args.  We must not tail-call when `epilogue_restores'
2880     is used.  Unfortunately, we cannot tell at this point if that
2881     actually will happen or not, and we cannot step back from
2882     tail-calling.  Thus, we inhibit tail-calling with -mcall-prologues.  */
2883
2884  if (cfun->machine->sibcall_fails
2885      || TARGET_CALL_PROLOGUES)
2886    {
2887      return false;
2888    }
2889
2890  fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
2891
2892  if (decl_callee)
2893    {
2894      decl_callee = TREE_TYPE (decl_callee);
2895    }
2896  else
2897    {
2898      decl_callee = fntype_callee;
2899
2900      while (FUNCTION_TYPE != TREE_CODE (decl_callee)
2901             && METHOD_TYPE != TREE_CODE (decl_callee))
2902        {
2903          decl_callee = TREE_TYPE (decl_callee);
2904        }
2905    }
2906
2907  /* Ensure that caller and callee have compatible epilogues */
2908
2909  if (cfun->machine->is_interrupt
2910      || cfun->machine->is_signal
2911      || cfun->machine->is_naked
2912      || avr_naked_function_p (decl_callee)
2913      /* FIXME: For OS_task and OS_main, this might be over-conservative.  */
2914      || (avr_OS_task_function_p (decl_callee)
2915          != cfun->machine->is_OS_task)
2916      || (avr_OS_main_function_p (decl_callee)
2917          != cfun->machine->is_OS_main))
2918    {
2919      return false;
2920    }
2921
2922  return true;
2923}
2924
2925/***********************************************************************
2926  Functions for outputting various mov's for a various modes
2927************************************************************************/
2928
2929/* Return true if a value of mode MODE is read from flash by
2930   __load_* function from libgcc.  */
2931
2932bool
2933avr_load_libgcc_p (rtx op)
2934{
2935  machine_mode mode = GET_MODE (op);
2936  int n_bytes = GET_MODE_SIZE (mode);
2937
2938  return (n_bytes > 2
2939          && !AVR_HAVE_LPMX
2940          && avr_mem_flash_p (op));
2941}
2942
2943/* Return true if a value of mode MODE is read by __xload_* function.  */
2944
2945bool
2946avr_xload_libgcc_p (machine_mode mode)
2947{
2948  int n_bytes = GET_MODE_SIZE (mode);
2949
2950  return (n_bytes > 1
2951          || avr_n_flash > 1);
2952}
2953
2954
2955/* Fixme: This is a hack because secondary reloads don't works as expected.
2956
2957   Find an unused d-register to be used as scratch in INSN.
2958   EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2959   is a register, skip all possible return values that overlap EXCLUDE.
2960   The policy for the returned register is similar to that of
2961   `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2962   of INSN.
2963
2964   Return a QImode d-register or NULL_RTX if nothing found.  */
2965
2966static rtx
2967avr_find_unused_d_reg (rtx_insn *insn, rtx exclude)
2968{
2969  int regno;
2970  bool isr_p = (avr_interrupt_function_p (current_function_decl)
2971                || avr_signal_function_p (current_function_decl));
2972
2973  for (regno = 16; regno < 32; regno++)
2974    {
2975      rtx reg = all_regs_rtx[regno];
2976
2977      if ((exclude
2978           && reg_overlap_mentioned_p (exclude, reg))
2979          || fixed_regs[regno])
2980        {
2981          continue;
2982        }
2983
2984      /* Try non-live register */
2985
2986      if (!df_regs_ever_live_p (regno)
2987          && (TREE_THIS_VOLATILE (current_function_decl)
2988              || cfun->machine->is_OS_task
2989              || cfun->machine->is_OS_main
2990              || (!isr_p && call_used_regs[regno])))
2991        {
2992          return reg;
2993        }
2994
2995      /* Any live register can be used if it is unused after.
2996         Prologue/epilogue will care for it as needed.  */
2997
2998      if (df_regs_ever_live_p (regno)
2999          && reg_unused_after (insn, reg))
3000        {
3001          return reg;
3002        }
3003    }
3004
3005  return NULL_RTX;
3006}
3007
3008
3009/* Helper function for the next function in the case where only restricted
3010   version of LPM instruction is available.  */
3011
3012static const char*
3013avr_out_lpm_no_lpmx (rtx_insn *insn, rtx *xop, int *plen)
3014{
3015  rtx dest = xop[0];
3016  rtx addr = xop[1];
3017  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
3018  int regno_dest;
3019
3020  regno_dest = REGNO (dest);
3021
3022  /* The implicit target register of LPM.  */
3023  xop[3] = lpm_reg_rtx;
3024
3025  switch (GET_CODE (addr))
3026    {
3027    default:
3028      gcc_unreachable();
3029
3030    case REG:
3031
3032      gcc_assert (REG_Z == REGNO (addr));
3033
3034      switch (n_bytes)
3035        {
3036        default:
3037          gcc_unreachable();
3038
3039        case 1:
3040          avr_asm_len ("%4lpm", xop, plen, 1);
3041
3042          if (regno_dest != LPM_REGNO)
3043            avr_asm_len ("mov %0,%3", xop, plen, 1);
3044
3045          return "";
3046
3047        case 2:
3048          if (REGNO (dest) == REG_Z)
3049            return avr_asm_len ("%4lpm"      CR_TAB
3050                                "push %3"    CR_TAB
3051                                "adiw %2,1"  CR_TAB
3052                                "%4lpm"      CR_TAB
3053                                "mov %B0,%3" CR_TAB
3054                                "pop %A0", xop, plen, 6);
3055
3056          avr_asm_len ("%4lpm"      CR_TAB
3057                       "mov %A0,%3" CR_TAB
3058                       "adiw %2,1"  CR_TAB
3059                       "%4lpm"      CR_TAB
3060                       "mov %B0,%3", xop, plen, 5);
3061
3062          if (!reg_unused_after (insn, addr))
3063            avr_asm_len ("sbiw %2,1", xop, plen, 1);
3064
3065          break; /* 2 */
3066        }
3067
3068      break; /* REG */
3069
3070    case POST_INC:
3071
3072      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
3073                  && n_bytes <= 4);
3074
3075      if (regno_dest == LPM_REGNO)
3076        avr_asm_len ("%4lpm"      CR_TAB
3077                     "adiw %2,1", xop, plen, 2);
3078      else
3079        avr_asm_len ("%4lpm"      CR_TAB
3080                     "mov %A0,%3" CR_TAB
3081                     "adiw %2,1", xop, plen, 3);
3082
3083      if (n_bytes >= 2)
3084        avr_asm_len ("%4lpm"      CR_TAB
3085                     "mov %B0,%3" CR_TAB
3086                     "adiw %2,1", xop, plen, 3);
3087
3088      if (n_bytes >= 3)
3089        avr_asm_len ("%4lpm"      CR_TAB
3090                     "mov %C0,%3" CR_TAB
3091                     "adiw %2,1", xop, plen, 3);
3092
3093      if (n_bytes >= 4)
3094        avr_asm_len ("%4lpm"      CR_TAB
3095                     "mov %D0,%3" CR_TAB
3096                     "adiw %2,1", xop, plen, 3);
3097
3098      break; /* POST_INC */
3099
3100    } /* switch CODE (addr) */
3101
3102  return "";
3103}
3104
3105
3106/* If PLEN == NULL: Ouput instructions to load a value from a memory location
3107   OP[1] in AS1 to register OP[0].
3108   If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
3109   Return "".  */
3110
3111const char*
3112avr_out_lpm (rtx_insn *insn, rtx *op, int *plen)
3113{
3114  rtx xop[7];
3115  rtx dest = op[0];
3116  rtx src = SET_SRC (single_set (insn));
3117  rtx addr;
3118  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
3119  int segment;
3120  RTX_CODE code;
3121  addr_space_t as = MEM_ADDR_SPACE (src);
3122
3123  if (plen)
3124    *plen = 0;
3125
3126  if (MEM_P (dest))
3127    {
3128      warning (0, "writing to address space %qs not supported",
3129               avr_addrspace[MEM_ADDR_SPACE (dest)].name);
3130
3131      return "";
3132    }
3133
3134  addr = XEXP (src, 0);
3135  code = GET_CODE (addr);
3136
3137  gcc_assert (REG_P (dest));
3138  gcc_assert (REG == code || POST_INC == code);
3139
3140  xop[0] = dest;
3141  xop[1] = addr;
3142  xop[2] = lpm_addr_reg_rtx;
3143  xop[4] = xstring_empty;
3144  xop[5] = tmp_reg_rtx;
3145  xop[6] = XEXP (rampz_rtx, 0);
3146
3147  segment = avr_addrspace[as].segment;
3148
3149  /* Set RAMPZ as needed.  */
3150
3151  if (segment)
3152    {
3153      xop[4] = GEN_INT (segment);
3154      xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx);
3155
3156      if (xop[3] != NULL_RTX)
3157        {
3158          avr_asm_len ("ldi %3,%4" CR_TAB
3159                       "out %i6,%3", xop, plen, 2);
3160        }
3161      else if (segment == 1)
3162        {
3163          avr_asm_len ("clr %5" CR_TAB
3164                       "inc %5" CR_TAB
3165                       "out %i6,%5", xop, plen, 3);
3166        }
3167      else
3168        {
3169          avr_asm_len ("mov %5,%2"         CR_TAB
3170                       "ldi %2,%4"         CR_TAB
3171                       "out %i6,%2"  CR_TAB
3172                       "mov %2,%5", xop, plen, 4);
3173        }
3174
3175      xop[4] = xstring_e;
3176
3177      if (!AVR_HAVE_ELPMX)
3178        return avr_out_lpm_no_lpmx (insn, xop, plen);
3179    }
3180  else if (!AVR_HAVE_LPMX)
3181    {
3182      return avr_out_lpm_no_lpmx (insn, xop, plen);
3183    }
3184
3185  /* We have [E]LPMX: Output reading from Flash the comfortable way.  */
3186
3187  switch (GET_CODE (addr))
3188    {
3189    default:
3190      gcc_unreachable();
3191
3192    case REG:
3193
3194      gcc_assert (REG_Z == REGNO (addr));
3195
3196      switch (n_bytes)
3197        {
3198        default:
3199          gcc_unreachable();
3200
3201        case 1:
3202          return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);
3203
3204        case 2:
3205          if (REGNO (dest) == REG_Z)
3206            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
3207                                "%4lpm %B0,%a2" CR_TAB
3208                                "mov %A0,%5", xop, plen, 3);
3209          else
3210            {
3211              avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3212                           "%4lpm %B0,%a2", xop, plen, 2);
3213
3214              if (!reg_unused_after (insn, addr))
3215                avr_asm_len ("sbiw %2,1", xop, plen, 1);
3216            }
3217
3218          break; /* 2 */
3219
3220        case 3:
3221
3222          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3223                       "%4lpm %B0,%a2+" CR_TAB
3224                       "%4lpm %C0,%a2", xop, plen, 3);
3225
3226          if (!reg_unused_after (insn, addr))
3227            avr_asm_len ("sbiw %2,2", xop, plen, 1);
3228
3229          break; /* 3 */
3230
3231        case 4:
3232
3233          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3234                       "%4lpm %B0,%a2+", xop, plen, 2);
3235
3236          if (REGNO (dest) == REG_Z - 2)
3237            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
3238                                "%4lpm %C0,%a2"          CR_TAB
3239                                "mov %D0,%5", xop, plen, 3);
3240          else
3241            {
3242              avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
3243                           "%4lpm %D0,%a2", xop, plen, 2);
3244
3245              if (!reg_unused_after (insn, addr))
3246                avr_asm_len ("sbiw %2,3", xop, plen, 1);
3247            }
3248
3249          break; /* 4 */
3250        } /* n_bytes */
3251
3252      break; /* REG */
3253
3254    case POST_INC:
3255
3256      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
3257                  && n_bytes <= 4);
3258
3259      avr_asm_len                    ("%4lpm %A0,%a2+", xop, plen, 1);
3260      if (n_bytes >= 2)  avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
3261      if (n_bytes >= 3)  avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
3262      if (n_bytes >= 4)  avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);
3263
3264      break; /* POST_INC */
3265
3266    } /* switch CODE (addr) */
3267
3268  if (xop[4] == xstring_e && AVR_HAVE_RAMPD)
3269    {
3270      /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM.  */
3271
3272      xop[0] = zero_reg_rtx;
3273      avr_asm_len ("out %i6,%0", xop, plen, 1);
3274    }
3275
3276  return "";
3277}
3278
3279
3280/* Worker function for xload_8 insn.  */
3281
3282const char*
3283avr_out_xload (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
3284{
3285  rtx xop[4];
3286
3287  xop[0] = op[0];
3288  xop[1] = op[1];
3289  xop[2] = lpm_addr_reg_rtx;
3290  xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;
3291
3292  avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, -1);
3293
3294  avr_asm_len ("sbrc %1,7" CR_TAB
3295               "ld %3,%a2", xop, plen, 2);
3296
3297  if (REGNO (xop[0]) != REGNO (xop[3]))
3298    avr_asm_len ("mov %0,%3", xop, plen, 1);
3299
3300  return "";
3301}
3302
3303
3304const char*
3305output_movqi (rtx_insn *insn, rtx operands[], int *plen)
3306{
3307  rtx dest = operands[0];
3308  rtx src = operands[1];
3309
3310  if (avr_mem_flash_p (src)
3311      || avr_mem_flash_p (dest))
3312    {
3313      return avr_out_lpm (insn, operands, plen);
3314    }
3315
3316  gcc_assert (1 == GET_MODE_SIZE (GET_MODE (dest)));
3317
3318  if (REG_P (dest))
3319    {
3320      if (REG_P (src)) /* mov r,r */
3321        {
3322          if (test_hard_reg_class (STACK_REG, dest))
3323            return avr_asm_len ("out %0,%1", operands, plen, -1);
3324          else if (test_hard_reg_class (STACK_REG, src))
3325            return avr_asm_len ("in %0,%1", operands, plen, -1);
3326
3327          return avr_asm_len ("mov %0,%1", operands, plen, -1);
3328        }
3329      else if (CONSTANT_P (src))
3330        {
3331          output_reload_in_const (operands, NULL_RTX, plen, false);
3332          return "";
3333        }
3334      else if (MEM_P (src))
3335        return out_movqi_r_mr (insn, operands, plen); /* mov r,m */
3336    }
3337  else if (MEM_P (dest))
3338    {
3339      rtx xop[2];
3340
3341      xop[0] = dest;
3342      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3343
3344      return out_movqi_mr_r (insn, xop, plen);
3345    }
3346
3347  return "";
3348}
3349
3350
3351const char *
3352output_movhi (rtx_insn *insn, rtx xop[], int *plen)
3353{
3354  rtx dest = xop[0];
3355  rtx src = xop[1];
3356
3357  gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);
3358
3359  if (avr_mem_flash_p (src)
3360      || avr_mem_flash_p (dest))
3361    {
3362      return avr_out_lpm (insn, xop, plen);
3363    }
3364
3365  gcc_assert (2 == GET_MODE_SIZE (GET_MODE (dest)));
3366
3367  if (REG_P (dest))
3368    {
3369      if (REG_P (src)) /* mov r,r */
3370        {
3371          if (test_hard_reg_class (STACK_REG, dest))
3372            {
3373              if (AVR_HAVE_8BIT_SP)
3374                return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);
3375
3376              if (AVR_XMEGA)
3377                return avr_asm_len ("out __SP_L__,%A1" CR_TAB
3378                                    "out __SP_H__,%B1", xop, plen, -2);
3379
3380              /* Use simple load of SP if no interrupts are  used.  */
3381
3382              return TARGET_NO_INTERRUPTS
3383                ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
3384                               "out __SP_L__,%A1", xop, plen, -2)
3385                : avr_asm_len ("in __tmp_reg__,__SREG__"  CR_TAB
3386                               "cli"                      CR_TAB
3387                               "out __SP_H__,%B1"         CR_TAB
3388                               "out __SREG__,__tmp_reg__" CR_TAB
3389                               "out __SP_L__,%A1", xop, plen, -5);
3390            }
3391          else if (test_hard_reg_class (STACK_REG, src))
3392            {
3393              return !AVR_HAVE_SPH
3394                ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
3395                               "clr %B0", xop, plen, -2)
3396
3397                : avr_asm_len ("in %A0,__SP_L__" CR_TAB
3398                               "in %B0,__SP_H__", xop, plen, -2);
3399            }
3400
3401          return AVR_HAVE_MOVW
3402            ? avr_asm_len ("movw %0,%1", xop, plen, -1)
3403
3404            : avr_asm_len ("mov %A0,%A1" CR_TAB
3405                           "mov %B0,%B1", xop, plen, -2);
3406        } /* REG_P (src) */
3407      else if (CONSTANT_P (src))
3408        {
3409          return output_reload_inhi (xop, NULL, plen);
3410        }
3411      else if (MEM_P (src))
3412        {
3413          return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
3414        }
3415    }
3416  else if (MEM_P (dest))
3417    {
3418      rtx xop[2];
3419
3420      xop[0] = dest;
3421      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3422
3423      return out_movhi_mr_r (insn, xop, plen);
3424    }
3425
3426  fatal_insn ("invalid insn:", insn);
3427
3428  return "";
3429}
3430
3431
3432/* Same as out_movqi_r_mr, but TINY does not have ADIW or SBIW */
3433
3434static const char*
3435avr_out_movqi_r_mr_reg_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
3436{
3437  rtx dest = op[0];
3438  rtx src = op[1];
3439  rtx x = XEXP (src, 0);
3440
3441  avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
3442               "ld %0,%b1" , op, plen, -3);
3443
3444  if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
3445      && !reg_unused_after (insn, XEXP (x,0)))
3446    avr_asm_len (TINY_SBIW (%I1, %J1, %o1), op, plen, 2);
3447
3448  return "";
3449}
3450
3451static const char*
3452out_movqi_r_mr (rtx_insn *insn, rtx op[], int *plen)
3453{
3454  rtx dest = op[0];
3455  rtx src = op[1];
3456  rtx x = XEXP (src, 0);
3457
3458  if (CONSTANT_ADDRESS_P (x))
3459    {
3460      int n_words = AVR_TINY ? 1 : 2;
3461      return optimize > 0 && io_address_operand (x, QImode)
3462        ? avr_asm_len ("in %0,%i1", op, plen, -1)
3463        : avr_asm_len ("lds %0,%m1", op, plen, -n_words);
3464    }
3465
3466  if (GET_CODE (x) == PLUS
3467           && REG_P (XEXP (x, 0))
3468           && CONST_INT_P (XEXP (x, 1)))
3469    {
3470      /* memory access by reg+disp */
3471
3472      int disp = INTVAL (XEXP (x, 1));
3473
3474      if (AVR_TINY)
3475        return avr_out_movqi_r_mr_reg_disp_tiny (insn, op, plen);
3476
3477      if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
3478        {
3479          if (REGNO (XEXP (x, 0)) != REG_Y)
3480            fatal_insn ("incorrect insn:",insn);
3481
3482          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3483            return avr_asm_len ("adiw r28,%o1-63" CR_TAB
3484                                "ldd %0,Y+63"     CR_TAB
3485                                "sbiw r28,%o1-63", op, plen, -3);
3486
3487          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3488                              "sbci r29,hi8(-%o1)" CR_TAB
3489                              "ld %0,Y"            CR_TAB
3490                              "subi r28,lo8(%o1)"  CR_TAB
3491                              "sbci r29,hi8(%o1)", op, plen, -5);
3492        }
3493      else if (REGNO (XEXP (x, 0)) == REG_X)
3494        {
3495          /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
3496             it but I have this situation with extremal optimizing options.  */
3497
3498          avr_asm_len ("adiw r26,%o1" CR_TAB
3499                       "ld %0,X", op, plen, -2);
3500
3501          if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
3502              && !reg_unused_after (insn, XEXP (x,0)))
3503            {
3504              avr_asm_len ("sbiw r26,%o1", op, plen, 1);
3505            }
3506
3507          return "";
3508        }
3509
3510      return avr_asm_len ("ldd %0,%1", op, plen, -1);
3511    }
3512
3513  return avr_asm_len ("ld %0,%1", op, plen, -1);
3514}
3515
3516
3517/* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
3518
3519static const char*
3520avr_out_movhi_r_mr_reg_no_disp_tiny (rtx op[], int *plen)
3521{
3522  rtx dest = op[0];
3523  rtx src = op[1];
3524  rtx base = XEXP (src, 0);
3525
3526  int reg_dest = true_regnum (dest);
3527  int reg_base = true_regnum (base);
3528
3529  if (reg_dest == reg_base)         /* R = (R) */
3530    return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3531			"ld %B0,%1"          CR_TAB
3532			"mov %A0,__tmp_reg__", op, plen, -3);
3533
3534  return avr_asm_len ("ld %A0,%1"             CR_TAB
3535                      TINY_ADIW (%E1, %F1, 1) CR_TAB
3536                      "ld %B0,%1"             CR_TAB
3537                      TINY_SBIW (%E1, %F1, 1), op, plen, -6);
3538}
3539
3540
3541/* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
3542
3543static const char*
3544avr_out_movhi_r_mr_reg_disp_tiny (rtx op[], int *plen)
3545{
3546  rtx dest = op[0];
3547  rtx src = op[1];
3548  rtx base = XEXP (src, 0);
3549
3550  int reg_dest = true_regnum (dest);
3551  int reg_base = true_regnum (XEXP (base, 0));
3552
3553  if (reg_base == reg_dest)
3554    {
3555      return avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
3556                          "ld __tmp_reg__,%b1+"     CR_TAB
3557                          "ld %B0,%b1"              CR_TAB
3558                          "mov %A0,__tmp_reg__", op, plen, -5);
3559    }
3560  else
3561    {
3562      return avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
3563                          "ld %A0,%b1+"             CR_TAB
3564                          "ld %B0,%b1"              CR_TAB
3565                          TINY_SBIW (%I1, %J1, %o1+1), op, plen, -6);
3566    }
3567}
3568
3569
3570/* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
3571
3572static const char*
3573avr_out_movhi_r_mr_pre_dec_tiny (rtx_insn *insn, rtx op[], int *plen)
3574{
3575  int mem_volatile_p = 0;
3576  rtx dest = op[0];
3577  rtx src = op[1];
3578  rtx base = XEXP (src, 0);
3579
3580  /* "volatile" forces reading low byte first, even if less efficient,
3581     for correct operation with 16-bit I/O registers.  */
3582  mem_volatile_p = MEM_VOLATILE_P (src);
3583
3584  if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3585    fatal_insn ("incorrect insn:", insn);
3586
3587  if (!mem_volatile_p)
3588    return avr_asm_len ("ld %B0,%1" CR_TAB
3589                        "ld %A0,%1", op, plen, -2);
3590
3591  return avr_asm_len (TINY_SBIW (%I1, %J1, 2)  CR_TAB
3592                      "ld %A0,%p1+"            CR_TAB
3593                      "ld %B0,%p1"             CR_TAB
3594                      TINY_SBIW (%I1, %J1, 1), op, plen, -6);
3595}
3596
3597
3598static const char*
3599out_movhi_r_mr (rtx_insn *insn, rtx op[], int *plen)
3600{
3601  rtx dest = op[0];
3602  rtx src = op[1];
3603  rtx base = XEXP (src, 0);
3604  int reg_dest = true_regnum (dest);
3605  int reg_base = true_regnum (base);
3606  /* "volatile" forces reading low byte first, even if less efficient,
3607     for correct operation with 16-bit I/O registers.  */
3608  int mem_volatile_p = MEM_VOLATILE_P (src);
3609
3610  if (reg_base > 0)
3611    {
3612      if (AVR_TINY)
3613        return avr_out_movhi_r_mr_reg_no_disp_tiny (op, plen);
3614
3615      if (reg_dest == reg_base)         /* R = (R) */
3616        return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3617                            "ld %B0,%1"          CR_TAB
3618                            "mov %A0,__tmp_reg__", op, plen, -3);
3619
3620      if (reg_base != REG_X)
3621        return avr_asm_len ("ld %A0,%1" CR_TAB
3622                            "ldd %B0,%1+1", op, plen, -2);
3623
3624      avr_asm_len ("ld %A0,X+" CR_TAB
3625                   "ld %B0,X", op, plen, -2);
3626
3627      if (!reg_unused_after (insn, base))
3628        avr_asm_len ("sbiw r26,1", op, plen, 1);
3629
3630      return "";
3631    }
3632  else if (GET_CODE (base) == PLUS) /* (R + i) */
3633    {
3634      int disp = INTVAL (XEXP (base, 1));
3635      int reg_base = true_regnum (XEXP (base, 0));
3636
3637      if (AVR_TINY)
3638        return avr_out_movhi_r_mr_reg_disp_tiny (op, plen);
3639
3640      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3641        {
3642          if (REGNO (XEXP (base, 0)) != REG_Y)
3643            fatal_insn ("incorrect insn:",insn);
3644
3645          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
3646            ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
3647                           "ldd %A0,Y+62"    CR_TAB
3648                           "ldd %B0,Y+63"    CR_TAB
3649                           "sbiw r28,%o1-62", op, plen, -4)
3650
3651              : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3652                           "sbci r29,hi8(-%o1)" CR_TAB
3653                           "ld %A0,Y"           CR_TAB
3654                           "ldd %B0,Y+1"        CR_TAB
3655                           "subi r28,lo8(%o1)"  CR_TAB
3656                           "sbci r29,hi8(%o1)", op, plen, -6);
3657        }
3658
3659      /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
3660         it but I have this situation with extremal
3661         optimization options.  */
3662
3663      if (reg_base == REG_X)
3664        return reg_base == reg_dest
3665          ? avr_asm_len ("adiw r26,%o1"      CR_TAB
3666                         "ld __tmp_reg__,X+" CR_TAB
3667                         "ld %B0,X"          CR_TAB
3668                         "mov %A0,__tmp_reg__", op, plen, -4)
3669
3670          : avr_asm_len ("adiw r26,%o1" CR_TAB
3671                         "ld %A0,X+"    CR_TAB
3672                         "ld %B0,X"     CR_TAB
3673                         "sbiw r26,%o1+1", op, plen, -4);
3674
3675      return reg_base == reg_dest
3676        ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3677                       "ldd %B0,%B1"         CR_TAB
3678                       "mov %A0,__tmp_reg__", op, plen, -3)
3679
3680        : avr_asm_len ("ldd %A0,%A1" CR_TAB
3681                       "ldd %B0,%B1", op, plen, -2);
3682    }
3683  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3684    {
3685      if (AVR_TINY)
3686	return avr_out_movhi_r_mr_pre_dec_tiny (insn, op, plen);
3687
3688      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3689        fatal_insn ("incorrect insn:", insn);
3690
3691      if (!mem_volatile_p)
3692        return avr_asm_len ("ld %B0,%1" CR_TAB
3693                            "ld %A0,%1", op, plen, -2);
3694
3695      return REGNO (XEXP (base, 0)) == REG_X
3696        ? avr_asm_len ("sbiw r26,2"  CR_TAB
3697                       "ld %A0,X+"   CR_TAB
3698                       "ld %B0,X"    CR_TAB
3699                       "sbiw r26,1", op, plen, -4)
3700
3701        : avr_asm_len ("sbiw %r1,2"  CR_TAB
3702                       "ld %A0,%p1"  CR_TAB
3703                       "ldd %B0,%p1+1", op, plen, -3);
3704    }
3705  else if (GET_CODE (base) == POST_INC) /* (R++) */
3706    {
3707      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3708        fatal_insn ("incorrect insn:", insn);
3709
3710      return avr_asm_len ("ld %A0,%1"  CR_TAB
3711                          "ld %B0,%1", op, plen, -2);
3712    }
3713  else if (CONSTANT_ADDRESS_P (base))
3714    {
3715      int n_words = AVR_TINY ? 2 : 4;
3716      return optimize > 0 && io_address_operand (base, HImode)
3717        ? avr_asm_len ("in %A0,%i1" CR_TAB
3718                       "in %B0,%i1+1", op, plen, -2)
3719
3720        : avr_asm_len ("lds %A0,%m1" CR_TAB
3721                       "lds %B0,%m1+1", op, plen, -n_words);
3722    }
3723
3724  fatal_insn ("unknown move insn:",insn);
3725  return "";
3726}
3727
3728static const char*
3729avr_out_movsi_r_mr_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *l)
3730{
3731  rtx dest = op[0];
3732  rtx src = op[1];
3733  rtx base = XEXP (src, 0);
3734  int reg_dest = true_regnum (dest);
3735  int reg_base = true_regnum (base);
3736
3737  if (reg_dest == reg_base)
3738    {
3739      /* "ld r26,-X" is undefined */
3740      return *l = 9, (TINY_ADIW (%E1, %F1, 3) CR_TAB
3741		      "ld %D0,%1"             CR_TAB
3742		      "ld %C0,-%1"            CR_TAB
3743		      "ld __tmp_reg__,-%1"    CR_TAB
3744		      TINY_SBIW (%E1, %F1, 1) CR_TAB
3745		      "ld %A0,%1"             CR_TAB
3746		      "mov %B0,__tmp_reg__");
3747    }
3748  else if (reg_dest == reg_base - 2)
3749    {
3750      return *l = 5, ("ld %A0,%1+"            CR_TAB
3751		      "ld %B0,%1+"            CR_TAB
3752		      "ld __tmp_reg__,%1+"    CR_TAB
3753		      "ld %D0,%1"             CR_TAB
3754		      "mov %C0,__tmp_reg__");
3755    }
3756  else if (reg_unused_after (insn, base))
3757    {
3758      return *l = 4, ("ld %A0,%1+"    CR_TAB
3759		      "ld %B0,%1+"    CR_TAB
3760		      "ld %C0,%1+"    CR_TAB
3761		      "ld %D0,%1");
3762    }
3763  else
3764    {
3765      return *l = 6, ("ld %A0,%1+"    CR_TAB
3766		      "ld %B0,%1+"    CR_TAB
3767		      "ld %C0,%1+"    CR_TAB
3768		      "ld %D0,%1"     CR_TAB
3769		      TINY_SBIW (%E1, %F1, 3));
3770    }
3771}
3772
3773
3774static const char*
3775avr_out_movsi_r_mr_reg_disp_tiny (rtx_insn *insn, rtx op[], int *l)
3776{
3777  rtx dest = op[0];
3778  rtx src = op[1];
3779  rtx base = XEXP (src, 0);
3780  int reg_dest = true_regnum (dest);
3781  int reg_base = true_regnum (XEXP (base, 0));
3782
3783  if (reg_dest == reg_base)
3784    {
3785      /* "ld r26,-X" is undefined */
3786      return *l = 9, (TINY_ADIW (%I1, %J1, %o1+3) CR_TAB
3787                      "ld %D0,%b1"                CR_TAB
3788                      "ld %C0,-%b1"               CR_TAB
3789                      "ld __tmp_reg__,-%b1"       CR_TAB
3790                      TINY_SBIW (%I1, %J1, 1)     CR_TAB
3791                      "ld %A0,%b1"                CR_TAB
3792                      "mov %B0,__tmp_reg__");
3793    }
3794  else if (reg_dest == reg_base - 2)
3795    {
3796      return *l = 7, (TINY_ADIW (%I1, %J1, %o1) CR_TAB
3797                      "ld %A0,%b1+"             CR_TAB
3798                      "ld %B0,%b1+"             CR_TAB
3799                      "ld __tmp_reg__,%b1+"     CR_TAB
3800                      "ld %D0,%b1"              CR_TAB
3801                      "mov %C0,__tmp_reg__");
3802    }
3803  else if (reg_unused_after (insn, XEXP (base, 0)))
3804    {
3805      return *l = 6, (TINY_ADIW (%I1, %J1, %o1) CR_TAB
3806                      "ld %A0,%b1+"             CR_TAB
3807                      "ld %B0,%b1+"             CR_TAB
3808                      "ld %C0,%b1+"             CR_TAB
3809                      "ld %D0,%b1");
3810    }
3811  else
3812    {
3813      return *l = 8, (TINY_ADIW (%I1, %J1, %o1)  CR_TAB
3814                      "ld %A0,%b1+"              CR_TAB
3815                      "ld %B0,%b1+"              CR_TAB
3816                      "ld %C0,%b1+"              CR_TAB
3817                      "ld %D0,%b1"               CR_TAB
3818                      TINY_SBIW (%I1, %J1, %o1+3));
3819    }
3820}
3821
3822static const char*
3823out_movsi_r_mr (rtx_insn *insn, rtx op[], int *l)
3824{
3825  rtx dest = op[0];
3826  rtx src = op[1];
3827  rtx base = XEXP (src, 0);
3828  int reg_dest = true_regnum (dest);
3829  int reg_base = true_regnum (base);
3830  int tmp;
3831
3832  if (!l)
3833    l = &tmp;
3834
3835  if (reg_base > 0)
3836    {
3837      if (AVR_TINY)
3838        return avr_out_movsi_r_mr_reg_no_disp_tiny (insn, op, l);
3839
3840      if (reg_base == REG_X)        /* (R26) */
3841        {
3842          if (reg_dest == REG_X)
3843	    /* "ld r26,-X" is undefined */
3844	    return *l=7, ("adiw r26,3"        CR_TAB
3845			  "ld r29,X"          CR_TAB
3846			  "ld r28,-X"         CR_TAB
3847			  "ld __tmp_reg__,-X" CR_TAB
3848			  "sbiw r26,1"        CR_TAB
3849			  "ld r26,X"          CR_TAB
3850			  "mov r27,__tmp_reg__");
3851          else if (reg_dest == REG_X - 2)
3852            return *l=5, ("ld %A0,X+"          CR_TAB
3853                          "ld %B0,X+"          CR_TAB
3854                          "ld __tmp_reg__,X+"  CR_TAB
3855                          "ld %D0,X"           CR_TAB
3856                          "mov %C0,__tmp_reg__");
3857          else if (reg_unused_after (insn, base))
3858            return  *l=4, ("ld %A0,X+"  CR_TAB
3859                           "ld %B0,X+" CR_TAB
3860                           "ld %C0,X+" CR_TAB
3861                           "ld %D0,X");
3862          else
3863            return  *l=5, ("ld %A0,X+"  CR_TAB
3864                           "ld %B0,X+" CR_TAB
3865                           "ld %C0,X+" CR_TAB
3866                           "ld %D0,X"  CR_TAB
3867                           "sbiw r26,3");
3868        }
3869      else
3870        {
3871          if (reg_dest == reg_base)
3872            return *l=5, ("ldd %D0,%1+3" CR_TAB
3873                          "ldd %C0,%1+2" CR_TAB
3874                          "ldd __tmp_reg__,%1+1"  CR_TAB
3875                          "ld %A0,%1"  CR_TAB
3876                          "mov %B0,__tmp_reg__");
3877          else if (reg_base == reg_dest + 2)
3878            return *l=5, ("ld %A0,%1"             CR_TAB
3879                          "ldd %B0,%1+1"          CR_TAB
3880                          "ldd __tmp_reg__,%1+2"  CR_TAB
3881                          "ldd %D0,%1+3"          CR_TAB
3882                          "mov %C0,__tmp_reg__");
3883          else
3884            return *l=4, ("ld %A0,%1"    CR_TAB
3885                          "ldd %B0,%1+1" CR_TAB
3886                          "ldd %C0,%1+2" CR_TAB
3887                          "ldd %D0,%1+3");
3888        }
3889    }
3890  else if (GET_CODE (base) == PLUS) /* (R + i) */
3891    {
3892      int disp = INTVAL (XEXP (base, 1));
3893
3894      if (AVR_TINY)
3895        return avr_out_movsi_r_mr_reg_disp_tiny (insn, op, l);
3896
3897      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3898	{
3899	  if (REGNO (XEXP (base, 0)) != REG_Y)
3900	    fatal_insn ("incorrect insn:",insn);
3901
3902	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3903	    return *l = 6, ("adiw r28,%o1-60" CR_TAB
3904			    "ldd %A0,Y+60"    CR_TAB
3905			    "ldd %B0,Y+61"    CR_TAB
3906			    "ldd %C0,Y+62"    CR_TAB
3907			    "ldd %D0,Y+63"    CR_TAB
3908			    "sbiw r28,%o1-60");
3909
3910	  return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
3911			  "sbci r29,hi8(-%o1)" CR_TAB
3912			  "ld %A0,Y"           CR_TAB
3913			  "ldd %B0,Y+1"        CR_TAB
3914			  "ldd %C0,Y+2"        CR_TAB
3915			  "ldd %D0,Y+3"        CR_TAB
3916			  "subi r28,lo8(%o1)"  CR_TAB
3917			  "sbci r29,hi8(%o1)");
3918	}
3919
3920      reg_base = true_regnum (XEXP (base, 0));
3921      if (reg_base == REG_X)
3922	{
3923	  /* R = (X + d) */
3924	  if (reg_dest == REG_X)
3925	    {
3926	      *l = 7;
3927	      /* "ld r26,-X" is undefined */
3928	      return ("adiw r26,%o1+3"    CR_TAB
3929		      "ld r29,X"          CR_TAB
3930		      "ld r28,-X"         CR_TAB
3931		      "ld __tmp_reg__,-X" CR_TAB
3932		      "sbiw r26,1"        CR_TAB
3933		      "ld r26,X"          CR_TAB
3934		      "mov r27,__tmp_reg__");
3935	    }
3936	  *l = 6;
3937	  if (reg_dest == REG_X - 2)
3938	    return ("adiw r26,%o1"      CR_TAB
3939		    "ld r24,X+"         CR_TAB
3940		    "ld r25,X+"         CR_TAB
3941		    "ld __tmp_reg__,X+" CR_TAB
3942		    "ld r27,X"          CR_TAB
3943		    "mov r26,__tmp_reg__");
3944
3945	  return ("adiw r26,%o1" CR_TAB
3946		  "ld %A0,X+"    CR_TAB
3947		  "ld %B0,X+"    CR_TAB
3948		  "ld %C0,X+"    CR_TAB
3949		  "ld %D0,X"     CR_TAB
3950		  "sbiw r26,%o1+3");
3951	}
3952      if (reg_dest == reg_base)
3953        return *l=5, ("ldd %D0,%D1"          CR_TAB
3954                      "ldd %C0,%C1"          CR_TAB
3955                      "ldd __tmp_reg__,%B1"  CR_TAB
3956                      "ldd %A0,%A1"          CR_TAB
3957                      "mov %B0,__tmp_reg__");
3958      else if (reg_dest == reg_base - 2)
3959        return *l=5, ("ldd %A0,%A1"          CR_TAB
3960                      "ldd %B0,%B1"          CR_TAB
3961                      "ldd __tmp_reg__,%C1"  CR_TAB
3962                      "ldd %D0,%D1"          CR_TAB
3963                      "mov %C0,__tmp_reg__");
3964      return *l=4, ("ldd %A0,%A1" CR_TAB
3965                    "ldd %B0,%B1" CR_TAB
3966                    "ldd %C0,%C1" CR_TAB
3967                    "ldd %D0,%D1");
3968    }
3969  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3970    return *l=4, ("ld %D0,%1" CR_TAB
3971		  "ld %C0,%1" CR_TAB
3972		  "ld %B0,%1" CR_TAB
3973		  "ld %A0,%1");
3974  else if (GET_CODE (base) == POST_INC) /* (R++) */
3975    return *l=4, ("ld %A0,%1" CR_TAB
3976		  "ld %B0,%1" CR_TAB
3977		  "ld %C0,%1" CR_TAB
3978		  "ld %D0,%1");
3979  else if (CONSTANT_ADDRESS_P (base))
3980    {
3981      if (io_address_operand (base, SImode))
3982        {
3983          *l = 4;
3984          return ("in %A0,%i1"   CR_TAB
3985                  "in %B0,%i1+1" CR_TAB
3986                  "in %C0,%i1+2" CR_TAB
3987                  "in %D0,%i1+3");
3988        }
3989      else
3990        {
3991          *l = AVR_TINY ? 4 : 8;
3992          return ("lds %A0,%m1"   CR_TAB
3993                  "lds %B0,%m1+1" CR_TAB
3994                  "lds %C0,%m1+2" CR_TAB
3995                  "lds %D0,%m1+3");
3996        }
3997    }
3998
3999  fatal_insn ("unknown move insn:",insn);
4000  return "";
4001}
4002
4003static const char*
4004avr_out_movsi_mr_r_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *l)
4005{
4006  rtx dest = op[0];
4007  rtx src = op[1];
4008  rtx base = XEXP (dest, 0);
4009  int reg_base = true_regnum (base);
4010  int reg_src = true_regnum (src);
4011
4012  if (reg_base == reg_src)
4013    {
4014	  /* "ld r26,-X" is undefined */
4015      if (reg_unused_after (insn, base))
4016        {
4017          return *l = 7, ("mov __tmp_reg__, %B1"  CR_TAB
4018			  "st %0,%A1"             CR_TAB
4019			  TINY_ADIW (%E0, %F0, 1) CR_TAB
4020			  "st %0+,__tmp_reg__"    CR_TAB
4021			  "st %0+,%C1"            CR_TAB
4022			  "st %0+,%D1");
4023        }
4024      else
4025        {
4026          return *l = 9, ("mov __tmp_reg__, %B1"  CR_TAB
4027			  "st %0,%A1"             CR_TAB
4028			  TINY_ADIW (%E0, %F0, 1) CR_TAB
4029			  "st %0+,__tmp_reg__"    CR_TAB
4030			  "st %0+,%C1"            CR_TAB
4031			  "st %0+,%D1"            CR_TAB
4032			  TINY_SBIW (%E0, %F0, 3));
4033        }
4034    }
4035  else if (reg_base == reg_src + 2)
4036    {
4037      if (reg_unused_after (insn, base))
4038	return *l = 7, ("mov __zero_reg__,%C1" CR_TAB
4039                        "mov __tmp_reg__,%D1"  CR_TAB
4040                        "st %0+,%A1"           CR_TAB
4041                        "st %0+,%B1"           CR_TAB
4042                        "st %0+,__zero_reg__"  CR_TAB
4043                        "st %0,__tmp_reg__"    CR_TAB
4044                        "clr __zero_reg__");
4045      else
4046	return *l = 9, ("mov __zero_reg__,%C1" CR_TAB
4047			"mov __tmp_reg__,%D1"  CR_TAB
4048			"st %0+,%A1"           CR_TAB
4049			"st %0+,%B1"           CR_TAB
4050			"st %0+,__zero_reg__"  CR_TAB
4051			"st %0,__tmp_reg__"    CR_TAB
4052			"clr __zero_reg__"     CR_TAB
4053			TINY_SBIW (%E0, %F0, 3));
4054    }
4055
4056  return *l = 6, ("st %0+,%A1" CR_TAB
4057		  "st %0+,%B1" CR_TAB
4058		  "st %0+,%C1" CR_TAB
4059		  "st %0,%D1"  CR_TAB
4060		  TINY_SBIW (%E0, %F0, 3));
4061}
4062
4063static const char*
4064avr_out_movsi_mr_r_reg_disp_tiny (rtx op[], int *l)
4065{
4066  rtx dest = op[0];
4067  rtx src = op[1];
4068  rtx base = XEXP (dest, 0);
4069  int reg_base = REGNO (XEXP (base, 0));
4070  int reg_src =true_regnum (src);
4071
4072  if (reg_base == reg_src)
4073    {
4074      *l = 11;
4075      return ("mov __tmp_reg__,%A2"        CR_TAB
4076              "mov __zero_reg__,%B2"       CR_TAB
4077              TINY_ADIW (%I0, %J0, %o0)    CR_TAB
4078              "st %b0+,__tmp_reg__"        CR_TAB
4079              "st %b0+,__zero_reg__"       CR_TAB
4080              "st %b0+,%C2"                CR_TAB
4081              "st %b0,%D2"                 CR_TAB
4082              "clr __zero_reg__"           CR_TAB
4083              TINY_SBIW (%I0, %J0, %o0+3));
4084    }
4085  else if (reg_src == reg_base - 2)
4086    {
4087      *l = 11;
4088      return ("mov __tmp_reg__,%C2"         CR_TAB
4089              "mov __zero_reg__,%D2"        CR_TAB
4090              TINY_ADIW (%I0, %J0, %o0)     CR_TAB
4091              "st %b0+,%A0"                 CR_TAB
4092              "st %b0+,%B0"                 CR_TAB
4093              "st %b0+,__tmp_reg__"         CR_TAB
4094              "st %b0,__zero_reg__"         CR_TAB
4095              "clr __zero_reg__"            CR_TAB
4096              TINY_SBIW (%I0, %J0, %o0+3));
4097    }
4098  *l = 8;
4099  return (TINY_ADIW (%I0, %J0, %o0)     CR_TAB
4100          "st %b0+,%A1"                 CR_TAB
4101          "st %b0+,%B1"                 CR_TAB
4102          "st %b0+,%C1"                 CR_TAB
4103          "st %b0,%D1"                  CR_TAB
4104          TINY_SBIW (%I0, %J0, %o0+3));
4105}
4106
4107static const char*
4108out_movsi_mr_r (rtx_insn *insn, rtx op[], int *l)
4109{
4110  rtx dest = op[0];
4111  rtx src = op[1];
4112  rtx base = XEXP (dest, 0);
4113  int reg_base = true_regnum (base);
4114  int reg_src = true_regnum (src);
4115  int tmp;
4116
4117  if (!l)
4118    l = &tmp;
4119
4120  if (CONSTANT_ADDRESS_P (base))
4121    {
4122      if (io_address_operand (base, SImode))
4123        {
4124          return *l=4,("out %i0, %A1"  CR_TAB
4125                       "out %i0+1,%B1" CR_TAB
4126                       "out %i0+2,%C1" CR_TAB
4127                       "out %i0+3,%D1");
4128        }
4129      else
4130        {
4131          *l = AVR_TINY ? 4 : 8;
4132          return ("sts %m0,%A1"   CR_TAB
4133                  "sts %m0+1,%B1" CR_TAB
4134                  "sts %m0+2,%C1" CR_TAB
4135                  "sts %m0+3,%D1");
4136        }
4137    }
4138
4139  if (reg_base > 0)                 /* (r) */
4140    {
4141      if (AVR_TINY)
4142        return avr_out_movsi_mr_r_reg_no_disp_tiny (insn, op, l);
4143
4144      if (reg_base == REG_X)                /* (R26) */
4145        {
4146          if (reg_src == REG_X)
4147            {
4148	      /* "st X+,r26" is undefined */
4149              if (reg_unused_after (insn, base))
4150		return *l=6, ("mov __tmp_reg__,r27" CR_TAB
4151			      "st X,r26"            CR_TAB
4152			      "adiw r26,1"          CR_TAB
4153			      "st X+,__tmp_reg__"   CR_TAB
4154			      "st X+,r28"           CR_TAB
4155			      "st X,r29");
4156              else
4157                return *l=7, ("mov __tmp_reg__,r27" CR_TAB
4158			      "st X,r26"            CR_TAB
4159			      "adiw r26,1"          CR_TAB
4160			      "st X+,__tmp_reg__"   CR_TAB
4161			      "st X+,r28"           CR_TAB
4162			      "st X,r29"            CR_TAB
4163			      "sbiw r26,3");
4164            }
4165          else if (reg_base == reg_src + 2)
4166            {
4167              if (reg_unused_after (insn, base))
4168                return *l=7, ("mov __zero_reg__,%C1" CR_TAB
4169                              "mov __tmp_reg__,%D1"  CR_TAB
4170                              "st %0+,%A1"           CR_TAB
4171                              "st %0+,%B1"           CR_TAB
4172                              "st %0+,__zero_reg__"  CR_TAB
4173                              "st %0,__tmp_reg__"    CR_TAB
4174                              "clr __zero_reg__");
4175              else
4176                return *l=8, ("mov __zero_reg__,%C1" CR_TAB
4177                              "mov __tmp_reg__,%D1"  CR_TAB
4178                              "st %0+,%A1"           CR_TAB
4179                              "st %0+,%B1"           CR_TAB
4180                              "st %0+,__zero_reg__"  CR_TAB
4181                              "st %0,__tmp_reg__"    CR_TAB
4182                              "clr __zero_reg__"     CR_TAB
4183                              "sbiw r26,3");
4184            }
4185          return *l=5, ("st %0+,%A1" CR_TAB
4186                        "st %0+,%B1" CR_TAB
4187                        "st %0+,%C1" CR_TAB
4188                        "st %0,%D1"  CR_TAB
4189                        "sbiw r26,3");
4190        }
4191      else
4192        return *l=4, ("st %0,%A1"    CR_TAB
4193		      "std %0+1,%B1" CR_TAB
4194		      "std %0+2,%C1" CR_TAB
4195		      "std %0+3,%D1");
4196    }
4197  else if (GET_CODE (base) == PLUS) /* (R + i) */
4198    {
4199      int disp = INTVAL (XEXP (base, 1));
4200
4201      if (AVR_TINY)
4202        return avr_out_movsi_mr_r_reg_disp_tiny (op, l);
4203
4204      reg_base = REGNO (XEXP (base, 0));
4205      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
4206	{
4207	  if (reg_base != REG_Y)
4208	    fatal_insn ("incorrect insn:",insn);
4209
4210	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
4211	    return *l = 6, ("adiw r28,%o0-60" CR_TAB
4212			    "std Y+60,%A1"    CR_TAB
4213			    "std Y+61,%B1"    CR_TAB
4214			    "std Y+62,%C1"    CR_TAB
4215			    "std Y+63,%D1"    CR_TAB
4216			    "sbiw r28,%o0-60");
4217
4218	  return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
4219			  "sbci r29,hi8(-%o0)" CR_TAB
4220			  "st Y,%A1"           CR_TAB
4221			  "std Y+1,%B1"        CR_TAB
4222			  "std Y+2,%C1"        CR_TAB
4223			  "std Y+3,%D1"        CR_TAB
4224			  "subi r28,lo8(%o0)"  CR_TAB
4225			  "sbci r29,hi8(%o0)");
4226	}
4227      if (reg_base == REG_X)
4228	{
4229	  /* (X + d) = R */
4230	  if (reg_src == REG_X)
4231	    {
4232	      *l = 9;
4233	      return ("mov __tmp_reg__,r26"  CR_TAB
4234		      "mov __zero_reg__,r27" CR_TAB
4235		      "adiw r26,%o0"         CR_TAB
4236		      "st X+,__tmp_reg__"    CR_TAB
4237		      "st X+,__zero_reg__"   CR_TAB
4238		      "st X+,r28"            CR_TAB
4239		      "st X,r29"             CR_TAB
4240		      "clr __zero_reg__"     CR_TAB
4241		      "sbiw r26,%o0+3");
4242	    }
4243	  else if (reg_src == REG_X - 2)
4244	    {
4245	      *l = 9;
4246	      return ("mov __tmp_reg__,r26"  CR_TAB
4247		      "mov __zero_reg__,r27" CR_TAB
4248		      "adiw r26,%o0"         CR_TAB
4249		      "st X+,r24"            CR_TAB
4250		      "st X+,r25"            CR_TAB
4251		      "st X+,__tmp_reg__"    CR_TAB
4252		      "st X,__zero_reg__"    CR_TAB
4253		      "clr __zero_reg__"     CR_TAB
4254		      "sbiw r26,%o0+3");
4255	    }
4256	  *l = 6;
4257	  return ("adiw r26,%o0" CR_TAB
4258		  "st X+,%A1"    CR_TAB
4259		  "st X+,%B1"    CR_TAB
4260		  "st X+,%C1"    CR_TAB
4261		  "st X,%D1"     CR_TAB
4262		  "sbiw r26,%o0+3");
4263	}
4264      return *l=4, ("std %A0,%A1" CR_TAB
4265		    "std %B0,%B1" CR_TAB
4266		    "std %C0,%C1" CR_TAB
4267		    "std %D0,%D1");
4268    }
4269  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4270    return *l=4, ("st %0,%D1" CR_TAB
4271		  "st %0,%C1" CR_TAB
4272		  "st %0,%B1" CR_TAB
4273		  "st %0,%A1");
4274  else if (GET_CODE (base) == POST_INC) /* (R++) */
4275    return *l=4, ("st %0,%A1" CR_TAB
4276		  "st %0,%B1" CR_TAB
4277		  "st %0,%C1" CR_TAB
4278		  "st %0,%D1");
4279  fatal_insn ("unknown move insn:",insn);
4280  return "";
4281}
4282
4283const char *
4284output_movsisf (rtx_insn *insn, rtx operands[], int *l)
4285{
4286  int dummy;
4287  rtx dest = operands[0];
4288  rtx src = operands[1];
4289  int *real_l = l;
4290
4291  if (avr_mem_flash_p (src)
4292      || avr_mem_flash_p (dest))
4293    {
4294      return avr_out_lpm (insn, operands, real_l);
4295    }
4296
4297  if (!l)
4298    l = &dummy;
4299
4300  gcc_assert (4 == GET_MODE_SIZE (GET_MODE (dest)));
4301  if (REG_P (dest))
4302    {
4303      if (REG_P (src)) /* mov r,r */
4304	{
4305	  if (true_regnum (dest) > true_regnum (src))
4306	    {
4307	      if (AVR_HAVE_MOVW)
4308		{
4309		  *l = 2;
4310		  return ("movw %C0,%C1" CR_TAB
4311			  "movw %A0,%A1");
4312		}
4313	      *l = 4;
4314	      return ("mov %D0,%D1" CR_TAB
4315		      "mov %C0,%C1" CR_TAB
4316		      "mov %B0,%B1" CR_TAB
4317		      "mov %A0,%A1");
4318	    }
4319	  else
4320	    {
4321	      if (AVR_HAVE_MOVW)
4322		{
4323		  *l = 2;
4324		  return ("movw %A0,%A1" CR_TAB
4325			  "movw %C0,%C1");
4326		}
4327	      *l = 4;
4328	      return ("mov %A0,%A1" CR_TAB
4329		      "mov %B0,%B1" CR_TAB
4330		      "mov %C0,%C1" CR_TAB
4331		      "mov %D0,%D1");
4332	    }
4333	}
4334      else if (CONSTANT_P (src))
4335	{
4336          return output_reload_insisf (operands, NULL_RTX, real_l);
4337        }
4338      else if (MEM_P (src))
4339	return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
4340    }
4341  else if (MEM_P (dest))
4342    {
4343      const char *templ;
4344
4345      if (src == CONST0_RTX (GET_MODE (dest)))
4346	  operands[1] = zero_reg_rtx;
4347
4348      templ = out_movsi_mr_r (insn, operands, real_l);
4349
4350      if (!real_l)
4351	output_asm_insn (templ, operands);
4352
4353      operands[1] = src;
4354      return "";
4355    }
4356  fatal_insn ("invalid insn:", insn);
4357  return "";
4358}
4359
4360
4361/* Handle loads of 24-bit types from memory to register.  */
4362
4363static const char*
4364avr_out_load_psi_reg_no_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
4365{
4366  rtx dest = op[0];
4367  rtx src = op[1];
4368  rtx base = XEXP (src, 0);
4369  int reg_dest = true_regnum (dest);
4370  int reg_base = true_regnum (base);
4371
4372  if (reg_base == reg_dest)
4373    {
4374      return avr_asm_len (TINY_ADIW (%E1, %F1, 2)   CR_TAB
4375                          "ld %C0,%1"               CR_TAB
4376                          "ld __tmp_reg__,-%1"      CR_TAB
4377                          TINY_SBIW (%E1, %F1, 1)   CR_TAB
4378                          "ld %A0,%1"               CR_TAB
4379                          "mov %B0,__tmp_reg__", op, plen, -8);
4380    }
4381  else
4382    {
4383      return avr_asm_len ("ld %A0,%1+"  CR_TAB
4384                          "ld %B0,%1+"  CR_TAB
4385                          "ld %C0,%1", op, plen, -3);
4386
4387      if (reg_dest != reg_base - 2 &&
4388          !reg_unused_after (insn, base))
4389        {
4390          avr_asm_len (TINY_SBIW (%E1, %F1, 2), op, plen, 2);
4391        }
4392      return "";
4393    }
4394}
4395
4396static const char*
4397avr_out_load_psi_reg_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
4398{
4399  rtx dest = op[0];
4400  rtx src = op[1];
4401  rtx base = XEXP (src, 0);
4402  int reg_dest = true_regnum (dest);
4403  int reg_base = true_regnum (base);
4404
4405  reg_base = true_regnum (XEXP (base, 0));
4406  if (reg_base == reg_dest)
4407    {
4408      return avr_asm_len (TINY_ADIW (%I1, %J1, %o1+2) CR_TAB
4409                          "ld %C0,%b1"                CR_TAB
4410                          "ld __tmp_reg__,-%b1"       CR_TAB
4411                          TINY_SBIW (%I1, %J1, 1)     CR_TAB
4412                          "ld %A0,%b1"                CR_TAB
4413                          "mov %B0,__tmp_reg__", op, plen, -8);
4414   }
4415  else
4416    {
4417      avr_asm_len (TINY_ADIW (%I1, %J1, %o1)   CR_TAB
4418                          "ld %A0,%b1+"              CR_TAB
4419                          "ld %B0,%b1+"              CR_TAB
4420                          "ld %C0,%b1", op, plen, -5);
4421
4422      if (reg_dest != (reg_base - 2)
4423          && !reg_unused_after (insn, XEXP (base, 0)))
4424          avr_asm_len (TINY_SBIW (%I1, %J1, %o1+2), op, plen, 2);
4425
4426      return "";
4427    }
4428}
4429
4430static const char*
4431avr_out_load_psi (rtx_insn *insn, rtx *op, int *plen)
4432{
4433  rtx dest = op[0];
4434  rtx src = op[1];
4435  rtx base = XEXP (src, 0);
4436  int reg_dest = true_regnum (dest);
4437  int reg_base = true_regnum (base);
4438
4439  if (reg_base > 0)
4440    {
4441      if (AVR_TINY)
4442        return avr_out_load_psi_reg_no_disp_tiny (insn, op, plen);
4443
4444      if (reg_base == REG_X)        /* (R26) */
4445        {
4446          if (reg_dest == REG_X)
4447            /* "ld r26,-X" is undefined */
4448            return avr_asm_len ("adiw r26,2"        CR_TAB
4449                                "ld r28,X"          CR_TAB
4450                                "ld __tmp_reg__,-X" CR_TAB
4451                                "sbiw r26,1"        CR_TAB
4452                                "ld r26,X"          CR_TAB
4453                                "mov r27,__tmp_reg__", op, plen, -6);
4454          else
4455            {
4456              avr_asm_len ("ld %A0,X+" CR_TAB
4457                           "ld %B0,X+" CR_TAB
4458                           "ld %C0,X", op, plen, -3);
4459
4460              if (reg_dest != REG_X - 2
4461                  && !reg_unused_after (insn, base))
4462                {
4463                  avr_asm_len ("sbiw r26,2", op, plen, 1);
4464                }
4465
4466              return "";
4467            }
4468        }
4469      else /* reg_base != REG_X */
4470        {
4471          if (reg_dest == reg_base)
4472            return avr_asm_len ("ldd %C0,%1+2"          CR_TAB
4473                                "ldd __tmp_reg__,%1+1"  CR_TAB
4474                                "ld  %A0,%1"            CR_TAB
4475                                "mov %B0,__tmp_reg__", op, plen, -4);
4476          else
4477            return avr_asm_len ("ld  %A0,%1"    CR_TAB
4478                                "ldd %B0,%1+1"  CR_TAB
4479                                "ldd %C0,%1+2", op, plen, -3);
4480        }
4481    }
4482  else if (GET_CODE (base) == PLUS) /* (R + i) */
4483    {
4484      int disp = INTVAL (XEXP (base, 1));
4485
4486      if (AVR_TINY)
4487        return avr_out_load_psi_reg_disp_tiny (insn, op, plen);
4488
4489      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
4490        {
4491          if (REGNO (XEXP (base, 0)) != REG_Y)
4492            fatal_insn ("incorrect insn:",insn);
4493
4494          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
4495            return avr_asm_len ("adiw r28,%o1-61" CR_TAB
4496                                "ldd %A0,Y+61"    CR_TAB
4497                                "ldd %B0,Y+62"    CR_TAB
4498                                "ldd %C0,Y+63"    CR_TAB
4499                                "sbiw r28,%o1-61", op, plen, -5);
4500
4501          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
4502                              "sbci r29,hi8(-%o1)" CR_TAB
4503                              "ld  %A0,Y"           CR_TAB
4504                              "ldd %B0,Y+1"        CR_TAB
4505                              "ldd %C0,Y+2"        CR_TAB
4506                              "subi r28,lo8(%o1)"  CR_TAB
4507                              "sbci r29,hi8(%o1)", op, plen, -7);
4508        }
4509
4510      reg_base = true_regnum (XEXP (base, 0));
4511      if (reg_base == REG_X)
4512        {
4513          /* R = (X + d) */
4514          if (reg_dest == REG_X)
4515            {
4516              /* "ld r26,-X" is undefined */
4517              return avr_asm_len ("adiw r26,%o1+2"     CR_TAB
4518                                  "ld  r28,X"          CR_TAB
4519                                  "ld  __tmp_reg__,-X" CR_TAB
4520                                  "sbiw r26,1"         CR_TAB
4521                                  "ld  r26,X"          CR_TAB
4522                                  "mov r27,__tmp_reg__", op, plen, -6);
4523            }
4524
4525          avr_asm_len ("adiw r26,%o1" CR_TAB
4526                       "ld %A0,X+"    CR_TAB
4527                       "ld %B0,X+"    CR_TAB
4528                       "ld %C0,X", op, plen, -4);
4529
4530          if (reg_dest != REG_W
4531              && !reg_unused_after (insn, XEXP (base, 0)))
4532            avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);
4533
4534          return "";
4535        }
4536
4537      if (reg_dest == reg_base)
4538        return avr_asm_len ("ldd %C0,%C1" CR_TAB
4539                            "ldd __tmp_reg__,%B1"  CR_TAB
4540                            "ldd %A0,%A1" CR_TAB
4541                            "mov %B0,__tmp_reg__", op, plen, -4);
4542
4543        return avr_asm_len ("ldd %A0,%A1" CR_TAB
4544                            "ldd %B0,%B1" CR_TAB
4545                            "ldd %C0,%C1", op, plen, -3);
4546    }
4547  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4548    return avr_asm_len ("ld %C0,%1" CR_TAB
4549                        "ld %B0,%1" CR_TAB
4550                        "ld %A0,%1", op, plen, -3);
4551  else if (GET_CODE (base) == POST_INC) /* (R++) */
4552    return avr_asm_len ("ld %A0,%1" CR_TAB
4553                        "ld %B0,%1" CR_TAB
4554                        "ld %C0,%1", op, plen, -3);
4555
4556  else if (CONSTANT_ADDRESS_P (base))
4557    {
4558      int n_words = AVR_TINY ? 3 : 6;
4559      return avr_asm_len ("lds %A0,%m1" CR_TAB
4560                          "lds %B0,%m1+1" CR_TAB
4561                          "lds %C0,%m1+2", op, plen , -n_words);
4562    }
4563
4564  fatal_insn ("unknown move insn:",insn);
4565  return "";
4566}
4567
4568
4569static const char*
4570avr_out_store_psi_reg_no_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
4571{
4572  rtx dest = op[0];
4573  rtx src = op[1];
4574  rtx base = XEXP (dest, 0);
4575  int reg_base = true_regnum (base);
4576  int reg_src = true_regnum (src);
4577
4578  if (reg_base == reg_src)
4579    {
4580      avr_asm_len ("st %0,%A1"              CR_TAB
4581                   "mov __tmp_reg__,%B1"    CR_TAB
4582                   TINY_ADIW (%E0, %F0, 1)  CR_TAB /* st X+, r27 is undefined */
4583                   "st %0+,__tmp_reg__"     CR_TAB
4584                   "st %0,%C1", op, plen, -6);
4585
4586    }
4587  else if (reg_src == reg_base - 2)
4588    {
4589      avr_asm_len ("st %0,%A1"              CR_TAB
4590                   "mov __tmp_reg__,%C1"    CR_TAB
4591                   TINY_ADIW (%E0, %F0, 1)  CR_TAB
4592                   "st %0+,%B1"             CR_TAB
4593                   "st %0,__tmp_reg__", op, plen, 6);
4594    }
4595  else
4596    {
4597      avr_asm_len ("st %0+,%A1"  CR_TAB
4598                   "st %0+,%B1" CR_TAB
4599                   "st %0,%C1", op, plen, -3);
4600    }
4601
4602  if (!reg_unused_after (insn, base))
4603    avr_asm_len (TINY_SBIW (%E0, %F0, 2), op, plen, 2);
4604
4605  return "";
4606}
4607
4608static const char*
4609avr_out_store_psi_reg_disp_tiny (rtx *op, int *plen)
4610{
4611  rtx dest = op[0];
4612  rtx src = op[1];
4613  rtx base = XEXP (dest, 0);
4614  int reg_base = REGNO (XEXP (base, 0));
4615  int reg_src = true_regnum (src);
4616
4617  if (reg_src == reg_base)
4618    {
4619      return avr_asm_len ("mov __tmp_reg__,%A1"          CR_TAB
4620                          "mov __zero_reg__,%B1"         CR_TAB
4621                          TINY_ADIW (%I0, %J0, %o0)      CR_TAB
4622                          "st %b0+,__tmp_reg__"          CR_TAB
4623                          "st %b0+,__zero_reg__"         CR_TAB
4624                          "st %b0,%C1"                   CR_TAB
4625                          "clr __zero_reg__"             CR_TAB
4626                          TINY_SBIW (%I0, %J0, %o0+2), op, plen, -10);
4627    }
4628  else if (reg_src == reg_base - 2)
4629    {
4630      return avr_asm_len ("mov __tmp_reg__,%C1"          CR_TAB
4631                          TINY_ADIW (%I0, %J0, %o0)      CR_TAB
4632                          "st %b0+,%A1"                  CR_TAB
4633                          "st %b0+,%B1"                  CR_TAB
4634                          "st %b0,__tmp_reg__"           CR_TAB
4635                          TINY_SBIW (%I0, %J0, %o0+2), op, plen, -8);
4636    }
4637
4638  return avr_asm_len (TINY_ADIW (%I0, %J0, %o0)      CR_TAB
4639                          "st %b0+,%A1"                  CR_TAB
4640                          "st %b0+,%B1"                  CR_TAB
4641                          "st %b0,%C1"                   CR_TAB
4642                          TINY_SBIW (%I0, %J0, %o0+2), op, plen, -7);
4643}
4644
4645/* Handle store of 24-bit type from register or zero to memory.  */
4646
4647static const char*
4648avr_out_store_psi (rtx_insn *insn, rtx *op, int *plen)
4649{
4650  rtx dest = op[0];
4651  rtx src = op[1];
4652  rtx base = XEXP (dest, 0);
4653  int reg_base = true_regnum (base);
4654
4655  if (CONSTANT_ADDRESS_P (base))
4656    {
4657      int n_words = AVR_TINY ? 3 : 6;
4658      return avr_asm_len ("sts %m0,%A1"   CR_TAB
4659                          "sts %m0+1,%B1" CR_TAB
4660                          "sts %m0+2,%C1", op, plen, -n_words);
4661    }
4662
4663  if (reg_base > 0)                 /* (r) */
4664    {
4665      if (AVR_TINY)
4666        return avr_out_store_psi_reg_no_disp_tiny (insn, op, plen);
4667
4668      if (reg_base == REG_X)        /* (R26) */
4669        {
4670          gcc_assert (!reg_overlap_mentioned_p (base, src));
4671
4672          avr_asm_len ("st %0+,%A1"  CR_TAB
4673                       "st %0+,%B1" CR_TAB
4674                       "st %0,%C1", op, plen, -3);
4675
4676          if (!reg_unused_after (insn, base))
4677            avr_asm_len ("sbiw r26,2", op, plen, 1);
4678
4679          return "";
4680        }
4681      else
4682        return avr_asm_len ("st %0,%A1"    CR_TAB
4683                            "std %0+1,%B1" CR_TAB
4684                            "std %0+2,%C1", op, plen, -3);
4685    }
4686  else if (GET_CODE (base) == PLUS) /* (R + i) */
4687    {
4688      int disp = INTVAL (XEXP (base, 1));
4689
4690      if (AVR_TINY)
4691        return avr_out_store_psi_reg_disp_tiny (op, plen);
4692
4693      reg_base = REGNO (XEXP (base, 0));
4694
4695      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
4696        {
4697          if (reg_base != REG_Y)
4698            fatal_insn ("incorrect insn:",insn);
4699
4700          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
4701            return avr_asm_len ("adiw r28,%o0-61" CR_TAB
4702                                "std Y+61,%A1"    CR_TAB
4703                                "std Y+62,%B1"    CR_TAB
4704                                "std Y+63,%C1"    CR_TAB
4705                                "sbiw r28,%o0-61", op, plen, -5);
4706
4707          return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4708                              "sbci r29,hi8(-%o0)" CR_TAB
4709                              "st Y,%A1"           CR_TAB
4710                              "std Y+1,%B1"        CR_TAB
4711                              "std Y+2,%C1"        CR_TAB
4712                              "subi r28,lo8(%o0)"  CR_TAB
4713                              "sbci r29,hi8(%o0)", op, plen, -7);
4714        }
4715      if (reg_base == REG_X)
4716        {
4717          /* (X + d) = R */
4718          gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));
4719
4720          avr_asm_len ("adiw r26,%o0" CR_TAB
4721                       "st X+,%A1"    CR_TAB
4722                       "st X+,%B1"    CR_TAB
4723                       "st X,%C1", op, plen, -4);
4724
4725          if (!reg_unused_after (insn, XEXP (base, 0)))
4726            avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);
4727
4728          return "";
4729        }
4730
4731      return avr_asm_len ("std %A0,%A1" CR_TAB
4732                          "std %B0,%B1" CR_TAB
4733                          "std %C0,%C1", op, plen, -3);
4734    }
4735  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4736    return avr_asm_len ("st %0,%C1" CR_TAB
4737                        "st %0,%B1" CR_TAB
4738                        "st %0,%A1", op, plen, -3);
4739  else if (GET_CODE (base) == POST_INC) /* (R++) */
4740    return avr_asm_len ("st %0,%A1" CR_TAB
4741                        "st %0,%B1" CR_TAB
4742                        "st %0,%C1", op, plen, -3);
4743
4744  fatal_insn ("unknown move insn:",insn);
4745  return "";
4746}
4747
4748
4749/* Move around 24-bit stuff.  */
4750
4751const char *
4752avr_out_movpsi (rtx_insn *insn, rtx *op, int *plen)
4753{
4754  rtx dest = op[0];
4755  rtx src = op[1];
4756
4757  if (avr_mem_flash_p (src)
4758      || avr_mem_flash_p (dest))
4759    {
4760      return avr_out_lpm (insn, op, plen);
4761    }
4762
4763  if (register_operand (dest, VOIDmode))
4764    {
4765      if (register_operand (src, VOIDmode)) /* mov r,r */
4766        {
4767          if (true_regnum (dest) > true_regnum (src))
4768            {
4769              avr_asm_len ("mov %C0,%C1", op, plen, -1);
4770
4771              if (AVR_HAVE_MOVW)
4772                return avr_asm_len ("movw %A0,%A1", op, plen, 1);
4773              else
4774                return avr_asm_len ("mov %B0,%B1"  CR_TAB
4775                                    "mov %A0,%A1", op, plen, 2);
4776            }
4777          else
4778            {
4779              if (AVR_HAVE_MOVW)
4780                avr_asm_len ("movw %A0,%A1", op, plen, -1);
4781              else
4782                avr_asm_len ("mov %A0,%A1"  CR_TAB
4783                             "mov %B0,%B1", op, plen, -2);
4784
4785              return avr_asm_len ("mov %C0,%C1", op, plen, 1);
4786            }
4787        }
4788      else if (CONSTANT_P (src))
4789        {
4790          return avr_out_reload_inpsi (op, NULL_RTX, plen);
4791        }
4792      else if (MEM_P (src))
4793        return avr_out_load_psi (insn, op, plen); /* mov r,m */
4794    }
4795  else if (MEM_P (dest))
4796    {
4797      rtx xop[2];
4798
4799      xop[0] = dest;
4800      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
4801
4802      return avr_out_store_psi (insn, xop, plen);
4803    }
4804
4805  fatal_insn ("invalid insn:", insn);
4806  return "";
4807}
4808
4809static const char*
4810avr_out_movqi_mr_r_reg_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
4811{
4812  rtx dest = op[0];
4813  rtx src = op[1];
4814  rtx x = XEXP (dest, 0);
4815
4816  if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
4817    {
4818      avr_asm_len ("mov __tmp_reg__,%1"      CR_TAB
4819                   TINY_ADIW (%I0, %J0, %o0) CR_TAB
4820                   "st %b0,__tmp_reg__", op, plen, -4);
4821    }
4822    else
4823    {
4824      avr_asm_len (TINY_ADIW (%I0, %J0, %o0) CR_TAB
4825          "st %b0,%1" , op, plen, -3);
4826    }
4827
4828  if (!reg_unused_after (insn, XEXP (x,0)))
4829      avr_asm_len (TINY_SBIW (%I0, %J0, %o0), op, plen, 2);
4830
4831  return "";
4832}
4833
4834static const char*
4835out_movqi_mr_r (rtx_insn *insn, rtx op[], int *plen)
4836{
4837  rtx dest = op[0];
4838  rtx src = op[1];
4839  rtx x = XEXP (dest, 0);
4840
4841  if (CONSTANT_ADDRESS_P (x))
4842    {
4843      int n_words = AVR_TINY ? 1 : 2;
4844      return optimize > 0 && io_address_operand (x, QImode)
4845        ? avr_asm_len ("out %i0,%1", op, plen, -1)
4846        : avr_asm_len ("sts %m0,%1", op, plen, -n_words);
4847    }
4848  else if (GET_CODE (x) == PLUS
4849           && REG_P (XEXP (x, 0))
4850           && CONST_INT_P (XEXP (x, 1)))
4851    {
4852      /* memory access by reg+disp */
4853
4854      int disp = INTVAL (XEXP (x, 1));
4855
4856      if (AVR_TINY)
4857        return avr_out_movqi_mr_r_reg_disp_tiny (insn, op, plen);
4858
4859      if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
4860        {
4861          if (REGNO (XEXP (x, 0)) != REG_Y)
4862            fatal_insn ("incorrect insn:",insn);
4863
4864          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
4865            return avr_asm_len ("adiw r28,%o0-63" CR_TAB
4866                                "std Y+63,%1"     CR_TAB
4867                                "sbiw r28,%o0-63", op, plen, -3);
4868
4869          return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4870                              "sbci r29,hi8(-%o0)" CR_TAB
4871                              "st Y,%1"            CR_TAB
4872                              "subi r28,lo8(%o0)"  CR_TAB
4873                              "sbci r29,hi8(%o0)", op, plen, -5);
4874        }
4875      else if (REGNO (XEXP (x,0)) == REG_X)
4876        {
4877          if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
4878            {
4879              avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
4880                           "adiw r26,%o0"       CR_TAB
4881                           "st X,__tmp_reg__", op, plen, -3);
4882            }
4883          else
4884            {
4885              avr_asm_len ("adiw r26,%o0" CR_TAB
4886                           "st X,%1", op, plen, -2);
4887            }
4888
4889          if (!reg_unused_after (insn, XEXP (x,0)))
4890            avr_asm_len ("sbiw r26,%o0", op, plen, 1);
4891
4892          return "";
4893        }
4894
4895      return avr_asm_len ("std %0,%1", op, plen, -1);
4896    }
4897
4898  return avr_asm_len ("st %0,%1", op, plen, -1);
4899}
4900
4901
4902/* Helper for the next function for XMEGA.  It does the same
4903   but with low byte first.  */
4904
4905static const char*
4906avr_out_movhi_mr_r_xmega (rtx_insn *insn, rtx op[], int *plen)
4907{
4908  rtx dest = op[0];
4909  rtx src = op[1];
4910  rtx base = XEXP (dest, 0);
4911  int reg_base = true_regnum (base);
4912  int reg_src = true_regnum (src);
4913
4914  /* "volatile" forces writing low byte first, even if less efficient,
4915     for correct operation with 16-bit I/O registers like SP.  */
4916  int mem_volatile_p = MEM_VOLATILE_P (dest);
4917
4918  if (CONSTANT_ADDRESS_P (base))
4919    {
4920      int n_words = AVR_TINY ? 2 : 4;
4921      return optimize > 0 && io_address_operand (base, HImode)
4922        ? avr_asm_len ("out %i0,%A1" CR_TAB
4923                       "out %i0+1,%B1", op, plen, -2)
4924
4925        : avr_asm_len ("sts %m0,%A1" CR_TAB
4926                       "sts %m0+1,%B1", op, plen, -n_words);
4927    }
4928
4929  if (reg_base > 0)
4930    {
4931      if (reg_base != REG_X)
4932        return avr_asm_len ("st %0,%A1" CR_TAB
4933                            "std %0+1,%B1", op, plen, -2);
4934
4935      if (reg_src == REG_X)
4936        /* "st X+,r26" and "st -X,r26" are undefined.  */
4937        avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4938                     "st X,r26"            CR_TAB
4939                     "adiw r26,1"          CR_TAB
4940                     "st X,__tmp_reg__", op, plen, -4);
4941      else
4942        avr_asm_len ("st X+,%A1" CR_TAB
4943                     "st X,%B1", op, plen, -2);
4944
4945      return reg_unused_after (insn, base)
4946        ? ""
4947        : avr_asm_len ("sbiw r26,1", op, plen, 1);
4948    }
4949  else if (GET_CODE (base) == PLUS)
4950    {
4951      int disp = INTVAL (XEXP (base, 1));
4952      reg_base = REGNO (XEXP (base, 0));
4953      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
4954        {
4955          if (reg_base != REG_Y)
4956            fatal_insn ("incorrect insn:",insn);
4957
4958          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
4959            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4960                           "std Y+62,%A1"    CR_TAB
4961                           "std Y+63,%B1"    CR_TAB
4962                           "sbiw r28,%o0-62", op, plen, -4)
4963
4964            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4965                           "sbci r29,hi8(-%o0)" CR_TAB
4966                           "st Y,%A1"           CR_TAB
4967                           "std Y+1,%B1"        CR_TAB
4968                           "subi r28,lo8(%o0)"  CR_TAB
4969                           "sbci r29,hi8(%o0)", op, plen, -6);
4970        }
4971
4972      if (reg_base != REG_X)
4973        return avr_asm_len ("std %A0,%A1" CR_TAB
4974                            "std %B0,%B1", op, plen, -2);
4975      /* (X + d) = R */
4976      return reg_src == REG_X
4977        ? avr_asm_len ("mov __tmp_reg__,r26"  CR_TAB
4978                       "mov __zero_reg__,r27" CR_TAB
4979                       "adiw r26,%o0"         CR_TAB
4980                       "st X+,__tmp_reg__"    CR_TAB
4981                       "st X,__zero_reg__"    CR_TAB
4982                       "clr __zero_reg__"     CR_TAB
4983                       "sbiw r26,%o0+1", op, plen, -7)
4984
4985        : avr_asm_len ("adiw r26,%o0" CR_TAB
4986                       "st X+,%A1"    CR_TAB
4987                       "st X,%B1"     CR_TAB
4988                       "sbiw r26,%o0+1", op, plen, -4);
4989    }
4990  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4991    {
4992      if (!mem_volatile_p)
4993        return avr_asm_len ("st %0,%B1" CR_TAB
4994                            "st %0,%A1", op, plen, -2);
4995
4996      return REGNO (XEXP (base, 0)) == REG_X
4997        ? avr_asm_len ("sbiw r26,2"  CR_TAB
4998                       "st X+,%A1"   CR_TAB
4999                       "st X,%B1"    CR_TAB
5000                       "sbiw r26,1", op, plen, -4)
5001
5002        : avr_asm_len ("sbiw %r0,2"  CR_TAB
5003                       "st %p0,%A1"  CR_TAB
5004                       "std %p0+1,%B1", op, plen, -3);
5005    }
5006  else if (GET_CODE (base) == POST_INC) /* (R++) */
5007    {
5008      return avr_asm_len ("st %0,%A1"  CR_TAB
5009                          "st %0,%B1", op, plen, -2);
5010
5011    }
5012  fatal_insn ("unknown move insn:",insn);
5013  return "";
5014}
5015
5016static const char*
5017avr_out_movhi_mr_r_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
5018{
5019  rtx dest = op[0];
5020  rtx src = op[1];
5021  rtx base = XEXP (dest, 0);
5022  int reg_base = true_regnum (base);
5023  int reg_src = true_regnum (src);
5024  int mem_volatile_p = MEM_VOLATILE_P (dest);
5025
5026  if (reg_base == reg_src)
5027    {
5028      return !mem_volatile_p && reg_unused_after (insn, src)
5029        ? avr_asm_len ("mov __tmp_reg__,%B1"   CR_TAB
5030                       "st %0,%A1"             CR_TAB
5031                       TINY_ADIW (%E0, %F0, 1) CR_TAB
5032                       "st %0,__tmp_reg__", op, plen, -5)
5033        : avr_asm_len ("mov __tmp_reg__,%B1"   CR_TAB
5034                       TINY_ADIW (%E0, %F0, 1) CR_TAB
5035                       "st %0,__tmp_reg__"      CR_TAB
5036                       TINY_SBIW (%E0, %F0, 1) CR_TAB
5037                       "st %0, %A1", op, plen, -7);
5038    }
5039
5040  return !mem_volatile_p && reg_unused_after (insn, base)
5041      ? avr_asm_len ("st %0+,%A1" CR_TAB
5042                     "st %0,%B1", op, plen, -2)
5043      : avr_asm_len (TINY_ADIW (%E0, %F0, 1) CR_TAB
5044                     "st %0,%B1"             CR_TAB
5045                     "st -%0,%A1", op, plen, -4);
5046}
5047
5048static const char*
5049avr_out_movhi_mr_r_reg_disp_tiny (rtx op[], int *plen)
5050{
5051  rtx dest = op[0];
5052  rtx src = op[1];
5053  rtx base = XEXP (dest, 0);
5054  int reg_base = REGNO (XEXP (base, 0));
5055  int reg_src = true_regnum (src);
5056
5057  return reg_src == reg_base
5058        ? avr_asm_len ("mov __tmp_reg__,%A1"          CR_TAB
5059                       "mov __zero_reg__,%B1"         CR_TAB
5060                       TINY_ADIW (%I0, %J0, %o0+1)    CR_TAB
5061                       "st %b0,__zero_reg__"          CR_TAB
5062                       "st -%b0,__tmp_reg__"          CR_TAB
5063                       "clr __zero_reg__"             CR_TAB
5064                       TINY_SBIW (%I0, %J0, %o0), op, plen, -9)
5065
5066        : avr_asm_len (TINY_ADIW (%I0, %J0, %o0+1) CR_TAB
5067                       "st %b0,%B1"                CR_TAB
5068                       "st -%b0,%A1"               CR_TAB
5069                       TINY_SBIW (%I0, %J0, %o0), op, plen, -6);
5070}
5071
5072static const char*
5073avr_out_movhi_mr_r_post_inc_tiny (rtx op[], int *plen)
5074{
5075  return avr_asm_len (TINY_ADIW (%I0, %J0, 1)  CR_TAB
5076                      "st %p0,%B1"    CR_TAB
5077                      "st -%p0,%A1"   CR_TAB
5078                      TINY_ADIW (%I0, %J0, 2), op, plen, -6);
5079}
5080
5081static const char*
5082out_movhi_mr_r (rtx_insn *insn, rtx op[], int *plen)
5083{
5084  rtx dest = op[0];
5085  rtx src = op[1];
5086  rtx base = XEXP (dest, 0);
5087  int reg_base = true_regnum (base);
5088  int reg_src = true_regnum (src);
5089  int mem_volatile_p;
5090
5091  /* "volatile" forces writing high-byte first (no-xmega) resp.
5092     low-byte first (xmega) even if less efficient, for correct
5093     operation with 16-bit I/O registers like.  */
5094
5095  if (AVR_XMEGA)
5096    return avr_out_movhi_mr_r_xmega (insn, op, plen);
5097
5098  mem_volatile_p = MEM_VOLATILE_P (dest);
5099
5100  if (CONSTANT_ADDRESS_P (base))
5101    {
5102      int n_words = AVR_TINY ? 2 : 4;
5103      return optimize > 0 && io_address_operand (base, HImode)
5104        ? avr_asm_len ("out %i0+1,%B1" CR_TAB
5105                       "out %i0,%A1", op, plen, -2)
5106
5107        : avr_asm_len ("sts %m0+1,%B1" CR_TAB
5108                       "sts %m0,%A1", op, plen, -n_words);
5109    }
5110
5111  if (reg_base > 0)
5112    {
5113      if (AVR_TINY)
5114        return avr_out_movhi_mr_r_reg_no_disp_tiny (insn, op, plen);
5115
5116      if (reg_base != REG_X)
5117        return avr_asm_len ("std %0+1,%B1" CR_TAB
5118                            "st %0,%A1", op, plen, -2);
5119
5120      if (reg_src == REG_X)
5121        /* "st X+,r26" and "st -X,r26" are undefined.  */
5122        return !mem_volatile_p && reg_unused_after (insn, src)
5123          ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
5124                         "st X,r26"            CR_TAB
5125                         "adiw r26,1"          CR_TAB
5126                         "st X,__tmp_reg__", op, plen, -4)
5127
5128          : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
5129                         "adiw r26,1"          CR_TAB
5130                         "st X,__tmp_reg__"    CR_TAB
5131                         "sbiw r26,1"          CR_TAB
5132                         "st X,r26", op, plen, -5);
5133
5134      return !mem_volatile_p && reg_unused_after (insn, base)
5135        ? avr_asm_len ("st X+,%A1" CR_TAB
5136                       "st X,%B1", op, plen, -2)
5137        : avr_asm_len ("adiw r26,1" CR_TAB
5138                       "st X,%B1"   CR_TAB
5139                       "st -X,%A1", op, plen, -3);
5140    }
5141  else if (GET_CODE (base) == PLUS)
5142    {
5143      int disp = INTVAL (XEXP (base, 1));
5144
5145      if (AVR_TINY)
5146        return avr_out_movhi_mr_r_reg_disp_tiny (op, plen);
5147
5148      reg_base = REGNO (XEXP (base, 0));
5149      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
5150        {
5151          if (reg_base != REG_Y)
5152            fatal_insn ("incorrect insn:",insn);
5153
5154          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
5155            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
5156                           "std Y+63,%B1"    CR_TAB
5157                           "std Y+62,%A1"    CR_TAB
5158                           "sbiw r28,%o0-62", op, plen, -4)
5159
5160            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
5161                           "sbci r29,hi8(-%o0)" CR_TAB
5162                           "std Y+1,%B1"        CR_TAB
5163                           "st Y,%A1"           CR_TAB
5164                           "subi r28,lo8(%o0)"  CR_TAB
5165                           "sbci r29,hi8(%o0)", op, plen, -6);
5166        }
5167
5168      if (reg_base != REG_X)
5169        return avr_asm_len ("std %B0,%B1" CR_TAB
5170                            "std %A0,%A1", op, plen, -2);
5171      /* (X + d) = R */
5172      return reg_src == REG_X
5173        ? avr_asm_len ("mov __tmp_reg__,r26"  CR_TAB
5174                       "mov __zero_reg__,r27" CR_TAB
5175                       "adiw r26,%o0+1"       CR_TAB
5176                       "st X,__zero_reg__"    CR_TAB
5177                       "st -X,__tmp_reg__"    CR_TAB
5178                       "clr __zero_reg__"     CR_TAB
5179                       "sbiw r26,%o0", op, plen, -7)
5180
5181        : avr_asm_len ("adiw r26,%o0+1" CR_TAB
5182                       "st X,%B1"       CR_TAB
5183                       "st -X,%A1"      CR_TAB
5184                       "sbiw r26,%o0", op, plen, -4);
5185    }
5186  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
5187    {
5188      return avr_asm_len ("st %0,%B1" CR_TAB
5189                          "st %0,%A1", op, plen, -2);
5190    }
5191  else if (GET_CODE (base) == POST_INC) /* (R++) */
5192    {
5193      if (!mem_volatile_p)
5194        return avr_asm_len ("st %0,%A1"  CR_TAB
5195                            "st %0,%B1", op, plen, -2);
5196
5197      if (AVR_TINY)
5198        return avr_out_movhi_mr_r_post_inc_tiny (op, plen);
5199
5200      return REGNO (XEXP (base, 0)) == REG_X
5201        ? avr_asm_len ("adiw r26,1"  CR_TAB
5202                       "st X,%B1"    CR_TAB
5203                       "st -X,%A1"   CR_TAB
5204                       "adiw r26,2", op, plen, -4)
5205
5206        : avr_asm_len ("std %p0+1,%B1" CR_TAB
5207                       "st %p0,%A1"    CR_TAB
5208                       "adiw %r0,2", op, plen, -3);
5209    }
5210  fatal_insn ("unknown move insn:",insn);
5211  return "";
5212}
5213
5214/* Return 1 if frame pointer for current function required.  */
5215
5216static bool
5217avr_frame_pointer_required_p (void)
5218{
5219  return (cfun->calls_alloca
5220          || cfun->calls_setjmp
5221          || cfun->has_nonlocal_label
5222          || crtl->args.info.nregs == 0
5223          || get_frame_size () > 0);
5224}
5225
5226/* Returns the condition of compare insn INSN, or UNKNOWN.  */
5227
5228static RTX_CODE
5229compare_condition (rtx_insn *insn)
5230{
5231  rtx_insn *next = next_real_insn (insn);
5232
5233  if (next && JUMP_P (next))
5234    {
5235      rtx pat = PATTERN (next);
5236      rtx src = SET_SRC (pat);
5237
5238      if (IF_THEN_ELSE == GET_CODE (src))
5239        return GET_CODE (XEXP (src, 0));
5240    }
5241
5242  return UNKNOWN;
5243}
5244
5245
5246/* Returns true iff INSN is a tst insn that only tests the sign.  */
5247
5248static bool
5249compare_sign_p (rtx_insn *insn)
5250{
5251  RTX_CODE cond = compare_condition (insn);
5252  return (cond == GE || cond == LT);
5253}
5254
5255
5256/* Returns true iff the next insn is a JUMP_INSN with a condition
5257   that needs to be swapped (GT, GTU, LE, LEU).  */
5258
5259static bool
5260compare_diff_p (rtx_insn *insn)
5261{
5262  RTX_CODE cond = compare_condition (insn);
5263  return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
5264}
5265
5266/* Returns true iff INSN is a compare insn with the EQ or NE condition.  */
5267
5268static bool
5269compare_eq_p (rtx_insn *insn)
5270{
5271  RTX_CODE cond = compare_condition (insn);
5272  return (cond == EQ || cond == NE);
5273}
5274
5275
5276/* Output compare instruction
5277
5278      compare (XOP[0], XOP[1])
5279
5280   for a register XOP[0] and a compile-time constant XOP[1].  Return "".
5281   XOP[2] is an 8-bit scratch register as needed.
5282
5283   PLEN == NULL:  Output instructions.
5284   PLEN != NULL:  Set *PLEN to the length (in words) of the sequence.
5285                  Don't output anything.  */
5286
5287const char*
5288avr_out_compare (rtx_insn *insn, rtx *xop, int *plen)
5289{
5290  /* Register to compare and value to compare against. */
5291  rtx xreg = xop[0];
5292  rtx xval = xop[1];
5293
5294  /* MODE of the comparison.  */
5295  machine_mode mode;
5296
5297  /* Number of bytes to operate on.  */
5298  int i, n_bytes = GET_MODE_SIZE (GET_MODE (xreg));
5299
5300  /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown.  */
5301  int clobber_val = -1;
5302
5303  /* Map fixed mode operands to integer operands with the same binary
5304     representation.  They are easier to handle in the remainder.  */
5305
5306  if (CONST_FIXED_P (xval))
5307    {
5308      xreg = avr_to_int_mode (xop[0]);
5309      xval = avr_to_int_mode (xop[1]);
5310    }
5311
5312  mode = GET_MODE (xreg);
5313
5314  gcc_assert (REG_P (xreg));
5315  gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
5316              || (const_double_operand (xval, VOIDmode) && n_bytes == 8));
5317
5318  if (plen)
5319    *plen = 0;
5320
5321  /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
5322     against 0 by ORing the bytes.  This is one instruction shorter.
5323     Notice that 64-bit comparisons are always against reg:ALL8 18 (ACC_A)
5324     and therefore don't use this.  */
5325
5326  if (!test_hard_reg_class (LD_REGS, xreg)
5327      && compare_eq_p (insn)
5328      && reg_unused_after (insn, xreg))
5329    {
5330      if (xval == const1_rtx)
5331        {
5332          avr_asm_len ("dec %A0" CR_TAB
5333                       "or %A0,%B0", xop, plen, 2);
5334
5335          if (n_bytes >= 3)
5336            avr_asm_len ("or %A0,%C0", xop, plen, 1);
5337
5338          if (n_bytes >= 4)
5339            avr_asm_len ("or %A0,%D0", xop, plen, 1);
5340
5341          return "";
5342        }
5343      else if (xval == constm1_rtx)
5344        {
5345          if (n_bytes >= 4)
5346            avr_asm_len ("and %A0,%D0", xop, plen, 1);
5347
5348          if (n_bytes >= 3)
5349            avr_asm_len ("and %A0,%C0", xop, plen, 1);
5350
5351          return avr_asm_len ("and %A0,%B0" CR_TAB
5352                              "com %A0", xop, plen, 2);
5353        }
5354    }
5355
5356  for (i = 0; i < n_bytes; i++)
5357    {
5358      /* We compare byte-wise.  */
5359      rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
5360      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
5361
5362      /* 8-bit value to compare with this byte.  */
5363      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
5364
5365      /* Registers R16..R31 can operate with immediate.  */
5366      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
5367
5368      xop[0] = reg8;
5369      xop[1] = gen_int_mode (val8, QImode);
5370
5371      /* Word registers >= R24 can use SBIW/ADIW with 0..63.  */
5372
5373      if (i == 0
5374          && test_hard_reg_class (ADDW_REGS, reg8))
5375        {
5376          int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
5377
5378          if (IN_RANGE (val16, 0, 63)
5379              && (val8 == 0
5380                  || reg_unused_after (insn, xreg)))
5381            {
5382              if (AVR_TINY)
5383                avr_asm_len (TINY_SBIW (%A0, %B0, %1), xop, plen, 2);
5384              else
5385                avr_asm_len ("sbiw %0,%1", xop, plen, 1);
5386
5387              i++;
5388              continue;
5389            }
5390
5391          if (n_bytes == 2
5392              && IN_RANGE (val16, -63, -1)
5393              && compare_eq_p (insn)
5394              && reg_unused_after (insn, xreg))
5395            {
5396              return AVR_TINY
5397                  ? avr_asm_len (TINY_ADIW (%A0, %B0, %n1), xop, plen, 2)
5398                  : avr_asm_len ("adiw %0,%n1", xop, plen, 1);
5399            }
5400        }
5401
5402      /* Comparing against 0 is easy.  */
5403
5404      if (val8 == 0)
5405        {
5406          avr_asm_len (i == 0
5407                       ? "cp %0,__zero_reg__"
5408                       : "cpc %0,__zero_reg__", xop, plen, 1);
5409          continue;
5410        }
5411
5412      /* Upper registers can compare and subtract-with-carry immediates.
5413         Notice that compare instructions do the same as respective subtract
5414         instruction; the only difference is that comparisons don't write
5415         the result back to the target register.  */
5416
5417      if (ld_reg_p)
5418        {
5419          if (i == 0)
5420            {
5421              avr_asm_len ("cpi %0,%1", xop, plen, 1);
5422              continue;
5423            }
5424          else if (reg_unused_after (insn, xreg))
5425            {
5426              avr_asm_len ("sbci %0,%1", xop, plen, 1);
5427              continue;
5428            }
5429        }
5430
5431      /* Must load the value into the scratch register.  */
5432
5433      gcc_assert (REG_P (xop[2]));
5434
5435      if (clobber_val != (int) val8)
5436        avr_asm_len ("ldi %2,%1", xop, plen, 1);
5437      clobber_val = (int) val8;
5438
5439      avr_asm_len (i == 0
5440                   ? "cp %0,%2"
5441                   : "cpc %0,%2", xop, plen, 1);
5442    }
5443
5444  return "";
5445}
5446
5447
5448/* Prepare operands of compare_const_di2 to be used with avr_out_compare.  */
5449
5450const char*
5451avr_out_compare64 (rtx_insn *insn, rtx *op, int *plen)
5452{
5453  rtx xop[3];
5454
5455  xop[0] = gen_rtx_REG (DImode, 18);
5456  xop[1] = op[0];
5457  xop[2] = op[1];
5458
5459  return avr_out_compare (insn, xop, plen);
5460}
5461
5462/* Output test instruction for HImode.  */
5463
5464const char*
5465avr_out_tsthi (rtx_insn *insn, rtx *op, int *plen)
5466{
5467  if (compare_sign_p (insn))
5468    {
5469      avr_asm_len ("tst %B0", op, plen, -1);
5470    }
5471  else if (reg_unused_after (insn, op[0])
5472           && compare_eq_p (insn))
5473    {
5474      /* Faster than sbiw if we can clobber the operand.  */
5475      avr_asm_len ("or %A0,%B0", op, plen, -1);
5476    }
5477  else
5478    {
5479      avr_out_compare (insn, op, plen);
5480    }
5481
5482  return "";
5483}
5484
5485
5486/* Output test instruction for PSImode.  */
5487
5488const char*
5489avr_out_tstpsi (rtx_insn *insn, rtx *op, int *plen)
5490{
5491  if (compare_sign_p (insn))
5492    {
5493      avr_asm_len ("tst %C0", op, plen, -1);
5494    }
5495  else if (reg_unused_after (insn, op[0])
5496           && compare_eq_p (insn))
5497    {
5498      /* Faster than sbiw if we can clobber the operand.  */
5499      avr_asm_len ("or %A0,%B0" CR_TAB
5500                   "or %A0,%C0", op, plen, -2);
5501    }
5502  else
5503    {
5504      avr_out_compare (insn, op, plen);
5505    }
5506
5507  return "";
5508}
5509
5510
5511/* Output test instruction for SImode.  */
5512
5513const char*
5514avr_out_tstsi (rtx_insn *insn, rtx *op, int *plen)
5515{
5516  if (compare_sign_p (insn))
5517    {
5518      avr_asm_len ("tst %D0", op, plen, -1);
5519    }
5520  else if (reg_unused_after (insn, op[0])
5521           && compare_eq_p (insn))
5522    {
5523      /* Faster than sbiw if we can clobber the operand.  */
5524      avr_asm_len ("or %A0,%B0" CR_TAB
5525                   "or %A0,%C0" CR_TAB
5526                   "or %A0,%D0", op, plen, -3);
5527    }
5528  else
5529    {
5530      avr_out_compare (insn, op, plen);
5531    }
5532
5533  return "";
5534}
5535
5536
5537/* Generate asm equivalent for various shifts.  This only handles cases
5538   that are not already carefully hand-optimized in ?sh??i3_out.
5539
5540   OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
5541   OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
5542   OPERANDS[3] is a QImode scratch register from LD regs if
5543               available and SCRATCH, otherwise (no scratch available)
5544
5545   TEMPL is an assembler template that shifts by one position.
5546   T_LEN is the length of this template.  */
5547
5548void
5549out_shift_with_cnt (const char *templ, rtx_insn *insn, rtx operands[],
5550		    int *plen, int t_len)
5551{
5552  bool second_label = true;
5553  bool saved_in_tmp = false;
5554  bool use_zero_reg = false;
5555  rtx op[5];
5556
5557  op[0] = operands[0];
5558  op[1] = operands[1];
5559  op[2] = operands[2];
5560  op[3] = operands[3];
5561
5562  if (plen)
5563    *plen = 0;
5564
5565  if (CONST_INT_P (operands[2]))
5566    {
5567      bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
5568                      && REG_P (operands[3]));
5569      int count = INTVAL (operands[2]);
5570      int max_len = 10;  /* If larger than this, always use a loop.  */
5571
5572      if (count <= 0)
5573          return;
5574
5575      if (count < 8 && !scratch)
5576        use_zero_reg = true;
5577
5578      if (optimize_size)
5579        max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
5580
5581      if (t_len * count <= max_len)
5582        {
5583          /* Output shifts inline with no loop - faster.  */
5584
5585          while (count-- > 0)
5586            avr_asm_len (templ, op, plen, t_len);
5587
5588          return;
5589        }
5590
5591      if (scratch)
5592        {
5593          avr_asm_len ("ldi %3,%2", op, plen, 1);
5594        }
5595      else if (use_zero_reg)
5596        {
5597          /* Hack to save one word: use __zero_reg__ as loop counter.
5598             Set one bit, then shift in a loop until it is 0 again.  */
5599
5600          op[3] = zero_reg_rtx;
5601
5602          avr_asm_len ("set" CR_TAB
5603                       "bld %3,%2-1", op, plen, 2);
5604        }
5605      else
5606        {
5607          /* No scratch register available, use one from LD_REGS (saved in
5608             __tmp_reg__) that doesn't overlap with registers to shift.  */
5609
5610          op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
5611          op[4] = tmp_reg_rtx;
5612          saved_in_tmp = true;
5613
5614          avr_asm_len ("mov %4,%3" CR_TAB
5615                       "ldi %3,%2", op, plen, 2);
5616        }
5617
5618      second_label = false;
5619    }
5620  else if (MEM_P (op[2]))
5621    {
5622      rtx op_mov[2];
5623
5624      op_mov[0] = op[3] = tmp_reg_rtx;
5625      op_mov[1] = op[2];
5626
5627      out_movqi_r_mr (insn, op_mov, plen);
5628    }
5629  else if (register_operand (op[2], QImode))
5630    {
5631      op[3] = op[2];
5632
5633      if (!reg_unused_after (insn, op[2])
5634          || reg_overlap_mentioned_p (op[0], op[2]))
5635        {
5636          op[3] = tmp_reg_rtx;
5637          avr_asm_len ("mov %3,%2", op, plen, 1);
5638        }
5639    }
5640  else
5641    fatal_insn ("bad shift insn:", insn);
5642
5643  if (second_label)
5644      avr_asm_len ("rjmp 2f", op, plen, 1);
5645
5646  avr_asm_len ("1:", op, plen, 0);
5647  avr_asm_len (templ, op, plen, t_len);
5648
5649  if (second_label)
5650    avr_asm_len ("2:", op, plen, 0);
5651
5652  avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
5653  avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);
5654
5655  if (saved_in_tmp)
5656    avr_asm_len ("mov %3,%4", op, plen, 1);
5657}
5658
5659
5660/* 8bit shift left ((char)x << i)   */
5661
5662const char *
5663ashlqi3_out (rtx_insn *insn, rtx operands[], int *len)
5664{
5665  if (GET_CODE (operands[2]) == CONST_INT)
5666    {
5667      int k;
5668
5669      if (!len)
5670	len = &k;
5671
5672      switch (INTVAL (operands[2]))
5673	{
5674	default:
5675	  if (INTVAL (operands[2]) < 8)
5676	    break;
5677
5678	  *len = 1;
5679	  return "clr %0";
5680
5681	case 1:
5682	  *len = 1;
5683	  return "lsl %0";
5684
5685	case 2:
5686	  *len = 2;
5687	  return ("lsl %0" CR_TAB
5688		  "lsl %0");
5689
5690	case 3:
5691	  *len = 3;
5692	  return ("lsl %0" CR_TAB
5693		  "lsl %0" CR_TAB
5694		  "lsl %0");
5695
5696	case 4:
5697	  if (test_hard_reg_class (LD_REGS, operands[0]))
5698	    {
5699	      *len = 2;
5700	      return ("swap %0" CR_TAB
5701		      "andi %0,0xf0");
5702	    }
5703	  *len = 4;
5704	  return ("lsl %0" CR_TAB
5705		  "lsl %0" CR_TAB
5706		  "lsl %0" CR_TAB
5707		  "lsl %0");
5708
5709	case 5:
5710	  if (test_hard_reg_class (LD_REGS, operands[0]))
5711	    {
5712	      *len = 3;
5713	      return ("swap %0" CR_TAB
5714		      "lsl %0"  CR_TAB
5715		      "andi %0,0xe0");
5716	    }
5717	  *len = 5;
5718	  return ("lsl %0" CR_TAB
5719		  "lsl %0" CR_TAB
5720		  "lsl %0" CR_TAB
5721		  "lsl %0" CR_TAB
5722		  "lsl %0");
5723
5724	case 6:
5725	  if (test_hard_reg_class (LD_REGS, operands[0]))
5726	    {
5727	      *len = 4;
5728	      return ("swap %0" CR_TAB
5729		      "lsl %0"  CR_TAB
5730		      "lsl %0"  CR_TAB
5731		      "andi %0,0xc0");
5732	    }
5733	  *len = 6;
5734	  return ("lsl %0" CR_TAB
5735		  "lsl %0" CR_TAB
5736		  "lsl %0" CR_TAB
5737		  "lsl %0" CR_TAB
5738		  "lsl %0" CR_TAB
5739		  "lsl %0");
5740
5741	case 7:
5742	  *len = 3;
5743	  return ("ror %0" CR_TAB
5744		  "clr %0" CR_TAB
5745		  "ror %0");
5746	}
5747    }
5748  else if (CONSTANT_P (operands[2]))
5749    fatal_insn ("internal compiler error.  Incorrect shift:", insn);
5750
5751  out_shift_with_cnt ("lsl %0",
5752                      insn, operands, len, 1);
5753  return "";
5754}
5755
5756
5757/* 16bit shift left ((short)x << i)   */
5758
5759const char *
5760ashlhi3_out (rtx_insn *insn, rtx operands[], int *len)
5761{
5762  if (GET_CODE (operands[2]) == CONST_INT)
5763    {
5764      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5765      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5766      int k;
5767      int *t = len;
5768
5769      if (!len)
5770	len = &k;
5771
5772      switch (INTVAL (operands[2]))
5773	{
5774	default:
5775	  if (INTVAL (operands[2]) < 16)
5776	    break;
5777
5778	  *len = 2;
5779	  return ("clr %B0" CR_TAB
5780		  "clr %A0");
5781
5782	case 4:
5783	  if (optimize_size && scratch)
5784	    break;  /* 5 */
5785	  if (ldi_ok)
5786	    {
5787	      *len = 6;
5788	      return ("swap %A0"      CR_TAB
5789		      "swap %B0"      CR_TAB
5790		      "andi %B0,0xf0" CR_TAB
5791		      "eor %B0,%A0"   CR_TAB
5792		      "andi %A0,0xf0" CR_TAB
5793		      "eor %B0,%A0");
5794	    }
5795	  if (scratch)
5796	    {
5797	      *len = 7;
5798	      return ("swap %A0"    CR_TAB
5799		      "swap %B0"    CR_TAB
5800		      "ldi %3,0xf0" CR_TAB
5801		      "and %B0,%3"      CR_TAB
5802		      "eor %B0,%A0" CR_TAB
5803		      "and %A0,%3"      CR_TAB
5804		      "eor %B0,%A0");
5805	    }
5806	  break;  /* optimize_size ? 6 : 8 */
5807
5808	case 5:
5809	  if (optimize_size)
5810	    break;  /* scratch ? 5 : 6 */
5811	  if (ldi_ok)
5812	    {
5813	      *len = 8;
5814	      return ("lsl %A0"       CR_TAB
5815		      "rol %B0"       CR_TAB
5816		      "swap %A0"      CR_TAB
5817		      "swap %B0"      CR_TAB
5818		      "andi %B0,0xf0" CR_TAB
5819		      "eor %B0,%A0"   CR_TAB
5820		      "andi %A0,0xf0" CR_TAB
5821		      "eor %B0,%A0");
5822	    }
5823	  if (scratch)
5824	    {
5825	      *len = 9;
5826	      return ("lsl %A0"     CR_TAB
5827		      "rol %B0"     CR_TAB
5828		      "swap %A0"    CR_TAB
5829		      "swap %B0"    CR_TAB
5830		      "ldi %3,0xf0" CR_TAB
5831		      "and %B0,%3"      CR_TAB
5832		      "eor %B0,%A0" CR_TAB
5833		      "and %A0,%3"      CR_TAB
5834		      "eor %B0,%A0");
5835	    }
5836	  break;  /* 10 */
5837
5838	case 6:
5839	  if (optimize_size)
5840	    break;  /* scratch ? 5 : 6 */
5841	  *len = 9;
5842	  return ("clr __tmp_reg__" CR_TAB
5843		  "lsr %B0"         CR_TAB
5844		  "ror %A0"         CR_TAB
5845		  "ror __tmp_reg__" CR_TAB
5846		  "lsr %B0"         CR_TAB
5847		  "ror %A0"         CR_TAB
5848		  "ror __tmp_reg__" CR_TAB
5849		  "mov %B0,%A0"     CR_TAB
5850		  "mov %A0,__tmp_reg__");
5851
5852	case 7:
5853	  *len = 5;
5854	  return ("lsr %B0"     CR_TAB
5855		  "mov %B0,%A0" CR_TAB
5856		  "clr %A0"     CR_TAB
5857		  "ror %B0"     CR_TAB
5858		  "ror %A0");
5859
5860	case 8:
5861	  return *len = 2, ("mov %B0,%A1" CR_TAB
5862			    "clr %A0");
5863
5864	case 9:
5865	  *len = 3;
5866	  return ("mov %B0,%A0" CR_TAB
5867		  "clr %A0"     CR_TAB
5868		  "lsl %B0");
5869
5870	case 10:
5871	  *len = 4;
5872	  return ("mov %B0,%A0" CR_TAB
5873		  "clr %A0"     CR_TAB
5874		  "lsl %B0"     CR_TAB
5875		  "lsl %B0");
5876
5877	case 11:
5878	  *len = 5;
5879	  return ("mov %B0,%A0" CR_TAB
5880		  "clr %A0"     CR_TAB
5881		  "lsl %B0"     CR_TAB
5882		  "lsl %B0"     CR_TAB
5883		  "lsl %B0");
5884
5885	case 12:
5886	  if (ldi_ok)
5887	    {
5888	      *len = 4;
5889	      return ("mov %B0,%A0" CR_TAB
5890		      "clr %A0"     CR_TAB
5891		      "swap %B0"    CR_TAB
5892		      "andi %B0,0xf0");
5893	    }
5894	  if (scratch)
5895	    {
5896	      *len = 5;
5897	      return ("mov %B0,%A0" CR_TAB
5898		      "clr %A0"     CR_TAB
5899		      "swap %B0"    CR_TAB
5900		      "ldi %3,0xf0" CR_TAB
5901		      "and %B0,%3");
5902	    }
5903	  *len = 6;
5904	  return ("mov %B0,%A0" CR_TAB
5905		  "clr %A0"     CR_TAB
5906		  "lsl %B0"     CR_TAB
5907		  "lsl %B0"     CR_TAB
5908		  "lsl %B0"     CR_TAB
5909		  "lsl %B0");
5910
5911	case 13:
5912	  if (ldi_ok)
5913	    {
5914	      *len = 5;
5915	      return ("mov %B0,%A0" CR_TAB
5916		      "clr %A0"     CR_TAB
5917		      "swap %B0"    CR_TAB
5918		      "lsl %B0"     CR_TAB
5919		      "andi %B0,0xe0");
5920	    }
5921	  if (AVR_HAVE_MUL && scratch)
5922	    {
5923	      *len = 5;
5924	      return ("ldi %3,0x20" CR_TAB
5925		      "mul %A0,%3"  CR_TAB
5926		      "mov %B0,r0"  CR_TAB
5927		      "clr %A0"     CR_TAB
5928		      "clr __zero_reg__");
5929	    }
5930	  if (optimize_size && scratch)
5931	    break;  /* 5 */
5932	  if (scratch)
5933	    {
5934	      *len = 6;
5935	      return ("mov %B0,%A0" CR_TAB
5936		      "clr %A0"     CR_TAB
5937		      "swap %B0"    CR_TAB
5938		      "lsl %B0"     CR_TAB
5939		      "ldi %3,0xe0" CR_TAB
5940		      "and %B0,%3");
5941	    }
5942	  if (AVR_HAVE_MUL)
5943	    {
5944	      *len = 6;
5945	      return ("set"            CR_TAB
5946		      "bld r1,5"   CR_TAB
5947		      "mul %A0,r1" CR_TAB
5948		      "mov %B0,r0" CR_TAB
5949		      "clr %A0"    CR_TAB
5950		      "clr __zero_reg__");
5951	    }
5952	  *len = 7;
5953	  return ("mov %B0,%A0" CR_TAB
5954		  "clr %A0"     CR_TAB
5955		  "lsl %B0"     CR_TAB
5956		  "lsl %B0"     CR_TAB
5957		  "lsl %B0"     CR_TAB
5958		  "lsl %B0"     CR_TAB
5959		  "lsl %B0");
5960
5961	case 14:
5962	  if (AVR_HAVE_MUL && ldi_ok)
5963	    {
5964	      *len = 5;
5965	      return ("ldi %B0,0x40" CR_TAB
5966		      "mul %A0,%B0"  CR_TAB
5967		      "mov %B0,r0"   CR_TAB
5968		      "clr %A0"      CR_TAB
5969		      "clr __zero_reg__");
5970	    }
5971	  if (AVR_HAVE_MUL && scratch)
5972	    {
5973	      *len = 5;
5974	      return ("ldi %3,0x40" CR_TAB
5975		      "mul %A0,%3"  CR_TAB
5976		      "mov %B0,r0"  CR_TAB
5977		      "clr %A0"     CR_TAB
5978		      "clr __zero_reg__");
5979	    }
5980	  if (optimize_size && ldi_ok)
5981	    {
5982	      *len = 5;
5983	      return ("mov %B0,%A0" CR_TAB
5984		      "ldi %A0,6" "\n1:\t"
5985		      "lsl %B0"     CR_TAB
5986		      "dec %A0"     CR_TAB
5987		      "brne 1b");
5988	    }
5989	  if (optimize_size && scratch)
5990	    break;  /* 5 */
5991	  *len = 6;
5992	  return ("clr %B0" CR_TAB
5993		  "lsr %A0" CR_TAB
5994		  "ror %B0" CR_TAB
5995		  "lsr %A0" CR_TAB
5996		  "ror %B0" CR_TAB
5997		  "clr %A0");
5998
5999	case 15:
6000	  *len = 4;
6001	  return ("clr %B0" CR_TAB
6002		  "lsr %A0" CR_TAB
6003		  "ror %B0" CR_TAB
6004		  "clr %A0");
6005	}
6006      len = t;
6007    }
6008  out_shift_with_cnt ("lsl %A0" CR_TAB
6009                      "rol %B0", insn, operands, len, 2);
6010  return "";
6011}
6012
6013
6014/* 24-bit shift left */
6015
6016const char*
6017avr_out_ashlpsi3 (rtx_insn *insn, rtx *op, int *plen)
6018{
6019  if (plen)
6020    *plen = 0;
6021
6022  if (CONST_INT_P (op[2]))
6023    {
6024      switch (INTVAL (op[2]))
6025        {
6026        default:
6027          if (INTVAL (op[2]) < 24)
6028            break;
6029
6030          return avr_asm_len ("clr %A0" CR_TAB
6031                              "clr %B0" CR_TAB
6032                              "clr %C0", op, plen, 3);
6033
6034        case 8:
6035          {
6036            int reg0 = REGNO (op[0]);
6037            int reg1 = REGNO (op[1]);
6038
6039            if (reg0 >= reg1)
6040              return avr_asm_len ("mov %C0,%B1"  CR_TAB
6041                                  "mov %B0,%A1"  CR_TAB
6042                                  "clr %A0", op, plen, 3);
6043            else
6044              return avr_asm_len ("clr %A0"      CR_TAB
6045                                  "mov %B0,%A1"  CR_TAB
6046                                  "mov %C0,%B1", op, plen, 3);
6047          }
6048
6049        case 16:
6050          {
6051            int reg0 = REGNO (op[0]);
6052            int reg1 = REGNO (op[1]);
6053
6054            if (reg0 + 2 != reg1)
6055              avr_asm_len ("mov %C0,%A0", op, plen, 1);
6056
6057            return avr_asm_len ("clr %B0"  CR_TAB
6058                                "clr %A0", op, plen, 2);
6059          }
6060
6061        case 23:
6062          return avr_asm_len ("clr %C0" CR_TAB
6063                              "lsr %A0" CR_TAB
6064                              "ror %C0" CR_TAB
6065                              "clr %B0" CR_TAB
6066                              "clr %A0", op, plen, 5);
6067        }
6068    }
6069
6070  out_shift_with_cnt ("lsl %A0" CR_TAB
6071                      "rol %B0" CR_TAB
6072                      "rol %C0", insn, op, plen, 3);
6073  return "";
6074}
6075
6076
6077/* 32bit shift left ((long)x << i)   */
6078
6079const char *
6080ashlsi3_out (rtx_insn *insn, rtx operands[], int *len)
6081{
6082  if (GET_CODE (operands[2]) == CONST_INT)
6083    {
6084      int k;
6085      int *t = len;
6086
6087      if (!len)
6088	len = &k;
6089
6090      switch (INTVAL (operands[2]))
6091	{
6092	default:
6093	  if (INTVAL (operands[2]) < 32)
6094	    break;
6095
6096	  if (AVR_HAVE_MOVW)
6097	    return *len = 3, ("clr %D0" CR_TAB
6098			      "clr %C0" CR_TAB
6099			      "movw %A0,%C0");
6100	  *len = 4;
6101	  return ("clr %D0" CR_TAB
6102		  "clr %C0" CR_TAB
6103		  "clr %B0" CR_TAB
6104		  "clr %A0");
6105
6106	case 8:
6107	  {
6108	    int reg0 = true_regnum (operands[0]);
6109	    int reg1 = true_regnum (operands[1]);
6110	    *len = 4;
6111	    if (reg0 >= reg1)
6112	      return ("mov %D0,%C1"  CR_TAB
6113		      "mov %C0,%B1"  CR_TAB
6114		      "mov %B0,%A1"  CR_TAB
6115		      "clr %A0");
6116	    else
6117	      return ("clr %A0"      CR_TAB
6118		      "mov %B0,%A1"  CR_TAB
6119		      "mov %C0,%B1"  CR_TAB
6120		      "mov %D0,%C1");
6121	  }
6122
6123	case 16:
6124	  {
6125	    int reg0 = true_regnum (operands[0]);
6126	    int reg1 = true_regnum (operands[1]);
6127	    if (reg0 + 2 == reg1)
6128	      return *len = 2, ("clr %B0"      CR_TAB
6129				"clr %A0");
6130	    if (AVR_HAVE_MOVW)
6131	      return *len = 3, ("movw %C0,%A1" CR_TAB
6132				"clr %B0"      CR_TAB
6133				"clr %A0");
6134	    else
6135	      return *len = 4, ("mov %C0,%A1"  CR_TAB
6136				"mov %D0,%B1"  CR_TAB
6137				"clr %B0"      CR_TAB
6138				"clr %A0");
6139	  }
6140
6141	case 24:
6142	  *len = 4;
6143	  return ("mov %D0,%A1"  CR_TAB
6144		  "clr %C0"      CR_TAB
6145		  "clr %B0"      CR_TAB
6146		  "clr %A0");
6147
6148	case 31:
6149	  *len = 6;
6150	  return ("clr %D0" CR_TAB
6151		  "lsr %A0" CR_TAB
6152		  "ror %D0" CR_TAB
6153		  "clr %C0" CR_TAB
6154		  "clr %B0" CR_TAB
6155		  "clr %A0");
6156	}
6157      len = t;
6158    }
6159  out_shift_with_cnt ("lsl %A0" CR_TAB
6160                      "rol %B0" CR_TAB
6161                      "rol %C0" CR_TAB
6162                      "rol %D0", insn, operands, len, 4);
6163  return "";
6164}
6165
6166/* 8bit arithmetic shift right  ((signed char)x >> i) */
6167
6168const char *
6169ashrqi3_out (rtx_insn *insn, rtx operands[], int *len)
6170{
6171  if (GET_CODE (operands[2]) == CONST_INT)
6172    {
6173      int k;
6174
6175      if (!len)
6176	len = &k;
6177
6178      switch (INTVAL (operands[2]))
6179	{
6180	case 1:
6181	  *len = 1;
6182	  return "asr %0";
6183
6184	case 2:
6185	  *len = 2;
6186	  return ("asr %0" CR_TAB
6187		  "asr %0");
6188
6189	case 3:
6190	  *len = 3;
6191	  return ("asr %0" CR_TAB
6192		  "asr %0" CR_TAB
6193		  "asr %0");
6194
6195	case 4:
6196	  *len = 4;
6197	  return ("asr %0" CR_TAB
6198		  "asr %0" CR_TAB
6199		  "asr %0" CR_TAB
6200		  "asr %0");
6201
6202	case 5:
6203	  *len = 5;
6204	  return ("asr %0" CR_TAB
6205		  "asr %0" CR_TAB
6206		  "asr %0" CR_TAB
6207		  "asr %0" CR_TAB
6208		  "asr %0");
6209
6210	case 6:
6211	  *len = 4;
6212	  return ("bst %0,6"  CR_TAB
6213		  "lsl %0"    CR_TAB
6214		  "sbc %0,%0" CR_TAB
6215		  "bld %0,0");
6216
6217	default:
6218	  if (INTVAL (operands[2]) < 8)
6219	    break;
6220
6221	  /* fall through */
6222
6223	case 7:
6224	  *len = 2;
6225	  return ("lsl %0" CR_TAB
6226		  "sbc %0,%0");
6227	}
6228    }
6229  else if (CONSTANT_P (operands[2]))
6230    fatal_insn ("internal compiler error.  Incorrect shift:", insn);
6231
6232  out_shift_with_cnt ("asr %0",
6233                      insn, operands, len, 1);
6234  return "";
6235}
6236
6237
6238/* 16bit arithmetic shift right  ((signed short)x >> i) */
6239
6240const char *
6241ashrhi3_out (rtx_insn *insn, rtx operands[], int *len)
6242{
6243  if (GET_CODE (operands[2]) == CONST_INT)
6244    {
6245      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
6246      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
6247      int k;
6248      int *t = len;
6249
6250      if (!len)
6251	len = &k;
6252
6253      switch (INTVAL (operands[2]))
6254	{
6255	case 4:
6256	case 5:
6257	  /* XXX try to optimize this too? */
6258	  break;
6259
6260	case 6:
6261	  if (optimize_size)
6262	    break;  /* scratch ? 5 : 6 */
6263	  *len = 8;
6264	  return ("mov __tmp_reg__,%A0" CR_TAB
6265		  "mov %A0,%B0"         CR_TAB
6266		  "lsl __tmp_reg__"     CR_TAB
6267		  "rol %A0"             CR_TAB
6268		  "sbc %B0,%B0"         CR_TAB
6269		  "lsl __tmp_reg__"     CR_TAB
6270		  "rol %A0"             CR_TAB
6271		  "rol %B0");
6272
6273	case 7:
6274	  *len = 4;
6275	  return ("lsl %A0"     CR_TAB
6276		  "mov %A0,%B0" CR_TAB
6277		  "rol %A0"     CR_TAB
6278		  "sbc %B0,%B0");
6279
6280	case 8:
6281	  {
6282	    int reg0 = true_regnum (operands[0]);
6283	    int reg1 = true_regnum (operands[1]);
6284
6285	    if (reg0 == reg1)
6286	      return *len = 3, ("mov %A0,%B0" CR_TAB
6287				"lsl %B0"     CR_TAB
6288				"sbc %B0,%B0");
6289	    else
6290	      return *len = 4, ("mov %A0,%B1" CR_TAB
6291			        "clr %B0"     CR_TAB
6292			        "sbrc %A0,7"  CR_TAB
6293			        "dec %B0");
6294	  }
6295
6296	case 9:
6297	  *len = 4;
6298	  return ("mov %A0,%B0" CR_TAB
6299		  "lsl %B0"      CR_TAB
6300		  "sbc %B0,%B0" CR_TAB
6301		  "asr %A0");
6302
6303	case 10:
6304	  *len = 5;
6305	  return ("mov %A0,%B0" CR_TAB
6306		  "lsl %B0"     CR_TAB
6307		  "sbc %B0,%B0" CR_TAB
6308		  "asr %A0"     CR_TAB
6309		  "asr %A0");
6310
6311	case 11:
6312	  if (AVR_HAVE_MUL && ldi_ok)
6313	    {
6314	      *len = 5;
6315	      return ("ldi %A0,0x20" CR_TAB
6316		      "muls %B0,%A0" CR_TAB
6317		      "mov %A0,r1"   CR_TAB
6318		      "sbc %B0,%B0"  CR_TAB
6319		      "clr __zero_reg__");
6320	    }
6321	  if (optimize_size && scratch)
6322	    break;  /* 5 */
6323	  *len = 6;
6324	  return ("mov %A0,%B0" CR_TAB
6325		  "lsl %B0"     CR_TAB
6326		  "sbc %B0,%B0" CR_TAB
6327		  "asr %A0"     CR_TAB
6328		  "asr %A0"     CR_TAB
6329		  "asr %A0");
6330
6331	case 12:
6332	  if (AVR_HAVE_MUL && ldi_ok)
6333	    {
6334	      *len = 5;
6335	      return ("ldi %A0,0x10" CR_TAB
6336		      "muls %B0,%A0" CR_TAB
6337		      "mov %A0,r1"   CR_TAB
6338		      "sbc %B0,%B0"  CR_TAB
6339		      "clr __zero_reg__");
6340	    }
6341	  if (optimize_size && scratch)
6342	    break;  /* 5 */
6343	  *len = 7;
6344	  return ("mov %A0,%B0" CR_TAB
6345		  "lsl %B0"     CR_TAB
6346		  "sbc %B0,%B0" CR_TAB
6347		  "asr %A0"     CR_TAB
6348		  "asr %A0"     CR_TAB
6349		  "asr %A0"     CR_TAB
6350		  "asr %A0");
6351
6352	case 13:
6353	  if (AVR_HAVE_MUL && ldi_ok)
6354	    {
6355	      *len = 5;
6356	      return ("ldi %A0,0x08" CR_TAB
6357		      "muls %B0,%A0" CR_TAB
6358		      "mov %A0,r1"   CR_TAB
6359		      "sbc %B0,%B0"  CR_TAB
6360		      "clr __zero_reg__");
6361	    }
6362	  if (optimize_size)
6363	    break;  /* scratch ? 5 : 7 */
6364	  *len = 8;
6365	  return ("mov %A0,%B0" CR_TAB
6366		  "lsl %B0"     CR_TAB
6367		  "sbc %B0,%B0" CR_TAB
6368		  "asr %A0"     CR_TAB
6369		  "asr %A0"     CR_TAB
6370		  "asr %A0"     CR_TAB
6371		  "asr %A0"     CR_TAB
6372		  "asr %A0");
6373
6374	case 14:
6375	  *len = 5;
6376	  return ("lsl %B0"     CR_TAB
6377		  "sbc %A0,%A0" CR_TAB
6378		  "lsl %B0"     CR_TAB
6379		  "mov %B0,%A0" CR_TAB
6380		  "rol %A0");
6381
6382	default:
6383	  if (INTVAL (operands[2]) < 16)
6384	    break;
6385
6386	  /* fall through */
6387
6388	case 15:
6389	  return *len = 3, ("lsl %B0"     CR_TAB
6390			    "sbc %A0,%A0" CR_TAB
6391			    "mov %B0,%A0");
6392	}
6393      len = t;
6394    }
6395  out_shift_with_cnt ("asr %B0" CR_TAB
6396                      "ror %A0", insn, operands, len, 2);
6397  return "";
6398}
6399
6400
6401/* 24-bit arithmetic shift right */
6402
6403const char*
6404avr_out_ashrpsi3 (rtx_insn *insn, rtx *op, int *plen)
6405{
6406  int dest = REGNO (op[0]);
6407  int src = REGNO (op[1]);
6408
6409  if (CONST_INT_P (op[2]))
6410    {
6411      if (plen)
6412        *plen = 0;
6413
6414      switch (INTVAL (op[2]))
6415        {
6416        case 8:
6417          if (dest <= src)
6418            return avr_asm_len ("mov %A0,%B1" CR_TAB
6419                                "mov %B0,%C1" CR_TAB
6420                                "clr %C0"     CR_TAB
6421                                "sbrc %B0,7"  CR_TAB
6422                                "dec %C0", op, plen, 5);
6423          else
6424            return avr_asm_len ("clr %C0"     CR_TAB
6425                                "sbrc %C1,7"  CR_TAB
6426                                "dec %C0"     CR_TAB
6427                                "mov %B0,%C1" CR_TAB
6428                                "mov %A0,%B1", op, plen, 5);
6429
6430        case 16:
6431          if (dest != src + 2)
6432            avr_asm_len ("mov %A0,%C1", op, plen, 1);
6433
6434          return avr_asm_len ("clr %B0"     CR_TAB
6435                              "sbrc %A0,7"  CR_TAB
6436                              "com %B0"     CR_TAB
6437                              "mov %C0,%B0", op, plen, 4);
6438
6439        default:
6440          if (INTVAL (op[2]) < 24)
6441            break;
6442
6443          /* fall through */
6444
6445        case 23:
6446          return avr_asm_len ("lsl %C0"     CR_TAB
6447                              "sbc %A0,%A0" CR_TAB
6448                              "mov %B0,%A0" CR_TAB
6449                              "mov %C0,%A0", op, plen, 4);
6450        } /* switch */
6451    }
6452
6453  out_shift_with_cnt ("asr %C0" CR_TAB
6454                      "ror %B0" CR_TAB
6455                      "ror %A0", insn, op, plen, 3);
6456  return "";
6457}
6458
6459
6460/* 32-bit arithmetic shift right  ((signed long)x >> i) */
6461
6462const char *
6463ashrsi3_out (rtx_insn *insn, rtx operands[], int *len)
6464{
6465  if (GET_CODE (operands[2]) == CONST_INT)
6466    {
6467      int k;
6468      int *t = len;
6469
6470      if (!len)
6471	len = &k;
6472
6473      switch (INTVAL (operands[2]))
6474	{
6475	case 8:
6476	  {
6477	    int reg0 = true_regnum (operands[0]);
6478	    int reg1 = true_regnum (operands[1]);
6479	    *len=6;
6480	    if (reg0 <= reg1)
6481	      return ("mov %A0,%B1" CR_TAB
6482		      "mov %B0,%C1" CR_TAB
6483		      "mov %C0,%D1" CR_TAB
6484		      "clr %D0"     CR_TAB
6485		      "sbrc %C0,7"  CR_TAB
6486		      "dec %D0");
6487	    else
6488	      return ("clr %D0"     CR_TAB
6489		      "sbrc %D1,7"  CR_TAB
6490		      "dec %D0"     CR_TAB
6491		      "mov %C0,%D1" CR_TAB
6492		      "mov %B0,%C1" CR_TAB
6493		      "mov %A0,%B1");
6494	  }
6495
6496	case 16:
6497	  {
6498	    int reg0 = true_regnum (operands[0]);
6499	    int reg1 = true_regnum (operands[1]);
6500
6501	    if (reg0 == reg1 + 2)
6502	      return *len = 4, ("clr %D0"     CR_TAB
6503				"sbrc %B0,7"  CR_TAB
6504				"com %D0"     CR_TAB
6505				"mov %C0,%D0");
6506	    if (AVR_HAVE_MOVW)
6507	      return *len = 5, ("movw %A0,%C1" CR_TAB
6508				"clr %D0"      CR_TAB
6509				"sbrc %B0,7"   CR_TAB
6510				"com %D0"      CR_TAB
6511				"mov %C0,%D0");
6512	    else
6513	      return *len = 6, ("mov %B0,%D1" CR_TAB
6514				"mov %A0,%C1" CR_TAB
6515				"clr %D0"     CR_TAB
6516				"sbrc %B0,7"  CR_TAB
6517				"com %D0"     CR_TAB
6518				"mov %C0,%D0");
6519	  }
6520
6521	case 24:
6522	  return *len = 6, ("mov %A0,%D1" CR_TAB
6523			    "clr %D0"     CR_TAB
6524			    "sbrc %A0,7"  CR_TAB
6525			    "com %D0"     CR_TAB
6526			    "mov %B0,%D0" CR_TAB
6527			    "mov %C0,%D0");
6528
6529	default:
6530	  if (INTVAL (operands[2]) < 32)
6531	    break;
6532
6533	  /* fall through */
6534
6535	case 31:
6536	  if (AVR_HAVE_MOVW)
6537	    return *len = 4, ("lsl %D0"     CR_TAB
6538			      "sbc %A0,%A0" CR_TAB
6539			      "mov %B0,%A0" CR_TAB
6540			      "movw %C0,%A0");
6541	  else
6542	    return *len = 5, ("lsl %D0"     CR_TAB
6543			      "sbc %A0,%A0" CR_TAB
6544			      "mov %B0,%A0" CR_TAB
6545			      "mov %C0,%A0" CR_TAB
6546			      "mov %D0,%A0");
6547	}
6548      len = t;
6549    }
6550  out_shift_with_cnt ("asr %D0" CR_TAB
6551                      "ror %C0" CR_TAB
6552                      "ror %B0" CR_TAB
6553                      "ror %A0", insn, operands, len, 4);
6554  return "";
6555}
6556
6557/* 8-bit logic shift right ((unsigned char)x >> i) */
6558
6559const char *
6560lshrqi3_out (rtx_insn *insn, rtx operands[], int *len)
6561{
6562  if (GET_CODE (operands[2]) == CONST_INT)
6563    {
6564      int k;
6565
6566      if (!len)
6567	len = &k;
6568
6569      switch (INTVAL (operands[2]))
6570	{
6571	default:
6572	  if (INTVAL (operands[2]) < 8)
6573	    break;
6574
6575	  *len = 1;
6576	  return "clr %0";
6577
6578	case 1:
6579	  *len = 1;
6580	  return "lsr %0";
6581
6582	case 2:
6583	  *len = 2;
6584	  return ("lsr %0" CR_TAB
6585		  "lsr %0");
6586	case 3:
6587	  *len = 3;
6588	  return ("lsr %0" CR_TAB
6589		  "lsr %0" CR_TAB
6590		  "lsr %0");
6591
6592	case 4:
6593	  if (test_hard_reg_class (LD_REGS, operands[0]))
6594	    {
6595	      *len=2;
6596	      return ("swap %0" CR_TAB
6597		      "andi %0,0x0f");
6598	    }
6599	  *len = 4;
6600	  return ("lsr %0" CR_TAB
6601		  "lsr %0" CR_TAB
6602		  "lsr %0" CR_TAB
6603		  "lsr %0");
6604
6605	case 5:
6606	  if (test_hard_reg_class (LD_REGS, operands[0]))
6607	    {
6608	      *len = 3;
6609	      return ("swap %0" CR_TAB
6610		      "lsr %0"  CR_TAB
6611		      "andi %0,0x7");
6612	    }
6613	  *len = 5;
6614	  return ("lsr %0" CR_TAB
6615		  "lsr %0" CR_TAB
6616		  "lsr %0" CR_TAB
6617		  "lsr %0" CR_TAB
6618		  "lsr %0");
6619
6620	case 6:
6621	  if (test_hard_reg_class (LD_REGS, operands[0]))
6622	    {
6623	      *len = 4;
6624	      return ("swap %0" CR_TAB
6625		      "lsr %0"  CR_TAB
6626		      "lsr %0"  CR_TAB
6627		      "andi %0,0x3");
6628	    }
6629	  *len = 6;
6630	  return ("lsr %0" CR_TAB
6631		  "lsr %0" CR_TAB
6632		  "lsr %0" CR_TAB
6633		  "lsr %0" CR_TAB
6634		  "lsr %0" CR_TAB
6635		  "lsr %0");
6636
6637	case 7:
6638	  *len = 3;
6639	  return ("rol %0" CR_TAB
6640		  "clr %0" CR_TAB
6641		  "rol %0");
6642	}
6643    }
6644  else if (CONSTANT_P (operands[2]))
6645    fatal_insn ("internal compiler error.  Incorrect shift:", insn);
6646
6647  out_shift_with_cnt ("lsr %0",
6648                      insn, operands, len, 1);
6649  return "";
6650}
6651
6652/* 16-bit logic shift right ((unsigned short)x >> i) */
6653
6654const char *
6655lshrhi3_out (rtx_insn *insn, rtx operands[], int *len)
6656{
6657  if (GET_CODE (operands[2]) == CONST_INT)
6658    {
6659      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
6660      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
6661      int k;
6662      int *t = len;
6663
6664      if (!len)
6665	len = &k;
6666
6667      switch (INTVAL (operands[2]))
6668	{
6669	default:
6670	  if (INTVAL (operands[2]) < 16)
6671	    break;
6672
6673	  *len = 2;
6674	  return ("clr %B0" CR_TAB
6675		  "clr %A0");
6676
6677	case 4:
6678	  if (optimize_size && scratch)
6679	    break;  /* 5 */
6680	  if (ldi_ok)
6681	    {
6682	      *len = 6;
6683	      return ("swap %B0"      CR_TAB
6684		      "swap %A0"      CR_TAB
6685		      "andi %A0,0x0f" CR_TAB
6686		      "eor %A0,%B0"   CR_TAB
6687		      "andi %B0,0x0f" CR_TAB
6688		      "eor %A0,%B0");
6689	    }
6690	  if (scratch)
6691	    {
6692	      *len = 7;
6693	      return ("swap %B0"    CR_TAB
6694		      "swap %A0"    CR_TAB
6695		      "ldi %3,0x0f" CR_TAB
6696		      "and %A0,%3"      CR_TAB
6697		      "eor %A0,%B0" CR_TAB
6698		      "and %B0,%3"      CR_TAB
6699		      "eor %A0,%B0");
6700	    }
6701	  break;  /* optimize_size ? 6 : 8 */
6702
6703	case 5:
6704	  if (optimize_size)
6705	    break;  /* scratch ? 5 : 6 */
6706	  if (ldi_ok)
6707	    {
6708	      *len = 8;
6709	      return ("lsr %B0"       CR_TAB
6710		      "ror %A0"       CR_TAB
6711		      "swap %B0"      CR_TAB
6712		      "swap %A0"      CR_TAB
6713		      "andi %A0,0x0f" CR_TAB
6714		      "eor %A0,%B0"   CR_TAB
6715		      "andi %B0,0x0f" CR_TAB
6716		      "eor %A0,%B0");
6717	    }
6718	  if (scratch)
6719	    {
6720	      *len = 9;
6721	      return ("lsr %B0"     CR_TAB
6722		      "ror %A0"     CR_TAB
6723		      "swap %B0"    CR_TAB
6724		      "swap %A0"    CR_TAB
6725		      "ldi %3,0x0f" CR_TAB
6726		      "and %A0,%3"      CR_TAB
6727		      "eor %A0,%B0" CR_TAB
6728		      "and %B0,%3"      CR_TAB
6729		      "eor %A0,%B0");
6730	    }
6731	  break;  /* 10 */
6732
6733	case 6:
6734	  if (optimize_size)
6735	    break;  /* scratch ? 5 : 6 */
6736	  *len = 9;
6737	  return ("clr __tmp_reg__" CR_TAB
6738		  "lsl %A0"         CR_TAB
6739		  "rol %B0"         CR_TAB
6740		  "rol __tmp_reg__" CR_TAB
6741		  "lsl %A0"         CR_TAB
6742		  "rol %B0"         CR_TAB
6743		  "rol __tmp_reg__" CR_TAB
6744		  "mov %A0,%B0"     CR_TAB
6745		  "mov %B0,__tmp_reg__");
6746
6747	case 7:
6748	  *len = 5;
6749	  return ("lsl %A0"     CR_TAB
6750		  "mov %A0,%B0" CR_TAB
6751		  "rol %A0"     CR_TAB
6752		  "sbc %B0,%B0" CR_TAB
6753		  "neg %B0");
6754
6755	case 8:
6756	  return *len = 2, ("mov %A0,%B1" CR_TAB
6757			    "clr %B0");
6758
6759	case 9:
6760	  *len = 3;
6761	  return ("mov %A0,%B0" CR_TAB
6762		  "clr %B0"     CR_TAB
6763		  "lsr %A0");
6764
6765	case 10:
6766	  *len = 4;
6767	  return ("mov %A0,%B0" CR_TAB
6768		  "clr %B0"     CR_TAB
6769		  "lsr %A0"     CR_TAB
6770		  "lsr %A0");
6771
6772	case 11:
6773	  *len = 5;
6774	  return ("mov %A0,%B0" CR_TAB
6775		  "clr %B0"     CR_TAB
6776		  "lsr %A0"     CR_TAB
6777		  "lsr %A0"     CR_TAB
6778		  "lsr %A0");
6779
6780	case 12:
6781	  if (ldi_ok)
6782	    {
6783	      *len = 4;
6784	      return ("mov %A0,%B0" CR_TAB
6785		      "clr %B0"     CR_TAB
6786		      "swap %A0"    CR_TAB
6787		      "andi %A0,0x0f");
6788	    }
6789	  if (scratch)
6790	    {
6791	      *len = 5;
6792	      return ("mov %A0,%B0" CR_TAB
6793		      "clr %B0"     CR_TAB
6794		      "swap %A0"    CR_TAB
6795		      "ldi %3,0x0f" CR_TAB
6796		      "and %A0,%3");
6797	    }
6798	  *len = 6;
6799	  return ("mov %A0,%B0" CR_TAB
6800		  "clr %B0"     CR_TAB
6801		  "lsr %A0"     CR_TAB
6802		  "lsr %A0"     CR_TAB
6803		  "lsr %A0"     CR_TAB
6804		  "lsr %A0");
6805
6806	case 13:
6807	  if (ldi_ok)
6808	    {
6809	      *len = 5;
6810	      return ("mov %A0,%B0" CR_TAB
6811		      "clr %B0"     CR_TAB
6812		      "swap %A0"    CR_TAB
6813		      "lsr %A0"     CR_TAB
6814		      "andi %A0,0x07");
6815	    }
6816	  if (AVR_HAVE_MUL && scratch)
6817	    {
6818	      *len = 5;
6819	      return ("ldi %3,0x08" CR_TAB
6820		      "mul %B0,%3"  CR_TAB
6821		      "mov %A0,r1"  CR_TAB
6822		      "clr %B0"     CR_TAB
6823		      "clr __zero_reg__");
6824	    }
6825	  if (optimize_size && scratch)
6826	    break;  /* 5 */
6827	  if (scratch)
6828	    {
6829	      *len = 6;
6830	      return ("mov %A0,%B0" CR_TAB
6831		      "clr %B0"     CR_TAB
6832		      "swap %A0"    CR_TAB
6833		      "lsr %A0"     CR_TAB
6834		      "ldi %3,0x07" CR_TAB
6835		      "and %A0,%3");
6836	    }
6837	  if (AVR_HAVE_MUL)
6838	    {
6839	      *len = 6;
6840	      return ("set"            CR_TAB
6841		      "bld r1,3"   CR_TAB
6842		      "mul %B0,r1" CR_TAB
6843		      "mov %A0,r1" CR_TAB
6844		      "clr %B0"    CR_TAB
6845		      "clr __zero_reg__");
6846	    }
6847	  *len = 7;
6848	  return ("mov %A0,%B0" CR_TAB
6849		  "clr %B0"     CR_TAB
6850		  "lsr %A0"     CR_TAB
6851		  "lsr %A0"     CR_TAB
6852		  "lsr %A0"     CR_TAB
6853		  "lsr %A0"     CR_TAB
6854		  "lsr %A0");
6855
6856	case 14:
6857	  if (AVR_HAVE_MUL && ldi_ok)
6858	    {
6859	      *len = 5;
6860	      return ("ldi %A0,0x04" CR_TAB
6861		      "mul %B0,%A0"  CR_TAB
6862		      "mov %A0,r1"   CR_TAB
6863		      "clr %B0"      CR_TAB
6864		      "clr __zero_reg__");
6865	    }
6866	  if (AVR_HAVE_MUL && scratch)
6867	    {
6868	      *len = 5;
6869	      return ("ldi %3,0x04" CR_TAB
6870		      "mul %B0,%3"  CR_TAB
6871		      "mov %A0,r1"  CR_TAB
6872		      "clr %B0"     CR_TAB
6873		      "clr __zero_reg__");
6874	    }
6875	  if (optimize_size && ldi_ok)
6876	    {
6877	      *len = 5;
6878	      return ("mov %A0,%B0" CR_TAB
6879		      "ldi %B0,6" "\n1:\t"
6880		      "lsr %A0"     CR_TAB
6881		      "dec %B0"     CR_TAB
6882		      "brne 1b");
6883	    }
6884	  if (optimize_size && scratch)
6885	    break;  /* 5 */
6886	  *len = 6;
6887	  return ("clr %A0" CR_TAB
6888		  "lsl %B0" CR_TAB
6889		  "rol %A0" CR_TAB
6890		  "lsl %B0" CR_TAB
6891		  "rol %A0" CR_TAB
6892		  "clr %B0");
6893
6894	case 15:
6895	  *len = 4;
6896	  return ("clr %A0" CR_TAB
6897		  "lsl %B0" CR_TAB
6898		  "rol %A0" CR_TAB
6899		  "clr %B0");
6900	}
6901      len = t;
6902    }
6903  out_shift_with_cnt ("lsr %B0" CR_TAB
6904                      "ror %A0", insn, operands, len, 2);
6905  return "";
6906}
6907
6908
6909/* 24-bit logic shift right */
6910
6911const char*
6912avr_out_lshrpsi3 (rtx_insn *insn, rtx *op, int *plen)
6913{
6914  int dest = REGNO (op[0]);
6915  int src = REGNO (op[1]);
6916
6917  if (CONST_INT_P (op[2]))
6918    {
6919      if (plen)
6920        *plen = 0;
6921
6922      switch (INTVAL (op[2]))
6923        {
6924        case 8:
6925          if (dest <= src)
6926            return avr_asm_len ("mov %A0,%B1" CR_TAB
6927                                "mov %B0,%C1" CR_TAB
6928                                "clr %C0", op, plen, 3);
6929          else
6930            return avr_asm_len ("clr %C0"     CR_TAB
6931                                "mov %B0,%C1" CR_TAB
6932                                "mov %A0,%B1", op, plen, 3);
6933
6934        case 16:
6935          if (dest != src + 2)
6936            avr_asm_len ("mov %A0,%C1", op, plen, 1);
6937
6938          return avr_asm_len ("clr %B0"  CR_TAB
6939                              "clr %C0", op, plen, 2);
6940
6941        default:
6942          if (INTVAL (op[2]) < 24)
6943            break;
6944
6945          /* fall through */
6946
6947        case 23:
6948          return avr_asm_len ("clr %A0"    CR_TAB
6949                              "sbrc %C0,7" CR_TAB
6950                              "inc %A0"    CR_TAB
6951                              "clr %B0"    CR_TAB
6952                              "clr %C0", op, plen, 5);
6953        } /* switch */
6954    }
6955
6956  out_shift_with_cnt ("lsr %C0" CR_TAB
6957                      "ror %B0" CR_TAB
6958                      "ror %A0", insn, op, plen, 3);
6959  return "";
6960}
6961
6962
6963/* 32-bit logic shift right ((unsigned int)x >> i) */
6964
6965const char *
6966lshrsi3_out (rtx_insn *insn, rtx operands[], int *len)
6967{
6968  if (GET_CODE (operands[2]) == CONST_INT)
6969    {
6970      int k;
6971      int *t = len;
6972
6973      if (!len)
6974	len = &k;
6975
6976      switch (INTVAL (operands[2]))
6977	{
6978	default:
6979	  if (INTVAL (operands[2]) < 32)
6980	    break;
6981
6982	  if (AVR_HAVE_MOVW)
6983	    return *len = 3, ("clr %D0" CR_TAB
6984			      "clr %C0" CR_TAB
6985			      "movw %A0,%C0");
6986	  *len = 4;
6987	  return ("clr %D0" CR_TAB
6988		  "clr %C0" CR_TAB
6989		  "clr %B0" CR_TAB
6990		  "clr %A0");
6991
6992	case 8:
6993	  {
6994	    int reg0 = true_regnum (operands[0]);
6995	    int reg1 = true_regnum (operands[1]);
6996	    *len = 4;
6997	    if (reg0 <= reg1)
6998	      return ("mov %A0,%B1" CR_TAB
6999		      "mov %B0,%C1" CR_TAB
7000		      "mov %C0,%D1" CR_TAB
7001		      "clr %D0");
7002	    else
7003	      return ("clr %D0"     CR_TAB
7004		      "mov %C0,%D1" CR_TAB
7005		      "mov %B0,%C1" CR_TAB
7006		      "mov %A0,%B1");
7007	  }
7008
7009	case 16:
7010	  {
7011	    int reg0 = true_regnum (operands[0]);
7012	    int reg1 = true_regnum (operands[1]);
7013
7014	    if (reg0 == reg1 + 2)
7015	      return *len = 2, ("clr %C0"     CR_TAB
7016				"clr %D0");
7017	    if (AVR_HAVE_MOVW)
7018	      return *len = 3, ("movw %A0,%C1" CR_TAB
7019				"clr %C0"      CR_TAB
7020				"clr %D0");
7021	    else
7022	      return *len = 4, ("mov %B0,%D1" CR_TAB
7023				"mov %A0,%C1" CR_TAB
7024				"clr %C0"     CR_TAB
7025				"clr %D0");
7026	  }
7027
7028	case 24:
7029	  return *len = 4, ("mov %A0,%D1" CR_TAB
7030			    "clr %B0"     CR_TAB
7031			    "clr %C0"     CR_TAB
7032			    "clr %D0");
7033
7034	case 31:
7035	  *len = 6;
7036	  return ("clr %A0"    CR_TAB
7037		  "sbrc %D0,7" CR_TAB
7038		  "inc %A0"    CR_TAB
7039		  "clr %B0"    CR_TAB
7040		  "clr %C0"    CR_TAB
7041		  "clr %D0");
7042	}
7043      len = t;
7044    }
7045  out_shift_with_cnt ("lsr %D0" CR_TAB
7046                      "ror %C0" CR_TAB
7047                      "ror %B0" CR_TAB
7048                      "ror %A0", insn, operands, len, 4);
7049  return "";
7050}
7051
7052
7053/* Output addition of register XOP[0] and compile time constant XOP[2].
7054   CODE == PLUS:  perform addition by using ADD instructions or
7055   CODE == MINUS: perform addition by using SUB instructions:
7056
7057      XOP[0] = XOP[0] + XOP[2]
7058
7059   Or perform addition/subtraction with register XOP[2] depending on CODE:
7060
7061      XOP[0] = XOP[0] +/- XOP[2]
7062
7063   If PLEN == NULL, print assembler instructions to perform the operation;
7064   otherwise, set *PLEN to the length of the instruction sequence (in words)
7065   printed with PLEN == NULL.  XOP[3] is an 8-bit scratch register or NULL_RTX.
7066   Set *PCC to effect on cc0 according to respective CC_* insn attribute.
7067
7068   CODE_SAT == UNKNOWN: Perform ordinary, non-saturating operation.
7069   CODE_SAT != UNKNOWN: Perform operation and saturate according to CODE_SAT.
7070   If  CODE_SAT != UNKNOWN  then SIGN contains the sign of the summand resp.
7071   the subtrahend in the original insn, provided it is a compile time constant.
7072   In all other cases, SIGN is 0.
7073
7074   If OUT_LABEL is true, print the final 0: label which is needed for
7075   saturated addition / subtraction.  The only case where OUT_LABEL = false
7076   is useful is for saturated addition / subtraction performed during
7077   fixed-point rounding, cf. `avr_out_round'.  */
7078
7079static void
7080avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc,
7081                enum rtx_code code_sat, int sign, bool out_label)
7082{
7083  /* MODE of the operation.  */
7084  machine_mode mode = GET_MODE (xop[0]);
7085
7086  /* INT_MODE of the same size.  */
7087  machine_mode imode = int_mode_for_mode (mode);
7088
7089  /* Number of bytes to operate on.  */
7090  int i, n_bytes = GET_MODE_SIZE (mode);
7091
7092  /* Value (0..0xff) held in clobber register op[3] or -1 if unknown.  */
7093  int clobber_val = -1;
7094
7095  /* op[0]: 8-bit destination register
7096     op[1]: 8-bit const int
7097     op[2]: 8-bit scratch register */
7098  rtx op[3];
7099
7100  /* Started the operation?  Before starting the operation we may skip
7101     adding 0.  This is no more true after the operation started because
7102     carry must be taken into account.  */
7103  bool started = false;
7104
7105  /* Value to add.  There are two ways to add VAL: R += VAL and R -= -VAL.  */
7106  rtx xval = xop[2];
7107
7108  /* Output a BRVC instruction.  Only needed with saturation.  */
7109  bool out_brvc = true;
7110
7111  if (plen)
7112    *plen = 0;
7113
7114  if (REG_P (xop[2]))
7115    {
7116      *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_CLOBBER;
7117
7118      for (i = 0; i < n_bytes; i++)
7119        {
7120          /* We operate byte-wise on the destination.  */
7121          op[0] = simplify_gen_subreg (QImode, xop[0], mode, i);
7122          op[1] = simplify_gen_subreg (QImode, xop[2], mode, i);
7123
7124          if (i == 0)
7125            avr_asm_len (code == PLUS ? "add %0,%1" : "sub %0,%1",
7126                         op, plen, 1);
7127          else
7128            avr_asm_len (code == PLUS ? "adc %0,%1" : "sbc %0,%1",
7129                         op, plen, 1);
7130        }
7131
7132      if (reg_overlap_mentioned_p (xop[0], xop[2]))
7133        {
7134          gcc_assert (REGNO (xop[0]) == REGNO (xop[2]));
7135
7136          if (MINUS == code)
7137            return;
7138        }
7139
7140      goto saturate;
7141    }
7142
7143  /* Except in the case of ADIW with 16-bit register (see below)
7144     addition does not set cc0 in a usable way.  */
7145
7146  *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;
7147
7148  if (CONST_FIXED_P (xval))
7149    xval = avr_to_int_mode (xval);
7150
7151  /* Adding/Subtracting zero is a no-op.  */
7152
7153  if (xval == const0_rtx)
7154    {
7155      *pcc = CC_NONE;
7156      return;
7157    }
7158
7159  if (MINUS == code)
7160    xval = simplify_unary_operation (NEG, imode, xval, imode);
7161
7162  op[2] = xop[3];
7163
7164  if (SS_PLUS == code_sat && MINUS == code
7165      && sign < 0
7166      && 0x80 == (INTVAL (simplify_gen_subreg (QImode, xval, imode, n_bytes-1))
7167                  & GET_MODE_MASK (QImode)))
7168    {
7169      /* We compute x + 0x80 by means of SUB instructions.  We negated the
7170         constant subtrahend above and are left with  x - (-128)  so that we
7171         need something like SUBI r,128 which does not exist because SUBI sets
7172         V according to the sign of the subtrahend.  Notice the only case
7173         where this must be done is when NEG overflowed in case [2s] because
7174         the V computation needs the right sign of the subtrahend.  */
7175
7176      rtx msb = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);
7177
7178      avr_asm_len ("subi %0,128" CR_TAB
7179                   "brmi 0f", &msb, plen, 2);
7180      out_brvc = false;
7181
7182      goto saturate;
7183    }
7184
7185  for (i = 0; i < n_bytes; i++)
7186    {
7187      /* We operate byte-wise on the destination.  */
7188      rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
7189      rtx xval8 = simplify_gen_subreg (QImode, xval, imode, i);
7190
7191      /* 8-bit value to operate with this byte. */
7192      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
7193
7194      /* Registers R16..R31 can operate with immediate.  */
7195      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
7196
7197      op[0] = reg8;
7198      op[1] = gen_int_mode (val8, QImode);
7199
7200      /* To get usable cc0 no low-bytes must have been skipped.  */
7201
7202      if (i && !started)
7203        *pcc = CC_CLOBBER;
7204
7205      if (!started
7206          && i % 2 == 0
7207          && i + 2 <= n_bytes
7208          && test_hard_reg_class (ADDW_REGS, reg8))
7209        {
7210          rtx xval16 = simplify_gen_subreg (HImode, xval, imode, i);
7211          unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
7212
7213          /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
7214             i.e. operate word-wise.  */
7215
7216          if (val16 < 64)
7217            {
7218              if (val16 != 0)
7219                {
7220                  started = true;
7221                  avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
7222                               op, plen, 1);
7223
7224                  if (n_bytes == 2 && PLUS == code)
7225                    *pcc = CC_SET_CZN;
7226                }
7227
7228              i++;
7229              continue;
7230            }
7231        }
7232
7233      if (val8 == 0)
7234        {
7235          if (started)
7236            avr_asm_len (code == PLUS
7237                         ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
7238                         op, plen, 1);
7239          continue;
7240        }
7241      else if ((val8 == 1 || val8 == 0xff)
7242               && UNKNOWN == code_sat
7243               && !started
7244               && i == n_bytes - 1)
7245        {
7246          avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
7247                       op, plen, 1);
7248          *pcc = CC_CLOBBER;
7249          break;
7250        }
7251
7252      switch (code)
7253        {
7254        case PLUS:
7255
7256          gcc_assert (plen != NULL || (op[2] && REG_P (op[2])));
7257
7258          if (plen != NULL && UNKNOWN != code_sat)
7259            {
7260              /* This belongs to the x + 0x80 corner case.  The code with
7261                 ADD instruction is not smaller, thus make this case
7262                 expensive so that the caller won't pick it.  */
7263
7264              *plen += 10;
7265              break;
7266            }
7267
7268          if (clobber_val != (int) val8)
7269            avr_asm_len ("ldi %2,%1", op, plen, 1);
7270          clobber_val = (int) val8;
7271
7272          avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
7273
7274          break; /* PLUS */
7275
7276        case MINUS:
7277
7278          if (ld_reg_p)
7279            avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
7280          else
7281            {
7282              gcc_assert (plen != NULL || REG_P (op[2]));
7283
7284              if (clobber_val != (int) val8)
7285                avr_asm_len ("ldi %2,%1", op, plen, 1);
7286              clobber_val = (int) val8;
7287
7288              avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
7289            }
7290
7291          break; /* MINUS */
7292
7293        default:
7294          /* Unknown code */
7295          gcc_unreachable();
7296        }
7297
7298      started = true;
7299
7300    } /* for all sub-bytes */
7301
7302 saturate:
7303
7304  if (UNKNOWN == code_sat)
7305    return;
7306
7307  *pcc = (int) CC_CLOBBER;
7308
7309  /* Vanilla addition/subtraction is done.  We are left with saturation.
7310
7311     We have to compute  A = A <op> B  where  A  is a register and
7312     B is a register or a non-zero compile time constant CONST.
7313     A is register class "r" if unsigned && B is REG.  Otherwise, A is in "d".
7314     B stands for the original operand $2 in INSN.  In the case of B = CONST,
7315     SIGN in { -1, 1 } is the sign of B.  Otherwise, SIGN is 0.
7316
7317     CODE is the instruction flavor we use in the asm sequence to perform <op>.
7318
7319
7320     unsigned
7321     operation        |  code |  sat if  |    b is      | sat value |  case
7322     -----------------+-------+----------+--------------+-----------+-------
7323     +  as  a + b     |  add  |  C == 1  |  const, reg  | u+ = 0xff |  [1u]
7324     +  as  a - (-b)  |  sub  |  C == 0  |  const       | u+ = 0xff |  [2u]
7325     -  as  a - b     |  sub  |  C == 1  |  const, reg  | u- = 0    |  [3u]
7326     -  as  a + (-b)  |  add  |  C == 0  |  const       | u- = 0    |  [4u]
7327
7328
7329     signed
7330     operation        |  code |  sat if  |    b is      | sat value |  case
7331     -----------------+-------+----------+--------------+-----------+-------
7332     +  as  a + b     |  add  |  V == 1  |  const, reg  | s+        |  [1s]
7333     +  as  a - (-b)  |  sub  |  V == 1  |  const       | s+        |  [2s]
7334     -  as  a - b     |  sub  |  V == 1  |  const, reg  | s-        |  [3s]
7335     -  as  a + (-b)  |  add  |  V == 1  |  const       | s-        |  [4s]
7336
7337     s+  =  b < 0  ?  -0x80 :  0x7f
7338     s-  =  b < 0  ?   0x7f : -0x80
7339
7340     The cases a - b actually perform  a - (-(-b))  if B is CONST.
7341  */
7342
7343  op[0] = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);
7344  op[1] = n_bytes > 1
7345    ? simplify_gen_subreg (QImode, xop[0], mode, n_bytes-2)
7346    : NULL_RTX;
7347
7348  bool need_copy = true;
7349  int len_call = 1 + AVR_HAVE_JMP_CALL;
7350
7351  switch (code_sat)
7352    {
7353    default:
7354      gcc_unreachable();
7355
7356    case SS_PLUS:
7357    case SS_MINUS:
7358
7359      if (out_brvc)
7360        avr_asm_len ("brvc 0f", op, plen, 1);
7361
7362      if (reg_overlap_mentioned_p (xop[0], xop[2]))
7363        {
7364          /* [1s,reg] */
7365
7366          if (n_bytes == 1)
7367            avr_asm_len ("ldi %0,0x7f" CR_TAB
7368                         "adc %0,__zero_reg__", op, plen, 2);
7369          else
7370            avr_asm_len ("ldi %0,0x7f" CR_TAB
7371                         "ldi %1,0xff" CR_TAB
7372                         "adc %1,__zero_reg__" CR_TAB
7373                         "adc %0,__zero_reg__", op, plen, 4);
7374        }
7375      else if (sign == 0 && PLUS == code)
7376        {
7377          /* [1s,reg] */
7378
7379          op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);
7380
7381          if (n_bytes == 1)
7382            avr_asm_len ("ldi %0,0x80" CR_TAB
7383                         "sbrs %2,7"   CR_TAB
7384                         "dec %0", op, plen, 3);
7385          else
7386            avr_asm_len ("ldi %0,0x80" CR_TAB
7387                         "cp %2,%0"    CR_TAB
7388                         "sbc %1,%1"   CR_TAB
7389                         "sbci %0,0", op, plen, 4);
7390        }
7391      else if (sign == 0 && MINUS == code)
7392        {
7393          /* [3s,reg] */
7394
7395          op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);
7396
7397          if (n_bytes == 1)
7398            avr_asm_len ("ldi %0,0x7f" CR_TAB
7399                         "sbrs %2,7"   CR_TAB
7400                         "inc %0", op, plen, 3);
7401          else
7402            avr_asm_len ("ldi %0,0x7f" CR_TAB
7403                         "cp %0,%2"    CR_TAB
7404                         "sbc %1,%1"   CR_TAB
7405                         "sbci %0,-1", op, plen, 4);
7406        }
7407      else if ((sign < 0) ^ (SS_MINUS == code_sat))
7408        {
7409          /* [1s,const,B < 0] [2s,B < 0] */
7410          /* [3s,const,B > 0] [4s,B > 0] */
7411
7412          if (n_bytes == 8)
7413            {
7414              avr_asm_len ("%~call __clr_8", op, plen, len_call);
7415              need_copy = false;
7416            }
7417
7418          avr_asm_len ("ldi %0,0x80", op, plen, 1);
7419          if (n_bytes > 1 && need_copy)
7420            avr_asm_len ("clr %1", op, plen, 1);
7421        }
7422      else if ((sign > 0) ^ (SS_MINUS == code_sat))
7423        {
7424          /* [1s,const,B > 0] [2s,B > 0] */
7425          /* [3s,const,B < 0] [4s,B < 0] */
7426
7427          if (n_bytes == 8)
7428            {
7429              avr_asm_len ("sec" CR_TAB
7430                           "%~call __sbc_8", op, plen, 1 + len_call);
7431              need_copy = false;
7432            }
7433
7434          avr_asm_len ("ldi %0,0x7f", op, plen, 1);
7435          if (n_bytes > 1 && need_copy)
7436            avr_asm_len ("ldi %1,0xff", op, plen, 1);
7437        }
7438      else
7439        gcc_unreachable();
7440
7441      break;
7442
7443    case US_PLUS:
7444      /* [1u] : [2u] */
7445
7446      avr_asm_len (PLUS == code ? "brcc 0f" : "brcs 0f", op, plen, 1);
7447
7448      if (n_bytes == 8)
7449        {
7450          if (MINUS == code)
7451            avr_asm_len ("sec", op, plen, 1);
7452          avr_asm_len ("%~call __sbc_8", op, plen, len_call);
7453
7454          need_copy = false;
7455        }
7456      else
7457        {
7458          if (MINUS == code && !test_hard_reg_class (LD_REGS, op[0]))
7459            avr_asm_len ("sec" CR_TAB
7460                         "sbc %0,%0", op, plen, 2);
7461          else
7462            avr_asm_len (PLUS == code ? "sbc %0,%0" : "ldi %0,0xff",
7463                         op, plen, 1);
7464        }
7465      break; /* US_PLUS */
7466
7467    case US_MINUS:
7468      /* [4u] : [3u] */
7469
7470      avr_asm_len (PLUS == code ? "brcs 0f" : "brcc 0f", op, plen, 1);
7471
7472      if (n_bytes == 8)
7473        {
7474          avr_asm_len ("%~call __clr_8", op, plen, len_call);
7475          need_copy = false;
7476        }
7477      else
7478        avr_asm_len ("clr %0", op, plen, 1);
7479
7480      break;
7481    }
7482
7483  /* We set the MSB in the unsigned case and the 2 MSBs in the signed case.
7484     Now copy the right value to the LSBs.  */
7485
7486  if (need_copy && n_bytes > 1)
7487    {
7488      if (US_MINUS == code_sat || US_PLUS == code_sat)
7489        {
7490          avr_asm_len ("mov %1,%0", op, plen, 1);
7491
7492          if (n_bytes > 2)
7493            {
7494              op[0] = xop[0];
7495              if (AVR_HAVE_MOVW)
7496                avr_asm_len ("movw %0,%1", op, plen, 1);
7497              else
7498                avr_asm_len ("mov %A0,%1" CR_TAB
7499                             "mov %B0,%1", op, plen, 2);
7500            }
7501        }
7502      else if (n_bytes > 2)
7503        {
7504          op[0] = xop[0];
7505          avr_asm_len ("mov %A0,%1" CR_TAB
7506                       "mov %B0,%1", op, plen, 2);
7507        }
7508    }
7509
7510  if (need_copy && n_bytes == 8)
7511    {
7512      if (AVR_HAVE_MOVW)
7513        avr_asm_len ("movw %r0+2,%0" CR_TAB
7514                     "movw %r0+4,%0", xop, plen, 2);
7515      else
7516        avr_asm_len ("mov %r0+2,%0" CR_TAB
7517                     "mov %r0+3,%0" CR_TAB
7518                     "mov %r0+4,%0" CR_TAB
7519                     "mov %r0+5,%0", xop, plen, 4);
7520    }
7521
7522  if (out_label)
7523    avr_asm_len ("0:", op, plen, 0);
7524}
7525
7526
7527/* Output addition/subtraction of register XOP[0] and a constant XOP[2] that
7528   is ont a compile-time constant:
7529
7530      XOP[0] = XOP[0] +/- XOP[2]
7531
7532   This is a helper for the function below.  The only insns that need this
7533   are additions/subtraction for pointer modes, i.e. HImode and PSImode.  */
7534
7535static const char*
7536avr_out_plus_symbol (rtx *xop, enum rtx_code code, int *plen, int *pcc)
7537{
7538  machine_mode mode = GET_MODE (xop[0]);
7539
7540  /* Only pointer modes want to add symbols.  */
7541
7542  gcc_assert (mode == HImode || mode == PSImode);
7543
7544  *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_SET_N;
7545
7546  avr_asm_len (PLUS == code
7547               ? "subi %A0,lo8(-(%2))" CR_TAB "sbci %B0,hi8(-(%2))"
7548               : "subi %A0,lo8(%2)"    CR_TAB "sbci %B0,hi8(%2)",
7549               xop, plen, -2);
7550
7551  if (PSImode == mode)
7552    avr_asm_len (PLUS == code
7553                 ? "sbci %C0,hlo8(-(%2))"
7554                 : "sbci %C0,hlo8(%2)", xop, plen, 1);
7555  return "";
7556}
7557
7558
7559/* Prepare operands of addition/subtraction to be used with avr_out_plus_1.
7560
7561   INSN is a single_set insn or an insn pattern with a binary operation as
7562   SET_SRC that is one of: PLUS, SS_PLUS, US_PLUS, MINUS, SS_MINUS, US_MINUS.
7563
7564   XOP are the operands of INSN.  In the case of 64-bit operations with
7565   constant XOP[] has just one element:  The summand/subtrahend in XOP[0].
7566   The non-saturating insns up to 32 bits may or may not supply a "d" class
7567   scratch as XOP[3].
7568
7569   If PLEN == NULL output the instructions.
7570   If PLEN != NULL set *PLEN to the length of the sequence in words.
7571
7572   PCC is a pointer to store the instructions' effect on cc0.
7573   PCC may be NULL.
7574
7575   PLEN and PCC default to NULL.
7576
7577   OUT_LABEL defaults to TRUE.  For a description, see AVR_OUT_PLUS_1.
7578
7579   Return ""  */
7580
7581const char*
7582avr_out_plus (rtx insn, rtx *xop, int *plen, int *pcc, bool out_label)
7583{
7584  int cc_plus, cc_minus, cc_dummy;
7585  int len_plus, len_minus;
7586  rtx op[4];
7587  rtx xpattern = INSN_P (insn) ? single_set (as_a <rtx_insn *> (insn)) : insn;
7588  rtx xdest = SET_DEST (xpattern);
7589  machine_mode mode = GET_MODE (xdest);
7590  machine_mode imode = int_mode_for_mode (mode);
7591  int n_bytes = GET_MODE_SIZE (mode);
7592  enum rtx_code code_sat = GET_CODE (SET_SRC (xpattern));
7593  enum rtx_code code
7594    = (PLUS == code_sat || SS_PLUS == code_sat || US_PLUS == code_sat
7595       ? PLUS : MINUS);
7596
7597  if (!pcc)
7598    pcc = &cc_dummy;
7599
7600  /* PLUS and MINUS don't saturate:  Use modular wrap-around.  */
7601
7602  if (PLUS == code_sat || MINUS == code_sat)
7603    code_sat = UNKNOWN;
7604
7605  if (n_bytes <= 4 && REG_P (xop[2]))
7606    {
7607      avr_out_plus_1 (xop, plen, code, pcc, code_sat, 0, out_label);
7608      return "";
7609    }
7610
7611  if (8 == n_bytes)
7612    {
7613      op[0] = gen_rtx_REG (DImode, ACC_A);
7614      op[1] = gen_rtx_REG (DImode, ACC_A);
7615      op[2] = avr_to_int_mode (xop[0]);
7616    }
7617  else
7618    {
7619      if (!REG_P (xop[2])
7620          && !CONST_INT_P (xop[2])
7621          && !CONST_FIXED_P (xop[2]))
7622        {
7623          return avr_out_plus_symbol (xop, code, plen, pcc);
7624        }
7625
7626      op[0] = avr_to_int_mode (xop[0]);
7627      op[1] = avr_to_int_mode (xop[1]);
7628      op[2] = avr_to_int_mode (xop[2]);
7629    }
7630
7631  /* Saturations and 64-bit operations don't have a clobber operand.
7632     For the other cases, the caller will provide a proper XOP[3].  */
7633
7634  xpattern = INSN_P (insn) ? PATTERN (insn) : insn;
7635  op[3] = PARALLEL == GET_CODE (xpattern) ? xop[3] : NULL_RTX;
7636
7637  /* Saturation will need the sign of the original operand.  */
7638
7639  rtx xmsb = simplify_gen_subreg (QImode, op[2], imode, n_bytes-1);
7640  int sign = INTVAL (xmsb) < 0 ? -1 : 1;
7641
7642  /* If we subtract and the subtrahend is a constant, then negate it
7643     so that avr_out_plus_1 can be used.  */
7644
7645  if (MINUS == code)
7646    op[2] = simplify_unary_operation (NEG, imode, op[2], imode);
7647
7648  /* Work out the shortest sequence.  */
7649
7650  avr_out_plus_1 (op, &len_minus, MINUS, &cc_minus, code_sat, sign, out_label);
7651  avr_out_plus_1 (op, &len_plus, PLUS, &cc_plus, code_sat, sign, out_label);
7652
7653  if (plen)
7654    {
7655      *plen = (len_minus <= len_plus) ? len_minus : len_plus;
7656      *pcc  = (len_minus <= len_plus) ? cc_minus : cc_plus;
7657    }
7658  else if (len_minus <= len_plus)
7659    avr_out_plus_1 (op, NULL, MINUS, pcc, code_sat, sign, out_label);
7660  else
7661    avr_out_plus_1 (op, NULL, PLUS, pcc, code_sat, sign, out_label);
7662
7663  return "";
7664}
7665
7666
7667/* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
7668   time constant XOP[2]:
7669
7670      XOP[0] = XOP[0] <op> XOP[2]
7671
7672   and return "".  If PLEN == NULL, print assembler instructions to perform the
7673   operation; otherwise, set *PLEN to the length of the instruction sequence
7674   (in words) printed with PLEN == NULL.  XOP[3] is either an 8-bit clobber
7675   register or SCRATCH if no clobber register is needed for the operation.
7676   INSN is an INSN_P or a pattern of an insn.  */
7677
7678const char*
7679avr_out_bitop (rtx insn, rtx *xop, int *plen)
7680{
7681  /* CODE and MODE of the operation.  */
7682  rtx xpattern = INSN_P (insn) ? single_set (as_a <rtx_insn *> (insn)) : insn;
7683  enum rtx_code code = GET_CODE (SET_SRC (xpattern));
7684  machine_mode mode = GET_MODE (xop[0]);
7685
7686  /* Number of bytes to operate on.  */
7687  int i, n_bytes = GET_MODE_SIZE (mode);
7688
7689  /* Value of T-flag (0 or 1) or -1 if unknow.  */
7690  int set_t = -1;
7691
7692  /* Value (0..0xff) held in clobber register op[3] or -1 if unknown.  */
7693  int clobber_val = -1;
7694
7695  /* op[0]: 8-bit destination register
7696     op[1]: 8-bit const int
7697     op[2]: 8-bit clobber register or SCRATCH
7698     op[3]: 8-bit register containing 0xff or NULL_RTX  */
7699  rtx op[4];
7700
7701  op[2] = xop[3];
7702  op[3] = NULL_RTX;
7703
7704  if (plen)
7705    *plen = 0;
7706
7707  for (i = 0; i < n_bytes; i++)
7708    {
7709      /* We operate byte-wise on the destination.  */
7710      rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
7711      rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
7712
7713      /* 8-bit value to operate with this byte. */
7714      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
7715
7716      /* Number of bits set in the current byte of the constant.  */
7717      int pop8 = avr_popcount (val8);
7718
7719      /* Registers R16..R31 can operate with immediate.  */
7720      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
7721
7722      op[0] = reg8;
7723      op[1] = GEN_INT (val8);
7724
7725      switch (code)
7726        {
7727        case IOR:
7728
7729          if (0 == pop8)
7730            continue;
7731          else if (ld_reg_p)
7732            avr_asm_len ("ori %0,%1", op, plen, 1);
7733          else if (1 == pop8)
7734            {
7735              if (set_t != 1)
7736                avr_asm_len ("set", op, plen, 1);
7737              set_t = 1;
7738
7739              op[1] = GEN_INT (exact_log2 (val8));
7740              avr_asm_len ("bld %0,%1", op, plen, 1);
7741            }
7742          else if (8 == pop8)
7743            {
7744              if (op[3] != NULL_RTX)
7745                avr_asm_len ("mov %0,%3", op, plen, 1);
7746              else
7747                avr_asm_len ("clr %0" CR_TAB
7748                             "dec %0", op, plen, 2);
7749
7750              op[3] = op[0];
7751            }
7752          else
7753            {
7754              if (clobber_val != (int) val8)
7755                avr_asm_len ("ldi %2,%1", op, plen, 1);
7756              clobber_val = (int) val8;
7757
7758              avr_asm_len ("or %0,%2", op, plen, 1);
7759            }
7760
7761          continue; /* IOR */
7762
7763        case AND:
7764
7765          if (8 == pop8)
7766            continue;
7767          else if (0 == pop8)
7768            avr_asm_len ("clr %0", op, plen, 1);
7769          else if (ld_reg_p)
7770            avr_asm_len ("andi %0,%1", op, plen, 1);
7771          else if (7 == pop8)
7772            {
7773              if (set_t != 0)
7774                avr_asm_len ("clt", op, plen, 1);
7775              set_t = 0;
7776
7777              op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
7778              avr_asm_len ("bld %0,%1", op, plen, 1);
7779            }
7780          else
7781            {
7782              if (clobber_val != (int) val8)
7783                avr_asm_len ("ldi %2,%1", op, plen, 1);
7784              clobber_val = (int) val8;
7785
7786              avr_asm_len ("and %0,%2", op, plen, 1);
7787            }
7788
7789          continue; /* AND */
7790
7791        case XOR:
7792
7793          if (0 == pop8)
7794            continue;
7795          else if (8 == pop8)
7796            avr_asm_len ("com %0", op, plen, 1);
7797          else if (ld_reg_p && val8 == (1 << 7))
7798            avr_asm_len ("subi %0,%1", op, plen, 1);
7799          else
7800            {
7801              if (clobber_val != (int) val8)
7802                avr_asm_len ("ldi %2,%1", op, plen, 1);
7803              clobber_val = (int) val8;
7804
7805              avr_asm_len ("eor %0,%2", op, plen, 1);
7806            }
7807
7808          continue; /* XOR */
7809
7810        default:
7811          /* Unknown rtx_code */
7812          gcc_unreachable();
7813        }
7814    } /* for all sub-bytes */
7815
7816  return "";
7817}
7818
7819
7820/* Output sign extension from XOP[1] to XOP[0] and return "".
7821   If PLEN == NULL, print assembler instructions to perform the operation;
7822   otherwise, set *PLEN to the length of the instruction sequence (in words)
7823   as printed with PLEN == NULL.  */
7824
7825const char*
7826avr_out_sign_extend (rtx_insn *insn, rtx *xop, int *plen)
7827{
7828  // Size in bytes of source resp. destination operand.
7829  unsigned n_src = GET_MODE_SIZE (GET_MODE (xop[1]));
7830  unsigned n_dest = GET_MODE_SIZE (GET_MODE (xop[0]));
7831  rtx r_msb = all_regs_rtx[REGNO (xop[1]) + n_src - 1];
7832
7833  if (plen)
7834    *plen = 0;
7835
7836  // Copy destination to source
7837
7838  if (REGNO (xop[0]) != REGNO (xop[1]))
7839    {
7840      gcc_assert (n_src <= 2);
7841
7842      if (n_src == 2)
7843        avr_asm_len (AVR_HAVE_MOVW
7844                     ? "movw %0,%1"
7845                     : "mov %B0,%B1", xop, plen, 1);
7846      if (n_src == 1 || !AVR_HAVE_MOVW)
7847        avr_asm_len ("mov %A0,%A1", xop, plen, 1);
7848    }
7849
7850  // Set Carry to the sign bit MSB.7...
7851
7852  if (REGNO (xop[0]) == REGNO (xop[1])
7853      || !reg_unused_after (insn, r_msb))
7854    {
7855      avr_asm_len ("mov __tmp_reg__,%0", &r_msb, plen, 1);
7856      r_msb = tmp_reg_rtx;
7857    }
7858
7859  avr_asm_len ("lsl %0", &r_msb, plen, 1);
7860
7861  // ...and propagate it to all the new sign bits
7862
7863  for (unsigned n = n_src; n < n_dest; n++)
7864    avr_asm_len ("sbc %0,%0", &all_regs_rtx[REGNO (xop[0]) + n], plen, 1);
7865
7866  return "";
7867}
7868
7869
7870/* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
7871   PLEN != NULL: Set *PLEN to the length of that sequence.
7872   Return "".  */
7873
7874const char*
7875avr_out_addto_sp (rtx *op, int *plen)
7876{
7877  int pc_len = AVR_2_BYTE_PC ? 2 : 3;
7878  int addend = INTVAL (op[0]);
7879
7880  if (plen)
7881    *plen = 0;
7882
7883  if (addend < 0)
7884    {
7885      if (flag_verbose_asm || flag_print_asm_name)
7886        avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
7887
7888      while (addend <= -pc_len)
7889        {
7890          addend += pc_len;
7891          avr_asm_len ("rcall .", op, plen, 1);
7892        }
7893
7894      while (addend++ < 0)
7895        avr_asm_len ("push __zero_reg__", op, plen, 1);
7896    }
7897  else if (addend > 0)
7898    {
7899      if (flag_verbose_asm || flag_print_asm_name)
7900        avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
7901
7902      while (addend-- > 0)
7903        avr_asm_len ("pop __tmp_reg__", op, plen, 1);
7904    }
7905
7906  return "";
7907}
7908
7909
7910/* Outputs instructions needed for fixed point type conversion.
7911   This includes converting between any fixed point type, as well
7912   as converting to any integer type.  Conversion between integer
7913   types is not supported.
7914
7915   Converting signed fractional types requires a bit shift if converting
7916   to or from any unsigned fractional type because the decimal place is
7917   shifted by 1 bit.  When the destination is a signed fractional, the sign
7918   is stored in either the carry or T bit.  */
7919
7920const char*
7921avr_out_fract (rtx_insn *insn, rtx operands[], bool intsigned, int *plen)
7922{
7923  size_t i;
7924  rtx xop[6];
7925  RTX_CODE shift = UNKNOWN;
7926  bool sign_in_carry = false;
7927  bool msb_in_carry = false;
7928  bool lsb_in_tmp_reg = false;
7929  bool lsb_in_carry = false;
7930  bool frac_rounded = false;
7931  const char *code_ashift = "lsl %0";
7932
7933
7934#define MAY_CLOBBER(RR)                                                 \
7935  /* Shorthand used below.  */                                          \
7936  ((sign_bytes                                                          \
7937    && IN_RANGE (RR, dest.regno_msb - sign_bytes + 1, dest.regno_msb))  \
7938   || (offset && IN_RANGE (RR, dest.regno, dest.regno_msb))		\
7939   || (reg_unused_after (insn, all_regs_rtx[RR])                        \
7940       && !IN_RANGE (RR, dest.regno, dest.regno_msb)))
7941
7942  struct
7943  {
7944    /* bytes       : Length of operand in bytes.
7945       ibyte       : Length of integral part in bytes.
7946       fbyte, fbit : Length of fractional part in bytes, bits.  */
7947
7948    bool sbit;
7949    unsigned fbit, bytes, ibyte, fbyte;
7950    unsigned regno, regno_msb;
7951  } dest, src, *val[2] = { &dest, &src };
7952
7953  if (plen)
7954    *plen = 0;
7955
7956  /* Step 0:  Determine information on source and destination operand we
7957     ======   will need in the remainder.  */
7958
7959  for (i = 0; i < sizeof (val) / sizeof (*val); i++)
7960    {
7961      machine_mode mode;
7962
7963      xop[i] = operands[i];
7964
7965      mode = GET_MODE (xop[i]);
7966
7967      val[i]->bytes = GET_MODE_SIZE (mode);
7968      val[i]->regno = REGNO (xop[i]);
7969      val[i]->regno_msb = REGNO (xop[i]) + val[i]->bytes - 1;
7970
7971      if (SCALAR_INT_MODE_P (mode))
7972        {
7973          val[i]->sbit = intsigned;
7974          val[i]->fbit = 0;
7975        }
7976      else if (ALL_SCALAR_FIXED_POINT_MODE_P (mode))
7977        {
7978          val[i]->sbit = SIGNED_SCALAR_FIXED_POINT_MODE_P (mode);
7979          val[i]->fbit = GET_MODE_FBIT (mode);
7980        }
7981      else
7982        fatal_insn ("unsupported fixed-point conversion", insn);
7983
7984      val[i]->fbyte = (1 + val[i]->fbit) / BITS_PER_UNIT;
7985      val[i]->ibyte = val[i]->bytes - val[i]->fbyte;
7986    }
7987
7988  // Byte offset of the decimal point taking into account different place
7989  // of the decimal point in input and output and different register numbers
7990  // of input and output.
7991  int offset = dest.regno - src.regno + dest.fbyte - src.fbyte;
7992
7993  // Number of destination bytes that will come from sign / zero extension.
7994  int sign_bytes = (dest.ibyte - src.ibyte) * (dest.ibyte > src.ibyte);
7995
7996  // Number of bytes at the low end to be filled with zeros.
7997  int zero_bytes = (dest.fbyte - src.fbyte) * (dest.fbyte > src.fbyte);
7998
7999  // Do we have a 16-Bit register that is cleared?
8000  rtx clrw = NULL_RTX;
8001
8002  bool sign_extend = src.sbit && sign_bytes;
8003
8004  if (0 == dest.fbit % 8 && 7 == src.fbit % 8)
8005    shift = ASHIFT;
8006  else if (7 == dest.fbit % 8 && 0 == src.fbit % 8)
8007    shift = ASHIFTRT;
8008  else if (dest.fbit % 8 == src.fbit % 8)
8009    shift = UNKNOWN;
8010  else
8011    gcc_unreachable();
8012
8013  /* If we need to round the fraction part, we might need to save/round it
8014     before clobbering any of it in Step 1.  Also, we might want to do
8015     the rounding now to make use of LD_REGS.  */
8016  if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
8017      && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
8018      && !TARGET_FRACT_CONV_TRUNC)
8019    {
8020      bool overlap
8021        = (src.regno <=
8022           (offset ? dest.regno_msb - sign_bytes : dest.regno + zero_bytes - 1)
8023           && dest.regno - offset -1 >= dest.regno);
8024      unsigned s0 = dest.regno - offset -1;
8025      bool use_src = true;
8026      unsigned sn;
8027      unsigned copied_msb = src.regno_msb;
8028      bool have_carry = false;
8029
8030      if (src.ibyte > dest.ibyte)
8031        copied_msb -= src.ibyte - dest.ibyte;
8032
8033      for (sn = s0; sn <= copied_msb; sn++)
8034        if (!IN_RANGE (sn, dest.regno, dest.regno_msb)
8035            && !reg_unused_after (insn, all_regs_rtx[sn]))
8036          use_src = false;
8037      if (use_src && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], s0))
8038        {
8039          avr_asm_len ("tst %0" CR_TAB "brpl 0f",
8040                       &all_regs_rtx[src.regno_msb], plen, 2);
8041          sn = src.regno;
8042          if (sn < s0)
8043            {
8044              if (TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], sn))
8045                avr_asm_len ("cpi %0,1", &all_regs_rtx[sn], plen, 1);
8046              else
8047                avr_asm_len ("sec" CR_TAB
8048                             "cpc %0,__zero_reg__",
8049                             &all_regs_rtx[sn], plen, 2);
8050              have_carry = true;
8051            }
8052          while (++sn < s0)
8053            avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8054
8055          avr_asm_len (have_carry ? "sbci %0,128" : "subi %0,129",
8056                       &all_regs_rtx[s0], plen, 1);
8057          for (sn = src.regno + src.fbyte; sn <= copied_msb; sn++)
8058            avr_asm_len ("sbci %0,255", &all_regs_rtx[sn], plen, 1);
8059          avr_asm_len ("\n0:", NULL, plen, 0);
8060          frac_rounded = true;
8061        }
8062      else if (use_src && overlap)
8063        {
8064          avr_asm_len ("clr __tmp_reg__" CR_TAB
8065                       "sbrc %1,0"       CR_TAB
8066                       "dec __tmp_reg__", xop, plen, 1);
8067          sn = src.regno;
8068          if (sn < s0)
8069            {
8070              avr_asm_len ("add %0,__tmp_reg__", &all_regs_rtx[sn], plen, 1);
8071              have_carry = true;
8072            }
8073
8074          while (++sn < s0)
8075            avr_asm_len ("adc %0,__tmp_reg__", &all_regs_rtx[sn], plen, 1);
8076
8077          if (have_carry)
8078            avr_asm_len ("clt"                CR_TAB
8079                         "bld __tmp_reg__,7"  CR_TAB
8080                         "adc %0,__tmp_reg__",
8081                         &all_regs_rtx[s0], plen, 1);
8082          else
8083            avr_asm_len ("lsr __tmp_reg" CR_TAB
8084                         "add %0,__tmp_reg__",
8085                         &all_regs_rtx[s0], plen, 2);
8086          for (sn = src.regno + src.fbyte; sn <= copied_msb; sn++)
8087            avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8088          frac_rounded = true;
8089        }
8090      else if (overlap)
8091        {
8092          bool use_src
8093            = (TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], s0)
8094               && (IN_RANGE (s0, dest.regno, dest.regno_msb)
8095                   || reg_unused_after (insn, all_regs_rtx[s0])));
8096          xop[2] = all_regs_rtx[s0];
8097          unsigned sn = src.regno;
8098          if (!use_src || sn == s0)
8099            avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
8100          /* We need to consider to-be-discarded bits
8101             if the value is negative.  */
8102          if (sn < s0)
8103            {
8104              avr_asm_len ("tst %0" CR_TAB
8105                           "brpl 0f",
8106                           &all_regs_rtx[src.regno_msb], plen, 2);
8107              /* Test to-be-discarded bytes for any nozero bits.
8108                 ??? Could use OR or SBIW to test two registers at once.  */
8109              if (sn < s0)
8110                avr_asm_len ("cp %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8111
8112              while (++sn < s0)
8113                avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8114              /* Set bit 0 in __tmp_reg__ if any of the lower bits was set.  */
8115              if (use_src)
8116                avr_asm_len ("breq 0f" CR_TAB
8117                             "ori %2,1"
8118                             "\n0:\t" "mov __tmp_reg__,%2",
8119                             xop, plen, 3);
8120              else
8121                avr_asm_len ("breq 0f" CR_TAB
8122                             "set"     CR_TAB
8123                             "bld __tmp_reg__,0\n0:",
8124                             xop, plen, 3);
8125            }
8126          lsb_in_tmp_reg = true;
8127        }
8128    }
8129
8130  /* Step 1:  Clear bytes at the low end and copy payload bits from source
8131     ======   to destination.  */
8132
8133  int step = offset < 0 ? 1 : -1;
8134  unsigned d0 = offset < 0 ? dest.regno : dest.regno_msb;
8135
8136  // We cleared at least that number of registers.
8137  int clr_n = 0;
8138
8139  for (; d0 >= dest.regno && d0 <= dest.regno_msb; d0 += step)
8140    {
8141      // Next regno of destination is needed for MOVW
8142      unsigned d1 = d0 + step;
8143
8144      // Current and next regno of source
8145      signed s0 = d0 - offset;
8146      signed s1 = s0 + step;
8147
8148      // Must current resp. next regno be CLRed?  This applies to the low
8149      // bytes of the destination that have no associated source bytes.
8150      bool clr0 = s0 < (signed) src.regno;
8151      bool clr1 = s1 < (signed) src.regno && d1 >= dest.regno;
8152
8153      // First gather what code to emit (if any) and additional step to
8154      // apply if a MOVW is in use.  xop[2] is destination rtx and xop[3]
8155      // is the source rtx for the current loop iteration.
8156      const char *code = NULL;
8157      int stepw = 0;
8158
8159      if (clr0)
8160        {
8161          if (AVR_HAVE_MOVW && clr1 && clrw)
8162            {
8163              xop[2] = all_regs_rtx[d0 & ~1];
8164              xop[3] = clrw;
8165              code = "movw %2,%3";
8166              stepw = step;
8167            }
8168          else
8169            {
8170              xop[2] = all_regs_rtx[d0];
8171              code = "clr %2";
8172
8173              if (++clr_n >= 2
8174                  && !clrw
8175                  && d0 % 2 == (step > 0))
8176                {
8177                  clrw = all_regs_rtx[d0 & ~1];
8178                }
8179            }
8180        }
8181      else if (offset && s0 <= (signed) src.regno_msb)
8182        {
8183          int movw = AVR_HAVE_MOVW && offset % 2 == 0
8184            && d0 % 2 == (offset > 0)
8185            && d1 <= dest.regno_msb && d1 >= dest.regno
8186            && s1 <= (signed) src.regno_msb  && s1 >= (signed) src.regno;
8187
8188          xop[2] = all_regs_rtx[d0 & ~movw];
8189          xop[3] = all_regs_rtx[s0 & ~movw];
8190          code = movw ? "movw %2,%3" : "mov %2,%3";
8191          stepw = step * movw;
8192        }
8193
8194      if (code)
8195        {
8196          if (sign_extend && shift != ASHIFT && !sign_in_carry
8197              && (d0 == src.regno_msb || d0 + stepw == src.regno_msb))
8198            {
8199              /* We are going to override the sign bit.  If we sign-extend,
8200                 store the sign in the Carry flag.  This is not needed if
8201                 the destination will be ASHIFT in the remainder because
8202                 the ASHIFT will set Carry without extra instruction.  */
8203
8204              avr_asm_len ("lsl %0", &all_regs_rtx[src.regno_msb], plen, 1);
8205              sign_in_carry = true;
8206            }
8207
8208          unsigned src_msb = dest.regno_msb - sign_bytes - offset + 1;
8209
8210          if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
8211              && src.ibyte > dest.ibyte
8212              && (d0 == src_msb || d0 + stepw == src_msb))
8213            {
8214              /* We are going to override the MSB.  If we shift right,
8215                 store the MSB in the Carry flag.  This is only needed if
8216                 we don't sign-extend becaue with sign-extension the MSB
8217                 (the sign) will be produced by the sign extension.  */
8218
8219              avr_asm_len ("lsr %0", &all_regs_rtx[src_msb], plen, 1);
8220              msb_in_carry = true;
8221            }
8222
8223          unsigned src_lsb = dest.regno - offset -1;
8224
8225          if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry
8226	      && !lsb_in_tmp_reg
8227              && (d0 == src_lsb || d0 + stepw == src_lsb))
8228            {
8229              /* We are going to override the new LSB; store it into carry.  */
8230
8231              avr_asm_len ("lsl %0", &all_regs_rtx[src_lsb], plen, 1);
8232              code_ashift = "rol %0";
8233              lsb_in_carry = true;
8234            }
8235
8236          avr_asm_len (code, xop, plen, 1);
8237          d0 += stepw;
8238        }
8239    }
8240
8241  /* Step 2:  Shift destination left by 1 bit position.  This might be needed
8242     ======   for signed input and unsigned output.  */
8243
8244  if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry)
8245    {
8246      unsigned s0 = dest.regno - offset -1;
8247
8248      /* n1169 4.1.4 says:
8249	 "Conversions from a fixed-point to an integer type round toward zero."
8250	 Hence, converting a fract type to integer only gives a non-zero result
8251	 for -1.  */
8252      if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
8253	  && SCALAR_FRACT_MODE_P (GET_MODE (xop[1]))
8254	  && !TARGET_FRACT_CONV_TRUNC)
8255	{
8256	  gcc_assert (s0 == src.regno_msb);
8257	  /* Check if the input is -1.  We do that by checking if negating
8258	     the input causes an integer overflow.  */
8259	  unsigned sn = src.regno;
8260	  avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn++], plen, 1);
8261	  while (sn <= s0)
8262	    avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn++], plen, 1);
8263
8264	  /* Overflow goes with set carry.  Clear carry otherwise.  */
8265	  avr_asm_len ("brvs 0f" CR_TAB
8266                       "clc\n0:", NULL, plen, 2);
8267	}
8268      /* Likewise, when converting from accumulator types to integer, we
8269	 need to round up negative values.  */
8270      else if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
8271	       && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
8272	       && !TARGET_FRACT_CONV_TRUNC
8273	       && !frac_rounded)
8274	{
8275	  bool have_carry = false;
8276
8277	  xop[2] = all_regs_rtx[s0];
8278	  if (!lsb_in_tmp_reg && !MAY_CLOBBER (s0))
8279	    avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
8280	  avr_asm_len ("tst %0" CR_TAB "brpl 0f",
8281		       &all_regs_rtx[src.regno_msb], plen, 2);
8282	  if (!lsb_in_tmp_reg)
8283	    {
8284	      unsigned sn = src.regno;
8285	      if (sn < s0)
8286		{
8287		  avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn],
8288			       plen, 1);
8289		  have_carry = true;
8290		}
8291	      while (++sn < s0)
8292		avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn], plen, 1);
8293	      lsb_in_tmp_reg = !MAY_CLOBBER (s0);
8294	    }
8295	  /* Add in C and the rounding value 127.  */
8296	  /* If the destination msb is a sign byte, and in LD_REGS,
8297	     grab it as a temporary.  */
8298	  if (sign_bytes
8299	      && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS],
8300				    dest.regno_msb))
8301	    {
8302	      xop[3] = all_regs_rtx[dest.regno_msb];
8303	      avr_asm_len ("ldi %3,127", xop, plen, 1);
8304	      avr_asm_len ((have_carry && lsb_in_tmp_reg ? "adc __tmp_reg__,%3"
8305			   : have_carry ? "adc %2,%3"
8306			   : lsb_in_tmp_reg ? "add __tmp_reg__,%3"
8307			   : "add %2,%3"),
8308			   xop, plen, 1);
8309	    }
8310	  else
8311	    {
8312	      /* Fall back to use __zero_reg__ as a temporary.  */
8313	      avr_asm_len ("dec __zero_reg__", NULL, plen, 1);
8314	      if (have_carry)
8315		avr_asm_len ("clt" CR_TAB
8316                             "bld __zero_reg__,7", NULL, plen, 2);
8317	      else
8318		avr_asm_len ("lsr __zero_reg__", NULL, plen, 1);
8319	      avr_asm_len (have_carry && lsb_in_tmp_reg
8320                           ? "adc __tmp_reg__,__zero_reg__"
8321                           : have_carry ? "adc %2,__zero_reg__"
8322                           : lsb_in_tmp_reg ? "add __tmp_reg__,__zero_reg__"
8323                           : "add %2,__zero_reg__",
8324			   xop, plen, 1);
8325	      avr_asm_len ("eor __zero_reg__,__zero_reg__", NULL, plen, 1);
8326	    }
8327
8328          for (d0 = dest.regno + zero_bytes;
8329	       d0 <= dest.regno_msb - sign_bytes; d0++)
8330	    avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[d0], plen, 1);
8331
8332          avr_asm_len (lsb_in_tmp_reg
8333		       ? "\n0:\t" "lsl __tmp_reg__"
8334                       : "\n0:\t" "lsl %2",
8335		       xop, plen, 1);
8336	}
8337      else if (MAY_CLOBBER (s0))
8338        avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
8339      else
8340        avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
8341                     "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);
8342
8343      code_ashift = "rol %0";
8344      lsb_in_carry = true;
8345    }
8346
8347  if (shift == ASHIFT)
8348    {
8349      for (d0 = dest.regno + zero_bytes;
8350           d0 <= dest.regno_msb - sign_bytes; d0++)
8351        {
8352          avr_asm_len (code_ashift, &all_regs_rtx[d0], plen, 1);
8353          code_ashift = "rol %0";
8354        }
8355
8356      lsb_in_carry = false;
8357      sign_in_carry = true;
8358    }
8359
8360  /* Step 4a:  Store MSB in carry if we don't already have it or will produce
8361     =======   it in sign-extension below.  */
8362
8363  if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
8364      && src.ibyte > dest.ibyte)
8365    {
8366      unsigned s0 = dest.regno_msb - sign_bytes - offset + 1;
8367
8368      if (MAY_CLOBBER (s0))
8369        avr_asm_len ("lsr %0", &all_regs_rtx[s0], plen, 1);
8370      else
8371        avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
8372                     "lsr __tmp_reg__", &all_regs_rtx[s0], plen, 2);
8373
8374      msb_in_carry = true;
8375    }
8376
8377  /* Step 3:  Sign-extend or zero-extend the destination as needed.
8378     ======   */
8379
8380  if (sign_extend && !sign_in_carry)
8381    {
8382      unsigned s0 = src.regno_msb;
8383
8384      if (MAY_CLOBBER (s0))
8385        avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
8386      else
8387        avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
8388                     "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);
8389
8390      sign_in_carry = true;
8391  }
8392
8393  gcc_assert (sign_in_carry + msb_in_carry + lsb_in_carry <= 1);
8394
8395  unsigned copies = 0;
8396  rtx movw = sign_extend ? NULL_RTX : clrw;
8397
8398  for (d0 = dest.regno_msb - sign_bytes + 1; d0 <= dest.regno_msb; d0++)
8399    {
8400      if (AVR_HAVE_MOVW && movw
8401          && d0 % 2 == 0 && d0 + 1 <= dest.regno_msb)
8402        {
8403          xop[2] = all_regs_rtx[d0];
8404          xop[3] = movw;
8405          avr_asm_len ("movw %2,%3", xop, plen, 1);
8406          d0++;
8407        }
8408      else
8409        {
8410          avr_asm_len (sign_extend ? "sbc %0,%0" : "clr %0",
8411                       &all_regs_rtx[d0], plen, 1);
8412
8413          if (++copies >= 2 && !movw && d0 % 2 == 1)
8414            movw = all_regs_rtx[d0-1];
8415        }
8416    } /* for */
8417
8418
8419  /* Step 4:  Right shift the destination.  This might be needed for
8420     ======   conversions from unsigned to signed.  */
8421
8422  if (shift == ASHIFTRT)
8423    {
8424      const char *code_ashiftrt = "lsr %0";
8425
8426      if (sign_extend || msb_in_carry)
8427        code_ashiftrt = "ror %0";
8428
8429      if (src.sbit && src.ibyte == dest.ibyte)
8430        code_ashiftrt = "asr %0";
8431
8432      for (d0 = dest.regno_msb - sign_bytes;
8433           d0 >= dest.regno + zero_bytes - 1 && d0 >= dest.regno; d0--)
8434        {
8435          avr_asm_len (code_ashiftrt, &all_regs_rtx[d0], plen, 1);
8436          code_ashiftrt = "ror %0";
8437        }
8438    }
8439
8440#undef MAY_CLOBBER
8441
8442  return "";
8443}
8444
8445
8446/* Output fixed-point rounding.  XOP[0] = XOP[1] is the operand to round.
8447   XOP[2] is the rounding point, a CONST_INT.  The function prints the
8448   instruction sequence if PLEN = NULL and computes the length in words
8449   of the sequence if PLEN != NULL.  Most of this function deals with
8450   preparing operands for calls to `avr_out_plus' and `avr_out_bitop'.  */
8451
8452const char*
8453avr_out_round (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)
8454{
8455  machine_mode mode = GET_MODE (xop[0]);
8456  machine_mode imode = int_mode_for_mode (mode);
8457  // The smallest fractional bit not cleared by the rounding is 2^(-RP).
8458  int fbit = (int) GET_MODE_FBIT (mode);
8459  double_int i_add = double_int_zero.set_bit (fbit-1 - INTVAL (xop[2]));
8460  wide_int wi_add = wi::set_bit_in_zero (fbit-1 - INTVAL (xop[2]),
8461					 GET_MODE_PRECISION (imode));
8462  // Lengths of PLUS and AND parts.
8463  int len_add = 0, *plen_add = plen ? &len_add : NULL;
8464  int len_and = 0, *plen_and = plen ? &len_and : NULL;
8465
8466  // Add-Saturate  1/2 * 2^(-RP).  Don't print the label "0:" when printing
8467  // the saturated addition so that we can emit the "rjmp 1f" before the
8468  // "0:" below.
8469
8470  rtx xadd = const_fixed_from_double_int (i_add, mode);
8471  rtx xpattern, xsrc, op[4];
8472
8473  xsrc = SIGNED_FIXED_POINT_MODE_P (mode)
8474    ? gen_rtx_SS_PLUS (mode, xop[1], xadd)
8475    : gen_rtx_US_PLUS (mode, xop[1], xadd);
8476  xpattern = gen_rtx_SET (VOIDmode, xop[0], xsrc);
8477
8478  op[0] = xop[0];
8479  op[1] = xop[1];
8480  op[2] = xadd;
8481  avr_out_plus (xpattern, op, plen_add, NULL, false /* Don't print "0:" */);
8482
8483  avr_asm_len ("rjmp 1f" CR_TAB
8484               "0:", NULL, plen_add, 1);
8485
8486  // Keep  all bits from RP and higher:   ... 2^(-RP)
8487  // Clear all bits from RP+1 and lower:              2^(-RP-1) ...
8488  // Rounding point                           ^^^^^^^
8489  // Added above                                      ^^^^^^^^^
8490  rtx xreg = simplify_gen_subreg (imode, xop[0], mode, 0);
8491  rtx xmask = immed_wide_int_const (-wi_add - wi_add, imode);
8492
8493  xpattern = gen_rtx_SET (VOIDmode, xreg, gen_rtx_AND (imode, xreg, xmask));
8494
8495  op[0] = xreg;
8496  op[1] = xreg;
8497  op[2] = xmask;
8498  op[3] = gen_rtx_SCRATCH (QImode);
8499  avr_out_bitop (xpattern, op, plen_and);
8500  avr_asm_len ("1:", NULL, plen, 0);
8501
8502  if (plen)
8503    *plen = len_add + len_and;
8504
8505  return "";
8506}
8507
8508
8509/* Create RTL split patterns for byte sized rotate expressions.  This
8510  produces a series of move instructions and considers overlap situations.
8511  Overlapping non-HImode operands need a scratch register.  */
8512
8513bool
8514avr_rotate_bytes (rtx operands[])
8515{
8516    int i, j;
8517    machine_mode mode = GET_MODE (operands[0]);
8518    bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
8519    bool same_reg = rtx_equal_p (operands[0], operands[1]);
8520    int num = INTVAL (operands[2]);
8521    rtx scratch = operands[3];
8522    /* Work out if byte or word move is needed.  Odd byte rotates need QImode.
8523       Word move if no scratch is needed, otherwise use size of scratch.  */
8524    machine_mode move_mode = QImode;
8525    int move_size, offset, size;
8526
8527    if (num & 0xf)
8528      move_mode = QImode;
8529    else if ((mode == SImode && !same_reg) || !overlapped)
8530      move_mode = HImode;
8531    else
8532      move_mode = GET_MODE (scratch);
8533
8534    /* Force DI rotate to use QI moves since other DI moves are currently split
8535       into QI moves so forward propagation works better.  */
8536    if (mode == DImode)
8537      move_mode = QImode;
8538    /* Make scratch smaller if needed.  */
8539    if (SCRATCH != GET_CODE (scratch)
8540        && HImode == GET_MODE (scratch)
8541        && QImode == move_mode)
8542      scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
8543
8544    move_size = GET_MODE_SIZE (move_mode);
8545    /* Number of bytes/words to rotate.  */
8546    offset = (num  >> 3) / move_size;
8547    /* Number of moves needed.  */
8548    size = GET_MODE_SIZE (mode) / move_size;
8549    /* Himode byte swap is special case to avoid a scratch register.  */
8550    if (mode == HImode && same_reg)
8551      {
8552	/* HImode byte swap, using xor.  This is as quick as using scratch.  */
8553	rtx src, dst;
8554	src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
8555	dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
8556	if (!rtx_equal_p (dst, src))
8557	  {
8558	     emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
8559	     emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
8560	     emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
8561	  }
8562      }
8563    else
8564      {
8565#define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode)  */
8566	/* Create linked list of moves to determine move order.  */
8567	struct {
8568	  rtx src, dst;
8569	  int links;
8570	} move[MAX_SIZE + 8];
8571	int blocked, moves;
8572
8573	gcc_assert (size <= MAX_SIZE);
8574	/* Generate list of subreg moves.  */
8575	for (i = 0; i < size; i++)
8576          {
8577	    int from = i;
8578	    int to = (from + offset) % size;
8579	    move[i].src = simplify_gen_subreg (move_mode, operands[1],
8580                                               mode, from * move_size);
8581	    move[i].dst = simplify_gen_subreg (move_mode, operands[0],
8582                                               mode, to * move_size);
8583            move[i].links = -1;
8584          }
8585	/* Mark dependence where a dst of one move is the src of another move.
8586	   The first move is a conflict as it must wait until second is
8587	   performed.  We ignore moves to self - we catch this later.  */
8588	if (overlapped)
8589	  for (i = 0; i < size; i++)
8590	    if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
8591	      for (j = 0; j < size; j++)
8592		if (j != i && rtx_equal_p (move[j].src, move[i].dst))
8593		  {
8594		    /* The dst of move i is the src of move j.  */
8595		    move[i].links = j;
8596		    break;
8597		  }
8598
8599	blocked = -1;
8600	moves = 0;
8601	/* Go through move list and perform non-conflicting moves.  As each
8602	   non-overlapping move is made, it may remove other conflicts
8603	   so the process is repeated until no conflicts remain.  */
8604	do
8605	  {
8606	    blocked = -1;
8607	    moves = 0;
8608	    /* Emit move where dst is not also a src or we have used that
8609	       src already.  */
8610	    for (i = 0; i < size; i++)
8611	      if (move[i].src != NULL_RTX)
8612		{
8613		  if (move[i].links == -1
8614		      || move[move[i].links].src == NULL_RTX)
8615		    {
8616		      moves++;
8617		      /* Ignore NOP moves to self.  */
8618		      if (!rtx_equal_p (move[i].dst, move[i].src))
8619			emit_move_insn (move[i].dst, move[i].src);
8620
8621		      /* Remove  conflict from list.  */
8622		      move[i].src = NULL_RTX;
8623		    }
8624		  else
8625		    blocked = i;
8626		}
8627
8628	    /* Check for deadlock. This is when no moves occurred and we have
8629	       at least one blocked move.  */
8630	    if (moves == 0 && blocked != -1)
8631	      {
8632		/* Need to use scratch register to break deadlock.
8633		   Add move to put dst of blocked move into scratch.
8634		   When this move occurs, it will break chain deadlock.
8635		   The scratch register is substituted for real move.  */
8636
8637		gcc_assert (SCRATCH != GET_CODE (scratch));
8638
8639		move[size].src = move[blocked].dst;
8640		move[size].dst =  scratch;
8641		/* Scratch move is never blocked.  */
8642		move[size].links = -1;
8643		/* Make sure we have valid link.  */
8644		gcc_assert (move[blocked].links != -1);
8645		/* Replace src of  blocking move with scratch reg.  */
8646		move[move[blocked].links].src = scratch;
8647		/* Make dependent on scratch move occurring.  */
8648		move[blocked].links = size;
8649		size=size+1;
8650	      }
8651	  }
8652	while (blocked != -1);
8653      }
8654    return true;
8655}
8656
8657
8658/* Worker function for `ADJUST_INSN_LENGTH'.  */
8659/* Modifies the length assigned to instruction INSN
8660   LEN is the initially computed length of the insn.  */
8661
8662int
8663avr_adjust_insn_length (rtx_insn *insn, int len)
8664{
8665  rtx *op = recog_data.operand;
8666  enum attr_adjust_len adjust_len;
8667
8668  /* Some complex insns don't need length adjustment and therefore
8669     the length need not/must not be adjusted for these insns.
8670     It is easier to state this in an insn attribute "adjust_len" than
8671     to clutter up code here...  */
8672
8673  if (!NONDEBUG_INSN_P (insn)
8674      || -1 == recog_memoized (insn))
8675    {
8676      return len;
8677    }
8678
8679  /* Read from insn attribute "adjust_len" if/how length is to be adjusted.  */
8680
8681  adjust_len = get_attr_adjust_len (insn);
8682
8683  if (adjust_len == ADJUST_LEN_NO)
8684    {
8685      /* Nothing to adjust: The length from attribute "length" is fine.
8686         This is the default.  */
8687
8688      return len;
8689    }
8690
8691  /* Extract insn's operands.  */
8692
8693  extract_constrain_insn_cached (insn);
8694
8695  /* Dispatch to right function.  */
8696
8697  switch (adjust_len)
8698    {
8699    case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
8700    case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
8701    case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
8702
8703    case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
8704
8705    case ADJUST_LEN_PLUS: avr_out_plus (insn, op, &len); break;
8706    case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;
8707
8708    case ADJUST_LEN_MOV8:  output_movqi (insn, op, &len); break;
8709    case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
8710    case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
8711    case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
8712    case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
8713    case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
8714    case ADJUST_LEN_LPM: avr_out_lpm (insn, op, &len); break;
8715    case ADJUST_LEN_SEXT: avr_out_sign_extend (insn, op, &len); break;
8716
8717    case ADJUST_LEN_SFRACT: avr_out_fract (insn, op, true, &len); break;
8718    case ADJUST_LEN_UFRACT: avr_out_fract (insn, op, false, &len); break;
8719    case ADJUST_LEN_ROUND: avr_out_round (insn, op, &len); break;
8720
8721    case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
8722    case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
8723    case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
8724    case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
8725    case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;
8726
8727    case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
8728    case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
8729    case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
8730
8731    case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
8732    case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
8733    case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
8734
8735    case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
8736    case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
8737    case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
8738
8739    case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
8740    case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
8741    case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;
8742
8743    case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;
8744
8745    case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;
8746
8747    default:
8748      gcc_unreachable();
8749    }
8750
8751  return len;
8752}
8753
8754/* Return nonzero if register REG dead after INSN.  */
8755
8756int
8757reg_unused_after (rtx_insn *insn, rtx reg)
8758{
8759  return (dead_or_set_p (insn, reg)
8760	  || (REG_P(reg) && _reg_unused_after (insn, reg)));
8761}
8762
8763/* Return nonzero if REG is not used after INSN.
8764   We assume REG is a reload reg, and therefore does
8765   not live past labels.  It may live past calls or jumps though.  */
8766
8767int
8768_reg_unused_after (rtx_insn *insn, rtx reg)
8769{
8770  enum rtx_code code;
8771  rtx set;
8772
8773  /* If the reg is set by this instruction, then it is safe for our
8774     case.  Disregard the case where this is a store to memory, since
8775     we are checking a register used in the store address.  */
8776  set = single_set (insn);
8777  if (set && GET_CODE (SET_DEST (set)) != MEM
8778      && reg_overlap_mentioned_p (reg, SET_DEST (set)))
8779    return 1;
8780
8781  while ((insn = NEXT_INSN (insn)))
8782    {
8783      rtx set;
8784      code = GET_CODE (insn);
8785
8786#if 0
8787      /* If this is a label that existed before reload, then the register
8788	 if dead here.  However, if this is a label added by reorg, then
8789	 the register may still be live here.  We can't tell the difference,
8790	 so we just ignore labels completely.  */
8791      if (code == CODE_LABEL)
8792	return 1;
8793      /* else */
8794#endif
8795
8796      if (!INSN_P (insn))
8797	continue;
8798
8799      if (code == JUMP_INSN)
8800	return 0;
8801
8802      /* If this is a sequence, we must handle them all at once.
8803	 We could have for instance a call that sets the target register,
8804	 and an insn in a delay slot that uses the register.  In this case,
8805	 we must return 0.  */
8806      else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
8807	{
8808	  rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
8809	  int i;
8810	  int retval = 0;
8811
8812	  for (i = 0; i < seq->len (); i++)
8813	    {
8814	      rtx_insn *this_insn = seq->insn (i);
8815	      rtx set = single_set (this_insn);
8816
8817	      if (CALL_P (this_insn))
8818		code = CALL_INSN;
8819	      else if (JUMP_P (this_insn))
8820		{
8821		  if (INSN_ANNULLED_BRANCH_P (this_insn))
8822		    return 0;
8823		  code = JUMP_INSN;
8824		}
8825
8826	      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
8827		return 0;
8828	      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
8829		{
8830		  if (GET_CODE (SET_DEST (set)) != MEM)
8831		    retval = 1;
8832		  else
8833		    return 0;
8834		}
8835	      if (set == 0
8836		  && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
8837		return 0;
8838	    }
8839	  if (retval == 1)
8840	    return 1;
8841	  else if (code == JUMP_INSN)
8842	    return 0;
8843	}
8844
8845      if (code == CALL_INSN)
8846	{
8847	  rtx tem;
8848	  for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
8849	    if (GET_CODE (XEXP (tem, 0)) == USE
8850		&& REG_P (XEXP (XEXP (tem, 0), 0))
8851		&& reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
8852	      return 0;
8853	  if (call_used_regs[REGNO (reg)])
8854	    return 1;
8855	}
8856
8857      set = single_set (insn);
8858
8859      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
8860	return 0;
8861      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
8862	return GET_CODE (SET_DEST (set)) != MEM;
8863      if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
8864	return 0;
8865    }
8866  return 1;
8867}
8868
8869
8870/* Implement `TARGET_ASM_INTEGER'.  */
8871/* Target hook for assembling integer objects.  The AVR version needs
8872   special handling for references to certain labels.  */
8873
8874static bool
8875avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
8876{
8877  if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
8878      && text_segment_operand (x, VOIDmode))
8879    {
8880      fputs ("\t.word\tgs(", asm_out_file);
8881      output_addr_const (asm_out_file, x);
8882      fputs (")\n", asm_out_file);
8883
8884      return true;
8885    }
8886  else if (GET_MODE (x) == PSImode)
8887    {
8888      /* This needs binutils 2.23+, see PR binutils/13503  */
8889
8890      fputs ("\t.byte\tlo8(", asm_out_file);
8891      output_addr_const (asm_out_file, x);
8892      fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
8893
8894      fputs ("\t.byte\thi8(", asm_out_file);
8895      output_addr_const (asm_out_file, x);
8896      fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
8897
8898      fputs ("\t.byte\thh8(", asm_out_file);
8899      output_addr_const (asm_out_file, x);
8900      fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
8901
8902      return true;
8903    }
8904  else if (CONST_FIXED_P (x))
8905    {
8906      unsigned n;
8907
8908      /* varasm fails to handle big fixed modes that don't fit in hwi.  */
8909
8910      for (n = 0; n < size; n++)
8911        {
8912          rtx xn = simplify_gen_subreg (QImode, x, GET_MODE (x), n);
8913          default_assemble_integer (xn, 1, aligned_p);
8914        }
8915
8916      return true;
8917    }
8918
8919  return default_assemble_integer (x, size, aligned_p);
8920}
8921
8922
8923/* Implement `TARGET_CLASS_LIKELY_SPILLED_P'.  */
8924/* Return value is nonzero if pseudos that have been
8925   assigned to registers of class CLASS would likely be spilled
8926   because registers of CLASS are needed for spill registers.  */
8927
8928static bool
8929avr_class_likely_spilled_p (reg_class_t c)
8930{
8931  return (c != ALL_REGS &&
8932           (AVR_TINY ? 1 : c != ADDW_REGS));
8933}
8934
8935
8936/* Valid attributes:
8937   progmem   -  Put data to program memory.
8938   signal    -  Make a function to be hardware interrupt.
8939                After function prologue interrupts remain disabled.
8940   interrupt -  Make a function to be hardware interrupt. Before function
8941                prologue interrupts are enabled by means of SEI.
8942   naked     -  Don't generate function prologue/epilogue and RET
8943                instruction.  */
8944
8945/* Handle a "progmem" attribute; arguments as in
8946   struct attribute_spec.handler.  */
8947
8948static tree
8949avr_handle_progmem_attribute (tree *node, tree name,
8950			      tree args ATTRIBUTE_UNUSED,
8951			      int flags ATTRIBUTE_UNUSED,
8952			      bool *no_add_attrs)
8953{
8954  if (DECL_P (*node))
8955    {
8956      if (TREE_CODE (*node) == TYPE_DECL)
8957	{
8958	  /* This is really a decl attribute, not a type attribute,
8959	     but try to handle it for GCC 3.0 backwards compatibility.  */
8960
8961	  tree type = TREE_TYPE (*node);
8962	  tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
8963	  tree newtype = build_type_attribute_variant (type, attr);
8964
8965	  TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
8966	  TREE_TYPE (*node) = newtype;
8967	  *no_add_attrs = true;
8968	}
8969      else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
8970	{
8971          *no_add_attrs = false;
8972	}
8973      else
8974	{
8975	  warning (OPT_Wattributes, "%qE attribute ignored",
8976		   name);
8977	  *no_add_attrs = true;
8978	}
8979    }
8980
8981  return NULL_TREE;
8982}
8983
8984/* Handle an attribute requiring a FUNCTION_DECL; arguments as in
8985   struct attribute_spec.handler.  */
8986
8987static tree
8988avr_handle_fndecl_attribute (tree *node, tree name,
8989			     tree args ATTRIBUTE_UNUSED,
8990			     int flags ATTRIBUTE_UNUSED,
8991			     bool *no_add_attrs)
8992{
8993  if (TREE_CODE (*node) != FUNCTION_DECL)
8994    {
8995      warning (OPT_Wattributes, "%qE attribute only applies to functions",
8996	       name);
8997      *no_add_attrs = true;
8998    }
8999
9000  return NULL_TREE;
9001}
9002
9003static tree
9004avr_handle_fntype_attribute (tree *node, tree name,
9005                             tree args ATTRIBUTE_UNUSED,
9006                             int flags ATTRIBUTE_UNUSED,
9007                             bool *no_add_attrs)
9008{
9009  if (TREE_CODE (*node) != FUNCTION_TYPE)
9010    {
9011      warning (OPT_Wattributes, "%qE attribute only applies to functions",
9012	       name);
9013      *no_add_attrs = true;
9014    }
9015
9016  return NULL_TREE;
9017}
9018
9019static tree
9020avr_handle_addr_attribute (tree *node, tree name, tree args,
9021			   int flags ATTRIBUTE_UNUSED, bool *no_add)
9022{
9023  bool io_p = (strncmp (IDENTIFIER_POINTER (name), "io", 2) == 0);
9024  location_t loc = DECL_SOURCE_LOCATION (*node);
9025
9026  if (TREE_CODE (*node) != VAR_DECL)
9027    {
9028      warning_at (loc, 0, "%qE attribute only applies to variables", name);
9029      *no_add = true;
9030    }
9031
9032  if (args != NULL_TREE)
9033    {
9034      if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
9035	TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
9036      tree arg = TREE_VALUE (args);
9037      if (TREE_CODE (arg) != INTEGER_CST)
9038	{
9039	  warning (0, "%qE attribute allows only an integer constant argument",
9040		   name);
9041	  *no_add = true;
9042	}
9043      else if (io_p
9044	       && (!tree_fits_shwi_p (arg)
9045		   || !(strcmp (IDENTIFIER_POINTER (name), "io_low") == 0
9046			? low_io_address_operand : io_address_operand)
9047			 (GEN_INT (TREE_INT_CST_LOW (arg)), QImode)))
9048	{
9049	  warning_at (loc, 0, "%qE attribute address out of range", name);
9050	  *no_add = true;
9051	}
9052      else
9053	{
9054	  tree attribs = DECL_ATTRIBUTES (*node);
9055	  const char *names[] = { "io", "io_low", "address", NULL } ;
9056	  for (const char **p = names; *p; p++)
9057	    {
9058	      tree other = lookup_attribute (*p, attribs);
9059	      if (other && TREE_VALUE (other))
9060		{
9061		  warning_at (loc, 0,
9062			      "both %s and %qE attribute provide address",
9063			      *p, name);
9064		  *no_add = true;
9065		  break;
9066		}
9067	    }
9068	}
9069    }
9070
9071  if (*no_add == false && io_p && !TREE_THIS_VOLATILE (*node))
9072    warning_at (loc, 0, "%qE attribute on non-volatile variable", name);
9073
9074  return NULL_TREE;
9075}
9076
9077rtx
9078avr_eval_addr_attrib (rtx x)
9079{
9080  if (GET_CODE (x) == SYMBOL_REF
9081      && (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_ADDRESS))
9082    {
9083      tree decl = SYMBOL_REF_DECL (x);
9084      tree attr = NULL_TREE;
9085
9086      if (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_IO)
9087	{
9088	  attr = lookup_attribute ("io", DECL_ATTRIBUTES (decl));
9089	  gcc_assert (attr);
9090	}
9091      if (!attr || !TREE_VALUE (attr))
9092	attr = lookup_attribute ("address", DECL_ATTRIBUTES (decl));
9093      gcc_assert (attr && TREE_VALUE (attr) && TREE_VALUE (TREE_VALUE (attr)));
9094      return GEN_INT (TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr))));
9095    }
9096  return x;
9097}
9098
9099
9100/* AVR attributes.  */
9101static const struct attribute_spec
9102avr_attribute_table[] =
9103{
9104  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
9105       affects_type_identity } */
9106  { "progmem",   0, 0, false, false, false,  avr_handle_progmem_attribute,
9107    false },
9108  { "signal",    0, 0, true,  false, false,  avr_handle_fndecl_attribute,
9109    false },
9110  { "interrupt", 0, 0, true,  false, false,  avr_handle_fndecl_attribute,
9111    false },
9112  { "naked",     0, 0, false, true,  true,   avr_handle_fntype_attribute,
9113    false },
9114  { "OS_task",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
9115    false },
9116  { "OS_main",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
9117    false },
9118  { "io",        0, 1, false, false, false,  avr_handle_addr_attribute,
9119    false },
9120  { "io_low",    0, 1, false, false, false,  avr_handle_addr_attribute,
9121    false },
9122  { "address",   1, 1, false, false, false,  avr_handle_addr_attribute,
9123    false },
9124  { NULL,        0, 0, false, false, false, NULL, false }
9125};
9126
9127
9128/* Look if DECL shall be placed in program memory space by
9129   means of attribute `progmem' or some address-space qualifier.
9130   Return non-zero if DECL is data that must end up in Flash and
9131   zero if the data lives in RAM (.bss, .data, .rodata, ...).
9132
9133   Return 2   if DECL is located in 24-bit flash address-space
9134   Return 1   if DECL is located in 16-bit flash address-space
9135   Return -1  if attribute `progmem' occurs in DECL or ATTRIBUTES
9136   Return 0   otherwise  */
9137
9138int
9139avr_progmem_p (tree decl, tree attributes)
9140{
9141  tree a;
9142
9143  if (TREE_CODE (decl) != VAR_DECL)
9144    return 0;
9145
9146  if (avr_decl_memx_p (decl))
9147    return 2;
9148
9149  if (avr_decl_flash_p (decl))
9150    return 1;
9151
9152  if (NULL_TREE
9153      != lookup_attribute ("progmem", attributes))
9154    return -1;
9155
9156  a = decl;
9157
9158  do
9159    a = TREE_TYPE(a);
9160  while (TREE_CODE (a) == ARRAY_TYPE);
9161
9162  if (a == error_mark_node)
9163    return 0;
9164
9165  if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
9166    return -1;
9167
9168  return 0;
9169}
9170
9171
9172/* Scan type TYP for pointer references to address space ASn.
9173   Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
9174   the AS are also declared to be CONST.
9175   Otherwise, return the respective address space, i.e. a value != 0.  */
9176
9177static addr_space_t
9178avr_nonconst_pointer_addrspace (tree typ)
9179{
9180  while (ARRAY_TYPE == TREE_CODE (typ))
9181    typ = TREE_TYPE (typ);
9182
9183  if (POINTER_TYPE_P (typ))
9184    {
9185      addr_space_t as;
9186      tree target = TREE_TYPE (typ);
9187
9188      /* Pointer to function: Test the function's return type.  */
9189
9190      if (FUNCTION_TYPE == TREE_CODE (target))
9191        return avr_nonconst_pointer_addrspace (TREE_TYPE (target));
9192
9193      /* "Ordinary" pointers... */
9194
9195      while (TREE_CODE (target) == ARRAY_TYPE)
9196        target = TREE_TYPE (target);
9197
9198      /* Pointers to non-generic address space must be const.
9199         Refuse address spaces outside the device's flash.  */
9200
9201      as = TYPE_ADDR_SPACE (target);
9202
9203      if (!ADDR_SPACE_GENERIC_P (as)
9204          && (!TYPE_READONLY (target)
9205              || avr_addrspace[as].segment >= avr_n_flash
9206	      /* Also refuse __memx address space if we can't support it.  */
9207	      || (!AVR_HAVE_LPM && avr_addrspace[as].pointer_size > 2)))
9208        {
9209          return as;
9210        }
9211
9212      /* Scan pointer's target type.  */
9213
9214      return avr_nonconst_pointer_addrspace (target);
9215    }
9216
9217  return ADDR_SPACE_GENERIC;
9218}
9219
9220
9221/* Sanity check NODE so that all pointers targeting non-generic address spaces
9222   go along with CONST qualifier.  Writing to these address spaces should
9223   be detected and complained about as early as possible.  */
9224
9225static bool
9226avr_pgm_check_var_decl (tree node)
9227{
9228  const char *reason = NULL;
9229
9230  addr_space_t as = ADDR_SPACE_GENERIC;
9231
9232  gcc_assert (as == 0);
9233
9234  if (avr_log.progmem)
9235    avr_edump ("%?: %t\n", node);
9236
9237  switch (TREE_CODE (node))
9238    {
9239    default:
9240      break;
9241
9242    case VAR_DECL:
9243      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
9244        reason = "variable";
9245      break;
9246
9247    case PARM_DECL:
9248      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
9249        reason = "function parameter";
9250      break;
9251
9252    case FIELD_DECL:
9253      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
9254        reason = "structure field";
9255      break;
9256
9257    case FUNCTION_DECL:
9258      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
9259          as)
9260        reason = "return type of function";
9261      break;
9262
9263    case POINTER_TYPE:
9264      if (as = avr_nonconst_pointer_addrspace (node), as)
9265        reason = "pointer";
9266      break;
9267    }
9268
9269  if (reason)
9270    {
9271      if (avr_addrspace[as].segment >= avr_n_flash)
9272        {
9273          if (TYPE_P (node))
9274            error ("%qT uses address space %qs beyond flash of %d KiB",
9275                   node, avr_addrspace[as].name, 64 * avr_n_flash);
9276          else
9277            error ("%s %q+D uses address space %qs beyond flash of %d KiB",
9278                   reason, node, avr_addrspace[as].name, 64 * avr_n_flash);
9279        }
9280      else
9281        {
9282          if (TYPE_P (node))
9283            error ("pointer targeting address space %qs must be const in %qT",
9284                   avr_addrspace[as].name, node);
9285          else
9286            error ("pointer targeting address space %qs must be const"
9287                   " in %s %q+D",
9288                   avr_addrspace[as].name, reason, node);
9289        }
9290    }
9291
9292  return reason == NULL;
9293}
9294
9295
9296/* Add the section attribute if the variable is in progmem.  */
9297
9298static void
9299avr_insert_attributes (tree node, tree *attributes)
9300{
9301  avr_pgm_check_var_decl (node);
9302
9303  if (TREE_CODE (node) == VAR_DECL
9304      && (TREE_STATIC (node) || DECL_EXTERNAL (node))
9305      && avr_progmem_p (node, *attributes))
9306    {
9307      addr_space_t as;
9308      tree node0 = node;
9309
9310      /* For C++, we have to peel arrays in order to get correct
9311         determination of readonlyness.  */
9312
9313      do
9314        node0 = TREE_TYPE (node0);
9315      while (TREE_CODE (node0) == ARRAY_TYPE);
9316
9317      if (error_mark_node == node0)
9318        return;
9319
9320      as = TYPE_ADDR_SPACE (TREE_TYPE (node));
9321
9322      if (avr_addrspace[as].segment >= avr_n_flash)
9323        {
9324          error ("variable %q+D located in address space %qs beyond flash "
9325                 "of %d KiB", node, avr_addrspace[as].name, 64 * avr_n_flash);
9326        }
9327      else if (!AVR_HAVE_LPM && avr_addrspace[as].pointer_size > 2)
9328	{
9329          error ("variable %q+D located in address space %qs"
9330                 " which is not supported for architecture %qs",
9331                 node, avr_addrspace[as].name, avr_arch->name);
9332	}
9333
9334      if (!TYPE_READONLY (node0)
9335          && !TREE_READONLY (node))
9336        {
9337          const char *reason = "__attribute__((progmem))";
9338
9339          if (!ADDR_SPACE_GENERIC_P (as))
9340            reason = avr_addrspace[as].name;
9341
9342          if (avr_log.progmem)
9343            avr_edump ("\n%?: %t\n%t\n", node, node0);
9344
9345          error ("variable %q+D must be const in order to be put into"
9346                 " read-only section by means of %qs", node, reason);
9347        }
9348    }
9349}
9350
9351
9352/* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'.  */
9353/* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'.  */
9354/* Track need of __do_clear_bss.  */
9355
9356void
9357avr_asm_output_aligned_decl_common (FILE * stream,
9358                                    tree decl,
9359                                    const char *name,
9360                                    unsigned HOST_WIDE_INT size,
9361                                    unsigned int align, bool local_p)
9362{
9363  rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
9364  rtx symbol;
9365
9366  if (mem != NULL_RTX && MEM_P (mem)
9367      && GET_CODE ((symbol = XEXP (mem, 0))) == SYMBOL_REF
9368      && (SYMBOL_REF_FLAGS (symbol) & (SYMBOL_FLAG_IO | SYMBOL_FLAG_ADDRESS)))
9369    {
9370
9371      if (!local_p)
9372	{
9373	  fprintf (stream, "\t.globl\t");
9374	  assemble_name (stream, name);
9375	  fprintf (stream, "\n");
9376	}
9377      if (SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_ADDRESS)
9378	{
9379	  assemble_name (stream, name);
9380	  fprintf (stream, " = %ld\n",
9381		   (long) INTVAL (avr_eval_addr_attrib (symbol)));
9382	}
9383      else if (local_p)
9384	error_at (DECL_SOURCE_LOCATION (decl),
9385		  "static IO declaration for %q+D needs an address", decl);
9386      return;
9387    }
9388
9389  /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
9390     There is no need to trigger __do_clear_bss code for them.  */
9391
9392  if (!STR_PREFIX_P (name, "__gnu_lto"))
9393    avr_need_clear_bss_p = true;
9394
9395  if (local_p)
9396    ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
9397  else
9398    ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
9399}
9400
9401void
9402avr_asm_asm_output_aligned_bss (FILE *file, tree decl, const char *name,
9403				unsigned HOST_WIDE_INT size, int align,
9404				void (*default_func)
9405				  (FILE *, tree, const char *,
9406				   unsigned HOST_WIDE_INT, int))
9407{
9408  rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
9409  rtx symbol;
9410
9411  if (mem != NULL_RTX && MEM_P (mem)
9412      && GET_CODE ((symbol = XEXP (mem, 0))) == SYMBOL_REF
9413      && (SYMBOL_REF_FLAGS (symbol) & (SYMBOL_FLAG_IO | SYMBOL_FLAG_ADDRESS)))
9414    {
9415      if (!(SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_ADDRESS))
9416	error_at (DECL_SOURCE_LOCATION (decl),
9417		  "IO definition for %q+D needs an address", decl);
9418      avr_asm_output_aligned_decl_common (file, decl, name, size, align, false);
9419    }
9420  else
9421    default_func (file, decl, name, size, align);
9422}
9423
9424
9425/* Unnamed section callback for data_section
9426   to track need of __do_copy_data.  */
9427
9428static void
9429avr_output_data_section_asm_op (const void *data)
9430{
9431  avr_need_copy_data_p = true;
9432
9433  /* Dispatch to default.  */
9434  output_section_asm_op (data);
9435}
9436
9437
9438/* Unnamed section callback for bss_section
9439   to track need of __do_clear_bss.  */
9440
9441static void
9442avr_output_bss_section_asm_op (const void *data)
9443{
9444  avr_need_clear_bss_p = true;
9445
9446  /* Dispatch to default.  */
9447  output_section_asm_op (data);
9448}
9449
9450
9451/* Unnamed section callback for progmem*.data sections.  */
9452
9453static void
9454avr_output_progmem_section_asm_op (const void *data)
9455{
9456  fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
9457           (const char*) data);
9458}
9459
9460
9461/* Implement `TARGET_ASM_INIT_SECTIONS'.  */
9462
9463static void
9464avr_asm_init_sections (void)
9465{
9466  /* Set up a section for jump tables.  Alignment is handled by
9467     ASM_OUTPUT_BEFORE_CASE_LABEL.  */
9468
9469  if (AVR_HAVE_JMP_CALL)
9470    {
9471      progmem_swtable_section
9472        = get_unnamed_section (0, output_section_asm_op,
9473                               "\t.section\t.progmem.gcc_sw_table"
9474                               ",\"a\",@progbits");
9475    }
9476  else
9477    {
9478      progmem_swtable_section
9479        = get_unnamed_section (SECTION_CODE, output_section_asm_op,
9480                               "\t.section\t.progmem.gcc_sw_table"
9481                               ",\"ax\",@progbits");
9482    }
9483
9484  /* Override section callbacks to keep track of `avr_need_clear_bss_p'
9485     resp. `avr_need_copy_data_p'.  */
9486
9487  readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
9488  data_section->unnamed.callback = avr_output_data_section_asm_op;
9489  bss_section->unnamed.callback = avr_output_bss_section_asm_op;
9490}
9491
9492
9493/* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'.  */
9494
9495static section*
9496avr_asm_function_rodata_section (tree decl)
9497{
9498  /* If a function is unused and optimized out by -ffunction-sections
9499     and --gc-sections, ensure that the same will happen for its jump
9500     tables by putting them into individual sections.  */
9501
9502  unsigned int flags;
9503  section * frodata;
9504
9505  /* Get the frodata section from the default function in varasm.c
9506     but treat function-associated data-like jump tables as code
9507     rather than as user defined data.  AVR has no constant pools.  */
9508  {
9509    int fdata = flag_data_sections;
9510
9511    flag_data_sections = flag_function_sections;
9512    frodata = default_function_rodata_section (decl);
9513    flag_data_sections = fdata;
9514    flags = frodata->common.flags;
9515  }
9516
9517  if (frodata != readonly_data_section
9518      && flags & SECTION_NAMED)
9519    {
9520      /* Adjust section flags and replace section name prefix.  */
9521
9522      unsigned int i;
9523
9524      static const char* const prefix[] =
9525        {
9526          ".rodata",          ".progmem.gcc_sw_table",
9527          ".gnu.linkonce.r.", ".gnu.linkonce.t."
9528        };
9529
9530      for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
9531        {
9532          const char * old_prefix = prefix[i];
9533          const char * new_prefix = prefix[i+1];
9534          const char * name = frodata->named.name;
9535
9536          if (STR_PREFIX_P (name, old_prefix))
9537            {
9538              const char *rname = ACONCAT ((new_prefix,
9539                                            name + strlen (old_prefix), NULL));
9540              flags &= ~SECTION_CODE;
9541              flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
9542
9543              return get_section (rname, flags, frodata->named.decl);
9544            }
9545        }
9546    }
9547
9548  return progmem_swtable_section;
9549}
9550
9551
9552/* Implement `TARGET_ASM_NAMED_SECTION'.  */
9553/* Track need of __do_clear_bss, __do_copy_data for named sections.  */
9554
9555static void
9556avr_asm_named_section (const char *name, unsigned int flags, tree decl)
9557{
9558  if (flags & AVR_SECTION_PROGMEM)
9559    {
9560      addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
9561      const char *old_prefix = ".rodata";
9562      const char *new_prefix = avr_addrspace[as].section_name;
9563
9564      if (STR_PREFIX_P (name, old_prefix))
9565        {
9566          const char *sname = ACONCAT ((new_prefix,
9567                                        name + strlen (old_prefix), NULL));
9568          default_elf_asm_named_section (sname, flags, decl);
9569          return;
9570        }
9571
9572      default_elf_asm_named_section (new_prefix, flags, decl);
9573      return;
9574    }
9575
9576  if (!avr_need_copy_data_p)
9577    avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
9578                            || STR_PREFIX_P (name, ".rodata")
9579                            || STR_PREFIX_P (name, ".gnu.linkonce.d"));
9580
9581  if (!avr_need_clear_bss_p)
9582    avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
9583
9584  default_elf_asm_named_section (name, flags, decl);
9585}
9586
9587
9588/* Implement `TARGET_SECTION_TYPE_FLAGS'.  */
9589
9590static unsigned int
9591avr_section_type_flags (tree decl, const char *name, int reloc)
9592{
9593  unsigned int flags = default_section_type_flags (decl, name, reloc);
9594
9595  if (STR_PREFIX_P (name, ".noinit"))
9596    {
9597      if (decl && TREE_CODE (decl) == VAR_DECL
9598	  && DECL_INITIAL (decl) == NULL_TREE)
9599	flags |= SECTION_BSS;  /* @nobits */
9600      else
9601	warning (0, "only uninitialized variables can be placed in the "
9602		 ".noinit section");
9603    }
9604
9605  if (decl && DECL_P (decl)
9606      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
9607    {
9608      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
9609
9610      /* Attribute progmem puts data in generic address space.
9611         Set section flags as if it was in __flash to get the right
9612         section prefix in the remainder.  */
9613
9614      if (ADDR_SPACE_GENERIC_P (as))
9615        as = ADDR_SPACE_FLASH;
9616
9617      flags |= as * SECTION_MACH_DEP;
9618      flags &= ~SECTION_WRITE;
9619      flags &= ~SECTION_BSS;
9620    }
9621
9622  return flags;
9623}
9624
9625
9626/* Implement `TARGET_ENCODE_SECTION_INFO'.  */
9627
9628static void
9629avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
9630{
9631  /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
9632     readily available, see PR34734.  So we postpone the warning
9633     about uninitialized data in program memory section until here.  */
9634
9635  if (new_decl_p
9636      && decl && DECL_P (decl)
9637      && NULL_TREE == DECL_INITIAL (decl)
9638      && !DECL_EXTERNAL (decl)
9639      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
9640    {
9641      warning (OPT_Wuninitialized,
9642               "uninitialized variable %q+D put into "
9643               "program memory area", decl);
9644    }
9645
9646  default_encode_section_info (decl, rtl, new_decl_p);
9647
9648  if (decl && DECL_P (decl)
9649      && TREE_CODE (decl) != FUNCTION_DECL
9650      && MEM_P (rtl)
9651      && SYMBOL_REF == GET_CODE (XEXP (rtl, 0)))
9652   {
9653      rtx sym = XEXP (rtl, 0);
9654      tree type = TREE_TYPE (decl);
9655      tree attr = DECL_ATTRIBUTES (decl);
9656      if (type == error_mark_node)
9657	return;
9658
9659      addr_space_t as = TYPE_ADDR_SPACE (type);
9660
9661      /* PSTR strings are in generic space but located in flash:
9662         patch address space.  */
9663
9664      if (-1 == avr_progmem_p (decl, attr))
9665        as = ADDR_SPACE_FLASH;
9666
9667      AVR_SYMBOL_SET_ADDR_SPACE (sym, as);
9668
9669      tree io_low_attr = lookup_attribute ("io_low", attr);
9670      tree io_attr = lookup_attribute ("io", attr);
9671      tree addr_attr;
9672      if (io_low_attr
9673	  && TREE_VALUE (io_low_attr) && TREE_VALUE (TREE_VALUE (io_low_attr)))
9674	addr_attr = io_attr;
9675      else if (io_attr
9676	       && TREE_VALUE (io_attr) && TREE_VALUE (TREE_VALUE (io_attr)))
9677	addr_attr = io_attr;
9678      else
9679	addr_attr = lookup_attribute ("address", attr);
9680      if (io_low_attr
9681	  || (io_attr && addr_attr
9682              && low_io_address_operand
9683                  (GEN_INT (TREE_INT_CST_LOW
9684                            (TREE_VALUE (TREE_VALUE (addr_attr)))), QImode)))
9685	SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_IO_LOW;
9686      if (io_attr || io_low_attr)
9687	SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_IO;
9688      /* If we have an (io) address attribute specification, but the variable
9689	 is external, treat the address as only a tentative definition
9690	 to be used to determine if an io port is in the lower range, but
9691	 don't use the exact value for constant propagation.  */
9692      if (addr_attr && !DECL_EXTERNAL (decl))
9693	SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_ADDRESS;
9694    }
9695}
9696
9697
9698/* Implement `TARGET_ASM_SELECT_SECTION' */
9699
9700static section *
9701avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
9702{
9703  section * sect = default_elf_select_section (decl, reloc, align);
9704
9705  if (decl && DECL_P (decl)
9706      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
9707    {
9708      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
9709
9710      /* __progmem__ goes in generic space but shall be allocated to
9711         .progmem.data  */
9712
9713      if (ADDR_SPACE_GENERIC_P (as))
9714        as = ADDR_SPACE_FLASH;
9715
9716      if (sect->common.flags & SECTION_NAMED)
9717        {
9718          const char * name = sect->named.name;
9719          const char * old_prefix = ".rodata";
9720          const char * new_prefix = avr_addrspace[as].section_name;
9721
9722          if (STR_PREFIX_P (name, old_prefix))
9723            {
9724              const char *sname = ACONCAT ((new_prefix,
9725                                            name + strlen (old_prefix), NULL));
9726              return get_section (sname, sect->common.flags, sect->named.decl);
9727            }
9728        }
9729
9730      if (!progmem_section[as])
9731        {
9732          progmem_section[as]
9733            = get_unnamed_section (0, avr_output_progmem_section_asm_op,
9734                                   avr_addrspace[as].section_name);
9735        }
9736
9737      return progmem_section[as];
9738    }
9739
9740  return sect;
9741}
9742
9743/* Implement `TARGET_ASM_FILE_START'.  */
9744/* Outputs some text at the start of each assembler file.  */
9745
9746static void
9747avr_file_start (void)
9748{
9749  int sfr_offset = avr_arch->sfr_offset;
9750
9751  if (avr_arch->asm_only)
9752    error ("architecture %qs supported for assembler only", avr_mmcu);
9753
9754  default_file_start ();
9755
9756  /* Print I/O addresses of some SFRs used with IN and OUT.  */
9757
9758  if (AVR_HAVE_SPH)
9759    fprintf (asm_out_file, "__SP_H__ = 0x%02x\n", avr_addr.sp_h - sfr_offset);
9760
9761  fprintf (asm_out_file, "__SP_L__ = 0x%02x\n", avr_addr.sp_l - sfr_offset);
9762  fprintf (asm_out_file, "__SREG__ = 0x%02x\n", avr_addr.sreg - sfr_offset);
9763  if (AVR_HAVE_RAMPZ)
9764    fprintf (asm_out_file, "__RAMPZ__ = 0x%02x\n", avr_addr.rampz - sfr_offset);
9765  if (AVR_HAVE_RAMPY)
9766    fprintf (asm_out_file, "__RAMPY__ = 0x%02x\n", avr_addr.rampy - sfr_offset);
9767  if (AVR_HAVE_RAMPX)
9768    fprintf (asm_out_file, "__RAMPX__ = 0x%02x\n", avr_addr.rampx - sfr_offset);
9769  if (AVR_HAVE_RAMPD)
9770    fprintf (asm_out_file, "__RAMPD__ = 0x%02x\n", avr_addr.rampd - sfr_offset);
9771  if (AVR_XMEGA || AVR_TINY)
9772    fprintf (asm_out_file, "__CCP__ = 0x%02x\n", avr_addr.ccp - sfr_offset);
9773  fprintf (asm_out_file, "__tmp_reg__ = %d\n", AVR_TMP_REGNO);
9774  fprintf (asm_out_file, "__zero_reg__ = %d\n", AVR_ZERO_REGNO);
9775}
9776
9777
9778/* Implement `TARGET_ASM_FILE_END'.  */
9779/* Outputs to the stdio stream FILE some
9780   appropriate text to go at the end of an assembler file.  */
9781
9782static void
9783avr_file_end (void)
9784{
9785  /* Output these only if there is anything in the
9786     .data* / .rodata* / .gnu.linkonce.* resp. .bss* or COMMON
9787     input section(s) - some code size can be saved by not
9788     linking in the initialization code from libgcc if resp.
9789     sections are empty, see PR18145.  */
9790
9791  if (avr_need_copy_data_p)
9792    fputs (".global __do_copy_data\n", asm_out_file);
9793
9794  if (avr_need_clear_bss_p)
9795    fputs (".global __do_clear_bss\n", asm_out_file);
9796}
9797
9798
9799/* Worker function for `ADJUST_REG_ALLOC_ORDER'.  */
9800/* Choose the order in which to allocate hard registers for
9801   pseudo-registers local to a basic block.
9802
9803   Store the desired register order in the array `reg_alloc_order'.
9804   Element 0 should be the register to allocate first; element 1, the
9805   next register; and so on.  */
9806
9807void
9808avr_adjust_reg_alloc_order (void)
9809{
9810  unsigned int i;
9811  static const int order_0[] =
9812    {
9813      24, 25,
9814      18, 19, 20, 21, 22, 23,
9815      30, 31,
9816      26, 27, 28, 29,
9817      17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
9818      0, 1,
9819      32, 33, 34, 35
9820  };
9821  static const int tiny_order_0[] = {
9822    20, 21,
9823    22, 23,
9824    24, 25,
9825    30, 31,
9826    26, 27,
9827    28, 29,
9828    19, 18,
9829    16, 17,
9830    32, 33, 34, 35,
9831    15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
9832  };
9833  static const int order_1[] =
9834    {
9835      18, 19, 20, 21, 22, 23, 24, 25,
9836      30, 31,
9837      26, 27, 28, 29,
9838      17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
9839      0, 1,
9840      32, 33, 34, 35
9841  };
9842  static const int tiny_order_1[] = {
9843    22, 23,
9844    24, 25,
9845    30, 31,
9846    26, 27,
9847    28, 29,
9848    21, 20, 19, 18,
9849    16, 17,
9850    32, 33, 34, 35,
9851    15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
9852  };
9853  static const int order_2[] =
9854    {
9855      25, 24, 23, 22, 21, 20, 19, 18,
9856      30, 31,
9857      26, 27, 28, 29,
9858      17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
9859      1, 0,
9860      32, 33, 34, 35
9861  };
9862
9863  /* Select specific register allocation order.
9864     Tiny Core (ATtiny4/5/9/10/20/40) devices have only 16 registers,
9865     so different allocation order should be used.  */
9866
9867  const int *order = (TARGET_ORDER_1 ? (AVR_TINY ? tiny_order_1 : order_1)
9868                      : TARGET_ORDER_2 ? (AVR_TINY ? tiny_order_0 : order_2)
9869                      : (AVR_TINY ? tiny_order_0 : order_0));
9870
9871  for (i = 0; i < ARRAY_SIZE (order_0); ++i)
9872      reg_alloc_order[i] = order[i];
9873}
9874
9875
9876/* Implement `TARGET_REGISTER_MOVE_COST' */
9877
9878static int
9879avr_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
9880                        reg_class_t from, reg_class_t to)
9881{
9882  return (from == STACK_REG ? 6
9883          : to == STACK_REG ? 12
9884          : 2);
9885}
9886
9887
9888/* Implement `TARGET_MEMORY_MOVE_COST' */
9889
9890static int
9891avr_memory_move_cost (machine_mode mode,
9892                      reg_class_t rclass ATTRIBUTE_UNUSED,
9893                      bool in ATTRIBUTE_UNUSED)
9894{
9895  return (mode == QImode ? 2
9896          : mode == HImode ? 4
9897          : mode == SImode ? 8
9898          : mode == SFmode ? 8
9899          : 16);
9900}
9901
9902
9903/* Mutually recursive subroutine of avr_rtx_cost for calculating the
9904   cost of an RTX operand given its context.  X is the rtx of the
9905   operand, MODE is its mode, and OUTER is the rtx_code of this
9906   operand's parent operator.  */
9907
9908static int
9909avr_operand_rtx_cost (rtx x, machine_mode mode, enum rtx_code outer,
9910		      int opno, bool speed)
9911{
9912  enum rtx_code code = GET_CODE (x);
9913  int total;
9914
9915  switch (code)
9916    {
9917    case REG:
9918    case SUBREG:
9919      return 0;
9920
9921    case CONST_INT:
9922    case CONST_FIXED:
9923    case CONST_DOUBLE:
9924      return COSTS_N_INSNS (GET_MODE_SIZE (mode));
9925
9926    default:
9927      break;
9928    }
9929
9930  total = 0;
9931  avr_rtx_costs (x, code, outer, opno, &total, speed);
9932  return total;
9933}
9934
9935/* Worker function for AVR backend's rtx_cost function.
9936   X is rtx expression whose cost is to be calculated.
9937   Return true if the complete cost has been computed.
9938   Return false if subexpressions should be scanned.
9939   In either case, *TOTAL contains the cost result.  */
9940
9941static bool
9942avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
9943                 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
9944{
9945  enum rtx_code code = (enum rtx_code) codearg;
9946  machine_mode mode = GET_MODE (x);
9947  HOST_WIDE_INT val;
9948
9949  switch (code)
9950    {
9951    case CONST_INT:
9952    case CONST_FIXED:
9953    case CONST_DOUBLE:
9954    case SYMBOL_REF:
9955    case CONST:
9956    case LABEL_REF:
9957      /* Immediate constants are as cheap as registers.  */
9958      *total = 0;
9959      return true;
9960
9961    case MEM:
9962      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
9963      return true;
9964
9965    case NEG:
9966      switch (mode)
9967	{
9968	case QImode:
9969	case SFmode:
9970	  *total = COSTS_N_INSNS (1);
9971	  break;
9972
9973        case HImode:
9974        case PSImode:
9975        case SImode:
9976          *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
9977          break;
9978
9979	default:
9980	  return false;
9981	}
9982      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9983      return true;
9984
9985    case ABS:
9986      switch (mode)
9987	{
9988	case QImode:
9989	case SFmode:
9990	  *total = COSTS_N_INSNS (1);
9991	  break;
9992
9993	default:
9994	  return false;
9995	}
9996      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9997      return true;
9998
9999    case NOT:
10000      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
10001      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10002      return true;
10003
10004    case ZERO_EXTEND:
10005      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
10006			      - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
10007      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10008      return true;
10009
10010    case SIGN_EXTEND:
10011      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
10012			      - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
10013      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10014      return true;
10015
10016    case PLUS:
10017      switch (mode)
10018	{
10019	case QImode:
10020          if (AVR_HAVE_MUL
10021              && MULT == GET_CODE (XEXP (x, 0))
10022              && register_operand (XEXP (x, 1), QImode))
10023            {
10024              /* multiply-add */
10025              *total = COSTS_N_INSNS (speed ? 4 : 3);
10026              /* multiply-add with constant: will be split and load constant. */
10027              if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
10028                *total = COSTS_N_INSNS (1) + *total;
10029              return true;
10030            }
10031	  *total = COSTS_N_INSNS (1);
10032	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10033	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10034	  break;
10035
10036	case HImode:
10037          if (AVR_HAVE_MUL
10038              && (MULT == GET_CODE (XEXP (x, 0))
10039                  || ASHIFT == GET_CODE (XEXP (x, 0)))
10040              && register_operand (XEXP (x, 1), HImode)
10041              && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
10042                  || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
10043            {
10044              /* multiply-add */
10045              *total = COSTS_N_INSNS (speed ? 5 : 4);
10046              /* multiply-add with constant: will be split and load constant. */
10047              if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
10048                *total = COSTS_N_INSNS (1) + *total;
10049              return true;
10050            }
10051	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10052	    {
10053	      *total = COSTS_N_INSNS (2);
10054	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10055					      speed);
10056	    }
10057	  else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
10058	    *total = COSTS_N_INSNS (1);
10059	  else
10060	    *total = COSTS_N_INSNS (2);
10061	  break;
10062
10063        case PSImode:
10064          if (!CONST_INT_P (XEXP (x, 1)))
10065            {
10066              *total = COSTS_N_INSNS (3);
10067              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10068                                              speed);
10069            }
10070          else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
10071            *total = COSTS_N_INSNS (2);
10072          else
10073            *total = COSTS_N_INSNS (3);
10074          break;
10075
10076	case SImode:
10077	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10078	    {
10079	      *total = COSTS_N_INSNS (4);
10080	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10081					      speed);
10082	    }
10083	  else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
10084	    *total = COSTS_N_INSNS (1);
10085	  else
10086	    *total = COSTS_N_INSNS (4);
10087	  break;
10088
10089	default:
10090	  return false;
10091	}
10092      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10093      return true;
10094
10095    case MINUS:
10096      if (AVR_HAVE_MUL
10097          && QImode == mode
10098          && register_operand (XEXP (x, 0), QImode)
10099          && MULT == GET_CODE (XEXP (x, 1)))
10100        {
10101          /* multiply-sub */
10102          *total = COSTS_N_INSNS (speed ? 4 : 3);
10103          /* multiply-sub with constant: will be split and load constant. */
10104          if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
10105            *total = COSTS_N_INSNS (1) + *total;
10106          return true;
10107        }
10108      if (AVR_HAVE_MUL
10109          && HImode == mode
10110          && register_operand (XEXP (x, 0), HImode)
10111          && (MULT == GET_CODE (XEXP (x, 1))
10112              || ASHIFT == GET_CODE (XEXP (x, 1)))
10113          && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
10114              || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
10115        {
10116          /* multiply-sub */
10117          *total = COSTS_N_INSNS (speed ? 5 : 4);
10118          /* multiply-sub with constant: will be split and load constant. */
10119          if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
10120            *total = COSTS_N_INSNS (1) + *total;
10121          return true;
10122        }
10123      /* FALLTHRU */
10124    case AND:
10125    case IOR:
10126      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
10127      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10128      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10129	*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10130      return true;
10131
10132    case XOR:
10133      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
10134      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10135      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10136      return true;
10137
10138    case MULT:
10139      switch (mode)
10140	{
10141	case QImode:
10142	  if (AVR_HAVE_MUL)
10143	    *total = COSTS_N_INSNS (!speed ? 3 : 4);
10144	  else if (!speed)
10145	    *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
10146	  else
10147	    return false;
10148	  break;
10149
10150	case HImode:
10151	  if (AVR_HAVE_MUL)
10152            {
10153              rtx op0 = XEXP (x, 0);
10154              rtx op1 = XEXP (x, 1);
10155              enum rtx_code code0 = GET_CODE (op0);
10156              enum rtx_code code1 = GET_CODE (op1);
10157              bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
10158              bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
10159
10160              if (ex0
10161                  && (u8_operand (op1, HImode)
10162                      || s8_operand (op1, HImode)))
10163                {
10164                  *total = COSTS_N_INSNS (!speed ? 4 : 6);
10165                  return true;
10166                }
10167              if (ex0
10168                  && register_operand (op1, HImode))
10169                {
10170                  *total = COSTS_N_INSNS (!speed ? 5 : 8);
10171                  return true;
10172                }
10173              else if (ex0 || ex1)
10174                {
10175                  *total = COSTS_N_INSNS (!speed ? 3 : 5);
10176                  return true;
10177                }
10178              else if (register_operand (op0, HImode)
10179                       && (u8_operand (op1, HImode)
10180                           || s8_operand (op1, HImode)))
10181                {
10182                  *total = COSTS_N_INSNS (!speed ? 6 : 9);
10183                  return true;
10184                }
10185              else
10186                *total = COSTS_N_INSNS (!speed ? 7 : 10);
10187            }
10188	  else if (!speed)
10189	    *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
10190	  else
10191	    return false;
10192	  break;
10193
10194        case PSImode:
10195          if (!speed)
10196            *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
10197          else
10198            *total = 10;
10199          break;
10200
10201	case SImode:
10202	  if (AVR_HAVE_MUL)
10203            {
10204              if (!speed)
10205                {
10206                  /* Add some additional costs besides CALL like moves etc.  */
10207
10208                  *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
10209                }
10210              else
10211                {
10212                  /* Just a rough estimate.  Even with -O2 we don't want bulky
10213                     code expanded inline.  */
10214
10215                  *total = COSTS_N_INSNS (25);
10216                }
10217            }
10218          else
10219            {
10220              if (speed)
10221                *total = COSTS_N_INSNS (300);
10222              else
10223                /* Add some additional costs besides CALL like moves etc.  */
10224                *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
10225            }
10226
10227          return true;
10228
10229	default:
10230	  return false;
10231	}
10232      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10233      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10234      return true;
10235
10236    case DIV:
10237    case MOD:
10238    case UDIV:
10239    case UMOD:
10240      if (!speed)
10241        *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
10242      else
10243        *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
10244      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10245      /* For div/mod with const-int divisor we have at least the cost of
10246         loading the divisor. */
10247      if (CONST_INT_P (XEXP (x, 1)))
10248        *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
10249      /* Add some overall penaly for clobbering and moving around registers */
10250      *total += COSTS_N_INSNS (2);
10251      return true;
10252
10253    case ROTATE:
10254      switch (mode)
10255	{
10256	case QImode:
10257	  if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
10258	    *total = COSTS_N_INSNS (1);
10259
10260	  break;
10261
10262	case HImode:
10263	  if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
10264	    *total = COSTS_N_INSNS (3);
10265
10266	  break;
10267
10268	case SImode:
10269	  if (CONST_INT_P (XEXP (x, 1)))
10270	    switch (INTVAL (XEXP (x, 1)))
10271	      {
10272	      case 8:
10273	      case 24:
10274		*total = COSTS_N_INSNS (5);
10275		break;
10276	      case 16:
10277		*total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
10278		break;
10279	      }
10280	  break;
10281
10282	default:
10283	  return false;
10284	}
10285      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10286      return true;
10287
10288    case ASHIFT:
10289      switch (mode)
10290	{
10291	case QImode:
10292	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10293	    {
10294	      *total = COSTS_N_INSNS (!speed ? 4 : 17);
10295	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10296					      speed);
10297	    }
10298	  else
10299	    {
10300	      val = INTVAL (XEXP (x, 1));
10301	      if (val == 7)
10302		*total = COSTS_N_INSNS (3);
10303	      else if (val >= 0 && val <= 7)
10304		*total = COSTS_N_INSNS (val);
10305	      else
10306		*total = COSTS_N_INSNS (1);
10307	    }
10308	  break;
10309
10310	case HImode:
10311          if (AVR_HAVE_MUL)
10312            {
10313              if (const_2_to_7_operand (XEXP (x, 1), HImode)
10314                  && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
10315                      || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
10316                {
10317                  *total = COSTS_N_INSNS (!speed ? 4 : 6);
10318                  return true;
10319                }
10320            }
10321
10322          if (const1_rtx == (XEXP (x, 1))
10323              && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
10324            {
10325              *total = COSTS_N_INSNS (2);
10326              return true;
10327            }
10328
10329	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10330	    {
10331	      *total = COSTS_N_INSNS (!speed ? 5 : 41);
10332	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10333					      speed);
10334	    }
10335	  else
10336	    switch (INTVAL (XEXP (x, 1)))
10337	      {
10338	      case 0:
10339		*total = 0;
10340		break;
10341	      case 1:
10342	      case 8:
10343		*total = COSTS_N_INSNS (2);
10344		break;
10345	      case 9:
10346		*total = COSTS_N_INSNS (3);
10347		break;
10348	      case 2:
10349	      case 3:
10350	      case 10:
10351	      case 15:
10352		*total = COSTS_N_INSNS (4);
10353		break;
10354	      case 7:
10355	      case 11:
10356	      case 12:
10357		*total = COSTS_N_INSNS (5);
10358		break;
10359	      case 4:
10360		*total = COSTS_N_INSNS (!speed ? 5 : 8);
10361		break;
10362	      case 6:
10363		*total = COSTS_N_INSNS (!speed ? 5 : 9);
10364		break;
10365	      case 5:
10366		*total = COSTS_N_INSNS (!speed ? 5 : 10);
10367		break;
10368	      default:
10369	        *total = COSTS_N_INSNS (!speed ? 5 : 41);
10370	        *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10371						speed);
10372	      }
10373	  break;
10374
10375        case PSImode:
10376          if (!CONST_INT_P (XEXP (x, 1)))
10377            {
10378              *total = COSTS_N_INSNS (!speed ? 6 : 73);
10379            }
10380          else
10381            switch (INTVAL (XEXP (x, 1)))
10382              {
10383              case 0:
10384                *total = 0;
10385                break;
10386              case 1:
10387              case 8:
10388              case 16:
10389                *total = COSTS_N_INSNS (3);
10390                break;
10391              case 23:
10392                *total = COSTS_N_INSNS (5);
10393                break;
10394              default:
10395                *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
10396                break;
10397              }
10398          break;
10399
10400	case SImode:
10401	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10402	    {
10403	      *total = COSTS_N_INSNS (!speed ? 7 : 113);
10404	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10405					      speed);
10406	    }
10407	  else
10408	    switch (INTVAL (XEXP (x, 1)))
10409	      {
10410	      case 0:
10411		*total = 0;
10412		break;
10413	      case 24:
10414		*total = COSTS_N_INSNS (3);
10415		break;
10416	      case 1:
10417	      case 8:
10418	      case 16:
10419		*total = COSTS_N_INSNS (4);
10420		break;
10421	      case 31:
10422		*total = COSTS_N_INSNS (6);
10423		break;
10424	      case 2:
10425		*total = COSTS_N_INSNS (!speed ? 7 : 8);
10426		break;
10427	      default:
10428		*total = COSTS_N_INSNS (!speed ? 7 : 113);
10429		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10430						speed);
10431	      }
10432	  break;
10433
10434	default:
10435	  return false;
10436	}
10437      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10438      return true;
10439
10440    case ASHIFTRT:
10441      switch (mode)
10442	{
10443	case QImode:
10444	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10445	    {
10446	      *total = COSTS_N_INSNS (!speed ? 4 : 17);
10447	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10448					      speed);
10449	    }
10450	  else
10451	    {
10452	      val = INTVAL (XEXP (x, 1));
10453	      if (val == 6)
10454		*total = COSTS_N_INSNS (4);
10455	      else if (val == 7)
10456		*total = COSTS_N_INSNS (2);
10457	      else if (val >= 0 && val <= 7)
10458		*total = COSTS_N_INSNS (val);
10459	      else
10460		*total = COSTS_N_INSNS (1);
10461	    }
10462	  break;
10463
10464	case HImode:
10465	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10466	    {
10467	      *total = COSTS_N_INSNS (!speed ? 5 : 41);
10468	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10469					      speed);
10470	    }
10471	  else
10472	    switch (INTVAL (XEXP (x, 1)))
10473	      {
10474	      case 0:
10475		*total = 0;
10476		break;
10477	      case 1:
10478		*total = COSTS_N_INSNS (2);
10479		break;
10480	      case 15:
10481		*total = COSTS_N_INSNS (3);
10482		break;
10483	      case 2:
10484	      case 7:
10485              case 8:
10486              case 9:
10487		*total = COSTS_N_INSNS (4);
10488		break;
10489              case 10:
10490	      case 14:
10491		*total = COSTS_N_INSNS (5);
10492		break;
10493              case 11:
10494                *total = COSTS_N_INSNS (!speed ? 5 : 6);
10495		break;
10496              case 12:
10497                *total = COSTS_N_INSNS (!speed ? 5 : 7);
10498		break;
10499              case 6:
10500	      case 13:
10501                *total = COSTS_N_INSNS (!speed ? 5 : 8);
10502		break;
10503	      default:
10504	        *total = COSTS_N_INSNS (!speed ? 5 : 41);
10505	        *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10506						speed);
10507	      }
10508	  break;
10509
10510        case PSImode:
10511          if (!CONST_INT_P (XEXP (x, 1)))
10512            {
10513              *total = COSTS_N_INSNS (!speed ? 6 : 73);
10514            }
10515          else
10516            switch (INTVAL (XEXP (x, 1)))
10517              {
10518              case 0:
10519                *total = 0;
10520                break;
10521              case 1:
10522                *total = COSTS_N_INSNS (3);
10523                break;
10524              case 16:
10525              case 8:
10526                *total = COSTS_N_INSNS (5);
10527                break;
10528              case 23:
10529                *total = COSTS_N_INSNS (4);
10530                break;
10531              default:
10532                *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
10533                break;
10534              }
10535          break;
10536
10537	case SImode:
10538	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10539	    {
10540	      *total = COSTS_N_INSNS (!speed ? 7 : 113);
10541	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10542					      speed);
10543	    }
10544	  else
10545	    switch (INTVAL (XEXP (x, 1)))
10546	      {
10547	      case 0:
10548		*total = 0;
10549		break;
10550	      case 1:
10551		*total = COSTS_N_INSNS (4);
10552		break;
10553	      case 8:
10554	      case 16:
10555	      case 24:
10556		*total = COSTS_N_INSNS (6);
10557		break;
10558	      case 2:
10559		*total = COSTS_N_INSNS (!speed ? 7 : 8);
10560		break;
10561	      case 31:
10562		*total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
10563		break;
10564	      default:
10565		*total = COSTS_N_INSNS (!speed ? 7 : 113);
10566		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10567						speed);
10568	      }
10569	  break;
10570
10571	default:
10572	  return false;
10573	}
10574      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10575      return true;
10576
10577    case LSHIFTRT:
10578      switch (mode)
10579	{
10580	case QImode:
10581	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10582	    {
10583	      *total = COSTS_N_INSNS (!speed ? 4 : 17);
10584	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10585					      speed);
10586	    }
10587	  else
10588	    {
10589	      val = INTVAL (XEXP (x, 1));
10590	      if (val == 7)
10591		*total = COSTS_N_INSNS (3);
10592	      else if (val >= 0 && val <= 7)
10593		*total = COSTS_N_INSNS (val);
10594	      else
10595		*total = COSTS_N_INSNS (1);
10596	    }
10597	  break;
10598
10599	case HImode:
10600	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10601	    {
10602	      *total = COSTS_N_INSNS (!speed ? 5 : 41);
10603	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10604					      speed);
10605	    }
10606	  else
10607	    switch (INTVAL (XEXP (x, 1)))
10608	      {
10609	      case 0:
10610		*total = 0;
10611		break;
10612	      case 1:
10613	      case 8:
10614		*total = COSTS_N_INSNS (2);
10615		break;
10616	      case 9:
10617		*total = COSTS_N_INSNS (3);
10618		break;
10619	      case 2:
10620	      case 10:
10621	      case 15:
10622		*total = COSTS_N_INSNS (4);
10623		break;
10624	      case 7:
10625              case 11:
10626		*total = COSTS_N_INSNS (5);
10627		break;
10628	      case 3:
10629	      case 12:
10630	      case 13:
10631	      case 14:
10632		*total = COSTS_N_INSNS (!speed ? 5 : 6);
10633		break;
10634	      case 4:
10635		*total = COSTS_N_INSNS (!speed ? 5 : 7);
10636		break;
10637	      case 5:
10638	      case 6:
10639		*total = COSTS_N_INSNS (!speed ? 5 : 9);
10640		break;
10641	      default:
10642	        *total = COSTS_N_INSNS (!speed ? 5 : 41);
10643	        *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10644						speed);
10645	      }
10646	  break;
10647
10648        case PSImode:
10649          if (!CONST_INT_P (XEXP (x, 1)))
10650            {
10651              *total = COSTS_N_INSNS (!speed ? 6 : 73);
10652            }
10653          else
10654            switch (INTVAL (XEXP (x, 1)))
10655              {
10656              case 0:
10657                *total = 0;
10658                break;
10659              case 1:
10660              case 8:
10661              case 16:
10662                *total = COSTS_N_INSNS (3);
10663                break;
10664              case 23:
10665                *total = COSTS_N_INSNS (5);
10666                break;
10667              default:
10668                *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
10669                break;
10670              }
10671          break;
10672
10673	case SImode:
10674	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10675	    {
10676	      *total = COSTS_N_INSNS (!speed ? 7 : 113);
10677	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10678					      speed);
10679	    }
10680	  else
10681	    switch (INTVAL (XEXP (x, 1)))
10682	      {
10683	      case 0:
10684		*total = 0;
10685		break;
10686	      case 1:
10687		*total = COSTS_N_INSNS (4);
10688		break;
10689	      case 2:
10690		*total = COSTS_N_INSNS (!speed ? 7 : 8);
10691		break;
10692	      case 8:
10693	      case 16:
10694	      case 24:
10695		*total = COSTS_N_INSNS (4);
10696		break;
10697	      case 31:
10698		*total = COSTS_N_INSNS (6);
10699		break;
10700	      default:
10701		*total = COSTS_N_INSNS (!speed ? 7 : 113);
10702		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10703						speed);
10704	      }
10705	  break;
10706
10707	default:
10708	  return false;
10709	}
10710      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10711      return true;
10712
10713    case COMPARE:
10714      switch (GET_MODE (XEXP (x, 0)))
10715	{
10716	case QImode:
10717	  *total = COSTS_N_INSNS (1);
10718	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10719	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10720	  break;
10721
10722        case HImode:
10723	  *total = COSTS_N_INSNS (2);
10724	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10725            *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10726	  else if (INTVAL (XEXP (x, 1)) != 0)
10727	    *total += COSTS_N_INSNS (1);
10728          break;
10729
10730        case PSImode:
10731          *total = COSTS_N_INSNS (3);
10732          if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
10733            *total += COSTS_N_INSNS (2);
10734          break;
10735
10736        case SImode:
10737          *total = COSTS_N_INSNS (4);
10738          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10739            *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10740	  else if (INTVAL (XEXP (x, 1)) != 0)
10741	    *total += COSTS_N_INSNS (3);
10742          break;
10743
10744	default:
10745	  return false;
10746	}
10747      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10748      return true;
10749
10750    case TRUNCATE:
10751      if (AVR_HAVE_MUL
10752          && LSHIFTRT == GET_CODE (XEXP (x, 0))
10753          && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
10754          && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
10755        {
10756          if (QImode == mode || HImode == mode)
10757            {
10758              *total = COSTS_N_INSNS (2);
10759              return true;
10760            }
10761        }
10762      break;
10763
10764    default:
10765      break;
10766    }
10767  return false;
10768}
10769
10770
10771/* Implement `TARGET_RTX_COSTS'.  */
10772
10773static bool
10774avr_rtx_costs (rtx x, int codearg, int outer_code,
10775	       int opno, int *total, bool speed)
10776{
10777  bool done = avr_rtx_costs_1 (x, codearg, outer_code,
10778                               opno, total, speed);
10779
10780  if (avr_log.rtx_costs)
10781    {
10782      avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
10783                 done, speed ? "speed" : "size", *total, outer_code, x);
10784    }
10785
10786  return done;
10787}
10788
10789
10790/* Implement `TARGET_ADDRESS_COST'.  */
10791
10792static int
10793avr_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
10794                  addr_space_t as ATTRIBUTE_UNUSED,
10795                  bool speed ATTRIBUTE_UNUSED)
10796{
10797  int cost = 4;
10798
10799  if (GET_CODE (x) == PLUS
10800      && CONST_INT_P (XEXP (x, 1))
10801      && (REG_P (XEXP (x, 0))
10802          || GET_CODE (XEXP (x, 0)) == SUBREG))
10803    {
10804      if (INTVAL (XEXP (x, 1)) >= 61)
10805        cost = 18;
10806    }
10807  else if (CONSTANT_ADDRESS_P (x))
10808    {
10809      if (optimize > 0
10810          && io_address_operand (x, QImode))
10811        cost = 2;
10812    }
10813
10814  if (avr_log.address_cost)
10815    avr_edump ("\n%?: %d = %r\n", cost, x);
10816
10817  return cost;
10818}
10819
10820/* Test for extra memory constraint 'Q'.
10821   It's a memory address based on Y or Z pointer with valid displacement.  */
10822
10823int
10824extra_constraint_Q (rtx x)
10825{
10826  int ok = 0;
10827
10828  if (GET_CODE (XEXP (x,0)) == PLUS
10829      && REG_P (XEXP (XEXP (x,0), 0))
10830      && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
10831      && (INTVAL (XEXP (XEXP (x,0), 1))
10832	  <= MAX_LD_OFFSET (GET_MODE (x))))
10833    {
10834      rtx xx = XEXP (XEXP (x,0), 0);
10835      int regno = REGNO (xx);
10836
10837      ok = (/* allocate pseudos */
10838            regno >= FIRST_PSEUDO_REGISTER
10839            /* strictly check */
10840            || regno == REG_Z || regno == REG_Y
10841            /* XXX frame & arg pointer checks */
10842            || xx == frame_pointer_rtx
10843            || xx == arg_pointer_rtx);
10844
10845      if (avr_log.constraints)
10846        avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
10847                   ok, reload_completed, reload_in_progress, x);
10848    }
10849
10850  return ok;
10851}
10852
10853/* Convert condition code CONDITION to the valid AVR condition code.  */
10854
10855RTX_CODE
10856avr_normalize_condition (RTX_CODE condition)
10857{
10858  switch (condition)
10859    {
10860    case GT:
10861      return GE;
10862    case GTU:
10863      return GEU;
10864    case LE:
10865      return LT;
10866    case LEU:
10867      return LTU;
10868    default:
10869      gcc_unreachable ();
10870    }
10871}
10872
10873/* Helper function for `avr_reorg'.  */
10874
10875static rtx
10876avr_compare_pattern (rtx_insn *insn)
10877{
10878  rtx pattern = single_set (insn);
10879
10880  if (pattern
10881      && NONJUMP_INSN_P (insn)
10882      && SET_DEST (pattern) == cc0_rtx
10883      && GET_CODE (SET_SRC (pattern)) == COMPARE)
10884    {
10885      machine_mode mode0 = GET_MODE (XEXP (SET_SRC (pattern), 0));
10886      machine_mode mode1 = GET_MODE (XEXP (SET_SRC (pattern), 1));
10887
10888      /* The 64-bit comparisons have fixed operands ACC_A and ACC_B.
10889         They must not be swapped, thus skip them.  */
10890
10891      if ((mode0 == VOIDmode || GET_MODE_SIZE (mode0) <= 4)
10892          && (mode1 == VOIDmode || GET_MODE_SIZE (mode1) <= 4))
10893        return pattern;
10894    }
10895
10896  return NULL_RTX;
10897}
10898
10899/* Helper function for `avr_reorg'.  */
10900
10901/* Expansion of switch/case decision trees leads to code like
10902
10903       cc0 = compare (Reg, Num)
10904       if (cc0 == 0)
10905         goto L1
10906
10907       cc0 = compare (Reg, Num)
10908       if (cc0 > 0)
10909         goto L2
10910
10911   The second comparison is superfluous and can be deleted.
10912   The second jump condition can be transformed from a
10913   "difficult" one to a "simple" one because "cc0 > 0" and
10914   "cc0 >= 0" will have the same effect here.
10915
10916   This function relies on the way switch/case is being expaned
10917   as binary decision tree.  For example code see PR 49903.
10918
10919   Return TRUE if optimization performed.
10920   Return FALSE if nothing changed.
10921
10922   INSN1 is a comparison, i.e. avr_compare_pattern != 0.
10923
10924   We don't want to do this in text peephole because it is
10925   tedious to work out jump offsets there and the second comparison
10926   might have been transormed by `avr_reorg'.
10927
10928   RTL peephole won't do because peephole2 does not scan across
10929   basic blocks.  */
10930
10931static bool
10932avr_reorg_remove_redundant_compare (rtx_insn *insn1)
10933{
10934  rtx comp1, ifelse1, xcond1;
10935  rtx_insn *branch1;
10936  rtx comp2, ifelse2, xcond2;
10937  rtx_insn *branch2, *insn2;
10938  enum rtx_code code;
10939  rtx_insn *jump;
10940  rtx target, cond;
10941
10942  /* Look out for:  compare1 - branch1 - compare2 - branch2  */
10943
10944  branch1 = next_nonnote_nondebug_insn (insn1);
10945  if (!branch1 || !JUMP_P (branch1))
10946    return false;
10947
10948  insn2 = next_nonnote_nondebug_insn (branch1);
10949  if (!insn2 || !avr_compare_pattern (insn2))
10950    return false;
10951
10952  branch2 = next_nonnote_nondebug_insn (insn2);
10953  if (!branch2 || !JUMP_P (branch2))
10954    return false;
10955
10956  comp1 = avr_compare_pattern (insn1);
10957  comp2 = avr_compare_pattern (insn2);
10958  xcond1 = single_set (branch1);
10959  xcond2 = single_set (branch2);
10960
10961  if (!comp1 || !comp2
10962      || !rtx_equal_p (comp1, comp2)
10963      || !xcond1 || SET_DEST (xcond1) != pc_rtx
10964      || !xcond2 || SET_DEST (xcond2) != pc_rtx
10965      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
10966      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
10967    {
10968      return false;
10969    }
10970
10971  comp1 = SET_SRC (comp1);
10972  ifelse1 = SET_SRC (xcond1);
10973  ifelse2 = SET_SRC (xcond2);
10974
10975  /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE.  */
10976
10977  if (EQ != GET_CODE (XEXP (ifelse1, 0))
10978      || !REG_P (XEXP (comp1, 0))
10979      || !CONST_INT_P (XEXP (comp1, 1))
10980      || XEXP (ifelse1, 2) != pc_rtx
10981      || XEXP (ifelse2, 2) != pc_rtx
10982      || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
10983      || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
10984      || !COMPARISON_P (XEXP (ifelse2, 0))
10985      || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
10986      || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
10987      || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
10988      || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
10989    {
10990      return false;
10991    }
10992
10993  /* We filtered the insn sequence to look like
10994
10995        (set (cc0)
10996             (compare (reg:M N)
10997                      (const_int VAL)))
10998        (set (pc)
10999             (if_then_else (eq (cc0)
11000                               (const_int 0))
11001                           (label_ref L1)
11002                           (pc)))
11003
11004        (set (cc0)
11005             (compare (reg:M N)
11006                      (const_int VAL)))
11007        (set (pc)
11008             (if_then_else (CODE (cc0)
11009                                 (const_int 0))
11010                           (label_ref L2)
11011                           (pc)))
11012  */
11013
11014  code = GET_CODE (XEXP (ifelse2, 0));
11015
11016  /* Map GT/GTU to GE/GEU which is easier for AVR.
11017     The first two instructions compare/branch on EQ
11018     so we may replace the difficult
11019
11020        if (x == VAL)   goto L1;
11021        if (x > VAL)    goto L2;
11022
11023     with easy
11024
11025         if (x == VAL)   goto L1;
11026         if (x >= VAL)   goto L2;
11027
11028     Similarly, replace LE/LEU by LT/LTU.  */
11029
11030  switch (code)
11031    {
11032    case EQ:
11033    case LT:  case LTU:
11034    case GE:  case GEU:
11035      break;
11036
11037    case LE:  case LEU:
11038    case GT:  case GTU:
11039      code = avr_normalize_condition (code);
11040      break;
11041
11042    default:
11043      return false;
11044    }
11045
11046  /* Wrap the branches into UNSPECs so they won't be changed or
11047     optimized in the remainder.  */
11048
11049  target = XEXP (XEXP (ifelse1, 1), 0);
11050  cond = XEXP (ifelse1, 0);
11051  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
11052
11053  JUMP_LABEL (jump) = JUMP_LABEL (branch1);
11054
11055  target = XEXP (XEXP (ifelse2, 1), 0);
11056  cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
11057  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
11058
11059  JUMP_LABEL (jump) = JUMP_LABEL (branch2);
11060
11061  /* The comparisons in insn1 and insn2 are exactly the same;
11062     insn2 is superfluous so delete it.  */
11063
11064  delete_insn (insn2);
11065  delete_insn (branch1);
11066  delete_insn (branch2);
11067
11068  return true;
11069}
11070
11071
11072/* Implement `TARGET_MACHINE_DEPENDENT_REORG'.  */
11073/* Optimize conditional jumps.  */
11074
11075static void
11076avr_reorg (void)
11077{
11078  rtx_insn *insn = get_insns();
11079
11080  for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
11081    {
11082      rtx pattern = avr_compare_pattern (insn);
11083
11084      if (!pattern)
11085        continue;
11086
11087      if (optimize
11088          && avr_reorg_remove_redundant_compare (insn))
11089        {
11090          continue;
11091        }
11092
11093      if (compare_diff_p (insn))
11094	{
11095          /* Now we work under compare insn with difficult branch.  */
11096
11097          rtx next = next_real_insn (insn);
11098          rtx pat = PATTERN (next);
11099
11100          pattern = SET_SRC (pattern);
11101
11102          if (true_regnum (XEXP (pattern, 0)) >= 0
11103              && true_regnum (XEXP (pattern, 1)) >= 0)
11104            {
11105              rtx x = XEXP (pattern, 0);
11106              rtx src = SET_SRC (pat);
11107              rtx t = XEXP (src,0);
11108              PUT_CODE (t, swap_condition (GET_CODE (t)));
11109              XEXP (pattern, 0) = XEXP (pattern, 1);
11110              XEXP (pattern, 1) = x;
11111              INSN_CODE (next) = -1;
11112            }
11113          else if (true_regnum (XEXP (pattern, 0)) >= 0
11114                   && XEXP (pattern, 1) == const0_rtx)
11115            {
11116              /* This is a tst insn, we can reverse it.  */
11117              rtx src = SET_SRC (pat);
11118              rtx t = XEXP (src,0);
11119
11120              PUT_CODE (t, swap_condition (GET_CODE (t)));
11121              XEXP (pattern, 1) = XEXP (pattern, 0);
11122              XEXP (pattern, 0) = const0_rtx;
11123              INSN_CODE (next) = -1;
11124              INSN_CODE (insn) = -1;
11125            }
11126          else if (true_regnum (XEXP (pattern, 0)) >= 0
11127                   && CONST_INT_P (XEXP (pattern, 1)))
11128            {
11129              rtx x = XEXP (pattern, 1);
11130              rtx src = SET_SRC (pat);
11131              rtx t = XEXP (src,0);
11132              machine_mode mode = GET_MODE (XEXP (pattern, 0));
11133
11134              if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
11135                {
11136                  XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
11137                  PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
11138                  INSN_CODE (next) = -1;
11139                  INSN_CODE (insn) = -1;
11140                }
11141            }
11142        }
11143    }
11144}
11145
11146/* Returns register number for function return value.*/
11147
11148static inline unsigned int
11149avr_ret_register (void)
11150{
11151  return 24;
11152}
11153
11154
11155/* Implement `TARGET_FUNCTION_VALUE_REGNO_P'.  */
11156
11157static bool
11158avr_function_value_regno_p (const unsigned int regno)
11159{
11160  return (regno == avr_ret_register ());
11161}
11162
11163
11164/* Implement `TARGET_LIBCALL_VALUE'.  */
11165/* Create an RTX representing the place where a
11166   library function returns a value of mode MODE.  */
11167
11168static rtx
11169avr_libcall_value (machine_mode mode,
11170		   const_rtx func ATTRIBUTE_UNUSED)
11171{
11172  int offs = GET_MODE_SIZE (mode);
11173
11174  if (offs <= 4)
11175    offs = (offs + 1) & ~1;
11176
11177  return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
11178}
11179
11180
11181/* Implement `TARGET_FUNCTION_VALUE'.  */
11182/* Create an RTX representing the place where a
11183   function returns a value of data type VALTYPE.  */
11184
11185static rtx
11186avr_function_value (const_tree type,
11187                    const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
11188                    bool outgoing ATTRIBUTE_UNUSED)
11189{
11190  unsigned int offs;
11191
11192  if (TYPE_MODE (type) != BLKmode)
11193    return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
11194
11195  offs = int_size_in_bytes (type);
11196  if (offs < 2)
11197    offs = 2;
11198  if (offs > 2 && offs < GET_MODE_SIZE (SImode))
11199    offs = GET_MODE_SIZE (SImode);
11200  else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
11201    offs = GET_MODE_SIZE (DImode);
11202
11203  return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
11204}
11205
11206int
11207test_hard_reg_class (enum reg_class rclass, rtx x)
11208{
11209  int regno = true_regnum (x);
11210  if (regno < 0)
11211    return 0;
11212
11213  if (TEST_HARD_REG_CLASS (rclass, regno))
11214    return 1;
11215
11216  return 0;
11217}
11218
11219
11220/* Helper for jump_over_one_insn_p:  Test if INSN is a 2-word instruction
11221   and thus is suitable to be skipped by CPSE, SBRC, etc.  */
11222
11223static bool
11224avr_2word_insn_p (rtx_insn *insn)
11225{
11226  if (TARGET_SKIP_BUG
11227      || !insn
11228      || 2 != get_attr_length (insn))
11229    {
11230      return false;
11231    }
11232
11233  switch (INSN_CODE (insn))
11234    {
11235    default:
11236      return false;
11237
11238    case CODE_FOR_movqi_insn:
11239    case CODE_FOR_movuqq_insn:
11240    case CODE_FOR_movqq_insn:
11241      {
11242        rtx set  = single_set (insn);
11243        rtx src  = SET_SRC (set);
11244        rtx dest = SET_DEST (set);
11245
11246        /* Factor out LDS and STS from movqi_insn.  */
11247
11248        if (MEM_P (dest)
11249            && (REG_P (src) || src == CONST0_RTX (GET_MODE (dest))))
11250          {
11251            return CONSTANT_ADDRESS_P (XEXP (dest, 0));
11252          }
11253        else if (REG_P (dest)
11254                 && MEM_P (src))
11255          {
11256            return CONSTANT_ADDRESS_P (XEXP (src, 0));
11257          }
11258
11259        return false;
11260      }
11261
11262    case CODE_FOR_call_insn:
11263    case CODE_FOR_call_value_insn:
11264      return true;
11265    }
11266}
11267
11268
11269int
11270jump_over_one_insn_p (rtx_insn *insn, rtx dest)
11271{
11272  int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
11273		      ? XEXP (dest, 0)
11274		      : dest);
11275  int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
11276  int dest_addr = INSN_ADDRESSES (uid);
11277  int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
11278
11279  return (jump_offset == 1
11280          || (jump_offset == 2
11281              && avr_2word_insn_p (next_active_insn (insn))));
11282}
11283
11284
11285/* Worker function for `HARD_REGNO_MODE_OK'.  */
11286/* Returns 1 if a value of mode MODE can be stored starting with hard
11287   register number REGNO.  On the enhanced core, anything larger than
11288   1 byte must start in even numbered register for "movw" to work
11289   (this way we don't have to check for odd registers everywhere).  */
11290
11291int
11292avr_hard_regno_mode_ok (int regno, machine_mode mode)
11293{
11294  /* NOTE: 8-bit values must not be disallowed for R28 or R29.
11295        Disallowing QI et al. in these regs might lead to code like
11296            (set (subreg:QI (reg:HI 28) n) ...)
11297        which will result in wrong code because reload does not
11298        handle SUBREGs of hard regsisters like this.
11299        This could be fixed in reload.  However, it appears
11300        that fixing reload is not wanted by reload people.  */
11301
11302  /* Any GENERAL_REGS register can hold 8-bit values.  */
11303
11304  if (GET_MODE_SIZE (mode) == 1)
11305    return 1;
11306
11307  /* FIXME: Ideally, the following test is not needed.
11308        However, it turned out that it can reduce the number
11309        of spill fails.  AVR and it's poor endowment with
11310        address registers is extreme stress test for reload.  */
11311
11312  if (GET_MODE_SIZE (mode) >= 4
11313      && regno >= REG_X)
11314    return 0;
11315
11316  /* All modes larger than 8 bits should start in an even register.  */
11317
11318  return !(regno & 1);
11319}
11320
11321
11322/* Implement `HARD_REGNO_CALL_PART_CLOBBERED'.  */
11323
11324int
11325avr_hard_regno_call_part_clobbered (unsigned regno, machine_mode mode)
11326{
11327  /* FIXME: This hook gets called with MODE:REGNO combinations that don't
11328        represent valid hard registers like, e.g. HI:29.  Returning TRUE
11329        for such registers can lead to performance degradation as mentioned
11330        in PR53595.  Thus, report invalid hard registers as FALSE.  */
11331
11332  if (!avr_hard_regno_mode_ok (regno, mode))
11333    return 0;
11334
11335  /* Return true if any of the following boundaries is crossed:
11336     17/18, 27/28 and 29/30.  */
11337
11338  return ((regno < 18 && regno + GET_MODE_SIZE (mode) > 18)
11339          || (regno < REG_Y && regno + GET_MODE_SIZE (mode) > REG_Y)
11340          || (regno < REG_Z && regno + GET_MODE_SIZE (mode) > REG_Z));
11341}
11342
11343
11344/* Implement `MODE_CODE_BASE_REG_CLASS'.  */
11345
11346enum reg_class
11347avr_mode_code_base_reg_class (machine_mode mode ATTRIBUTE_UNUSED,
11348                              addr_space_t as, RTX_CODE outer_code,
11349                              RTX_CODE index_code ATTRIBUTE_UNUSED)
11350{
11351  if (!ADDR_SPACE_GENERIC_P (as))
11352    {
11353      return POINTER_Z_REGS;
11354    }
11355
11356  if (!avr_strict_X)
11357    return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
11358
11359  return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
11360}
11361
11362
11363/* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'.  */
11364
11365bool
11366avr_regno_mode_code_ok_for_base_p (int regno,
11367                                   machine_mode mode ATTRIBUTE_UNUSED,
11368                                   addr_space_t as ATTRIBUTE_UNUSED,
11369                                   RTX_CODE outer_code,
11370                                   RTX_CODE index_code ATTRIBUTE_UNUSED)
11371{
11372  bool ok = false;
11373
11374  if (!ADDR_SPACE_GENERIC_P (as))
11375    {
11376      if (regno < FIRST_PSEUDO_REGISTER
11377          && regno == REG_Z)
11378        {
11379          return true;
11380        }
11381
11382      if (reg_renumber)
11383        {
11384          regno = reg_renumber[regno];
11385
11386          if (regno == REG_Z)
11387            {
11388              return true;
11389            }
11390        }
11391
11392      return false;
11393    }
11394
11395  if (regno < FIRST_PSEUDO_REGISTER
11396      && (regno == REG_X
11397          || regno == REG_Y
11398          || regno == REG_Z
11399          || regno == ARG_POINTER_REGNUM))
11400    {
11401      ok = true;
11402    }
11403  else if (reg_renumber)
11404    {
11405      regno = reg_renumber[regno];
11406
11407      if (regno == REG_X
11408          || regno == REG_Y
11409          || regno == REG_Z
11410          || regno == ARG_POINTER_REGNUM)
11411        {
11412          ok = true;
11413        }
11414    }
11415
11416  if (avr_strict_X
11417      && PLUS == outer_code
11418      && regno == REG_X)
11419    {
11420      ok = false;
11421    }
11422
11423  return ok;
11424}
11425
11426
11427/* A helper for `output_reload_insisf' and `output_reload_inhi'.  */
11428/* Set 32-bit register OP[0] to compile-time constant OP[1].
11429   CLOBBER_REG is a QI clobber register or NULL_RTX.
11430   LEN == NULL: output instructions.
11431   LEN != NULL: set *LEN to the length of the instruction sequence
11432                (in words) printed with LEN = NULL.
11433   If CLEAR_P is true, OP[0] had been cleard to Zero already.
11434   If CLEAR_P is false, nothing is known about OP[0].
11435
11436   The effect on cc0 is as follows:
11437
11438   Load 0 to any register except ZERO_REG : NONE
11439   Load ld register with any value        : NONE
11440   Anything else:                         : CLOBBER  */
11441
11442static void
11443output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
11444{
11445  rtx src = op[1];
11446  rtx dest = op[0];
11447  rtx xval, xdest[4];
11448  int ival[4];
11449  int clobber_val = 1234;
11450  bool cooked_clobber_p = false;
11451  bool set_p = false;
11452  machine_mode mode = GET_MODE (dest);
11453  int n, n_bytes = GET_MODE_SIZE (mode);
11454
11455  gcc_assert (REG_P (dest)
11456              && CONSTANT_P (src));
11457
11458  if (len)
11459    *len = 0;
11460
11461  /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
11462     but has some subregs that are in LD_REGS.  Use the MSB (REG:QI 17).  */
11463
11464  if (REGNO (dest) < 16
11465      && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
11466    {
11467      clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
11468    }
11469
11470  /* We might need a clobber reg but don't have one.  Look at the value to
11471     be loaded more closely.  A clobber is only needed if it is a symbol
11472     or contains a byte that is neither 0, -1 or a power of 2.  */
11473
11474  if (NULL_RTX == clobber_reg
11475      && !test_hard_reg_class (LD_REGS, dest)
11476      && (! (CONST_INT_P (src) || CONST_FIXED_P (src) || CONST_DOUBLE_P (src))
11477          || !avr_popcount_each_byte (src, n_bytes,
11478                                      (1 << 0) | (1 << 1) | (1 << 8))))
11479    {
11480      /* We have no clobber register but need one.  Cook one up.
11481         That's cheaper than loading from constant pool.  */
11482
11483      cooked_clobber_p = true;
11484      clobber_reg = all_regs_rtx[REG_Z + 1];
11485      avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
11486    }
11487
11488  /* Now start filling DEST from LSB to MSB.  */
11489
11490  for (n = 0; n < n_bytes; n++)
11491    {
11492      int ldreg_p;
11493      bool done_byte = false;
11494      int j;
11495      rtx xop[3];
11496
11497      /* Crop the n-th destination byte.  */
11498
11499      xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
11500      ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);
11501
11502      if (!CONST_INT_P (src)
11503          && !CONST_FIXED_P (src)
11504          && !CONST_DOUBLE_P (src))
11505        {
11506          static const char* const asm_code[][2] =
11507            {
11508              { "ldi %2,lo8(%1)"  CR_TAB "mov %0,%2",    "ldi %0,lo8(%1)"  },
11509              { "ldi %2,hi8(%1)"  CR_TAB "mov %0,%2",    "ldi %0,hi8(%1)"  },
11510              { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2",    "ldi %0,hlo8(%1)" },
11511              { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2",    "ldi %0,hhi8(%1)" }
11512            };
11513
11514          xop[0] = xdest[n];
11515          xop[1] = src;
11516          xop[2] = clobber_reg;
11517
11518          avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);
11519
11520          continue;
11521        }
11522
11523      /* Crop the n-th source byte.  */
11524
11525      xval = simplify_gen_subreg (QImode, src, mode, n);
11526      ival[n] = INTVAL (xval);
11527
11528      /* Look if we can reuse the low word by means of MOVW.  */
11529
11530      if (n == 2
11531          && n_bytes >= 4
11532          && AVR_HAVE_MOVW)
11533        {
11534          rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
11535          rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
11536
11537          if (INTVAL (lo16) == INTVAL (hi16))
11538            {
11539              if (0 != INTVAL (lo16)
11540                  || !clear_p)
11541                {
11542                  avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
11543                }
11544
11545              break;
11546            }
11547        }
11548
11549      /* Don't use CLR so that cc0 is set as expected.  */
11550
11551      if (ival[n] == 0)
11552        {
11553          if (!clear_p)
11554            avr_asm_len (ldreg_p ? "ldi %0,0"
11555                         : AVR_ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
11556                         : "mov %0,__zero_reg__",
11557                         &xdest[n], len, 1);
11558          continue;
11559        }
11560
11561      if (clobber_val == ival[n]
11562          && REGNO (clobber_reg) == REGNO (xdest[n]))
11563        {
11564          continue;
11565        }
11566
11567      /* LD_REGS can use LDI to move a constant value */
11568
11569      if (ldreg_p)
11570        {
11571          xop[0] = xdest[n];
11572          xop[1] = xval;
11573          avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
11574          continue;
11575        }
11576
11577      /* Try to reuse value already loaded in some lower byte. */
11578
11579      for (j = 0; j < n; j++)
11580        if (ival[j] == ival[n])
11581          {
11582            xop[0] = xdest[n];
11583            xop[1] = xdest[j];
11584
11585            avr_asm_len ("mov %0,%1", xop, len, 1);
11586            done_byte = true;
11587            break;
11588          }
11589
11590      if (done_byte)
11591        continue;
11592
11593      /* Need no clobber reg for -1: Use CLR/DEC */
11594
11595      if (-1 == ival[n])
11596        {
11597          if (!clear_p)
11598            avr_asm_len ("clr %0", &xdest[n], len, 1);
11599
11600          avr_asm_len ("dec %0", &xdest[n], len, 1);
11601          continue;
11602        }
11603      else if (1 == ival[n])
11604        {
11605          if (!clear_p)
11606            avr_asm_len ("clr %0", &xdest[n], len, 1);
11607
11608          avr_asm_len ("inc %0", &xdest[n], len, 1);
11609          continue;
11610        }
11611
11612      /* Use T flag or INC to manage powers of 2 if we have
11613         no clobber reg.  */
11614
11615      if (NULL_RTX == clobber_reg
11616          && single_one_operand (xval, QImode))
11617        {
11618          xop[0] = xdest[n];
11619          xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
11620
11621          gcc_assert (constm1_rtx != xop[1]);
11622
11623          if (!set_p)
11624            {
11625              set_p = true;
11626              avr_asm_len ("set", xop, len, 1);
11627            }
11628
11629          if (!clear_p)
11630            avr_asm_len ("clr %0", xop, len, 1);
11631
11632          avr_asm_len ("bld %0,%1", xop, len, 1);
11633          continue;
11634        }
11635
11636      /* We actually need the LD_REGS clobber reg.  */
11637
11638      gcc_assert (NULL_RTX != clobber_reg);
11639
11640      xop[0] = xdest[n];
11641      xop[1] = xval;
11642      xop[2] = clobber_reg;
11643      clobber_val = ival[n];
11644
11645      avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
11646                   "mov %0,%2", xop, len, 2);
11647    }
11648
11649  /* If we cooked up a clobber reg above, restore it.  */
11650
11651  if (cooked_clobber_p)
11652    {
11653      avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
11654    }
11655}
11656
11657
11658/* Reload the constant OP[1] into the HI register OP[0].
11659   CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
11660   into a NO_LD_REGS register.  If CLOBBER_REG is NULL_RTX we either don't
11661   need a clobber reg or have to cook one up.
11662
11663   PLEN == NULL: Output instructions.
11664   PLEN != NULL: Output nothing.  Set *PLEN to number of words occupied
11665                 by the insns printed.
11666
11667   Return "".  */
11668
11669const char*
11670output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
11671{
11672  output_reload_in_const (op, clobber_reg, plen, false);
11673  return "";
11674}
11675
11676
11677/* Reload a SI or SF compile time constant OP[1] into the register OP[0].
11678   CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
11679   into a NO_LD_REGS register.  If CLOBBER_REG is NULL_RTX we either don't
11680   need a clobber reg or have to cook one up.
11681
11682   LEN == NULL: Output instructions.
11683
11684   LEN != NULL: Output nothing.  Set *LEN to number of words occupied
11685                by the insns printed.
11686
11687   Return "".  */
11688
11689const char *
11690output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
11691{
11692  if (AVR_HAVE_MOVW
11693      && !test_hard_reg_class (LD_REGS, op[0])
11694      && (CONST_INT_P (op[1])
11695          || CONST_FIXED_P (op[1])
11696          || CONST_DOUBLE_P (op[1])))
11697    {
11698      int len_clr, len_noclr;
11699
11700      /* In some cases it is better to clear the destination beforehand, e.g.
11701
11702             CLR R2   CLR R3   MOVW R4,R2   INC R2
11703
11704         is shorther than
11705
11706             CLR R2   INC R2   CLR  R3      CLR R4   CLR R5
11707
11708         We find it too tedious to work that out in the print function.
11709         Instead, we call the print function twice to get the lengths of
11710         both methods and use the shortest one.  */
11711
11712      output_reload_in_const (op, clobber_reg, &len_clr, true);
11713      output_reload_in_const (op, clobber_reg, &len_noclr, false);
11714
11715      if (len_noclr - len_clr == 4)
11716        {
11717          /* Default needs 4 CLR instructions: clear register beforehand.  */
11718
11719          avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
11720                       "mov %B0,__zero_reg__" CR_TAB
11721                       "movw %C0,%A0", &op[0], len, 3);
11722
11723          output_reload_in_const (op, clobber_reg, len, true);
11724
11725          if (len)
11726            *len += 3;
11727
11728          return "";
11729        }
11730    }
11731
11732  /* Default: destination not pre-cleared.  */
11733
11734  output_reload_in_const (op, clobber_reg, len, false);
11735  return "";
11736}
11737
11738const char*
11739avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
11740{
11741  output_reload_in_const (op, clobber_reg, len, false);
11742  return "";
11743}
11744
11745
11746/* Worker function for `ASM_OUTPUT_ADDR_VEC_ELT'.  */
11747
11748void
11749avr_output_addr_vec_elt (FILE *stream, int value)
11750{
11751  if (AVR_HAVE_JMP_CALL)
11752    fprintf (stream, "\t.word gs(.L%d)\n", value);
11753  else
11754    fprintf (stream, "\trjmp .L%d\n", value);
11755}
11756
11757static void
11758avr_conditional_register_usage(void)
11759{
11760  if (AVR_TINY)
11761    {
11762      unsigned int i;
11763
11764      const int tiny_reg_alloc_order[] = {
11765        24, 25,
11766        22, 23,
11767        30, 31,
11768        26, 27,
11769        28, 29,
11770        21, 20, 19, 18,
11771        16, 17,
11772        32, 33, 34, 35,
11773        15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
11774      };
11775
11776      /* Set R0-R17 as fixed registers. Reset R0-R17 in call used register list
11777         - R0-R15 are not available in Tiny Core devices
11778         - R16 and R17 are fixed registers.  */
11779
11780      for (i = 0;  i <= 17;  i++)
11781        {
11782          fixed_regs[i] = 1;
11783          call_used_regs[i] = 1;
11784        }
11785
11786      /* Set R18 to R21 as callee saved registers
11787         - R18, R19, R20 and R21 are the callee saved registers in
11788           Tiny Core devices  */
11789
11790      for (i = 18; i <= LAST_CALLEE_SAVED_REG; i++)
11791        {
11792          call_used_regs[i] = 0;
11793        }
11794
11795      /* Update register allocation order for Tiny Core devices */
11796
11797      for (i = 0; i < ARRAY_SIZE (tiny_reg_alloc_order); i++)
11798        {
11799          reg_alloc_order[i] = tiny_reg_alloc_order[i];
11800        }
11801
11802      CLEAR_HARD_REG_SET (reg_class_contents[(int) ADDW_REGS]);
11803      CLEAR_HARD_REG_SET (reg_class_contents[(int) NO_LD_REGS]);
11804    }
11805}
11806
11807/* Implement `TARGET_HARD_REGNO_SCRATCH_OK'.  */
11808/* Returns true if SCRATCH are safe to be allocated as a scratch
11809   registers (for a define_peephole2) in the current function.  */
11810
11811static bool
11812avr_hard_regno_scratch_ok (unsigned int regno)
11813{
11814  /* Interrupt functions can only use registers that have already been saved
11815     by the prologue, even if they would normally be call-clobbered.  */
11816
11817  if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
11818      && !df_regs_ever_live_p (regno))
11819    return false;
11820
11821  /* Don't allow hard registers that might be part of the frame pointer.
11822     Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
11823     and don't care for a frame pointer that spans more than one register.  */
11824
11825  if ((!reload_completed || frame_pointer_needed)
11826      && (regno == REG_Y || regno == REG_Y + 1))
11827    {
11828      return false;
11829    }
11830
11831  return true;
11832}
11833
11834
11835/* Worker function for `HARD_REGNO_RENAME_OK'.  */
11836/* Return nonzero if register OLD_REG can be renamed to register NEW_REG.  */
11837
11838int
11839avr_hard_regno_rename_ok (unsigned int old_reg,
11840			  unsigned int new_reg)
11841{
11842  /* Interrupt functions can only use registers that have already been
11843     saved by the prologue, even if they would normally be
11844     call-clobbered.  */
11845
11846  if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
11847      && !df_regs_ever_live_p (new_reg))
11848    return 0;
11849
11850  /* Don't allow hard registers that might be part of the frame pointer.
11851     Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
11852     and don't care for a frame pointer that spans more than one register.  */
11853
11854  if ((!reload_completed || frame_pointer_needed)
11855      && (old_reg == REG_Y || old_reg == REG_Y + 1
11856          || new_reg == REG_Y || new_reg == REG_Y + 1))
11857    {
11858      return 0;
11859    }
11860
11861  return 1;
11862}
11863
11864/* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
11865   or memory location in the I/O space (QImode only).
11866
11867   Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
11868   Operand 1: register operand to test, or CONST_INT memory address.
11869   Operand 2: bit number.
11870   Operand 3: label to jump to if the test is true.  */
11871
11872const char*
11873avr_out_sbxx_branch (rtx_insn *insn, rtx operands[])
11874{
11875  enum rtx_code comp = GET_CODE (operands[0]);
11876  bool long_jump = get_attr_length (insn) >= 4;
11877  bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
11878
11879  if (comp == GE)
11880    comp = EQ;
11881  else if (comp == LT)
11882    comp = NE;
11883
11884  if (reverse)
11885    comp = reverse_condition (comp);
11886
11887  switch (GET_CODE (operands[1]))
11888    {
11889    default:
11890      gcc_unreachable();
11891
11892    case CONST_INT:
11893    case CONST:
11894    case SYMBOL_REF:
11895
11896      if (low_io_address_operand (operands[1], QImode))
11897        {
11898          if (comp == EQ)
11899            output_asm_insn ("sbis %i1,%2", operands);
11900          else
11901            output_asm_insn ("sbic %i1,%2", operands);
11902        }
11903      else
11904        {
11905	  gcc_assert (io_address_operand (operands[1], QImode));
11906          output_asm_insn ("in __tmp_reg__,%i1", operands);
11907          if (comp == EQ)
11908            output_asm_insn ("sbrs __tmp_reg__,%2", operands);
11909          else
11910            output_asm_insn ("sbrc __tmp_reg__,%2", operands);
11911        }
11912
11913      break; /* CONST_INT */
11914
11915    case REG:
11916
11917      if (comp == EQ)
11918        output_asm_insn ("sbrs %T1%T2", operands);
11919      else
11920        output_asm_insn ("sbrc %T1%T2", operands);
11921
11922      break; /* REG */
11923    }        /* switch */
11924
11925  if (long_jump)
11926    return ("rjmp .+4" CR_TAB
11927            "jmp %x3");
11928
11929  if (!reverse)
11930    return "rjmp %x3";
11931
11932  return "";
11933}
11934
11935/* Worker function for `TARGET_ASM_CONSTRUCTOR'.  */
11936
11937static void
11938avr_asm_out_ctor (rtx symbol, int priority)
11939{
11940  fputs ("\t.global __do_global_ctors\n", asm_out_file);
11941  default_ctor_section_asm_out_constructor (symbol, priority);
11942}
11943
11944
11945/* Worker function for `TARGET_ASM_DESTRUCTOR'.  */
11946
11947static void
11948avr_asm_out_dtor (rtx symbol, int priority)
11949{
11950  fputs ("\t.global __do_global_dtors\n", asm_out_file);
11951  default_dtor_section_asm_out_destructor (symbol, priority);
11952}
11953
11954
11955/* Worker function for `TARGET_RETURN_IN_MEMORY'.  */
11956
11957static bool
11958avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
11959{
11960  HOST_WIDE_INT size = int_size_in_bytes (type);
11961  HOST_WIDE_INT ret_size_limit = AVR_TINY ? 4 : 8;
11962
11963  /* In avr, there are 8 return registers. But, for Tiny Core
11964     (ATtiny4/5/9/10/20/40) devices, only 4 registers are available.
11965     Return true if size is unknown or greater than the limit.  */
11966
11967  if (size == -1 || size > ret_size_limit)
11968    {
11969      return true;
11970    }
11971  else
11972    {
11973      return false;
11974    }
11975}
11976
11977
11978/* Implement `CASE_VALUES_THRESHOLD'.  */
11979/* Supply the default for --param case-values-threshold=0  */
11980
11981static unsigned int
11982avr_case_values_threshold (void)
11983{
11984  /* The exact break-even point between a jump table and an if-else tree
11985     depends on several factors not available here like, e.g. if 8-bit
11986     comparisons can be used in the if-else tree or not, on the
11987     range of the case values, if the case value can be reused, on the
11988     register allocation, etc.  '7' appears to be a good choice.  */
11989
11990  return 7;
11991}
11992
11993
11994/* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'.  */
11995
11996static machine_mode
11997avr_addr_space_address_mode (addr_space_t as)
11998{
11999  return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
12000}
12001
12002
12003/* Implement `TARGET_ADDR_SPACE_POINTER_MODE'.  */
12004
12005static machine_mode
12006avr_addr_space_pointer_mode (addr_space_t as)
12007{
12008  return avr_addr_space_address_mode (as);
12009}
12010
12011
12012/* Helper for following function.  */
12013
12014static bool
12015avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
12016{
12017  gcc_assert (REG_P (reg));
12018
12019  if (strict)
12020    {
12021      return REGNO (reg) == REG_Z;
12022    }
12023
12024  /* Avoid combine to propagate hard regs.  */
12025
12026  if (can_create_pseudo_p()
12027      && REGNO (reg) < REG_Z)
12028    {
12029      return false;
12030    }
12031
12032  return true;
12033}
12034
12035
12036/* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'.  */
12037
12038static bool
12039avr_addr_space_legitimate_address_p (machine_mode mode, rtx x,
12040                                     bool strict, addr_space_t as)
12041{
12042  bool ok = false;
12043
12044  switch (as)
12045    {
12046    default:
12047      gcc_unreachable();
12048
12049    case ADDR_SPACE_GENERIC:
12050      return avr_legitimate_address_p (mode, x, strict);
12051
12052    case ADDR_SPACE_FLASH:
12053    case ADDR_SPACE_FLASH1:
12054    case ADDR_SPACE_FLASH2:
12055    case ADDR_SPACE_FLASH3:
12056    case ADDR_SPACE_FLASH4:
12057    case ADDR_SPACE_FLASH5:
12058
12059      switch (GET_CODE (x))
12060        {
12061        case REG:
12062          ok = avr_reg_ok_for_pgm_addr (x, strict);
12063          break;
12064
12065        case POST_INC:
12066          ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
12067          break;
12068
12069        default:
12070          break;
12071        }
12072
12073      break; /* FLASH */
12074
12075    case ADDR_SPACE_MEMX:
12076      if (REG_P (x))
12077        ok = (!strict
12078              && can_create_pseudo_p());
12079
12080      if (LO_SUM == GET_CODE (x))
12081        {
12082          rtx hi = XEXP (x, 0);
12083          rtx lo = XEXP (x, 1);
12084
12085          ok = (REG_P (hi)
12086                && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
12087                && REG_P (lo)
12088                && REGNO (lo) == REG_Z);
12089        }
12090
12091      break; /* MEMX */
12092    }
12093
12094  if (avr_log.legitimate_address_p)
12095    {
12096      avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
12097                 "reload_completed=%d reload_in_progress=%d %s:",
12098                 ok, mode, strict, reload_completed, reload_in_progress,
12099                 reg_renumber ? "(reg_renumber)" : "");
12100
12101      if (GET_CODE (x) == PLUS
12102          && REG_P (XEXP (x, 0))
12103          && CONST_INT_P (XEXP (x, 1))
12104          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
12105          && reg_renumber)
12106        {
12107          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
12108                     true_regnum (XEXP (x, 0)));
12109        }
12110
12111      avr_edump ("\n%r\n", x);
12112    }
12113
12114  return ok;
12115}
12116
12117
12118/* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'.  */
12119
12120static rtx
12121avr_addr_space_legitimize_address (rtx x, rtx old_x,
12122                                   machine_mode mode, addr_space_t as)
12123{
12124  if (ADDR_SPACE_GENERIC_P (as))
12125    return avr_legitimize_address (x, old_x, mode);
12126
12127  if (avr_log.legitimize_address)
12128    {
12129      avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
12130    }
12131
12132  return old_x;
12133}
12134
12135
12136/* Implement `TARGET_ADDR_SPACE_CONVERT'.  */
12137
12138static rtx
12139avr_addr_space_convert (rtx src, tree type_from, tree type_to)
12140{
12141  addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
12142  addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));
12143
12144  if (avr_log.progmem)
12145    avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
12146               src, type_from, type_to);
12147
12148  /* Up-casting from 16-bit to 24-bit pointer.  */
12149
12150  if (as_from != ADDR_SPACE_MEMX
12151      && as_to == ADDR_SPACE_MEMX)
12152    {
12153      int msb;
12154      rtx sym = src;
12155      rtx reg = gen_reg_rtx (PSImode);
12156
12157      while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
12158        sym = XEXP (sym, 0);
12159
12160      /* Look at symbol flags:  avr_encode_section_info set the flags
12161         also if attribute progmem was seen so that we get the right
12162         promotion for, e.g. PSTR-like strings that reside in generic space
12163         but are located in flash.  In that case we patch the incoming
12164         address space.  */
12165
12166      if (SYMBOL_REF == GET_CODE (sym)
12167          && ADDR_SPACE_FLASH == AVR_SYMBOL_GET_ADDR_SPACE (sym))
12168        {
12169          as_from = ADDR_SPACE_FLASH;
12170        }
12171
12172      /* Linearize memory: RAM has bit 23 set.  */
12173
12174      msb = ADDR_SPACE_GENERIC_P (as_from)
12175        ? 0x80
12176        : avr_addrspace[as_from].segment;
12177
12178      src = force_reg (Pmode, src);
12179
12180      emit_insn (msb == 0
12181                 ? gen_zero_extendhipsi2 (reg, src)
12182                 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));
12183
12184      return reg;
12185    }
12186
12187  /* Down-casting from 24-bit to 16-bit throws away the high byte.  */
12188
12189  if (as_from == ADDR_SPACE_MEMX
12190      && as_to != ADDR_SPACE_MEMX)
12191    {
12192      rtx new_src = gen_reg_rtx (Pmode);
12193
12194      src = force_reg (PSImode, src);
12195
12196      emit_move_insn (new_src,
12197                      simplify_gen_subreg (Pmode, src, PSImode, 0));
12198      return new_src;
12199    }
12200
12201  return src;
12202}
12203
12204
12205/* Implement `TARGET_ADDR_SPACE_SUBSET_P'.  */
12206
12207static bool
12208avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
12209                         addr_space_t superset ATTRIBUTE_UNUSED)
12210{
12211  /* Allow any kind of pointer mess.  */
12212
12213  return true;
12214}
12215
12216
12217/* Implement `TARGET_CONVERT_TO_TYPE'.  */
12218
12219static tree
12220avr_convert_to_type (tree type, tree expr)
12221{
12222  /* Print a diagnose for pointer conversion that changes the address
12223     space of the pointer target to a non-enclosing address space,
12224     provided -Waddr-space-convert is on.
12225
12226     FIXME: Filter out cases where the target object is known to
12227            be located in the right memory, like in
12228
12229                (const __flash*) PSTR ("text")
12230
12231            Also try to distinguish between explicit casts requested by
12232            the user and implicit casts like
12233
12234                void f (const __flash char*);
12235
12236                void g (const char *p)
12237                {
12238                    f ((const __flash*) p);
12239                }
12240
12241            under the assumption that an explicit casts means that the user
12242            knows what he is doing, e.g. interface with PSTR or old style
12243            code with progmem and pgm_read_xxx.
12244  */
12245
12246  if (avr_warn_addr_space_convert
12247      && expr != error_mark_node
12248      && POINTER_TYPE_P (type)
12249      && POINTER_TYPE_P (TREE_TYPE (expr)))
12250    {
12251      addr_space_t as_old = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr)));
12252      addr_space_t as_new = TYPE_ADDR_SPACE (TREE_TYPE (type));
12253
12254      if (avr_log.progmem)
12255        avr_edump ("%?: type = %t\nexpr = %t\n\n", type, expr);
12256
12257      if (as_new != ADDR_SPACE_MEMX
12258          && as_new != as_old)
12259        {
12260          location_t loc = EXPR_LOCATION (expr);
12261          const char *name_old = avr_addrspace[as_old].name;
12262          const char *name_new = avr_addrspace[as_new].name;
12263
12264          warning (OPT_Waddr_space_convert,
12265                   "conversion from address space %qs to address space %qs",
12266                   ADDR_SPACE_GENERIC_P (as_old) ? "generic" : name_old,
12267                   ADDR_SPACE_GENERIC_P (as_new) ? "generic" : name_new);
12268
12269          return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, expr);
12270        }
12271    }
12272
12273  return NULL_TREE;
12274}
12275
12276
12277/* PR63633: The middle-end might come up with hard regs as input operands.
12278
12279   RMASK is a bit mask representing a subset of hard registers R0...R31:
12280   Rn is an element of that set iff bit n of RMASK is set.
12281   OPMASK describes a subset of OP[]:  If bit n of OPMASK is 1 then
12282   OP[n] has to be fixed; otherwise OP[n] is left alone.
12283
12284   For each element of OPMASK which is a hard register overlapping RMASK,
12285   replace OP[n] with a newly created pseudo register
12286
12287   HREG == 0:  Also emit a move insn that copies the contents of that
12288               hard register into the new pseudo.
12289
12290   HREG != 0:  Also set HREG[n] to the hard register.  */
12291
12292static void
12293avr_fix_operands (rtx *op, rtx *hreg, unsigned opmask, unsigned rmask)
12294{
12295  for (; opmask; opmask >>= 1, op++)
12296    {
12297      rtx reg = *op;
12298
12299      if (hreg)
12300        *hreg = NULL_RTX;
12301
12302      if ((opmask & 1)
12303          && REG_P (reg)
12304          && REGNO (reg) < FIRST_PSEUDO_REGISTER
12305          // This hard-reg overlaps other prohibited hard regs?
12306          && (rmask & regmask (GET_MODE (reg), REGNO (reg))))
12307        {
12308          *op = gen_reg_rtx (GET_MODE (reg));
12309          if (hreg == NULL)
12310            emit_move_insn (*op, reg);
12311          else
12312            *hreg = reg;
12313        }
12314
12315      if (hreg)
12316        hreg++;
12317    }
12318}
12319
12320
12321void
12322avr_fix_inputs (rtx *op, unsigned opmask, unsigned rmask)
12323{
12324  avr_fix_operands (op, NULL, opmask, rmask);
12325}
12326
12327
12328/* Helper for the function below:  If bit n of MASK is set and
12329   HREG[n] != NULL, then emit a move insn to copy OP[n] to HREG[n].
12330   Otherwise do nothing for that n.  Return TRUE.  */
12331
12332static bool
12333avr_move_fixed_operands (rtx *op, rtx *hreg, unsigned mask)
12334{
12335  for (; mask; mask >>= 1, op++, hreg++)
12336    if ((mask & 1)
12337        && *hreg)
12338      emit_move_insn (*hreg, *op);
12339
12340  return true;
12341}
12342
12343
12344/* PR63633: The middle-end might come up with hard regs as output operands.
12345
12346   GEN is a sequence generating function like gen_mulsi3 with 3 operands OP[].
12347   RMASK is a bit mask representing a subset of hard registers R0...R31:
12348   Rn is an element of that set iff bit n of RMASK is set.
12349   OPMASK describes a subset of OP[]:  If bit n of OPMASK is 1 then
12350   OP[n] has to be fixed; otherwise OP[n] is left alone.
12351
12352   Emit the insn sequence as generated by GEN() with all elements of OPMASK
12353   which are hard registers overlapping RMASK replaced by newly created
12354   pseudo registers.  After the sequence has been emitted, emit insns that
12355   move the contents of respective pseudos to their hard regs.  */
12356
12357bool
12358avr_emit3_fix_outputs (rtx (*gen)(rtx,rtx,rtx), rtx *op,
12359                       unsigned opmask, unsigned rmask)
12360{
12361  const int n = 3;
12362  rtx hreg[n];
12363
12364  /* It is letigimate for GEN to call this function, and in order not to
12365     get self-recursive we use the following static kludge.  This is the
12366     only way not to duplicate all expanders and to avoid ugly and
12367     hard-to-maintain C-code instead of the much more appreciated RTL
12368     representation as supplied by define_expand.  */
12369  static bool lock = false;
12370
12371  gcc_assert (opmask < (1u << n));
12372
12373  if (lock)
12374    return false;
12375
12376  avr_fix_operands (op, hreg, opmask, rmask);
12377
12378  lock = true;
12379  emit_insn (gen (op[0], op[1], op[2]));
12380  lock = false;
12381
12382  return avr_move_fixed_operands (op, hreg, opmask);
12383}
12384
12385
12386/* Worker function for movmemhi expander.
12387   XOP[0]  Destination as MEM:BLK
12388   XOP[1]  Source      "     "
12389   XOP[2]  # Bytes to copy
12390
12391   Return TRUE  if the expansion is accomplished.
12392   Return FALSE if the operand compination is not supported.  */
12393
12394bool
12395avr_emit_movmemhi (rtx *xop)
12396{
12397  HOST_WIDE_INT count;
12398  machine_mode loop_mode;
12399  addr_space_t as = MEM_ADDR_SPACE (xop[1]);
12400  rtx loop_reg, addr1, a_src, a_dest, insn, xas;
12401  rtx a_hi8 = NULL_RTX;
12402
12403  if (avr_mem_flash_p (xop[0]))
12404    return false;
12405
12406  if (!CONST_INT_P (xop[2]))
12407    return false;
12408
12409  count = INTVAL (xop[2]);
12410  if (count <= 0)
12411    return false;
12412
12413  a_src  = XEXP (xop[1], 0);
12414  a_dest = XEXP (xop[0], 0);
12415
12416  if (PSImode == GET_MODE (a_src))
12417    {
12418      gcc_assert (as == ADDR_SPACE_MEMX);
12419
12420      loop_mode = (count < 0x100) ? QImode : HImode;
12421      loop_reg = gen_rtx_REG (loop_mode, 24);
12422      emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));
12423
12424      addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
12425      a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
12426    }
12427  else
12428    {
12429      int segment = avr_addrspace[as].segment;
12430
12431      if (segment
12432          && avr_n_flash > 1)
12433        {
12434          a_hi8 = GEN_INT (segment);
12435          emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
12436        }
12437      else if (!ADDR_SPACE_GENERIC_P (as))
12438        {
12439          as = ADDR_SPACE_FLASH;
12440        }
12441
12442      addr1 = a_src;
12443
12444      loop_mode = (count <= 0x100) ? QImode : HImode;
12445      loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
12446    }
12447
12448  xas = GEN_INT (as);
12449
12450  /* FIXME: Register allocator might come up with spill fails if it is left
12451        on its own.  Thus, we allocate the pointer registers by hand:
12452        Z = source address
12453        X = destination address  */
12454
12455  emit_move_insn (lpm_addr_reg_rtx, addr1);
12456  emit_move_insn (gen_rtx_REG (HImode, REG_X), a_dest);
12457
12458  /* FIXME: Register allocator does a bad job and might spill address
12459        register(s) inside the loop leading to additional move instruction
12460        to/from stack which could clobber tmp_reg.  Thus, do *not* emit
12461        load and store as separate insns.  Instead, we perform the copy
12462        by means of one monolithic insn.  */
12463
12464  gcc_assert (TMP_REGNO == LPM_REGNO);
12465
12466  if (as != ADDR_SPACE_MEMX)
12467    {
12468      /* Load instruction ([E]LPM or LD) is known at compile time:
12469         Do the copy-loop inline.  */
12470
12471      rtx (*fun) (rtx, rtx, rtx)
12472        = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;
12473
12474      insn = fun (xas, loop_reg, loop_reg);
12475    }
12476  else
12477    {
12478      rtx (*fun) (rtx, rtx)
12479        = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;
12480
12481      emit_move_insn (gen_rtx_REG (QImode, 23), a_hi8);
12482
12483      insn = fun (xas, GEN_INT (avr_addr.rampz));
12484    }
12485
12486  set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
12487  emit_insn (insn);
12488
12489  return true;
12490}
12491
12492
12493/* Print assembler for movmem_qi, movmem_hi insns...
12494       $0     : Address Space
12495       $1, $2 : Loop register
12496       Z      : Source address
12497       X      : Destination address
12498*/
12499
12500const char*
12501avr_out_movmem (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
12502{
12503  addr_space_t as = (addr_space_t) INTVAL (op[0]);
12504  machine_mode loop_mode = GET_MODE (op[1]);
12505  bool sbiw_p = test_hard_reg_class (ADDW_REGS, op[1]);
12506  rtx xop[3];
12507
12508  if (plen)
12509    *plen = 0;
12510
12511  xop[0] = op[0];
12512  xop[1] = op[1];
12513  xop[2] = tmp_reg_rtx;
12514
12515  /* Loop label */
12516
12517  avr_asm_len ("0:", xop, plen, 0);
12518
12519  /* Load with post-increment */
12520
12521  switch (as)
12522    {
12523    default:
12524      gcc_unreachable();
12525
12526    case ADDR_SPACE_GENERIC:
12527
12528      avr_asm_len ("ld %2,Z+", xop, plen, 1);
12529      break;
12530
12531    case ADDR_SPACE_FLASH:
12532
12533      if (AVR_HAVE_LPMX)
12534        avr_asm_len ("lpm %2,Z+", xop, plen, 1);
12535      else
12536        avr_asm_len ("lpm" CR_TAB
12537                     "adiw r30,1", xop, plen, 2);
12538      break;
12539
12540    case ADDR_SPACE_FLASH1:
12541    case ADDR_SPACE_FLASH2:
12542    case ADDR_SPACE_FLASH3:
12543    case ADDR_SPACE_FLASH4:
12544    case ADDR_SPACE_FLASH5:
12545
12546      if (AVR_HAVE_ELPMX)
12547        avr_asm_len ("elpm %2,Z+", xop, plen, 1);
12548      else
12549        avr_asm_len ("elpm" CR_TAB
12550                     "adiw r30,1", xop, plen, 2);
12551      break;
12552    }
12553
12554  /* Store with post-increment */
12555
12556  avr_asm_len ("st X+,%2", xop, plen, 1);
12557
12558  /* Decrement loop-counter and set Z-flag */
12559
12560  if (QImode == loop_mode)
12561    {
12562      avr_asm_len ("dec %1", xop, plen, 1);
12563    }
12564  else if (sbiw_p)
12565    {
12566      avr_asm_len ("sbiw %1,1", xop, plen, 1);
12567    }
12568  else
12569    {
12570      avr_asm_len ("subi %A1,1" CR_TAB
12571                   "sbci %B1,0", xop, plen, 2);
12572    }
12573
12574  /* Loop until zero */
12575
12576  return avr_asm_len ("brne 0b", xop, plen, 1);
12577}
12578
12579
12580
12581/* Helper for __builtin_avr_delay_cycles */
12582
12583static rtx
12584avr_mem_clobber (void)
12585{
12586  rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
12587  MEM_VOLATILE_P (mem) = 1;
12588  return mem;
12589}
12590
12591static void
12592avr_expand_delay_cycles (rtx operands0)
12593{
12594  unsigned HOST_WIDE_INT cycles = UINTVAL (operands0) & GET_MODE_MASK (SImode);
12595  unsigned HOST_WIDE_INT cycles_used;
12596  unsigned HOST_WIDE_INT loop_count;
12597
12598  if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
12599    {
12600      loop_count = ((cycles - 9) / 6) + 1;
12601      cycles_used = ((loop_count - 1) * 6) + 9;
12602      emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode),
12603                                     avr_mem_clobber()));
12604      cycles -= cycles_used;
12605    }
12606
12607  if (IN_RANGE (cycles, 262145, 83886081))
12608    {
12609      loop_count = ((cycles - 7) / 5) + 1;
12610      if (loop_count > 0xFFFFFF)
12611        loop_count = 0xFFFFFF;
12612      cycles_used = ((loop_count - 1) * 5) + 7;
12613      emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode),
12614                                     avr_mem_clobber()));
12615      cycles -= cycles_used;
12616    }
12617
12618  if (IN_RANGE (cycles, 768, 262144))
12619    {
12620      loop_count = ((cycles - 5) / 4) + 1;
12621      if (loop_count > 0xFFFF)
12622        loop_count = 0xFFFF;
12623      cycles_used = ((loop_count - 1) * 4) + 5;
12624      emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode),
12625                                     avr_mem_clobber()));
12626      cycles -= cycles_used;
12627    }
12628
12629  if (IN_RANGE (cycles, 6, 767))
12630    {
12631      loop_count = cycles / 3;
12632      if (loop_count > 255)
12633        loop_count = 255;
12634      cycles_used = loop_count * 3;
12635      emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode),
12636                                     avr_mem_clobber()));
12637      cycles -= cycles_used;
12638      }
12639
12640  while (cycles >= 2)
12641    {
12642      emit_insn (gen_nopv (GEN_INT(2)));
12643      cycles -= 2;
12644    }
12645
12646  if (cycles == 1)
12647    {
12648      emit_insn (gen_nopv (GEN_INT(1)));
12649      cycles--;
12650    }
12651}
12652
12653
12654/* Compute the image of x under f, i.e. perform   x --> f(x)    */
12655
12656static int
12657avr_map (unsigned int f, int x)
12658{
12659  return x < 8 ? (f >> (4 * x)) & 0xf : 0;
12660}
12661
12662
12663/* Return some metrics of map A.  */
12664
12665enum
12666  {
12667    /* Number of fixed points in { 0 ... 7 } */
12668    MAP_FIXED_0_7,
12669
12670    /* Size of preimage of non-fixed points in { 0 ... 7 } */
12671    MAP_NONFIXED_0_7,
12672
12673    /* Mask representing the fixed points in { 0 ... 7 } */
12674    MAP_MASK_FIXED_0_7,
12675
12676    /* Size of the preimage of { 0 ... 7 } */
12677    MAP_PREIMAGE_0_7,
12678
12679    /* Mask that represents the preimage of { f } */
12680    MAP_MASK_PREIMAGE_F
12681  };
12682
12683static unsigned
12684avr_map_metric (unsigned int a, int mode)
12685{
12686  unsigned i, metric = 0;
12687
12688  for (i = 0; i < 8; i++)
12689    {
12690      unsigned ai = avr_map (a, i);
12691
12692      if (mode == MAP_FIXED_0_7)
12693        metric += ai == i;
12694      else if (mode == MAP_NONFIXED_0_7)
12695        metric += ai < 8 && ai != i;
12696      else if (mode == MAP_MASK_FIXED_0_7)
12697        metric |= ((unsigned) (ai == i)) << i;
12698      else if (mode == MAP_PREIMAGE_0_7)
12699        metric += ai < 8;
12700      else if (mode == MAP_MASK_PREIMAGE_F)
12701        metric |= ((unsigned) (ai == 0xf)) << i;
12702      else
12703        gcc_unreachable();
12704    }
12705
12706  return metric;
12707}
12708
12709
12710/* Return true if IVAL has a 0xf in its hexadecimal representation
12711   and false, otherwise.  Only nibbles 0..7 are taken into account.
12712   Used as constraint helper for C0f and Cxf.  */
12713
12714bool
12715avr_has_nibble_0xf (rtx ival)
12716{
12717  unsigned int map = UINTVAL (ival) & GET_MODE_MASK (SImode);
12718  return 0 != avr_map_metric (map, MAP_MASK_PREIMAGE_F);
12719}
12720
12721
12722/* We have a set of bits that are mapped by a function F.
12723   Try to decompose F by means of a second function G so that
12724
12725      F = F o G^-1 o G
12726
12727   and
12728
12729      cost (F o G^-1) + cost (G)  <  cost (F)
12730
12731   Example:  Suppose builtin insert_bits supplies us with the map
12732   F = 0x3210ffff.  Instead of doing 4 bit insertions to get the high
12733   nibble of the result, we can just as well rotate the bits before inserting
12734   them and use the map 0x7654ffff which is cheaper than the original map.
12735   For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff.  */
12736
12737typedef struct
12738{
12739  /* tree code of binary function G */
12740  enum tree_code code;
12741
12742  /* The constant second argument of G */
12743  int arg;
12744
12745  /* G^-1, the inverse of G (*, arg) */
12746  unsigned ginv;
12747
12748  /* The cost of appplying G (*, arg) */
12749  int cost;
12750
12751  /* The composition F o G^-1 (*, arg) for some function F */
12752  unsigned int map;
12753
12754  /* For debug purpose only */
12755  const char *str;
12756} avr_map_op_t;
12757
12758static const avr_map_op_t avr_map_op[] =
12759  {
12760    { LROTATE_EXPR, 0, 0x76543210, 0, 0, "id" },
12761    { LROTATE_EXPR, 1, 0x07654321, 2, 0, "<<<" },
12762    { LROTATE_EXPR, 2, 0x10765432, 4, 0, "<<<" },
12763    { LROTATE_EXPR, 3, 0x21076543, 4, 0, "<<<" },
12764    { LROTATE_EXPR, 4, 0x32107654, 1, 0, "<<<" },
12765    { LROTATE_EXPR, 5, 0x43210765, 3, 0, "<<<" },
12766    { LROTATE_EXPR, 6, 0x54321076, 5, 0, "<<<" },
12767    { LROTATE_EXPR, 7, 0x65432107, 3, 0, "<<<" },
12768    { RSHIFT_EXPR, 1, 0x6543210c, 1, 0, ">>" },
12769    { RSHIFT_EXPR, 1, 0x7543210c, 1, 0, ">>" },
12770    { RSHIFT_EXPR, 2, 0x543210cc, 2, 0, ">>" },
12771    { RSHIFT_EXPR, 2, 0x643210cc, 2, 0, ">>" },
12772    { RSHIFT_EXPR, 2, 0x743210cc, 2, 0, ">>" },
12773    { LSHIFT_EXPR, 1, 0xc7654321, 1, 0, "<<" },
12774    { LSHIFT_EXPR, 2, 0xcc765432, 2, 0, "<<" }
12775  };
12776
12777
12778/* Try to decompose F as F = (F o G^-1) o G as described above.
12779   The result is a struct representing F o G^-1 and G.
12780   If result.cost < 0 then such a decomposition does not exist.  */
12781
12782static avr_map_op_t
12783avr_map_decompose (unsigned int f, const avr_map_op_t *g, bool val_const_p)
12784{
12785  int i;
12786  bool val_used_p = 0 != avr_map_metric (f, MAP_MASK_PREIMAGE_F);
12787  avr_map_op_t f_ginv = *g;
12788  unsigned int ginv = g->ginv;
12789
12790  f_ginv.cost = -1;
12791
12792  /* Step 1:  Computing F o G^-1  */
12793
12794  for (i = 7; i >= 0; i--)
12795    {
12796      int x = avr_map (f, i);
12797
12798      if (x <= 7)
12799        {
12800          x = avr_map (ginv, x);
12801
12802          /* The bit is no element of the image of G: no avail (cost = -1)  */
12803
12804          if (x > 7)
12805            return f_ginv;
12806        }
12807
12808      f_ginv.map = (f_ginv.map << 4) + x;
12809    }
12810
12811  /* Step 2:  Compute the cost of the operations.
12812     The overall cost of doing an operation prior to the insertion is
12813      the cost of the insertion plus the cost of the operation.  */
12814
12815  /* Step 2a:  Compute cost of F o G^-1  */
12816
12817  if (0 == avr_map_metric (f_ginv.map, MAP_NONFIXED_0_7))
12818    {
12819      /* The mapping consists only of fixed points and can be folded
12820         to AND/OR logic in the remainder.  Reasonable cost is 3. */
12821
12822      f_ginv.cost = 2 + (val_used_p && !val_const_p);
12823    }
12824  else
12825    {
12826      rtx xop[4];
12827
12828      /* Get the cost of the insn by calling the output worker with some
12829         fake values.  Mimic effect of reloading xop[3]: Unused operands
12830         are mapped to 0 and used operands are reloaded to xop[0].  */
12831
12832      xop[0] = all_regs_rtx[24];
12833      xop[1] = gen_int_mode (f_ginv.map, SImode);
12834      xop[2] = all_regs_rtx[25];
12835      xop[3] = val_used_p ? xop[0] : const0_rtx;
12836
12837      avr_out_insert_bits (xop, &f_ginv.cost);
12838
12839      f_ginv.cost += val_const_p && val_used_p ? 1 : 0;
12840    }
12841
12842  /* Step 2b:  Add cost of G  */
12843
12844  f_ginv.cost += g->cost;
12845
12846  if (avr_log.builtin)
12847    avr_edump (" %s%d=%d", g->str, g->arg, f_ginv.cost);
12848
12849  return f_ginv;
12850}
12851
12852
12853/* Insert bits from XOP[1] into XOP[0] according to MAP.
12854   XOP[0] and XOP[1] don't overlap.
12855   If FIXP_P = true:  Move all bits according to MAP using BLD/BST sequences.
12856   If FIXP_P = false: Just move the bit if its position in the destination
12857   is different to its source position.  */
12858
12859static void
12860avr_move_bits (rtx *xop, unsigned int map, bool fixp_p, int *plen)
12861{
12862  int bit_dest, b;
12863
12864  /* T-flag contains this bit of the source, i.e. of XOP[1]  */
12865  int t_bit_src = -1;
12866
12867  /* We order the operations according to the requested source bit b.  */
12868
12869  for (b = 0; b < 8; b++)
12870    for (bit_dest = 0; bit_dest < 8; bit_dest++)
12871      {
12872        int bit_src = avr_map (map, bit_dest);
12873
12874        if (b != bit_src
12875            || bit_src >= 8
12876            /* Same position: No need to copy as requested by FIXP_P.  */
12877            || (bit_dest == bit_src && !fixp_p))
12878          continue;
12879
12880        if (t_bit_src != bit_src)
12881          {
12882            /* Source bit is not yet in T: Store it to T.  */
12883
12884            t_bit_src = bit_src;
12885
12886            xop[3] = GEN_INT (bit_src);
12887            avr_asm_len ("bst %T1%T3", xop, plen, 1);
12888          }
12889
12890        /* Load destination bit with T.  */
12891
12892        xop[3] = GEN_INT (bit_dest);
12893        avr_asm_len ("bld %T0%T3", xop, plen, 1);
12894      }
12895}
12896
12897
12898/* PLEN == 0: Print assembler code for `insert_bits'.
12899   PLEN != 0: Compute code length in bytes.
12900
12901   OP[0]:  Result
12902   OP[1]:  The mapping composed of nibbles. If nibble no. N is
12903           0:   Bit N of result is copied from bit OP[2].0
12904           ...  ...
12905           7:   Bit N of result is copied from bit OP[2].7
12906           0xf: Bit N of result is copied from bit OP[3].N
12907   OP[2]:  Bits to be inserted
12908   OP[3]:  Target value  */
12909
12910const char*
12911avr_out_insert_bits (rtx *op, int *plen)
12912{
12913  unsigned int map = UINTVAL (op[1]) & GET_MODE_MASK (SImode);
12914  unsigned mask_fixed;
12915  bool fixp_p = true;
12916  rtx xop[4];
12917
12918  xop[0] = op[0];
12919  xop[1] = op[2];
12920  xop[2] = op[3];
12921
12922  gcc_assert (REG_P (xop[2]) || CONST_INT_P (xop[2]));
12923
12924  if (plen)
12925    *plen = 0;
12926  else if (flag_print_asm_name)
12927    fprintf (asm_out_file, ASM_COMMENT_START "map = 0x%08x\n", map);
12928
12929  /* If MAP has fixed points it might be better to initialize the result
12930     with the bits to be inserted instead of moving all bits by hand.  */
12931
12932  mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);
12933
12934  if (REGNO (xop[0]) == REGNO (xop[1]))
12935    {
12936      /* Avoid early-clobber conflicts */
12937
12938      avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
12939      xop[1] = tmp_reg_rtx;
12940      fixp_p = false;
12941    }
12942
12943  if (avr_map_metric (map, MAP_MASK_PREIMAGE_F))
12944    {
12945      /* XOP[2] is used and reloaded to XOP[0] already */
12946
12947      int n_fix = 0, n_nofix = 0;
12948
12949      gcc_assert (REG_P (xop[2]));
12950
12951      /* Get the code size of the bit insertions; once with all bits
12952         moved and once with fixed points omitted.  */
12953
12954      avr_move_bits (xop, map, true, &n_fix);
12955      avr_move_bits (xop, map, false, &n_nofix);
12956
12957      if (fixp_p && n_fix - n_nofix > 3)
12958        {
12959          xop[3] = gen_int_mode (~mask_fixed, QImode);
12960
12961          avr_asm_len ("eor %0,%1"   CR_TAB
12962                       "andi %0,%3"  CR_TAB
12963                       "eor %0,%1", xop, plen, 3);
12964          fixp_p = false;
12965        }
12966    }
12967  else
12968    {
12969      /* XOP[2] is unused */
12970
12971      if (fixp_p && mask_fixed)
12972        {
12973          avr_asm_len ("mov %0,%1", xop, plen, 1);
12974          fixp_p = false;
12975        }
12976    }
12977
12978  /* Move/insert remaining bits.  */
12979
12980  avr_move_bits (xop, map, fixp_p, plen);
12981
12982  return "";
12983}
12984
12985
12986/* IDs for all the AVR builtins.  */
12987
12988enum avr_builtin_id
12989  {
12990#define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME)  \
12991    AVR_BUILTIN_ ## NAME,
12992#include "builtins.def"
12993#undef DEF_BUILTIN
12994
12995    AVR_BUILTIN_COUNT
12996  };
12997
12998struct GTY(()) avr_builtin_description
12999{
13000  enum insn_code icode;
13001  int n_args;
13002  tree fndecl;
13003};
13004
13005
13006/* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
13007   that a built-in's ID can be used to access the built-in by means of
13008   avr_bdesc[ID]  */
13009
13010static GTY(()) struct avr_builtin_description
13011avr_bdesc[AVR_BUILTIN_COUNT] =
13012  {
13013#define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, LIBNAME)         \
13014    { (enum insn_code) CODE_FOR_ ## ICODE, N_ARGS, NULL_TREE },
13015#include "builtins.def"
13016#undef DEF_BUILTIN
13017  };
13018
13019
13020/* Implement `TARGET_BUILTIN_DECL'.  */
13021
13022static tree
13023avr_builtin_decl (unsigned id, bool initialize_p ATTRIBUTE_UNUSED)
13024{
13025  if (id < AVR_BUILTIN_COUNT)
13026    return avr_bdesc[id].fndecl;
13027
13028  return error_mark_node;
13029}
13030
13031
13032static void
13033avr_init_builtin_int24 (void)
13034{
13035  tree int24_type  = make_signed_type (GET_MODE_BITSIZE (PSImode));
13036  tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
13037
13038  lang_hooks.types.register_builtin_type (int24_type, "__int24");
13039  lang_hooks.types.register_builtin_type (uint24_type, "__uint24");
13040}
13041
13042
13043/* Implement `TARGET_INIT_BUILTINS' */
13044/* Set up all builtin functions for this target.  */
13045
13046static void
13047avr_init_builtins (void)
13048{
13049  tree void_ftype_void
13050    = build_function_type_list (void_type_node, NULL_TREE);
13051  tree uchar_ftype_uchar
13052    = build_function_type_list (unsigned_char_type_node,
13053                                unsigned_char_type_node,
13054                                NULL_TREE);
13055  tree uint_ftype_uchar_uchar
13056    = build_function_type_list (unsigned_type_node,
13057                                unsigned_char_type_node,
13058                                unsigned_char_type_node,
13059                                NULL_TREE);
13060  tree int_ftype_char_char
13061    = build_function_type_list (integer_type_node,
13062                                char_type_node,
13063                                char_type_node,
13064                                NULL_TREE);
13065  tree int_ftype_char_uchar
13066    = build_function_type_list (integer_type_node,
13067                                char_type_node,
13068                                unsigned_char_type_node,
13069                                NULL_TREE);
13070  tree void_ftype_ulong
13071    = build_function_type_list (void_type_node,
13072                                long_unsigned_type_node,
13073                                NULL_TREE);
13074
13075  tree uchar_ftype_ulong_uchar_uchar
13076    = build_function_type_list (unsigned_char_type_node,
13077                                long_unsigned_type_node,
13078                                unsigned_char_type_node,
13079                                unsigned_char_type_node,
13080                                NULL_TREE);
13081
13082  tree const_memx_void_node
13083    = build_qualified_type (void_type_node,
13084                            TYPE_QUAL_CONST
13085                            | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX));
13086
13087  tree const_memx_ptr_type_node
13088    = build_pointer_type_for_mode (const_memx_void_node, PSImode, false);
13089
13090  tree char_ftype_const_memx_ptr
13091    = build_function_type_list (char_type_node,
13092                                const_memx_ptr_type_node,
13093                                NULL);
13094
13095#define ITYP(T)                                                         \
13096  lang_hooks.types.type_for_size (TYPE_PRECISION (T), TYPE_UNSIGNED (T))
13097
13098#define FX_FTYPE_FX(fx)                                                 \
13099  tree fx##r_ftype_##fx##r                                              \
13100    = build_function_type_list (node_##fx##r, node_##fx##r, NULL);      \
13101  tree fx##k_ftype_##fx##k                                              \
13102    = build_function_type_list (node_##fx##k, node_##fx##k, NULL)
13103
13104#define FX_FTYPE_FX_INT(fx)                                             \
13105  tree fx##r_ftype_##fx##r_int                                          \
13106    = build_function_type_list (node_##fx##r, node_##fx##r,             \
13107                                integer_type_node, NULL);               \
13108  tree fx##k_ftype_##fx##k_int                                          \
13109    = build_function_type_list (node_##fx##k, node_##fx##k,             \
13110                                integer_type_node, NULL)
13111
13112#define INT_FTYPE_FX(fx)                                                \
13113  tree int_ftype_##fx##r                                                \
13114    = build_function_type_list (integer_type_node, node_##fx##r, NULL); \
13115  tree int_ftype_##fx##k                                                \
13116    = build_function_type_list (integer_type_node, node_##fx##k, NULL)
13117
13118#define INTX_FTYPE_FX(fx)                                               \
13119  tree int##fx##r_ftype_##fx##r                                         \
13120    = build_function_type_list (ITYP (node_##fx##r), node_##fx##r, NULL); \
13121  tree int##fx##k_ftype_##fx##k                                         \
13122    = build_function_type_list (ITYP (node_##fx##k), node_##fx##k, NULL)
13123
13124#define FX_FTYPE_INTX(fx)                                               \
13125  tree fx##r_ftype_int##fx##r                                           \
13126    = build_function_type_list (node_##fx##r, ITYP (node_##fx##r), NULL); \
13127  tree fx##k_ftype_int##fx##k                                           \
13128    = build_function_type_list (node_##fx##k, ITYP (node_##fx##k), NULL)
13129
13130  tree node_hr = short_fract_type_node;
13131  tree node_nr = fract_type_node;
13132  tree node_lr = long_fract_type_node;
13133  tree node_llr = long_long_fract_type_node;
13134
13135  tree node_uhr = unsigned_short_fract_type_node;
13136  tree node_unr = unsigned_fract_type_node;
13137  tree node_ulr = unsigned_long_fract_type_node;
13138  tree node_ullr = unsigned_long_long_fract_type_node;
13139
13140  tree node_hk = short_accum_type_node;
13141  tree node_nk = accum_type_node;
13142  tree node_lk = long_accum_type_node;
13143  tree node_llk = long_long_accum_type_node;
13144
13145  tree node_uhk = unsigned_short_accum_type_node;
13146  tree node_unk = unsigned_accum_type_node;
13147  tree node_ulk = unsigned_long_accum_type_node;
13148  tree node_ullk = unsigned_long_long_accum_type_node;
13149
13150
13151  /* For absfx builtins.  */
13152
13153  FX_FTYPE_FX (h);
13154  FX_FTYPE_FX (n);
13155  FX_FTYPE_FX (l);
13156  FX_FTYPE_FX (ll);
13157
13158  /* For roundfx builtins.  */
13159
13160  FX_FTYPE_FX_INT (h);
13161  FX_FTYPE_FX_INT (n);
13162  FX_FTYPE_FX_INT (l);
13163  FX_FTYPE_FX_INT (ll);
13164
13165  FX_FTYPE_FX_INT (uh);
13166  FX_FTYPE_FX_INT (un);
13167  FX_FTYPE_FX_INT (ul);
13168  FX_FTYPE_FX_INT (ull);
13169
13170  /* For countlsfx builtins.  */
13171
13172  INT_FTYPE_FX (h);
13173  INT_FTYPE_FX (n);
13174  INT_FTYPE_FX (l);
13175  INT_FTYPE_FX (ll);
13176
13177  INT_FTYPE_FX (uh);
13178  INT_FTYPE_FX (un);
13179  INT_FTYPE_FX (ul);
13180  INT_FTYPE_FX (ull);
13181
13182  /* For bitsfx builtins.  */
13183
13184  INTX_FTYPE_FX (h);
13185  INTX_FTYPE_FX (n);
13186  INTX_FTYPE_FX (l);
13187  INTX_FTYPE_FX (ll);
13188
13189  INTX_FTYPE_FX (uh);
13190  INTX_FTYPE_FX (un);
13191  INTX_FTYPE_FX (ul);
13192  INTX_FTYPE_FX (ull);
13193
13194  /* For fxbits builtins.  */
13195
13196  FX_FTYPE_INTX (h);
13197  FX_FTYPE_INTX (n);
13198  FX_FTYPE_INTX (l);
13199  FX_FTYPE_INTX (ll);
13200
13201  FX_FTYPE_INTX (uh);
13202  FX_FTYPE_INTX (un);
13203  FX_FTYPE_INTX (ul);
13204  FX_FTYPE_INTX (ull);
13205
13206
13207#define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME)                  \
13208  {                                                                     \
13209    int id = AVR_BUILTIN_ ## NAME;                                      \
13210    const char *Name = "__builtin_avr_" #NAME;                          \
13211    char *name = (char*) alloca (1 + strlen (Name));                    \
13212                                                                        \
13213    gcc_assert (id < AVR_BUILTIN_COUNT);                                \
13214    avr_bdesc[id].fndecl                                                \
13215      = add_builtin_function (avr_tolower (name, Name), TYPE, id,       \
13216                              BUILT_IN_MD, LIBNAME, NULL_TREE);         \
13217  }
13218#include "builtins.def"
13219#undef DEF_BUILTIN
13220
13221  avr_init_builtin_int24 ();
13222}
13223
13224
13225/* Subroutine of avr_expand_builtin to expand vanilla builtins
13226   with non-void result and 1 ... 3 arguments.  */
13227
13228static rtx
13229avr_default_expand_builtin (enum insn_code icode, tree exp, rtx target)
13230{
13231  rtx pat, xop[3];
13232  int n, n_args = call_expr_nargs (exp);
13233  machine_mode tmode = insn_data[icode].operand[0].mode;
13234
13235  gcc_assert (n_args >= 1 && n_args <= 3);
13236
13237  if (target == NULL_RTX
13238      || GET_MODE (target) != tmode
13239      || !insn_data[icode].operand[0].predicate (target, tmode))
13240    {
13241      target = gen_reg_rtx (tmode);
13242    }
13243
13244  for (n = 0; n < n_args; n++)
13245    {
13246      tree arg = CALL_EXPR_ARG (exp, n);
13247      rtx op = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
13248      machine_mode opmode = GET_MODE (op);
13249      machine_mode mode = insn_data[icode].operand[n+1].mode;
13250
13251      if ((opmode == SImode || opmode == VOIDmode) && mode == HImode)
13252        {
13253          opmode = HImode;
13254          op = gen_lowpart (HImode, op);
13255        }
13256
13257      /* In case the insn wants input operands in modes different from
13258         the result, abort.  */
13259
13260      gcc_assert (opmode == mode || opmode == VOIDmode);
13261
13262      if (!insn_data[icode].operand[n+1].predicate (op, mode))
13263        op = copy_to_mode_reg (mode, op);
13264
13265      xop[n] = op;
13266    }
13267
13268  switch (n_args)
13269    {
13270    case 1: pat = GEN_FCN (icode) (target, xop[0]); break;
13271    case 2: pat = GEN_FCN (icode) (target, xop[0], xop[1]); break;
13272    case 3: pat = GEN_FCN (icode) (target, xop[0], xop[1], xop[2]); break;
13273
13274    default:
13275      gcc_unreachable();
13276    }
13277
13278  if (pat == NULL_RTX)
13279    return NULL_RTX;
13280
13281  emit_insn (pat);
13282
13283  return target;
13284}
13285
13286
13287/* Implement `TARGET_EXPAND_BUILTIN'.  */
13288/* Expand an expression EXP that calls a built-in function,
13289   with result going to TARGET if that's convenient
13290   (and in mode MODE if that's convenient).
13291   SUBTARGET may be used as the target for computing one of EXP's operands.
13292   IGNORE is nonzero if the value is to be ignored.  */
13293
13294static rtx
13295avr_expand_builtin (tree exp, rtx target,
13296                    rtx subtarget ATTRIBUTE_UNUSED,
13297                    machine_mode mode ATTRIBUTE_UNUSED,
13298                    int ignore)
13299{
13300  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
13301  const char *bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
13302  unsigned int id = DECL_FUNCTION_CODE (fndecl);
13303  const struct avr_builtin_description *d = &avr_bdesc[id];
13304  tree arg0;
13305  rtx op0;
13306
13307  gcc_assert (id < AVR_BUILTIN_COUNT);
13308
13309  switch (id)
13310    {
13311    case AVR_BUILTIN_NOP:
13312      emit_insn (gen_nopv (GEN_INT(1)));
13313      return 0;
13314
13315    case AVR_BUILTIN_DELAY_CYCLES:
13316      {
13317        arg0 = CALL_EXPR_ARG (exp, 0);
13318        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
13319
13320        if (!CONST_INT_P (op0))
13321          error ("%s expects a compile time integer constant", bname);
13322        else
13323          avr_expand_delay_cycles (op0);
13324
13325        return NULL_RTX;
13326      }
13327
13328    case AVR_BUILTIN_INSERT_BITS:
13329      {
13330        arg0 = CALL_EXPR_ARG (exp, 0);
13331        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
13332
13333        if (!CONST_INT_P (op0))
13334          {
13335            error ("%s expects a compile time long integer constant"
13336                   " as first argument", bname);
13337            return target;
13338          }
13339
13340        break;
13341      }
13342
13343    case AVR_BUILTIN_ROUNDHR:   case AVR_BUILTIN_ROUNDUHR:
13344    case AVR_BUILTIN_ROUNDR:    case AVR_BUILTIN_ROUNDUR:
13345    case AVR_BUILTIN_ROUNDLR:   case AVR_BUILTIN_ROUNDULR:
13346    case AVR_BUILTIN_ROUNDLLR:  case AVR_BUILTIN_ROUNDULLR:
13347
13348    case AVR_BUILTIN_ROUNDHK:   case AVR_BUILTIN_ROUNDUHK:
13349    case AVR_BUILTIN_ROUNDK:    case AVR_BUILTIN_ROUNDUK:
13350    case AVR_BUILTIN_ROUNDLK:   case AVR_BUILTIN_ROUNDULK:
13351    case AVR_BUILTIN_ROUNDLLK:  case AVR_BUILTIN_ROUNDULLK:
13352
13353      /* Warn about odd rounding.  Rounding points >= FBIT will have
13354         no effect.  */
13355
13356      if (TREE_CODE (CALL_EXPR_ARG (exp, 1)) != INTEGER_CST)
13357        break;
13358
13359      int rbit = (int) TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1));
13360
13361      if (rbit >= (int) GET_MODE_FBIT (mode))
13362        {
13363          warning (OPT_Wextra, "rounding to %d bits has no effect for "
13364                   "fixed-point value with %d fractional bits",
13365                   rbit, GET_MODE_FBIT (mode));
13366
13367          return expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX, mode,
13368                              EXPAND_NORMAL);
13369        }
13370      else if (rbit <= - (int) GET_MODE_IBIT (mode))
13371        {
13372          warning (0, "rounding result will always be 0");
13373          return CONST0_RTX (mode);
13374        }
13375
13376      /* The rounding points RP satisfies now:  -IBIT < RP < FBIT.
13377
13378         TR 18037 only specifies results for  RP > 0.  However, the
13379         remaining cases of  -IBIT < RP <= 0  can easily be supported
13380         without any additional overhead.  */
13381
13382      break; /* round */
13383    }
13384
13385  /* No fold found and no insn:  Call support function from libgcc.  */
13386
13387  if (d->icode == CODE_FOR_nothing
13388      && DECL_ASSEMBLER_NAME (get_callee_fndecl (exp)) != NULL_TREE)
13389    {
13390      return expand_call (exp, target, ignore);
13391    }
13392
13393  /* No special treatment needed: vanilla expand.  */
13394
13395  gcc_assert (d->icode != CODE_FOR_nothing);
13396  gcc_assert (d->n_args == call_expr_nargs (exp));
13397
13398  if (d->n_args == 0)
13399    {
13400      emit_insn ((GEN_FCN (d->icode)) (target));
13401      return NULL_RTX;
13402    }
13403
13404  return avr_default_expand_builtin (d->icode, exp, target);
13405}
13406
13407
13408/* Helper for `avr_fold_builtin' that folds  absfx (FIXED_CST).  */
13409
13410static tree
13411avr_fold_absfx (tree tval)
13412{
13413  if (FIXED_CST != TREE_CODE (tval))
13414    return NULL_TREE;
13415
13416  /* Our fixed-points have no padding:  Use double_int payload directly.  */
13417
13418  FIXED_VALUE_TYPE fval = TREE_FIXED_CST (tval);
13419  unsigned int bits = GET_MODE_BITSIZE (fval.mode);
13420  double_int ival = fval.data.sext (bits);
13421
13422  if (!ival.is_negative())
13423    return tval;
13424
13425  /* ISO/IEC TR 18037, 7.18a.6.2:  The absfx functions are saturating.  */
13426
13427  fval.data = (ival == double_int::min_value (bits, false).sext (bits))
13428    ? double_int::max_value (bits, false)
13429    : -ival;
13430
13431  return build_fixed (TREE_TYPE (tval), fval);
13432}
13433
13434
13435/* Implement `TARGET_FOLD_BUILTIN'.  */
13436
13437static tree
13438avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
13439                  bool ignore ATTRIBUTE_UNUSED)
13440{
13441  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
13442  tree val_type = TREE_TYPE (TREE_TYPE (fndecl));
13443
13444  if (!optimize)
13445    return NULL_TREE;
13446
13447  switch (fcode)
13448    {
13449    default:
13450      break;
13451
13452    case AVR_BUILTIN_SWAP:
13453      {
13454        return fold_build2 (LROTATE_EXPR, val_type, arg[0],
13455                            build_int_cst (val_type, 4));
13456      }
13457
13458    case AVR_BUILTIN_ABSHR:
13459    case AVR_BUILTIN_ABSR:
13460    case AVR_BUILTIN_ABSLR:
13461    case AVR_BUILTIN_ABSLLR:
13462
13463    case AVR_BUILTIN_ABSHK:
13464    case AVR_BUILTIN_ABSK:
13465    case AVR_BUILTIN_ABSLK:
13466    case AVR_BUILTIN_ABSLLK:
13467      /* GCC is not good with folding ABS for fixed-point.  Do it by hand.  */
13468
13469      return avr_fold_absfx (arg[0]);
13470
13471    case AVR_BUILTIN_BITSHR:    case AVR_BUILTIN_HRBITS:
13472    case AVR_BUILTIN_BITSHK:    case AVR_BUILTIN_HKBITS:
13473    case AVR_BUILTIN_BITSUHR:   case AVR_BUILTIN_UHRBITS:
13474    case AVR_BUILTIN_BITSUHK:   case AVR_BUILTIN_UHKBITS:
13475
13476    case AVR_BUILTIN_BITSR:     case AVR_BUILTIN_RBITS:
13477    case AVR_BUILTIN_BITSK:     case AVR_BUILTIN_KBITS:
13478    case AVR_BUILTIN_BITSUR:    case AVR_BUILTIN_URBITS:
13479    case AVR_BUILTIN_BITSUK:    case AVR_BUILTIN_UKBITS:
13480
13481    case AVR_BUILTIN_BITSLR:    case AVR_BUILTIN_LRBITS:
13482    case AVR_BUILTIN_BITSLK:    case AVR_BUILTIN_LKBITS:
13483    case AVR_BUILTIN_BITSULR:   case AVR_BUILTIN_ULRBITS:
13484    case AVR_BUILTIN_BITSULK:   case AVR_BUILTIN_ULKBITS:
13485
13486    case AVR_BUILTIN_BITSLLR:   case AVR_BUILTIN_LLRBITS:
13487    case AVR_BUILTIN_BITSLLK:   case AVR_BUILTIN_LLKBITS:
13488    case AVR_BUILTIN_BITSULLR:  case AVR_BUILTIN_ULLRBITS:
13489    case AVR_BUILTIN_BITSULLK:  case AVR_BUILTIN_ULLKBITS:
13490
13491      gcc_assert (TYPE_PRECISION (val_type)
13492                  == TYPE_PRECISION (TREE_TYPE (arg[0])));
13493
13494      return build1 (VIEW_CONVERT_EXPR, val_type, arg[0]);
13495
13496    case AVR_BUILTIN_INSERT_BITS:
13497      {
13498        tree tbits = arg[1];
13499        tree tval = arg[2];
13500        tree tmap;
13501        tree map_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
13502        unsigned int map;
13503        bool changed = false;
13504        unsigned i;
13505        avr_map_op_t best_g;
13506
13507        if (TREE_CODE (arg[0]) != INTEGER_CST)
13508          {
13509            /* No constant as first argument: Don't fold this and run into
13510               error in avr_expand_builtin.  */
13511
13512            break;
13513          }
13514
13515        tmap = wide_int_to_tree (map_type, arg[0]);
13516        map = TREE_INT_CST_LOW (tmap);
13517
13518        if (TREE_CODE (tval) != INTEGER_CST
13519            && 0 == avr_map_metric (map, MAP_MASK_PREIMAGE_F))
13520          {
13521            /* There are no F in the map, i.e. 3rd operand is unused.
13522               Replace that argument with some constant to render
13523               respective input unused.  */
13524
13525            tval = build_int_cst (val_type, 0);
13526            changed = true;
13527          }
13528
13529        if (TREE_CODE (tbits) != INTEGER_CST
13530            && 0 == avr_map_metric (map, MAP_PREIMAGE_0_7))
13531          {
13532            /* Similar for the bits to be inserted. If they are unused,
13533               we can just as well pass 0.  */
13534
13535            tbits = build_int_cst (val_type, 0);
13536          }
13537
13538        if (TREE_CODE (tbits) == INTEGER_CST)
13539          {
13540            /* Inserting bits known at compile time is easy and can be
13541               performed by AND and OR with appropriate masks.  */
13542
13543            int bits = TREE_INT_CST_LOW (tbits);
13544            int mask_ior = 0, mask_and = 0xff;
13545
13546            for (i = 0; i < 8; i++)
13547              {
13548                int mi = avr_map (map, i);
13549
13550                if (mi < 8)
13551                  {
13552                    if (bits & (1 << mi))     mask_ior |=  (1 << i);
13553                    else                      mask_and &= ~(1 << i);
13554                  }
13555              }
13556
13557            tval = fold_build2 (BIT_IOR_EXPR, val_type, tval,
13558                                build_int_cst (val_type, mask_ior));
13559            return fold_build2 (BIT_AND_EXPR, val_type, tval,
13560                                build_int_cst (val_type, mask_and));
13561          }
13562
13563        if (changed)
13564          return build_call_expr (fndecl, 3, tmap, tbits, tval);
13565
13566        /* If bits don't change their position we can use vanilla logic
13567           to merge the two arguments.  */
13568
13569        if (0 == avr_map_metric (map, MAP_NONFIXED_0_7))
13570          {
13571            int mask_f = avr_map_metric (map, MAP_MASK_PREIMAGE_F);
13572            tree tres, tmask = build_int_cst (val_type, mask_f ^ 0xff);
13573
13574            tres = fold_build2 (BIT_XOR_EXPR, val_type, tbits, tval);
13575            tres = fold_build2 (BIT_AND_EXPR, val_type, tres, tmask);
13576            return fold_build2 (BIT_XOR_EXPR, val_type, tres, tval);
13577          }
13578
13579        /* Try to decomposing map to reduce overall cost.  */
13580
13581        if (avr_log.builtin)
13582          avr_edump ("\n%?: %x\n%?: ROL cost: ", map);
13583
13584        best_g = avr_map_op[0];
13585        best_g.cost = 1000;
13586
13587        for (i = 0; i < sizeof (avr_map_op) / sizeof (*avr_map_op); i++)
13588          {
13589            avr_map_op_t g
13590              = avr_map_decompose (map, avr_map_op + i,
13591                                   TREE_CODE (tval) == INTEGER_CST);
13592
13593            if (g.cost >= 0 && g.cost < best_g.cost)
13594              best_g = g;
13595          }
13596
13597        if (avr_log.builtin)
13598          avr_edump ("\n");
13599
13600        if (best_g.arg == 0)
13601          /* No optimization found */
13602          break;
13603
13604        /* Apply operation G to the 2nd argument.  */
13605
13606        if (avr_log.builtin)
13607          avr_edump ("%?: using OP(%s%d, %x) cost %d\n",
13608                     best_g.str, best_g.arg, best_g.map, best_g.cost);
13609
13610        /* Do right-shifts arithmetically: They copy the MSB instead of
13611           shifting in a non-usable value (0) as with logic right-shift.  */
13612
13613        tbits = fold_convert (signed_char_type_node, tbits);
13614        tbits = fold_build2 (best_g.code, signed_char_type_node, tbits,
13615                             build_int_cst (val_type, best_g.arg));
13616        tbits = fold_convert (val_type, tbits);
13617
13618        /* Use map o G^-1 instead of original map to undo the effect of G.  */
13619
13620        tmap = wide_int_to_tree (map_type, best_g.map);
13621
13622        return build_call_expr (fndecl, 3, tmap, tbits, tval);
13623      } /* AVR_BUILTIN_INSERT_BITS */
13624    }
13625
13626  return NULL_TREE;
13627}
13628
13629
13630
13631/* Initialize the GCC target structure.  */
13632
13633#undef  TARGET_ASM_ALIGNED_HI_OP
13634#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
13635#undef  TARGET_ASM_ALIGNED_SI_OP
13636#define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
13637#undef  TARGET_ASM_UNALIGNED_HI_OP
13638#define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
13639#undef  TARGET_ASM_UNALIGNED_SI_OP
13640#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
13641#undef  TARGET_ASM_INTEGER
13642#define TARGET_ASM_INTEGER avr_assemble_integer
13643#undef  TARGET_ASM_FILE_START
13644#define TARGET_ASM_FILE_START avr_file_start
13645#undef  TARGET_ASM_FILE_END
13646#define TARGET_ASM_FILE_END avr_file_end
13647
13648#undef  TARGET_ASM_FUNCTION_END_PROLOGUE
13649#define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
13650#undef  TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
13651#define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
13652
13653#undef  TARGET_FUNCTION_VALUE
13654#define TARGET_FUNCTION_VALUE avr_function_value
13655#undef  TARGET_LIBCALL_VALUE
13656#define TARGET_LIBCALL_VALUE avr_libcall_value
13657#undef  TARGET_FUNCTION_VALUE_REGNO_P
13658#define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
13659
13660#undef  TARGET_ATTRIBUTE_TABLE
13661#define TARGET_ATTRIBUTE_TABLE avr_attribute_table
13662#undef  TARGET_INSERT_ATTRIBUTES
13663#define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
13664#undef  TARGET_SECTION_TYPE_FLAGS
13665#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
13666
13667#undef  TARGET_ASM_NAMED_SECTION
13668#define TARGET_ASM_NAMED_SECTION avr_asm_named_section
13669#undef  TARGET_ASM_INIT_SECTIONS
13670#define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
13671#undef  TARGET_ENCODE_SECTION_INFO
13672#define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
13673#undef  TARGET_ASM_SELECT_SECTION
13674#define TARGET_ASM_SELECT_SECTION avr_asm_select_section
13675
13676#undef  TARGET_REGISTER_MOVE_COST
13677#define TARGET_REGISTER_MOVE_COST avr_register_move_cost
13678#undef  TARGET_MEMORY_MOVE_COST
13679#define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
13680#undef  TARGET_RTX_COSTS
13681#define TARGET_RTX_COSTS avr_rtx_costs
13682#undef  TARGET_ADDRESS_COST
13683#define TARGET_ADDRESS_COST avr_address_cost
13684#undef  TARGET_MACHINE_DEPENDENT_REORG
13685#define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
13686#undef  TARGET_FUNCTION_ARG
13687#define TARGET_FUNCTION_ARG avr_function_arg
13688#undef  TARGET_FUNCTION_ARG_ADVANCE
13689#define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
13690
13691#undef  TARGET_SET_CURRENT_FUNCTION
13692#define TARGET_SET_CURRENT_FUNCTION avr_set_current_function
13693
13694#undef  TARGET_RETURN_IN_MEMORY
13695#define TARGET_RETURN_IN_MEMORY avr_return_in_memory
13696
13697#undef  TARGET_STRICT_ARGUMENT_NAMING
13698#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
13699
13700#undef  TARGET_BUILTIN_SETJMP_FRAME_VALUE
13701#define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
13702
13703#undef TARGET_CONDITIONAL_REGISTER_USAGE
13704#define TARGET_CONDITIONAL_REGISTER_USAGE avr_conditional_register_usage
13705
13706#undef  TARGET_HARD_REGNO_SCRATCH_OK
13707#define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
13708#undef  TARGET_CASE_VALUES_THRESHOLD
13709#define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
13710
13711#undef  TARGET_FRAME_POINTER_REQUIRED
13712#define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
13713#undef  TARGET_CAN_ELIMINATE
13714#define TARGET_CAN_ELIMINATE avr_can_eliminate
13715
13716#undef  TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
13717#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args
13718
13719#undef TARGET_WARN_FUNC_RETURN
13720#define TARGET_WARN_FUNC_RETURN avr_warn_func_return
13721
13722#undef  TARGET_CLASS_LIKELY_SPILLED_P
13723#define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
13724
13725#undef  TARGET_OPTION_OVERRIDE
13726#define TARGET_OPTION_OVERRIDE avr_option_override
13727
13728#undef  TARGET_CANNOT_MODIFY_JUMPS_P
13729#define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
13730
13731#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
13732#define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
13733
13734#undef  TARGET_INIT_BUILTINS
13735#define TARGET_INIT_BUILTINS avr_init_builtins
13736
13737#undef  TARGET_BUILTIN_DECL
13738#define TARGET_BUILTIN_DECL avr_builtin_decl
13739
13740#undef  TARGET_EXPAND_BUILTIN
13741#define TARGET_EXPAND_BUILTIN avr_expand_builtin
13742
13743#undef  TARGET_FOLD_BUILTIN
13744#define TARGET_FOLD_BUILTIN avr_fold_builtin
13745
13746#undef  TARGET_ASM_FUNCTION_RODATA_SECTION
13747#define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
13748
13749#undef  TARGET_SCALAR_MODE_SUPPORTED_P
13750#define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
13751
13752#undef  TARGET_BUILD_BUILTIN_VA_LIST
13753#define TARGET_BUILD_BUILTIN_VA_LIST avr_build_builtin_va_list
13754
13755#undef  TARGET_FIXED_POINT_SUPPORTED_P
13756#define TARGET_FIXED_POINT_SUPPORTED_P hook_bool_void_true
13757
13758#undef  TARGET_CONVERT_TO_TYPE
13759#define TARGET_CONVERT_TO_TYPE avr_convert_to_type
13760
13761#undef  TARGET_ADDR_SPACE_SUBSET_P
13762#define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
13763
13764#undef  TARGET_ADDR_SPACE_CONVERT
13765#define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
13766
13767#undef  TARGET_ADDR_SPACE_ADDRESS_MODE
13768#define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
13769
13770#undef  TARGET_ADDR_SPACE_POINTER_MODE
13771#define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
13772
13773#undef  TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
13774#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P  \
13775  avr_addr_space_legitimate_address_p
13776
13777#undef  TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
13778#define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
13779
13780#undef  TARGET_MODE_DEPENDENT_ADDRESS_P
13781#define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p
13782
13783#undef  TARGET_SECONDARY_RELOAD
13784#define TARGET_SECONDARY_RELOAD avr_secondary_reload
13785
13786#undef  TARGET_PRINT_OPERAND
13787#define TARGET_PRINT_OPERAND avr_print_operand
13788#undef  TARGET_PRINT_OPERAND_ADDRESS
13789#define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
13790#undef  TARGET_PRINT_OPERAND_PUNCT_VALID_P
13791#define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
13792
13793struct gcc_target targetm = TARGET_INITIALIZER;
13794
13795
13796#include "gt-avr.h"
13797