1/* Subroutines for insn-output.c for ATMEL AVR micro controllers
2   Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3   2009, 2010 Free Software Foundation, Inc.
4   Contributed by Denis Chertykov (chertykov@gmail.com)
5
6   This file is part of GCC.
7
8   GCC is free software; you can redistribute it and/or modify
9   it under the terms of the GNU General Public License as published by
10   the Free Software Foundation; either version 3, or (at your option)
11   any later version.
12
13   GCC is distributed in the hope that it will be useful,
14   but WITHOUT ANY WARRANTY; without even the implied warranty of
15   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16   GNU General Public License for more details.
17
18   You should have received a copy of the GNU General Public License
19   along with GCC; see the file COPYING3.  If not see
20   <http://www.gnu.org/licenses/>.  */
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "rtl.h"
27#include "regs.h"
28#include "hard-reg-set.h"
29#include "real.h"
30#include "insn-config.h"
31#include "conditions.h"
32#include "insn-attr.h"
33#include "flags.h"
34#include "reload.h"
35#include "tree.h"
36#include "output.h"
37#include "expr.h"
38#include "toplev.h"
39#include "obstack.h"
40#include "function.h"
41#include "recog.h"
42#include "ggc.h"
43#include "tm_p.h"
44#include "target.h"
45#include "target-def.h"
46#include "params.h"
47#include "df.h"
48
49/* Maximal allowed offset for an address in the LD command */
50#define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
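/* That is, LDD/STD displacements are limited to 0..63, and the highest byte
   of the access, at offset + GET_MODE_SIZE - 1, must still be reachable;
   hence 64 - size.  */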
51
52static int avr_naked_function_p (tree);
53static int interrupt_function_p (tree);
54static int signal_function_p (tree);
55static int avr_OS_task_function_p (tree);
56static int avr_OS_main_function_p (tree);
57static int avr_regs_to_save (HARD_REG_SET *);
58static int get_sequence_length (rtx insns);
59static int sequent_regs_live (void);
60static const char *ptrreg_to_str (int);
61static const char *cond_string (enum rtx_code);
62static int avr_num_arg_regs (enum machine_mode, tree);
63
64static RTX_CODE compare_condition (rtx insn);
65static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
66static int compare_sign_p (rtx insn);
67static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
68static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
69static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
70static bool avr_assemble_integer (rtx, unsigned int, int);
71static void avr_file_start (void);
72static void avr_file_end (void);
73static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
74static void avr_asm_function_end_prologue (FILE *);
75static void avr_asm_function_begin_epilogue (FILE *);
76static bool avr_cannot_modify_jumps_p (void);
77static rtx avr_function_value (const_tree, const_tree, bool);
78static void avr_insert_attributes (tree, tree *);
79static void avr_asm_init_sections (void);
80static unsigned int avr_section_type_flags (tree, const char *, int);
81
82static void avr_reorg (void);
83static void avr_asm_out_ctor (rtx, int);
84static void avr_asm_out_dtor (rtx, int);
85static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code, bool);
86static bool avr_rtx_costs (rtx, int, int, int *, bool);
87static int avr_address_cost (rtx, bool);
88static bool avr_return_in_memory (const_tree, const_tree);
89static struct machine_function * avr_init_machine_status (void);
90static rtx avr_builtin_setjmp_frame_value (void);
91static bool avr_hard_regno_scratch_ok (unsigned int);
92static unsigned int avr_case_values_threshold (void);
93static bool avr_frame_pointer_required_p (void);
94static bool avr_can_eliminate (const int, const int);
95static void avr_help (void);
96
97/* Allocate registers from r25 to r8 for parameters for function calls.  */
98#define FIRST_CUM_REG 26
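/* FIRST_CUM_REG is one above the highest argument register, r25; the first
   argument is allocated downwards from there, see function_arg.  */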
99
100/* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
101static GTY(()) rtx tmp_reg_rtx;
102
103/* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
104static GTY(()) rtx zero_reg_rtx;
105
106/* AVR register names {"r0", "r1", ..., "r31"} */
107static const char *const avr_regnames[] = REGISTER_NAMES;
108
109/* Preprocessor macros to define depending on MCU type.  */
110const char *avr_extra_arch_macro;
111
112/* Current architecture.  */
113const struct base_arch_s *avr_current_arch;
114
115/* Current device.  */
116const struct mcu_type_s *avr_current_device;
117
118section *progmem_section;
119
120/* AVR attributes.  */
121static const struct attribute_spec avr_attribute_table[] =
122{
123  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
124  { "progmem",   0, 0, false, false, false,  avr_handle_progmem_attribute },
125  { "signal",    0, 0, true,  false, false,  avr_handle_fndecl_attribute },
126  { "interrupt", 0, 0, true,  false, false,  avr_handle_fndecl_attribute },
127  { "naked",     0, 0, false, true,  true,   avr_handle_fntype_attribute },
128  { "OS_task",   0, 0, false, true,  true,   avr_handle_fntype_attribute },
129  { "OS_main",   0, 0, false, true,  true,   avr_handle_fntype_attribute },
130  { NULL,        0, 0, false, false, false, NULL }
131};
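
/* Illustration only - typical uses of these attributes in user code
   (the names below are just examples):

       const char msg[] __attribute__ ((progmem)) = "hello";
       void __vector_5 (void) __attribute__ ((signal));
*/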
132
133/* Initialize the GCC target structure.  */
134#undef TARGET_ASM_ALIGNED_HI_OP
135#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
136#undef TARGET_ASM_ALIGNED_SI_OP
137#define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
138#undef TARGET_ASM_UNALIGNED_HI_OP
139#define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
140#undef TARGET_ASM_UNALIGNED_SI_OP
141#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
142#undef TARGET_ASM_INTEGER
143#define TARGET_ASM_INTEGER avr_assemble_integer
144#undef TARGET_ASM_FILE_START
145#define TARGET_ASM_FILE_START avr_file_start
146#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
147#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
148#undef TARGET_ASM_FILE_END
149#define TARGET_ASM_FILE_END avr_file_end
150
151#undef TARGET_ASM_FUNCTION_END_PROLOGUE
152#define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
153#undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
154#define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
155#undef TARGET_FUNCTION_VALUE
156#define TARGET_FUNCTION_VALUE avr_function_value
157#undef TARGET_ATTRIBUTE_TABLE
158#define TARGET_ATTRIBUTE_TABLE avr_attribute_table
159#undef TARGET_ASM_FUNCTION_RODATA_SECTION
160#define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
161#undef TARGET_INSERT_ATTRIBUTES
162#define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
163#undef TARGET_SECTION_TYPE_FLAGS
164#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
165#undef TARGET_RTX_COSTS
166#define TARGET_RTX_COSTS avr_rtx_costs
167#undef TARGET_ADDRESS_COST
168#define TARGET_ADDRESS_COST avr_address_cost
169#undef TARGET_MACHINE_DEPENDENT_REORG
170#define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
171
172#undef TARGET_LEGITIMIZE_ADDRESS
173#define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
174
175#undef TARGET_RETURN_IN_MEMORY
176#define TARGET_RETURN_IN_MEMORY avr_return_in_memory
177
178#undef TARGET_STRICT_ARGUMENT_NAMING
179#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
180
181#undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
182#define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
183
184#undef TARGET_HARD_REGNO_SCRATCH_OK
185#define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
186#undef TARGET_CASE_VALUES_THRESHOLD
187#define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
188
189#undef TARGET_LEGITIMATE_ADDRESS_P
190#define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
191
192#undef TARGET_FRAME_POINTER_REQUIRED
193#define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
194#undef TARGET_CAN_ELIMINATE
195#define TARGET_CAN_ELIMINATE avr_can_eliminate
196
197#undef TARGET_HELP
198#define TARGET_HELP avr_help
199
200#undef TARGET_CANNOT_MODIFY_JUMPS_P
201#define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
202
203struct gcc_target targetm = TARGET_INITIALIZER;
204
205void
206avr_override_options (void)
207{
208  const struct mcu_type_s *t;
209
210  flag_delete_null_pointer_checks = 0;
211
212  for (t = avr_mcu_types; t->name; t++)
213    if (strcmp (t->name, avr_mcu_name) == 0)
214      break;
215
216  if (!t->name)
217    {
218      error ("unrecognized argument to -mmcu= option: %qs", avr_mcu_name);
219      inform (input_location,  "See --target-help for supported MCUs");
220    }
221
222  avr_current_device = t;
223  avr_current_arch = &avr_arch_types[avr_current_device->arch];
224  avr_extra_arch_macro = avr_current_device->macro;
225
226  tmp_reg_rtx  = gen_rtx_REG (QImode, TMP_REGNO);
227  zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
228
229  init_machine_status = avr_init_machine_status;
230}
231
232/* Implement TARGET_HELP */
233/* Report extra information for --target-help */
234
235static void
236avr_help (void)
237{
238  const struct mcu_type_s *t;
239  const char * const indent = "  ";
240  int len;
241
242  /* Give a list of MCUs that are accepted by -mmcu=* .
243     Note that MCUs supported by the compiler might differ from
244     MCUs supported by binutils. */
245
246  len = strlen (indent);
247  printf ("Known MCU names:\n%s", indent);
248
249  /* Print a blank-separated list of all supported MCUs */
250
251  for (t = avr_mcu_types; t->name; t++)
252    {
253      printf ("%s ", t->name);
254      len += 1 + strlen (t->name);
255
256      /* Break long lines */
257
258      if (len > 66 && (t+1)->name)
259        {
260          printf ("\n%s", indent);
261          len = strlen (indent);
262        }
263    }
264
265  printf ("\n\n");
266}
267
/* Return register class from register number.  */
269
270static const enum reg_class reg_class_tab[]={
271  GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
272  GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
273  GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
274  GENERAL_REGS, /* r0 - r15 */
275  LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
276  LD_REGS,                      /* r16 - 23 */
277  ADDW_REGS,ADDW_REGS,          /* r24,r25 */
278  POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
279  POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
280  POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
281  STACK_REG,STACK_REG           /* SPL,SPH */
282};
283
284/* Function to set up the backend function structure.  */
285
286static struct machine_function *
287avr_init_machine_status (void)
288{
289  return ((struct machine_function *)
290          ggc_alloc_cleared (sizeof (struct machine_function)));
291}
292
293/* Return register class for register R.  */
294
295enum reg_class
296avr_regno_reg_class (int r)
297{
298  if (r <= 33)
299    return reg_class_tab[r];
300  return ALL_REGS;
301}
302
303/* Return nonzero if FUNC is a naked function.  */
304
305static int
306avr_naked_function_p (tree func)
307{
308  tree a;
309
310  gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
311
312  a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
313  return a != NULL_TREE;
314}
315
316/* Return nonzero if FUNC is an interrupt function as specified
317   by the "interrupt" attribute.  */
318
319static int
320interrupt_function_p (tree func)
321{
322  tree a;
323
324  if (TREE_CODE (func) != FUNCTION_DECL)
325    return 0;
326
327  a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
328  return a != NULL_TREE;
329}
330
331/* Return nonzero if FUNC is a signal function as specified
332   by the "signal" attribute.  */
333
334static int
335signal_function_p (tree func)
336{
337  tree a;
338
339  if (TREE_CODE (func) != FUNCTION_DECL)
340    return 0;
341
342  a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
343  return a != NULL_TREE;
344}
345
/* Return nonzero if FUNC is an OS_task function.  */
347
348static int
349avr_OS_task_function_p (tree func)
350{
351  tree a;
352
353  gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
354
355  a = lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func)));
356  return a != NULL_TREE;
357}
358
/* Return nonzero if FUNC is an OS_main function.  */
360
361static int
362avr_OS_main_function_p (tree func)
363{
364  tree a;
365
366  gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
367
368  a = lookup_attribute ("OS_main", TYPE_ATTRIBUTES (TREE_TYPE (func)));
369  return a != NULL_TREE;
370}
371
372/* Return the number of hard registers to push/pop in the prologue/epilogue
373   of the current function, and optionally store these registers in SET.  */
374
375static int
376avr_regs_to_save (HARD_REG_SET *set)
377{
378  int reg, count;
379  int int_or_sig_p = (interrupt_function_p (current_function_decl)
380		      || signal_function_p (current_function_decl));
381
382  if (set)
383    CLEAR_HARD_REG_SET (*set);
384  count = 0;
385
  /* No need to save any registers if the function never returns or
     has the "OS_task" or "OS_main" attribute.  */
388  if (TREE_THIS_VOLATILE (current_function_decl)
389      || cfun->machine->is_OS_task
390      || cfun->machine->is_OS_main)
391    return 0;
392
393  for (reg = 0; reg < 32; reg++)
394    {
395      /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
396	 any global register variables.  */
397      if (fixed_regs[reg])
398	continue;
399
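      /* In an interrupt or signal handler that is not a leaf function,
	 call-used registers must be saved as well, since a callee might
	 clobber them.  The frame pointer registers (Y) are excluded here
	 because the prologue pushes them separately when a frame pointer
	 is needed.  */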
400      if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
401	  || (df_regs_ever_live_p (reg)
402	      && (int_or_sig_p || !call_used_regs[reg])
403	      && !(frame_pointer_needed
404		   && (reg == REG_Y || reg == (REG_Y+1)))))
405	{
406	  if (set)
407	    SET_HARD_REG_BIT (*set, reg);
408	  count++;
409	}
410    }
411  return count;
412}
413
414/* Return true if register FROM can be eliminated via register TO.  */
415
416bool
417avr_can_eliminate (const int from, const int to)
418{
419  return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
420	  || ((from == FRAME_POINTER_REGNUM
421	       || from == FRAME_POINTER_REGNUM + 1)
422	      && !frame_pointer_needed));
423}
424
425/* Compute offset between arg_pointer and frame_pointer.  */
426
427int
428avr_initial_elimination_offset (int from, int to)
429{
430  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
431    return 0;
432  else
433    {
434      int offset = frame_pointer_needed ? 2 : 0;
435      int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
436
437      offset += avr_regs_to_save (NULL);
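      /* The offset consists of the saved frame pointer (2 bytes, if any),
	 the pushed registers, the return address (2 or 3 bytes) and the
	 frame itself; the extra 1 reflects that the AVR stack pointer
	 points one byte below the last value pushed.  */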
438      return get_frame_size () + (avr_pc_size) + 1 + offset;
439    }
440}
441
/* The actual start of the frame is virtual_stack_vars_rtx, which is offset
   from the frame pointer by +STARTING_FRAME_OFFSET.
   Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
   avoids creating an add/sub of the offset in nonlocal goto and setjmp.  */
446
rtx
avr_builtin_setjmp_frame_value (void)
448{
449  return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
450			 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
451}
452
/* Return the contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
   This is the return address of the function.  */
455rtx
456avr_return_addr_rtx (int count, const_rtx tem)
457{
458  rtx r;
459
  /* Can only return this function's return address.  Others not supported.  */
  if (count)
    return NULL;
463
464  if (AVR_3_BYTE_PC)
465    {
466      r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
467      warning (0, "'builtin_return_address' contains only 2 bytes of address");
468    }
469  else
470    r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
471
472  r = gen_rtx_PLUS (Pmode, tem, r);
473  r = gen_frame_mem (Pmode, memory_address (Pmode, r));
474  r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
475  return  r;
476}
477
478/* Return 1 if the function epilogue is just a single "ret".  */
479
480int
481avr_simple_epilogue (void)
482{
483  return (! frame_pointer_needed
484	  && get_frame_size () == 0
485	  && avr_regs_to_save (NULL) == 0
486	  && ! interrupt_function_p (current_function_decl)
487	  && ! signal_function_p (current_function_decl)
488	  && ! avr_naked_function_p (current_function_decl)
489	  && ! TREE_THIS_VOLATILE (current_function_decl));
490}
491
/* Return the number of live call-saved registers (checked among r0-r17 and
   the frame pointer) provided they form one contiguous sequence; return 0
   otherwise.  */
493
494static int
495sequent_regs_live (void)
496{
497  int reg;
498  int live_seq=0;
499  int cur_seq=0;
500
501  for (reg = 0; reg < 18; ++reg)
502    {
503      if (!call_used_regs[reg])
504	{
505	  if (df_regs_ever_live_p (reg))
506	    {
507	      ++live_seq;
508	      ++cur_seq;
509	    }
510	  else
511	    cur_seq = 0;
512	}
513    }
514
515  if (!frame_pointer_needed)
516    {
517      if (df_regs_ever_live_p (REG_Y))
518	{
519	  ++live_seq;
520	  ++cur_seq;
521	}
522      else
523	cur_seq = 0;
524
525      if (df_regs_ever_live_p (REG_Y+1))
526	{
527	  ++live_seq;
528	  ++cur_seq;
529	}
530      else
531	cur_seq = 0;
532    }
533  else
534    {
535      cur_seq += 2;
536      live_seq += 2;
537    }
538  return (cur_seq == live_seq) ? live_seq : 0;
539}
540
/* Return the combined "length" attribute value of all insns in the
   sequence INSNS.  */
542
543int
544get_sequence_length (rtx insns)
545{
546  rtx insn;
547  int length;
548
549  for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
550    length += get_attr_length (insn);
551
552  return length;
553}
554
/* Emit RTL for the function prologue.  */
556
557void
558expand_prologue (void)
559{
560  int live_seq;
561  HARD_REG_SET set;
562  int minimize;
563  HOST_WIDE_INT size = get_frame_size();
564  /* Define templates for push instructions.  */
565  rtx pushbyte = gen_rtx_MEM (QImode,
566                  gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
567  rtx pushword = gen_rtx_MEM (HImode,
568                  gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
569  rtx insn;
570
571  /* Init cfun->machine.  */
572  cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
573  cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
574  cfun->machine->is_signal = signal_function_p (current_function_decl);
575  cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
576  cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
577  cfun->machine->stack_usage = 0;
578
579  /* Prologue: naked.  */
580  if (cfun->machine->is_naked)
581    {
582      return;
583    }
584
585  avr_regs_to_save (&set);
586  live_seq = sequent_regs_live ();
587  minimize = (TARGET_CALL_PROLOGUES
588	      && !cfun->machine->is_interrupt
589	      && !cfun->machine->is_signal
590	      && !cfun->machine->is_OS_task
591	      && !cfun->machine->is_OS_main
592	      && live_seq);
593
594  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
595    {
596      if (cfun->machine->is_interrupt)
597        {
598          /* Enable interrupts.  */
599          insn = emit_insn (gen_enable_interrupt ());
600          RTX_FRAME_RELATED_P (insn) = 1;
601        }
602
603      /* Push zero reg.  */
604      insn = emit_move_insn (pushbyte, zero_reg_rtx);
605      RTX_FRAME_RELATED_P (insn) = 1;
606      cfun->machine->stack_usage++;
607
608      /* Push tmp reg.  */
609      insn = emit_move_insn (pushbyte, tmp_reg_rtx);
610      RTX_FRAME_RELATED_P (insn) = 1;
611      cfun->machine->stack_usage++;
612
613      /* Push SREG.  */
614      insn = emit_move_insn (tmp_reg_rtx,
615                             gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
616      RTX_FRAME_RELATED_P (insn) = 1;
617      insn = emit_move_insn (pushbyte, tmp_reg_rtx);
618      RTX_FRAME_RELATED_P (insn) = 1;
619      cfun->machine->stack_usage++;
620
621      /* Push RAMPZ.  */
622      if(AVR_HAVE_RAMPZ
623         && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
624        {
625          insn = emit_move_insn (tmp_reg_rtx,
626                                 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
627          RTX_FRAME_RELATED_P (insn) = 1;
628          insn = emit_move_insn (pushbyte, tmp_reg_rtx);
629          RTX_FRAME_RELATED_P (insn) = 1;
630          cfun->machine->stack_usage++;
631        }
632
633      /* Clear zero reg.  */
634      insn = emit_move_insn (zero_reg_rtx, const0_rtx);
635      RTX_FRAME_RELATED_P (insn) = 1;
636
637      /* Prevent any attempt to delete the setting of ZERO_REG!  */
638      emit_use (zero_reg_rtx);
639    }
640  if (minimize && (frame_pointer_needed
641		   || (AVR_2_BYTE_PC && live_seq > 6)
642		   || live_seq > 7))
643    {
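      /* Use the __prologue_saves__ helper from libgcc: the frame size is
         loaded into X and the helper pushes the contiguous block of
         LIVE_SEQ call-saved registers and sets up the frame.  */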
644      insn = emit_move_insn (gen_rtx_REG (HImode, REG_X),
645                             gen_int_mode (size, HImode));
646      RTX_FRAME_RELATED_P (insn) = 1;
647
648      insn =
649        emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
650					    gen_int_mode (size + live_seq, HImode)));
651      RTX_FRAME_RELATED_P (insn) = 1;
652      cfun->machine->stack_usage += size + live_seq;
653    }
654  else
655    {
656      int reg;
657      for (reg = 0; reg < 32; ++reg)
658        {
659          if (TEST_HARD_REG_BIT (set, reg))
660            {
661              /* Emit push of register to save.  */
662              insn=emit_move_insn (pushbyte, gen_rtx_REG (QImode, reg));
663              RTX_FRAME_RELATED_P (insn) = 1;
664              cfun->machine->stack_usage++;
665            }
666        }
667      if (frame_pointer_needed)
668        {
669	  if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
670	    {
671              /* Push frame pointer.  */
672	      insn = emit_move_insn (pushword, frame_pointer_rtx);
673              RTX_FRAME_RELATED_P (insn) = 1;
674	      cfun->machine->stack_usage += 2;
675	    }
676
677          if (!size)
678            {
679              insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
680              RTX_FRAME_RELATED_P (insn) = 1;
681            }
682          else
683            {
              /* Creating a frame can be done by direct manipulation of the
                 stack or via the frame pointer.  These two methods are:
                     fp = sp
                     fp -= size
                     sp = fp
                 or
                     sp -= size
                     fp = sp
                 The optimum method depends on function type, stack and frame
                 size.  To avoid complex logic, both methods are tested and
                 the shorter sequence is selected.  */
695              rtx myfp;
696	      rtx fp_plus_insns;
697	      rtx sp_plus_insns = NULL_RTX;
698
699              if (AVR_HAVE_8BIT_SP)
700                {
701                  /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
702                     over 'sbiw' (2 cycles, same size).  */
703                  myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
704                }
705              else
706                {
707                  /*  Normal sized addition.  */
708                  myfp = frame_pointer_rtx;
709                }
710
	      /* Method 1: Adjust the frame pointer.  */
712	      start_sequence ();
713
714              insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
715              RTX_FRAME_RELATED_P (insn) = 1;
716
717              insn =
718	        emit_move_insn (myfp,
719				gen_rtx_PLUS (GET_MODE(myfp), myfp,
720					      gen_int_mode (-size,
721							    GET_MODE(myfp))));
722              RTX_FRAME_RELATED_P (insn) = 1;
723
724	      /* Copy to stack pointer.  */
725	      if (AVR_HAVE_8BIT_SP)
726		{
727		  insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
728		  RTX_FRAME_RELATED_P (insn) = 1;
729		}
730	      else if (TARGET_NO_INTERRUPTS
731		       || cfun->machine->is_signal
732		       || cfun->machine->is_OS_main)
733		{
734		  insn =
735		    emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
736						       frame_pointer_rtx));
737		  RTX_FRAME_RELATED_P (insn) = 1;
738		}
739	      else if (cfun->machine->is_interrupt)
740		{
741		  insn = emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
742							   frame_pointer_rtx));
743		  RTX_FRAME_RELATED_P (insn) = 1;
744		}
745	      else
746		{
747		  insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
748		  RTX_FRAME_RELATED_P (insn) = 1;
749		}
750
751	      fp_plus_insns = get_insns ();
752	      end_sequence ();
753
	      /* Method 2: Adjust the stack pointer.  */
755              if (size <= 6)
756                {
757		  start_sequence ();
758
759		  insn =
760		    emit_move_insn (stack_pointer_rtx,
761				    gen_rtx_PLUS (HImode,
762						  stack_pointer_rtx,
763						  gen_int_mode (-size,
764								HImode)));
765		  RTX_FRAME_RELATED_P (insn) = 1;
766
767		  insn =
768		    emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
769		  RTX_FRAME_RELATED_P (insn) = 1;
770
771		  sp_plus_insns = get_insns ();
772		  end_sequence ();
773                }
774
775              /* Use shortest method.  */
776              if (size <= 6 && (get_sequence_length (sp_plus_insns)
777				 < get_sequence_length (fp_plus_insns)))
778		emit_insn (sp_plus_insns);
779              else
780		emit_insn (fp_plus_insns);
781	      cfun->machine->stack_usage += size;
782            }
783        }
784    }
785}
786
787/* Output summary at end of function prologue.  */
788
789static void
790avr_asm_function_end_prologue (FILE *file)
791{
792  if (cfun->machine->is_naked)
793    {
794      fputs ("/* prologue: naked */\n", file);
795    }
796  else
797    {
798      if (cfun->machine->is_interrupt)
799        {
800          fputs ("/* prologue: Interrupt */\n", file);
801        }
802      else if (cfun->machine->is_signal)
803        {
804          fputs ("/* prologue: Signal */\n", file);
805        }
806      else
807        fputs ("/* prologue: function */\n", file);
808    }
809  fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
810                 get_frame_size());
811  fprintf (file, "/* stack size = %d */\n",
812                 cfun->machine->stack_usage);
  /* Emit the .L__stack_usage symbol here so that every function defines it.
     avr_return_addr_rtx references it (as .L__stack_usage+1, or +2 for a
     3-byte PC) to find the return address relative to the frame pointer.  */
815  fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
816}
817
818
819/* Implement EPILOGUE_USES.  */
820
821int
822avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
823{
824  if (reload_completed
825      && cfun->machine
826      && (cfun->machine->is_interrupt || cfun->machine->is_signal))
827    return 1;
828  return 0;
829}
830
/* Emit RTL for the function epilogue.  */
832
833void
834expand_epilogue (void)
835{
836  int reg;
837  int live_seq;
838  HARD_REG_SET set;
839  int minimize;
840  HOST_WIDE_INT size = get_frame_size();
841
842  /* epilogue: naked  */
843  if (cfun->machine->is_naked)
844    {
845      emit_jump_insn (gen_return ());
846      return;
847    }
848
849  avr_regs_to_save (&set);
850  live_seq = sequent_regs_live ();
851  minimize = (TARGET_CALL_PROLOGUES
852	      && !cfun->machine->is_interrupt
853	      && !cfun->machine->is_signal
854	      && !cfun->machine->is_OS_task
855	      && !cfun->machine->is_OS_main
856	      && live_seq);
857
858  if (minimize && (frame_pointer_needed || live_seq > 4))
859    {
860      if (frame_pointer_needed)
861	{
862          /*  Get rid of frame.  */
863	  emit_move_insn(frame_pointer_rtx,
864                         gen_rtx_PLUS (HImode, frame_pointer_rtx,
865                                       gen_int_mode (size, HImode)));
866	}
867      else
868	{
869          emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
870	}
871
872      emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
873    }
874  else
875    {
876      if (frame_pointer_needed)
877	{
878	  if (size)
879	    {
880              /* Try two methods to adjust stack and select shortest.  */
881	      rtx myfp;
882	      rtx fp_plus_insns;
883	      rtx sp_plus_insns = NULL_RTX;
884
885	      if (AVR_HAVE_8BIT_SP)
886                {
887                  /* The high byte (r29) doesn't change - prefer 'subi'
888                     (1 cycle) over 'sbiw' (2 cycles, same size).  */
889                  myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
890                }
891              else
892                {
893                  /* Normal sized addition.  */
894                  myfp = frame_pointer_rtx;
895                }
896
              /* Method 1: Adjust the frame pointer.  */
898	      start_sequence ();
899
900	      emit_move_insn (myfp,
901			      gen_rtx_PLUS (GET_MODE (myfp), myfp,
902					    gen_int_mode (size,
903							  GET_MODE(myfp))));
904
905	      /* Copy to stack pointer.  */
906	      if (AVR_HAVE_8BIT_SP)
907		{
908		  emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
909		}
910	      else if (TARGET_NO_INTERRUPTS
911		       || cfun->machine->is_signal)
912		{
913		  emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
914						     frame_pointer_rtx));
915		}
916	      else if (cfun->machine->is_interrupt)
917		{
918		  emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
919						    frame_pointer_rtx));
920		}
921	      else
922		{
923		  emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
924		}
925
926	      fp_plus_insns = get_insns ();
927	      end_sequence ();
928
              /* Method 2: Adjust the stack pointer.  */
930              if (size <= 5)
931                {
932		  start_sequence ();
933
934		  emit_move_insn (stack_pointer_rtx,
935				  gen_rtx_PLUS (HImode, stack_pointer_rtx,
936						gen_int_mode (size,
937							      HImode)));
938
939		  sp_plus_insns = get_insns ();
940		  end_sequence ();
941                }
942
943              /* Use shortest method.  */
944              if (size <= 5 && (get_sequence_length (sp_plus_insns)
945				 < get_sequence_length (fp_plus_insns)))
946	      	emit_insn (sp_plus_insns);
947              else
948		emit_insn (fp_plus_insns);
949            }
950	  if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
951	    {
952              /* Restore previous frame_pointer.  */
953	      emit_insn (gen_pophi (frame_pointer_rtx));
954	    }
955	}
956      /* Restore used registers.  */
957      for (reg = 31; reg >= 0; --reg)
958        {
959          if (TEST_HARD_REG_BIT (set, reg))
960              emit_insn (gen_popqi (gen_rtx_REG (QImode, reg)));
961        }
962      if (cfun->machine->is_interrupt || cfun->machine->is_signal)
963        {
964          /* Restore RAMPZ using tmp reg as scratch.  */
965	  if(AVR_HAVE_RAMPZ
966             && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
967            {
968	      emit_insn (gen_popqi (tmp_reg_rtx));
969	      emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(RAMPZ_ADDR)),
970			      tmp_reg_rtx);
971	    }
972
973          /* Restore SREG using tmp reg as scratch.  */
974          emit_insn (gen_popqi (tmp_reg_rtx));
975
976          emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(SREG_ADDR)),
977			  tmp_reg_rtx);
978
979          /* Restore tmp REG.  */
980          emit_insn (gen_popqi (tmp_reg_rtx));
981
982          /* Restore zero REG.  */
983          emit_insn (gen_popqi (zero_reg_rtx));
984        }
985
986      emit_jump_insn (gen_return ());
987    }
988}
989
990/* Output summary messages at beginning of function epilogue.  */
991
992static void
993avr_asm_function_begin_epilogue (FILE *file)
994{
995  fprintf (file, "/* epilogue start */\n");
996}
997
998
/* Implement TARGET_CANNOT_MODIFY_JUMPS_P.  */
1000
1001static bool
1002avr_cannot_modify_jumps_p (void)
1003{
1004
  /* Naked functions must not have any instructions after
     their epilogue; see PR42240.  */
1007
1008  if (reload_completed
1009      && cfun->machine
1010      && cfun->machine->is_naked)
1011    {
1012      return true;
1013    }
1014
1015  return false;
1016}
1017
1018
1019/* Return nonzero if X (an RTX) is a legitimate memory address on the target
1020   machine for a memory operand of mode MODE.  */
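/* The AVR addressing modes handled here are: a plain pointer register
   (X, Y or Z); a base register plus a constant displacement of 0..63
   (only Y and Z support this directly via LDD/STD, X needs the pointer
   adjusted); pre-decrement and post-increment; and a constant data-space
   address for LDS/STS.  */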
1021
1022bool
1023avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1024{
1025  enum reg_class r = NO_REGS;
1026
1027  if (TARGET_ALL_DEBUG)
1028    {
1029      fprintf (stderr, "mode: (%s) %s %s %s %s:",
1030	       GET_MODE_NAME(mode),
1031	       strict ? "(strict)": "",
1032	       reload_completed ? "(reload_completed)": "",
1033	       reload_in_progress ? "(reload_in_progress)": "",
1034	       reg_renumber ? "(reg_renumber)" : "");
1035      if (GET_CODE (x) == PLUS
1036	  && REG_P (XEXP (x, 0))
1037	  && GET_CODE (XEXP (x, 1)) == CONST_INT
1038	  && INTVAL (XEXP (x, 1)) >= 0
1039	  && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1040	  && reg_renumber
1041	  )
1042	fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1043		 true_regnum (XEXP (x, 0)));
1044      debug_rtx (x);
1045    }
1046  if (!strict && GET_CODE (x) == SUBREG)
1047	x = SUBREG_REG (x);
1048  if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1049                    : REG_OK_FOR_BASE_NOSTRICT_P (x)))
1050    r = POINTER_REGS;
1051  else if (CONSTANT_ADDRESS_P (x))
1052    r = ALL_REGS;
1053  else if (GET_CODE (x) == PLUS
1054           && REG_P (XEXP (x, 0))
1055	   && GET_CODE (XEXP (x, 1)) == CONST_INT
1056	   && INTVAL (XEXP (x, 1)) >= 0)
1057    {
1058      int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1059      if (fit)
1060	{
1061	  if (! strict
1062	      || REGNO (XEXP (x,0)) == REG_X
1063	      || REGNO (XEXP (x,0)) == REG_Y
1064	      || REGNO (XEXP (x,0)) == REG_Z)
1065	    r = BASE_POINTER_REGS;
1066	  if (XEXP (x,0) == frame_pointer_rtx
1067	      || XEXP (x,0) == arg_pointer_rtx)
1068	    r = BASE_POINTER_REGS;
1069	}
1070      else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
1071	r = POINTER_Y_REGS;
1072    }
1073  else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1074           && REG_P (XEXP (x, 0))
1075           && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1076               : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1077    {
1078      r = POINTER_REGS;
1079    }
1080  if (TARGET_ALL_DEBUG)
1081    {
1082      fprintf (stderr, "   ret = %c\n", r + '0');
1083    }
1084  return r == NO_REGS ? 0 : (int)r;
1085}
1086
/* Attempt to replace X with a valid
   memory address for an operand of mode MODE.  */
1089
1090rtx
1091avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1092{
1093  x = oldx;
1094  if (TARGET_ALL_DEBUG)
1095    {
1096      fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1097      debug_rtx (oldx);
1098    }
1099
1100  if (GET_CODE (oldx) == PLUS
1101      && REG_P (XEXP (oldx,0)))
1102    {
1103      if (REG_P (XEXP (oldx,1)))
1104	x = force_reg (GET_MODE (oldx), oldx);
1105      else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1106	{
1107	  int offs = INTVAL (XEXP (oldx,1));
1108	  if (frame_pointer_rtx != XEXP (oldx,0))
1109	    if (offs > MAX_LD_OFFSET (mode))
1110	      {
1111		if (TARGET_ALL_DEBUG)
1112		  fprintf (stderr, "force_reg (big offset)\n");
1113		x = force_reg (GET_MODE (oldx), oldx);
1114	      }
1115	}
1116    }
1117  return x;
1118}
1119
1120
1121/* Return a pointer register name as a string.  */
1122
1123static const char *
1124ptrreg_to_str (int regno)
1125{
1126  switch (regno)
1127    {
1128    case REG_X: return "X";
1129    case REG_Y: return "Y";
1130    case REG_Z: return "Z";
1131    default:
1132      output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1133    }
1134  return NULL;
1135}
1136
/* Return the condition name as a string.
   Used when constructing conditional jumps.  */
1139
1140static const char *
1141cond_string (enum rtx_code code)
1142{
1143  switch (code)
1144    {
1145    case NE:
1146      return "ne";
1147    case EQ:
1148      return "eq";
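    /* When the previous comparison left the overflow flag unusable (e.g.
       after a plain test against zero), fall back to the sign flag:
       "pl"/"mi" instead of "ge"/"lt".  */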
1149    case GE:
1150      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1151	return "pl";
1152      else
1153	return "ge";
1154    case LT:
1155      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1156	return "mi";
1157      else
1158	return "lt";
1159    case GEU:
1160      return "sh";
1161    case LTU:
1162      return "lo";
1163    default:
1164      gcc_unreachable ();
1165    }
1166}
1167
1168/* Output ADDR to FILE as address.  */
1169
1170void
1171print_operand_address (FILE *file, rtx addr)
1172{
1173  switch (GET_CODE (addr))
1174    {
1175    case REG:
      fputs (ptrreg_to_str (REGNO (addr)), file);
1177      break;
1178
1179    case PRE_DEC:
1180      fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1181      break;
1182
1183    case POST_INC:
1184      fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1185      break;
1186
1187    default:
1188      if (CONSTANT_ADDRESS_P (addr)
1189	  && text_segment_operand (addr, VOIDmode))
1190	{
1191	  rtx x = XEXP (addr,0);
1192	  if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1193	    {
	      /* The assembler's gs() operator emits a word address.  Make the
		 offset a byte offset inside gs() for the assembler, because
		 the more logical (constant + gs(sym)) is not accepted by gas.
		 For devices with 128K of flash or less this is fine.  For
		 larger devices it will create a trampoline offset from the
		 symbol, which may not be what the user really wanted.  */
1200	      fprintf (file, "gs(");
1201	      output_addr_const (file, XEXP (x,0));
1202	      fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
	      if (AVR_3_BYTE_PC)
		if (warning (0, "pointer offset from symbol may be incorrect"))
		  {
		    output_addr_const (stderr, addr);
		    fprintf (stderr, "\n");
		  }
1209	    }
1210	  else
1211	    {
1212	      fprintf (file, "gs(");
1213	      output_addr_const (file, addr);
1214	      fprintf (file, ")");
1215	    }
1216	}
1217      else
1218	output_addr_const (file, addr);
1219    }
1220}
1221
1222
1223/* Output X as assembler operand to file FILE.  */
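/* The operand code letters handled below are:
   'A'..'D' - select successive bytes of a multi-byte register, or add
	      0..3 to a constant;
   '~'      - print "r" when the device has no JMP/CALL, so that "%~jmp"
	      expands to "rjmp";
   '!'      - print "e" when the device has EIJMP/EICALL;
   'j', 'k' - print the condition of a comparison, or its reverse;
   'm'      - print the constant address of a MEM operand;
   'o'      - print the displacement of a (reg+disp) MEM operand;
   'p', 'r' - print the pointer register of a pre-dec/post-inc MEM operand,
	      either as X/Y/Z or as a plain register name;
   'x'      - print a program-memory (text section) address.  */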
1224
1225void
1226print_operand (FILE *file, rtx x, int code)
1227{
1228  int abcd = 0;
1229
1230  if (code >= 'A' && code <= 'D')
1231    abcd = code - 'A';
1232
1233  if (code == '~')
1234    {
1235      if (!AVR_HAVE_JMP_CALL)
1236	fputc ('r', file);
1237    }
1238  else if (code == '!')
1239    {
1240      if (AVR_HAVE_EIJMP_EICALL)
1241	fputc ('e', file);
1242    }
1243  else if (REG_P (x))
1244    {
1245      if (x == zero_reg_rtx)
1246	fprintf (file, "__zero_reg__");
1247      else
	fputs (reg_names[true_regnum (x) + abcd], file);
1249    }
1250  else if (GET_CODE (x) == CONST_INT)
1251    fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1252  else if (GET_CODE (x) == MEM)
1253    {
1254      rtx addr = XEXP (x,0);
1255      if (code == 'm')
1256	{
	  if (!CONSTANT_P (addr))
	    fatal_insn ("bad address, not a constant:", addr);
	  /* An assembler template with the 'm' code refers to data memory,
	     not the progmem section.  */
	  if (text_segment_operand (addr, VOIDmode))
	    if (warning (0, "accessing data memory with program memory address"))
	      {
		output_addr_const (stderr, addr);
		fprintf (stderr, "\n");
	      }
1266	  output_addr_const (file, addr);
1267	}
1268      else if (code == 'o')
1269	{
1270	  if (GET_CODE (addr) != PLUS)
1271	    fatal_insn ("bad address, not (reg+disp):", addr);
1272
1273	  print_operand (file, XEXP (addr, 1), 0);
1274	}
1275      else if (code == 'p' || code == 'r')
1276        {
1277          if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1278            fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1279
1280          if (code == 'p')
1281            print_operand_address (file, XEXP (addr, 0));  /* X, Y, Z */
1282          else
1283            print_operand (file, XEXP (addr, 0), 0);  /* r26, r28, r30 */
1284        }
1285      else if (GET_CODE (addr) == PLUS)
1286	{
1287	  print_operand_address (file, XEXP (addr,0));
1288	  if (REGNO (XEXP (addr, 0)) == REG_X)
1289	    fatal_insn ("internal compiler error.  Bad address:"
1290			,addr);
1291	  fputc ('+', file);
1292	  print_operand (file, XEXP (addr,1), code);
1293	}
1294      else
1295	print_operand_address (file, addr);
1296    }
  else if (code == 'x')
    {
      /* Constant progmem address - as used in jmp or call.  */
      if (0 == text_segment_operand (x, VOIDmode))
	if (warning (0, "accessing program memory with data memory address"))
	  {
	    output_addr_const (stderr, x);
	    fprintf (stderr, "\n");
	  }
      /* Use a normal symbol for the direct address; no linker trampoline
	 is needed.  */
      output_addr_const (file, x);
    }
1309  else if (GET_CODE (x) == CONST_DOUBLE)
1310    {
1311      long val;
1312      REAL_VALUE_TYPE rv;
1313      if (GET_MODE (x) != SFmode)
1314	fatal_insn ("internal compiler error.  Unknown mode:", x);
1315      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1316      REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1317      fprintf (file, "0x%lx", val);
1318    }
1319  else if (code == 'j')
1320    fputs (cond_string (GET_CODE (x)), file);
1321  else if (code == 'k')
1322    fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1323  else
1324    print_operand_address (file, x);
1325}
1326
1327/* Update the condition code in the INSN.  */
1328
1329void
1330notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1331{
1332  rtx set;
1333
1334  switch (get_attr_cc (insn))
1335    {
1336    case CC_NONE:
1337      /* Insn does not affect CC at all.  */
1338      break;
1339
1340    case CC_SET_N:
1341      CC_STATUS_INIT;
1342      break;
1343
1344    case CC_SET_ZN:
1345      set = single_set (insn);
1346      CC_STATUS_INIT;
1347      if (set)
1348	{
1349	  cc_status.flags |= CC_NO_OVERFLOW;
1350	  cc_status.value1 = SET_DEST (set);
1351	}
1352      break;
1353
1354    case CC_SET_CZN:
1355      /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1356         The V flag may or may not be known but that's ok because
1357         alter_cond will change tests to use EQ/NE.  */
1358      set = single_set (insn);
1359      CC_STATUS_INIT;
1360      if (set)
1361	{
1362	  cc_status.value1 = SET_DEST (set);
1363	  cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1364	}
1365      break;
1366
1367    case CC_COMPARE:
1368      set = single_set (insn);
1369      CC_STATUS_INIT;
1370      if (set)
1371	cc_status.value1 = SET_SRC (set);
1372      break;
1373
1374    case CC_CLOBBER:
1375      /* Insn doesn't leave CC in a usable state.  */
1376      CC_STATUS_INIT;
1377
      /* Correct CC for an ashrqi3 whose shift count is a CONST_INT other
	 than 6.  */
1379      set = single_set (insn);
1380      if (set)
1381	{
1382	  rtx src = SET_SRC (set);
1383
1384	  if (GET_CODE (src) == ASHIFTRT
1385	      && GET_MODE (src) == QImode)
1386	    {
1387	      rtx x = XEXP (src, 1);
1388
1389	      if (GET_CODE (x) == CONST_INT
1390		  && INTVAL (x) > 0
1391		  && INTVAL (x) != 6)
1392		{
1393		  cc_status.value1 = SET_DEST (set);
1394		  cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1395		}
1396	    }
1397	}
1398      break;
1399    }
1400}
1401
1402/* Return maximum number of consecutive registers of
1403   class CLASS needed to hold a value of mode MODE.  */
1404
1405int
1406class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED,enum machine_mode mode)
1407{
1408  return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1409}
1410
/* Choose mode for jump insn:
   1 - relative jump in range -63 <= x <= 62;
   2 - relative jump in range -2046 <= x <= 2045;
   3 - absolute jump (only for devices with the JMP/CALL instructions).  */
1415
1416int
1417avr_jump_mode (rtx x, rtx insn)
1418{
1419  int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1420					    ? XEXP (x, 0) : x));
1421  int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1422  int jump_distance = cur_addr - dest_addr;
1423
1424  if (-63 <= jump_distance && jump_distance <= 62)
1425    return 1;
1426  else if (-2046 <= jump_distance && jump_distance <= 2045)
1427    return 2;
1428  else if (AVR_HAVE_JMP_CALL)
1429    return 3;
1430
1431  return 2;
1432}
1433
/* Return an AVR conditional branch sequence.
   X is a comparison RTX.
   LEN is a number returned by the avr_jump_mode function.
   If REVERSE is nonzero, the condition in X must be reversed.  */
1438
1439const char *
1440ret_cond_branch (rtx x, int len, int reverse)
1441{
1442  RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1443
1444  switch (cond)
1445    {
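    /* GT, GTU, LE and LEU have no direct branch instruction on AVR; they
       are synthesized below from BREQ plus the corresponding signed or
       unsigned branch.  */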
1446    case GT:
1447      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1448	return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1449			    AS1 (brpl,%0)) :
1450		len == 2 ? (AS1 (breq,.+4) CR_TAB
1451			    AS1 (brmi,.+2) CR_TAB
1452			    AS1 (rjmp,%0)) :
1453		(AS1 (breq,.+6) CR_TAB
1454		 AS1 (brmi,.+4) CR_TAB
1455		 AS1 (jmp,%0)));
1456
1457      else
1458	return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1459			    AS1 (brge,%0)) :
1460		len == 2 ? (AS1 (breq,.+4) CR_TAB
1461			    AS1 (brlt,.+2) CR_TAB
1462			    AS1 (rjmp,%0)) :
1463		(AS1 (breq,.+6) CR_TAB
1464		 AS1 (brlt,.+4) CR_TAB
1465		 AS1 (jmp,%0)));
1466    case GTU:
1467      return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1468                          AS1 (brsh,%0)) :
1469              len == 2 ? (AS1 (breq,.+4) CR_TAB
1470                          AS1 (brlo,.+2) CR_TAB
1471                          AS1 (rjmp,%0)) :
1472              (AS1 (breq,.+6) CR_TAB
1473               AS1 (brlo,.+4) CR_TAB
1474               AS1 (jmp,%0)));
1475    case LE:
1476      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1477	return (len == 1 ? (AS1 (breq,%0) CR_TAB
1478			    AS1 (brmi,%0)) :
1479		len == 2 ? (AS1 (breq,.+2) CR_TAB
1480			    AS1 (brpl,.+2) CR_TAB
1481			    AS1 (rjmp,%0)) :
1482		(AS1 (breq,.+2) CR_TAB
1483		 AS1 (brpl,.+4) CR_TAB
1484		 AS1 (jmp,%0)));
1485      else
1486	return (len == 1 ? (AS1 (breq,%0) CR_TAB
1487			    AS1 (brlt,%0)) :
1488		len == 2 ? (AS1 (breq,.+2) CR_TAB
1489			    AS1 (brge,.+2) CR_TAB
1490			    AS1 (rjmp,%0)) :
1491		(AS1 (breq,.+2) CR_TAB
1492		 AS1 (brge,.+4) CR_TAB
1493		 AS1 (jmp,%0)));
1494    case LEU:
1495      return (len == 1 ? (AS1 (breq,%0) CR_TAB
1496                          AS1 (brlo,%0)) :
1497              len == 2 ? (AS1 (breq,.+2) CR_TAB
1498                          AS1 (brsh,.+2) CR_TAB
1499			  AS1 (rjmp,%0)) :
1500              (AS1 (breq,.+2) CR_TAB
1501               AS1 (brsh,.+4) CR_TAB
1502	       AS1 (jmp,%0)));
1503    default:
1504      if (reverse)
1505	{
1506	  switch (len)
1507	    {
1508	    case 1:
1509	      return AS1 (br%k1,%0);
1510	    case 2:
1511	      return (AS1 (br%j1,.+2) CR_TAB
1512		      AS1 (rjmp,%0));
1513	    default:
1514	      return (AS1 (br%j1,.+4) CR_TAB
1515		      AS1 (jmp,%0));
1516	    }
1517	}
1518	else
1519	  {
1520	    switch (len)
1521	      {
1522	      case 1:
1523		return AS1 (br%j1,%0);
1524	      case 2:
1525		return (AS1 (br%k1,.+2) CR_TAB
1526			AS1 (rjmp,%0));
1527	      default:
1528		return (AS1 (br%k1,.+4) CR_TAB
1529			AS1 (jmp,%0));
1530	      }
1531	  }
1532    }
1533  return "";
1534}
1535
/* Predicate function for an immediate operand that fits in a byte (8 bits).  */
1537
1538int
1539byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1540{
1541  return (GET_CODE (op) == CONST_INT
1542          && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1543}
1544
1545/* Output insn cost for next insn.  */
1546
1547void
1548final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1549		    int num_operands ATTRIBUTE_UNUSED)
1550{
1551  if (TARGET_ALL_DEBUG)
1552    {
1553      fprintf (asm_out_file, "/* DEBUG: cost = %d.  */\n",
1554	       rtx_cost (PATTERN (insn), INSN, !optimize_size));
1555    }
1556}
1557
1558/* Return 0 if undefined, 1 if always true or always false.  */
1559
1560int
1561avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1562{
1563  unsigned int max = (mode == QImode ? 0xff :
1564                      mode == HImode ? 0xffff :
1565                      mode == SImode ? 0xffffffff : 0);
1566  if (max && op && GET_CODE (x) == CONST_INT)
1567    {
1568      if (unsigned_condition (op) != op)
1569	max >>= 1;
1570
1571      if (max != (INTVAL (x) & max)
1572	  && INTVAL (x) != 0xff)
1573	return 1;
1574    }
1575  return 0;
1576}
1577
1578
1579/* Returns nonzero if REGNO is the number of a hard
1580   register in which function arguments are sometimes passed.  */
1581
1582int
1583function_arg_regno_p(int r)
1584{
1585  return (r >= 8 && r <= 25);
1586}
1587
/* Initialize the variable CUM to the state at the beginning
   of the argument list.  */
1590
1591void
1592init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1593		      tree fndecl ATTRIBUTE_UNUSED)
1594{
1595  cum->nregs = 18;
1596  cum->regno = FIRST_CUM_REG;
1597  if (!libname && fntype)
1598    {
1599      int stdarg = (TYPE_ARG_TYPES (fntype) != 0
1600                    && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1601                        != void_type_node));
1602      if (stdarg)
1603        cum->nregs = 0;
1604    }
1605}
1606
1607/* Returns the number of registers to allocate for a function argument.  */
1608
1609static int
1610avr_num_arg_regs (enum machine_mode mode, tree type)
1611{
1612  int size;
1613
1614  if (mode == BLKmode)
1615    size = int_size_in_bytes (type);
1616  else
1617    size = GET_MODE_SIZE (mode);
1618
1619  /* Align all function arguments to start in even-numbered registers.
1620     Odd-sized arguments leave holes above them.  */
1621
1622  return (size + 1) & ~1;
1623}
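
/* For illustration: with the scheme above, a call such as f (char a, long b)
   passes 'a' in r24 (two registers, r25:r24, are allocated for the odd-sized
   char) and 'b' in r23:r20.  */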
1624
1625/* Controls whether a function argument is passed
1626   in a register, and which register.  */
1627
1628rtx
1629function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1630	      int named ATTRIBUTE_UNUSED)
1631{
1632  int bytes = avr_num_arg_regs (mode, type);
1633
1634  if (cum->nregs && bytes <= cum->nregs)
1635    return gen_rtx_REG (mode, cum->regno - bytes);
1636
1637  return NULL_RTX;
1638}
1639
1640/* Update the summarizer variable CUM to advance past an argument
1641   in the argument list.  */
1642
1643void
1644function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1645		      int named ATTRIBUTE_UNUSED)
1646{
1647  int bytes = avr_num_arg_regs (mode, type);
1648
1649  cum->nregs -= bytes;
1650  cum->regno -= bytes;
1651
1652  if (cum->nregs <= 0)
1653    {
1654      cum->nregs = 0;
1655      cum->regno = FIRST_CUM_REG;
1656    }
1657}
1658
1659/***********************************************************************
  Functions for outputting various mov's for various modes
1661************************************************************************/
1662const char *
1663output_movqi (rtx insn, rtx operands[], int *l)
1664{
1665  int dummy;
1666  rtx dest = operands[0];
1667  rtx src = operands[1];
1668  int *real_l = l;
1669
1670  if (!l)
1671    l = &dummy;
1672
1673  *l = 1;
1674
1675  if (register_operand (dest, QImode))
1676    {
1677      if (register_operand (src, QImode)) /* mov r,r */
1678	{
1679	  if (test_hard_reg_class (STACK_REG, dest))
1680	    return AS2 (out,%0,%1);
1681	  else if (test_hard_reg_class (STACK_REG, src))
1682	    return AS2 (in,%0,%1);
1683
1684	  return AS2 (mov,%0,%1);
1685	}
1686      else if (CONSTANT_P (src))
1687	{
1688	  if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1689	    return AS2 (ldi,%0,lo8(%1));
1690
1691	  if (GET_CODE (src) == CONST_INT)
1692	    {
1693	      if (src == const0_rtx) /* mov r,L */
1694		return AS1 (clr,%0);
1695	      else if (src == const1_rtx)
1696		{
1697		  *l = 2;
1698		  return (AS1 (clr,%0) CR_TAB
1699			  AS1 (inc,%0));
1700		}
1701	      else if (src == constm1_rtx)
1702		{
		  /* Load the immediate constant -1 into any register.  */
1704		  *l = 2;
1705		  return (AS1 (clr,%0) CR_TAB
1706			  AS1 (dec,%0));
1707		}
1708	      else
1709		{
1710		  int bit_nr = exact_log2 (INTVAL (src));
1711
1712		  if (bit_nr >= 0)
1713		    {
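		      /* A power-of-two constant: clear the register, set
			 the T flag and copy it into the single one-bit
			 with BLD.  */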
1714		      *l = 3;
1715		      if (!real_l)
1716			output_asm_insn ((AS1 (clr,%0) CR_TAB
1717					  "set"), operands);
1718		      if (!real_l)
1719			avr_output_bld (operands, bit_nr);
1720
1721		      return "";
1722		    }
1723		}
1724	    }
1725
1726	  /* Last resort, larger than loading from memory.  */
1727	  *l = 4;
1728	  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1729		  AS2 (ldi,r31,lo8(%1))     CR_TAB
1730		  AS2 (mov,%0,r31)          CR_TAB
1731		  AS2 (mov,r31,__tmp_reg__));
1732	}
1733      else if (GET_CODE (src) == MEM)
1734	return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1735    }
1736  else if (GET_CODE (dest) == MEM)
1737    {
1738      const char *templ;
1739
1740      if (src == const0_rtx)
1741	operands[1] = zero_reg_rtx;
1742
1743      templ = out_movqi_mr_r (insn, operands, real_l);
1744
1745      if (!real_l)
1746	output_asm_insn (templ, operands);
1747
1748      operands[1] = src;
1749    }
1750  return "";
1751}
1752
1753
1754const char *
1755output_movhi (rtx insn, rtx operands[], int *l)
1756{
1757  int dummy;
1758  rtx dest = operands[0];
1759  rtx src = operands[1];
1760  int *real_l = l;
1761
1762  if (!l)
1763    l = &dummy;
1764
1765  if (register_operand (dest, HImode))
1766    {
1767      if (register_operand (src, HImode)) /* mov r,r */
1768	{
1769	  if (test_hard_reg_class (STACK_REG, dest))
1770	    {
1771	      if (AVR_HAVE_8BIT_SP)
1772		return *l = 1, AS2 (out,__SP_L__,%A1);
1773              /* Use simple load of stack pointer if no interrupts are
1774		 used.  */
1775	      else if (TARGET_NO_INTERRUPTS)
1776		return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1777				AS2 (out,__SP_L__,%A1));
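	      /* Otherwise briefly disable interrupts while the 16-bit stack
		 pointer is written: save SREG, CLI, write SPH, restore SREG
		 (re-enabling interrupts if they were enabled), then write
		 SPL.  */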
1778	      *l = 5;
1779	      return (AS2 (in,__tmp_reg__,__SREG__)  CR_TAB
1780		      "cli"                          CR_TAB
1781		      AS2 (out,__SP_H__,%B1)         CR_TAB
1782		      AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1783		      AS2 (out,__SP_L__,%A1));
1784	    }
1785	  else if (test_hard_reg_class (STACK_REG, src))
1786	    {
1787	      *l = 2;
1788	      return (AS2 (in,%A0,__SP_L__) CR_TAB
1789		      AS2 (in,%B0,__SP_H__));
1790	    }
1791
1792	  if (AVR_HAVE_MOVW)
1793	    {
1794	      *l = 1;
1795	      return (AS2 (movw,%0,%1));
1796	    }
1797	  else
1798	    {
1799	      *l = 2;
1800	      return (AS2 (mov,%A0,%A1) CR_TAB
1801		      AS2 (mov,%B0,%B1));
1802	    }
1803	}
1804      else if (CONSTANT_P (src))
1805	{
1806	  if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1807	    {
1808	      *l = 2;
1809	      return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1810		      AS2 (ldi,%B0,hi8(%1)));
1811	    }
1812
1813	  if (GET_CODE (src) == CONST_INT)
1814	    {
1815	      if (src == const0_rtx) /* mov r,L */
1816		{
1817		  *l = 2;
1818		  return (AS1 (clr,%A0) CR_TAB
1819			  AS1 (clr,%B0));
1820		}
1821	      else if (src == const1_rtx)
1822		{
1823		  *l = 3;
1824		  return (AS1 (clr,%A0) CR_TAB
1825			  AS1 (clr,%B0) CR_TAB
1826			  AS1 (inc,%A0));
1827		}
1828	      else if (src == constm1_rtx)
1829		{
		  /* Load the immediate constant -1 into any register.  */
1831		  *l = 3;
1832		  return (AS1 (clr,%0)  CR_TAB
1833			  AS1 (dec,%A0) CR_TAB
1834			  AS2 (mov,%B0,%A0));
1835		}
1836	      else
1837		{
1838		  int bit_nr = exact_log2 (INTVAL (src));
1839
1840		  if (bit_nr >= 0)
1841		    {
1842		      *l = 4;
1843		      if (!real_l)
1844			output_asm_insn ((AS1 (clr,%A0) CR_TAB
1845					  AS1 (clr,%B0) CR_TAB
1846					  "set"), operands);
1847		      if (!real_l)
1848			avr_output_bld (operands, bit_nr);
1849
1850		      return "";
1851		    }
1852		}
1853
1854	      if ((INTVAL (src) & 0xff) == 0)
1855		{
1856		  *l = 5;
1857		  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1858			  AS1 (clr,%A0)             CR_TAB
1859			  AS2 (ldi,r31,hi8(%1))     CR_TAB
1860			  AS2 (mov,%B0,r31)         CR_TAB
1861			  AS2 (mov,r31,__tmp_reg__));
1862		}
1863	      else if ((INTVAL (src) & 0xff00) == 0)
1864		{
1865		  *l = 5;
1866		  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1867			  AS2 (ldi,r31,lo8(%1))     CR_TAB
1868			  AS2 (mov,%A0,r31)         CR_TAB
1869			  AS1 (clr,%B0)             CR_TAB
1870			  AS2 (mov,r31,__tmp_reg__));
1871		}
1872	    }
1873
1874	  /* Last resort, equal to loading from memory.  */
1875	  *l = 6;
1876	  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1877		  AS2 (ldi,r31,lo8(%1))     CR_TAB
1878		  AS2 (mov,%A0,r31)         CR_TAB
1879		  AS2 (ldi,r31,hi8(%1))     CR_TAB
1880		  AS2 (mov,%B0,r31)         CR_TAB
1881		  AS2 (mov,r31,__tmp_reg__));
1882	}
1883      else if (GET_CODE (src) == MEM)
1884	return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1885    }
1886  else if (GET_CODE (dest) == MEM)
1887    {
1888      const char *templ;
1889
1890      if (src == const0_rtx)
1891	operands[1] = zero_reg_rtx;
1892
1893      templ = out_movhi_mr_r (insn, operands, real_l);
1894
1895      if (!real_l)
1896	output_asm_insn (templ, operands);
1897
1898      operands[1] = src;
1899      return "";
1900    }
1901  fatal_insn ("invalid insn:", insn);
1902  return "";
1903}
1904
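/* Output asm code to load the QImode value at memory operand OP[1] into
   register operand OP[0].  If L is not NULL, set *L to the length of the
   output in words.  Return the assembler template.  */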
1905const char *
1906out_movqi_r_mr (rtx insn, rtx op[], int *l)
1907{
1908  rtx dest = op[0];
1909  rtx src = op[1];
1910  rtx x = XEXP (src, 0);
1911  int dummy;
1912
1913  if (!l)
1914    l = &dummy;
1915
1916  if (CONSTANT_ADDRESS_P (x))
1917    {
1918      if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
1919	{
1920	  *l = 1;
1921	  return AS2 (in,%0,__SREG__);
1922	}
1923      if (optimize > 0 && io_address_operand (x, QImode))
1924	{
1925	  *l = 1;
1926	  return AS2 (in,%0,%m1-0x20);
1927	}
1928      *l = 2;
1929      return AS2 (lds,%0,%m1);
1930    }
1931  /* memory access by reg+disp */
1932  else if (GET_CODE (x) == PLUS
1933      && REG_P (XEXP (x,0))
1934      && GET_CODE (XEXP (x,1)) == CONST_INT)
1935    {
1936      if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1937	{
1938	  int disp = INTVAL (XEXP (x,1));
1939	  if (REGNO (XEXP (x,0)) != REG_Y)
1940	    fatal_insn ("incorrect insn:",insn);
1941
1942	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1943	    return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1944			    AS2 (ldd,%0,Y+63)     CR_TAB
1945			    AS2 (sbiw,r28,%o1-63));
1946
1947	  return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1948			  AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1949			  AS2 (ld,%0,Y)            CR_TAB
1950			  AS2 (subi,r28,lo8(%o1))  CR_TAB
1951			  AS2 (sbci,r29,hi8(%o1)));
1952	}
1953      else if (REGNO (XEXP (x,0)) == REG_X)
1954	{
1955	  /* This is a paranoid case.  LEGITIMIZE_RELOAD_ADDRESS must exclude
1956	     it, but I have seen this situation with extreme optimization options.  */
1957	  if (reg_overlap_mentioned_p (dest, XEXP (x,0))
1958	      || reg_unused_after (insn, XEXP (x,0)))
1959	    return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
1960			    AS2 (ld,%0,X));
1961
1962	  return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
1963			  AS2 (ld,%0,X)      CR_TAB
1964			  AS2 (sbiw,r26,%o1));
1965	}
1966      *l = 1;
1967      return AS2 (ldd,%0,%1);
1968    }
1969  *l = 1;
1970  return AS2 (ld,%0,%1);
1971}
1972
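/* Output asm code to load the HImode value at memory operand OP[1] into
   register operand OP[0].  If L is not NULL, set *L to the length of the
   output in words.  */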
1973const char *
1974out_movhi_r_mr (rtx insn, rtx op[], int *l)
1975{
1976  rtx dest = op[0];
1977  rtx src = op[1];
1978  rtx base = XEXP (src, 0);
1979  int reg_dest = true_regnum (dest);
1980  int reg_base = true_regnum (base);
1981  /* "volatile" forces reading low byte first, even if less efficient,
1982     for correct operation with 16-bit I/O registers.  */
1983  int mem_volatile_p = MEM_VOLATILE_P (src);
1984  int tmp;
1985
1986  if (!l)
1987    l = &tmp;
1988
1989  if (reg_base > 0)
1990    {
1991      if (reg_dest == reg_base)         /* R = (R) */
1992	{
1993	  *l = 3;
1994	  return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
1995		  AS2 (ld,%B0,%1) CR_TAB
1996		  AS2 (mov,%A0,__tmp_reg__));
1997	}
1998      else if (reg_base == REG_X)        /* (R26) */
1999        {
2000          if (reg_unused_after (insn, base))
2001	    {
2002	      *l = 2;
2003	      return (AS2 (ld,%A0,X+) CR_TAB
2004		      AS2 (ld,%B0,X));
2005	    }
2006	  *l  = 3;
2007	  return (AS2 (ld,%A0,X+) CR_TAB
2008		  AS2 (ld,%B0,X) CR_TAB
2009		  AS2 (sbiw,r26,1));
2010        }
2011      else                      /* (R)  */
2012	{
2013	  *l = 2;
2014	  return (AS2 (ld,%A0,%1)    CR_TAB
2015		  AS2 (ldd,%B0,%1+1));
2016	}
2017    }
2018  else if (GET_CODE (base) == PLUS) /* (R + i) */
2019    {
2020      int disp = INTVAL (XEXP (base, 1));
2021      int reg_base = true_regnum (XEXP (base, 0));
2022
2023      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2024	{
2025	  if (REGNO (XEXP (base, 0)) != REG_Y)
2026	    fatal_insn ("incorrect insn:",insn);
2027
2028	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2029	    return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2030			    AS2 (ldd,%A0,Y+62)    CR_TAB
2031			    AS2 (ldd,%B0,Y+63)    CR_TAB
2032			    AS2 (sbiw,r28,%o1-62));
2033
2034	  return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2035			  AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2036			  AS2 (ld,%A0,Y)           CR_TAB
2037			  AS2 (ldd,%B0,Y+1)        CR_TAB
2038			  AS2 (subi,r28,lo8(%o1))  CR_TAB
2039			  AS2 (sbci,r29,hi8(%o1)));
2040	}
2041      if (reg_base == REG_X)
2042	{
2043	  /* This is a paranoid case.  LEGITIMIZE_RELOAD_ADDRESS must exclude
2044	     it, but I have seen this situation with extreme
2045	     optimization options.  */
2046
2047	  *l = 4;
2048	  if (reg_base == reg_dest)
2049	    return (AS2 (adiw,r26,%o1)      CR_TAB
2050		    AS2 (ld,__tmp_reg__,X+) CR_TAB
2051		    AS2 (ld,%B0,X)          CR_TAB
2052		    AS2 (mov,%A0,__tmp_reg__));
2053
2054	  return (AS2 (adiw,r26,%o1) CR_TAB
2055		  AS2 (ld,%A0,X+)    CR_TAB
2056		  AS2 (ld,%B0,X)     CR_TAB
2057		  AS2 (sbiw,r26,%o1+1));
2058	}
2059
2060      if (reg_base == reg_dest)
2061	{
2062	  *l = 3;
2063	  return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2064		  AS2 (ldd,%B0,%B1)         CR_TAB
2065		  AS2 (mov,%A0,__tmp_reg__));
2066	}
2067
2068      *l = 2;
2069      return (AS2 (ldd,%A0,%A1) CR_TAB
2070	      AS2 (ldd,%B0,%B1));
2071    }
2072  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2073    {
2074      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2075	fatal_insn ("incorrect insn:", insn);
2076
2077      if (mem_volatile_p)
2078        {
2079          if (REGNO (XEXP (base, 0)) == REG_X)
2080            {
2081              *l = 4;
2082              return (AS2 (sbiw,r26,2)  CR_TAB
2083                      AS2 (ld,%A0,X+)   CR_TAB
2084                      AS2 (ld,%B0,X)    CR_TAB
2085                      AS2 (sbiw,r26,1));
2086            }
2087          else
2088            {
2089              *l = 3;
2090              return (AS2 (sbiw,%r1,2)   CR_TAB
2091                      AS2 (ld,%A0,%p1)  CR_TAB
2092                      AS2 (ldd,%B0,%p1+1));
2093            }
2094        }
2095
2096      *l = 2;
2097      return (AS2 (ld,%B0,%1) CR_TAB
2098	      AS2 (ld,%A0,%1));
2099    }
2100  else if (GET_CODE (base) == POST_INC) /* (R++) */
2101    {
2102      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2103	fatal_insn ("incorrect insn:", insn);
2104
2105      *l = 2;
2106      return (AS2 (ld,%A0,%1)  CR_TAB
2107	      AS2 (ld,%B0,%1));
2108    }
2109  else if (CONSTANT_ADDRESS_P (base))
2110    {
2111      if (optimize > 0 && io_address_operand (base, HImode))
2112	{
2113	  *l = 2;
2114	  return (AS2 (in,%A0,%m1-0x20) CR_TAB
2115		  AS2 (in,%B0,%m1+1-0x20));
2116	}
2117      *l = 4;
2118      return (AS2 (lds,%A0,%m1) CR_TAB
2119	      AS2 (lds,%B0,%m1+1));
2120    }
2121
2122  fatal_insn ("unknown move insn:",insn);
2123  return "";
2124}
2125
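/* Output asm code to load the SImode value at memory operand OP[1] into
   register operand OP[0].  If L is not NULL, set *L to the length of the
   output in words.  */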
2126const char *
2127out_movsi_r_mr (rtx insn, rtx op[], int *l)
2128{
2129  rtx dest = op[0];
2130  rtx src = op[1];
2131  rtx base = XEXP (src, 0);
2132  int reg_dest = true_regnum (dest);
2133  int reg_base = true_regnum (base);
2134  int tmp;
2135
2136  if (!l)
2137    l = &tmp;
2138
2139  if (reg_base > 0)
2140    {
2141      if (reg_base == REG_X)        /* (R26) */
2142        {
2143          if (reg_dest == REG_X)
2144	    /* "ld r26,-X" is undefined */
2145	    return *l=7, (AS2 (adiw,r26,3)        CR_TAB
2146			  AS2 (ld,r29,X)          CR_TAB
2147			  AS2 (ld,r28,-X)         CR_TAB
2148			  AS2 (ld,__tmp_reg__,-X) CR_TAB
2149			  AS2 (sbiw,r26,1)        CR_TAB
2150			  AS2 (ld,r26,X)          CR_TAB
2151			  AS2 (mov,r27,__tmp_reg__));
2152          else if (reg_dest == REG_X - 2)
2153            return *l=5, (AS2 (ld,%A0,X+)  CR_TAB
2154                          AS2 (ld,%B0,X+) CR_TAB
2155                          AS2 (ld,__tmp_reg__,X+)  CR_TAB
2156                          AS2 (ld,%D0,X)  CR_TAB
2157                          AS2 (mov,%C0,__tmp_reg__));
2158          else if (reg_unused_after (insn, base))
2159            return  *l=4, (AS2 (ld,%A0,X+)  CR_TAB
2160                           AS2 (ld,%B0,X+) CR_TAB
2161                           AS2 (ld,%C0,X+) CR_TAB
2162                           AS2 (ld,%D0,X));
2163          else
2164            return  *l=5, (AS2 (ld,%A0,X+)  CR_TAB
2165                           AS2 (ld,%B0,X+) CR_TAB
2166                           AS2 (ld,%C0,X+) CR_TAB
2167                           AS2 (ld,%D0,X)  CR_TAB
2168                           AS2 (sbiw,r26,3));
2169        }
2170      else
2171        {
2172          if (reg_dest == reg_base)
2173            return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2174                          AS2 (ldd,%C0,%1+2) CR_TAB
2175                          AS2 (ldd,__tmp_reg__,%1+1)  CR_TAB
2176                          AS2 (ld,%A0,%1)  CR_TAB
2177                          AS2 (mov,%B0,__tmp_reg__));
2178          else if (reg_base == reg_dest + 2)
2179            return *l=5, (AS2 (ld ,%A0,%1)    CR_TAB
2180                          AS2 (ldd,%B0,%1+1) CR_TAB
2181                          AS2 (ldd,__tmp_reg__,%1+2)  CR_TAB
2182                          AS2 (ldd,%D0,%1+3) CR_TAB
2183                          AS2 (mov,%C0,__tmp_reg__));
2184          else
2185            return *l=4, (AS2 (ld ,%A0,%1)   CR_TAB
2186                          AS2 (ldd,%B0,%1+1) CR_TAB
2187                          AS2 (ldd,%C0,%1+2) CR_TAB
2188                          AS2 (ldd,%D0,%1+3));
2189        }
2190    }
2191  else if (GET_CODE (base) == PLUS) /* (R + i) */
2192    {
2193      int disp = INTVAL (XEXP (base, 1));
2194
2195      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2196	{
2197	  if (REGNO (XEXP (base, 0)) != REG_Y)
2198	    fatal_insn ("incorrect insn:",insn);
2199
2200	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2201	    return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2202			    AS2 (ldd,%A0,Y+60)    CR_TAB
2203			    AS2 (ldd,%B0,Y+61)    CR_TAB
2204			    AS2 (ldd,%C0,Y+62)    CR_TAB
2205			    AS2 (ldd,%D0,Y+63)    CR_TAB
2206			    AS2 (sbiw,r28,%o1-60));
2207
2208	  return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2209			  AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2210			  AS2 (ld,%A0,Y)           CR_TAB
2211			  AS2 (ldd,%B0,Y+1)        CR_TAB
2212			  AS2 (ldd,%C0,Y+2)        CR_TAB
2213			  AS2 (ldd,%D0,Y+3)        CR_TAB
2214			  AS2 (subi,r28,lo8(%o1))  CR_TAB
2215			  AS2 (sbci,r29,hi8(%o1)));
2216	}
2217
2218      reg_base = true_regnum (XEXP (base, 0));
2219      if (reg_base == REG_X)
2220	{
2221	  /* R = (X + d) */
2222	  if (reg_dest == REG_X)
2223	    {
2224	      *l = 7;
2225	      /* "ld r26,-X" is undefined */
2226	      return (AS2 (adiw,r26,%o1+3)    CR_TAB
2227		      AS2 (ld,r29,X)          CR_TAB
2228		      AS2 (ld,r28,-X)         CR_TAB
2229		      AS2 (ld,__tmp_reg__,-X) CR_TAB
2230		      AS2 (sbiw,r26,1)        CR_TAB
2231		      AS2 (ld,r26,X)          CR_TAB
2232		      AS2 (mov,r27,__tmp_reg__));
2233	    }
2234	  *l = 6;
2235	  if (reg_dest == REG_X - 2)
2236	    return (AS2 (adiw,r26,%o1)      CR_TAB
2237		    AS2 (ld,r24,X+)         CR_TAB
2238		    AS2 (ld,r25,X+)         CR_TAB
2239		    AS2 (ld,__tmp_reg__,X+) CR_TAB
2240		    AS2 (ld,r27,X)          CR_TAB
2241		    AS2 (mov,r26,__tmp_reg__));
2242
2243	  return (AS2 (adiw,r26,%o1) CR_TAB
2244		  AS2 (ld,%A0,X+)    CR_TAB
2245		  AS2 (ld,%B0,X+)    CR_TAB
2246		  AS2 (ld,%C0,X+)    CR_TAB
2247		  AS2 (ld,%D0,X)     CR_TAB
2248		  AS2 (sbiw,r26,%o1+3));
2249	}
2250      if (reg_dest == reg_base)
2251        return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2252                      AS2 (ldd,%C0,%C1) CR_TAB
2253                      AS2 (ldd,__tmp_reg__,%B1)  CR_TAB
2254                      AS2 (ldd,%A0,%A1) CR_TAB
2255                      AS2 (mov,%B0,__tmp_reg__));
2256      else if (reg_dest == reg_base - 2)
2257        return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2258                      AS2 (ldd,%B0,%B1) CR_TAB
2259                      AS2 (ldd,__tmp_reg__,%C1)  CR_TAB
2260                      AS2 (ldd,%D0,%D1) CR_TAB
2261                      AS2 (mov,%C0,__tmp_reg__));
2262      return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2263                    AS2 (ldd,%B0,%B1) CR_TAB
2264                    AS2 (ldd,%C0,%C1) CR_TAB
2265                    AS2 (ldd,%D0,%D1));
2266    }
2267  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2268    return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2269		  AS2 (ld,%C0,%1) CR_TAB
2270		  AS2 (ld,%B0,%1) CR_TAB
2271		  AS2 (ld,%A0,%1));
2272  else if (GET_CODE (base) == POST_INC) /* (R++) */
2273    return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2274		  AS2 (ld,%B0,%1) CR_TAB
2275		  AS2 (ld,%C0,%1) CR_TAB
2276		  AS2 (ld,%D0,%1));
2277  else if (CONSTANT_ADDRESS_P (base))
2278      return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2279		    AS2 (lds,%B0,%m1+1) CR_TAB
2280		    AS2 (lds,%C0,%m1+2) CR_TAB
2281		    AS2 (lds,%D0,%m1+3));
2282
2283  fatal_insn ("unknown move insn:",insn);
2284  return "";
2285}
2286
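/* Output asm code to store the SImode value in register operand OP[1] to
   memory operand OP[0].  If L is not NULL, set *L to the length of the
   output in words.  */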
2287const char *
2288out_movsi_mr_r (rtx insn, rtx op[], int *l)
2289{
2290  rtx dest = op[0];
2291  rtx src = op[1];
2292  rtx base = XEXP (dest, 0);
2293  int reg_base = true_regnum (base);
2294  int reg_src = true_regnum (src);
2295  int tmp;
2296
2297  if (!l)
2298    l = &tmp;
2299
2300  if (CONSTANT_ADDRESS_P (base))
2301    return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2302		 AS2 (sts,%m0+1,%B1) CR_TAB
2303		 AS2 (sts,%m0+2,%C1) CR_TAB
2304		 AS2 (sts,%m0+3,%D1));
2305  if (reg_base > 0)                 /* (r) */
2306    {
2307      if (reg_base == REG_X)                /* (R26) */
2308        {
2309          if (reg_src == REG_X)
2310            {
2311	      /* "st X+,r26" is undefined */
2312              if (reg_unused_after (insn, base))
2313		return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2314			      AS2 (st,X,r26)            CR_TAB
2315			      AS2 (adiw,r26,1)          CR_TAB
2316			      AS2 (st,X+,__tmp_reg__)   CR_TAB
2317			      AS2 (st,X+,r28)           CR_TAB
2318			      AS2 (st,X,r29));
2319              else
2320                return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2321			      AS2 (st,X,r26)            CR_TAB
2322			      AS2 (adiw,r26,1)          CR_TAB
2323			      AS2 (st,X+,__tmp_reg__)   CR_TAB
2324			      AS2 (st,X+,r28)           CR_TAB
2325			      AS2 (st,X,r29)            CR_TAB
2326			      AS2 (sbiw,r26,3));
2327            }
2328          else if (reg_base == reg_src + 2)
2329            {
2330              if (reg_unused_after (insn, base))
2331                return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2332                              AS2 (mov,__tmp_reg__,%D1) CR_TAB
2333                              AS2 (st,%0+,%A1) CR_TAB
2334                              AS2 (st,%0+,%B1) CR_TAB
2335                              AS2 (st,%0+,__zero_reg__)  CR_TAB
2336                              AS2 (st,%0,__tmp_reg__)   CR_TAB
2337                              AS1 (clr,__zero_reg__));
2338              else
2339                return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2340                              AS2 (mov,__tmp_reg__,%D1) CR_TAB
2341                              AS2 (st,%0+,%A1) CR_TAB
2342                              AS2 (st,%0+,%B1) CR_TAB
2343                              AS2 (st,%0+,__zero_reg__)  CR_TAB
2344                              AS2 (st,%0,__tmp_reg__)   CR_TAB
2345                              AS1 (clr,__zero_reg__)     CR_TAB
2346                              AS2 (sbiw,r26,3));
2347            }
2348          return *l=5, (AS2 (st,%0+,%A1)  CR_TAB
2349                        AS2 (st,%0+,%B1) CR_TAB
2350                        AS2 (st,%0+,%C1) CR_TAB
2351                        AS2 (st,%0,%D1)  CR_TAB
2352                        AS2 (sbiw,r26,3));
2353        }
2354      else
2355        return *l=4, (AS2 (st,%0,%A1)    CR_TAB
2356		      AS2 (std,%0+1,%B1) CR_TAB
2357		      AS2 (std,%0+2,%C1) CR_TAB
2358		      AS2 (std,%0+3,%D1));
2359    }
2360  else if (GET_CODE (base) == PLUS) /* (R + i) */
2361    {
2362      int disp = INTVAL (XEXP (base, 1));
2363      reg_base = REGNO (XEXP (base, 0));
2364      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2365	{
2366	  if (reg_base != REG_Y)
2367	    fatal_insn ("incorrect insn:",insn);
2368
2369	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2370	    return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2371			    AS2 (std,Y+60,%A1)    CR_TAB
2372			    AS2 (std,Y+61,%B1)    CR_TAB
2373			    AS2 (std,Y+62,%C1)    CR_TAB
2374			    AS2 (std,Y+63,%D1)    CR_TAB
2375			    AS2 (sbiw,r28,%o0-60));
2376
2377	  return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2378			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2379			  AS2 (st,Y,%A1)           CR_TAB
2380			  AS2 (std,Y+1,%B1)        CR_TAB
2381			  AS2 (std,Y+2,%C1)        CR_TAB
2382			  AS2 (std,Y+3,%D1)        CR_TAB
2383			  AS2 (subi,r28,lo8(%o0))  CR_TAB
2384			  AS2 (sbci,r29,hi8(%o0)));
2385	}
2386      if (reg_base == REG_X)
2387	{
2388	  /* (X + d) = R */
2389	  if (reg_src == REG_X)
2390	    {
2391	      *l = 9;
2392	      return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
2393		      AS2 (mov,__zero_reg__,r27) CR_TAB
2394		      AS2 (adiw,r26,%o0)         CR_TAB
2395		      AS2 (st,X+,__tmp_reg__)    CR_TAB
2396		      AS2 (st,X+,__zero_reg__)   CR_TAB
2397		      AS2 (st,X+,r28)            CR_TAB
2398		      AS2 (st,X,r29)             CR_TAB
2399		      AS1 (clr,__zero_reg__)     CR_TAB
2400		      AS2 (sbiw,r26,%o0+3));
2401	    }
2402	  else if (reg_src == REG_X - 2)
2403	    {
2404	      *l = 9;
2405	      return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
2406		      AS2 (mov,__zero_reg__,r27) CR_TAB
2407		      AS2 (adiw,r26,%o0)         CR_TAB
2408		      AS2 (st,X+,r24)            CR_TAB
2409		      AS2 (st,X+,r25)            CR_TAB
2410		      AS2 (st,X+,__tmp_reg__)    CR_TAB
2411		      AS2 (st,X,__zero_reg__)    CR_TAB
2412		      AS1 (clr,__zero_reg__)     CR_TAB
2413		      AS2 (sbiw,r26,%o0+3));
2414	    }
2415	  *l = 6;
2416	  return (AS2 (adiw,r26,%o0) CR_TAB
2417		  AS2 (st,X+,%A1)    CR_TAB
2418		  AS2 (st,X+,%B1)    CR_TAB
2419		  AS2 (st,X+,%C1)    CR_TAB
2420		  AS2 (st,X,%D1)     CR_TAB
2421		  AS2 (sbiw,r26,%o0+3));
2422	}
2423      return *l=4, (AS2 (std,%A0,%A1)    CR_TAB
2424		    AS2 (std,%B0,%B1) CR_TAB
2425		    AS2 (std,%C0,%C1) CR_TAB
2426		    AS2 (std,%D0,%D1));
2427    }
2428  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2429    return *l=4, (AS2 (st,%0,%D1) CR_TAB
2430		  AS2 (st,%0,%C1) CR_TAB
2431		  AS2 (st,%0,%B1) CR_TAB
2432		  AS2 (st,%0,%A1));
2433  else if (GET_CODE (base) == POST_INC) /* (R++) */
2434    return *l=4, (AS2 (st,%0,%A1)  CR_TAB
2435		  AS2 (st,%0,%B1) CR_TAB
2436		  AS2 (st,%0,%C1) CR_TAB
2437		  AS2 (st,%0,%D1));
2438  fatal_insn ("unknown move insn:",insn);
2439  return "";
2440}
2441
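/* Output asm code for a 4-byte (SImode or SFmode) move from OPERANDS[1]
   to OPERANDS[0].  If L is not NULL, set *L to the length of the output
   in words.  */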
2442const char *
2443output_movsisf (rtx insn, rtx operands[], int *l)
2444{
2445  int dummy;
2446  rtx dest = operands[0];
2447  rtx src = operands[1];
2448  int *real_l = l;
2449
2450  if (!l)
2451    l = &dummy;
2452
2453  if (register_operand (dest, VOIDmode))
2454    {
2455      if (register_operand (src, VOIDmode)) /* mov r,r */
2456	{
2457	  if (true_regnum (dest) > true_regnum (src))
2458	    {
2459	      if (AVR_HAVE_MOVW)
2460		{
2461		  *l = 2;
2462		  return (AS2 (movw,%C0,%C1) CR_TAB
2463			  AS2 (movw,%A0,%A1));
2464		}
2465	      *l = 4;
2466	      return (AS2 (mov,%D0,%D1) CR_TAB
2467		      AS2 (mov,%C0,%C1) CR_TAB
2468		      AS2 (mov,%B0,%B1) CR_TAB
2469		      AS2 (mov,%A0,%A1));
2470	    }
2471	  else
2472	    {
2473	      if (AVR_HAVE_MOVW)
2474		{
2475		  *l = 2;
2476		  return (AS2 (movw,%A0,%A1) CR_TAB
2477			  AS2 (movw,%C0,%C1));
2478		}
2479	      *l = 4;
2480	      return (AS2 (mov,%A0,%A1) CR_TAB
2481		      AS2 (mov,%B0,%B1) CR_TAB
2482		      AS2 (mov,%C0,%C1) CR_TAB
2483		      AS2 (mov,%D0,%D1));
2484	    }
2485	}
2486      else if (CONSTANT_P (src))
2487	{
2488	  if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2489	    {
2490	      *l = 4;
2491	      return (AS2 (ldi,%A0,lo8(%1))  CR_TAB
2492		      AS2 (ldi,%B0,hi8(%1))  CR_TAB
2493		      AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2494		      AS2 (ldi,%D0,hhi8(%1)));
2495	    }
2496
2497	  if (GET_CODE (src) == CONST_INT)
2498	    {
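	      /* Asm sequence that clears all four destination bytes;
		 with MOVW it needs one fewer instruction.  */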
2499	      const char *const clr_op0 =
2500		AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2501				AS1 (clr,%B0) CR_TAB
2502				AS2 (movw,%C0,%A0))
2503			     : (AS1 (clr,%A0) CR_TAB
2504				AS1 (clr,%B0) CR_TAB
2505				AS1 (clr,%C0) CR_TAB
2506				AS1 (clr,%D0));
2507
2508	      if (src == const0_rtx) /* mov r,L */
2509		{
2510		  *l = AVR_HAVE_MOVW ? 3 : 4;
2511		  return clr_op0;
2512		}
2513	      else if (src == const1_rtx)
2514		{
2515		  if (!real_l)
2516		    output_asm_insn (clr_op0, operands);
2517		  *l = AVR_HAVE_MOVW ? 4 : 5;
2518		  return AS1 (inc,%A0);
2519		}
2520	      else if (src == constm1_rtx)
2521		{
2522		  /* Immediate constant -1 to any register.  */
2523		  if (AVR_HAVE_MOVW)
2524		    {
2525		      *l = 4;
2526		      return (AS1 (clr,%A0)     CR_TAB
2527			      AS1 (dec,%A0)     CR_TAB
2528			      AS2 (mov,%B0,%A0) CR_TAB
2529			      AS2 (movw,%C0,%A0));
2530		    }
2531		  *l = 5;
2532		  return (AS1 (clr,%A0)     CR_TAB
2533			  AS1 (dec,%A0)     CR_TAB
2534			  AS2 (mov,%B0,%A0) CR_TAB
2535			  AS2 (mov,%C0,%A0) CR_TAB
2536			  AS2 (mov,%D0,%A0));
2537		}
2538	      else
2539		{
2540		  int bit_nr = exact_log2 (INTVAL (src));
2541
2542		  if (bit_nr >= 0)
2543		    {
2544		      *l = AVR_HAVE_MOVW ? 5 : 6;
2545		      if (!real_l)
2546			{
2547			  output_asm_insn (clr_op0, operands);
2548			  output_asm_insn ("set", operands);
2549			}
2550		      if (!real_l)
2551			avr_output_bld (operands, bit_nr);
2552
2553		      return "";
2554		    }
2555		}
2556	    }
2557
2558	  /* Last resort, better than loading from memory.  */
2559	  *l = 10;
2560	  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2561		  AS2 (ldi,r31,lo8(%1))     CR_TAB
2562		  AS2 (mov,%A0,r31)         CR_TAB
2563		  AS2 (ldi,r31,hi8(%1))     CR_TAB
2564		  AS2 (mov,%B0,r31)         CR_TAB
2565		  AS2 (ldi,r31,hlo8(%1))    CR_TAB
2566		  AS2 (mov,%C0,r31)         CR_TAB
2567		  AS2 (ldi,r31,hhi8(%1))    CR_TAB
2568		  AS2 (mov,%D0,r31)         CR_TAB
2569		  AS2 (mov,r31,__tmp_reg__));
2570	}
2571      else if (GET_CODE (src) == MEM)
2572	return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2573    }
2574  else if (GET_CODE (dest) == MEM)
2575    {
2576      const char *templ;
2577
2578      if (src == const0_rtx)
2579	  operands[1] = zero_reg_rtx;
2580
2581      templ = out_movsi_mr_r (insn, operands, real_l);
2582
2583      if (!real_l)
2584	output_asm_insn (templ, operands);
2585
2586      operands[1] = src;
2587      return "";
2588    }
2589  fatal_insn ("invalid insn:", insn);
2590  return "";
2591}
2592
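/* Output asm code to store the QImode value in register operand OP[1] to
   memory operand OP[0].  If L is not NULL, set *L to the length of the
   output in words.  */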
2593const char *
2594out_movqi_mr_r (rtx insn, rtx op[], int *l)
2595{
2596  rtx dest = op[0];
2597  rtx src = op[1];
2598  rtx x = XEXP (dest, 0);
2599  int dummy;
2600
2601  if (!l)
2602    l = &dummy;
2603
2604  if (CONSTANT_ADDRESS_P (x))
2605    {
2606      if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2607	{
2608	  *l = 1;
2609	  return AS2 (out,__SREG__,%1);
2610	}
2611      if (optimize > 0 && io_address_operand (x, QImode))
2612	{
2613	  *l = 1;
2614	  return AS2 (out,%m0-0x20,%1);
2615	}
2616      *l = 2;
2617      return AS2 (sts,%m0,%1);
2618    }
2619  /* memory access by reg+disp */
2620  else if (GET_CODE (x) == PLUS
2621      && REG_P (XEXP (x,0))
2622      && GET_CODE (XEXP (x,1)) == CONST_INT)
2623    {
2624      if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2625	{
2626	  int disp = INTVAL (XEXP (x,1));
2627	  if (REGNO (XEXP (x,0)) != REG_Y)
2628	    fatal_insn ("incorrect insn:",insn);
2629
2630	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2631	    return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2632			    AS2 (std,Y+63,%1)     CR_TAB
2633			    AS2 (sbiw,r28,%o0-63));
2634
2635	  return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2636			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2637			  AS2 (st,Y,%1)            CR_TAB
2638			  AS2 (subi,r28,lo8(%o0))  CR_TAB
2639			  AS2 (sbci,r29,hi8(%o0)));
2640	}
2641      else if (REGNO (XEXP (x,0)) == REG_X)
2642	{
2643	  if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2644	    {
2645	      if (reg_unused_after (insn, XEXP (x,0)))
2646		return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2647				AS2 (adiw,r26,%o0)       CR_TAB
2648				AS2 (st,X,__tmp_reg__));
2649
2650	      return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2651			      AS2 (adiw,r26,%o0)       CR_TAB
2652			      AS2 (st,X,__tmp_reg__)   CR_TAB
2653			      AS2 (sbiw,r26,%o0));
2654	    }
2655	  else
2656	    {
2657	      if (reg_unused_after (insn, XEXP (x,0)))
2658		return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2659				AS2 (st,X,%1));
2660
2661	      return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2662			      AS2 (st,X,%1)      CR_TAB
2663			      AS2 (sbiw,r26,%o0));
2664	    }
2665	}
2666      *l = 1;
2667      return AS2 (std,%0,%1);
2668    }
2669  *l = 1;
2670  return AS2 (st,%0,%1);
2671}
2672
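/* Output asm code to store the HImode value in register operand OP[1] to
   memory operand OP[0].  If L is not NULL, set *L to the length of the
   output in words.  */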
2673const char *
2674out_movhi_mr_r (rtx insn, rtx op[], int *l)
2675{
2676  rtx dest = op[0];
2677  rtx src = op[1];
2678  rtx base = XEXP (dest, 0);
2679  int reg_base = true_regnum (base);
2680  int reg_src = true_regnum (src);
2681  /* "volatile" forces writing high byte first, even if less efficient,
2682     for correct operation with 16-bit I/O registers.  */
2683  int mem_volatile_p = MEM_VOLATILE_P (dest);
2684  int tmp;
2685
2686  if (!l)
2687    l = &tmp;
2688  if (CONSTANT_ADDRESS_P (base))
2689    {
2690      if (optimize > 0 && io_address_operand (base, HImode))
2691	{
2692	  *l = 2;
2693	  return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
2694		  AS2 (out,%m0-0x20,%A1));
2695	}
2696      return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
2697		      AS2 (sts,%m0,%A1));
2698    }
2699  if (reg_base > 0)
2700    {
2701      if (reg_base == REG_X)
2702        {
2703          if (reg_src == REG_X)
2704            {
2705              /* "st X+,r26" and "st -X,r26" are undefined.  */
2706              if (!mem_volatile_p && reg_unused_after (insn, src))
2707		return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2708			      AS2 (st,X,r26)            CR_TAB
2709			      AS2 (adiw,r26,1)          CR_TAB
2710			      AS2 (st,X,__tmp_reg__));
2711              else
2712		return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2713			      AS2 (adiw,r26,1)          CR_TAB
2714			      AS2 (st,X,__tmp_reg__)    CR_TAB
2715                              AS2 (sbiw,r26,1)          CR_TAB
2716                              AS2 (st,X,r26));
2717            }
2718          else
2719            {
2720              if (!mem_volatile_p && reg_unused_after (insn, base))
2721                return *l=2, (AS2 (st,X+,%A1) CR_TAB
2722                              AS2 (st,X,%B1));
2723              else
2724                return *l=3, (AS2 (adiw,r26,1) CR_TAB
2725                              AS2 (st,X,%B1)   CR_TAB
2726                              AS2 (st,-X,%A1));
2727            }
2728        }
2729      else
2730        return  *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2731                       AS2 (st,%0,%A1));
2732    }
2733  else if (GET_CODE (base) == PLUS)
2734    {
2735      int disp = INTVAL (XEXP (base, 1));
2736      reg_base = REGNO (XEXP (base, 0));
2737      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2738	{
2739	  if (reg_base != REG_Y)
2740	    fatal_insn ("incorrect insn:",insn);
2741
2742	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2743	    return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2744			    AS2 (std,Y+63,%B1)    CR_TAB
2745			    AS2 (std,Y+62,%A1)    CR_TAB
2746			    AS2 (sbiw,r28,%o0-62));
2747
2748	  return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2749			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2750			  AS2 (std,Y+1,%B1)        CR_TAB
2751			  AS2 (st,Y,%A1)           CR_TAB
2752			  AS2 (subi,r28,lo8(%o0))  CR_TAB
2753			  AS2 (sbci,r29,hi8(%o0)));
2754	}
2755      if (reg_base == REG_X)
2756	{
2757	  /* (X + d) = R */
2758	  if (reg_src == REG_X)
2759            {
2760	      *l = 7;
2761	      return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
2762		      AS2 (mov,__zero_reg__,r27) CR_TAB
2763                      AS2 (adiw,r26,%o0+1)       CR_TAB
2764		      AS2 (st,X,__zero_reg__)    CR_TAB
2765		      AS2 (st,-X,__tmp_reg__)    CR_TAB
2766		      AS1 (clr,__zero_reg__)     CR_TAB
2767                      AS2 (sbiw,r26,%o0));
2768	    }
2769	  *l = 4;
2770          return (AS2 (adiw,r26,%o0+1) CR_TAB
2771                  AS2 (st,X,%B1)       CR_TAB
2772                  AS2 (st,-X,%A1)      CR_TAB
2773                  AS2 (sbiw,r26,%o0));
2774	}
2775      return *l=2, (AS2 (std,%B0,%B1)    CR_TAB
2776                    AS2 (std,%A0,%A1));
2777    }
2778  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2779    return *l=2, (AS2 (st,%0,%B1) CR_TAB
2780		  AS2 (st,%0,%A1));
2781  else if (GET_CODE (base) == POST_INC) /* (R++) */
2782    {
2783      if (mem_volatile_p)
2784        {
2785          if (REGNO (XEXP (base, 0)) == REG_X)
2786            {
2787              *l = 4;
2788              return (AS2 (adiw,r26,1)  CR_TAB
2789                      AS2 (st,X,%B1)    CR_TAB
2790                      AS2 (st,-X,%A1)   CR_TAB
2791                      AS2 (adiw,r26,2));
2792            }
2793          else
2794            {
2795              *l = 3;
2796              return (AS2 (std,%p0+1,%B1) CR_TAB
2797                      AS2 (st,%p0,%A1)    CR_TAB
2798                      AS2 (adiw,%r0,2));
2799            }
2800        }
2801
2802      *l = 2;
2803      return (AS2 (st,%0,%A1)  CR_TAB
2804            AS2 (st,%0,%B1));
2805    }
2806  fatal_insn ("unknown move insn:",insn);
2807  return "";
2808}
2809
2810/* Return 1 if a frame pointer is required for the current function.  */
2811
2812bool
2813avr_frame_pointer_required_p (void)
2814{
2815  return (cfun->calls_alloca
2816	  || crtl->args.info.nregs == 0
2817  	  || get_frame_size () > 0);
2818}
2819
2820/* Returns the condition of compare insn INSN, or UNKNOWN.  */
2821
2822static RTX_CODE
2823compare_condition (rtx insn)
2824{
2825  rtx next = next_real_insn (insn);
2826  RTX_CODE cond = UNKNOWN;
2827  if (next && GET_CODE (next) == JUMP_INSN)
2828    {
2829      rtx pat = PATTERN (next);
2830      rtx src = SET_SRC (pat);
2831      rtx t = XEXP (src, 0);
2832      cond = GET_CODE (t);
2833    }
2834  return cond;
2835}
2836
2837/* Returns nonzero if INSN is a tst insn that only tests the sign.  */
2838
2839static int
2840compare_sign_p (rtx insn)
2841{
2842  RTX_CODE cond = compare_condition (insn);
2843  return (cond == GE || cond == LT);
2844}
2845
2846/* Returns nonzero if the next insn is a JUMP_INSN with a condition
2847   that needs to be swapped (GT, GTU, LE, LEU).  */
2848
2849int
2850compare_diff_p (rtx insn)
2851{
2852  RTX_CODE cond = compare_condition (insn);
2853  return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2854}
2855
2856/* Returns nonzero if INSN is a compare insn with the EQ or NE condition.  */
2857
2858int
2859compare_eq_p (rtx insn)
2860{
2861  RTX_CODE cond = compare_condition (insn);
2862  return (cond == EQ || cond == NE);
2863}
2864
2865
2866/* Output test instruction for HImode.  */
2867
2868const char *
2869out_tsthi (rtx insn, rtx op, int *l)
2870{
2871  if (compare_sign_p (insn))
2872    {
2873      if (l) *l = 1;
2874      return AS1 (tst,%B0);
2875    }
2876  if (reg_unused_after (insn, op)
2877      && compare_eq_p (insn))
2878    {
2879      /* Faster than sbiw if we can clobber the operand.  */
2880      if (l) *l = 1;
2881      return "or %A0,%B0";
2882    }
2883  if (test_hard_reg_class (ADDW_REGS, op))
2884    {
2885      if (l) *l = 1;
2886      return AS2 (sbiw,%0,0);
2887    }
2888  if (l) *l = 2;
2889  return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2890          AS2 (cpc,%B0,__zero_reg__));
2891}
2892
2893
2894/* Output test instruction for SImode.  */
2895
2896const char *
2897out_tstsi (rtx insn, rtx op, int *l)
2898{
2899  if (compare_sign_p (insn))
2900    {
2901      if (l) *l = 1;
2902      return AS1 (tst,%D0);
2903    }
2904  if (test_hard_reg_class (ADDW_REGS, op))
2905    {
2906      if (l) *l = 3;
2907      return (AS2 (sbiw,%A0,0) CR_TAB
2908              AS2 (cpc,%C0,__zero_reg__) CR_TAB
2909              AS2 (cpc,%D0,__zero_reg__));
2910    }
2911  if (l) *l = 4;
2912  return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2913          AS2 (cpc,%B0,__zero_reg__) CR_TAB
2914          AS2 (cpc,%C0,__zero_reg__) CR_TAB
2915          AS2 (cpc,%D0,__zero_reg__));
2916}
2917
2918
2919/* Generate asm equivalent for various shifts.
2920   Shift count is a CONST_INT, MEM or REG.
2921   This only handles cases that are not already
2922   carefully hand-optimized in ?sh??i3_out.  */
2923
2924void
2925out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
2926		    int *len, int t_len)
2927{
2928  rtx op[10];
2929  char str[500];
2930  int second_label = 1;
2931  int saved_in_tmp = 0;
2932  int use_zero_reg = 0;
2933
2934  op[0] = operands[0];
2935  op[1] = operands[1];
2936  op[2] = operands[2];
2937  op[3] = operands[3];
2938  str[0] = 0;
2939
2940  if (len)
2941    *len = 1;
2942
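  /* For a constant shift count, emit the shift template inline COUNT
     times if that is short enough, otherwise set up a counted loop.  */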
2943  if (GET_CODE (operands[2]) == CONST_INT)
2944    {
2945      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
2946      int count = INTVAL (operands[2]);
2947      int max_len = 10;  /* If larger than this, always use a loop.  */
2948
2949      if (count <= 0)
2950	{
2951	  if (len)
2952	    *len = 0;
2953	  return;
2954	}
2955
2956      if (count < 8 && !scratch)
2957	use_zero_reg = 1;
2958
2959      if (optimize_size)
2960	max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
2961
2962      if (t_len * count <= max_len)
2963	{
2964	  /* Output shifts inline with no loop - faster.  */
2965	  if (len)
2966	    *len = t_len * count;
2967	  else
2968	    {
2969	      while (count-- > 0)
2970		output_asm_insn (templ, op);
2971	    }
2972
2973	  return;
2974	}
2975
2976      if (scratch)
2977	{
2978	  if (!len)
2979	    strcat (str, AS2 (ldi,%3,%2));
2980	}
2981      else if (use_zero_reg)
2982	{
2983	  /* Hack to save one word: use __zero_reg__ as loop counter.
2984	     Set one bit, then shift in a loop until it is 0 again.  */
2985
2986	  op[3] = zero_reg_rtx;
2987	  if (len)
2988	    *len = 2;
2989	  else
2990	    strcat (str, ("set" CR_TAB
2991			  AS2 (bld,%3,%2-1)));
2992	}
2993      else
2994	{
2995	  /* No scratch register available, use one from LD_REGS (saved in
2996	     __tmp_reg__) that doesn't overlap with registers to shift.  */
2997
2998	  op[3] = gen_rtx_REG (QImode,
2999			   ((true_regnum (operands[0]) - 1) & 15) + 16);
3000	  op[4] = tmp_reg_rtx;
3001	  saved_in_tmp = 1;
3002
3003	  if (len)
3004	    *len = 3;  /* Includes "mov %3,%4" after the loop.  */
3005	  else
3006	    strcat (str, (AS2 (mov,%4,%3) CR_TAB
3007			  AS2 (ldi,%3,%2)));
3008	}
3009
3010      second_label = 0;
3011    }
3012  else if (GET_CODE (operands[2]) == MEM)
3013    {
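      /* The shift count is in memory; load it into __tmp_reg__ first.  */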
3014      rtx op_mov[10];
3015
3016      op[3] = op_mov[0] = tmp_reg_rtx;
3017      op_mov[1] = op[2];
3018
3019      if (len)
3020	out_movqi_r_mr (insn, op_mov, len);
3021      else
3022	output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
3023    }
3024  else if (register_operand (operands[2], QImode))
3025    {
3026      if (reg_unused_after (insn, operands[2]))
3027	op[3] = op[2];
3028      else
3029	{
3030	  op[3] = tmp_reg_rtx;
3031	  if (!len)
3032	    strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3033	}
3034    }
3035  else
3036    fatal_insn ("bad shift insn:", insn);
3037
3038  if (second_label)
3039    {
3040      if (len)
3041	++*len;
3042      else
3043	strcat (str, AS1 (rjmp,2f));
3044    }
3045
3046  if (len)
3047    *len += t_len + 2;  /* template + dec + brXX */
3048  else
3049    {
3050      strcat (str, "\n1:\t");
3051      strcat (str, templ);
3052      strcat (str, second_label ? "\n2:\t" : "\n\t");
3053      strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3054      strcat (str, CR_TAB);
3055      strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3056      if (saved_in_tmp)
3057	strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3058      output_asm_insn (str, op);
3059    }
3060}
3061
3062
3063/* 8bit shift left ((char)x << i)   */
3064
3065const char *
3066ashlqi3_out (rtx insn, rtx operands[], int *len)
3067{
3068  if (GET_CODE (operands[2]) == CONST_INT)
3069    {
3070      int k;
3071
3072      if (!len)
3073	len = &k;
3074
3075      switch (INTVAL (operands[2]))
3076	{
3077	default:
3078	  if (INTVAL (operands[2]) < 8)
3079	    break;
3080
3081	  *len = 1;
3082	  return AS1 (clr,%0);
3083
3084	case 1:
3085	  *len = 1;
3086	  return AS1 (lsl,%0);
3087
3088	case 2:
3089	  *len = 2;
3090	  return (AS1 (lsl,%0) CR_TAB
3091		  AS1 (lsl,%0));
3092
3093	case 3:
3094	  *len = 3;
3095	  return (AS1 (lsl,%0) CR_TAB
3096		  AS1 (lsl,%0) CR_TAB
3097		  AS1 (lsl,%0));
3098
3099	case 4:
3100	  if (test_hard_reg_class (LD_REGS, operands[0]))
3101	    {
3102	      *len = 2;
3103	      return (AS1 (swap,%0) CR_TAB
3104		      AS2 (andi,%0,0xf0));
3105	    }
3106	  *len = 4;
3107	  return (AS1 (lsl,%0) CR_TAB
3108		  AS1 (lsl,%0) CR_TAB
3109		  AS1 (lsl,%0) CR_TAB
3110		  AS1 (lsl,%0));
3111
3112	case 5:
3113	  if (test_hard_reg_class (LD_REGS, operands[0]))
3114	    {
3115	      *len = 3;
3116	      return (AS1 (swap,%0) CR_TAB
3117		      AS1 (lsl,%0)  CR_TAB
3118		      AS2 (andi,%0,0xe0));
3119	    }
3120	  *len = 5;
3121	  return (AS1 (lsl,%0) CR_TAB
3122		  AS1 (lsl,%0) CR_TAB
3123		  AS1 (lsl,%0) CR_TAB
3124		  AS1 (lsl,%0) CR_TAB
3125		  AS1 (lsl,%0));
3126
3127	case 6:
3128	  if (test_hard_reg_class (LD_REGS, operands[0]))
3129	    {
3130	      *len = 4;
3131	      return (AS1 (swap,%0) CR_TAB
3132		      AS1 (lsl,%0)  CR_TAB
3133		      AS1 (lsl,%0)  CR_TAB
3134		      AS2 (andi,%0,0xc0));
3135	    }
3136	  *len = 6;
3137	  return (AS1 (lsl,%0) CR_TAB
3138		  AS1 (lsl,%0) CR_TAB
3139		  AS1 (lsl,%0) CR_TAB
3140		  AS1 (lsl,%0) CR_TAB
3141		  AS1 (lsl,%0) CR_TAB
3142		  AS1 (lsl,%0));
3143
3144	case 7:
3145	  *len = 3;
3146	  return (AS1 (ror,%0) CR_TAB
3147		  AS1 (clr,%0) CR_TAB
3148		  AS1 (ror,%0));
3149	}
3150    }
3151  else if (CONSTANT_P (operands[2]))
3152    fatal_insn ("internal compiler error.  Incorrect shift:", insn);
3153
3154  out_shift_with_cnt (AS1 (lsl,%0),
3155		      insn, operands, len, 1);
3156  return "";
3157}
3158
3159
3160/* 16bit shift left ((short)x << i)   */
3161
3162const char *
3163ashlhi3_out (rtx insn, rtx operands[], int *len)
3164{
3165  if (GET_CODE (operands[2]) == CONST_INT)
3166    {
3167      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3168      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3169      int k;
3170      int *t = len;
3171
3172      if (!len)
3173	len = &k;
3174
3175      switch (INTVAL (operands[2]))
3176	{
3177	default:
3178	  if (INTVAL (operands[2]) < 16)
3179	    break;
3180
3181	  *len = 2;
3182	  return (AS1 (clr,%B0) CR_TAB
3183		  AS1 (clr,%A0));
3184
3185	case 4:
3186	  if (optimize_size && scratch)
3187	    break;  /* 5 */
3188	  if (ldi_ok)
3189	    {
3190	      *len = 6;
3191	      return (AS1 (swap,%A0)      CR_TAB
3192		      AS1 (swap,%B0)      CR_TAB
3193		      AS2 (andi,%B0,0xf0) CR_TAB
3194		      AS2 (eor,%B0,%A0)   CR_TAB
3195		      AS2 (andi,%A0,0xf0) CR_TAB
3196		      AS2 (eor,%B0,%A0));
3197	    }
3198	  if (scratch)
3199	    {
3200	      *len = 7;
3201	      return (AS1 (swap,%A0)    CR_TAB
3202		      AS1 (swap,%B0)    CR_TAB
3203		      AS2 (ldi,%3,0xf0) CR_TAB
3204		      "and %B0,%3"      CR_TAB
3205		      AS2 (eor,%B0,%A0) CR_TAB
3206		      "and %A0,%3"      CR_TAB
3207		      AS2 (eor,%B0,%A0));
3208	    }
3209	  break;  /* optimize_size ? 6 : 8 */
3210
3211	case 5:
3212	  if (optimize_size)
3213	    break;  /* scratch ? 5 : 6 */
3214	  if (ldi_ok)
3215	    {
3216	      *len = 8;
3217	      return (AS1 (lsl,%A0)       CR_TAB
3218		      AS1 (rol,%B0)       CR_TAB
3219		      AS1 (swap,%A0)      CR_TAB
3220		      AS1 (swap,%B0)      CR_TAB
3221		      AS2 (andi,%B0,0xf0) CR_TAB
3222		      AS2 (eor,%B0,%A0)   CR_TAB
3223		      AS2 (andi,%A0,0xf0) CR_TAB
3224		      AS2 (eor,%B0,%A0));
3225	    }
3226	  if (scratch)
3227	    {
3228	      *len = 9;
3229	      return (AS1 (lsl,%A0)     CR_TAB
3230		      AS1 (rol,%B0)     CR_TAB
3231		      AS1 (swap,%A0)    CR_TAB
3232		      AS1 (swap,%B0)    CR_TAB
3233		      AS2 (ldi,%3,0xf0) CR_TAB
3234		      "and %B0,%3"      CR_TAB
3235		      AS2 (eor,%B0,%A0) CR_TAB
3236		      "and %A0,%3"      CR_TAB
3237		      AS2 (eor,%B0,%A0));
3238	    }
3239	  break;  /* 10 */
3240
3241	case 6:
3242	  if (optimize_size)
3243	    break;  /* scratch ? 5 : 6 */
3244	  *len = 9;
3245	  return (AS1 (clr,__tmp_reg__) CR_TAB
3246		  AS1 (lsr,%B0)         CR_TAB
3247		  AS1 (ror,%A0)         CR_TAB
3248		  AS1 (ror,__tmp_reg__) CR_TAB
3249		  AS1 (lsr,%B0)         CR_TAB
3250		  AS1 (ror,%A0)         CR_TAB
3251		  AS1 (ror,__tmp_reg__) CR_TAB
3252		  AS2 (mov,%B0,%A0)     CR_TAB
3253		  AS2 (mov,%A0,__tmp_reg__));
3254
3255	case 7:
3256	  *len = 5;
3257	  return (AS1 (lsr,%B0)     CR_TAB
3258		  AS2 (mov,%B0,%A0) CR_TAB
3259		  AS1 (clr,%A0)     CR_TAB
3260		  AS1 (ror,%B0)     CR_TAB
3261		  AS1 (ror,%A0));
3262
3263	case 8:
3264	  return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3265			    AS1 (clr,%A0));
3266
3267	case 9:
3268	  *len = 3;
3269	  return (AS2 (mov,%B0,%A0) CR_TAB
3270		  AS1 (clr,%A0)     CR_TAB
3271		  AS1 (lsl,%B0));
3272
3273	case 10:
3274	  *len = 4;
3275	  return (AS2 (mov,%B0,%A0) CR_TAB
3276		  AS1 (clr,%A0)     CR_TAB
3277		  AS1 (lsl,%B0)     CR_TAB
3278		  AS1 (lsl,%B0));
3279
3280	case 11:
3281	  *len = 5;
3282	  return (AS2 (mov,%B0,%A0) CR_TAB
3283		  AS1 (clr,%A0)     CR_TAB
3284		  AS1 (lsl,%B0)     CR_TAB
3285		  AS1 (lsl,%B0)     CR_TAB
3286		  AS1 (lsl,%B0));
3287
3288	case 12:
3289	  if (ldi_ok)
3290	    {
3291	      *len = 4;
3292	      return (AS2 (mov,%B0,%A0) CR_TAB
3293		      AS1 (clr,%A0)     CR_TAB
3294		      AS1 (swap,%B0)    CR_TAB
3295		      AS2 (andi,%B0,0xf0));
3296	    }
3297	  if (scratch)
3298	    {
3299	      *len = 5;
3300	      return (AS2 (mov,%B0,%A0) CR_TAB
3301		      AS1 (clr,%A0)     CR_TAB
3302		      AS1 (swap,%B0)    CR_TAB
3303		      AS2 (ldi,%3,0xf0) CR_TAB
3304		      "and %B0,%3");
3305	    }
3306	  *len = 6;
3307	  return (AS2 (mov,%B0,%A0) CR_TAB
3308		  AS1 (clr,%A0)     CR_TAB
3309		  AS1 (lsl,%B0)     CR_TAB
3310		  AS1 (lsl,%B0)     CR_TAB
3311		  AS1 (lsl,%B0)     CR_TAB
3312		  AS1 (lsl,%B0));
3313
3314	case 13:
3315	  if (ldi_ok)
3316	    {
3317	      *len = 5;
3318	      return (AS2 (mov,%B0,%A0) CR_TAB
3319		      AS1 (clr,%A0)     CR_TAB
3320		      AS1 (swap,%B0)    CR_TAB
3321		      AS1 (lsl,%B0)     CR_TAB
3322		      AS2 (andi,%B0,0xe0));
3323	    }
3324	  if (AVR_HAVE_MUL && scratch)
3325	    {
3326	      *len = 5;
3327	      return (AS2 (ldi,%3,0x20) CR_TAB
3328		      AS2 (mul,%A0,%3)  CR_TAB
3329		      AS2 (mov,%B0,r0)  CR_TAB
3330		      AS1 (clr,%A0)     CR_TAB
3331		      AS1 (clr,__zero_reg__));
3332	    }
3333	  if (optimize_size && scratch)
3334	    break;  /* 5 */
3335	  if (scratch)
3336	    {
3337	      *len = 6;
3338	      return (AS2 (mov,%B0,%A0) CR_TAB
3339		      AS1 (clr,%A0)     CR_TAB
3340		      AS1 (swap,%B0)    CR_TAB
3341		      AS1 (lsl,%B0)     CR_TAB
3342		      AS2 (ldi,%3,0xe0) CR_TAB
3343		      "and %B0,%3");
3344	    }
3345	  if (AVR_HAVE_MUL)
3346	    {
3347	      *len = 6;
3348	      return ("set"            CR_TAB
3349		      AS2 (bld,r1,5)   CR_TAB
3350		      AS2 (mul,%A0,r1) CR_TAB
3351		      AS2 (mov,%B0,r0) CR_TAB
3352		      AS1 (clr,%A0)    CR_TAB
3353		      AS1 (clr,__zero_reg__));
3354	    }
3355	  *len = 7;
3356	  return (AS2 (mov,%B0,%A0) CR_TAB
3357		  AS1 (clr,%A0)     CR_TAB
3358		  AS1 (lsl,%B0)     CR_TAB
3359		  AS1 (lsl,%B0)     CR_TAB
3360		  AS1 (lsl,%B0)     CR_TAB
3361		  AS1 (lsl,%B0)     CR_TAB
3362		  AS1 (lsl,%B0));
3363
3364	case 14:
3365	  if (AVR_HAVE_MUL && ldi_ok)
3366	    {
3367	      *len = 5;
3368	      return (AS2 (ldi,%B0,0x40) CR_TAB
3369		      AS2 (mul,%A0,%B0)  CR_TAB
3370		      AS2 (mov,%B0,r0)   CR_TAB
3371		      AS1 (clr,%A0)      CR_TAB
3372		      AS1 (clr,__zero_reg__));
3373	    }
3374	  if (AVR_HAVE_MUL && scratch)
3375	    {
3376	      *len = 5;
3377	      return (AS2 (ldi,%3,0x40) CR_TAB
3378		      AS2 (mul,%A0,%3)  CR_TAB
3379		      AS2 (mov,%B0,r0)  CR_TAB
3380		      AS1 (clr,%A0)     CR_TAB
3381		      AS1 (clr,__zero_reg__));
3382	    }
3383	  if (optimize_size && ldi_ok)
3384	    {
3385	      *len = 5;
3386	      return (AS2 (mov,%B0,%A0) CR_TAB
3387		      AS2 (ldi,%A0,6) "\n1:\t"
3388		      AS1 (lsl,%B0)     CR_TAB
3389		      AS1 (dec,%A0)     CR_TAB
3390		      AS1 (brne,1b));
3391	    }
3392	  if (optimize_size && scratch)
3393	    break;  /* 5 */
3394	  *len = 6;
3395	  return (AS1 (clr,%B0) CR_TAB
3396		  AS1 (lsr,%A0) CR_TAB
3397		  AS1 (ror,%B0) CR_TAB
3398		  AS1 (lsr,%A0) CR_TAB
3399		  AS1 (ror,%B0) CR_TAB
3400		  AS1 (clr,%A0));
3401
3402	case 15:
3403	  *len = 4;
3404	  return (AS1 (clr,%B0) CR_TAB
3405		  AS1 (lsr,%A0) CR_TAB
3406		  AS1 (ror,%B0) CR_TAB
3407		  AS1 (clr,%A0));
3408	}
3409      len = t;
3410    }
3411  out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3412		       AS1 (rol,%B0)),
3413		       insn, operands, len, 2);
3414  return "";
3415}
3416
3417
3418/* 32bit shift left ((long)x << i)   */
3419
3420const char *
3421ashlsi3_out (rtx insn, rtx operands[], int *len)
3422{
3423  if (GET_CODE (operands[2]) == CONST_INT)
3424    {
3425      int k;
3426      int *t = len;
3427
3428      if (!len)
3429	len = &k;
3430
3431      switch (INTVAL (operands[2]))
3432	{
3433	default:
3434	  if (INTVAL (operands[2]) < 32)
3435	    break;
3436
3437	  if (AVR_HAVE_MOVW)
3438	    return *len = 3, (AS1 (clr,%D0) CR_TAB
3439			      AS1 (clr,%C0) CR_TAB
3440			      AS2 (movw,%A0,%C0));
3441	  *len = 4;
3442	  return (AS1 (clr,%D0) CR_TAB
3443		  AS1 (clr,%C0) CR_TAB
3444		  AS1 (clr,%B0) CR_TAB
3445		  AS1 (clr,%A0));
3446
3447	case 8:
3448	  {
3449	    int reg0 = true_regnum (operands[0]);
3450	    int reg1 = true_regnum (operands[1]);
3451	    *len = 4;
3452	    if (reg0 >= reg1)
3453	      return (AS2 (mov,%D0,%C1)  CR_TAB
3454		      AS2 (mov,%C0,%B1)  CR_TAB
3455		      AS2 (mov,%B0,%A1)  CR_TAB
3456		      AS1 (clr,%A0));
3457	    else
3458	      return (AS1 (clr,%A0)      CR_TAB
3459		      AS2 (mov,%B0,%A1)  CR_TAB
3460		      AS2 (mov,%C0,%B1)  CR_TAB
3461		      AS2 (mov,%D0,%C1));
3462	  }
3463
3464	case 16:
3465	  {
3466	    int reg0 = true_regnum (operands[0]);
3467	    int reg1 = true_regnum (operands[1]);
3468	    if (reg0 + 2 == reg1)
3469	      return *len = 2, (AS1 (clr,%B0)      CR_TAB
3470				AS1 (clr,%A0));
3471	    if (AVR_HAVE_MOVW)
3472	      return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3473				AS1 (clr,%B0)      CR_TAB
3474				AS1 (clr,%A0));
3475	    else
3476	      return *len = 4, (AS2 (mov,%C0,%A1)  CR_TAB
3477				AS2 (mov,%D0,%B1)  CR_TAB
3478				AS1 (clr,%B0)      CR_TAB
3479				AS1 (clr,%A0));
3480	  }
3481
3482	case 24:
3483	  *len = 4;
3484	  return (AS2 (mov,%D0,%A1)  CR_TAB
3485		  AS1 (clr,%C0)      CR_TAB
3486		  AS1 (clr,%B0)      CR_TAB
3487		  AS1 (clr,%A0));
3488
3489	case 31:
3490	  *len = 6;
3491	  return (AS1 (clr,%D0) CR_TAB
3492		  AS1 (lsr,%A0) CR_TAB
3493		  AS1 (ror,%D0) CR_TAB
3494		  AS1 (clr,%C0) CR_TAB
3495		  AS1 (clr,%B0) CR_TAB
3496		  AS1 (clr,%A0));
3497	}
3498      len = t;
3499    }
3500  out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3501		       AS1 (rol,%B0) CR_TAB
3502		       AS1 (rol,%C0) CR_TAB
3503		       AS1 (rol,%D0)),
3504		       insn, operands, len, 4);
3505  return "";
3506}
3507
3508/* 8bit arithmetic shift right  ((signed char)x >> i) */
3509
3510const char *
3511ashrqi3_out (rtx insn, rtx operands[], int *len)
3512{
3513  if (GET_CODE (operands[2]) == CONST_INT)
3514    {
3515      int k;
3516
3517      if (!len)
3518	len = &k;
3519
3520      switch (INTVAL (operands[2]))
3521	{
3522	case 1:
3523	  *len = 1;
3524	  return AS1 (asr,%0);
3525
3526	case 2:
3527	  *len = 2;
3528	  return (AS1 (asr,%0) CR_TAB
3529		  AS1 (asr,%0));
3530
3531	case 3:
3532	  *len = 3;
3533	  return (AS1 (asr,%0) CR_TAB
3534		  AS1 (asr,%0) CR_TAB
3535		  AS1 (asr,%0));
3536
3537	case 4:
3538	  *len = 4;
3539	  return (AS1 (asr,%0) CR_TAB
3540		  AS1 (asr,%0) CR_TAB
3541		  AS1 (asr,%0) CR_TAB
3542		  AS1 (asr,%0));
3543
3544	case 5:
3545	  *len = 5;
3546	  return (AS1 (asr,%0) CR_TAB
3547		  AS1 (asr,%0) CR_TAB
3548		  AS1 (asr,%0) CR_TAB
3549		  AS1 (asr,%0) CR_TAB
3550		  AS1 (asr,%0));
3551
3552	case 6:
3553	  *len = 4;
3554	  return (AS2 (bst,%0,6)  CR_TAB
3555		  AS1 (lsl,%0)    CR_TAB
3556		  AS2 (sbc,%0,%0) CR_TAB
3557		  AS2 (bld,%0,0));
3558
3559	default:
3560	  if (INTVAL (operands[2]) < 8)
3561	    break;
3562
3563	  /* fall through */
3564
3565	case 7:
3566	  *len = 2;
3567	  return (AS1 (lsl,%0) CR_TAB
3568		  AS2 (sbc,%0,%0));
3569	}
3570    }
3571  else if (CONSTANT_P (operands[2]))
3572    fatal_insn ("internal compiler error.  Incorrect shift:", insn);
3573
3574  out_shift_with_cnt (AS1 (asr,%0),
3575		      insn, operands, len, 1);
3576  return "";
3577}
3578
3579
3580/* 16bit arithmetic shift right  ((signed short)x >> i) */
3581
3582const char *
3583ashrhi3_out (rtx insn, rtx operands[], int *len)
3584{
3585  if (GET_CODE (operands[2]) == CONST_INT)
3586    {
3587      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3588      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3589      int k;
3590      int *t = len;
3591
3592      if (!len)
3593	len = &k;
3594
3595      switch (INTVAL (operands[2]))
3596	{
3597	case 4:
3598	case 5:
3599	  /* XXX try to optimize this too? */
3600	  break;
3601
3602	case 6:
3603	  if (optimize_size)
3604	    break;  /* scratch ? 5 : 6 */
3605	  *len = 8;
3606	  return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3607		  AS2 (mov,%A0,%B0)         CR_TAB
3608		  AS1 (lsl,__tmp_reg__)     CR_TAB
3609		  AS1 (rol,%A0)             CR_TAB
3610		  AS2 (sbc,%B0,%B0)         CR_TAB
3611		  AS1 (lsl,__tmp_reg__)     CR_TAB
3612		  AS1 (rol,%A0)             CR_TAB
3613		  AS1 (rol,%B0));
3614
3615	case 7:
3616	  *len = 4;
3617	  return (AS1 (lsl,%A0)     CR_TAB
3618		  AS2 (mov,%A0,%B0) CR_TAB
3619		  AS1 (rol,%A0)     CR_TAB
3620		  AS2 (sbc,%B0,%B0));
3621
3622	case 8:
3623	  {
3624	    int reg0 = true_regnum (operands[0]);
3625	    int reg1 = true_regnum (operands[1]);
3626
3627	    if (reg0 == reg1)
3628	      return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3629				AS1 (lsl,%B0)     CR_TAB
3630				AS2 (sbc,%B0,%B0));
3631	    else
3632	      return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3633			        AS1 (clr,%B0)     CR_TAB
3634			        AS2 (sbrc,%A0,7)  CR_TAB
3635			        AS1 (dec,%B0));
3636	  }
3637
3638	case 9:
3639	  *len = 4;
3640	  return (AS2 (mov,%A0,%B0) CR_TAB
3641		  AS1 (lsl,%B0)      CR_TAB
3642		  AS2 (sbc,%B0,%B0) CR_TAB
3643		  AS1 (asr,%A0));
3644
3645	case 10:
3646	  *len = 5;
3647	  return (AS2 (mov,%A0,%B0) CR_TAB
3648		  AS1 (lsl,%B0)     CR_TAB
3649		  AS2 (sbc,%B0,%B0) CR_TAB
3650		  AS1 (asr,%A0)     CR_TAB
3651		  AS1 (asr,%A0));
3652
3653	case 11:
3654	  if (AVR_HAVE_MUL && ldi_ok)
3655	    {
3656	      *len = 5;
3657	      return (AS2 (ldi,%A0,0x20) CR_TAB
3658		      AS2 (muls,%B0,%A0) CR_TAB
3659		      AS2 (mov,%A0,r1)   CR_TAB
3660		      AS2 (sbc,%B0,%B0)  CR_TAB
3661		      AS1 (clr,__zero_reg__));
3662	    }
3663	  if (optimize_size && scratch)
3664	    break;  /* 5 */
3665	  *len = 6;
3666	  return (AS2 (mov,%A0,%B0) CR_TAB
3667		  AS1 (lsl,%B0)     CR_TAB
3668		  AS2 (sbc,%B0,%B0) CR_TAB
3669		  AS1 (asr,%A0)     CR_TAB
3670		  AS1 (asr,%A0)     CR_TAB
3671		  AS1 (asr,%A0));
3672
3673	case 12:
3674	  if (AVR_HAVE_MUL && ldi_ok)
3675	    {
3676	      *len = 5;
3677	      return (AS2 (ldi,%A0,0x10) CR_TAB
3678		      AS2 (muls,%B0,%A0) CR_TAB
3679		      AS2 (mov,%A0,r1)   CR_TAB
3680		      AS2 (sbc,%B0,%B0)  CR_TAB
3681		      AS1 (clr,__zero_reg__));
3682	    }
3683	  if (optimize_size && scratch)
3684	    break;  /* 5 */
3685	  *len = 7;
3686	  return (AS2 (mov,%A0,%B0) CR_TAB
3687		  AS1 (lsl,%B0)     CR_TAB
3688		  AS2 (sbc,%B0,%B0) CR_TAB
3689		  AS1 (asr,%A0)     CR_TAB
3690		  AS1 (asr,%A0)     CR_TAB
3691		  AS1 (asr,%A0)     CR_TAB
3692		  AS1 (asr,%A0));
3693
3694	case 13:
3695	  if (AVR_HAVE_MUL && ldi_ok)
3696	    {
3697	      *len = 5;
3698	      return (AS2 (ldi,%A0,0x08) CR_TAB
3699		      AS2 (muls,%B0,%A0) CR_TAB
3700		      AS2 (mov,%A0,r1)   CR_TAB
3701		      AS2 (sbc,%B0,%B0)  CR_TAB
3702		      AS1 (clr,__zero_reg__));
3703	    }
3704	  if (optimize_size)
3705	    break;  /* scratch ? 5 : 7 */
3706	  *len = 8;
3707	  return (AS2 (mov,%A0,%B0) CR_TAB
3708		  AS1 (lsl,%B0)     CR_TAB
3709		  AS2 (sbc,%B0,%B0) CR_TAB
3710		  AS1 (asr,%A0)     CR_TAB
3711		  AS1 (asr,%A0)     CR_TAB
3712		  AS1 (asr,%A0)     CR_TAB
3713		  AS1 (asr,%A0)     CR_TAB
3714		  AS1 (asr,%A0));
3715
3716	case 14:
3717	  *len = 5;
3718	  return (AS1 (lsl,%B0)     CR_TAB
3719		  AS2 (sbc,%A0,%A0) CR_TAB
3720		  AS1 (lsl,%B0)     CR_TAB
3721		  AS2 (mov,%B0,%A0) CR_TAB
3722		  AS1 (rol,%A0));
3723
3724	default:
3725	  if (INTVAL (operands[2]) < 16)
3726	    break;
3727
3728	  /* fall through */
3729
3730	case 15:
3731	  return *len = 3, (AS1 (lsl,%B0)     CR_TAB
3732			    AS2 (sbc,%A0,%A0) CR_TAB
3733			    AS2 (mov,%B0,%A0));
3734	}
3735      len = t;
3736    }
3737  out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3738		       AS1 (ror,%A0)),
3739		       insn, operands, len, 2);
3740  return "";
3741}
3742
3743
3744/* 32bit arithmetic shift right  ((signed long)x >> i) */
3745
3746const char *
3747ashrsi3_out (rtx insn, rtx operands[], int *len)
3748{
3749  if (GET_CODE (operands[2]) == CONST_INT)
3750    {
3751      int k;
3752      int *t = len;
3753
3754      if (!len)
3755	len = &k;
3756
3757      switch (INTVAL (operands[2]))
3758	{
3759	case 8:
3760	  {
3761	    int reg0 = true_regnum (operands[0]);
3762	    int reg1 = true_regnum (operands[1]);
3763	    *len=6;
3764	    if (reg0 <= reg1)
3765	      return (AS2 (mov,%A0,%B1) CR_TAB
3766		      AS2 (mov,%B0,%C1) CR_TAB
3767		      AS2 (mov,%C0,%D1) CR_TAB
3768		      AS1 (clr,%D0)     CR_TAB
3769		      AS2 (sbrc,%C0,7)  CR_TAB
3770		      AS1 (dec,%D0));
3771	    else
3772	      return (AS1 (clr,%D0)     CR_TAB
3773		      AS2 (sbrc,%D1,7)  CR_TAB
3774		      AS1 (dec,%D0)     CR_TAB
3775		      AS2 (mov,%C0,%D1) CR_TAB
3776		      AS2 (mov,%B0,%C1) CR_TAB
3777		      AS2 (mov,%A0,%B1));
3778	  }
3779
3780	case 16:
3781	  {
3782	    int reg0 = true_regnum (operands[0]);
3783	    int reg1 = true_regnum (operands[1]);
3784
3785	    if (reg0 == reg1 + 2)
3786	      return *len = 4, (AS1 (clr,%D0)     CR_TAB
3787				AS2 (sbrc,%B0,7)  CR_TAB
3788				AS1 (com,%D0)     CR_TAB
3789				AS2 (mov,%C0,%D0));
3790	    if (AVR_HAVE_MOVW)
3791	      return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3792				AS1 (clr,%D0)      CR_TAB
3793				AS2 (sbrc,%B0,7)   CR_TAB
3794				AS1 (com,%D0)      CR_TAB
3795				AS2 (mov,%C0,%D0));
3796	    else
3797	      return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3798				AS2 (mov,%A0,%C1) CR_TAB
3799				AS1 (clr,%D0)     CR_TAB
3800				AS2 (sbrc,%B0,7)  CR_TAB
3801				AS1 (com,%D0)     CR_TAB
3802				AS2 (mov,%C0,%D0));
3803	  }
3804
3805	case 24:
3806	  return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3807			    AS1 (clr,%D0)     CR_TAB
3808			    AS2 (sbrc,%A0,7)  CR_TAB
3809			    AS1 (com,%D0)     CR_TAB
3810			    AS2 (mov,%B0,%D0) CR_TAB
3811			    AS2 (mov,%C0,%D0));
3812
3813	default:
3814	  if (INTVAL (operands[2]) < 32)
3815	    break;
3816
3817	  /* fall through */
3818
3819	case 31:
3820	  if (AVR_HAVE_MOVW)
3821	    return *len = 4, (AS1 (lsl,%D0)     CR_TAB
3822			      AS2 (sbc,%A0,%A0) CR_TAB
3823			      AS2 (mov,%B0,%A0) CR_TAB
3824			      AS2 (movw,%C0,%A0));
3825	  else
3826	    return *len = 5, (AS1 (lsl,%D0)     CR_TAB
3827			      AS2 (sbc,%A0,%A0) CR_TAB
3828			      AS2 (mov,%B0,%A0) CR_TAB
3829			      AS2 (mov,%C0,%A0) CR_TAB
3830			      AS2 (mov,%D0,%A0));
3831	}
3832      len = t;
3833    }
3834  out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3835		       AS1 (ror,%C0) CR_TAB
3836		       AS1 (ror,%B0) CR_TAB
3837		       AS1 (ror,%A0)),
3838		       insn, operands, len, 4);
3839  return "";
3840}
3841
3842/* 8bit logical shift right ((unsigned char)x >> i) */
3843
3844const char *
3845lshrqi3_out (rtx insn, rtx operands[], int *len)
3846{
3847  if (GET_CODE (operands[2]) == CONST_INT)
3848    {
3849      int k;
3850
3851      if (!len)
3852	len = &k;
3853
3854      switch (INTVAL (operands[2]))
3855	{
3856	default:
3857	  if (INTVAL (operands[2]) < 8)
3858	    break;
3859
3860	  *len = 1;
3861	  return AS1 (clr,%0);
3862
3863	case 1:
3864	  *len = 1;
3865	  return AS1 (lsr,%0);
3866
3867	case 2:
3868	  *len = 2;
3869	  return (AS1 (lsr,%0) CR_TAB
3870		  AS1 (lsr,%0));
3871	case 3:
3872	  *len = 3;
3873	  return (AS1 (lsr,%0) CR_TAB
3874		  AS1 (lsr,%0) CR_TAB
3875		  AS1 (lsr,%0));
3876
3877	case 4:
3878	  if (test_hard_reg_class (LD_REGS, operands[0]))
3879	    {
3880	      *len = 2;
3881	      return (AS1 (swap,%0) CR_TAB
3882		      AS2 (andi,%0,0x0f));
3883	    }
3884	  *len = 4;
3885	  return (AS1 (lsr,%0) CR_TAB
3886		  AS1 (lsr,%0) CR_TAB
3887		  AS1 (lsr,%0) CR_TAB
3888		  AS1 (lsr,%0));
3889
3890	case 5:
3891	  if (test_hard_reg_class (LD_REGS, operands[0]))
3892	    {
3893	      *len = 3;
3894	      return (AS1 (swap,%0) CR_TAB
3895		      AS1 (lsr,%0)  CR_TAB
3896		      AS2 (andi,%0,0x7));
3897	    }
3898	  *len = 5;
3899	  return (AS1 (lsr,%0) CR_TAB
3900		  AS1 (lsr,%0) CR_TAB
3901		  AS1 (lsr,%0) CR_TAB
3902		  AS1 (lsr,%0) CR_TAB
3903		  AS1 (lsr,%0));
3904
3905	case 6:
3906	  if (test_hard_reg_class (LD_REGS, operands[0]))
3907	    {
3908	      *len = 4;
3909	      return (AS1 (swap,%0) CR_TAB
3910		      AS1 (lsr,%0)  CR_TAB
3911		      AS1 (lsr,%0)  CR_TAB
3912		      AS2 (andi,%0,0x3));
3913	    }
3914	  *len = 6;
3915	  return (AS1 (lsr,%0) CR_TAB
3916		  AS1 (lsr,%0) CR_TAB
3917		  AS1 (lsr,%0) CR_TAB
3918		  AS1 (lsr,%0) CR_TAB
3919		  AS1 (lsr,%0) CR_TAB
3920		  AS1 (lsr,%0));
3921
3922	case 7:
3923	  *len = 3;
3924	  return (AS1 (rol,%0) CR_TAB
3925		  AS1 (clr,%0) CR_TAB
3926		  AS1 (rol,%0));
3927	}
3928    }
3929  else if (CONSTANT_P (operands[2]))
3930    fatal_insn ("internal compiler error.  Incorrect shift:", insn);
3931
3932  out_shift_with_cnt (AS1 (lsr,%0),
3933		      insn, operands, len, 1);
3934  return "";
3935}
3936
3937/* 16-bit logical shift right ((unsigned short)x >> i) */
3938
3939const char *
3940lshrhi3_out (rtx insn, rtx operands[], int *len)
3941{
3942  if (GET_CODE (operands[2]) == CONST_INT)
3943    {
3944      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3945      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3946      int k;
3947      int *t = len;
3948
3949      if (!len)
3950	len = &k;
3951
3952      switch (INTVAL (operands[2]))
3953	{
3954	default:
3955	  if (INTVAL (operands[2]) < 16)
3956	    break;
3957
3958	  *len = 2;
3959	  return (AS1 (clr,%B0) CR_TAB
3960		  AS1 (clr,%A0));
3961
3962	case 4:
3963	  if (optimize_size && scratch)
3964	    break;  /* 5 */
3965	  if (ldi_ok)
3966	    {
3967	      *len = 6;
3968	      return (AS1 (swap,%B0)      CR_TAB
3969		      AS1 (swap,%A0)      CR_TAB
3970		      AS2 (andi,%A0,0x0f) CR_TAB
3971		      AS2 (eor,%A0,%B0)   CR_TAB
3972		      AS2 (andi,%B0,0x0f) CR_TAB
3973		      AS2 (eor,%A0,%B0));
3974	    }
3975	  if (scratch)
3976	    {
3977	      *len = 7;
3978	      return (AS1 (swap,%B0)    CR_TAB
3979		      AS1 (swap,%A0)    CR_TAB
3980		      AS2 (ldi,%3,0x0f) CR_TAB
3981		      "and %A0,%3"      CR_TAB
3982		      AS2 (eor,%A0,%B0) CR_TAB
3983		      "and %B0,%3"      CR_TAB
3984		      AS2 (eor,%A0,%B0));
3985	    }
3986	  break;  /* optimize_size ? 6 : 8 */
3987
3988	case 5:
3989	  if (optimize_size)
3990	    break;  /* scratch ? 5 : 6 */
3991	  if (ldi_ok)
3992	    {
3993	      *len = 8;
3994	      return (AS1 (lsr,%B0)       CR_TAB
3995		      AS1 (ror,%A0)       CR_TAB
3996		      AS1 (swap,%B0)      CR_TAB
3997		      AS1 (swap,%A0)      CR_TAB
3998		      AS2 (andi,%A0,0x0f) CR_TAB
3999		      AS2 (eor,%A0,%B0)   CR_TAB
4000		      AS2 (andi,%B0,0x0f) CR_TAB
4001		      AS2 (eor,%A0,%B0));
4002	    }
4003	  if (scratch)
4004	    {
4005	      *len = 9;
4006	      return (AS1 (lsr,%B0)     CR_TAB
4007		      AS1 (ror,%A0)     CR_TAB
4008		      AS1 (swap,%B0)    CR_TAB
4009		      AS1 (swap,%A0)    CR_TAB
4010		      AS2 (ldi,%3,0x0f) CR_TAB
4011		      "and %A0,%3"      CR_TAB
4012		      AS2 (eor,%A0,%B0) CR_TAB
4013		      "and %B0,%3"      CR_TAB
4014		      AS2 (eor,%A0,%B0));
4015	    }
4016	  break;  /* 10 */
4017
4018	case 6:
4019	  if (optimize_size)
4020	    break;  /* scratch ? 5 : 6 */
4021	  *len = 9;
4022	  return (AS1 (clr,__tmp_reg__) CR_TAB
4023		  AS1 (lsl,%A0)         CR_TAB
4024		  AS1 (rol,%B0)         CR_TAB
4025		  AS1 (rol,__tmp_reg__) CR_TAB
4026		  AS1 (lsl,%A0)         CR_TAB
4027		  AS1 (rol,%B0)         CR_TAB
4028		  AS1 (rol,__tmp_reg__) CR_TAB
4029		  AS2 (mov,%A0,%B0)     CR_TAB
4030		  AS2 (mov,%B0,__tmp_reg__));
4031
4032	case 7:
4033	  *len = 5;
4034	  return (AS1 (lsl,%A0)     CR_TAB
4035		  AS2 (mov,%A0,%B0) CR_TAB
4036		  AS1 (rol,%A0)     CR_TAB
4037		  AS2 (sbc,%B0,%B0) CR_TAB
4038		  AS1 (neg,%B0));
4039
4040	case 8:
4041	  return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4042			    AS1 (clr,%B0));
4043
4044	case 9:
4045	  *len = 3;
4046	  return (AS2 (mov,%A0,%B0) CR_TAB
4047		  AS1 (clr,%B0)     CR_TAB
4048		  AS1 (lsr,%A0));
4049
4050	case 10:
4051	  *len = 4;
4052	  return (AS2 (mov,%A0,%B0) CR_TAB
4053		  AS1 (clr,%B0)     CR_TAB
4054		  AS1 (lsr,%A0)     CR_TAB
4055		  AS1 (lsr,%A0));
4056
4057	case 11:
4058	  *len = 5;
4059	  return (AS2 (mov,%A0,%B0) CR_TAB
4060		  AS1 (clr,%B0)     CR_TAB
4061		  AS1 (lsr,%A0)     CR_TAB
4062		  AS1 (lsr,%A0)     CR_TAB
4063		  AS1 (lsr,%A0));
4064
4065	case 12:
4066	  if (ldi_ok)
4067	    {
4068	      *len = 4;
4069	      return (AS2 (mov,%A0,%B0) CR_TAB
4070		      AS1 (clr,%B0)     CR_TAB
4071		      AS1 (swap,%A0)    CR_TAB
4072		      AS2 (andi,%A0,0x0f));
4073	    }
4074	  if (scratch)
4075	    {
4076	      *len = 5;
4077	      return (AS2 (mov,%A0,%B0) CR_TAB
4078		      AS1 (clr,%B0)     CR_TAB
4079		      AS1 (swap,%A0)    CR_TAB
4080		      AS2 (ldi,%3,0x0f) CR_TAB
4081		      "and %A0,%3");
4082	    }
4083	  *len = 6;
4084	  return (AS2 (mov,%A0,%B0) CR_TAB
4085		  AS1 (clr,%B0)     CR_TAB
4086		  AS1 (lsr,%A0)     CR_TAB
4087		  AS1 (lsr,%A0)     CR_TAB
4088		  AS1 (lsr,%A0)     CR_TAB
4089		  AS1 (lsr,%A0));
4090
4091	case 13:
4092	  if (ldi_ok)
4093	    {
4094	      *len = 5;
4095	      return (AS2 (mov,%A0,%B0) CR_TAB
4096		      AS1 (clr,%B0)     CR_TAB
4097		      AS1 (swap,%A0)    CR_TAB
4098		      AS1 (lsr,%A0)     CR_TAB
4099		      AS2 (andi,%A0,0x07));
4100	    }
4101	  if (AVR_HAVE_MUL && scratch)
4102	    {
4103	      *len = 5;
4104	      return (AS2 (ldi,%3,0x08) CR_TAB
4105		      AS2 (mul,%B0,%3)  CR_TAB
4106		      AS2 (mov,%A0,r1)  CR_TAB
4107		      AS1 (clr,%B0)     CR_TAB
4108		      AS1 (clr,__zero_reg__));
4109	    }
4110	  if (optimize_size && scratch)
4111	    break;  /* 5 */
4112	  if (scratch)
4113	    {
4114	      *len = 6;
4115	      return (AS2 (mov,%A0,%B0) CR_TAB
4116		      AS1 (clr,%B0)     CR_TAB
4117		      AS1 (swap,%A0)    CR_TAB
4118		      AS1 (lsr,%A0)     CR_TAB
4119		      AS2 (ldi,%3,0x07) CR_TAB
4120		      "and %A0,%3");
4121	    }
4122	  if (AVR_HAVE_MUL)
4123	    {
4124	      *len = 6;
4125	      return ("set"            CR_TAB
4126		      AS2 (bld,r1,3)   CR_TAB
4127		      AS2 (mul,%B0,r1) CR_TAB
4128		      AS2 (mov,%A0,r1) CR_TAB
4129		      AS1 (clr,%B0)    CR_TAB
4130		      AS1 (clr,__zero_reg__));
4131	    }
4132	  *len = 7;
4133	  return (AS2 (mov,%A0,%B0) CR_TAB
4134		  AS1 (clr,%B0)     CR_TAB
4135		  AS1 (lsr,%A0)     CR_TAB
4136		  AS1 (lsr,%A0)     CR_TAB
4137		  AS1 (lsr,%A0)     CR_TAB
4138		  AS1 (lsr,%A0)     CR_TAB
4139		  AS1 (lsr,%A0));
4140
4141	case 14:
4142	  if (AVR_HAVE_MUL && ldi_ok)
4143	    {
4144	      *len = 5;
4145	      return (AS2 (ldi,%A0,0x04) CR_TAB
4146		      AS2 (mul,%B0,%A0)  CR_TAB
4147		      AS2 (mov,%A0,r1)   CR_TAB
4148		      AS1 (clr,%B0)      CR_TAB
4149		      AS1 (clr,__zero_reg__));
4150	    }
4151	  if (AVR_HAVE_MUL && scratch)
4152	    {
4153	      *len = 5;
4154	      return (AS2 (ldi,%3,0x04) CR_TAB
4155		      AS2 (mul,%B0,%3)  CR_TAB
4156		      AS2 (mov,%A0,r1)  CR_TAB
4157		      AS1 (clr,%B0)     CR_TAB
4158		      AS1 (clr,__zero_reg__));
4159	    }
4160	  if (optimize_size && ldi_ok)
4161	    {
4162	      *len = 5;
4163	      return (AS2 (mov,%A0,%B0) CR_TAB
4164		      AS2 (ldi,%B0,6) "\n1:\t"
4165		      AS1 (lsr,%A0)     CR_TAB
4166		      AS1 (dec,%B0)     CR_TAB
4167		      AS1 (brne,1b));
4168	    }
4169	  if (optimize_size && scratch)
4170	    break;  /* 5 */
4171	  *len = 6;
4172	  return (AS1 (clr,%A0) CR_TAB
4173		  AS1 (lsl,%B0) CR_TAB
4174		  AS1 (rol,%A0) CR_TAB
4175		  AS1 (lsl,%B0) CR_TAB
4176		  AS1 (rol,%A0) CR_TAB
4177		  AS1 (clr,%B0));
4178
4179	case 15:
4180	  *len = 4;
4181	  return (AS1 (clr,%A0) CR_TAB
4182		  AS1 (lsl,%B0) CR_TAB
4183		  AS1 (rol,%A0) CR_TAB
4184		  AS1 (clr,%B0));
4185	}
4186      len = t;
4187    }
4188  out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4189		       AS1 (ror,%A0)),
4190		       insn, operands, len, 2);
4191  return "";
4192}
4193
4194/* 32-bit logical shift right ((unsigned long)x >> i) */
4195
4196const char *
4197lshrsi3_out (rtx insn, rtx operands[], int *len)
4198{
4199  if (GET_CODE (operands[2]) == CONST_INT)
4200    {
4201      int k;
4202      int *t = len;
4203
4204      if (!len)
4205	len = &k;
4206
4207      switch (INTVAL (operands[2]))
4208	{
4209	default:
4210	  if (INTVAL (operands[2]) < 32)
4211	    break;
4212
4213	  if (AVR_HAVE_MOVW)
4214	    return *len = 3, (AS1 (clr,%D0) CR_TAB
4215			      AS1 (clr,%C0) CR_TAB
4216			      AS2 (movw,%A0,%C0));
4217	  *len = 4;
4218	  return (AS1 (clr,%D0) CR_TAB
4219		  AS1 (clr,%C0) CR_TAB
4220		  AS1 (clr,%B0) CR_TAB
4221		  AS1 (clr,%A0));
4222
4223	case 8:
4224	  {
4225	    int reg0 = true_regnum (operands[0]);
4226	    int reg1 = true_regnum (operands[1]);
4227	    *len = 4;
4228	    if (reg0 <= reg1)
4229	      return (AS2 (mov,%A0,%B1) CR_TAB
4230		      AS2 (mov,%B0,%C1) CR_TAB
4231		      AS2 (mov,%C0,%D1) CR_TAB
4232		      AS1 (clr,%D0));
4233	    else
4234	      return (AS1 (clr,%D0)     CR_TAB
4235		      AS2 (mov,%C0,%D1) CR_TAB
4236		      AS2 (mov,%B0,%C1) CR_TAB
4237		      AS2 (mov,%A0,%B1));
4238	  }
4239
4240	case 16:
4241	  {
4242	    int reg0 = true_regnum (operands[0]);
4243	    int reg1 = true_regnum (operands[1]);
4244
4245	    if (reg0 == reg1 + 2)
4246	      return *len = 2, (AS1 (clr,%C0)     CR_TAB
4247				AS1 (clr,%D0));
4248	    if (AVR_HAVE_MOVW)
4249	      return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4250				AS1 (clr,%C0)      CR_TAB
4251				AS1 (clr,%D0));
4252	    else
4253	      return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4254				AS2 (mov,%A0,%C1) CR_TAB
4255				AS1 (clr,%C0)     CR_TAB
4256				AS1 (clr,%D0));
4257	  }
4258
4259	case 24:
4260	  return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4261			    AS1 (clr,%B0)     CR_TAB
4262			    AS1 (clr,%C0)     CR_TAB
4263			    AS1 (clr,%D0));
4264
4265	case 31:
4266	  *len = 6;
4267	  return (AS1 (clr,%A0)    CR_TAB
4268		  AS2 (sbrc,%D0,7) CR_TAB
4269		  AS1 (inc,%A0)    CR_TAB
4270		  AS1 (clr,%B0)    CR_TAB
4271		  AS1 (clr,%C0)    CR_TAB
4272		  AS1 (clr,%D0));
4273	}
4274      len = t;
4275    }
4276  out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4277		       AS1 (ror,%C0) CR_TAB
4278		       AS1 (ror,%B0) CR_TAB
4279		       AS1 (ror,%A0)),
4280		      insn, operands, len, 4);
4281  return "";
4282}
4283
4284/* Create RTL split patterns for byte-sized rotate expressions.  This
4285   produces a series of move instructions and considers overlap situations.
4286   Overlapping non-HImode operands need a scratch register.  */
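/* Illustrative sketch of the algorithm below: for an overlapping SImode
   rotate by 16 bits the subreg moves form cycles (each destination is also
   a later source).  The emission loop repeatedly issues every move whose
   destination is no longer needed as a source; if at some point nothing can
   be issued but moves remain, the destination of one blocked move is first
   saved into the scratch register (as an extra queued move), the move that
   was waiting for that value is redirected to read the scratch register
   instead, and emission resumes.  */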
4287
4288bool
4289avr_rotate_bytes (rtx operands[])
4290{
4291    int i, j;
4292    enum machine_mode mode = GET_MODE (operands[0]);
4293    bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
4294    bool same_reg = rtx_equal_p (operands[0], operands[1]);
4295    int num = INTVAL (operands[2]);
4296    rtx scratch = operands[3];
4297    /* Work out if byte or word move is needed.  Odd byte rotates need QImode.
4298       Word move if no scratch is needed, otherwise use size of scratch.  */
4299    enum machine_mode move_mode = QImode;
4300    if (num & 0xf)
4301      move_mode = QImode;
4302    else if ((mode == SImode && !same_reg) || !overlapped)
4303      move_mode = HImode;
4304    else
4305      move_mode = GET_MODE (scratch);
4306
4307    /* Force DI rotate to use QI moves since other DI moves are currently split
4308       into QI moves so forward propagation works better.  */
4309    if (mode == DImode)
4310      move_mode = QImode;
4311    /* Make scratch smaller if needed.  */
4312    if (GET_MODE (scratch) == HImode && move_mode == QImode)
4313      scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
4314
4315    int move_size = GET_MODE_SIZE (move_mode);
4316    /* Number of bytes/words to rotate.  */
4317    int offset = (num  >> 3) / move_size;
4318    /* Number of moves needed.  */
4319    int size = GET_MODE_SIZE (mode) / move_size;
4320    /* HImode byte swap is a special case to avoid a scratch register.  */
4321    if (mode == HImode && same_reg)
4322      {
4323	/* HImode byte swap, using xor.  This is as quick as using scratch.  */
4324	rtx src, dst;
4325	src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
4326	dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
4327	if (!rtx_equal_p (dst, src))
4328	  {
4329	     emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4330	     emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
4331	     emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4332	  }
4333      }
4334    else
4335      {
4336	/* Create linked list of moves to determine move order.  */
4337	struct {
4338	  rtx src, dst;
4339	  int links;
4340	} move[size + 8];
4341
4342	/* Generate list of subreg moves.  */
4343	for (i = 0; i < size; i++)
4344	  {
4345	    int from = i;
4346	    int to = (from + offset) % size;
4347	    move[i].src = simplify_gen_subreg (move_mode, operands[1],
4348						mode, from * move_size);
4349	    move[i].dst = simplify_gen_subreg (move_mode, operands[0],
4350						mode, to   * move_size);
4351	    move[i].links = -1;
4352	   }
4353	/* Mark dependence where a dst of one move is the src of another move.
4354	   The first move is a conflict as it must wait until the second is
4355	   performed.  We ignore moves to self - we catch this later.  */
4356	if (overlapped)
4357	  for (i = 0; i < size; i++)
4358	    if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
4359	      for (j = 0; j < size; j++)
4360		if (j != i && rtx_equal_p (move[j].src, move[i].dst))
4361		  {
4362		    /* The dst of move i is the src of move j.  */
4363		    move[i].links = j;
4364		    break;
4365		  }
4366
4367	int blocked = -1;
4368	int moves = 0;
4369	/* Go through move list and perform non-conflicting moves.  As each
4370	   non-overlapping move is made, it may remove other conflicts
4371	   so the process is repeated until no conflicts remain.  */
4372	do
4373	  {
4374	    blocked = -1;
4375	    moves = 0;
4376	    /* Emit move where dst is not also a src or we have used that
4377	       src already.  */
4378	    for (i = 0; i < size; i++)
4379	      if (move[i].src != NULL_RTX)
4380		if  (move[i].links == -1 || move[move[i].links].src == NULL_RTX)
4381		  {
4382		    moves++;
4383		    /* Ignore NOP moves to self.  */
4384		    if (!rtx_equal_p (move[i].dst, move[i].src))
4385		      emit_move_insn (move[i].dst, move[i].src);
4386
4387		    /* Remove  conflict from list.  */
4388		    move[i].src = NULL_RTX;
4389		  }
4390		else
4391		  blocked = i;
4392
4393	    /* Check for deadlock. This is when no moves occurred and we have
4394	       at least one blocked move.  */
4395	    if (moves == 0 && blocked != -1)
4396	      {
4397		/* Need to use scratch register to break deadlock.
4398		   Add move to put dst of blocked move into scratch.
4399		   When this move occurs, it will break chain deadlock.
4400		   The scratch register is substituted for real move.  */
4401
4402		move[size].src = move[blocked].dst;
4403		move[size].dst = scratch;
4404		/* The scratch move is never blocked.  */
4405		move[size].links = -1;
4406		/* Make sure we have a valid link.  */
4407		gcc_assert (move[blocked].links != -1);
4408		/* Replace the src of the blocking move with the scratch reg.  */
4409		move[move[blocked].links].src = scratch;
4410		/* Make the blocked move dependent on the scratch move occurring.  */
4411		move[blocked].links = size;
4412		size++;
4413	      }
4414	  }
4415	while (blocked != -1);
4416      }
4417    return true;
4418}
4419
4420/* Modify the length assigned to instruction INSN.
4421   LEN is the initially computed length of the insn.  */
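/* Worked example (illustrative): for an HImode AND with the constant
   0xff00 the high byte is left untouched and only the low byte has to be
   cleared, so the computation below yields a length of 1 -- each mask byte
   that is not 0xff costs exactly one instruction.  */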
4422
4423int
4424adjust_insn_length (rtx insn, int len)
4425{
4426  rtx patt = PATTERN (insn);
4427  rtx set;
4428
4429  if (GET_CODE (patt) == SET)
4430    {
4431      rtx op[10];
4432      op[1] = SET_SRC (patt);
4433      op[0] = SET_DEST (patt);
4434      if (general_operand (op[1], VOIDmode)
4435	  && general_operand (op[0], VOIDmode))
4436	{
4437	  switch (GET_MODE (op[0]))
4438	    {
4439	    case QImode:
4440	      output_movqi (insn, op, &len);
4441	      break;
4442	    case HImode:
4443	      output_movhi (insn, op, &len);
4444	      break;
4445	    case SImode:
4446	    case SFmode:
4447	      output_movsisf (insn, op, &len);
4448	      break;
4449	    default:
4450	      break;
4451	    }
4452	}
4453      else if (op[0] == cc0_rtx && REG_P (op[1]))
4454	{
4455	  switch (GET_MODE (op[1]))
4456	    {
4457	    case HImode: out_tsthi (insn, op[1], &len); break;
4458	    case SImode: out_tstsi (insn, op[1], &len); break;
4459	    default: break;
4460	    }
4461	}
4462      else if (GET_CODE (op[1]) == AND)
4463	{
4464	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4465	    {
4466	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4467	      if (GET_MODE (op[1]) == SImode)
4468		len = (((mask & 0xff) != 0xff)
4469		       + ((mask & 0xff00) != 0xff00)
4470		       + ((mask & 0xff0000L) != 0xff0000L)
4471		       + ((mask & 0xff000000L) != 0xff000000L));
4472	      else if (GET_MODE (op[1]) == HImode)
4473		len = (((mask & 0xff) != 0xff)
4474		       + ((mask & 0xff00) != 0xff00));
4475	    }
4476	}
4477      else if (GET_CODE (op[1]) == IOR)
4478	{
4479	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4480	    {
4481	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4482	      if (GET_MODE (op[1]) == SImode)
4483		len = (((mask & 0xff) != 0)
4484		       + ((mask & 0xff00) != 0)
4485		       + ((mask & 0xff0000L) != 0)
4486		       + ((mask & 0xff000000L) != 0));
4487	      else if (GET_MODE (op[1]) == HImode)
4488		len = (((mask & 0xff) != 0)
4489		       + ((mask & 0xff00) != 0));
4490	    }
4491	}
4492    }
4493  set = single_set (insn);
4494  if (set)
4495    {
4496      rtx op[10];
4497
4498      op[1] = SET_SRC (set);
4499      op[0] = SET_DEST (set);
4500
4501      if (GET_CODE (patt) == PARALLEL
4502	  && general_operand (op[1], VOIDmode)
4503	  && general_operand (op[0], VOIDmode))
4504	{
4505	  if (XVECLEN (patt, 0) == 2)
4506	    op[2] = XVECEXP (patt, 0, 1);
4507
4508	  switch (GET_MODE (op[0]))
4509	    {
4510	    case QImode:
4511	      len = 2;
4512	      break;
4513	    case HImode:
4514	      output_reload_inhi (insn, op, &len);
4515	      break;
4516	    case SImode:
4517	    case SFmode:
4518	      output_reload_insisf (insn, op, &len);
4519	      break;
4520	    default:
4521	      break;
4522	    }
4523	}
4524      else if (GET_CODE (op[1]) == ASHIFT
4525	  || GET_CODE (op[1]) == ASHIFTRT
4526	  || GET_CODE (op[1]) == LSHIFTRT)
4527	{
4528	  rtx ops[10];
4529	  ops[0] = op[0];
4530	  ops[1] = XEXP (op[1],0);
4531	  ops[2] = XEXP (op[1],1);
4532	  switch (GET_CODE (op[1]))
4533	    {
4534	    case ASHIFT:
4535	      switch (GET_MODE (op[0]))
4536		{
4537		case QImode: ashlqi3_out (insn,ops,&len); break;
4538		case HImode: ashlhi3_out (insn,ops,&len); break;
4539		case SImode: ashlsi3_out (insn,ops,&len); break;
4540		default: break;
4541		}
4542	      break;
4543	    case ASHIFTRT:
4544	      switch (GET_MODE (op[0]))
4545		{
4546		case QImode: ashrqi3_out (insn,ops,&len); break;
4547		case HImode: ashrhi3_out (insn,ops,&len); break;
4548		case SImode: ashrsi3_out (insn,ops,&len); break;
4549		default: break;
4550		}
4551	      break;
4552	    case LSHIFTRT:
4553	      switch (GET_MODE (op[0]))
4554		{
4555		case QImode: lshrqi3_out (insn,ops,&len); break;
4556		case HImode: lshrhi3_out (insn,ops,&len); break;
4557		case SImode: lshrsi3_out (insn,ops,&len); break;
4558		default: break;
4559		}
4560	      break;
4561	    default:
4562	      break;
4563	    }
4564	}
4565    }
4566  return len;
4567}
4568
4569/* Return nonzero if register REG is dead after INSN.  */
4570
4571int
4572reg_unused_after (rtx insn, rtx reg)
4573{
4574  return (dead_or_set_p (insn, reg)
4575	  || (REG_P(reg) && _reg_unused_after (insn, reg)));
4576}
4577
4578/* Return nonzero if REG is not used after INSN.
4579   We assume REG is a reload reg, and therefore does
4580   not live past labels.  It may live past calls or jumps though.  */
4581
4582int
4583_reg_unused_after (rtx insn, rtx reg)
4584{
4585  enum rtx_code code;
4586  rtx set;
4587
4588  /* If the reg is set by this instruction, then it is safe for our
4589     case.  Disregard the case where this is a store to memory, since
4590     we are checking a register used in the store address.  */
4591  set = single_set (insn);
4592  if (set && GET_CODE (SET_DEST (set)) != MEM
4593      && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4594    return 1;
4595
4596  while ((insn = NEXT_INSN (insn)))
4597    {
4598      rtx set;
4599      code = GET_CODE (insn);
4600
4601#if 0
4602      /* If this is a label that existed before reload, then the register
4603	 is dead here.  However, if this is a label added by reorg, then
4604	 the register may still be live here.  We can't tell the difference,
4605	 so we just ignore labels completely.  */
4606      if (code == CODE_LABEL)
4607	return 1;
4608      /* else */
4609#endif
4610
4611      if (!INSN_P (insn))
4612	continue;
4613
4614      if (code == JUMP_INSN)
4615	return 0;
4616
4617      /* If this is a sequence, we must handle them all at once.
4618	 We could have for instance a call that sets the target register,
4619	 and an insn in a delay slot that uses the register.  In this case,
4620	 we must return 0.  */
4621      else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4622	{
4623	  int i;
4624	  int retval = 0;
4625
4626	  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4627	    {
4628	      rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4629	      rtx set = single_set (this_insn);
4630
4631	      if (GET_CODE (this_insn) == CALL_INSN)
4632		code = CALL_INSN;
4633	      else if (GET_CODE (this_insn) == JUMP_INSN)
4634		{
4635		  if (INSN_ANNULLED_BRANCH_P (this_insn))
4636		    return 0;
4637		  code = JUMP_INSN;
4638		}
4639
4640	      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4641		return 0;
4642	      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4643		{
4644		  if (GET_CODE (SET_DEST (set)) != MEM)
4645		    retval = 1;
4646		  else
4647		    return 0;
4648		}
4649	      if (set == 0
4650		  && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4651		return 0;
4652	    }
4653	  if (retval == 1)
4654	    return 1;
4655	  else if (code == JUMP_INSN)
4656	    return 0;
4657	}
4658
4659      if (code == CALL_INSN)
4660	{
4661	  rtx tem;
4662	  for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4663	    if (GET_CODE (XEXP (tem, 0)) == USE
4664		&& REG_P (XEXP (XEXP (tem, 0), 0))
4665		&& reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4666	      return 0;
4667	  if (call_used_regs[REGNO (reg)])
4668	    return 1;
4669	}
4670
4671      set = single_set (insn);
4672
4673      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4674	return 0;
4675      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4676	return GET_CODE (SET_DEST (set)) != MEM;
4677      if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4678	return 0;
4679    }
4680  return 1;
4681}
4682
4683/* Target hook for assembling integer objects.  The AVR version needs
4684   special handling for references to certain labels.  */
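/* For example, an aligned pointer-sized reference into the text segment
   (such as a function address stored in a jump table) is emitted below as
   "\t.word\tgs(symbol)"; everything else is handed to
   default_assemble_integer.  */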
4685
4686static bool
4687avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4688{
4689  if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4690      && text_segment_operand (x, VOIDmode) )
4691    {
4692      fputs ("\t.word\tgs(", asm_out_file);
4693      output_addr_const (asm_out_file, x);
4694      fputs (")\n", asm_out_file);
4695      return true;
4696    }
4697  return default_assemble_integer (x, size, aligned_p);
4698}
4699
4700/* Worker function for ASM_DECLARE_FUNCTION_NAME.  */
4701
4702void
4703avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
4704{
4705
4706  /* If the function has the 'signal' or 'interrupt' attribute, test to
4707     make sure that the name of the function is "__vector_NN" so as to
4708     catch when the user misspells the interrupt vector name.  */
4709
4710  if (cfun->machine->is_interrupt)
4711    {
4712      if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4713        {
4714          warning_at (DECL_SOURCE_LOCATION (decl), 0,
4715                      "%qs appears to be a misspelled interrupt handler",
4716                      name);
4717        }
4718    }
4719  else if (cfun->machine->is_signal)
4720    {
4721      if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4722        {
4723           warning_at (DECL_SOURCE_LOCATION (decl), 0,
4724                       "%qs appears to be a misspelled signal handler",
4725                       name);
4726        }
4727    }
4728
4729  ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
4730  ASM_OUTPUT_LABEL (file, name);
4731}
4732
4733/* The routine used to output NUL terminated strings.  We use a special
4734   version of this for most svr4 targets because doing so makes the
4735   generated assembly code more compact (and thus faster to assemble)
4736   as well as more readable, especially for targets like the i386
4737   (where the only alternative is to output character sequences as
4738   comma separated lists of numbers).  */
4739
4740void
4741gas_output_limited_string(FILE *file, const char *str)
4742{
4743  const unsigned char *_limited_str = (const unsigned char *) str;
4744  unsigned ch;
4745  fprintf (file, "%s\"", STRING_ASM_OP);
4746  for (; (ch = *_limited_str); _limited_str++)
4747    {
4748      int escape;
4749      switch (escape = ESCAPES[ch])
4750	{
4751	case 0:
4752	  putc (ch, file);
4753	  break;
4754	case 1:
4755	  fprintf (file, "\\%03o", ch);
4756	  break;
4757	default:
4758	  putc ('\\', file);
4759	  putc (escape, file);
4760	  break;
4761	}
4762    }
4763  fprintf (file, "\"\n");
4764}
4765
4766/* The routine used to output sequences of byte values.  We use a special
4767   version of this for most svr4 targets because doing so makes the
4768   generated assembly code more compact (and thus faster to assemble)
4769   as well as more readable.  Note that if we find subparts of the
4770   character sequence which end with NUL (and which are shorter than
4771   STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING.  */
4772
4773void
4774gas_output_ascii(FILE *file, const char *str, size_t length)
4775{
4776  const unsigned char *_ascii_bytes = (const unsigned char *) str;
4777  const unsigned char *limit = _ascii_bytes + length;
4778  unsigned bytes_in_chunk = 0;
4779  for (; _ascii_bytes < limit; _ascii_bytes++)
4780    {
4781      const unsigned char *p;
4782      if (bytes_in_chunk >= 60)
4783	{
4784	  fprintf (file, "\"\n");
4785	  bytes_in_chunk = 0;
4786	}
4787      for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4788	continue;
4789      if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4790	{
4791	  if (bytes_in_chunk > 0)
4792	    {
4793	      fprintf (file, "\"\n");
4794	      bytes_in_chunk = 0;
4795	    }
4796	  gas_output_limited_string (file, (const char*)_ascii_bytes);
4797	  _ascii_bytes = p;
4798	}
4799      else
4800	{
4801	  int escape;
4802	  unsigned ch;
4803	  if (bytes_in_chunk == 0)
4804	    fprintf (file, "\t.ascii\t\"");
4805	  switch (escape = ESCAPES[ch = *_ascii_bytes])
4806	    {
4807	    case 0:
4808	      putc (ch, file);
4809	      bytes_in_chunk++;
4810	      break;
4811	    case 1:
4812	      fprintf (file, "\\%03o", ch);
4813	      bytes_in_chunk += 4;
4814	      break;
4815	    default:
4816	      putc ('\\', file);
4817	      putc (escape, file);
4818	      bytes_in_chunk += 2;
4819	      break;
4820	    }
4821	}
4822    }
4823  if (bytes_in_chunk > 0)
4824    fprintf (file, "\"\n");
4825}
4826
4827/* Return value is nonzero if pseudos that have been
4828   assigned to registers of class CLASS would likely be spilled
4829   because registers of CLASS are needed for spill registers.  */
4830
4831bool
4832class_likely_spilled_p (int c)
4833{
4834  return (c != ALL_REGS && c != ADDW_REGS);
4835}
4836
4837/* Valid attributes:
4838   progmem   - put data into program memory;
4839   signal    - make a function a hardware interrupt handler.  Interrupts
4840   remain disabled after the function prologue;
4841   interrupt - make a function a hardware interrupt handler.  Interrupts
4842   are enabled after the function prologue;
4843   naked     - don't generate a function prologue/epilogue or a `ret' instruction.
4844
4845   Only the `progmem' attribute is valid for a type.  */
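/* Typical uses in application code (identifiers below are illustrative
   placeholders only):

     const char msg[] __attribute__((progmem)) = "hello";
     void __attribute__((signal)) __vector_5 (void);
     void __attribute__((interrupt)) __vector_3 (void);
     void __attribute__((naked)) start (void);

   avr_asm_declare_function_name above warns when an interrupt or signal
   function is not named "__vector_<n>".  */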
4846
4847/* Handle a "progmem" attribute; arguments as in
4848   struct attribute_spec.handler.  */
4849static tree
4850avr_handle_progmem_attribute (tree *node, tree name,
4851			      tree args ATTRIBUTE_UNUSED,
4852			      int flags ATTRIBUTE_UNUSED,
4853			      bool *no_add_attrs)
4854{
4855  if (DECL_P (*node))
4856    {
4857      if (TREE_CODE (*node) == TYPE_DECL)
4858	{
4859	  /* This is really a decl attribute, not a type attribute,
4860	     but try to handle it for GCC 3.0 backwards compatibility.  */
4861
4862	  tree type = TREE_TYPE (*node);
4863	  tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4864	  tree newtype = build_type_attribute_variant (type, attr);
4865
4866	  TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4867	  TREE_TYPE (*node) = newtype;
4868	  *no_add_attrs = true;
4869	}
4870      else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
4871	{
4872	  if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4873	    {
4874	      warning (0, "only initialized variables can be placed into "
4875		       "program memory area");
4876	      *no_add_attrs = true;
4877	    }
4878	}
4879      else
4880	{
4881	  warning (OPT_Wattributes, "%qE attribute ignored",
4882		   name);
4883	  *no_add_attrs = true;
4884	}
4885    }
4886
4887  return NULL_TREE;
4888}
4889
4890/* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4891   struct attribute_spec.handler.  */
4892
4893static tree
4894avr_handle_fndecl_attribute (tree *node, tree name,
4895			     tree args ATTRIBUTE_UNUSED,
4896			     int flags ATTRIBUTE_UNUSED,
4897			     bool *no_add_attrs)
4898{
4899  if (TREE_CODE (*node) != FUNCTION_DECL)
4900    {
4901      warning (OPT_Wattributes, "%qE attribute only applies to functions",
4902	       name);
4903      *no_add_attrs = true;
4904    }
4905
4906  return NULL_TREE;
4907}
4908
4909static tree
4910avr_handle_fntype_attribute (tree *node, tree name,
4911                             tree args ATTRIBUTE_UNUSED,
4912                             int flags ATTRIBUTE_UNUSED,
4913                             bool *no_add_attrs)
4914{
4915  if (TREE_CODE (*node) != FUNCTION_TYPE)
4916    {
4917      warning (OPT_Wattributes, "%qE attribute only applies to functions",
4918	       name);
4919      *no_add_attrs = true;
4920    }
4921
4922  return NULL_TREE;
4923}
4924
4925/* Look for the attribute `progmem' in DECL;
4926   return 1 if found, 0 otherwise.  */
4927
4928int
4929avr_progmem_p (tree decl, tree attributes)
4930{
4931  tree a;
4932
4933  if (TREE_CODE (decl) != VAR_DECL)
4934    return 0;
4935
4936  if (NULL_TREE
4937      != lookup_attribute ("progmem", attributes))
4938    return 1;
4939
4940  a = decl;
4941  do
4942    a = TREE_TYPE(a);
4943  while (TREE_CODE (a) == ARRAY_TYPE);
4944
4945  if (a == error_mark_node)
4946    return 0;
4947
4948  if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4949    return 1;
4950
4951  return 0;
4952}
4953
4954/* Add the section attribute if the variable is in progmem.  */
4955
4956static void
4957avr_insert_attributes (tree node, tree *attributes)
4958{
4959  if (TREE_CODE (node) == VAR_DECL
4960      && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4961      && avr_progmem_p (node, *attributes))
4962    {
4963      static const char dsec[] = ".progmem.data";
4964      *attributes = tree_cons (get_identifier ("section"),
4965		build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4966		*attributes);
4967
4968      /* ??? This seems sketchy.  Why can't the user declare the
4969	 thing const in the first place?  */
4970      TREE_READONLY (node) = 1;
4971    }
4972}
4973
4974/* A get_unnamed_section callback for switching to progmem_section.  */
4975
4976static void
4977avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
4978{
4979  fprintf (asm_out_file,
4980	   "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4981	   AVR_HAVE_JMP_CALL ? "a" : "ax");
4982  /* Should already be aligned, this is just to be safe if it isn't.  */
4983  fprintf (asm_out_file, "\t.p2align 1\n");
4984}
4985
4986/* Implement TARGET_ASM_INIT_SECTIONS.  */
4987
4988static void
4989avr_asm_init_sections (void)
4990{
4991  progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
4992					 avr_output_progmem_section_asm_op,
4993					 NULL);
4994  readonly_data_section = data_section;
4995}
4996
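/* Implement the TARGET_SECTION_TYPE_FLAGS hook.  Sections named ".noinit"
   are flagged as BSS so they take no space in the load image (the intent
   being that their contents are not cleared at startup); for example
   (illustrative)

     int wdt_counter __attribute__((section(".noinit")));

   is allocated but left uninitialized, whereas putting an initialized
   variable there triggers the warning below.  */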
4997static unsigned int
4998avr_section_type_flags (tree decl, const char *name, int reloc)
4999{
5000  unsigned int flags = default_section_type_flags (decl, name, reloc);
5001
5002  if (strncmp (name, ".noinit", 7) == 0)
5003    {
5004      if (decl && TREE_CODE (decl) == VAR_DECL
5005	  && DECL_INITIAL (decl) == NULL_TREE)
5006	flags |= SECTION_BSS;  /* @nobits */
5007      else
5008	warning (0, "only uninitialized variables can be placed in the "
5009		 ".noinit section");
5010    }
5011
5012  return flags;
5013}
5014
5015/* Outputs some appropriate text to go at the start of an assembler
5016   file.  */
5017
5018static void
5019avr_file_start (void)
5020{
5021  if (avr_current_arch->asm_only)
5022    error ("MCU %qs supported for assembler only", avr_mcu_name);
5023
5024  default_file_start ();
5025
5026/*  fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
5027  fputs ("__SREG__ = 0x3f\n"
5028	 "__SP_H__ = 0x3e\n"
5029	 "__SP_L__ = 0x3d\n", asm_out_file);
5030
5031  fputs ("__tmp_reg__ = 0\n"
5032         "__zero_reg__ = 1\n", asm_out_file);
5033
5034  /* FIXME: output these only if there is anything in the .data / .bss
5035     sections - some code size could be saved by not linking in the
5036     initialization code from libgcc if one or both sections are empty.  */
5037  fputs ("\t.global __do_copy_data\n", asm_out_file);
5038  fputs ("\t.global __do_clear_bss\n", asm_out_file);
5039}
5040
5041/* Outputs to the stdio stream FILE some
5042   appropriate text to go at the end of an assembler file.  */
5043
5044static void
5045avr_file_end (void)
5046{
5047}
5048
5049/* Choose the order in which to allocate hard registers for
5050   pseudo-registers local to a basic block.
5051
5052   Store the desired register order in the array `reg_alloc_order'.
5053   Element 0 should be the register to allocate first; element 1, the
5054   next register; and so on.  */
5055
5056void
5057order_regs_for_local_alloc (void)
5058{
5059  unsigned int i;
5060  static const int order_0[] = {
5061    24,25,
5062    18,19,
5063    20,21,
5064    22,23,
5065    30,31,
5066    26,27,
5067    28,29,
5068    17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5069    0,1,
5070    32,33,34,35
5071  };
5072  static const int order_1[] = {
5073    18,19,
5074    20,21,
5075    22,23,
5076    24,25,
5077    30,31,
5078    26,27,
5079    28,29,
5080    17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5081    0,1,
5082    32,33,34,35
5083  };
5084  static const int order_2[] = {
5085    25,24,
5086    23,22,
5087    21,20,
5088    19,18,
5089    30,31,
5090    26,27,
5091    28,29,
5092    17,16,
5093    15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5094    1,0,
5095    32,33,34,35
5096  };
5097
5098  const int *order = (TARGET_ORDER_1 ? order_1 :
5099		      TARGET_ORDER_2 ? order_2 :
5100		      order_0);
5101  for (i=0; i < ARRAY_SIZE (order_0); ++i)
5102      reg_alloc_order[i] = order[i];
5103}
5104
5105
5106/* Mutually recursive subroutine of avr_rtx_costs for calculating the
5107   cost of an RTX operand given its context.  X is the rtx of the
5108   operand, MODE is its mode, and OUTER is the rtx_code of this
5109   operand's parent operator.  */
5110
5111static int
5112avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5113		      bool speed)
5114{
5115  enum rtx_code code = GET_CODE (x);
5116  int total;
5117
5118  switch (code)
5119    {
5120    case REG:
5121    case SUBREG:
5122      return 0;
5123
5124    case CONST_INT:
5125    case CONST_DOUBLE:
5126      return COSTS_N_INSNS (GET_MODE_SIZE (mode));
5127
5128    default:
5129      break;
5130    }
5131
5132  total = 0;
5133  avr_rtx_costs (x, code, outer, &total, speed);
5134  return total;
5135}
5136
5137/* The AVR backend's rtx_cost function.  X is the rtx expression whose cost
5138   is to be calculated.  Return true if the complete cost has been
5139   computed, and false if subexpressions should be scanned.  In either
5140   case, *TOTAL contains the cost result.  */
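/* A few data points from the tables below (illustrative): a QImode
   addition of two registers costs one instruction, an SImode addition
   with a non-constant second operand costs four instructions plus the
   cost of that operand, and top-level immediate constants are costed
   as free.  */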
5141
5142static bool
5143avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED, int *total,
5144	       bool speed)
5145{
5146  enum rtx_code code = (enum rtx_code) codearg;
5147  enum machine_mode mode = GET_MODE (x);
5148  HOST_WIDE_INT val;
5149
5150  switch (code)
5151    {
5152    case CONST_INT:
5153    case CONST_DOUBLE:
5154      /* Immediate constants are as cheap as registers.  */
5155      *total = 0;
5156      return true;
5157
5158    case MEM:
5159    case CONST:
5160    case LABEL_REF:
5161    case SYMBOL_REF:
5162      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5163      return true;
5164
5165    case NEG:
5166      switch (mode)
5167	{
5168	case QImode:
5169	case SFmode:
5170	  *total = COSTS_N_INSNS (1);
5171	  break;
5172
5173	case HImode:
5174	  *total = COSTS_N_INSNS (3);
5175	  break;
5176
5177	case SImode:
5178	  *total = COSTS_N_INSNS (7);
5179	  break;
5180
5181	default:
5182	  return false;
5183	}
5184      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5185      return true;
5186
5187    case ABS:
5188      switch (mode)
5189	{
5190	case QImode:
5191	case SFmode:
5192	  *total = COSTS_N_INSNS (1);
5193	  break;
5194
5195	default:
5196	  return false;
5197	}
5198      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5199      return true;
5200
5201    case NOT:
5202      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5203      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5204      return true;
5205
5206    case ZERO_EXTEND:
5207      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5208			      - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5209      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5210      return true;
5211
5212    case SIGN_EXTEND:
5213      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5214			      - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5215      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5216      return true;
5217
5218    case PLUS:
5219      switch (mode)
5220	{
5221	case QImode:
5222	  *total = COSTS_N_INSNS (1);
5223	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5224	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5225	  break;
5226
5227	case HImode:
5228	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5229	    {
5230	      *total = COSTS_N_INSNS (2);
5231	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5232	    }
5233	  else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5234	    *total = COSTS_N_INSNS (1);
5235	  else
5236	    *total = COSTS_N_INSNS (2);
5237	  break;
5238
5239	case SImode:
5240	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5241	    {
5242	      *total = COSTS_N_INSNS (4);
5243	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5244	    }
5245	  else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5246	    *total = COSTS_N_INSNS (1);
5247	  else
5248	    *total = COSTS_N_INSNS (4);
5249	  break;
5250
5251	default:
5252	  return false;
5253	}
5254      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5255      return true;
5256
5257    case MINUS:
5258    case AND:
5259    case IOR:
5260      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5261      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5262      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5263          *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5264      return true;
5265
5266    case XOR:
5267      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5268      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5269      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5270      return true;
5271
5272    case MULT:
5273      switch (mode)
5274	{
5275	case QImode:
5276	  if (AVR_HAVE_MUL)
5277	    *total = COSTS_N_INSNS (!speed ? 3 : 4);
5278	  else if (!speed)
5279	    *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5280	  else
5281	    return false;
5282	  break;
5283
5284	case HImode:
5285	  if (AVR_HAVE_MUL)
5286	    *total = COSTS_N_INSNS (!speed ? 7 : 10);
5287	  else if (!speed)
5288	    *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5289	  else
5290	    return false;
5291	  break;
5292
5293	default:
5294	  return false;
5295	}
5296      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5297      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5298      return true;
5299
5300    case DIV:
5301    case MOD:
5302    case UDIV:
5303    case UMOD:
5304      if (!speed)
5305	*total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5306      else
5307	return false;
5308      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5309      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5310      return true;
5311
5312    case ROTATE:
5313      switch (mode)
5314	{
5315	case QImode:
5316	  if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
5317	    *total = COSTS_N_INSNS (1);
5318
5319	  break;
5320
5321	case HImode:
5322	  if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
5323	    *total = COSTS_N_INSNS (3);
5324
5325	  break;
5326
5327	case SImode:
5328	  if (CONST_INT_P (XEXP (x, 1)))
5329	    switch (INTVAL (XEXP (x, 1)))
5330	      {
5331	      case 8:
5332	      case 24:
5333		*total = COSTS_N_INSNS (5);
5334		break;
5335	      case 16:
5336		*total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
5337		break;
5338	      }
5339	  break;
5340
5341	default:
5342	  return false;
5343	}
5344      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5345      return true;
5346
5347    case ASHIFT:
5348      switch (mode)
5349	{
5350	case QImode:
5351	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5352	    {
5353	      *total = COSTS_N_INSNS (!speed ? 4 : 17);
5354	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5355	    }
5356	  else
5357	    {
5358	      val = INTVAL (XEXP (x, 1));
5359	      if (val == 7)
5360		*total = COSTS_N_INSNS (3);
5361	      else if (val >= 0 && val <= 7)
5362		*total = COSTS_N_INSNS (val);
5363	      else
5364		*total = COSTS_N_INSNS (1);
5365	    }
5366	  break;
5367
5368	case HImode:
5369	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5370	    {
5371	      *total = COSTS_N_INSNS (!speed ? 5 : 41);
5372	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5373	    }
5374	  else
5375	    switch (INTVAL (XEXP (x, 1)))
5376	      {
5377	      case 0:
5378		*total = 0;
5379		break;
5380	      case 1:
5381	      case 8:
5382		*total = COSTS_N_INSNS (2);
5383		break;
5384	      case 9:
5385		*total = COSTS_N_INSNS (3);
5386		break;
5387	      case 2:
5388	      case 3:
5389	      case 10:
5390	      case 15:
5391		*total = COSTS_N_INSNS (4);
5392		break;
5393	      case 7:
5394	      case 11:
5395	      case 12:
5396		*total = COSTS_N_INSNS (5);
5397		break;
5398	      case 4:
5399		*total = COSTS_N_INSNS (!speed ? 5 : 8);
5400		break;
5401	      case 6:
5402		*total = COSTS_N_INSNS (!speed ? 5 : 9);
5403		break;
5404	      case 5:
5405		*total = COSTS_N_INSNS (!speed ? 5 : 10);
5406		break;
5407	      default:
5408	        *total = COSTS_N_INSNS (!speed ? 5 : 41);
5409	        *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5410	      }
5411	  break;
5412
5413	case SImode:
5414	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5415	    {
5416	      *total = COSTS_N_INSNS (!speed ? 7 : 113);
5417	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5418	    }
5419	  else
5420	    switch (INTVAL (XEXP (x, 1)))
5421	      {
5422	      case 0:
5423		*total = 0;
5424		break;
5425	      case 24:
5426		*total = COSTS_N_INSNS (3);
5427		break;
5428	      case 1:
5429	      case 8:
5430	      case 16:
5431		*total = COSTS_N_INSNS (4);
5432		break;
5433	      case 31:
5434		*total = COSTS_N_INSNS (6);
5435		break;
5436	      case 2:
5437		*total = COSTS_N_INSNS (!speed ? 7 : 8);
5438		break;
5439	      default:
5440		*total = COSTS_N_INSNS (!speed ? 7 : 113);
5441		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5442	      }
5443	  break;
5444
5445	default:
5446	  return false;
5447	}
5448      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5449      return true;
5450
5451    case ASHIFTRT:
5452      switch (mode)
5453	{
5454	case QImode:
5455	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5456	    {
5457	      *total = COSTS_N_INSNS (!speed ? 4 : 17);
5458	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5459	    }
5460	  else
5461	    {
5462	      val = INTVAL (XEXP (x, 1));
5463	      if (val == 6)
5464		*total = COSTS_N_INSNS (4);
5465	      else if (val == 7)
5466		*total = COSTS_N_INSNS (2);
5467	      else if (val >= 0 && val <= 7)
5468		*total = COSTS_N_INSNS (val);
5469	      else
5470		*total = COSTS_N_INSNS (1);
5471	    }
5472	  break;
5473
5474	case HImode:
5475	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5476	    {
5477	      *total = COSTS_N_INSNS (!speed ? 5 : 41);
5478	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5479	    }
5480	  else
5481	    switch (INTVAL (XEXP (x, 1)))
5482	      {
5483	      case 0:
5484		*total = 0;
5485		break;
5486	      case 1:
5487		*total = COSTS_N_INSNS (2);
5488		break;
5489	      case 15:
5490		*total = COSTS_N_INSNS (3);
5491		break;
5492	      case 2:
5493	      case 7:
5494              case 8:
5495              case 9:
5496		*total = COSTS_N_INSNS (4);
5497		break;
5498              case 10:
5499	      case 14:
5500		*total = COSTS_N_INSNS (5);
5501		break;
5502              case 11:
5503                *total = COSTS_N_INSNS (!speed ? 5 : 6);
5504		break;
5505              case 12:
5506                *total = COSTS_N_INSNS (!speed ? 5 : 7);
5507		break;
5508              case 6:
5509	      case 13:
5510                *total = COSTS_N_INSNS (!speed ? 5 : 8);
5511		break;
5512	      default:
5513	        *total = COSTS_N_INSNS (!speed ? 5 : 41);
5514	        *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5515	      }
5516	  break;
5517
5518	case SImode:
5519	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5520	    {
5521	      *total = COSTS_N_INSNS (!speed ? 7 : 113);
5522	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5523	    }
5524	  else
5525	    switch (INTVAL (XEXP (x, 1)))
5526	      {
5527	      case 0:
5528		*total = 0;
5529		break;
5530	      case 1:
5531		*total = COSTS_N_INSNS (4);
5532		break;
5533	      case 8:
5534	      case 16:
5535	      case 24:
5536		*total = COSTS_N_INSNS (6);
5537		break;
5538	      case 2:
5539		*total = COSTS_N_INSNS (!speed ? 7 : 8);
5540		break;
5541	      case 31:
5542		*total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5543		break;
5544	      default:
5545		*total = COSTS_N_INSNS (!speed ? 7 : 113);
5546		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5547	      }
5548	  break;
5549
5550	default:
5551	  return false;
5552	}
5553      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5554      return true;
5555
5556    case LSHIFTRT:
5557      switch (mode)
5558	{
5559	case QImode:
5560	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5561	    {
5562	      *total = COSTS_N_INSNS (!speed ? 4 : 17);
5563	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5564	    }
5565	  else
5566	    {
5567	      val = INTVAL (XEXP (x, 1));
5568	      if (val == 7)
5569		*total = COSTS_N_INSNS (3);
5570	      else if (val >= 0 && val <= 7)
5571		*total = COSTS_N_INSNS (val);
5572	      else
5573		*total = COSTS_N_INSNS (1);
5574	    }
5575	  break;
5576
5577	case HImode:
5578	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5579	    {
5580	      *total = COSTS_N_INSNS (!speed ? 5 : 41);
5581	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5582	    }
5583	  else
5584	    switch (INTVAL (XEXP (x, 1)))
5585	      {
5586	      case 0:
5587		*total = 0;
5588		break;
5589	      case 1:
5590	      case 8:
5591		*total = COSTS_N_INSNS (2);
5592		break;
5593	      case 9:
5594		*total = COSTS_N_INSNS (3);
5595		break;
5596	      case 2:
5597	      case 10:
5598	      case 15:
5599		*total = COSTS_N_INSNS (4);
5600		break;
5601	      case 7:
5602              case 11:
5603		*total = COSTS_N_INSNS (5);
5604		break;
5605	      case 3:
5606	      case 12:
5607	      case 13:
5608	      case 14:
5609		*total = COSTS_N_INSNS (!speed ? 5 : 6);
5610		break;
5611	      case 4:
5612		*total = COSTS_N_INSNS (!speed ? 5 : 7);
5613		break;
5614	      case 5:
5615	      case 6:
5616		*total = COSTS_N_INSNS (!speed ? 5 : 9);
5617		break;
5618	      default:
5619	        *total = COSTS_N_INSNS (!speed ? 5 : 41);
5620	        *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5621	      }
5622	  break;
5623
5624	case SImode:
5625	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5626	    {
5627	      *total = COSTS_N_INSNS (!speed ? 7 : 113);
5628	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5629	    }
5630	  else
5631	    switch (INTVAL (XEXP (x, 1)))
5632	      {
5633	      case 0:
5634		*total = 0;
5635		break;
5636	      case 1:
5637		*total = COSTS_N_INSNS (4);
5638		break;
5639	      case 2:
5640		*total = COSTS_N_INSNS (!speed ? 7 : 8);
5641		break;
5642	      case 8:
5643	      case 16:
5644	      case 24:
5645		*total = COSTS_N_INSNS (4);
5646		break;
5647	      case 31:
5648		*total = COSTS_N_INSNS (6);
5649		break;
5650	      default:
5651		*total = COSTS_N_INSNS (!speed ? 7 : 113);
5652		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5653	      }
5654	  break;
5655
5656	default:
5657	  return false;
5658	}
5659      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5660      return true;
5661
5662    case COMPARE:
5663      switch (GET_MODE (XEXP (x, 0)))
5664	{
5665	case QImode:
5666	  *total = COSTS_N_INSNS (1);
5667	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5668	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5669	  break;
5670
5671        case HImode:
5672	  *total = COSTS_N_INSNS (2);
5673	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5674            *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5675	  else if (INTVAL (XEXP (x, 1)) != 0)
5676	    *total += COSTS_N_INSNS (1);
5677          break;
5678
5679        case SImode:
5680          *total = COSTS_N_INSNS (4);
5681          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5682            *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5683	  else if (INTVAL (XEXP (x, 1)) != 0)
5684	    *total += COSTS_N_INSNS (3);
5685          break;
5686
5687	default:
5688	  return false;
5689	}
5690      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5691      return true;
5692
5693    default:
5694      break;
5695    }
5696  return false;
5697}
5698
5699/* Calculate the cost of a memory address.  */
5700
5701static int
5702avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
5703{
5704  if (GET_CODE (x) == PLUS
5705      && GET_CODE (XEXP (x,1)) == CONST_INT
5706      && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5707      && INTVAL (XEXP (x,1)) >= 61)
5708    return 18;
5709  if (CONSTANT_ADDRESS_P (x))
5710    {
5711      if (optimize > 0 && io_address_operand (x, QImode))
5712	return 2;
5713      return 4;
5714    }
5715  return 4;
5716}
5717
5718/* Test for the extra memory constraint 'Q': a memory address based on the
5719   Y or Z pointer with a valid (in-range) displacement.  */
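/* For example (illustrative), (mem:HI (plus:HI (reg Y) (const_int 10)))
   satisfies 'Q': the base register is the Y pointer and the displacement
   is within MAX_LD_OFFSET for the mode of the access.  */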
5720
5721int
5722extra_constraint_Q (rtx x)
5723{
5724  if (GET_CODE (XEXP (x,0)) == PLUS
5725      && REG_P (XEXP (XEXP (x,0), 0))
5726      && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5727      && (INTVAL (XEXP (XEXP (x,0), 1))
5728	  <= MAX_LD_OFFSET (GET_MODE (x))))
5729    {
5730      rtx xx = XEXP (XEXP (x,0), 0);
5731      int regno = REGNO (xx);
5732      if (TARGET_ALL_DEBUG)
5733	{
5734	  fprintf (stderr, ("extra_constraint:\n"
5735			    "reload_completed: %d\n"
5736			    "reload_in_progress: %d\n"),
5737		   reload_completed, reload_in_progress);
5738	  debug_rtx (x);
5739	}
5740      if (regno >= FIRST_PSEUDO_REGISTER)
5741	return 1;		/* allocate pseudos */
5742      else if (regno == REG_Z || regno == REG_Y)
5743	return 1;		/* strictly check */
5744      else if (xx == frame_pointer_rtx
5745	       || xx == arg_pointer_rtx)
5746	return 1;		/* XXX frame & arg pointer checks */
5747    }
5748  return 0;
5749}
5750
5751/* Convert condition code CONDITION to the corresponding AVR condition code.  */
5752
5753RTX_CODE
5754avr_normalize_condition (RTX_CODE condition)
5755{
5756  switch (condition)
5757    {
5758    case GT:
5759      return GE;
5760    case GTU:
5761      return GEU;
5762    case LE:
5763      return LT;
5764    case LEU:
5765      return LTU;
5766    default:
5767      gcc_unreachable ();
5768    }
5769}
5770
5771/* This function optimizes conditional jumps.  */
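/* For instance (illustrative), a compare of a register with the constant 5
   followed by a GT branch is rewritten as a compare with 6 and a GE branch;
   a register-register compare or a tst against zero may instead have its
   operands swapped while the branch condition is reversed.  */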
5772
5773static void
5774avr_reorg (void)
5775{
5776  rtx insn, pattern;
5777
5778  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5779    {
5780      if (! (GET_CODE (insn) == INSN
5781	     || GET_CODE (insn) == CALL_INSN
5782	     || GET_CODE (insn) == JUMP_INSN)
5783	  || !single_set (insn))
5784	continue;
5785
5786      pattern = PATTERN (insn);
5787
5788      if (GET_CODE (pattern) == PARALLEL)
5789	pattern = XVECEXP (pattern, 0, 0);
5790      if (GET_CODE (pattern) == SET
5791	  && SET_DEST (pattern) == cc0_rtx
5792	  && compare_diff_p (insn))
5793	{
5794	  if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5795	    {
5796	      /* We are now looking at a compare insn.  */
5797
5798	      pattern = SET_SRC (pattern);
5799	      if (true_regnum (XEXP (pattern,0)) >= 0
5800		  && true_regnum (XEXP (pattern,1)) >= 0 )
5801		{
5802		  rtx x = XEXP (pattern,0);
5803		  rtx next = next_real_insn (insn);
5804		  rtx pat = PATTERN (next);
5805		  rtx src = SET_SRC (pat);
5806		  rtx t = XEXP (src,0);
5807		  PUT_CODE (t, swap_condition (GET_CODE (t)));
5808		  XEXP (pattern,0) = XEXP (pattern,1);
5809		  XEXP (pattern,1) = x;
5810		  INSN_CODE (next) = -1;
5811		}
5812	      else if (true_regnum (XEXP (pattern, 0)) >= 0
5813		       && XEXP (pattern, 1) == const0_rtx)
5814	        {
5815	          /* This is a tst insn, we can reverse it.  */
5816	          rtx next = next_real_insn (insn);
5817	          rtx pat = PATTERN (next);
5818	          rtx src = SET_SRC (pat);
5819	          rtx t = XEXP (src,0);
5820
5821	          PUT_CODE (t, swap_condition (GET_CODE (t)));
5822	          XEXP (pattern, 1) = XEXP (pattern, 0);
5823	          XEXP (pattern, 0) = const0_rtx;
5824	          INSN_CODE (next) = -1;
5825	          INSN_CODE (insn) = -1;
5826	        }
5827	      else if (true_regnum (XEXP (pattern,0)) >= 0
5828		       && GET_CODE (XEXP (pattern,1)) == CONST_INT)
5829		{
5830		  rtx x = XEXP (pattern,1);
5831		  rtx next = next_real_insn (insn);
5832		  rtx pat = PATTERN (next);
5833		  rtx src = SET_SRC (pat);
5834		  rtx t = XEXP (src,0);
5835		  enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5836
5837		  if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5838		    {
5839		      XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5840		      PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
5841		      INSN_CODE (next) = -1;
5842		      INSN_CODE (insn) = -1;
5843		    }
5844		}
5845	    }
5846	}
5847    }
5848}
5849
5850/* Return the register number for the function return value.  */
5851
5852int
5853avr_ret_register (void)
5854{
5855  return 24;
5856}
5857
5858/* Create an RTX representing the place where a
5859   library function returns a value of mode MODE.  */
5860
5861rtx
5862avr_libcall_value (enum machine_mode mode)
5863{
5864  int offs = GET_MODE_SIZE (mode);
5865  if (offs < 2)
5866    offs = 2;
5867  return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5868}
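
/* Worked examples of the formula above, with RET_REGISTER being hard
   register 24 (r24):

	QImode (1 byte, padded to 2)  ->  reg 24, value in r24
	HImode (2 bytes)              ->  reg 24, value in r25:r24
	SImode/SFmode (4 bytes)       ->  reg 22, value in r25..r22
	DImode (8 bytes)              ->  reg 18, value in r25..r18  */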
5869
5870/* Create an RTX representing the place where a
5871   function returns a value of data type VALTYPE.  */
5872
5873rtx
5874avr_function_value (const_tree type,
5875		    const_tree func ATTRIBUTE_UNUSED,
5876		    bool outgoing ATTRIBUTE_UNUSED)
5877{
5878  unsigned int offs;
5879
5880  if (TYPE_MODE (type) != BLKmode)
5881    return avr_libcall_value (TYPE_MODE (type));
5882
5883  offs = int_size_in_bytes (type);
5884  if (offs < 2)
5885    offs = 2;
5886  if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5887    offs = GET_MODE_SIZE (SImode);
5888  else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5889    offs = GET_MODE_SIZE (DImode);
5890
5891  return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
5892}
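
/* For illustration, aggregate (BLKmode) sizes are rounded up to 2, 4 or 8
   bytes before the start register is computed: a 1-byte struct starts at
   r24, a 3-byte struct at r22 (r25..r22) and a 5- to 8-byte struct at r18
   (r25..r18).  Larger aggregates are returned in memory, see
   avr_return_in_memory below.  */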
5893
5894	/* Places additional restrictions on the register class to
5895	   use when it is necessary to copy value X into a register
5896	   in class RCLASS.  */
5897
5898enum reg_class
5899preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class rclass)
5900{
5901  return rclass;
5902}
5903
5904int
5905test_hard_reg_class (enum reg_class rclass, rtx x)
5906{
5907  int regno = true_regnum (x);
5908  if (regno < 0)
5909    return 0;
5910
5911  if (TEST_HARD_REG_CLASS (rclass, regno))
5912    return 1;
5913
5914  return 0;
5915}
5916
5917
5918int
5919jump_over_one_insn_p (rtx insn, rtx dest)
5920{
5921  int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5922		      ? XEXP (dest, 0)
5923		      : dest);
5924  int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5925  int dest_addr = INSN_ADDRESSES (uid);
5926  return dest_addr - jump_addr == get_attr_length (insn) + 1;
5927}
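
/* The addresses compared here are word addresses, so with purely
   illustrative numbers: a 1-word jump at address 100 whose target label
   sits at address 102 satisfies 102 - 100 == 1 + 1 and the function
   returns nonzero, i.e. the jump skips exactly one 1-word insn.  Output
   templates such as avr_out_sbxx_branch below then drop the RJMP and let
   the skip instruction (SBRS/SBRC, SBIS/SBIC) skip that insn itself.  */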
5928
5929/* Returns 1 if a value of mode MODE can be stored starting with hard
5930   register number REGNO.  On the enhanced core, anything larger than
5931	   1 byte must start in an even-numbered register for "movw" to work
5932   (this way we don't have to check for odd registers everywhere).  */
5933
5934int
5935avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5936{
5937  /* Disallow QImode in stack pointer regs.  */
5938  if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
5939    return 0;
5940
5941	  /* The only thing that can go into registers r28:r29 is a Pmode value.  */
5942  if (regno == REG_Y && mode == Pmode)
5943    return 1;
5944
5945  /* Otherwise disallow all regno/mode combinations that span r28:r29.  */
5946  if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
5947    return 0;
5948
5949  if (mode == QImode)
5950    return 1;
5951
5952  /* Modes larger than QImode occupy consecutive registers.  */
5953  if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5954    return 0;
5955
5956  /* All modes larger than QImode should start in an even register.  */
5957  return !(regno & 1);
5958}
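
/* A few illustrative consequences of the rules above:

	HImode in r24   ->  ok   (even start, MOVW-capable pair r25:r24)
	HImode in r25   ->  no   (odd start register)
	HImode in r28   ->  ok   (Pmode value in the frame pointer Y)
	SImode in r26   ->  no   (r26..r29 would overlap r28:r29)
	QImode in r29   ->  no   (rejected by the r28:r29 check)
	QImode in r32   ->  no   (low byte of the stack pointer, REG_SP)  */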
5959
5960const char *
5961output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5962{
5963  int tmp;
5964  if (!len)
5965    len = &tmp;
5966
5967  if (GET_CODE (operands[1]) == CONST_INT)
5968    {
5969      int val = INTVAL (operands[1]);
5970      if ((val & 0xff) == 0)
5971	{
5972	  *len = 3;
5973	  return (AS2 (mov,%A0,__zero_reg__) CR_TAB
5974		  AS2 (ldi,%2,hi8(%1))       CR_TAB
5975		  AS2 (mov,%B0,%2));
5976	}
5977      else if ((val & 0xff00) == 0)
5978	{
5979	  *len = 3;
5980	  return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5981		  AS2 (mov,%A0,%2)     CR_TAB
5982		  AS2 (mov,%B0,__zero_reg__));
5983	}
5984      else if ((val & 0xff) == ((val & 0xff00) >> 8))
5985	{
5986	  *len = 3;
5987	  return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5988		  AS2 (mov,%A0,%2)     CR_TAB
5989		  AS2 (mov,%B0,%2));
5990	}
5991    }
5992  *len = 4;
5993  return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5994	  AS2 (mov,%A0,%2)     CR_TAB
5995	  AS2 (ldi,%2,hi8(%1)) CR_TAB
5996	  AS2 (mov,%B0,%2));
5997}
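
/* For example, loading the (hypothetical) constant 0x2100 into a lower
   register pair, with %2 the upper scratch register provided by the insn,
   emits the first three-instruction special case above:

	mov %A0,__zero_reg__
	ldi %2,hi8(0x2100)
	mov %B0,%2

   The detour through %2 is necessary because LDI only accepts the upper
   registers r16..r31.  */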
5998
5999
6000const char *
6001output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
6002{
6003  rtx src = operands[1];
6004  int cnst = (GET_CODE (src) == CONST_INT);
6005
6006  if (len)
6007    {
6008      if (cnst)
6009	*len = 4 + ((INTVAL (src) & 0xff) != 0)
6010		+ ((INTVAL (src) & 0xff00) != 0)
6011		+ ((INTVAL (src) & 0xff0000) != 0)
6012		+ ((INTVAL (src) & 0xff000000) != 0);
6013      else
6014	*len = 8;
6015
6016      return "";
6017    }
6018
6019  if (cnst && ((INTVAL (src) & 0xff) == 0))
6020    output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
6021  else
6022    {
6023      output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
6024      output_asm_insn (AS2 (mov, %A0, %2), operands);
6025    }
6026  if (cnst && ((INTVAL (src) & 0xff00) == 0))
6027    output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
6028  else
6029    {
6030      output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
6031      output_asm_insn (AS2 (mov, %B0, %2), operands);
6032    }
6033  if (cnst && ((INTVAL (src) & 0xff0000) == 0))
6034    output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
6035  else
6036    {
6037      output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
6038      output_asm_insn (AS2 (mov, %C0, %2), operands);
6039    }
6040  if (cnst && ((INTVAL (src) & 0xff000000) == 0))
6041    output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
6042  else
6043    {
6044      output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
6045      output_asm_insn (AS2 (mov, %D0, %2), operands);
6046    }
6047  return "";
6048}
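
/* As an illustration, loading the constant 0x00ff0000 into an SImode
   register emits five instructions: bytes A, B and D are copied from
   __zero_reg__ and only byte C needs an LDI/MOV pair through the scratch
   register %2, which matches the length computed above (4 plus one
   non-zero byte).  */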
6049
6050void
6051avr_output_bld (rtx operands[], int bit_nr)
6052{
6053  static char s[] = "bld %A0,0";
6054
6055  s[5] = 'A' + (bit_nr >> 3);
6056  s[8] = '0' + (bit_nr & 7);
6057  output_asm_insn (s, operands);
6058}
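
/* Example: bit_nr 10 patches the template into "bld %B0,2", loading the
   T flag into bit 2 of the second byte of operand 0.  */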
6059
6060void
6061avr_output_addr_vec_elt (FILE *stream, int value)
6062{
6063  switch_to_section (progmem_section);
6064  if (AVR_HAVE_JMP_CALL)
6065    fprintf (stream, "\t.word gs(.L%d)\n", value);
6066  else
6067    fprintf (stream, "\trjmp .L%d\n", value);
6068}
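
/* For a jump table entry referring to code label 42 this prints either
   "\t.word gs(.L42)" on devices with JMP/CALL or "\trjmp .L42" on the
   smaller cores.  */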
6069
6070	/* Returns true if register REGNO is safe to be allocated as a scratch
6071	   register (for a define_peephole2) in the current function.  */
6072
6073bool
6074avr_hard_regno_scratch_ok (unsigned int regno)
6075{
6076  /* Interrupt functions can only use registers that have already been saved
6077     by the prologue, even if they would normally be call-clobbered.  */
6078
6079  if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6080      && !df_regs_ever_live_p (regno))
6081    return false;
6082
6083  return true;
6084}
6085
6086/* Return nonzero if register OLD_REG can be renamed to register NEW_REG.  */
6087
6088int
6089avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
6090			  unsigned int new_reg)
6091{
6092  /* Interrupt functions can only use registers that have already been
6093     saved by the prologue, even if they would normally be
6094     call-clobbered.  */
6095
6096  if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6097      && !df_regs_ever_live_p (new_reg))
6098    return 0;
6099
6100  return 1;
6101}
6102
6103	/* Output a branch that tests a single bit of a register (QI, HI or SImode)
6104   or memory location in the I/O space (QImode only).
6105
6106   Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
6107   Operand 1: register operand to test, or CONST_INT memory address.
6108   Operand 2: bit number.
6109   Operand 3: label to jump to if the test is true.  */
6110
6111const char *
6112avr_out_sbxx_branch (rtx insn, rtx operands[])
6113{
6114  enum rtx_code comp = GET_CODE (operands[0]);
6115  int long_jump = (get_attr_length (insn) >= 4);
6116  int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
6117
6118  if (comp == GE)
6119    comp = EQ;
6120  else if (comp == LT)
6121    comp = NE;
6122
6123  if (reverse)
6124    comp = reverse_condition (comp);
6125
6126  if (GET_CODE (operands[1]) == CONST_INT)
6127    {
6128      if (INTVAL (operands[1]) < 0x40)
6129	{
6130	  if (comp == EQ)
6131	    output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
6132	  else
6133	    output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
6134	}
6135      else
6136	{
6137	  output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
6138	  if (comp == EQ)
6139	    output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
6140	  else
6141	    output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
6142	}
6143    }
6144  else  /* GET_CODE (operands[1]) == REG */
6145    {
6146      if (GET_MODE (operands[1]) == QImode)
6147	{
6148	  if (comp == EQ)
6149	    output_asm_insn (AS2 (sbrs,%1,%2), operands);
6150	  else
6151	    output_asm_insn (AS2 (sbrc,%1,%2), operands);
6152	}
6153      else  /* HImode or SImode */
6154	{
6155	  static char buf[] = "sbrc %A1,0";
6156	  int bit_nr = INTVAL (operands[2]);
6157	  buf[3] = (comp == EQ) ? 's' : 'c';
6158	  buf[6] = 'A' + (bit_nr >> 3);
6159	  buf[9] = '0' + (bit_nr & 7);
6160	  output_asm_insn (buf, operands);
6161	}
6162    }
6163
6164  if (long_jump)
6165    return (AS1 (rjmp,.+4) CR_TAB
6166	    AS1 (jmp,%x3));
6167  if (!reverse)
6168    return AS1 (rjmp,%x3);
6169  return "";
6170}
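
/* As an example with hypothetical operand values: testing whether bit 3
   of an I/O register at memory address 0x25 is zero (EQ) and branching to
   the label in operand 3 emits

	sbis %m1-0x20,%2	; here 0x25-0x20 = I/O address 0x05, bit 3
	rjmp %x3

   so the RJMP is skipped when the bit is set.  Memory addresses of 0x40
   and above are out of range for SBIS/SBIC, so the register is first read
   into __tmp_reg__ with IN and the bit tested with SBRS/SBRC; when the
   target is out of RJMP range (long_jump), the condition is inverted and
   an "rjmp .+4" / "jmp %x3" pair is emitted instead.  */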
6171
6172/* Worker function for TARGET_ASM_CONSTRUCTOR.  */
6173
6174static void
6175avr_asm_out_ctor (rtx symbol, int priority)
6176{
6177  fputs ("\t.global __do_global_ctors\n", asm_out_file);
6178  default_ctor_section_asm_out_constructor (symbol, priority);
6179}
6180
6181/* Worker function for TARGET_ASM_DESTRUCTOR.  */
6182
6183static void
6184avr_asm_out_dtor (rtx symbol, int priority)
6185{
6186  fputs ("\t.global __do_global_dtors\n", asm_out_file);
6187  default_dtor_section_asm_out_destructor (symbol, priority);
6188}
6189
6190/* Worker function for TARGET_RETURN_IN_MEMORY.  */
6191
6192static bool
6193avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
6194{
6195  if (TYPE_MODE (type) == BLKmode)
6196    {
6197      HOST_WIDE_INT size = int_size_in_bytes (type);
6198      return (size == -1 || size > 8);
6199    }
6200  else
6201    return false;
6202}
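
/* For instance, a 10-byte structure is returned in memory through a
   hidden pointer argument, whereas an 8-byte structure still comes back
   in registers (r25..r18, see avr_function_value above).  */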
6203
6204/* Worker function for CASE_VALUES_THRESHOLD.  */
6205
6206	unsigned int
	avr_case_values_threshold (void)
6207{
6208  return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
6209}
6210
6211#include "gt-avr.h"
6212