1/* Memory address lowering and addressing mode selection.
2   Copyright (C) 2004 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it
7under the terms of the GNU General Public License as published by the
8Free Software Foundation; either version 2, or (at your option) any
9later version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT
12ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING.  If not, write to the Free
18Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
1902110-1301, USA.  */
20
/* Utility functions for manipulating TARGET_MEM_REFs -- tree expressions
   that directly map to addressing modes of the target.  */
23
24#include "config.h"
25#include "system.h"
26#include "coretypes.h"
27#include "tm.h"
28#include "tree.h"
29#include "rtl.h"
30#include "tm_p.h"
31#include "hard-reg-set.h"
32#include "basic-block.h"
33#include "output.h"
34#include "diagnostic.h"
35#include "tree-flow.h"
36#include "tree-dump.h"
37#include "tree-pass.h"
38#include "timevar.h"
39#include "flags.h"
40#include "tree-inline.h"
41#include "insn-config.h"
42#include "recog.h"
43#include "expr.h"
44#include "ggc.h"
45
/* TODO -- handling of symbols (according to Richard Henderson's
   comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):
48
49   There are at least 5 different kinds of symbols that we can run up against:
50
51     (1) binds_local_p, small data area.
52     (2) binds_local_p, eg local statics
53     (3) !binds_local_p, eg global variables
54     (4) thread local, local_exec
55     (5) thread local, !local_exec
56
57   Now, (1) won't appear often in an array context, but it certainly can.
58   All you have to do is set -GN high enough, or explicitly mark any
59   random object __attribute__((section (".sdata"))).
60
61   All of these affect whether or not a symbol is in fact a valid address.
62   The only one tested here is (3).  And that result may very well
63   be incorrect for (4) or (5).
64
65   An incorrect result here does not cause incorrect results out the
66   back end, because the expander in expr.c validizes the address.  However
67   it would be nice to improve the handling here in order to produce more
68   precise results.  */
69
/* A "template" for memory address, used to determine whether the address is
   valid for mode.  STEP_P and OFF_P point into the rtl of REF (the spots
   where a step or offset constant is to be substituted); they are marked
   GTY ((skip)) so the garbage collector does not treat these interior
   pointers as independent roots.  */

struct mem_addr_template GTY (())
{
  rtx ref;			/* The template.  */
  rtx * GTY ((skip)) step_p;	/* The point in template where the step should be
				   filled in.  */
  rtx * GTY ((skip)) off_p;	/* The point in template where the offset should
				   be filled in.  */
};
81
/* The templates.  Each of the five bits of the index corresponds to one
   component of TARGET_MEM_REF being present, see TEMPL_IDX.  Registered
   with the garbage collector since the REF fields hold rtl that must be
   kept alive across passes.  */

static GTY (()) struct mem_addr_template templates[32];
86
/* Computes the index into TEMPLATES for the combination of present address
   components.  Each argument contributes one bit (nonzero means present).
   The arguments are fully parenthesized so that arbitrary expressions can
   be passed without precedence surprises.  */

#define TEMPL_IDX(SYMBOL, BASE, INDEX, STEP, OFFSET) \
  ((((SYMBOL) != 0) << 4) \
   | (((BASE) != 0) << 3) \
   | (((INDEX) != 0) << 2) \
   | (((STEP) != 0) << 1) \
   | ((OFFSET) != 0))
93
/* Builds the address rtx for a memory reference with parts SYMBOL, BASE,
   INDEX, STEP and OFFSET (each may be NULL_RTX) and stores it to *ADDR.
   If STEP_P (resp. OFFSET_P) is non-NULL, a pointer to the location inside
   the built expression where the step (resp. offset) resides is stored to
   *STEP_P (resp. *OFFSET_P); this lets the caller later overwrite the
   constant in place, which is how the shared address "templates" are
   reused with different constants.  */

static void
gen_addr_rtx (rtx symbol, rtx base, rtx index, rtx step, rtx offset,
	      rtx *addr, rtx **step_p, rtx **offset_p)
{
  rtx act_elem;

  *addr = NULL_RTX;
  if (step_p)
    *step_p = NULL;
  if (offset_p)
    *offset_p = NULL;

  /* Scaled index comes first: (mult index step).  */
  if (index)
    {
      act_elem = index;
      if (step)
	{
	  act_elem = gen_rtx_MULT (Pmode, act_elem, step);

	  if (step_p)
	    *step_p = &XEXP (act_elem, 1);
	}

      *addr = act_elem;
    }

  /* The base register is added to whatever has been built so far.  */
  if (base)
    {
      if (*addr)
	*addr = gen_rtx_PLUS (Pmode, *addr, base);
      else
	*addr = base;
    }

  if (symbol)
    {
      act_elem = symbol;
      if (offset)
	{
	  /* Symbol plus constant offset is wrapped in CONST; the offset
	     slot is operand 1 of the inner PLUS.  */
	  act_elem = gen_rtx_CONST (Pmode,
				    gen_rtx_PLUS (Pmode, act_elem, offset));
	  if (offset_p)
	    *offset_p = &XEXP (XEXP (act_elem, 0), 1);
	}

      if (*addr)
	*addr = gen_rtx_PLUS (Pmode, *addr, act_elem);
      else
	*addr = act_elem;
    }
  else if (offset)
    {
      if (*addr)
	{
	  *addr = gen_rtx_PLUS (Pmode, *addr, offset);
	  if (offset_p)
	    *offset_p = &XEXP (*addr, 1);
	}
      else
	{
	  /* The offset is the entire address, so the replaceable slot is
	     the caller's ADDR location itself.  */
	  *addr = offset;
	  if (offset_p)
	    *offset_p = addr;
	}
    }

  /* No components at all -- degenerate to address zero.  */
  if (!*addr)
    *addr = const0_rtx;
}
167
/* Returns address for TARGET_MEM_REF with parameters given by ADDR.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in-place
   by using one of the "templates" -- the returned rtx is shared and only
   valid until the next call with REALLY_EXPAND false.  */

rtx
addr_for_mem_ref (struct mem_address *addr, bool really_expand)
{
  rtx address, sym, bse, idx, st, off;
  static bool templates_initialized = false;
  struct mem_addr_template *templ;

  /* A step of one and an offset of zero carry no information; represent
     them as absent (NULL_RTX) so that the right template is selected.  */
  if (addr->step && !integer_onep (addr->step))
    st = immed_double_const (TREE_INT_CST_LOW (addr->step),
			     TREE_INT_CST_HIGH (addr->step), Pmode);
  else
    st = NULL_RTX;

  if (addr->offset && !integer_zerop (addr->offset))
    off = immed_double_const (TREE_INT_CST_LOW (addr->offset),
			      TREE_INT_CST_HIGH (addr->offset), Pmode);
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      /* Reuse the templates for addresses, so that we do not waste memory.  */
      if (!templates_initialized)
	{
	  unsigned i;

	  templates_initialized = true;
	  /* Placeholder symbol and raw registers; only the shape of the
	     address matters when checking validity.  */
	  sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup ("test_symbol"));
	  bse = gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1);
	  idx = gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 2);

	  /* Build one template per combination of present components;
	     the bits of I match TEMPL_IDX.  */
	  for (i = 0; i < 32; i++)
	    gen_addr_rtx ((i & 16 ? sym : NULL_RTX),
			  (i & 8 ? bse : NULL_RTX),
			  (i & 4 ? idx : NULL_RTX),
			  (i & 2 ? const0_rtx : NULL_RTX),
			  (i & 1 ? const0_rtx : NULL_RTX),
			  &templates[i].ref,
			  &templates[i].step_p,
			  &templates[i].off_p);
	}

      templ = templates + TEMPL_IDX (addr->symbol, addr->base, addr->index,
				     st, off);
      /* Patch the actual constants into the shared template in place.  */
      if (st)
	*templ->step_p = st;
      if (off)
	*templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
	 ? expand_expr (build_addr (addr->symbol, current_function_decl),
			NULL_RTX, Pmode, EXPAND_NORMAL)
	 : NULL_RTX);
  bse = (addr->base
	 ? expand_expr (addr->base, NULL_RTX, Pmode, EXPAND_NORMAL)
	 : NULL_RTX);
  idx = (addr->index
	 ? expand_expr (addr->index, NULL_RTX, Pmode, EXPAND_NORMAL)
	 : NULL_RTX);

  gen_addr_rtx (sym, bse, idx, st, off, &address, NULL, NULL);
  return address;
}
240
241/* Returns address of MEM_REF in TYPE.  */
242
243tree
244tree_mem_ref_addr (tree type, tree mem_ref)
245{
246  tree addr = NULL_TREE;
247  tree act_elem;
248  tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
249
250  act_elem = TMR_INDEX (mem_ref);
251  if (act_elem)
252    {
253      act_elem = fold_convert (type, act_elem);
254
255      if (step)
256	act_elem = fold_build2 (MULT_EXPR, type, act_elem,
257				fold_convert (type, step));
258      addr = act_elem;
259    }
260
261  act_elem = TMR_BASE (mem_ref);
262  if (act_elem)
263    {
264      act_elem = fold_convert (type, act_elem);
265
266      if (addr)
267	addr = fold_build2 (PLUS_EXPR, type, addr, act_elem);
268      else
269	addr = act_elem;
270    }
271
272  act_elem = TMR_SYMBOL (mem_ref);
273  if (act_elem)
274    {
275      act_elem = fold_convert (type, build_addr (act_elem,
276						 current_function_decl));
277      if (addr)
278	addr = fold_build2 (PLUS_EXPR, type, addr, act_elem);
279      else
280	addr = act_elem;
281    }
282
283  if (!zero_p (offset))
284    {
285      act_elem = fold_convert (type, offset);
286
287      if (addr)
288	addr = fold_build2 (PLUS_EXPR, type, addr, act_elem);
289      else
290	addr = act_elem;
291    }
292
293  if (!addr)
294    addr = build_int_cst (type, 0);
295
296  return addr;
297}
298
299/* Returns true if a memory reference in MODE and with parameters given by
300   ADDR is valid on the current target.  */
301
302static bool
303valid_mem_ref_p (enum machine_mode mode, struct mem_address *addr)
304{
305  rtx address;
306
307  address = addr_for_mem_ref (addr, false);
308  if (!address)
309    return false;
310
311  return memory_address_p (mode, address);
312}
313
314/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
315   is valid on the current target and if so, creates and returns the
316   TARGET_MEM_REF.  */
317
318static tree
319create_mem_ref_raw (tree type, struct mem_address *addr)
320{
321  if (!valid_mem_ref_p (TYPE_MODE (type), addr))
322    return NULL_TREE;
323
324  if (addr->step && integer_onep (addr->step))
325    addr->step = NULL_TREE;
326
327  if (addr->offset && zero_p (addr->offset))
328    addr->offset = NULL_TREE;
329
330  return build7 (TARGET_MEM_REF, type,
331		 addr->symbol, addr->base, addr->index,
332		 addr->step, addr->offset, NULL, NULL);
333}
334
335/* Returns true if OBJ is an object whose address is a link time constant.  */
336
337static bool
338fixed_address_object_p (tree obj)
339{
340  return (TREE_CODE (obj) == VAR_DECL
341	  && (TREE_STATIC (obj)
342	      || DECL_EXTERNAL (obj)));
343}
344
345/* Adds COEF * ELT to PARTS.  TYPE is the type of the address we
346   construct.  */
347
348static void
349add_to_parts (struct mem_address *parts, tree type, tree elt,
350	      unsigned HOST_WIDE_INT coef)
351{
352  /* Check if this is a symbol.  */
353  if (!parts->symbol
354      && coef == 1
355      && TREE_CODE (elt) == ADDR_EXPR
356      && fixed_address_object_p (TREE_OPERAND (elt, 0)))
357    {
358      parts->symbol = TREE_OPERAND (elt, 0);
359      return;
360    }
361
362  if (coef != 1)
363    elt = fold_build2 (MULT_EXPR, type, fold_convert (type, elt),
364		       build_int_cst_type (type, coef));
365  else
366    elt = fold_convert (type, elt);
367
368  if (!parts->base)
369    {
370      parts->base = elt;
371      return;
372    }
373
374  if (!parts->index)
375    {
376      parts->index = elt;
377      return;
378    }
379
380  /* Add ELT to base.  */
381  parts->base = fold_build2 (PLUS_EXPR, type, parts->base, elt);
382}
383
384/* Finds the most expensive multiplication in ADDR that can be
385   expressed in an addressing mode and move the corresponding
386   element(s) to PARTS.  TYPE is the type of the address we
387   construct.  */
388
389static void
390most_expensive_mult_to_index (struct mem_address *parts, tree type,
391			      struct affine_tree_combination *addr)
392{
393  unsigned HOST_WIDE_INT best_mult = 0;
394  unsigned best_mult_cost = 0, acost;
395  tree mult_elt = NULL_TREE, elt;
396  unsigned i, j;
397
398  for (i = 0; i < addr->n; i++)
399    {
400      if (addr->coefs[i] == 1
401	  || !multiplier_allowed_in_address_p (addr->coefs[i]))
402	continue;
403
404      acost = multiply_by_cost (addr->coefs[i], Pmode);
405
406      if (acost > best_mult_cost)
407	{
408	  best_mult_cost = acost;
409	  best_mult = addr->coefs[i];
410	}
411    }
412
413  if (!best_mult)
414    return;
415
416  for (i = j = 0; i < addr->n; i++)
417    {
418      if (addr->coefs[i] != best_mult)
419	{
420	  addr->coefs[j] = addr->coefs[i];
421	  addr->elts[j] = addr->elts[i];
422	  j++;
423	  continue;
424	}
425
426      elt = fold_convert (type, addr->elts[i]);
427      if (!mult_elt)
428	mult_elt = elt;
429      else
430	mult_elt = fold_build2 (PLUS_EXPR, type, mult_elt, elt);
431    }
432  addr->n = j;
433
434  parts->index = mult_elt;
435  parts->step = build_int_cst_type (type, best_mult);
436}
437
438/* Splits address ADDR into PARTS.
439
440   TODO -- be more clever about the distribution of the elements of ADDR
441   to PARTS.  Some architectures do not support anything but single
442   register in address, possibly with a small integer offset; while
443   create_mem_ref will simplify the address to an acceptable shape
444   later, it would be a small bit more efficient to know that asking
445   for complicated addressing modes is useless.  */
446
447static void
448addr_to_parts (struct affine_tree_combination *addr, tree type,
449	       struct mem_address *parts)
450{
451  unsigned i;
452
453  parts->symbol = NULL_TREE;
454  parts->base = NULL_TREE;
455  parts->index = NULL_TREE;
456  parts->step = NULL_TREE;
457
458  if (addr->offset)
459    parts->offset = build_int_cst_type (type, addr->offset);
460  else
461    parts->offset = NULL_TREE;
462
463  /* First move the most expensive feasible multiplication
464     to index.  */
465  most_expensive_mult_to_index (parts, type, addr);
466
467  /* Then try to process the remaining elements.  */
468  for (i = 0; i < addr->n; i++)
469    add_to_parts (parts, type, addr->elts[i], addr->coefs[i]);
470  if (addr->rest)
471    add_to_parts (parts, type, addr->rest, 1);
472}
473
474/* Force the PARTS to register.  */
475
476static void
477gimplify_mem_ref_parts (block_stmt_iterator *bsi, struct mem_address *parts)
478{
479  if (parts->base)
480    parts->base = force_gimple_operand_bsi (bsi, parts->base,
481					    true, NULL_TREE);
482  if (parts->index)
483    parts->index = force_gimple_operand_bsi (bsi, parts->index,
484					     true, NULL_TREE);
485}
486
487/* Creates and returns a TARGET_MEM_REF for address ADDR.  If necessary
488   computations are emitted in front of BSI.  TYPE is the mode
489   of created memory reference.  */
490
491tree
492create_mem_ref (block_stmt_iterator *bsi, tree type,
493		struct affine_tree_combination *addr)
494{
495  tree mem_ref, tmp;
496  tree addr_type = build_pointer_type (type);
497  struct mem_address parts;
498
499  addr_to_parts (addr, addr_type, &parts);
500  gimplify_mem_ref_parts (bsi, &parts);
501  mem_ref = create_mem_ref_raw (type, &parts);
502  if (mem_ref)
503    return mem_ref;
504
505  /* The expression is too complicated.  Try making it simpler.  */
506
507  if (parts.step && !integer_onep (parts.step))
508    {
509      /* Move the multiplication to index.  */
510      gcc_assert (parts.index);
511      parts.index = force_gimple_operand_bsi (bsi,
512					      build2 (MULT_EXPR, addr_type,
513						      parts.index, parts.step),
514					      true, NULL_TREE);
515      parts.step = NULL_TREE;
516
517      mem_ref = create_mem_ref_raw (type, &parts);
518      if (mem_ref)
519	return mem_ref;
520    }
521
522  if (parts.symbol)
523    {
524      tmp = build_addr (parts.symbol, current_function_decl);
525
526      /* Add the symbol to base, eventually forcing it to register.  */
527      if (parts.base)
528	{
529	  if (parts.index)
530	    parts.base = force_gimple_operand_bsi (bsi,
531						   build2 (PLUS_EXPR, addr_type,
532							   parts.base, tmp),
533						   true, NULL_TREE);
534	  else
535	    {
536	      parts.index = parts.base;
537	      parts.base = tmp;
538	    }
539	}
540      else
541	parts.base = tmp;
542      parts.symbol = NULL_TREE;
543
544      mem_ref = create_mem_ref_raw (type, &parts);
545      if (mem_ref)
546	return mem_ref;
547    }
548
549  if (parts.base)
550    {
551      /* Add base to index.  */
552      if (parts.index)
553	parts.index = force_gimple_operand_bsi (bsi,
554						build2 (PLUS_EXPR, addr_type,
555							parts.base,
556							parts.index),
557						true, NULL_TREE);
558      else
559	parts.index = parts.base;
560      parts.base = NULL_TREE;
561
562      mem_ref = create_mem_ref_raw (type, &parts);
563      if (mem_ref)
564	return mem_ref;
565    }
566
567  if (parts.offset && !integer_zerop (parts.offset))
568    {
569      /* Try adding offset to index.  */
570      if (parts.index)
571	parts.index = force_gimple_operand_bsi (bsi,
572						build2 (PLUS_EXPR, addr_type,
573							parts.index,
574							parts.offset),
575						true, NULL_TREE);
576      else
577	parts.index = parts.offset, bsi;
578
579      parts.offset = NULL_TREE;
580
581      mem_ref = create_mem_ref_raw (type, &parts);
582      if (mem_ref)
583	return mem_ref;
584    }
585
586  /* Verify that the address is in the simplest possible shape
587     (only a register).  If we cannot create such a memory reference,
588     something is really wrong.  */
589  gcc_assert (parts.symbol == NULL_TREE);
590  gcc_assert (parts.base == NULL_TREE);
591  gcc_assert (!parts.step || integer_onep (parts.step));
592  gcc_assert (!parts.offset || integer_zerop (parts.offset));
593  gcc_unreachable ();
594}
595
596/* Copies components of the address from OP to ADDR.  */
597
598void
599get_address_description (tree op, struct mem_address *addr)
600{
601  addr->symbol = TMR_SYMBOL (op);
602  addr->base = TMR_BASE (op);
603  addr->index = TMR_INDEX (op);
604  addr->step = TMR_STEP (op);
605  addr->offset = TMR_OFFSET (op);
606}
607
608/* Copies the additional information attached to target_mem_ref FROM to TO.  */
609
610void
611copy_mem_ref_info (tree to, tree from)
612{
613  /* Copy the annotation, to preserve the aliasing information.  */
614  TMR_TAG (to) = TMR_TAG (from);
615
616  /* And the info about the original reference.  */
617  TMR_ORIGINAL (to) = TMR_ORIGINAL (from);
618}
619
620/* Move constants in target_mem_ref REF to offset.  Returns the new target
621   mem ref if anything changes, NULL_TREE otherwise.  */
622
623tree
624maybe_fold_tmr (tree ref)
625{
626  struct mem_address addr;
627  bool changed = false;
628  tree ret, off;
629
630  get_address_description (ref, &addr);
631
632  if (addr.base && TREE_CODE (addr.base) == INTEGER_CST)
633    {
634      if (addr.offset)
635	addr.offset = fold_binary_to_constant (PLUS_EXPR, ptr_type_node,
636					       addr.offset, addr.base);
637      else
638	addr.offset = addr.base;
639
640      addr.base = NULL_TREE;
641      changed = true;
642    }
643
644  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
645    {
646      off = addr.index;
647      if (addr.step)
648	{
649	  off = fold_binary_to_constant (MULT_EXPR, ptr_type_node,
650					 off, addr.step);
651	  addr.step = NULL_TREE;
652	}
653
654      if (addr.offset)
655	{
656	  addr.offset = fold_binary_to_constant (PLUS_EXPR, ptr_type_node,
657						 addr.offset, off);
658	}
659      else
660	addr.offset = off;
661
662      addr.index = NULL_TREE;
663      changed = true;
664    }
665
666  if (!changed)
667    return NULL_TREE;
668
669  ret = create_mem_ref_raw (TREE_TYPE (ref), &addr);
670  if (!ret)
671    return NULL_TREE;
672
673  copy_mem_ref_info (ret, ref);
674  return ret;
675}
676
677/* Dump PARTS to FILE.  */
678
679extern void dump_mem_address (FILE *, struct mem_address *);
680void
681dump_mem_address (FILE *file, struct mem_address *parts)
682{
683  if (parts->symbol)
684    {
685      fprintf (file, "symbol: ");
686      print_generic_expr (file, parts->symbol, TDF_SLIM);
687      fprintf (file, "\n");
688    }
689  if (parts->base)
690    {
691      fprintf (file, "base: ");
692      print_generic_expr (file, parts->base, TDF_SLIM);
693      fprintf (file, "\n");
694    }
695  if (parts->index)
696    {
697      fprintf (file, "index: ");
698      print_generic_expr (file, parts->index, TDF_SLIM);
699      fprintf (file, "\n");
700    }
701  if (parts->step)
702    {
703      fprintf (file, "step: ");
704      print_generic_expr (file, parts->step, TDF_SLIM);
705      fprintf (file, "\n");
706    }
707  if (parts->offset)
708    {
709      fprintf (file, "offset: ");
710      print_generic_expr (file, parts->offset, TDF_SLIM);
711      fprintf (file, "\n");
712    }
713}
714
715#include "gt-tree-ssa-address.h"
716