1;; Predicate definitions for IA-32 and x86-64.
2;; Copyright (C) 2004, 2005, 2006 Free Software Foundation, Inc.
3;;
4;; This file is part of GCC.
5;;
6;; GCC is free software; you can redistribute it and/or modify
7;; it under the terms of the GNU General Public License as published by
8;; the Free Software Foundation; either version 2, or (at your option)
9;; any later version.
10;;
11;; GCC is distributed in the hope that it will be useful,
12;; but WITHOUT ANY WARRANTY; without even the implied warranty of
13;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14;; GNU General Public License for more details.
15;;
16;; You should have received a copy of the GNU General Public License
17;; along with GCC; see the file COPYING.  If not, write to
18;; the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19;; Boston, MA 02110-1301, USA.
20
;; Return nonzero if OP is either an i387 or SSE fp register.
22(define_predicate "any_fp_register_operand"
23  (and (match_code "reg")
24       (match_test "ANY_FP_REGNO_P (REGNO (op))")))
25
26;; Return nonzero if OP is an i387 fp register.
27(define_predicate "fp_register_operand"
28  (and (match_code "reg")
29       (match_test "FP_REGNO_P (REGNO (op))")))
30
31;; Return nonzero if OP is a non-fp register_operand.
32(define_predicate "register_and_not_any_fp_reg_operand"
33  (and (match_code "reg")
34       (not (match_test "ANY_FP_REGNO_P (REGNO (op))"))))
35
36;; Return nonzero if OP is a register operand other than an i387 fp register.
37(define_predicate "register_and_not_fp_reg_operand"
38  (and (match_code "reg")
39       (not (match_test "FP_REGNO_P (REGNO (op))"))))
40
41;; True if the operand is an MMX register.
42(define_predicate "mmx_reg_operand"
43  (and (match_code "reg")
44       (match_test "MMX_REGNO_P (REGNO (op))")))
45
46;; True if the operand is a Q_REGS class register.
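;; In 32-bit mode these are %eax, %ebx, %ecx and %edx, the registers with
;; directly addressable low bytes; in 64-bit mode every general register
;; qualifies.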
47(define_predicate "q_regs_operand"
48  (match_operand 0 "register_operand")
49{
50  if (GET_CODE (op) == SUBREG)
51    op = SUBREG_REG (op);
52  return ANY_QI_REG_P (op);
53})
54
55;; Return true if op is a NON_Q_REGS class register.
56(define_predicate "non_q_regs_operand"
57  (match_operand 0 "register_operand")
58{
59  if (GET_CODE (op) == SUBREG)
60    op = SUBREG_REG (op);
61  return NON_QI_REG_P (op);
62})
63
;; Match a DImode (on 64-bit targets), SImode or HImode register for a
;; zero_extract.
65(define_special_predicate "ext_register_operand"
66  (match_operand 0 "register_operand")
67{
68  if ((!TARGET_64BIT || GET_MODE (op) != DImode)
69      && GET_MODE (op) != SImode && GET_MODE (op) != HImode)
70    return 0;
71  if (GET_CODE (op) == SUBREG)
72    op = SUBREG_REG (op);
73
74  /* Be careful to accept only registers having upper parts.  */
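  /* In i386 register numbering only hard registers 0..3 (%ax, %dx, %cx
     and %bx) have an addressable high byte; pseudo registers (above
     LAST_VIRTUAL_REGISTER) are accepted on the assumption that register
     allocation will place them in one of those registers.  */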
75  return REGNO (op) > LAST_VIRTUAL_REGISTER || REGNO (op) < 4;
76})
77
78;; Return true if op is the AX register.
79(define_predicate "ax_reg_operand"
80  (and (match_code "reg")
81       (match_test "REGNO (op) == 0")))
82
83;; Return true if op is the flags register.
84(define_predicate "flags_reg_operand"
85  (and (match_code "reg")
86       (match_test "REGNO (op) == FLAGS_REG")))
87
88;; Return 1 if VALUE can be stored in a sign extended immediate field.
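;; Such sign-extendable 32-bit constants are the only immediates most
;; 64-bit instructions accept directly; anything wider has to be loaded
;; with movabs or built up in a register first.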
89(define_predicate "x86_64_immediate_operand"
90  (match_code "const_int,symbol_ref,label_ref,const")
91{
92  if (!TARGET_64BIT)
93    return immediate_operand (op, mode);
94
95  switch (GET_CODE (op))
96    {
97    case CONST_INT:
      /* CONST_DOUBLEs never match, since HOST_BITS_PER_WIDE_INT is known
	 to be at least 32 and thus all acceptable constants are
	 represented as CONST_INT.  */
101      if (HOST_BITS_PER_WIDE_INT == 32)
102	return 1;
103      else
104	{
105	  HOST_WIDE_INT val = trunc_int_for_mode (INTVAL (op), DImode);
106	  return trunc_int_for_mode (val, SImode) == val;
107	}
108      break;
109
110    case SYMBOL_REF:
      /* For certain code models, the symbolic references are known to fit.
	 In the CM_SMALL_PIC model we know it fits if it is local to the
	 shared library.  Don't count TLS SYMBOL_REFs here, since they
	 should fit only inside an UNSPEC, handled below.  */
115      /* TLS symbols are not constant.  */
116      if (SYMBOL_REF_TLS_MODEL (op))
117	return false;
118      return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_KERNEL
119	      || (ix86_cmodel == CM_MEDIUM && !SYMBOL_REF_FAR_ADDR_P (op)));
120
121    case LABEL_REF:
122      /* For certain code models, the code is near as well.  */
123      return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM
124	      || ix86_cmodel == CM_KERNEL);
125
126    case CONST:
127      /* We also may accept the offsetted memory references in certain
128	 special cases.  */
129      if (GET_CODE (XEXP (op, 0)) == UNSPEC)
130	switch (XINT (XEXP (op, 0), 1))
131	  {
132	  case UNSPEC_GOTPCREL:
133	  case UNSPEC_DTPOFF:
134	  case UNSPEC_GOTNTPOFF:
135	  case UNSPEC_NTPOFF:
136	    return 1;
137	  default:
138	    break;
139	  }
140
141      if (GET_CODE (XEXP (op, 0)) == PLUS)
142	{
143	  rtx op1 = XEXP (XEXP (op, 0), 0);
144	  rtx op2 = XEXP (XEXP (op, 0), 1);
145	  HOST_WIDE_INT offset;
146
147	  if (ix86_cmodel == CM_LARGE)
148	    return 0;
149	  if (GET_CODE (op2) != CONST_INT)
150	    return 0;
151	  offset = trunc_int_for_mode (INTVAL (op2), DImode);
152	  switch (GET_CODE (op1))
153	    {
154	    case SYMBOL_REF:
155	      /* TLS symbols are not constant.  */
156	      if (SYMBOL_REF_TLS_MODEL (op1))
157		return 0;
	      /* For CM_SMALL assume that the last object lies within 16MB
		 of the end of the 31-bit boundary.  We may also accept
		 fairly large negative offsets, knowing that all objects
		 are in the positive half of the address space.  */
162	      if ((ix86_cmodel == CM_SMALL
163		   || (ix86_cmodel == CM_MEDIUM
164		       && !SYMBOL_REF_FAR_ADDR_P (op1)))
165		  && offset < 16*1024*1024
166		  && trunc_int_for_mode (offset, SImode) == offset)
167		return 1;
	      /* For CM_KERNEL we know that all objects reside in the
		 negative half of the 32-bit address space.  We must not
		 accept negative offsets, since they may fall just outside
		 that half, but we may accept fairly large positive ones.  */
172	      if (ix86_cmodel == CM_KERNEL
173		  && offset > 0
174		  && trunc_int_for_mode (offset, SImode) == offset)
175		return 1;
176	      break;
177
178	    case LABEL_REF:
179	      /* These conditions are similar to SYMBOL_REF ones, just the
180		 constraints for code models differ.  */
181	      if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
182		  && offset < 16*1024*1024
183		  && trunc_int_for_mode (offset, SImode) == offset)
184		return 1;
185	      if (ix86_cmodel == CM_KERNEL
186		  && offset > 0
187		  && trunc_int_for_mode (offset, SImode) == offset)
188		return 1;
189	      break;
190
191	    case UNSPEC:
192	      switch (XINT (op1, 1))
193		{
194		case UNSPEC_DTPOFF:
195		case UNSPEC_NTPOFF:
196		  if (offset > 0
197		      && trunc_int_for_mode (offset, SImode) == offset)
198		    return 1;
199		}
200	      break;
201
202	    default:
203	      break;
204	    }
205	}
206      break;
207
    default:
      gcc_unreachable ();
210    }
211
212  return 0;
213})
214
215;; Return 1 if VALUE can be stored in the zero extended immediate field.
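;; These are the constants a 32-bit move such as "movl $imm32, %edi" can
;; produce, since writing a 32-bit register clears the upper 32 bits of
;; the full 64-bit register.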
216(define_predicate "x86_64_zext_immediate_operand"
217  (match_code "const_double,const_int,symbol_ref,label_ref,const")
218{
219  switch (GET_CODE (op))
220    {
221    case CONST_DOUBLE:
222      if (HOST_BITS_PER_WIDE_INT == 32)
223	return (GET_MODE (op) == VOIDmode && !CONST_DOUBLE_HIGH (op));
224      else
225	return 0;
226
227    case CONST_INT:
228      if (HOST_BITS_PER_WIDE_INT == 32)
229	return INTVAL (op) >= 0;
230      else
231	return !(INTVAL (op) & ~(HOST_WIDE_INT) 0xffffffff);
232
233    case SYMBOL_REF:
234      /* For certain code models, the symbolic references are known to fit.  */
235      /* TLS symbols are not constant.  */
236      if (SYMBOL_REF_TLS_MODEL (op))
237	return false;
238      return (ix86_cmodel == CM_SMALL
239	      || (ix86_cmodel == CM_MEDIUM
240		  && !SYMBOL_REF_FAR_ADDR_P (op)));
241
242    case LABEL_REF:
243      /* For certain code models, the code is near as well.  */
244      return ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM;
245
246    case CONST:
247      /* We also may accept the offsetted memory references in certain
248	 special cases.  */
249      if (GET_CODE (XEXP (op, 0)) == PLUS)
250	{
251	  rtx op1 = XEXP (XEXP (op, 0), 0);
252	  rtx op2 = XEXP (XEXP (op, 0), 1);
253
254	  if (ix86_cmodel == CM_LARGE)
255	    return 0;
256	  switch (GET_CODE (op1))
257	    {
258	    case SYMBOL_REF:
259	      /* TLS symbols are not constant.  */
260	      if (SYMBOL_REF_TLS_MODEL (op1))
261		return 0;
262	      /* For small code model we may accept pretty large positive
263		 offsets, since one bit is available for free.  Negative
264		 offsets are limited by the size of NULL pointer area
265		 specified by the ABI.  */
266	      if ((ix86_cmodel == CM_SMALL
267		   || (ix86_cmodel == CM_MEDIUM
268		       && !SYMBOL_REF_FAR_ADDR_P (op1)))
269		  && GET_CODE (op2) == CONST_INT
270		  && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
271		  && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
272		return 1;
273	      /* ??? For the kernel, we may accept adjustment of
274		 -0x10000000, since we know that it will just convert
275		 negative address space to positive, but perhaps this
276		 is not worthwhile.  */
277	      break;
278
279	    case LABEL_REF:
280	      /* These conditions are similar to SYMBOL_REF ones, just the
281		 constraints for code models differ.  */
282	      if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
283		  && GET_CODE (op2) == CONST_INT
284		  && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
285		  && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
286		return 1;
287	      break;
288
289	    default:
290	      return 0;
291	    }
292	}
293      break;
294
295    default:
296      gcc_unreachable ();
297    }
298  return 0;
299})
300
;; Return nonzero if OP is a general operand representable on x86_64.
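;; In 64-bit mode a plain general_operand would also accept 64-bit
;; immediates, which most instructions cannot encode, so immediates are
;; restricted to the sign-extendable 32-bit ones.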
302(define_predicate "x86_64_general_operand"
303  (if_then_else (match_test "TARGET_64BIT")
304    (ior (match_operand 0 "nonimmediate_operand")
305	 (match_operand 0 "x86_64_immediate_operand"))
306    (match_operand 0 "general_operand")))
307
;; Return nonzero if OP is a general operand representable on x86_64
;; as either a sign-extended or zero-extended constant.
310(define_predicate "x86_64_szext_general_operand"
311  (if_then_else (match_test "TARGET_64BIT")
312    (ior (match_operand 0 "nonimmediate_operand")
313	 (ior (match_operand 0 "x86_64_immediate_operand")
314	      (match_operand 0 "x86_64_zext_immediate_operand")))
315    (match_operand 0 "general_operand")))
316
;; Return nonzero if OP is a nonmemory operand representable on x86_64.
318(define_predicate "x86_64_nonmemory_operand"
319  (if_then_else (match_test "TARGET_64BIT")
320    (ior (match_operand 0 "register_operand")
321	 (match_operand 0 "x86_64_immediate_operand"))
322    (match_operand 0 "nonmemory_operand")))
323
;; Return nonzero if OP is a nonmemory operand representable on x86_64
;; as either a sign-extended or zero-extended constant.
325(define_predicate "x86_64_szext_nonmemory_operand"
326  (if_then_else (match_test "TARGET_64BIT")
327    (ior (match_operand 0 "register_operand")
328	 (ior (match_operand 0 "x86_64_immediate_operand")
329	      (match_operand 0 "x86_64_zext_immediate_operand")))
330    (match_operand 0 "nonmemory_operand")))
331
;; Return true when the operand is a PIC expression that can be computed
;; by an lea operation.
334(define_predicate "pic_32bit_operand"
335  (match_code "const,symbol_ref,label_ref")
336{
337  if (!flag_pic)
338    return 0;
339  /* Rule out relocations that translate into 64bit constants.  */
340  if (TARGET_64BIT && GET_CODE (op) == CONST)
341    {
342      op = XEXP (op, 0);
343      if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
344	op = XEXP (op, 0);
345      if (GET_CODE (op) == UNSPEC
346	  && (XINT (op, 1) == UNSPEC_GOTOFF
347	      || XINT (op, 1) == UNSPEC_GOT))
348	return 0;
349    }
350  return symbolic_operand (op, mode);
351})
352
353
;; Return nonzero if OP is a nonmemory operand acceptable to the movabs patterns.
355(define_predicate "x86_64_movabs_operand"
356  (if_then_else (match_test "!TARGET_64BIT || !flag_pic")
357    (match_operand 0 "nonmemory_operand")
358    (ior (match_operand 0 "register_operand")
359	 (and (match_operand 0 "const_double_operand")
360	      (match_test "GET_MODE_SIZE (mode) <= 8")))))
361
362;; Returns nonzero if OP is either a symbol reference or a sum of a symbol
363;; reference and a constant.
364(define_predicate "symbolic_operand"
365  (match_code "symbol_ref,label_ref,const")
366{
367  switch (GET_CODE (op))
368    {
369    case SYMBOL_REF:
370    case LABEL_REF:
371      return 1;
372
373    case CONST:
374      op = XEXP (op, 0);
375      if (GET_CODE (op) == SYMBOL_REF
376	  || GET_CODE (op) == LABEL_REF
377	  || (GET_CODE (op) == UNSPEC
378	      && (XINT (op, 1) == UNSPEC_GOT
379		  || XINT (op, 1) == UNSPEC_GOTOFF
380		  || XINT (op, 1) == UNSPEC_GOTPCREL)))
381	return 1;
382      if (GET_CODE (op) != PLUS
383	  || GET_CODE (XEXP (op, 1)) != CONST_INT)
384	return 0;
385
386      op = XEXP (op, 0);
387      if (GET_CODE (op) == SYMBOL_REF
388	  || GET_CODE (op) == LABEL_REF)
389	return 1;
390      /* Only @GOTOFF gets offsets.  */
391      if (GET_CODE (op) != UNSPEC
392	  || XINT (op, 1) != UNSPEC_GOTOFF)
393	return 0;
394
395      op = XVECEXP (op, 0, 0);
396      if (GET_CODE (op) == SYMBOL_REF
397	  || GET_CODE (op) == LABEL_REF)
398	return 1;
399      return 0;
400
401    default:
402      gcc_unreachable ();
403    }
404})
405
406;; Return true if the operand contains a @GOT or @GOTOFF reference.
407(define_predicate "pic_symbolic_operand"
408  (match_code "const")
409{
410  op = XEXP (op, 0);
411  if (TARGET_64BIT)
412    {
413      if (GET_CODE (op) == UNSPEC
414	  && XINT (op, 1) == UNSPEC_GOTPCREL)
415	return 1;
416      if (GET_CODE (op) == PLUS
417	  && GET_CODE (XEXP (op, 0)) == UNSPEC
418	  && XINT (XEXP (op, 0), 1) == UNSPEC_GOTPCREL)
419	return 1;
420    }
421  else
422    {
423      if (GET_CODE (op) == UNSPEC)
424	return 1;
425      if (GET_CODE (op) != PLUS
426	  || GET_CODE (XEXP (op, 1)) != CONST_INT)
427	return 0;
428      op = XEXP (op, 0);
429      if (GET_CODE (op) == UNSPEC)
430	return 1;
431    }
432  return 0;
433})
434
435;; Return true if OP is a symbolic operand that resolves locally.
436(define_predicate "local_symbolic_operand"
437  (match_code "const,label_ref,symbol_ref")
438{
439  if (GET_CODE (op) == CONST
440      && GET_CODE (XEXP (op, 0)) == PLUS
441      && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
442    op = XEXP (XEXP (op, 0), 0);
443
444  if (GET_CODE (op) == LABEL_REF)
445    return 1;
446
447  if (GET_CODE (op) != SYMBOL_REF)
448    return 0;
449
450  if (SYMBOL_REF_TLS_MODEL (op) != 0)
451    return 0;
452
453  if (SYMBOL_REF_LOCAL_P (op))
454    return 1;
455
456  /* There is, however, a not insubstantial body of code in the rest of
457     the compiler that assumes it can just stick the results of
458     ASM_GENERATE_INTERNAL_LABEL in a symbol_ref and have done.  */
459  /* ??? This is a hack.  Should update the body of the compiler to
     always create a DECL and invoke targetm.encode_section_info.  */
461  if (strncmp (XSTR (op, 0), internal_label_prefix,
462	       internal_label_prefix_len) == 0)
463    return 1;
464
465  return 0;
466})
467
468;; Test for various thread-local symbols.
469(define_predicate "tls_symbolic_operand"
470  (and (match_code "symbol_ref")
471       (match_test "SYMBOL_REF_TLS_MODEL (op) != 0")))
472
473(define_predicate "tls_modbase_operand"
474  (and (match_code "symbol_ref")
475       (match_test "op == ix86_tls_module_base ()")))
476
477(define_predicate "tp_or_register_operand"
478  (ior (match_operand 0 "register_operand")
479       (and (match_code "unspec")
480	    (match_test "XINT (op, 1) == UNSPEC_TP"))))
481
482;; Test for a pc-relative call operand
483(define_predicate "constant_call_address_operand"
484  (ior (match_code "symbol_ref")
485       (match_operand 0 "local_symbolic_operand")))
486
;; True for any register that is neither virtual nor eliminable.  Used in
;; places where the instantiation of a virtual or eliminable register would
;; cause the pattern to not be recognized.
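;; (After elimination, for example, the argument or frame pointer is
;; rewritten as the stack or hard frame pointer plus an offset, which is
;; no longer a bare register.)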
489(define_predicate "register_no_elim_operand"
490  (match_operand 0 "register_operand")
491{
492  if (GET_CODE (op) == SUBREG)
493    op = SUBREG_REG (op);
494  return !(op == arg_pointer_rtx
495	   || op == frame_pointer_rtx
496	   || (REGNO (op) >= FIRST_PSEUDO_REGISTER
497	       && REGNO (op) <= LAST_VIRTUAL_REGISTER));
498})
499
500;; Similarly, but include the stack pointer.  This is used to prevent esp
501;; from being used as an index reg.
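;; (The x86 SIB byte has no encoding for %esp as an index register.)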
502(define_predicate "index_register_operand"
503  (match_operand 0 "register_operand")
504{
505  if (GET_CODE (op) == SUBREG)
506    op = SUBREG_REG (op);
507  if (reload_in_progress || reload_completed)
508    return REG_OK_FOR_INDEX_STRICT_P (op);
509  else
510    return REG_OK_FOR_INDEX_NONSTRICT_P (op);
511})
512
513;; Return false if this is any eliminable register.  Otherwise general_operand.
514(define_predicate "general_no_elim_operand"
515  (if_then_else (match_code "reg,subreg")
516    (match_operand 0 "register_no_elim_operand")
517    (match_operand 0 "general_operand")))
518
519;; Return false if this is any eliminable register.  Otherwise
520;; register_operand or a constant.
521(define_predicate "nonmemory_no_elim_operand"
522  (ior (match_operand 0 "register_no_elim_operand")
523       (match_operand 0 "immediate_operand")))
524
525;; Test for a valid operand for a call instruction.
526(define_predicate "call_insn_operand"
527  (ior (match_operand 0 "constant_call_address_operand")
528       (ior (match_operand 0 "register_no_elim_operand")
529	    (match_operand 0 "memory_operand"))))
530
531;; Similarly, but for tail calls, in which we cannot allow memory references.
532(define_predicate "sibcall_insn_operand"
533  (ior (match_operand 0 "constant_call_address_operand")
534       (match_operand 0 "register_no_elim_operand")))
535
536;; Match exactly zero.
537(define_predicate "const0_operand"
538  (match_code "const_int,const_double,const_vector")
539{
540  if (mode == VOIDmode)
541    mode = GET_MODE (op);
542  return op == CONST0_RTX (mode);
543})
544
545;; Match exactly one.
546(define_predicate "const1_operand"
547  (and (match_code "const_int")
548       (match_test "op == const1_rtx")))
549
550;; Match exactly eight.
551(define_predicate "const8_operand"
552  (and (match_code "const_int")
553       (match_test "INTVAL (op) == 8")))
554
555;; Match 2, 4, or 8.  Used for leal multiplicands.
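;; For example, (plus (mult (reg) (const_int 4)) (reg)) inside an address
;; maps onto "leal (%ebx,%eax,4), %ecx"; a scale of 1 needs no mult at all.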
556(define_predicate "const248_operand"
557  (match_code "const_int")
558{
559  HOST_WIDE_INT i = INTVAL (op);
560  return i == 2 || i == 4 || i == 8;
561})
562
563;; Match 0 or 1.
564(define_predicate "const_0_to_1_operand"
565  (and (match_code "const_int")
566       (match_test "op == const0_rtx || op == const1_rtx")))
567
568;; Match 0 to 3.
569(define_predicate "const_0_to_3_operand"
570  (and (match_code "const_int")
571       (match_test "INTVAL (op) >= 0 && INTVAL (op) <= 3")))
572
573;; Match 0 to 7.
574(define_predicate "const_0_to_7_operand"
575  (and (match_code "const_int")
576       (match_test "INTVAL (op) >= 0 && INTVAL (op) <= 7")))
577
578;; Match 0 to 15.
579(define_predicate "const_0_to_15_operand"
580  (and (match_code "const_int")
581       (match_test "INTVAL (op) >= 0 && INTVAL (op) <= 15")))
582
583;; Match 0 to 63.
584(define_predicate "const_0_to_63_operand"
585  (and (match_code "const_int")
586       (match_test "INTVAL (op) >= 0 && INTVAL (op) <= 63")))
587
588;; Match 0 to 255.
589(define_predicate "const_0_to_255_operand"
590  (and (match_code "const_int")
591       (match_test "INTVAL (op) >= 0 && INTVAL (op) <= 255")))
592
593;; Match (0 to 255) * 8
594(define_predicate "const_0_to_255_mul_8_operand"
595  (match_code "const_int")
596{
597  unsigned HOST_WIDE_INT val = INTVAL (op);
598  return val <= 255*8 && val % 8 == 0;
599})
600
601;; Return nonzero if OP is CONST_INT >= 1 and <= 31 (a valid operand
602;; for shift & compare patterns, as shifting by 0 does not change flags).
603(define_predicate "const_1_to_31_operand"
604  (and (match_code "const_int")
605       (match_test "INTVAL (op) >= 1 && INTVAL (op) <= 31")))
606
607;; Match 2 or 3.
608(define_predicate "const_2_to_3_operand"
609  (and (match_code "const_int")
610       (match_test "INTVAL (op) == 2 || INTVAL (op) == 3")))
611
612;; Match 4 to 7.
613(define_predicate "const_4_to_7_operand"
614  (and (match_code "const_int")
615       (match_test "INTVAL (op) >= 4 && INTVAL (op) <= 7")))
616
;; Match a power of 2 between 1 and 8, i.e. exactly one bit set in a 4-bit mask.
618(define_predicate "const_pow2_1_to_8_operand"
619  (match_code "const_int")
620{
621  unsigned int log = exact_log2 (INTVAL (op));
622  return log <= 3;
623})
624
;; Match a power of 2 between 1 and 128, i.e. exactly one bit set in an 8-bit mask.
626(define_predicate "const_pow2_1_to_128_operand"
627  (match_code "const_int")
628{
629  unsigned int log = exact_log2 (INTVAL (op));
630  return log <= 7;
631})
632
633;; True if this is a constant appropriate for an increment or decrement.
634(define_predicate "incdec_operand"
635  (match_code "const_int")
636{
  /* On Pentium 4, the inc and dec operations cause an extra dependency
     on the flags register, since the carry flag is not set.  */
639  if (!TARGET_USE_INCDEC && !optimize_size)
640    return 0;
641  return op == const1_rtx || op == constm1_rtx;
642})
643
644;; True for registers, or 1 or -1.  Used to optimize double-word shifts.
645(define_predicate "reg_or_pm1_operand"
646  (ior (match_operand 0 "register_operand")
647       (and (match_code "const_int")
648	    (match_test "op == const1_rtx || op == constm1_rtx"))))
649
;; True if OP is acceptable as an operand of the DImode shift expander.
651(define_predicate "shiftdi_operand"
652  (if_then_else (match_test "TARGET_64BIT")
653    (match_operand 0 "nonimmediate_operand")
654    (match_operand 0 "register_operand")))
655
656(define_predicate "ashldi_input_operand"
657  (if_then_else (match_test "TARGET_64BIT")
658    (match_operand 0 "nonimmediate_operand")
659    (match_operand 0 "reg_or_pm1_operand")))
660
661;; Return true if OP is a vector load from the constant pool with just
662;; the first element nonzero.
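;; Such constants typically come from initializers like { x, 0, 0, 0 } and
;; let the load be done as a scalar movss/movsd-style load, which zeroes
;; the upper elements of the destination register.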
663(define_predicate "zero_extended_scalar_load_operand"
664  (match_code "mem")
665{
666  unsigned n_elts;
667  op = maybe_get_pool_constant (op);
668  if (!op)
669    return 0;
670  if (GET_CODE (op) != CONST_VECTOR)
671    return 0;
672  n_elts =
673    (GET_MODE_SIZE (GET_MODE (op)) /
674     GET_MODE_SIZE (GET_MODE_INNER (GET_MODE (op))));
675  for (n_elts--; n_elts > 0; n_elts--)
676    {
677      rtx elt = CONST_VECTOR_ELT (op, n_elts);
678      if (elt != CONST0_RTX (GET_MODE_INNER (GET_MODE (op))))
679	return 0;
680    }
681  return 1;
682})
683
;; Return true if the operand is a vector constant that is all ones.
685(define_predicate "vector_all_ones_operand"
686  (match_code "const_vector")
687{
688  int nunits = GET_MODE_NUNITS (mode);
689
690  if (GET_CODE (op) == CONST_VECTOR
691      && CONST_VECTOR_NUNITS (op) == nunits)
692    {
693      int i;
694      for (i = 0; i < nunits; ++i)
695        {
696          rtx x = CONST_VECTOR_ELT (op, i);
697          if (x != constm1_rtx)
698            return 0;
699        }
700      return 1;
701    }
702
703  return 0;
704})
705
;; Return 1 when OP is an operand acceptable for a standard SSE move.
707(define_predicate "vector_move_operand"
708  (ior (match_operand 0 "nonimmediate_operand")
709       (match_operand 0 "const0_operand")))
710
;; Return 1 when OP is a nonimmediate operand or a standard SSE constant.
712(define_predicate "nonimmediate_or_sse_const_operand"
713  (match_operand 0 "general_operand")
714{
715  if (nonimmediate_operand (op, mode))
716    return 1;
717  if (standard_sse_constant_p (op) > 0)
718    return 1;
719  return 0;
720})
721
722;; Return true if OP is a register or a zero.
723(define_predicate "reg_or_0_operand"
724  (ior (match_operand 0 "register_operand")
725       (match_operand 0 "const0_operand")))
726
;; Return true if OP is a valid address and does not contain
;; a segment override.
729(define_special_predicate "no_seg_address_operand"
730  (match_operand 0 "address_operand")
731{
732  struct ix86_address parts;
733  int ok;
734
735  ok = ix86_decompose_address (op, &parts);
736  gcc_assert (ok);
737  return parts.seg == SEG_DEFAULT;
738})
739
740;; Return nonzero if the rtx is known to be at least 32 bits aligned.
741(define_predicate "aligned_operand"
742  (match_operand 0 "general_operand")
743{
744  struct ix86_address parts;
745  int ok;
746
747  /* Registers and immediate operands are always "aligned".  */
748  if (GET_CODE (op) != MEM)
749    return 1;
750
  /* All patterns using aligned_operand on memory operands end up
     promoting the memory operand to 64 bits and thus cause a memory
     mismatch stall.  */
753  if (TARGET_MEMORY_MISMATCH_STALL && !optimize_size)
754    return 0;
755
756  /* Don't even try to do any aligned optimizations with volatiles.  */
757  if (MEM_VOLATILE_P (op))
758    return 0;
759
760  if (MEM_ALIGN (op) >= 32)
761    return 1;
762
763  op = XEXP (op, 0);
764
765  /* Pushes and pops are only valid on the stack pointer.  */
766  if (GET_CODE (op) == PRE_DEC
767      || GET_CODE (op) == POST_INC)
768    return 1;
769
770  /* Decode the address.  */
771  ok = ix86_decompose_address (op, &parts);
772  gcc_assert (ok);
773
774  /* Look for some component that isn't known to be aligned.  */
775  if (parts.index)
776    {
777      if (REGNO_POINTER_ALIGN (REGNO (parts.index)) * parts.scale < 32)
778	return 0;
779    }
780  if (parts.base)
781    {
782      if (REGNO_POINTER_ALIGN (REGNO (parts.base)) < 32)
783	return 0;
784    }
785  if (parts.disp)
786    {
787      if (GET_CODE (parts.disp) != CONST_INT
788	  || (INTVAL (parts.disp) & 3) != 0)
789	return 0;
790    }
791
792  /* Didn't find one -- this must be an aligned address.  */
793  return 1;
794})
795
;; Return 1 if OP is a memory operand with a displacement.
797(define_predicate "memory_displacement_operand"
798  (match_operand 0 "memory_operand")
799{
800  struct ix86_address parts;
801  int ok;
802
803  ok = ix86_decompose_address (XEXP (op, 0), &parts);
804  gcc_assert (ok);
805  return parts.disp != NULL_RTX;
806})
807
;; Return 1 if OP is a memory operand with a displacement only.
809(define_predicate "memory_displacement_only_operand"
810  (match_operand 0 "memory_operand")
811{
812  struct ix86_address parts;
813  int ok;
814
815  ok = ix86_decompose_address (XEXP (op, 0), &parts);
816  gcc_assert (ok);
817
818  if (parts.base || parts.index)
819    return 0;
820
821  return parts.disp != NULL_RTX;
822})
823
;; Return 1 if OP is a memory operand that cannot be represented
;; by the modRM byte alone.
826(define_predicate "long_memory_operand"
827  (and (match_operand 0 "memory_operand")
828       (match_test "memory_address_length (op) != 0")))
829
830;; Return 1 if OP is a comparison operator that can be issued by fcmov.
831(define_predicate "fcmov_comparison_operator"
832  (match_operand 0 "comparison_operator")
833{
834  enum machine_mode inmode = GET_MODE (XEXP (op, 0));
835  enum rtx_code code = GET_CODE (op);
836
837  if (inmode == CCFPmode || inmode == CCFPUmode)
838    {
839      enum rtx_code second_code, bypass_code;
840      ix86_fp_comparison_codes (code, &bypass_code, &code, &second_code);
841      if (bypass_code != UNKNOWN || second_code != UNKNOWN)
842	return 0;
843      code = ix86_fp_compare_code_to_integer (code);
844    }
  /* The i387 supports only a limited set of condition codes.  */
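  /* fcmov can test only the CF, ZF and PF bits that an fcomi/fucomi (or
     fcom followed by fnstsw/sahf) comparison produces, which is why only
     the unsigned-style and equality/ordered codes below are accepted.  */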
846  switch (code)
847    {
848    case LTU: case GTU: case LEU: case GEU:
849      if (inmode == CCmode || inmode == CCFPmode || inmode == CCFPUmode)
850	return 1;
851      return 0;
852    case ORDERED: case UNORDERED:
853    case EQ: case NE:
854      return 1;
855    default:
856      return 0;
857    }
858})
859
860;; Return 1 if OP is a comparison that can be used in the CMPSS/CMPPS insns.
861;; The first set are supported directly; the second set can't be done with
862;; full IEEE support, i.e. NaNs.
863;;
864;; ??? It would seem that we have a lot of uses of this predicate that pass
865;; it the wrong mode.  We got away with this because the old function didn't
866;; check the mode at all.  Mirror that for now by calling this a special
867;; predicate.
868
869(define_special_predicate "sse_comparison_operator"
870  (match_code "eq,lt,le,unordered,ne,unge,ungt,ordered"))
871
;; Return 1 if OP is a valid comparison operator in a valid mode.
873(define_predicate "ix86_comparison_operator"
874  (match_operand 0 "comparison_operator")
875{
876  enum machine_mode inmode = GET_MODE (XEXP (op, 0));
877  enum rtx_code code = GET_CODE (op);
878
879  if (inmode == CCFPmode || inmode == CCFPUmode)
880    {
881      enum rtx_code second_code, bypass_code;
882      ix86_fp_comparison_codes (code, &bypass_code, &code, &second_code);
883      return (bypass_code == UNKNOWN && second_code == UNKNOWN);
884    }
885  switch (code)
886    {
887    case EQ: case NE:
888      return 1;
889    case LT: case GE:
890      if (inmode == CCmode || inmode == CCGCmode
891	  || inmode == CCGOCmode || inmode == CCNOmode)
892	return 1;
893      return 0;
894    case LTU: case GTU: case LEU: case ORDERED: case UNORDERED: case GEU:
895      if (inmode == CCmode)
896	return 1;
897      return 0;
898    case GT: case LE:
899      if (inmode == CCmode || inmode == CCGCmode || inmode == CCNOmode)
900	return 1;
901      return 0;
902    default:
903      return 0;
904    }
905})
906
;; Return 1 if OP is a valid comparison operator that tests whether the carry flag is set.
908(define_predicate "ix86_carry_flag_operator"
909  (match_code "ltu,lt,unlt,gt,ungt,le,unle,ge,unge,ltgt,uneq")
910{
911  enum machine_mode inmode = GET_MODE (XEXP (op, 0));
912  enum rtx_code code = GET_CODE (op);
913
914  if (GET_CODE (XEXP (op, 0)) != REG
915      || REGNO (XEXP (op, 0)) != FLAGS_REG
916      || XEXP (op, 1) != const0_rtx)
917    return 0;
918
919  if (inmode == CCFPmode || inmode == CCFPUmode)
920    {
921      enum rtx_code second_code, bypass_code;
922      ix86_fp_comparison_codes (code, &bypass_code, &code, &second_code);
923      if (bypass_code != UNKNOWN || second_code != UNKNOWN)
924	return 0;
925      code = ix86_fp_compare_code_to_integer (code);
926    }
927  else if (inmode != CCmode)
928    return 0;
929
930  return code == LTU;
931})
932
933;; Nearly general operand, but accept any const_double, since we wish
934;; to be able to drop them into memory rather than have them get pulled
935;; into registers.
936(define_predicate "cmp_fp_expander_operand"
937  (ior (match_code "const_double")
938       (match_operand 0 "general_operand")))
939
940;; Return true if this is a valid binary floating-point operation.
941(define_predicate "binary_fp_operator"
942  (match_code "plus,minus,mult,div"))
943
944;; Return true if this is a multiply operation.
945(define_predicate "mult_operator"
946  (match_code "mult"))
947
948;; Return true if this is a division operation.
949(define_predicate "div_operator"
950  (match_code "div"))
951
952;; Return true if this is a float extend operation.
953(define_predicate "float_operator"
954  (match_code "float"))
955
956;; Return true for ARITHMETIC_P.
957(define_predicate "arith_or_logical_operator"
958  (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax,compare,minus,div,
959	       mod,udiv,umod,ashift,rotate,ashiftrt,lshiftrt,rotatert"))
960
;; Return 1 if OP is a binary operator that can be promoted to a wider mode.
;; Modern CPUs have the same latency for HImode and SImode multiplies,
;; but the 386 and 486 do HImode multiplies faster.
964(define_predicate "promotable_binary_operator"
965  (ior (match_code "plus,and,ior,xor,ashift")
966       (and (match_code "mult")
967	    (match_test "ix86_tune > PROCESSOR_I486"))))
968
969;; To avoid problems when jump re-emits comparisons like testqi_ext_ccno_0,
970;; re-recognize the operand to avoid a copy_to_mode_reg that will fail.
971;;
972;; ??? It seems likely that this will only work because cmpsi is an
973;; expander, and no actual insns use this.
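;; The form matched below is
;;   (and (zero_extract X (const_int 8) (const_int 8)) (const_int N)),
;; i.e. a test of the %ah-style high byte of a register.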
974
975(define_predicate "cmpsi_operand"
976  (ior (match_operand 0 "nonimmediate_operand")
977       (and (match_code "and")
978	    (match_code "zero_extract" "0")
979	    (match_code "const_int"    "1")
980	    (match_code "const_int"    "01")
981	    (match_code "const_int"    "02")
982	    (match_test "INTVAL (XEXP (XEXP (op, 0), 1)) == 8")
983	    (match_test "INTVAL (XEXP (XEXP (op, 0), 2)) == 8")
984       )))
985
986(define_predicate "compare_operator"
987  (match_code "compare"))
988
989(define_predicate "absneg_operator"
990  (match_code "abs,neg"))
991