1;; Predicate definitions for IA-32 and x86-64.
2;; Copyright (C) 2004-2015 Free Software Foundation, Inc.
3;;
4;; This file is part of GCC.
5;;
6;; GCC is free software; you can redistribute it and/or modify
7;; it under the terms of the GNU General Public License as published by
8;; the Free Software Foundation; either version 3, or (at your option)
9;; any later version.
10;;
11;; GCC is distributed in the hope that it will be useful,
12;; but WITHOUT ANY WARRANTY; without even the implied warranty of
13;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14;; GNU General Public License for more details.
15;;
16;; You should have received a copy of the GNU General Public License
17;; along with GCC; see the file COPYING3.  If not see
18;; <http://www.gnu.org/licenses/>.
19
;; Return true if OP is either an i387 or SSE fp register.
(define_predicate "any_fp_register_operand"
  (and (match_code "reg")
       (match_test "ANY_FP_REGNO_P (REGNO (op))")))

;; Return true if OP is an i387 fp register.
(define_predicate "fp_register_operand"
  (and (match_code "reg")
       (match_test "STACK_REGNO_P (REGNO (op))")))

;; Return true if OP is a non-fp register_operand, i.e. neither an i387
;; stack register nor an SSE register.
(define_predicate "register_and_not_any_fp_reg_operand"
  (and (match_code "reg")
       (not (match_test "ANY_FP_REGNO_P (REGNO (op))"))))

;; True if the operand is a GENERAL class register.
(define_predicate "general_reg_operand"
  (and (match_code "reg")
       (match_test "GENERAL_REG_P (op)")))

;; True if the operand is a nonimmediate operand with GENERAL class register.
;; For a REG the check is on the register number directly; anything else
;; falls through to the generic nonimmediate_operand test.
(define_predicate "nonimmediate_gr_operand"
  (if_then_else (match_code "reg")
    (match_test "GENERAL_REGNO_P (REGNO (op))")
    (match_operand 0 "nonimmediate_operand")))

;; Return true if OP is a register operand other than an i387 fp register.
(define_predicate "register_and_not_fp_reg_operand"
  (and (match_code "reg")
       (not (match_test "STACK_REGNO_P (REGNO (op))"))))
50
;; True if the operand is an MMX register.
(define_predicate "mmx_reg_operand"
  (and (match_code "reg")
       (match_test "MMX_REGNO_P (REGNO (op))")))

;; True if the operand is an SSE register.
(define_predicate "sse_reg_operand"
  (and (match_code "reg")
       (match_test "SSE_REGNO_P (REGNO (op))")))

;; True if the operand is an AVX-512 new register, i.e. one of the
;; extended SSE registers %xmm16-%xmm31 that only EVEX encoding can reach.
(define_predicate "ext_sse_reg_operand"
  (and (match_code "reg")
       (match_test "EXT_REX_SSE_REGNO_P (REGNO (op))")))

;; True if the operand is an AVX-512 mask (%k0-%k7) register.
(define_predicate "mask_reg_operand"
  (and (match_code "reg")
       (match_test "MASK_REGNO_P (REGNO (op))")))

;; True if the operand is a Q_REGS class register.  SUBREGs are looked
;; through so that a subreg of a QImode-capable register also matches.
(define_predicate "q_regs_operand"
  (match_operand 0 "register_operand")
{
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  return ANY_QI_REG_P (op);
})
79
;; Return true if OP is a memory operand that can be used in sibcalls.
;; Only memory with a constant address qualifies, so that the call target
;; does not depend on a register that the epilogue may have restored.
(define_predicate "sibcall_memory_operand"
  (and (match_operand 0 "memory_operand")
       (match_test "CONSTANT_P (XEXP (op, 0))")))

;; Match an SI or HImode register for a zero_extract.
(define_special_predicate "ext_register_operand"
  (match_operand 0 "register_operand")
{
  /* DImode is acceptable only on 64-bit targets.  */
  if ((!TARGET_64BIT || GET_MODE (op) != DImode)
      && GET_MODE (op) != SImode && GET_MODE (op) != HImode)
    return false;
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* Be careful to accept only registers having upper parts.
     Pseudos (REGNO > LAST_VIRTUAL_REGISTER) are fine since they can still
     be allocated to such a hard register.  */
  return (REG_P (op)
	  && (REGNO (op) > LAST_VIRTUAL_REGISTER || REGNO (op) <= BX_REG));
})
99
;; Match nonimmediate operands, but exclude memory operands on 64bit targets.
(define_predicate "nonimmediate_x64nomem_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "register_operand")
    (match_operand 0 "nonimmediate_operand")))

;; Match general operands, but exclude memory operands on 64bit targets.
(define_predicate "general_x64nomem_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonmemory_operand")
    (match_operand 0 "general_operand")))

;; Return true if op is the AX register.
(define_predicate "ax_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == AX_REG")))

;; Return true if op is the flags register.
(define_predicate "flags_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == FLAGS_REG")))

;; Return true if op is one of QImode registers: %[abcd][hl].
;; No match_code is needed here: QI_REG_P itself verifies REG-ness.
(define_predicate "QIreg_operand"
  (match_test "QI_REG_P (op)"))

;; Return true if op is a QImode register operand other than
;; %[abcd][hl].  Such registers are only addressable as bytes with a
;; REX prefix, hence the TARGET_64BIT requirement.
(define_predicate "ext_QIreg_operand"
  (and (match_code "reg")
       (match_test "TARGET_64BIT")
       (match_test "REGNO (op) > BX_REG")))

;; Return true if VALUE is a symbol reference.
(define_predicate "symbol_operand"
  (match_code "symbol_ref"))
136
;; Return true if VALUE can be stored in a sign extended immediate field,
;; i.e. fits in the 32-bit sign-extended immediate that most x86-64
;; instructions accept.  On 32-bit targets any immediate qualifies.
(define_predicate "x86_64_immediate_operand"
  (match_code "const_int,symbol_ref,label_ref,const")
{
  if (!TARGET_64BIT)
    return immediate_operand (op, mode);

  switch (GET_CODE (op))
    {
    case CONST_INT:
      /* CONST_DOUBLEs never match, since HOST_BITS_PER_WIDE_INT is known
         to be at least 32 and thus all acceptable constants are
	 represented as CONST_INT.  */
      if (HOST_BITS_PER_WIDE_INT == 32)
	return true;
      else
	{
	  /* Accept the value only if sign-extending its low 32 bits
	     reproduces the full DImode value.  */
	  HOST_WIDE_INT val = trunc_int_for_mode (INTVAL (op), DImode);
	  return trunc_int_for_mode (val, SImode) == val;
	}
      break;

    case SYMBOL_REF:
      /* For certain code models, the symbolic references are known to fit;
	 in the CM_SMALL_PIC model we know it fits if it is local to the
	 shared library.  Don't count TLS SYMBOL_REFs here, since they
	 should fit only if inside of UNSPEC handled below.  */
      /* TLS symbols are not constant.  */
      if (SYMBOL_REF_TLS_MODEL (op))
	return false;
      return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_KERNEL
	      || (ix86_cmodel == CM_MEDIUM && !SYMBOL_REF_FAR_ADDR_P (op)));

    case LABEL_REF:
      /* For certain code models, the code is near as well.  */
      return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM
	      || ix86_cmodel == CM_KERNEL);

    case CONST:
      /* We also may accept the offsetted memory references in certain
	 special cases.  */
      if (GET_CODE (XEXP (op, 0)) == UNSPEC)
	switch (XINT (XEXP (op, 0), 1))
	  {
	  case UNSPEC_GOTPCREL:
	  case UNSPEC_DTPOFF:
	  case UNSPEC_GOTNTPOFF:
	  case UNSPEC_NTPOFF:
	    return true;
	  default:
	    break;
	  }

      if (GET_CODE (XEXP (op, 0)) == PLUS)
	{
	  rtx op1 = XEXP (XEXP (op, 0), 0);
	  rtx op2 = XEXP (XEXP (op, 0), 1);
	  HOST_WIDE_INT offset;

	  if (ix86_cmodel == CM_LARGE)
	    return false;
	  if (!CONST_INT_P (op2))
	    return false;
	  offset = trunc_int_for_mode (INTVAL (op2), DImode);
	  switch (GET_CODE (op1))
	    {
	    case SYMBOL_REF:
	      /* TLS symbols are not constant.  */
	      if (SYMBOL_REF_TLS_MODEL (op1))
		return false;
	      /* For CM_SMALL assume that latest object is 16MB before
		 end of 31bits boundary.  We may also accept pretty
		 large negative constants knowing that all objects are
		 in the positive half of address space.  */
	      if ((ix86_cmodel == CM_SMALL
		   || (ix86_cmodel == CM_MEDIUM
		       && !SYMBOL_REF_FAR_ADDR_P (op1)))
		  && offset < 16*1024*1024
		  && trunc_int_for_mode (offset, SImode) == offset)
		return true;
	      /* For CM_KERNEL we know that all objects reside in the
		 negative half of the 32-bit address space.  We may not
		 accept negative offsets, since they may be just off
		 and we may accept pretty large positive ones.  */
	      if (ix86_cmodel == CM_KERNEL
		  && offset > 0
		  && trunc_int_for_mode (offset, SImode) == offset)
		return true;
	      break;

	    case LABEL_REF:
	      /* These conditions are similar to SYMBOL_REF ones, just the
		 constraints for code models differ.  */
	      if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
		  && offset < 16*1024*1024
		  && trunc_int_for_mode (offset, SImode) == offset)
		return true;
	      if (ix86_cmodel == CM_KERNEL
		  && offset > 0
		  && trunc_int_for_mode (offset, SImode) == offset)
		return true;
	      break;

	    case UNSPEC:
	      /* TLS offsets are always 32-bit quantities; only require
		 that the offset itself fits.  */
	      switch (XINT (op1, 1))
		{
		case UNSPEC_DTPOFF:
		case UNSPEC_NTPOFF:
		  if (trunc_int_for_mode (offset, SImode) == offset)
		    return true;
		}
	      break;

	    default:
	      break;
	    }
	}
      break;

      default:
	gcc_unreachable ();
    }

  return false;
})
262
;; Return true if VALUE can be stored in the zero extended immediate field,
;; i.e. it is a 32-bit unsigned quantity (as used e.g. by movl to a
;; 64-bit register, which zero-extends).
(define_predicate "x86_64_zext_immediate_operand"
  (match_code "const_double,const_int,symbol_ref,label_ref,const")
{
  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
      /* Only relevant on 32-bit hosts, where a 64-bit integer constant
	 is represented as a VOIDmode CONST_DOUBLE; the high word must
	 be zero for the value to be a valid zero-extended immediate.  */
      if (HOST_BITS_PER_WIDE_INT == 32)
	return (GET_MODE (op) == VOIDmode && !CONST_DOUBLE_HIGH (op));
      else
	return false;

    case CONST_INT:
      if (HOST_BITS_PER_WIDE_INT == 32)
	return INTVAL (op) >= 0;
      else
	return !(INTVAL (op) & ~(HOST_WIDE_INT) 0xffffffff);

    case SYMBOL_REF:
      /* For certain code models, the symbolic references are known to fit.  */
      /* TLS symbols are not constant.  */
      if (SYMBOL_REF_TLS_MODEL (op))
	return false;
      return (ix86_cmodel == CM_SMALL
	      || (ix86_cmodel == CM_MEDIUM
		  && !SYMBOL_REF_FAR_ADDR_P (op)));

    case LABEL_REF:
      /* For certain code models, the code is near as well.  */
      return ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM;

    case CONST:
      /* We also may accept the offsetted memory references in certain
	 special cases.  */
      if (GET_CODE (XEXP (op, 0)) == PLUS)
	{
	  rtx op1 = XEXP (XEXP (op, 0), 0);
	  rtx op2 = XEXP (XEXP (op, 0), 1);

	  if (ix86_cmodel == CM_LARGE)
	    return false;
	  switch (GET_CODE (op1))
	    {
	    case SYMBOL_REF:
	      /* TLS symbols are not constant.  */
	      if (SYMBOL_REF_TLS_MODEL (op1))
		return false;
	      /* For small code model we may accept pretty large positive
		 offsets, since one bit is available for free.  Negative
		 offsets are limited by the size of NULL pointer area
		 specified by the ABI.  */
	      if ((ix86_cmodel == CM_SMALL
		   || (ix86_cmodel == CM_MEDIUM
		       && !SYMBOL_REF_FAR_ADDR_P (op1)))
		  && CONST_INT_P (op2)
		  && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
		  && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
		return true;
	      /* ??? For the kernel, we may accept adjustment of
		 -0x10000000, since we know that it will just convert
		 negative address space to positive, but perhaps this
		 is not worthwhile.  */
	      break;

	    case LABEL_REF:
	      /* These conditions are similar to SYMBOL_REF ones, just the
		 constraints for code models differ.  */
	      if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
		  && CONST_INT_P (op2)
		  && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
		  && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
		return true;
	      break;

	    default:
	      return false;
	    }
	}
      break;

    default:
      gcc_unreachable ();
    }
  return false;
})
348
;; Return true if size of VALUE can be stored in a sign
;; extended immediate field.
(define_predicate "x86_64_immediate_size_operand"
  (and (match_code "symbol_ref")
       (ior (not (match_test "TARGET_64BIT"))
	    (match_test "ix86_cmodel == CM_SMALL")
	    (match_test "ix86_cmodel == CM_KERNEL"))))

;; Return true if OP is general operand representable on x86_64,
;; i.e. any general operand on 32-bit targets, but on 64-bit targets
;; immediates must fit the sign-extended 32-bit field.
(define_predicate "x86_64_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
	 (match_operand 0 "x86_64_immediate_operand"))
    (match_operand 0 "general_operand")))

;; Return true if OP is non-VOIDmode general operand representable
;; on x86_64.  This predicate is used in sign-extending conversion
;; operations that require non-VOIDmode immediate operands.
(define_predicate "x86_64_sext_operand"
  (and (match_test "GET_MODE (op) != VOIDmode")
       (match_operand 0 "x86_64_general_operand")))

;; Return true if OP is non-VOIDmode general operand.  This predicate
;; is used in sign-extending conversion operations that require
;; non-VOIDmode immediate operands.
(define_predicate "sext_operand"
  (and (match_test "GET_MODE (op) != VOIDmode")
       (match_operand 0 "general_operand")))

;; Return true if OP is representable on x86_64 as zero-extended operand.
;; This predicate is used in zero-extending conversion operations that
;; require non-VOIDmode immediate operands.
(define_predicate "x86_64_zext_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
	 (and (match_operand 0 "x86_64_zext_immediate_operand")
	      (match_test "GET_MODE (op) != VOIDmode")))
    (match_operand 0 "nonimmediate_operand")))

;; Return true if OP is general operand representable on x86_64
;; as either sign extended or zero extended constant.
(define_predicate "x86_64_szext_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
	 (match_operand 0 "x86_64_immediate_operand")
	 (match_operand 0 "x86_64_zext_immediate_operand"))
    (match_operand 0 "general_operand")))

;; Return true if OP is nonmemory operand representable on x86_64.
(define_predicate "x86_64_nonmemory_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "register_operand")
	 (match_operand 0 "x86_64_immediate_operand"))
    (match_operand 0 "nonmemory_operand")))

;; Return true if OP is nonmemory operand representable on x86_64
;; as either sign extended or zero extended constant.
(define_predicate "x86_64_szext_nonmemory_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "register_operand")
	 (match_operand 0 "x86_64_immediate_operand")
	 (match_operand 0 "x86_64_zext_immediate_operand"))
    (match_operand 0 "nonmemory_operand")))
411
;; Return true when operand is PIC expression that can be computed by lea
;; operation.
(define_predicate "pic_32bit_operand"
  (match_code "const,symbol_ref,label_ref")
{
  if (!flag_pic)
    return false;

  /* Rule out relocations that translate into 64bit constants.  */
  if (TARGET_64BIT && GET_CODE (op) == CONST)
    {
      op = XEXP (op, 0);
      /* Look through a symbol+offset PLUS to find the UNSPEC.  */
      if (GET_CODE (op) == PLUS && CONST_INT_P (XEXP (op, 1)))
	op = XEXP (op, 0);
      if (GET_CODE (op) == UNSPEC
	  && (XINT (op, 1) == UNSPEC_GOTOFF
	      || XINT (op, 1) == UNSPEC_GOT))
	return false;
    }

  return symbolic_operand (op, mode);
})

;; Return true if OP is nonmemory operand acceptable by movabs patterns.
(define_predicate "x86_64_movabs_operand"
  (and (match_operand 0 "nonmemory_operand")
       (not (match_operand 0 "pic_32bit_operand"))))
439
;; Return true if OP is either a symbol reference or a sum of a symbol
;; reference and a constant.  Certain PIC-related UNSPEC wrappers
;; (@GOT, @GOTOFF, pc-relative, @GOTPCREL) also qualify.
(define_predicate "symbolic_operand"
  (match_code "symbol_ref,label_ref,const")
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return true;

    case CONST:
      op = XEXP (op, 0);
      if (GET_CODE (op) == SYMBOL_REF
	  || GET_CODE (op) == LABEL_REF
	  || (GET_CODE (op) == UNSPEC
	      && (XINT (op, 1) == UNSPEC_GOT
		  || XINT (op, 1) == UNSPEC_GOTOFF
		  || XINT (op, 1) == UNSPEC_PCREL
		  || XINT (op, 1) == UNSPEC_GOTPCREL)))
	return true;
      if (GET_CODE (op) != PLUS
	  || !CONST_INT_P (XEXP (op, 1)))
	return false;

      /* symbol/label + constant offset.  */
      op = XEXP (op, 0);
      if (GET_CODE (op) == SYMBOL_REF
	  || GET_CODE (op) == LABEL_REF)
	return true;
      /* Only @GOTOFF gets offsets.  */
      if (GET_CODE (op) != UNSPEC
	  || XINT (op, 1) != UNSPEC_GOTOFF)
	return false;

      op = XVECEXP (op, 0, 0);
      if (GET_CODE (op) == SYMBOL_REF
	  || GET_CODE (op) == LABEL_REF)
	return true;
      return false;

    default:
      gcc_unreachable ();
    }
})
484
;; Return true if OP is a symbolic operand that resolves locally.
(define_predicate "local_symbolic_operand"
  (match_code "const,label_ref,symbol_ref")
{
  /* Strip a constant offset, if any.  */
  if (GET_CODE (op) == CONST
      && GET_CODE (XEXP (op, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (op, 0), 1)))
    op = XEXP (XEXP (op, 0), 0);

  if (GET_CODE (op) == LABEL_REF)
    return true;

  if (GET_CODE (op) != SYMBOL_REF)
    return false;

  if (SYMBOL_REF_TLS_MODEL (op))
    return false;

  /* Dll-imported symbols are always external.  */
  if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
    return false;
  if (SYMBOL_REF_LOCAL_P (op))
    return true;

  /* There is, however, a not insubstantial body of code in the rest of
     the compiler that assumes it can just stick the results of
     ASM_GENERATE_INTERNAL_LABEL in a symbol_ref and have done.  */
  /* ??? This is a hack.  Should update the body of the compiler to
     always create a DECL and invoke targetm.encode_section_info.  */
  if (strncmp (XSTR (op, 0), internal_label_prefix,
	       internal_label_prefix_len) == 0)
    return true;

  return false;
})

;; Test for a legitimate @GOTOFF operand.
;;
;; VxWorks does not impose a fixed gap between segments; the run-time
;; gap can be different from the object-file gap.  We therefore can't
;; use @GOTOFF unless we are absolutely sure that the symbol is in the
;; same segment as the GOT.  Unfortunately, the flexibility of linker
;; scripts means that we can't be sure of that in general, so assume
;; that @GOTOFF is never valid on VxWorks.
(define_predicate "gotoff_operand"
  (and (not (match_test "TARGET_VXWORKS_RTP"))
       (match_operand 0 "local_symbolic_operand")))
532
;; Test for various thread-local symbols.
(define_special_predicate "tls_symbolic_operand"
  (and (match_code "symbol_ref")
       (match_test "SYMBOL_REF_TLS_MODEL (op)")))

;; True if OP is the symbol that serves as the TLS module base.
(define_special_predicate "tls_modbase_operand"
  (and (match_code "symbol_ref")
       (match_test "op == ix86_tls_module_base ()")))

;; Test for a pc-relative call operand.  Direct calls are not possible
;; in the large code models, nor to dll-imported symbols.
(define_predicate "constant_call_address_operand"
  (match_code "symbol_ref")
{
  if (ix86_cmodel == CM_LARGE || ix86_cmodel == CM_LARGE_PIC)
    return false;
  if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
    return false;
  return true;
})
552
;; P6 processors will jump to the address after the decrement when %esp
;; is used as a call operand, so they will execute return address as a code.
;; See Pentium Pro errata 70, Pentium 2 errata A33 and Pentium 3 errata E17.

(define_predicate "call_register_no_elim_operand"
  (match_operand 0 "register_operand")
{
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* Reject %esp on 32-bit targets because of the P6 errata above.  */
  if (!TARGET_64BIT && op == stack_pointer_rtx)
    return false;

  return register_no_elim_operand (op, mode);
})

;; True for any non-virtual or eliminable register.  Used in places where
;; instantiation of such a register may cause the pattern to not be recognized.
(define_predicate "register_no_elim_operand"
  (match_operand 0 "register_operand")
{
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  return !(op == arg_pointer_rtx
	   || op == frame_pointer_rtx
	   || IN_RANGE (REGNO (op),
			FIRST_PSEUDO_REGISTER, LAST_VIRTUAL_REGISTER));
})

;; Similarly, but include the stack pointer.  This is used to prevent esp
;; from being used as an index reg.  Strict checking is applied once
;; register allocation has started.
(define_predicate "index_register_operand"
  (match_operand 0 "register_operand")
{
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  if (reload_in_progress || reload_completed)
    return REG_OK_FOR_INDEX_STRICT_P (op);
  else
    return REG_OK_FOR_INDEX_NONSTRICT_P (op);
})

;; Return false if this is any eliminable register.  Otherwise general_operand.
(define_predicate "general_no_elim_operand"
  (if_then_else (match_code "reg,subreg")
    (match_operand 0 "register_no_elim_operand")
    (match_operand 0 "general_operand")))

;; Return false if this is any eliminable register.  Otherwise
;; register_operand or a constant.
(define_predicate "nonmemory_no_elim_operand"
  (ior (match_operand 0 "register_no_elim_operand")
       (match_operand 0 "immediate_operand")))
606
;; Test for a valid operand for indirect branch.  Memory operands are
;; disallowed for x32, where indirect branches need a 64-bit register.
(define_predicate "indirect_branch_operand"
  (ior (match_operand 0 "register_operand")
       (and (not (match_test "TARGET_X32"))
	    (match_operand 0 "memory_operand"))))

;; Test for a valid operand for a call instruction.
;; Allow constant call address operands in Pmode only.
(define_special_predicate "call_insn_operand"
  (ior (match_test "constant_call_address_operand
		     (op, mode == VOIDmode ? mode : Pmode)")
       (match_operand 0 "call_register_no_elim_operand")
       (and (not (match_test "TARGET_X32"))
	    (match_operand 0 "memory_operand"))))

;; Similarly, but for tail calls, in which we cannot allow memory references.
(define_special_predicate "sibcall_insn_operand"
  (ior (match_test "constant_call_address_operand
		     (op, mode == VOIDmode ? mode : Pmode)")
       (match_operand 0 "register_no_elim_operand")
       (and (not (match_test "TARGET_X32"))
	    (match_operand 0 "sibcall_memory_operand"))))
629
;; Match exactly zero.
(define_predicate "const0_operand"
  (match_code "const_int,const_double,const_vector")
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);
  return op == CONST0_RTX (mode);
})

;; Match -1.
(define_predicate "constm1_operand"
  (match_code "const_int,const_double,const_vector")
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);
  return op == CONSTM1_RTX (mode);
})

;; Match one or vector filled with ones.
(define_predicate "const1_operand"
  (match_code "const_int,const_double,const_vector")
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);
  return op == CONST1_RTX (mode);
})

;; Match exactly eight.
(define_predicate "const8_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 8")))

;; Match exactly 128.
(define_predicate "const128_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 128")))

;; Match exactly 0xFFFFFFFF in anddi as a zero-extension operation.
(define_predicate "const_32bit_mask"
  (and (match_code "const_int")
       (match_test "trunc_int_for_mode (INTVAL (op), DImode)
		    == (HOST_WIDE_INT) 0xffffffff")))

;; Match 2, 4, or 8.  Used for leal multiplicands.
(define_predicate "const248_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 2 || i == 4 || i == 8;
})

;; Match 2, 3, 6, or 7
(define_predicate "const2367_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 2 || i == 3 || i == 6 || i == 7;
})

;; Match 1, 2, 4, or 8
(define_predicate "const1248_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 1 || i == 2 || i == 4 || i == 8;
})

;; Match 3, 5, or 9.  Used for leal multiplicands.
(define_predicate "const359_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 3 || i == 5 || i == 9;
})

;; Match 4 or 8 to 11.  Used for embedded rounding.
(define_predicate "const_4_or_8_to_11_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 4 || (i >= 8 && i <= 11);
})

;; Match 4 or 8.  Used for SAE.
(define_predicate "const48_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 4 || i == 8;
})
720
;; Simple CONST_INT range predicates.  NOTE(review): these appear to be
;; used mainly as immediate selector/lane operands by the SSE/AVX
;; patterns, which are not visible in this file.

;; Match 0 or 1.
(define_predicate "const_0_to_1_operand"
  (and (match_code "const_int")
       (ior (match_test "op == const0_rtx")
	    (match_test "op == const1_rtx"))))

;; Match 0 to 3.
(define_predicate "const_0_to_3_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 3)")))

;; Match 0 to 4.
(define_predicate "const_0_to_4_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 4)")))

;; Match 0 to 5.
(define_predicate "const_0_to_5_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 5)")))

;; Match 0 to 7.
(define_predicate "const_0_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 7)")))

;; Match 0 to 15.
(define_predicate "const_0_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 15)")))

;; Match 0 to 31.
(define_predicate "const_0_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 31)")))

;; Match 0 to 63.
(define_predicate "const_0_to_63_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 63)")))

;; Match 0 to 255.
(define_predicate "const_0_to_255_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 255)")))

;; Match (0 to 255) * 8
(define_predicate "const_0_to_255_mul_8_operand"
  (match_code "const_int")
{
  unsigned HOST_WIDE_INT val = INTVAL (op);
  return val <= 255*8 && val % 8 == 0;
})

;; Return true if OP is CONST_INT >= 1 and <= 31 (a valid operand
;; for shift & compare patterns, as shifting by 0 does not change flags).
(define_predicate "const_1_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 1, 31)")))

;; Return true if OP is CONST_INT >= 1 and <= 63 (a valid operand
;; for 64bit shift & compare patterns, as shifting by 0 does not change flags).
(define_predicate "const_1_to_63_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 1, 63)")))

;; Match 2 or 3.
(define_predicate "const_2_to_3_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 2, 3)")))

;; Match 4 to 5.
(define_predicate "const_4_to_5_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 4, 5)")))

;; Match 4 to 7.
(define_predicate "const_4_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 4, 7)")))

;; Match 6 to 7.
(define_predicate "const_6_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 6, 7)")))

;; Match 8 to 9.
(define_predicate "const_8_to_9_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 8, 9)")))

;; Match 8 to 11.
(define_predicate "const_8_to_11_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 8, 11)")))

;; Match 8 to 15.
(define_predicate "const_8_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 8, 15)")))

;; Match 10 to 11.
(define_predicate "const_10_to_11_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 10, 11)")))

;; Match 12 to 13.
(define_predicate "const_12_to_13_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 12, 13)")))

;; Match 12 to 15.
(define_predicate "const_12_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 12, 15)")))

;; Match 14 to 15.
(define_predicate "const_14_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 14, 15)")))

;; Match 16 to 19.
(define_predicate "const_16_to_19_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 16, 19)")))

;; Match 16 to 31.
(define_predicate "const_16_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 16, 31)")))

;; Match 20 to 23.
(define_predicate "const_20_to_23_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 20, 23)")))

;; Match 24 to 27.
(define_predicate "const_24_to_27_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 24, 27)")))

;; Match 28 to 31.
(define_predicate "const_28_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 28, 31)")))
866
;; True if this is a constant appropriate for an increment or decrement.
(define_predicate "incdec_operand"
  (match_code "const_int")
{
  /* On Pentium4, the inc and dec operations cause an extra dependency on
     flag registers, since the carry flag is not set.  Only allow them when
     the target wants inc/dec or when optimizing for size.  */
  if (!TARGET_USE_INCDEC && !optimize_insn_for_size_p ())
    return false;
  return op == const1_rtx || op == constm1_rtx;
})

;; True for registers, or 1 or -1.  Used to optimize double-word shifts.
(define_predicate "reg_or_pm1_operand"
  (ior (match_operand 0 "register_operand")
       (and (match_code "const_int")
	    (ior (match_test "op == const1_rtx")
		 (match_test "op == constm1_rtx")))))

;; True if OP is acceptable as operand of DImode shift expander.
(define_predicate "shiftdi_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "register_operand")))

;; True if OP is acceptable as input of the DImode ashift expander.
(define_predicate "ashldi_input_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "reg_or_pm1_operand")))
895
;; Return true if OP is a vector load from the constant pool with just
;; the first element nonzero.
(define_predicate "zero_extended_scalar_load_operand"
  (match_code "mem")
{
  unsigned n_elts;
  op = maybe_get_pool_constant (op);

  if (!(op && GET_CODE (op) == CONST_VECTOR))
    return false;

  n_elts = CONST_VECTOR_NUNITS (op);

  /* Walk elements 1..n-1 (element 0 is allowed to be anything) and
     require each to be the zero of the vector's element mode.  */
  for (n_elts--; n_elts > 0; n_elts--)
    {
      rtx elt = CONST_VECTOR_ELT (op, n_elts);
      if (elt != CONST0_RTX (GET_MODE_INNER (GET_MODE (op))))
	return false;
    }
  return true;
})

/* Return true if operand is a vector constant that is all ones. */
(define_predicate "vector_all_ones_operand"
  (match_code "const_vector")
{
  int nunits = GET_MODE_NUNITS (mode);

  if (GET_CODE (op) == CONST_VECTOR
      && CONST_VECTOR_NUNITS (op) == nunits)
    {
      int i;
      for (i = 0; i < nunits; ++i)
        {
          rtx x = CONST_VECTOR_ELT (op, i);
          if (x != constm1_rtx)
            return false;
        }
      return true;
    }

  return false;
})
939
;; Return true when OP is operand acceptable for standard SSE move:
;; any nonimmediate operand, or the all-zeros constant.
(define_predicate "vector_move_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_operand 0 "const0_operand")))

;; Return true when OP is either nonimmediate operand, or any
;; CONST_VECTOR.
(define_predicate "nonimmediate_or_const_vector_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_code "const_vector")))
950
;; Return true when OP is nonimmediate or standard SSE constant.
(define_predicate "nonimmediate_or_sse_const_operand"
  (match_operand 0 "general_operand")
{
  return (nonimmediate_operand (op, mode)
	  || standard_sse_constant_p (op) > 0);
})
961
;; Return true if OP is a register or a zero.
(define_predicate "reg_or_0_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "const0_operand")))

;; Return true for RTX codes that force SImode address.
;; NOTE(review): these appear to be the address subexpressions that
;; truncate/zero-extend a pointer to 32 bits — confirm against the
;; addressing patterns that use this predicate.
(define_predicate "SImode_address_operand"
  (match_code "subreg,zero_extend,and"))
970
;; Return true if OP is a valid address for LEA, and does not contain
;; a segment override.  Defined as a special predicate to allow
;; mode-less const_int operands pass to address_operand.
(define_special_predicate "address_no_seg_operand"
  (match_test "address_operand (op, VOIDmode)")
{
  struct ix86_address parts;
  int ok;

  /* CONST_INTs carry no mode, so they are exempt from the mode check
     every other operand must satisfy.  */
  if (!CONST_INT_P (op)
      && mode != VOIDmode
      && GET_MODE (op) != mode)
    return false;

  /* address_operand already accepted OP, so decomposition must succeed.  */
  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);
  return parts.seg == SEG_DEFAULT;
})
989
;; Return true if OP is a valid base register, displacement or
;; sum of base register and displacement for VSIB addressing.
(define_predicate "vsib_address_operand"
  (match_test "address_operand (op, VOIDmode)")
{
  struct ix86_address parts;
  int ok;
  rtx disp;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);
  /* VSIB supplies its own vector index, so a scalar index register or
     a segment override is not allowed.  */
  if (parts.index || parts.seg != SEG_DEFAULT)
    return false;

  /* VSIB addressing doesn't support (%rip).  */
  if (parts.disp)
    {
      disp = parts.disp;
      if (GET_CODE (disp) == CONST)
	{
	  disp = XEXP (disp, 0);
	  if (GET_CODE (disp) == PLUS)
	    disp = XEXP (disp, 0);
	  /* These unspecs are rejected because they presumably lower to
	     %rip-relative references — confirm against legitimize code.  */
	  if (GET_CODE (disp) == UNSPEC)
	    switch (XINT (disp, 1))
	      {
	      case UNSPEC_GOTPCREL:
	      case UNSPEC_PCREL:
	      case UNSPEC_GOTNTPOFF:
		return false;
	      }
	}
      /* Under 64-bit PIC a bare symbol or label likewise becomes
	 %rip-relative.  */
      if (TARGET_64BIT
	  && flag_pic
	  && (GET_CODE (disp) == SYMBOL_REF
	      || GET_CODE (disp) == LABEL_REF))
	return false;
    }

  return true;
})
1031
;; Return true if OP is valid MPX address operand without base.
(define_predicate "address_mpx_no_base_operand"
  (match_test "address_operand (op, VOIDmode)")
{
  struct ix86_address parts;
  int ok;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);

  /* At most one of base and index may be present.  */
  if (parts.index && parts.base)
    return false;

  if (parts.seg != SEG_DEFAULT)
    return false;

  /* Do not support (%rip).  Under 64-bit PIC a symbolic displacement is
     acceptable only in the (const (plus (unspec ... DTPOFF/NTPOFF)
     (const_int ...))) TLS form.  */
  if (parts.disp && flag_pic && TARGET_64BIT
      && SYMBOLIC_CONST (parts.disp))
    {
      if (GET_CODE (parts.disp) != CONST
	  || GET_CODE (XEXP (parts.disp, 0)) != PLUS
	  || GET_CODE (XEXP (XEXP (parts.disp, 0), 0)) != UNSPEC
	  || !CONST_INT_P (XEXP (XEXP (parts.disp, 0), 1))
	  || (XINT (XEXP (XEXP (parts.disp, 0), 0), 1) != UNSPEC_DTPOFF
	      && XINT (XEXP (XEXP (parts.disp, 0), 0), 1) != UNSPEC_NTPOFF))
	return false;
    }

  return true;
})
1063
;; Return true if OP is valid MPX address operand without index.
(define_predicate "address_mpx_no_index_operand"
  (match_test "address_operand (op, VOIDmode)")
{
  struct ix86_address parts;
  int ok;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);

  /* No index register is allowed at all (a base is fine).  */
  if (parts.index)
    return false;

  if (parts.seg != SEG_DEFAULT)
    return false;

  /* Do not support (%rip).  Same TLS-only exception as
     address_mpx_no_base_operand above, expressed as one condition.  */
  if (parts.disp && flag_pic && TARGET_64BIT
      && SYMBOLIC_CONST (parts.disp)
      && (GET_CODE (parts.disp) != CONST
	  || GET_CODE (XEXP (parts.disp, 0)) != PLUS
	  || GET_CODE (XEXP (XEXP (parts.disp, 0), 0)) != UNSPEC
	  || !CONST_INT_P (XEXP (XEXP (parts.disp, 0), 1))
	  || (XINT (XEXP (XEXP (parts.disp, 0), 0), 1) != UNSPEC_DTPOFF
	      && XINT (XEXP (XEXP (parts.disp, 0), 0), 1) != UNSPEC_NTPOFF)))
    return false;

  return true;
})
1093
;; Match any MEM.  Used as the operator predicate for VSIB memory
;; operands; the address form itself is presumably validated elsewhere
;; (e.g. by vsib_address_operand) — confirm against the insn patterns.
(define_predicate "vsib_mem_operator"
  (match_code "mem"))

;; Match any MEM.  Likewise, for MPX bound-instruction memory operands.
(define_predicate "bnd_mem_operator"
  (match_code "mem"))
1099
;; Return true if the rtx is known to be at least 32 bits aligned.
(define_predicate "aligned_operand"
  (match_operand 0 "general_operand")
{
  struct ix86_address parts;
  int ok;

  /* Registers and immediate operands are always "aligned".  */
  if (!MEM_P (op))
    return true;

  /* All patterns using aligned_operand on memory operands ends up
     in promoting memory operand to 64bit and thus causing memory mismatch.  */
  if (TARGET_MEMORY_MISMATCH_STALL && !optimize_insn_for_size_p ())
    return false;

  /* Don't even try to do any aligned optimizations with volatiles.  */
  if (MEM_VOLATILE_P (op))
    return false;

  /* MEM_ALIGN is in bits, so this asks for 4-byte alignment.  */
  if (MEM_ALIGN (op) >= 32)
    return true;

  op = XEXP (op, 0);

  /* Pushes and pops are only valid on the stack pointer.  */
  if (GET_CODE (op) == PRE_DEC
      || GET_CODE (op) == POST_INC)
    return true;

  /* Decode the address.  */
  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);

  /* Look through SUBREGs to the underlying registers.  */
  if (parts.base && GET_CODE (parts.base) == SUBREG)
    parts.base = SUBREG_REG (parts.base);
  if (parts.index && GET_CODE (parts.index) == SUBREG)
    parts.index = SUBREG_REG (parts.index);

  /* Look for some component that isn't known to be aligned.  */
  if (parts.index)
    {
      /* The index contributes index*scale, so its known alignment is
	 scaled accordingly.  */
      if (REGNO_POINTER_ALIGN (REGNO (parts.index)) * parts.scale < 32)
	return false;
    }
  if (parts.base)
    {
      if (REGNO_POINTER_ALIGN (REGNO (parts.base)) < 32)
	return false;
    }
  if (parts.disp)
    {
      /* The displacement must be a known constant multiple of 4 bytes.  */
      if (!CONST_INT_P (parts.disp)
	  || (INTVAL (parts.disp) & 3))
	return false;
    }

  /* Didn't find one -- this must be an aligned address.  */
  return true;
})
1160
;; Return true if OP is memory operand with a displacement.
(define_predicate "memory_displacement_operand"
  (match_operand 0 "memory_operand")
{
  struct ix86_address parts;
  int ok = ix86_decompose_address (XEXP (op, 0), &parts);

  /* memory_operand already accepted OP, so decomposition must succeed.  */
  gcc_assert (ok);
  return parts.disp != NULL_RTX;
})
1172
;; Return true if OP is memory operand with a displacement only
;; (no base and no index register); never true in 64-bit mode.
(define_predicate "memory_displacement_only_operand"
  (match_operand 0 "memory_operand")
{
  struct ix86_address parts;
  int ok;

  if (TARGET_64BIT)
    return false;

  ok = ix86_decompose_address (XEXP (op, 0), &parts);
  gcc_assert (ok);

  return !parts.base && !parts.index && parts.disp != NULL_RTX;
})
1191
;; Return true if OP is memory operand that cannot be represented
;; by the modRM array.
;; NOTE(review): relies on memory_address_length returning nonzero for
;; addresses needing extra displacement/SIB bytes — confirm against its
;; definition in i386.c.
(define_predicate "long_memory_operand"
  (and (match_operand 0 "memory_operand")
       (match_test "memory_address_length (op, false)")))
1197
;; Return true if OP is a comparison operator that can be issued by fcmov.
(define_predicate "fcmov_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  /* FP comparisons are first mapped to the equivalent integer
     condition code before checking against the fcmov set.  */
  if (inmode == CCFPmode || inmode == CCFPUmode)
    {
      if (!ix86_trivial_fp_comparison_operator (op, mode))
	return false;
      code = ix86_fp_compare_code_to_integer (code);
    }
  /* i387 supports just limited amount of conditional codes.  */
  switch (code)
    {
    case LTU: case GTU: case LEU: case GEU:
      /* Unsigned conditions are representable only in these flag modes.  */
      if (inmode == CCmode || inmode == CCFPmode || inmode == CCFPUmode
	  || inmode == CCCmode)
	return true;
      return false;
    case ORDERED: case UNORDERED:
    case EQ: case NE:
      return true;
    default:
      return false;
    }
})
1226
;; Return true if OP is a comparison that can be used in the CMPSS/CMPPS insns.
;; The first set are supported directly; the second set can't be done with
;; full IEEE support, i.e. NaNs.  The second set is only accepted when AVX
;; is enabled.
(define_predicate "sse_comparison_operator"
  (ior (match_code "eq,ne,lt,le,unordered,unge,ungt,ordered")
       (and (match_test "TARGET_AVX")
	    (match_code "ge,gt,uneq,unle,unlt,ltgt"))))
1235
;; Return true for signed integer comparison codes (plus equality).
(define_predicate "ix86_comparison_int_operator"
  (match_code "ne,eq,ge,gt,le,lt"))

;; Return true for unsigned integer comparison codes (plus equality).
(define_predicate "ix86_comparison_uns_operator"
  (match_code "ne,eq,geu,gtu,leu,ltu"))

;; Return true for the equality codes; presumably used by bit-test (bt)
;; patterns, which only produce eq/ne results — confirm against users.
(define_predicate "bt_comparison_operator"
  (match_code "ne,eq"))
1244
;; Return true if OP is a valid comparison operator in valid mode.
(define_predicate "ix86_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  if (inmode == CCFPmode || inmode == CCFPUmode)
    return ix86_trivial_fp_comparison_operator (op, mode);

  /* For integer flag modes, each condition code is only meaningful in
     the flag modes listed for it below.  */
  switch (code)
    {
    case EQ: case NE:
      return true;
    case LT: case GE:
      if (inmode == CCmode || inmode == CCGCmode
	  || inmode == CCGOCmode || inmode == CCNOmode)
	return true;
      return false;
    case LTU: case GTU: case LEU: case GEU:
      if (inmode == CCmode || inmode == CCCmode)
	return true;
      return false;
    case ORDERED: case UNORDERED:
      if (inmode == CCmode)
	return true;
      return false;
    case GT: case LE:
      if (inmode == CCmode || inmode == CCGCmode || inmode == CCNOmode)
	return true;
      return false;
    default:
      return false;
    }
})
1280
;; Return true if OP is a valid comparison operator
;; testing carry flag to be set.
(define_predicate "ix86_carry_flag_operator"
  (match_code "ltu,lt,unlt,gtu,gt,ungt,le,unle,ge,unge,ltgt,uneq")
{
  machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  /* Reduce an FP comparison to its integer condition code first.  */
  if (inmode == CCFPmode || inmode == CCFPUmode)
    {
      if (!ix86_trivial_fp_comparison_operator (op, mode))
	return false;
      code = ix86_fp_compare_code_to_integer (code);
    }
  /* In CCCmode both LTU and GTU are read from the carry flag.  */
  else if (inmode == CCCmode)
   return code == LTU || code == GTU;
  else if (inmode != CCmode)
    return false;

  /* In plain CCmode only LTU tests the carry flag.  */
  return code == LTU;
})
1302
;; Return true if this comparison only requires testing one flag bit.
;; Comparisons outside this set need a multi-flag test sequence.
(define_predicate "ix86_trivial_fp_comparison_operator"
  (match_code "gt,ge,unlt,unle,uneq,ltgt,ordered,unordered"))
1306
;; Return true if we know how to do this comparison.  Others require
;; testing more than one flag bit, and we let the generic middle-end
;; code do that.  When the chosen strategy is arithmetic, any comparison
;; code is acceptable; otherwise only the single-flag-bit ones are.
(define_predicate "ix86_fp_comparison_operator"
  (if_then_else (match_test "ix86_fp_comparison_strategy (GET_CODE (op))
                             == IX86_FPCMP_ARITH")
               (match_operand 0 "comparison_operator")
               (match_operand 0 "ix86_trivial_fp_comparison_operator")))
1315
;; Same as above, but for swapped comparison used in *jcc<fp>_<int>_i387.
(define_predicate "ix86_swapped_fp_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  enum rtx_code code = GET_CODE (op);
  bool ret;

  /* Temporarily swap the condition in place, query the ordinary
     predicate, then restore OP's original code.  The PUT_CODE pair
     must stay balanced on every path.  */
  PUT_CODE (op, swap_condition (code));
  ret = ix86_fp_comparison_operator (op, mode);
  PUT_CODE (op, code);
  return ret;
})
1328
;; Nearly general operand, but accept any const_double, since we wish
;; to be able to drop them into memory rather than have them get pulled
;; into registers.
(define_predicate "cmp_fp_expander_operand"
  (ior (match_code "const_double")
       (match_operand 0 "general_operand")))
1335
;; Return true if this is a valid binary floating-point operation.
(define_predicate "binary_fp_operator"
  (match_code "plus,minus,mult,div"))

;; Return true if this is a multiply operation.
(define_predicate "mult_operator"
  (match_code "mult"))

;; Return true if this is a division operation.
(define_predicate "div_operator"
  (match_code "div"))

;; Return true if this is a plus, minus, and, ior or xor operation.
(define_predicate "plusminuslogic_operator"
  (match_code "plus,minus,and,ior,xor"))

;; Return true if this is a FLOAT rtx, i.e. an integer-to-floating-point
;; conversion.
(define_predicate "float_operator"
  (match_code "float"))
1355
;; Return true for ARITHMETIC_P.
(define_predicate "arith_or_logical_operator"
  (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax,compare,minus,div,
	       mod,udiv,umod,ashift,rotate,ashiftrt,lshiftrt,rotatert"))

;; Return true for COMMUTATIVE_P.
(define_predicate "commutative_operator"
  (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax"))

;; Return true if OP is a binary operator that can be promoted to wider mode.
;; Multiplication is included only when the tuning flag says HImode imul
;; should be promoted.
(define_predicate "promotable_binary_operator"
  (ior (match_code "plus,minus,and,ior,xor,ashift")
       (and (match_code "mult")
	    (match_test "TARGET_TUNE_PROMOTE_HIMODE_IMUL"))))

;; Return true if OP is a COMPARE rtx.
(define_predicate "compare_operator"
  (match_code "compare"))

;; Return true if OP is an ABS or NEG rtx.
(define_predicate "absneg_operator"
  (match_code "abs,neg"))
1376
;; Return true if OP is misaligned memory operand: a MEM whose known
;; alignment (in bits) is below the natural alignment of MODE.
(define_predicate "misaligned_operand"
  (and (match_code "mem")
       (match_test "MEM_ALIGN (op) < GET_MODE_ALIGNMENT (mode)")))
1381
;; Return true if OP is a emms operation, known to be a PARALLEL.
(define_predicate "emms_operation"
  (match_code "parallel")
{
  unsigned i;

  /* Expect 17 elements: element 0 (the insn itself) is not inspected
     here; elements 1..8 clobber the x87 stack registers in XFmode and
     elements 9..16 clobber the MMX registers in DImode.  */
  if (XVECLEN (op, 0) != 17)
    return false;

  for (i = 0; i < 8; i++)
    {
      rtx elt = XVECEXP (op, 0, i+1);

      if (GET_CODE (elt) != CLOBBER
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != XFmode
	  || REGNO (SET_DEST (elt)) != FIRST_STACK_REG + i)
        return false;

      elt = XVECEXP (op, 0, i+9);

      if (GET_CODE (elt) != CLOBBER
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != DImode
	  || REGNO (SET_DEST (elt)) != FIRST_MMX_REG + i)
	return false;
    }
  return true;
})
1411
;; Return true if OP is a vzeroall operation, known to be a PARALLEL.
(define_predicate "vzeroall_operation"
  (match_code "parallel")
{
  /* 16 SSE registers exist in 64-bit mode, 8 otherwise.  */
  unsigned i, nregs = TARGET_64BIT ? 16 : 8;

  /* Element 0 (the insn itself) is not inspected; elements 1..nregs
     must each set one SSE register to the V8SImode zero vector.  */
  if ((unsigned) XVECLEN (op, 0) != 1 + nregs)
    return false;

  for (i = 0; i < nregs; i++)
    {
      rtx elt = XVECEXP (op, 0, i+1);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != V8SImode
	  || REGNO (SET_DEST (elt)) != SSE_REGNO (i)
	  || SET_SRC (elt) != CONST0_RTX (V8SImode))
	return false;
    }
  return true;
})
1434
;; Return true if OP is a vzeroupper operation, i.e. the
;; UNSPECV_VZEROUPPER volatile unspec.
(define_predicate "vzeroupper_operation"
  (and (match_code "unspec_volatile")
       (match_test "XINT (op, 1) == UNSPECV_VZEROUPPER")))
1439
;; Return true if OP is a parallel for a vbroadcast permute.

(define_predicate "avx_vbroadcast_operand"
  (and (match_code "parallel")
       (match_code "const_int" "a"))
{
  int i, nelt = XVECLEN (op, 0);
  rtx first = XVECEXP (op, 0, 0);

  /* The element count is not validated here; we merely require every
     selector to be identical to the first one.  */
  for (i = 1; i < nelt; ++i)
    if (XVECEXP (op, 0, i) != first)
      return false;
  return true;
})
1456
;; Return true if OP is a parallel for a palignr permute.
(define_predicate "palignr_operand"
  (and (match_code "parallel")
       (match_code "const_int" "a"))
{
  int i, nelt = XVECLEN (op, 0);
  int start = INTVAL (XVECEXP (op, 0, 0));

  /* The selectors must form a rotation by START; e.g. {5 6 7 0 1 2 3 4}
     is "palignr 5, xmm, xmm".  */
  for (i = 1; i < nelt; ++i)
    if (INTVAL (XVECEXP (op, 0, i)) != (start + i) % nelt)
      return false;
  return true;
})
1472
;; Return true if OP is a proper third operand to vpblendw256:
;; a constant whose value is its own low byte duplicated into bits 8-15
;; (and nothing above bit 15).
(define_predicate "avx2_pblendw_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT mask = INTVAL (op);

  return mask == (((mask & 0xff) << 8) | (mask & 0xff));
})
1481
;; Return true if OP is nonimmediate_operand or CONST_VECTOR.
(define_predicate "general_vector_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_code "const_vector")))

;; Return true if OP is either -1 constant or stored in register.
;; The constant is matched by identity against the shared constm1_rtx.
(define_predicate "register_or_constm1_operand"
  (ior (match_operand 0 "register_operand")
       (and (match_code "const_int")
	    (match_test "op == constm1_rtx"))))
1492