/* { dg-do run } */
/* { dg-require-effective-target avx } */
/* { dg-options "-O2 -mavx" } */
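/* Verify that _mm256_zeroall clears every ymm register.  */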

#include "avx-check.h"

static void
avx_test (void)
{
  __m256i src;
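  /* One 32-byte save slot per ymm register: ymm0-ymm15 on x86-64,
     ymm0-ymm7 in 32-bit mode.  */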
#ifdef __x86_64__
  char reg_save[16][32];
  char d[16][32];
#else
  char reg_save[8][32];
  char d[8][32];
#endif

  int s[8] = {1, 2, 3, 4, 5, 6, 7, 8};

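  /* d is the all-zero reference; poison reg_save with -1 so that a
     register dump that never happens cannot match by accident.  */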
  __builtin_memset (d, 0, sizeof d);
  __builtin_memset (reg_save, -1, sizeof reg_save);

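  /* Put known nonzero data into a ymm register before the zeroall.  */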
  src = _mm256_loadu_si256 ((__m256i*) s);

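  /* This must clear all the ymm registers.  */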
  _mm256_zeroall ();

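  /* Dump each ymm register to memory.  The volatile asm keeps the
     compiler from dropping or reordering the stores.  */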
  __asm__ __volatile__ ("vmovdqu %%ymm0,%0":"=m"(reg_save[0]));
  __asm__ __volatile__ ("vmovdqu %%ymm1,%0":"=m"(reg_save[1]));
  __asm__ __volatile__ ("vmovdqu %%ymm2,%0":"=m"(reg_save[2]));
  __asm__ __volatile__ ("vmovdqu %%ymm3,%0":"=m"(reg_save[3]));
  __asm__ __volatile__ ("vmovdqu %%ymm4,%0":"=m"(reg_save[4]));
  __asm__ __volatile__ ("vmovdqu %%ymm5,%0":"=m"(reg_save[5]));
  __asm__ __volatile__ ("vmovdqu %%ymm6,%0":"=m"(reg_save[6]));
  __asm__ __volatile__ ("vmovdqu %%ymm7,%0":"=m"(reg_save[7]));
#ifdef __x86_64__
  __asm__ __volatile__ ("vmovdqu %%ymm8,%0":"=m"(reg_save[8]));
  __asm__ __volatile__ ("vmovdqu %%ymm9,%0":"=m"(reg_save[9]));
  __asm__ __volatile__ ("vmovdqu %%ymm10,%0":"=m"(reg_save[10]));
  __asm__ __volatile__ ("vmovdqu %%ymm11,%0":"=m"(reg_save[11]));
  __asm__ __volatile__ ("vmovdqu %%ymm12,%0":"=m"(reg_save[12]));
  __asm__ __volatile__ ("vmovdqu %%ymm13,%0":"=m"(reg_save[13]));
  __asm__ __volatile__ ("vmovdqu %%ymm14,%0":"=m"(reg_save[14]));
  __asm__ __volatile__ ("vmovdqu %%ymm15,%0":"=m"(reg_save[15]));
#endif

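  /* Every dumped register must compare equal to the all-zero buffer.  */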
  if (__builtin_memcmp (reg_save, d, sizeof d))
    abort ();

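  /* Using src here keeps it live across _mm256_zeroall, so the compiler
     has to preserve its value around the intrinsic rather than leave it
     in a clobbered ymm register.  */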
  _mm256_storeu_si256 ((__m256i*) d, src);
}