comparison: test/CodeGen/X86/pmovsx-inreg.ll @ 95:afa8332a0e37 (LLVM 3.8)
author:   Kaito Tokumori <e105711@ie.u-ryukyu.ac.jp>
date:     Tue, 13 Oct 2015 17:48:58 +0900
parents:  95c75e76d11b
children: 1172e4bd9c6f
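
The edit itself is mechanical: every vector load in the file is rewritten from the old
load <ty>* <ptr> spelling to the explicit result-type spelling load <ty>, <ty>* <ptr>
used by LLVM 3.7 and later textual IR; nothing else changes. A minimal sketch of the two
spellings (the function @typed_load_example is made up for illustration and is not part
of the test file):

define <2 x i64> @typed_load_example(<2 x i8>* %in) nounwind {
  ; old spelling (the '-' lines in the comparison below):
  ;   %v = load <2 x i8>* %in, align 1
  ; new spelling, with the loaded type written out before the pointer operand:
  %v = load <2 x i8>, <2 x i8>* %in, align 1
  %s = sext <2 x i8> %v to <2 x i64>
  ret <2 x i64> %s
}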
84:f3e34b893a5f (old) vs. 95:afa8332a0e37 (new), shown as a unified diff: '-' lines come
from the old revision, '+' lines from the new one, and [...] marks parts of the file that
the comparison does not show.

[...]
; PR14887
; These tests inject a store into the chain to test the inreg versions of pmovsx

define void @test1(<2 x i8>* %in, <2 x i64>* %out) nounwind {
-  %wide.load35 = load <2 x i8>* %in, align 1
+  %wide.load35 = load <2 x i8>, <2 x i8>* %in, align 1
  %sext = sext <2 x i8> %wide.load35 to <2 x i64>
  store <2 x i64> zeroinitializer, <2 x i64>* undef, align 8
  store <2 x i64> %sext, <2 x i64>* %out, align 8
  ret void

[...]
; AVX2-LABEL: test1:
; AVX2: vpmovsxbq
}

define void @test2(<4 x i8>* %in, <4 x i64>* %out) nounwind {
-  %wide.load35 = load <4 x i8>* %in, align 1
+  %wide.load35 = load <4 x i8>, <4 x i8>* %in, align 1
  %sext = sext <4 x i8> %wide.load35 to <4 x i64>
  store <4 x i64> zeroinitializer, <4 x i64>* undef, align 8
  store <4 x i64> %sext, <4 x i64>* %out, align 8
  ret void

; AVX2-LABEL: test2:
; AVX2: vpmovsxbq
}

define void @test3(<4 x i8>* %in, <4 x i32>* %out) nounwind {
-  %wide.load35 = load <4 x i8>* %in, align 1
+  %wide.load35 = load <4 x i8>, <4 x i8>* %in, align 1
  %sext = sext <4 x i8> %wide.load35 to <4 x i32>
  store <4 x i32> zeroinitializer, <4 x i32>* undef, align 8
  store <4 x i32> %sext, <4 x i32>* %out, align 8
  ret void

[...]
; AVX2-LABEL: test3:
; AVX2: vpmovsxbd
}

define void @test4(<8 x i8>* %in, <8 x i32>* %out) nounwind {
-  %wide.load35 = load <8 x i8>* %in, align 1
+  %wide.load35 = load <8 x i8>, <8 x i8>* %in, align 1
  %sext = sext <8 x i8> %wide.load35 to <8 x i32>
  store <8 x i32> zeroinitializer, <8 x i32>* undef, align 8
  store <8 x i32> %sext, <8 x i32>* %out, align 8
  ret void

; AVX2-LABEL: test4:
; AVX2: vpmovsxbd
}

define void @test5(<8 x i8>* %in, <8 x i16>* %out) nounwind {
-  %wide.load35 = load <8 x i8>* %in, align 1
+  %wide.load35 = load <8 x i8>, <8 x i8>* %in, align 1
  %sext = sext <8 x i8> %wide.load35 to <8 x i16>
  store <8 x i16> zeroinitializer, <8 x i16>* undef, align 8
  store <8 x i16> %sext, <8 x i16>* %out, align 8
  ret void

[...]
; AVX2-LABEL: test5:
; AVX2: vpmovsxbw
}

define void @test6(<16 x i8>* %in, <16 x i16>* %out) nounwind {
-  %wide.load35 = load <16 x i8>* %in, align 1
+  %wide.load35 = load <16 x i8>, <16 x i8>* %in, align 1
  %sext = sext <16 x i8> %wide.load35 to <16 x i16>
  store <16 x i16> zeroinitializer, <16 x i16>* undef, align 8
  store <16 x i16> %sext, <16 x i16>* %out, align 8
  ret void

; AVX2-LABEL: test6:
; AVX2: vpmovsxbw
}

define void @test7(<2 x i16>* %in, <2 x i64>* %out) nounwind {
-  %wide.load35 = load <2 x i16>* %in, align 1
+  %wide.load35 = load <2 x i16>, <2 x i16>* %in, align 1
  %sext = sext <2 x i16> %wide.load35 to <2 x i64>
  store <2 x i64> zeroinitializer, <2 x i64>* undef, align 8
  store <2 x i64> %sext, <2 x i64>* %out, align 8
  ret void

[...]
; AVX2-LABEL: test7:
; AVX2: vpmovsxwq
}

define void @test8(<4 x i16>* %in, <4 x i64>* %out) nounwind {
-  %wide.load35 = load <4 x i16>* %in, align 1
+  %wide.load35 = load <4 x i16>, <4 x i16>* %in, align 1
  %sext = sext <4 x i16> %wide.load35 to <4 x i64>
  store <4 x i64> zeroinitializer, <4 x i64>* undef, align 8
  store <4 x i64> %sext, <4 x i64>* %out, align 8
  ret void

; AVX2-LABEL: test8:
; AVX2: vpmovsxwq
}

define void @test9(<4 x i16>* %in, <4 x i32>* %out) nounwind {
-  %wide.load35 = load <4 x i16>* %in, align 1
+  %wide.load35 = load <4 x i16>, <4 x i16>* %in, align 1
  %sext = sext <4 x i16> %wide.load35 to <4 x i32>
  store <4 x i32> zeroinitializer, <4 x i32>* undef, align 8
  store <4 x i32> %sext, <4 x i32>* %out, align 8
  ret void

[...]
; AVX2-LABEL: test9:
; AVX2: vpmovsxwd
}

define void @test10(<8 x i16>* %in, <8 x i32>* %out) nounwind {
-  %wide.load35 = load <8 x i16>* %in, align 1
+  %wide.load35 = load <8 x i16>, <8 x i16>* %in, align 1
  %sext = sext <8 x i16> %wide.load35 to <8 x i32>
  store <8 x i32> zeroinitializer, <8 x i32>* undef, align 8
  store <8 x i32> %sext, <8 x i32>* %out, align 8
  ret void

; AVX2-LABEL: test10:
; AVX2: vpmovsxwd
}

define void @test11(<2 x i32>* %in, <2 x i64>* %out) nounwind {
-  %wide.load35 = load <2 x i32>* %in, align 1
+  %wide.load35 = load <2 x i32>, <2 x i32>* %in, align 1
  %sext = sext <2 x i32> %wide.load35 to <2 x i64>
  store <2 x i64> zeroinitializer, <2 x i64>* undef, align 8
  store <2 x i64> %sext, <2 x i64>* %out, align 8
  ret void

[...]
; AVX2-LABEL: test11:
; AVX2: vpmovsxdq
}

define void @test12(<4 x i32>* %in, <4 x i64>* %out) nounwind {
-  %wide.load35 = load <4 x i32>* %in, align 1
+  %wide.load35 = load <4 x i32>, <4 x i32>* %in, align 1
  %sext = sext <4 x i32> %wide.load35 to <4 x i64>
  store <4 x i64> zeroinitializer, <4 x i64>* undef, align 8
  store <4 x i64> %sext, <4 x i64>* %out, align 8
  ret void
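
As the comment at the top of the file says, each test stores an unrelated zero vector to
an undef pointer before storing the sign-extended result; presumably that extra store on
the chain is what steers the backend toward the register-source ("inreg") forms of pmovsx
that the test name refers to, rather than a plain memory-operand pmovsx. A minimal,
hypothetical sketch of the purely in-register case (not part of the test file): when the
source value arrives as an argument there is no load in the IR to fold, so any
pmovsx-style extension the backend emits would naturally take its source from a register.

define <2 x i64> @sext_in_register(<2 x i8> %v) nounwind {
  ; no memory access here: the sign extension operates on an in-register value
  %s = sext <2 x i8> %v to <2 x i64>
  ret <2 x i64> %s
}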