mirror of
https://github.com/intel/llvm.git
synced 2026-01-27 06:06:34 +08:00
[RISCV] Add copies to physical registers in VL optimizer tests. NFC (#151170)
In an upcoming patch to support recurrences in the RISCVVLOptimizer, we need to perform an optimistic dataflow analysis where we assume instructions have a DemandedVL of zero until a user is encountered. Because of this, if there's no "root" instruction, nothing will be demanded and all the VLs will be set to zero. This prepares for that by adding a copy to a physical register in the MIR tests so that the behaviour is preserved, and matches what's generated when lowering from regular LLVM IR.
This commit is contained in:
File diff suppressed because it is too large
Load Diff
@@ -12,9 +12,11 @@ body: |
|
||||
; CHECK-NEXT: %vl:gprnox0 = COPY $x1
|
||||
; CHECK-NEXT: %x:vr = PseudoVADD_VV_MF4 $noreg, $noreg, $noreg, -1, 4 /* e16 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %y:vr = PseudoVNSRL_WV_MF4 $noreg, %x, $noreg, %vl, 4 /* e16 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8 = COPY %y
|
||||
%vl:gprnox0 = COPY $x1
|
||||
%x:vr = PseudoVADD_VV_MF4 $noreg, $noreg, $noreg, -1, 4 /* e16 */, 0 /* tu, mu */
|
||||
%y:vr = PseudoVNSRL_WV_MF4 $noreg, %x, $noreg, %vl, 4 /* e16 */, 0 /* tu, mu */
|
||||
$v8 = COPY %y
|
||||
...
|
||||
---
|
||||
name: vredsum_vv_user
|
||||
@@ -28,10 +30,14 @@ body: |
|
||||
; CHECK-NEXT: %x:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, -1, 5 /* e32 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %y:vr = PseudoVREDSUM_VS_M1_E64 $noreg, %x, $noreg, -1, 6 /* e64 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %z:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, %vl, 5 /* e32 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8 = COPY %y
|
||||
; CHECK-NEXT: $v9 = COPY %z
|
||||
%vl:gprnox0 = COPY $x1
|
||||
%x:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, -1, 5 /* e32 */, 0 /* tu, mu */
|
||||
%y:vr = PseudoVREDSUM_VS_M1_E64 $noreg, %x, $noreg, -1, 6 /* e64 */, 0 /* tu, mu */
|
||||
%z:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, %vl, 5 /* e32 */, 0 /* tu, mu */
|
||||
$v8 = COPY %y
|
||||
$v9 = COPY %z
|
||||
...
|
||||
---
|
||||
name: use_largest_common_vl_imm_imm
|
||||
@@ -41,9 +47,13 @@ body: |
|
||||
; CHECK: %x:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, 2, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %y:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %z:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, 2, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8 = COPY %y
|
||||
; CHECK-NEXT: $v9 = COPY %z
|
||||
%x:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, -1, 3 /* e8 */, 0
|
||||
%y:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, 1, 3 /* e8 */, 0
|
||||
%z:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, 2, 3 /* e8 */, 0
|
||||
$v8 = COPY %y
|
||||
$v9 = COPY %z
|
||||
...
|
||||
---
|
||||
name: use_largest_common_vl_same_reg
|
||||
@@ -57,10 +67,14 @@ body: |
|
||||
; CHECK-NEXT: %x:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, %vl, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %y:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, %vl, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %z:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, %vl, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8 = COPY %y
|
||||
; CHECK-NEXT: $v9 = COPY %z
|
||||
%vl:gprnox0 = COPY $x1
|
||||
%x:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, -1, 3 /* e8 */, 0
|
||||
%y:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, %vl, 3 /* e8 */, 0
|
||||
%z:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, %vl, 3 /* e8 */, 0
|
||||
$v8 = COPY %y
|
||||
$v9 = COPY %z
|
||||
...
|
||||
---
|
||||
name: use_largest_common_vl_diff_regs
|
||||
@@ -75,11 +89,15 @@ body: |
|
||||
; CHECK-NEXT: %x:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, -1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %y:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, %vl0, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %z:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, %vl1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8 = COPY %y
|
||||
; CHECK-NEXT: $v9 = COPY %z
|
||||
%vl0:gprnox0 = COPY $x1
|
||||
%vl1:gprnox0 = COPY $x2
|
||||
%x:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, -1, 3 /* e8 */, 0
|
||||
%y:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, %vl0, 3 /* e8 */, 0
|
||||
%z:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, %vl1, 3 /* e8 */, 0
|
||||
$v8 = COPY %y
|
||||
$v9 = COPY %z
|
||||
...
|
||||
---
|
||||
name: use_largest_common_vl_imm_reg
|
||||
@@ -93,10 +111,14 @@ body: |
|
||||
; CHECK-NEXT: %x:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, -1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %y:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, %vl, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %z:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8 = COPY %y
|
||||
; CHECK-NEXT: $v9 = COPY %z
|
||||
%vl:gprnox0 = COPY $x1
|
||||
%x:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, -1, 3 /* e8 */, 0
|
||||
%y:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, %vl, 3 /* e8 */, 0
|
||||
%z:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, 1, 3 /* e8 */, 0
|
||||
$v8 = COPY %y
|
||||
$v9 = COPY %z
|
||||
...
|
||||
---
|
||||
name: use_largest_common_vl_imm_vlmax
|
||||
@@ -106,9 +128,13 @@ body: |
|
||||
; CHECK: %x:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, -1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %y:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %z:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, -1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8 = COPY %y
|
||||
; CHECK-NEXT: $v9 = COPY %z
|
||||
%x:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, -1, 3 /* e8 */, 0
|
||||
%y:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, 1, 3 /* e8 */, 0
|
||||
%z:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, -1, 3 /* e8 */, 0
|
||||
$v8 = COPY %y
|
||||
$v9 = COPY %z
|
||||
...
|
||||
---
|
||||
name: vfcvt_x_f_v_nofpexcept
|
||||
@@ -117,8 +143,10 @@ body: |
|
||||
; CHECK-LABEL: name: vfcvt_x_f_v_nofpexcept
|
||||
; CHECK: %x:vr = nofpexcept PseudoVFCVT_X_F_V_M1 $noreg, $noreg, 0, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %y:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8 = COPY %y
|
||||
%x:vr = nofpexcept PseudoVFCVT_X_F_V_M1 $noreg, $noreg, 0, -1, 3 /* e32 */, 0
|
||||
%y:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, 1, 3 /* e8 */, 0
|
||||
$v8 = COPY %y
|
||||
...
|
||||
---
|
||||
name: vfcvt_x_f_v_fpexcept
|
||||
@@ -127,8 +155,10 @@ body: |
|
||||
; CHECK-LABEL: name: vfcvt_x_f_v_fpexcept
|
||||
; CHECK: %x:vr = PseudoVFCVT_X_F_V_M1 $noreg, $noreg, 0, -1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %y:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8 = COPY %y
|
||||
%x:vr = PseudoVFCVT_X_F_V_M1 $noreg, $noreg, 0, -1, 3 /* e32 */, 0
|
||||
%y:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, 1, 3 /* e8 */, 0
|
||||
$v8 = COPY %y
|
||||
...
|
||||
---
|
||||
name: vfncvtbf16_f_f_w_nofpexcept
|
||||
@@ -137,8 +167,10 @@ body: |
|
||||
; CHECK-LABEL: name: vfncvtbf16_f_f_w_nofpexcept
|
||||
; CHECK: early-clobber %x:vr = nofpexcept PseudoVFNCVTBF16_F_F_W_M1_E16 $noreg, $noreg, 7, 1, 4 /* e16 */, 0 /* tu, mu */, implicit $frm
|
||||
; CHECK-NEXT: %y:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, 1, 4 /* e16 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8 = COPY %y
|
||||
%x:vr = nofpexcept PseudoVFNCVTBF16_F_F_W_M1_E16 $noreg, $noreg, 7, -1, 4 /* e16 */, 0 /* tu, mu */, implicit $frm
|
||||
%y:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, 1, 4 /* e16 */, 0
|
||||
$v8 = COPY %y
|
||||
...
|
||||
---
|
||||
name: vfsqrt_nofpexcept
|
||||
@@ -147,8 +179,10 @@ body: |
|
||||
; CHECK-LABEL: name: vfsqrt_nofpexcept
|
||||
; CHECK: %x:vrm2 = nofpexcept PseudoVFSQRT_V_M2_E32 $noreg, $noreg, 7, 6, 5 /* e32 */, 3 /* ta, ma */, implicit $frm
|
||||
; CHECK-NEXT: early-clobber %y:vr = nofpexcept PseudoVFNCVTBF16_F_F_W_M1_E16 $noreg, %x, 7, 6, 4 /* e16 */, 3 /* ta, ma */, implicit $frm
|
||||
; CHECK-NEXT: $v8 = COPY %y
|
||||
%x:vrm2 = nofpexcept PseudoVFSQRT_V_M2_E32 $noreg, $noreg, 7, 8, 5, 3, implicit $frm
|
||||
early-clobber %y:vr = nofpexcept PseudoVFNCVTBF16_F_F_W_M1_E16 $noreg, %x, 7, 6, 4, 3, implicit $frm
|
||||
$v8 = COPY %y
|
||||
...
|
||||
---
|
||||
name: vfsqrt_fpexcept
|
||||
@@ -157,8 +191,10 @@ body: |
|
||||
; CHECK-LABEL: name: vfsqrt_fpexcept
|
||||
; CHECK: %x:vrm2 = PseudoVFSQRT_V_M2_E32 $noreg, $noreg, 7, 8, 5 /* e32 */, 3 /* ta, ma */, implicit $frm
|
||||
; CHECK-NEXT: early-clobber %y:vr = nofpexcept PseudoVFNCVTBF16_F_F_W_M1_E16 $noreg, %x, 7, 6, 4 /* e16 */, 3 /* ta, ma */, implicit $frm
|
||||
; CHECK-NEXT: $v8 = COPY %y
|
||||
%x:vrm2 = PseudoVFSQRT_V_M2_E32 $noreg, $noreg, 7, 8, 5, 3, implicit $frm
|
||||
early-clobber %y:vr = nofpexcept PseudoVFNCVTBF16_F_F_W_M1_E16 $noreg, %x, 7, 6, 4, 3, implicit $frm
|
||||
$v8 = COPY %y
|
||||
...
|
||||
---
|
||||
name: vfrsqrt7_nofpexcept
|
||||
@@ -167,8 +203,10 @@ body: |
|
||||
; CHECK-LABEL: name: vfrsqrt7_nofpexcept
|
||||
; CHECK: %x:vrm2 = nofpexcept PseudoVFRSQRT7_V_M2_E32 $noreg, $noreg, 1, 5 /* e32 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %y:vrm2 = PseudoVADD_VV_M2 $noreg, %x, $noreg, 1, 5 /* e32 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8m2 = COPY %y
|
||||
%x:vrm2 = nofpexcept PseudoVFRSQRT7_V_M2_E32 $noreg, $noreg, 7, 5, 0
|
||||
%y:vrm2 = PseudoVADD_VV_M2 $noreg, %x, $noreg, 1, 5 /* e32 */, 0
|
||||
$v8m2 = COPY %y
|
||||
...
|
||||
---
|
||||
name: vfrsqrt7_fpexcept
|
||||
@@ -177,8 +215,10 @@ body: |
|
||||
; CHECK-LABEL: name: vfrsqrt7_fpexcept
|
||||
; CHECK: %x:vrm2 = PseudoVFRSQRT7_V_M2_E32 $noreg, $noreg, 7, 5 /* e32 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %y:vrm2 = PseudoVADD_VV_M2 $noreg, %x, $noreg, 1, 5 /* e32 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8m2 = COPY %y
|
||||
%x:vrm2 = PseudoVFRSQRT7_V_M2_E32 $noreg, $noreg, 7, 5, 0
|
||||
%y:vrm2 = PseudoVADD_VV_M2 $noreg, %x, $noreg, 1, 5 /* e32 */, 0
|
||||
$v8m2 = COPY %y
|
||||
...
|
||||
---
|
||||
name: vwadd_tied_vs1
|
||||
@@ -187,8 +227,10 @@ body: |
|
||||
; CHECK-LABEL: name: vwadd_tied_vs1
|
||||
; CHECK: %x:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: early-clobber %y:vrm2 = PseudoVWADD_WV_M1_TIED $noreg, %x, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8m2 = COPY %y
|
||||
%x:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, -1, 3 /* e8 */, 0 /* tu, mu */
|
||||
%y:vrm2 = PseudoVWADD_WV_M1_TIED $noreg, %x, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
$v8m2 = COPY %y
|
||||
...
|
||||
---
|
||||
name: crossbb
|
||||
@@ -202,11 +244,13 @@ body: |
|
||||
; CHECK-NEXT: bb.1:
|
||||
; CHECK-NEXT: %a1:vr = PseudoVADD_VV_M1 $noreg, %c, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %a2:vr = PseudoVADD_VV_M1 $noreg, %a1, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8 = COPY %a2
|
||||
; CHECK-NEXT: PseudoRET
|
||||
; CHECK-NEXT: {{ $}}
|
||||
; CHECK-NEXT: bb.2:
|
||||
; CHECK-NEXT: %b1:vr = PseudoVADD_VV_M1 $noreg, %c, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %b2:vr = PseudoVADD_VV_M1 $noreg, %b1, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8 = COPY %b2
|
||||
; CHECK-NEXT: PseudoRET
|
||||
; CHECK-NEXT: {{ $}}
|
||||
; CHECK-NEXT: bb.3:
|
||||
@@ -221,10 +265,12 @@ body: |
|
||||
bb.1:
|
||||
%a1:vr = PseudoVADD_VV_M1 $noreg, %c, $noreg, -1, 3 /* e8 */, 0 /* tu, mu */
|
||||
%a2:vr = PseudoVADD_VV_M1 $noreg, %a1, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
$v8 = COPY %a2
|
||||
PseudoRET
|
||||
bb.2:
|
||||
%b1:vr = PseudoVADD_VV_M1 $noreg, %c, $noreg, -1, 3 /* e8 */, 0 /* tu, mu */
|
||||
%b2:vr = PseudoVADD_VV_M1 $noreg, %b1, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
$v8 = COPY %b2
|
||||
PseudoRET
|
||||
bb.3:
|
||||
liveins: $x1
|
||||
@@ -237,17 +283,21 @@ name: unreachable
|
||||
body: |
|
||||
; CHECK-LABEL: name: unreachable
|
||||
; CHECK: bb.0:
|
||||
; CHECK-NEXT: %x:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %x:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, -1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8 = COPY %x
|
||||
; CHECK-NEXT: PseudoRET
|
||||
; CHECK-NEXT: {{ $}}
|
||||
; CHECK-NEXT: bb.1:
|
||||
; CHECK-NEXT: %y:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8 = COPY %y
|
||||
; CHECK-NEXT: PseudoRET
|
||||
bb.0:
|
||||
%x:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, -1, 3 /* e8 */, 0 /* tu, mu */
|
||||
$v8 = COPY %x
|
||||
PseudoRET
|
||||
bb.1:
|
||||
%y:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
$v8 = COPY %y
|
||||
PseudoRET
|
||||
...
|
||||
---
|
||||
@@ -259,9 +309,11 @@ body: |
|
||||
; CHECK: %x:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %y:vr = PseudoVADD_VV_M1 %x, $noreg, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %z:vr = PseudoVADD_VV_M1 $noreg, %y, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8 = COPY %z
|
||||
%x:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, -1, 3 /* e8 */, 0 /* tu, mu */
|
||||
%y:vr = PseudoVADD_VV_M1 %x, $noreg, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
%z:vr = PseudoVADD_VV_M1 $noreg, %y, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
$v8 = COPY %z
|
||||
...
|
||||
---
|
||||
# Can't reduce %x because %y uses it as a passthru, and %y's inactive elements are demanded by %z
|
||||
@@ -272,9 +324,11 @@ body: |
|
||||
; CHECK: %x:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, -1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %y:vr = PseudoVADD_VV_M1 %x, $noreg, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %z:vr = PseudoVADD_VV_M1 $noreg, %y, $noreg, 2, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8 = COPY %z
|
||||
%x:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, -1, 3 /* e8 */, 0 /* tu, mu */
|
||||
%y:vr = PseudoVADD_VV_M1 %x, $noreg, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
%z:vr = PseudoVADD_VV_M1 $noreg, %y, $noreg, 2, 3 /* e8 */, 0 /* tu, mu */
|
||||
$v8 = COPY %z
|
||||
...
|
||||
---
|
||||
# Can reduce %x even though %y uses it as a passthru, because %y's inactive elements aren't demanded
|
||||
@@ -287,11 +341,13 @@ body: |
|
||||
; CHECK-NEXT: %z:vr = PseudoVADD_VV_M1 %y, $noreg, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %a:vr = PseudoVADD_VV_M1 %z, $noreg, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %b:vr = PseudoVADD_VV_M1 $noreg, %a, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8 = COPY %b
|
||||
%x:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, -1, 3 /* e8 */, 0 /* tu, mu */
|
||||
%y:vr = PseudoVADD_VV_M1 %x, $noreg, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
%z:vr = PseudoVADD_VV_M1 %y, $noreg, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
%a:vr = PseudoVADD_VV_M1 %z, $noreg, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
%b:vr = PseudoVADD_VV_M1 $noreg, %a, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
$v8 = COPY %b
|
||||
...
|
||||
---
|
||||
# Can't reduce %x because %y uses it as a passthru, and %y's inactive elements are ultimately demanded in %b
|
||||
@@ -304,11 +360,13 @@ body: |
|
||||
; CHECK-NEXT: %z:vr = PseudoVADD_VV_M1 %y, $noreg, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %a:vr = PseudoVADD_VV_M1 %z, $noreg, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %b:vr = PseudoVADD_VV_M1 $noreg, %a, $noreg, 2, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8 = COPY %b
|
||||
%x:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, -1, 3 /* e8 */, 0 /* tu, mu */
|
||||
%y:vr = PseudoVADD_VV_M1 %x, $noreg, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
%z:vr = PseudoVADD_VV_M1 %y, $noreg, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
%a:vr = PseudoVADD_VV_M1 %z, $noreg, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
%b:vr = PseudoVADD_VV_M1 $noreg, %a, $noreg, 2, 3 /* e8 */, 0 /* tu, mu */
|
||||
$v8 = COPY %b
|
||||
...
|
||||
---
|
||||
name: vxsat_dead
|
||||
@@ -317,8 +375,10 @@ body: |
|
||||
; CHECK-LABEL: name: vxsat_dead
|
||||
; CHECK: %x:vr = PseudoVSADDU_VV_M1 $noreg, $noreg, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */, implicit-def dead $vxsat
|
||||
; CHECK-NEXT: %y:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8 = COPY %y
|
||||
%x:vr = PseudoVSADDU_VV_M1 $noreg, $noreg, $noreg, -1, 3 /* e8 */, 0 /* tu, mu */, implicit-def dead $vxsat
|
||||
%y:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
$v8 = COPY %y
|
||||
...
|
||||
---
|
||||
name: vxsat_not_dead
|
||||
@@ -327,8 +387,10 @@ body: |
|
||||
; CHECK-LABEL: name: vxsat_not_dead
|
||||
; CHECK: %x:vr = PseudoVSADDU_VV_M1 $noreg, $noreg, $noreg, -1, 3 /* e8 */, 0 /* tu, mu */, implicit-def $vxsat
|
||||
; CHECK-NEXT: %y:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8 = COPY %y
|
||||
%x:vr = PseudoVSADDU_VV_M1 $noreg, $noreg, $noreg, -1, 3 /* e8 */, 0 /* tu, mu */, implicit-def $vxsat
|
||||
%y:vr = PseudoVADD_VV_M1 $noreg, %x, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
$v8 = COPY %y
|
||||
...
|
||||
---
|
||||
name: copy
|
||||
@@ -338,9 +400,11 @@ body: |
|
||||
; CHECK: %x:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %y:vr = COPY %x
|
||||
; CHECK-NEXT: %z:vr = PseudoVADD_VV_M1 $noreg, %y, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8 = COPY %z
|
||||
%x:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, -1, 3 /* e8 */, 0 /* tu, mu */
|
||||
%y:vr = COPY %x
|
||||
%z:vr = PseudoVADD_VV_M1 $noreg, %y, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
$v8 = COPY %z
|
||||
...
|
||||
---
|
||||
name: copy_multiple_users
|
||||
@@ -351,10 +415,14 @@ body: |
|
||||
; CHECK-NEXT: %y:vr = COPY %x
|
||||
; CHECK-NEXT: %z0:vr = PseudoVADD_VV_M1 $noreg, %y, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %z1:vr = PseudoVADD_VV_M1 $noreg, %y, $noreg, 3, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8 = COPY %z0
|
||||
; CHECK-NEXT: $v9 = COPY %z1
|
||||
%x:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, -1, 3 /* e8 */, 0 /* tu, mu */
|
||||
%y:vr = COPY %x
|
||||
%z0:vr = PseudoVADD_VV_M1 $noreg, %y, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
%z1:vr = PseudoVADD_VV_M1 $noreg, %y, $noreg, 3, 3 /* e8 */, 0 /* tu, mu */
|
||||
$v8 = COPY %z0
|
||||
$v9 = COPY %z1
|
||||
...
|
||||
---
|
||||
name: copy_user_invalid_sew
|
||||
@@ -364,9 +432,11 @@ body: |
|
||||
; CHECK: %x:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, -1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: %y:vr = COPY %x
|
||||
; CHECK-NEXT: %z:vr = PseudoVADD_VV_M1 $noreg, %y, $noreg, 1, 4 /* e16 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8 = COPY %z
|
||||
%x:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, -1, 3 /* e8 */, 0 /* tu, mu */
|
||||
%y:vr = COPY %x
|
||||
%z:vr = PseudoVADD_VV_M1 $noreg, %y, $noreg, 1, 4 /* e16 */, 0 /* tu, mu */
|
||||
$v8 = COPY %z
|
||||
...
|
||||
---
|
||||
name: phi
|
||||
@@ -387,6 +457,7 @@ body: |
|
||||
; CHECK-NEXT: bb.2:
|
||||
; CHECK-NEXT: %y:vr = PHI %w, %bb.0, %x, %bb.1
|
||||
; CHECK-NEXT: %z:vr = PseudoVADD_VV_M1 $noreg, %y, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8 = COPY %z
|
||||
bb.0:
|
||||
%w:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, -1, 3 /* e8 */, 0 /* tu, mu */
|
||||
BNE $noreg, $noreg, %bb.2
|
||||
@@ -395,6 +466,7 @@ body: |
|
||||
bb.2:
|
||||
%y:vr = PHI %w, %bb.0, %x, %bb.1
|
||||
%z:vr = PseudoVADD_VV_M1 $noreg, %y, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
$v8 = COPY %z
|
||||
...
|
||||
---
|
||||
name: phi_user_invalid_sew
|
||||
@@ -415,6 +487,7 @@ body: |
|
||||
; CHECK-NEXT: bb.2:
|
||||
; CHECK-NEXT: %y:vr = PHI %w, %bb.0, %x, %bb.1
|
||||
; CHECK-NEXT: %z:vr = PseudoVADD_VV_M1 $noreg, %y, $noreg, 1, 4 /* e16 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8 = COPY %z
|
||||
bb.0:
|
||||
%w:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, -1, 3 /* e8 */, 0 /* tu, mu */
|
||||
BNE $noreg, $noreg, %bb.2
|
||||
@@ -423,6 +496,7 @@ body: |
|
||||
bb.2:
|
||||
%y:vr = PHI %w, %bb.0, %x, %bb.1
|
||||
%z:vr = PseudoVADD_VV_M1 $noreg, %y, $noreg, 1, 4 /* e16 */, 0 /* tu, mu */
|
||||
$v8 = COPY %z
|
||||
...
|
||||
---
|
||||
name: phi_different_incoming_sew
|
||||
@@ -443,6 +517,7 @@ body: |
|
||||
; CHECK-NEXT: bb.2:
|
||||
; CHECK-NEXT: %y:vr = PHI %w, %bb.0, %x, %bb.1
|
||||
; CHECK-NEXT: %z:vr = PseudoVADD_VV_M1 $noreg, %y, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8 = COPY %z
|
||||
bb.0:
|
||||
%w:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, -1, 3 /* e8 */, 0 /* tu, mu */
|
||||
BNE $noreg, $noreg, %bb.2
|
||||
@@ -451,6 +526,7 @@ body: |
|
||||
bb.2:
|
||||
%y:vr = PHI %w, %bb.0, %x, %bb.1
|
||||
%z:vr = PseudoVADD_VV_M1 $noreg, %y, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
$v8 = COPY %z
|
||||
...
|
||||
---
|
||||
name: phi_cycle_direct
|
||||
@@ -467,12 +543,14 @@ body: |
|
||||
; CHECK-NEXT: {{ $}}
|
||||
; CHECK-NEXT: %y:vr = PHI %x, %bb.0, %y, %bb.1
|
||||
; CHECK-NEXT: %z:vr = PseudoVADD_VV_M1 $noreg, %y, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8 = COPY %z
|
||||
; CHECK-NEXT: PseudoBR %bb.1
|
||||
bb.0:
|
||||
%x:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, -1, 3 /* e8 */, 0 /* tu, mu */
|
||||
bb.1:
|
||||
%y:vr = PHI %x, %bb.0, %y, %bb.1
|
||||
%z:vr = PseudoVADD_VV_M1 $noreg, %y, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
$v8 = COPY %z
|
||||
PseudoBR %bb.1
|
||||
...
|
||||
---
|
||||
@@ -490,12 +568,14 @@ body: |
|
||||
; CHECK-NEXT: {{ $}}
|
||||
; CHECK-NEXT: %y:vr = PHI %x, %bb.0, %z, %bb.1
|
||||
; CHECK-NEXT: %z:vr = PseudoVADD_VV_M1 $noreg, %y, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
; CHECK-NEXT: $v8 = COPY %z
|
||||
; CHECK-NEXT: PseudoBR %bb.1
|
||||
bb.0:
|
||||
%x:vr = PseudoVADD_VV_M1 $noreg, $noreg, $noreg, -1, 3 /* e8 */, 0 /* tu, mu */
|
||||
bb.1:
|
||||
%y:vr = PHI %x, %bb.0, %z, %bb.1
|
||||
%z:vr = PseudoVADD_VV_M1 $noreg, %y, $noreg, 1, 3 /* e8 */, 0 /* tu, mu */
|
||||
$v8 = COPY %z
|
||||
PseudoBR %bb.1
|
||||
...
|
||||
---
|
||||
|
||||
Reference in New Issue
Block a user