| // NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 |
| // REQUIRES: riscv-registered-target |
| // RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zfh \ |
| // RUN: -target-feature +zvfh -disable-O0-optnone \ |
| // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ |
| // RUN: FileCheck --check-prefix=CHECK-RV64 %s |
| |
| #include <riscv_vector.h> |
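
// Each test below covers one element type (f64/i64/u64) at one LMUL (m1..m8)
// and checks that the masked fault-only-first load lowers to the
// @llvm.riscv.vleff.mask intrinsic with a poison passthru operand and a
// trailing policy operand of 3 (tail agnostic, mask agnostic), the defaults
// for the _m overloads. The second member of the returned aggregate is the
// post-fault vl, which is stored through the new_vl pointer.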
| |
| // CHECK-RV64-LABEL: define dso_local <vscale x 1 x double> @test_vle64ff_v_f64m1_m |
| // CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { <vscale x 1 x double>, i64 } @llvm.riscv.vleff.mask.nxv1f64.i64(<vscale x 1 x double> poison, ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3) |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { <vscale x 1 x double>, i64 } [[TMP0]], 0 |
| // CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 1 x double>, i64 } [[TMP0]], 1 |
| // CHECK-RV64-NEXT: store i64 [[TMP2]], ptr [[NEW_VL]], align 8 |
| // CHECK-RV64-NEXT: ret <vscale x 1 x double> [[TMP1]] |
| // |
| vfloat64m1_t test_vle64ff_v_f64m1_m(vbool64_t mask, const double *base, size_t *new_vl, size_t vl) { |
| return __riscv_vle64ff(mask, base, new_vl, vl); |
| } |
| |
| // CHECK-RV64-LABEL: define dso_local <vscale x 2 x double> @test_vle64ff_v_f64m2_m |
| // CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { <vscale x 2 x double>, i64 } @llvm.riscv.vleff.mask.nxv2f64.i64(<vscale x 2 x double> poison, ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3) |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { <vscale x 2 x double>, i64 } [[TMP0]], 0 |
| // CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 2 x double>, i64 } [[TMP0]], 1 |
| // CHECK-RV64-NEXT: store i64 [[TMP2]], ptr [[NEW_VL]], align 8 |
| // CHECK-RV64-NEXT: ret <vscale x 2 x double> [[TMP1]] |
| // |
| vfloat64m2_t test_vle64ff_v_f64m2_m(vbool32_t mask, const double *base, size_t *new_vl, size_t vl) { |
| return __riscv_vle64ff(mask, base, new_vl, vl); |
| } |
| |
| // CHECK-RV64-LABEL: define dso_local <vscale x 4 x double> @test_vle64ff_v_f64m4_m |
| // CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { <vscale x 4 x double>, i64 } @llvm.riscv.vleff.mask.nxv4f64.i64(<vscale x 4 x double> poison, ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3) |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { <vscale x 4 x double>, i64 } [[TMP0]], 0 |
| // CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 4 x double>, i64 } [[TMP0]], 1 |
| // CHECK-RV64-NEXT: store i64 [[TMP2]], ptr [[NEW_VL]], align 8 |
| // CHECK-RV64-NEXT: ret <vscale x 4 x double> [[TMP1]] |
| // |
| vfloat64m4_t test_vle64ff_v_f64m4_m(vbool16_t mask, const double *base, size_t *new_vl, size_t vl) { |
| return __riscv_vle64ff(mask, base, new_vl, vl); |
| } |
| |
| // CHECK-RV64-LABEL: define dso_local <vscale x 8 x double> @test_vle64ff_v_f64m8_m |
| // CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { <vscale x 8 x double>, i64 } @llvm.riscv.vleff.mask.nxv8f64.i64(<vscale x 8 x double> poison, ptr [[BASE]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3) |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { <vscale x 8 x double>, i64 } [[TMP0]], 0 |
| // CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 8 x double>, i64 } [[TMP0]], 1 |
| // CHECK-RV64-NEXT: store i64 [[TMP2]], ptr [[NEW_VL]], align 8 |
| // CHECK-RV64-NEXT: ret <vscale x 8 x double> [[TMP1]] |
| // |
| vfloat64m8_t test_vle64ff_v_f64m8_m(vbool8_t mask, const double *base, size_t *new_vl, size_t vl) { |
| return __riscv_vle64ff(mask, base, new_vl, vl); |
| } |
| |
| // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vle64ff_v_i64m1_m |
| // CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { <vscale x 1 x i64>, i64 } @llvm.riscv.vleff.mask.nxv1i64.i64(<vscale x 1 x i64> poison, ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3) |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { <vscale x 1 x i64>, i64 } [[TMP0]], 0 |
| // CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 1 x i64>, i64 } [[TMP0]], 1 |
| // CHECK-RV64-NEXT: store i64 [[TMP2]], ptr [[NEW_VL]], align 8 |
| // CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP1]] |
| // |
| vint64m1_t test_vle64ff_v_i64m1_m(vbool64_t mask, const int64_t *base, size_t *new_vl, size_t vl) { |
| return __riscv_vle64ff(mask, base, new_vl, vl); |
| } |
| |
| // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vle64ff_v_i64m2_m |
| // CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { <vscale x 2 x i64>, i64 } @llvm.riscv.vleff.mask.nxv2i64.i64(<vscale x 2 x i64> poison, ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3) |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { <vscale x 2 x i64>, i64 } [[TMP0]], 0 |
| // CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 2 x i64>, i64 } [[TMP0]], 1 |
| // CHECK-RV64-NEXT: store i64 [[TMP2]], ptr [[NEW_VL]], align 8 |
| // CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP1]] |
| // |
| vint64m2_t test_vle64ff_v_i64m2_m(vbool32_t mask, const int64_t *base, size_t *new_vl, size_t vl) { |
| return __riscv_vle64ff(mask, base, new_vl, vl); |
| } |
| |
| // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vle64ff_v_i64m4_m |
| // CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { <vscale x 4 x i64>, i64 } @llvm.riscv.vleff.mask.nxv4i64.i64(<vscale x 4 x i64> poison, ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3) |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { <vscale x 4 x i64>, i64 } [[TMP0]], 0 |
| // CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 4 x i64>, i64 } [[TMP0]], 1 |
| // CHECK-RV64-NEXT: store i64 [[TMP2]], ptr [[NEW_VL]], align 8 |
| // CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP1]] |
| // |
| vint64m4_t test_vle64ff_v_i64m4_m(vbool16_t mask, const int64_t *base, size_t *new_vl, size_t vl) { |
| return __riscv_vle64ff(mask, base, new_vl, vl); |
| } |
| |
| // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vle64ff_v_i64m8_m |
| // CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { <vscale x 8 x i64>, i64 } @llvm.riscv.vleff.mask.nxv8i64.i64(<vscale x 8 x i64> poison, ptr [[BASE]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3) |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { <vscale x 8 x i64>, i64 } [[TMP0]], 0 |
| // CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 8 x i64>, i64 } [[TMP0]], 1 |
| // CHECK-RV64-NEXT: store i64 [[TMP2]], ptr [[NEW_VL]], align 8 |
| // CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP1]] |
| // |
| vint64m8_t test_vle64ff_v_i64m8_m(vbool8_t mask, const int64_t *base, size_t *new_vl, size_t vl) { |
| return __riscv_vle64ff(mask, base, new_vl, vl); |
| } |
| |
| // CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vle64ff_v_u64m1_m |
| // CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { <vscale x 1 x i64>, i64 } @llvm.riscv.vleff.mask.nxv1i64.i64(<vscale x 1 x i64> poison, ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3) |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { <vscale x 1 x i64>, i64 } [[TMP0]], 0 |
| // CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 1 x i64>, i64 } [[TMP0]], 1 |
| // CHECK-RV64-NEXT: store i64 [[TMP2]], ptr [[NEW_VL]], align 8 |
| // CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP1]] |
| // |
| vuint64m1_t test_vle64ff_v_u64m1_m(vbool64_t mask, const uint64_t *base, size_t *new_vl, size_t vl) { |
| return __riscv_vle64ff(mask, base, new_vl, vl); |
| } |
| |
| // CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vle64ff_v_u64m2_m |
| // CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { <vscale x 2 x i64>, i64 } @llvm.riscv.vleff.mask.nxv2i64.i64(<vscale x 2 x i64> poison, ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3) |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { <vscale x 2 x i64>, i64 } [[TMP0]], 0 |
| // CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 2 x i64>, i64 } [[TMP0]], 1 |
| // CHECK-RV64-NEXT: store i64 [[TMP2]], ptr [[NEW_VL]], align 8 |
| // CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP1]] |
| // |
| vuint64m2_t test_vle64ff_v_u64m2_m(vbool32_t mask, const uint64_t *base, size_t *new_vl, size_t vl) { |
| return __riscv_vle64ff(mask, base, new_vl, vl); |
| } |
| |
| // CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vle64ff_v_u64m4_m |
| // CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { <vscale x 4 x i64>, i64 } @llvm.riscv.vleff.mask.nxv4i64.i64(<vscale x 4 x i64> poison, ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3) |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { <vscale x 4 x i64>, i64 } [[TMP0]], 0 |
| // CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 4 x i64>, i64 } [[TMP0]], 1 |
| // CHECK-RV64-NEXT: store i64 [[TMP2]], ptr [[NEW_VL]], align 8 |
| // CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP1]] |
| // |
| vuint64m4_t test_vle64ff_v_u64m4_m(vbool16_t mask, const uint64_t *base, size_t *new_vl, size_t vl) { |
| return __riscv_vle64ff(mask, base, new_vl, vl); |
| } |
| |
| // CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vle64ff_v_u64m8_m |
| // CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = call { <vscale x 8 x i64>, i64 } @llvm.riscv.vleff.mask.nxv8i64.i64(<vscale x 8 x i64> poison, ptr [[BASE]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3) |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = extractvalue { <vscale x 8 x i64>, i64 } [[TMP0]], 0 |
| // CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 8 x i64>, i64 } [[TMP0]], 1 |
| // CHECK-RV64-NEXT: store i64 [[TMP2]], ptr [[NEW_VL]], align 8 |
| // CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP1]] |
| // |
| vuint64m8_t test_vle64ff_v_u64m8_m(vbool8_t mask, const uint64_t *base, size_t *new_vl, size_t vl) { |
| return __riscv_vle64ff(mask, base, new_vl, vl); |
| } |
| |