Diffstat (limited to 'test/CodeGen')
 test/CodeGen/MIR/X86/frame-info-save-restore-points.mir |   6 +-
 test/CodeGen/X86/GlobalISel/irtranslator-callingconv.ll | 110 +-
 test/CodeGen/X86/ipra-reg-usage.ll                      |   2 +-
 test/CodeGen/X86/movtopush.mir                          |  24 +-
 test/CodeGen/X86/tail-call-conditional.mir              |   2 +-
 test/CodeGen/X86/x32-cet-intrinsics.ll                  | 106 +
 test/CodeGen/X86/x64-cet-intrinsics.ll                  | 150 +
 7 files changed, 328 insertions(+), 72 deletions(-)
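
This is the CET (Control-flow Enforcement Technology) shadow-stack change: X86 call-frame pseudo-instructions and calls now carry the shadow stack pointer %ssp as an implicit operand, and two new test files cover the shadow-stack intrinsics. As a minimal sketch — not part of this diff, and assuming a target with the +shstk attribute used in the RUN lines below — the read/increment intrinsics declared in the new tests combine like this:

; Sketch only: read the shadow stack pointer, then pop one 8-byte entry.
declare i64 @llvm.x86.rdsspq(i64)
declare void @llvm.x86.incsspq(i64)

define i64 @read_and_pop_ssp() {
entry:
  ; RDSSP leaves its operand unchanged when shadow stacks are inactive,
  ; so seeding with 0 lets the caller detect that case.
  %ssp = call i64 @llvm.x86.rdsspq(i64 0)
  ; INCSSPQ advances SSP by 8 bytes per unit, discarding one entry.
  call void @llvm.x86.incsspq(i64 1)
  ret i64 %ssp
}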
diff --git a/test/CodeGen/MIR/X86/frame-info-save-restore-points.mir b/test/CodeGen/MIR/X86/frame-info-save-restore-points.mir
index 14bb5db5a51..d9b117bd9c2 100644
--- a/test/CodeGen/MIR/X86/frame-info-save-restore-points.mir
+++ b/test/CodeGen/MIR/X86/frame-info-save-restore-points.mir
@@ -60,11 +60,11 @@ body: |
liveins: %eax
MOV32mr %stack.0.tmp, 1, _, 0, _, killed %eax
- ADJCALLSTACKDOWN64 0, 0, 0, implicit-def %rsp, implicit-def dead %eflags, implicit %rsp
+ ADJCALLSTACKDOWN64 0, 0, 0, implicit-def %rsp, implicit-def %ssp, implicit-def dead %eflags, implicit %rsp, implicit %ssp
%rsi = LEA64r %stack.0.tmp, 1, _, 0, _
%edi = MOV32r0 implicit-def dead %eflags
- CALL64pcrel32 @doSomething, csr_64, implicit %rsp, implicit %edi, implicit %rsi, implicit-def %rsp, implicit-def %eax
- ADJCALLSTACKUP64 0, 0, implicit-def %rsp, implicit-def dead %eflags, implicit %rsp
+ CALL64pcrel32 @doSomething, csr_64, implicit %rsp, implicit %ssp, implicit %edi, implicit %rsi, implicit-def %rsp, implicit-def %ssp, implicit-def %eax
+ ADJCALLSTACKUP64 0, 0, implicit-def %rsp, implicit-def %ssp, implicit-def dead %eflags, implicit %rsp, implicit %ssp
bb.3.false:
liveins: %eax
diff --git a/test/CodeGen/X86/GlobalISel/irtranslator-callingconv.ll b/test/CodeGen/X86/GlobalISel/irtranslator-callingconv.ll
index 4cc2ee566a5..0b906e7a985 100644
--- a/test/CodeGen/X86/GlobalISel/irtranslator-callingconv.ll
+++ b/test/CodeGen/X86/GlobalISel/irtranslator-callingconv.ll
@@ -403,14 +403,14 @@ declare void @trivial_callee()
define void @test_trivial_call() {
; ALL-LABEL: name: test_trivial_call
-; X32: ADJCALLSTACKDOWN32 0, 0, 0, implicit-def %esp, implicit-def %eflags, implicit %esp
+; X32: ADJCALLSTACKDOWN32 0, 0, 0, implicit-def %esp, implicit-def %eflags, implicit-def %ssp, implicit %esp, implicit %ssp
; X32-NEXT: CALLpcrel32 @trivial_callee, csr_32, implicit %esp
-; X32-NEXT: ADJCALLSTACKUP32 0, 0, implicit-def %esp, implicit-def %eflags, implicit %esp
+; X32-NEXT: ADJCALLSTACKUP32 0, 0, implicit-def %esp, implicit-def %eflags, implicit-def %ssp, implicit %esp, implicit %ssp
; X32-NEXT: RET 0
-; X64: ADJCALLSTACKDOWN64 0, 0, 0, implicit-def %rsp, implicit-def %eflags, implicit %rsp
+; X64: ADJCALLSTACKDOWN64 0, 0, 0, implicit-def %rsp, implicit-def %eflags, implicit-def %ssp, implicit %rsp, implicit %ssp
; X64-NEXT: CALL64pcrel32 @trivial_callee, csr_64, implicit %rsp
-; X64-NEXT: ADJCALLSTACKUP64 0, 0, implicit-def %rsp, implicit-def %eflags, implicit %rsp
+; X64-NEXT: ADJCALLSTACKUP64 0, 0, implicit-def %rsp, implicit-def %eflags, implicit-def %ssp, implicit %rsp, implicit %ssp
; X64-NEXT: RET 0
call void @trivial_callee()
@@ -432,7 +432,7 @@ define void @test_simple_arg(i32 %in0, i32 %in1) {
; X32-NEXT: %0:_(s32) = G_LOAD %2(p0) :: (invariant load 4 from %fixed-stack.1, align 0)
; X32-NEXT: %3:_(p0) = G_FRAME_INDEX %fixed-stack.0
; X32-NEXT: %1:_(s32) = G_LOAD %3(p0) :: (invariant load 4 from %fixed-stack.0, align 0)
-; X32-NEXT: ADJCALLSTACKDOWN32 8, 0, 0, implicit-def %esp, implicit-def %eflags, implicit %esp
+; X32-NEXT: ADJCALLSTACKDOWN32 8, 0, 0, implicit-def %esp, implicit-def %eflags, implicit-def %ssp, implicit %esp, implicit %ssp
; X32-NEXT: %4:_(p0) = COPY %esp
; X32-NEXT: %5:_(s32) = G_CONSTANT i32 0
; X32-NEXT: %6:_(p0) = G_GEP %4, %5(s32)
@@ -442,16 +442,16 @@ define void @test_simple_arg(i32 %in0, i32 %in1) {
; X32-NEXT: %9:_(p0) = G_GEP %7, %8(s32)
; X32-NEXT: G_STORE %0(s32), %9(p0) :: (store 4 into stack + 4, align 0)
; X32-NEXT: CALLpcrel32 @simple_arg_callee, csr_32, implicit %esp
-; X32-NEXT: ADJCALLSTACKUP32 8, 0, implicit-def %esp, implicit-def %eflags, implicit %esp
+; X32-NEXT: ADJCALLSTACKUP32 8, 0, implicit-def %esp, implicit-def %eflags, implicit-def %ssp, implicit %esp, implicit %ssp
; X32-NEXT: RET 0
; X64: %0:_(s32) = COPY %edi
; X64-NEXT: %1:_(s32) = COPY %esi
-; X64-NEXT: ADJCALLSTACKDOWN64 0, 0, 0, implicit-def %rsp, implicit-def %eflags, implicit %rsp
+; X64-NEXT: ADJCALLSTACKDOWN64 0, 0, 0, implicit-def %rsp, implicit-def %eflags, implicit-def %ssp, implicit %rsp, implicit %ssp
; X64-NEXT: %edi = COPY %1(s32)
; X64-NEXT: %esi = COPY %0(s32)
-; X64-NEXT: CALL64pcrel32 @simple_arg_callee, csr_64, implicit %rsp, implicit %edi, implicit %esi
-; X64-NEXT: ADJCALLSTACKUP64 0, 0, implicit-def %rsp, implicit-def %eflags, implicit %rsp
+; X64-NEXT: CALL64pcrel32 @simple_arg_callee, csr_64, implicit %rsp, implicit %ssp, implicit %edi, implicit %esi
+; X64-NEXT: ADJCALLSTACKUP64 0, 0, implicit-def %rsp, implicit-def %eflags, implicit-def %ssp, implicit %rsp, implicit %ssp
; X64-NEXT: RET 0
call void @simple_arg_callee(i32 %in1, i32 %in0)
@@ -469,7 +469,7 @@ define void @test_simple_arg8_call(i32 %in0) {
; X32-NEXT: bb.1 (%ir-block.0):
; X32-NEXT: %1:_(p0) = G_FRAME_INDEX %fixed-stack.0
; X32-NEXT: %0:_(s32) = G_LOAD %1(p0) :: (invariant load 4 from %fixed-stack.0, align 0)
-; X32-NEXT: ADJCALLSTACKDOWN32 32, 0, 0, implicit-def %esp, implicit-def %eflags, implicit %esp
+; X32-NEXT: ADJCALLSTACKDOWN32 32, 0, 0, implicit-def %esp, implicit-def %eflags, implicit-def %ssp, implicit %esp, implicit %ssp
; X32-NEXT: %2:_(p0) = COPY %esp
; X32-NEXT: %3:_(s32) = G_CONSTANT i32 0
; X32-NEXT: %4:_(p0) = G_GEP %2, %3(s32)
@@ -503,11 +503,11 @@ define void @test_simple_arg8_call(i32 %in0) {
; X32-NEXT: %25:_(p0) = G_GEP %23, %24(s32)
; X32-NEXT: G_STORE %0(s32), %25(p0) :: (store 4 into stack + 28, align 0)
; X32-NEXT: CALLpcrel32 @simple_arg8_callee, csr_32, implicit %esp
-; X32-NEXT: ADJCALLSTACKUP32 32, 0, implicit-def %esp, implicit-def %eflags, implicit %esp
+; X32-NEXT: ADJCALLSTACKUP32 32, 0, implicit-def %esp, implicit-def %eflags, implicit-def %ssp, implicit %esp, implicit %ssp
; X32-NEXT: RET 0
; X64: %0:_(s32) = COPY %edi
-; X64-NEXT: ADJCALLSTACKDOWN64 16, 0, 0, implicit-def %rsp, implicit-def %eflags, implicit %rsp
+; X64-NEXT: ADJCALLSTACKDOWN64 16, 0, 0, implicit-def %rsp, implicit-def %eflags, implicit-def %ssp, implicit %rsp, implicit %ssp
; X64-NEXT: %edi = COPY %0(s32)
; X64-NEXT: %esi = COPY %0(s32)
; X64-NEXT: %edx = COPY %0(s32)
@@ -522,8 +522,8 @@ define void @test_simple_arg8_call(i32 %in0) {
; X64-NEXT: %5:_(s64) = G_CONSTANT i64 8
; X64-NEXT: %6:_(p0) = G_GEP %4, %5(s64)
; X64-NEXT: G_STORE %0(s32), %6(p0) :: (store 4 into stack + 8, align 0)
-; X64-NEXT: CALL64pcrel32 @simple_arg8_callee, csr_64, implicit %rsp, implicit %edi, implicit %esi, implicit %edx, implicit %ecx, implicit %r8d, implicit %r9d
-; X64-NEXT: ADJCALLSTACKUP64 16, 0, implicit-def %rsp, implicit-def %eflags, implicit %rsp
+; X64-NEXT: CALL64pcrel32 @simple_arg8_callee, csr_64, implicit %rsp, implicit %ssp, implicit %edi, implicit %esi, implicit %edx, implicit %ecx, implicit %r8d, implicit %r9d
+; X64-NEXT: ADJCALLSTACKUP64 16, 0, implicit-def %rsp, implicit-def %eflags, implicit-def %ssp, implicit %rsp, implicit %ssp
; X64-NEXT: RET 0
call void @simple_arg8_callee(i32 %in0, i32 %in0, i32 %in0, i32 %in0, i32 %in0, i32 %in0, i32 %in0, i32 %in0)
@@ -535,24 +535,24 @@ define i32 @test_simple_return_callee() {
; ALL-LABEL: name: test_simple_return_callee
; X32: %1:_(s32) = G_CONSTANT i32 5
-; X32-NEXT: ADJCALLSTACKDOWN32 4, 0, 0, implicit-def %esp, implicit-def %eflags, implicit %esp
+; X32-NEXT: ADJCALLSTACKDOWN32 4, 0, 0, implicit-def %esp, implicit-def %eflags, implicit-def %ssp, implicit %esp, implicit %ssp
; X32-NEXT: %2:_(p0) = COPY %esp
; X32-NEXT: %3:_(s32) = G_CONSTANT i32 0
; X32-NEXT: %4:_(p0) = G_GEP %2, %3(s32)
; X32-NEXT: G_STORE %1(s32), %4(p0) :: (store 4 into stack, align 0)
-; X32-NEXT: CALLpcrel32 @simple_return_callee, csr_32, implicit %esp, implicit-def %eax
+; X32-NEXT: CALLpcrel32 @simple_return_callee, csr_32, implicit %esp, implicit %ssp, implicit-def %eax
; X32-NEXT: %0:_(s32) = COPY %eax
-; X32-NEXT: ADJCALLSTACKUP32 4, 0, implicit-def %esp, implicit-def %eflags, implicit %esp
+; X32-NEXT: ADJCALLSTACKUP32 4, 0, implicit-def %esp, implicit-def %eflags, implicit-def %ssp, implicit %esp, implicit %ssp
; X32-NEXT: %5:_(s32) = G_ADD %0, %0
; X32-NEXT: %eax = COPY %5(s32)
; X32-NEXT: RET 0, implicit %eax
; X64: %1:_(s32) = G_CONSTANT i32 5
-; X64-NEXT: ADJCALLSTACKDOWN64 0, 0, 0, implicit-def %rsp, implicit-def %eflags, implicit %rsp
+; X64-NEXT: ADJCALLSTACKDOWN64 0, 0, 0, implicit-def %rsp, implicit-def %eflags, implicit-def %ssp, implicit %rsp, implicit %ssp
; X64-NEXT: %edi = COPY %1(s32)
-; X64-NEXT: CALL64pcrel32 @simple_return_callee, csr_64, implicit %rsp, implicit %edi, implicit-def %eax
+; X64-NEXT: CALL64pcrel32 @simple_return_callee, csr_64, implicit %rsp, implicit %ssp, implicit %edi, implicit-def %eax
; X64-NEXT: %0:_(s32) = COPY %eax
-; X64-NEXT: ADJCALLSTACKUP64 0, 0, implicit-def %rsp, implicit-def %eflags, implicit %rsp
+; X64-NEXT: ADJCALLSTACKUP64 0, 0, implicit-def %rsp, implicit-def %eflags, implicit-def %ssp, implicit %rsp, implicit %ssp
; X64-NEXT: %2:_(s32) = G_ADD %0, %0
; X64-NEXT: %eax = COPY %2(s32)
; X64-NEXT: RET 0, implicit %eax
@@ -576,15 +576,15 @@ define <8 x i32> @test_split_return_callee(<8 x i32> %arg1, <8 x i32> %arg2) {
; X32-NEXT: %5:_(<4 x s32>) = G_LOAD %6(p0) :: (invariant load 16 from %fixed-stack.0, align 0)
; X32-NEXT: %0:_(<8 x s32>) = G_MERGE_VALUES %2(<4 x s32>), %3(<4 x s32>)
; X32-NEXT: %1:_(<8 x s32>) = G_MERGE_VALUES %4(<4 x s32>), %5(<4 x s32>)
-; X32-NEXT: ADJCALLSTACKDOWN32 0, 0, 0, implicit-def %esp, implicit-def %eflags, implicit %esp
+; X32-NEXT: ADJCALLSTACKDOWN32 0, 0, 0, implicit-def %esp, implicit-def %eflags, implicit-def %ssp, implicit %esp, implicit %ssp
; X32-NEXT: %8:_(<4 x s32>), %9:_(<4 x s32>) = G_UNMERGE_VALUES %1(<8 x s32>)
; X32-NEXT: %xmm0 = COPY %8(<4 x s32>)
; X32-NEXT: %xmm1 = COPY %9(<4 x s32>)
-; X32-NEXT: CALLpcrel32 @split_return_callee, csr_32, implicit %esp, implicit %xmm0, implicit %xmm1, implicit-def %xmm0, implicit-def %xmm1
+; X32-NEXT: CALLpcrel32 @split_return_callee, csr_32, implicit %esp, implicit %ssp, implicit %xmm0, implicit %xmm1, implicit-def %xmm0, implicit-def %xmm1
; X32-NEXT: %10:_(<4 x s32>) = COPY %xmm0
; X32-NEXT: %11:_(<4 x s32>) = COPY %xmm1
; X32-NEXT: %7:_(<8 x s32>) = G_MERGE_VALUES %10(<4 x s32>), %11(<4 x s32>)
-; X32-NEXT: ADJCALLSTACKUP32 0, 0, implicit-def %esp, implicit-def %eflags, implicit %esp
+; X32-NEXT: ADJCALLSTACKUP32 0, 0, implicit-def %esp, implicit-def %eflags, implicit-def %ssp, implicit %esp, implicit %ssp
; X32-NEXT: %12:_(<8 x s32>) = G_ADD %0, %7
; X32-NEXT: %13:_(<4 x s32>), %14:_(<4 x s32>) = G_UNMERGE_VALUES %12(<8 x s32>)
; X32-NEXT: %xmm0 = COPY %13(<4 x s32>)
@@ -597,15 +597,15 @@ define <8 x i32> @test_split_return_callee(<8 x i32> %arg1, <8 x i32> %arg2) {
; X64-NEXT: %5:_(<4 x s32>) = COPY %xmm3
; X64-NEXT: %0:_(<8 x s32>) = G_MERGE_VALUES %2(<4 x s32>), %3(<4 x s32>)
; X64-NEXT: %1:_(<8 x s32>) = G_MERGE_VALUES %4(<4 x s32>), %5(<4 x s32>)
-; X64-NEXT: ADJCALLSTACKDOWN64 0, 0, 0, implicit-def %rsp, implicit-def %eflags, implicit %rsp
+; X64-NEXT: ADJCALLSTACKDOWN64 0, 0, 0, implicit-def %rsp, implicit-def %eflags, implicit-def %ssp, implicit %rsp, implicit %ssp
; X64-NEXT: %7:_(<4 x s32>), %8:_(<4 x s32>) = G_UNMERGE_VALUES %1(<8 x s32>)
; X64-NEXT: %xmm0 = COPY %7(<4 x s32>)
; X64-NEXT: %xmm1 = COPY %8(<4 x s32>)
-; X64-NEXT: CALL64pcrel32 @split_return_callee, csr_64, implicit %rsp, implicit %xmm0, implicit %xmm1, implicit-def %xmm0, implicit-def %xmm1
+; X64-NEXT: CALL64pcrel32 @split_return_callee, csr_64, implicit %rsp, implicit %ssp, implicit %xmm0, implicit %xmm1, implicit-def %xmm0, implicit-def %xmm1
; X64-NEXT: %9:_(<4 x s32>) = COPY %xmm0
; X64-NEXT: %10:_(<4 x s32>) = COPY %xmm1
; X64-NEXT: %6:_(<8 x s32>) = G_MERGE_VALUES %9(<4 x s32>), %10(<4 x s32>)
-; X64-NEXT: ADJCALLSTACKUP64 0, 0, implicit-def %rsp, implicit-def %eflags, implicit %rsp
+; X64-NEXT: ADJCALLSTACKUP64 0, 0, implicit-def %rsp, implicit-def %eflags, implicit-def %ssp, implicit %rsp, implicit %ssp
; X64-NEXT: %11:_(<8 x s32>) = G_ADD %0, %6
; X64-NEXT: %12:_(<4 x s32>), %13:_(<4 x s32>) = G_UNMERGE_VALUES %11(<8 x s32>)
; X64-NEXT: %xmm0 = COPY %12(<4 x s32>)
@@ -625,17 +625,17 @@ define void @test_indirect_call(void()* %func) {
; X32-NEXT: - { id: 1, class: _, preferred-register: '' }
; X32: %1:_(p0) = G_FRAME_INDEX %fixed-stack.0
; X32-NEXT: %0:gr32(p0) = G_LOAD %1(p0) :: (invariant load 4 from %fixed-stack.0, align 0)
-; X32-NEXT: ADJCALLSTACKDOWN32 0, 0, 0, implicit-def %esp, implicit-def %eflags, implicit %esp
+; X32-NEXT: ADJCALLSTACKDOWN32 0, 0, 0, implicit-def %esp, implicit-def %eflags, implicit-def %ssp, implicit %esp, implicit %ssp
; X32-NEXT: CALL32r %0(p0), csr_32, implicit %esp
-; X32-NEXT: ADJCALLSTACKUP32 0, 0, implicit-def %esp, implicit-def %eflags, implicit %esp
+; X32-NEXT: ADJCALLSTACKUP32 0, 0, implicit-def %esp, implicit-def %eflags, implicit-def %ssp, implicit %esp, implicit %ssp
; X32-NEXT: RET 0
; X64: registers:
; X64-NEXT: - { id: 0, class: gr64, preferred-register: '' }
; X64: %0:gr64(p0) = COPY %rdi
-; X64-NEXT: ADJCALLSTACKDOWN64 0, 0, 0, implicit-def %rsp, implicit-def %eflags, implicit %rsp
+; X64-NEXT: ADJCALLSTACKDOWN64 0, 0, 0, implicit-def %rsp, implicit-def %eflags, implicit-def %ssp, implicit %rsp, implicit %ssp
; X64-NEXT: CALL64r %0(p0), csr_64, implicit %rsp
-; X64-NEXT: ADJCALLSTACKUP64 0, 0, implicit-def %rsp, implicit-def %eflags, implicit %rsp
+; X64-NEXT: ADJCALLSTACKUP64 0, 0, implicit-def %rsp, implicit-def %eflags, implicit-def %ssp, implicit %rsp, implicit %ssp
; X64-NEXT: RET 0
call void %func()
@@ -653,49 +653,49 @@ define void @test_abi_exts_call(i8* %addr) {
; X32: %1:_(p0) = G_FRAME_INDEX %fixed-stack.0
; X32-NEXT: %0:_(p0) = G_LOAD %1(p0) :: (invariant load 4 from %fixed-stack.0, align 0)
; X32-NEXT: %2:_(s8) = G_LOAD %0(p0) :: (load 1 from %ir.addr)
-; X32-NEXT: ADJCALLSTACKDOWN32 4, 0, 0, implicit-def %esp, implicit-def %eflags, implicit %esp
+; X32-NEXT: ADJCALLSTACKDOWN32 4, 0, 0, implicit-def %esp, implicit-def %eflags, implicit-def %ssp, implicit %esp, implicit %ssp
; X32-NEXT: %3:_(p0) = COPY %esp
; X32-NEXT: %4:_(s32) = G_CONSTANT i32 0
; X32-NEXT: %5:_(p0) = G_GEP %3, %4(s32)
; X32-NEXT: %6:_(s32) = G_ANYEXT %2(s8)
; X32-NEXT: G_STORE %6(s32), %5(p0) :: (store 4 into stack, align 0)
; X32-NEXT: CALLpcrel32 @take_char, csr_32, implicit %esp
-; X32-NEXT: ADJCALLSTACKUP32 4, 0, implicit-def %esp, implicit-def %eflags, implicit %esp
-; X32-NEXT: ADJCALLSTACKDOWN32 4, 0, 0, implicit-def %esp, implicit-def %eflags, implicit %esp
+; X32-NEXT: ADJCALLSTACKUP32 4, 0, implicit-def %esp, implicit-def %eflags, implicit-def %ssp, implicit %esp, implicit %ssp
+; X32-NEXT: ADJCALLSTACKDOWN32 4, 0, 0, implicit-def %esp, implicit-def %eflags, implicit-def %ssp, implicit %esp, implicit %ssp
; X32-NEXT: %7:_(p0) = COPY %esp
; X32-NEXT: %8:_(s32) = G_CONSTANT i32 0
; X32-NEXT: %9:_(p0) = G_GEP %7, %8(s32)
; X32-NEXT: %10:_(s32) = G_SEXT %2(s8)
; X32-NEXT: G_STORE %10(s32), %9(p0) :: (store 4 into stack, align 0)
; X32-NEXT: CALLpcrel32 @take_char, csr_32, implicit %esp
-; X32-NEXT: ADJCALLSTACKUP32 4, 0, implicit-def %esp, implicit-def %eflags, implicit %esp
-; X32-NEXT: ADJCALLSTACKDOWN32 4, 0, 0, implicit-def %esp, implicit-def %eflags, implicit %esp
+; X32-NEXT: ADJCALLSTACKUP32 4, 0, implicit-def %esp, implicit-def %eflags, implicit-def %ssp, implicit %esp, implicit %ssp
+; X32-NEXT: ADJCALLSTACKDOWN32 4, 0, 0, implicit-def %esp, implicit-def %eflags, implicit-def %ssp, implicit %esp, implicit %ssp
; X32-NEXT: %11:_(p0) = COPY %esp
; X32-NEXT: %12:_(s32) = G_CONSTANT i32 0
; X32-NEXT: %13:_(p0) = G_GEP %11, %12(s32)
; X32-NEXT: %14:_(s32) = G_ZEXT %2(s8)
; X32-NEXT: G_STORE %14(s32), %13(p0) :: (store 4 into stack, align 0)
; X32-NEXT: CALLpcrel32 @take_char, csr_32, implicit %esp
-; X32-NEXT: ADJCALLSTACKUP32 4, 0, implicit-def %esp, implicit-def %eflags, implicit %esp
+; X32-NEXT: ADJCALLSTACKUP32 4, 0, implicit-def %esp, implicit-def %eflags, implicit-def %ssp, implicit %esp, implicit %ssp
; X32-NEXT: RET 0
; X64: %0:_(p0) = COPY %rdi
; X64-NEXT: %1:_(s8) = G_LOAD %0(p0) :: (load 1 from %ir.addr)
-; X64-NEXT: ADJCALLSTACKDOWN64 0, 0, 0, implicit-def %rsp, implicit-def %eflags, implicit %rsp
+; X64-NEXT: ADJCALLSTACKDOWN64 0, 0, 0, implicit-def %rsp, implicit-def %eflags, implicit-def %ssp, implicit %rsp, implicit %ssp
; X64-NEXT: %2:_(s32) = G_ANYEXT %1(s8)
; X64-NEXT: %edi = COPY %2(s32)
-; X64-NEXT: CALL64pcrel32 @take_char, csr_64, implicit %rsp, implicit %edi
-; X64-NEXT: ADJCALLSTACKUP64 0, 0, implicit-def %rsp, implicit-def %eflags, implicit %rsp
-; X64-NEXT: ADJCALLSTACKDOWN64 0, 0, 0, implicit-def %rsp, implicit-def %eflags, implicit %rsp
+; X64-NEXT: CALL64pcrel32 @take_char, csr_64, implicit %rsp, implicit %ssp, implicit %edi
+; X64-NEXT: ADJCALLSTACKUP64 0, 0, implicit-def %rsp, implicit-def %eflags, implicit-def %ssp, implicit %rsp, implicit %ssp
+; X64-NEXT: ADJCALLSTACKDOWN64 0, 0, 0, implicit-def %rsp, implicit-def %eflags, implicit-def %ssp, implicit %rsp, implicit %ssp
; X64-NEXT: %3:_(s32) = G_SEXT %1(s8)
; X64-NEXT: %edi = COPY %3(s32)
-; X64-NEXT: CALL64pcrel32 @take_char, csr_64, implicit %rsp, implicit %edi
-; X64-NEXT: ADJCALLSTACKUP64 0, 0, implicit-def %rsp, implicit-def %eflags, implicit %rsp
-; X64-NEXT: ADJCALLSTACKDOWN64 0, 0, 0, implicit-def %rsp, implicit-def %eflags, implicit %rsp
+; X64-NEXT: CALL64pcrel32 @take_char, csr_64, implicit %rsp, implicit %ssp, implicit %edi
+; X64-NEXT: ADJCALLSTACKUP64 0, 0, implicit-def %rsp, implicit-def %eflags, implicit-def %ssp, implicit %rsp, implicit %ssp
+; X64-NEXT: ADJCALLSTACKDOWN64 0, 0, 0, implicit-def %rsp, implicit-def %eflags, implicit-def %ssp, implicit %rsp, implicit %ssp
; X64-NEXT: %4:_(s32) = G_ZEXT %1(s8)
; X64-NEXT: %edi = COPY %4(s32)
-; X64-NEXT: CALL64pcrel32 @take_char, csr_64, implicit %rsp, implicit %edi
-; X64-NEXT: ADJCALLSTACKUP64 0, 0, implicit-def %rsp, implicit-def %eflags, implicit %rsp
+; X64-NEXT: CALL64pcrel32 @take_char, csr_64, implicit %rsp, implicit %ssp, implicit %edi
+; X64-NEXT: ADJCALLSTACKUP64 0, 0, implicit-def %rsp, implicit-def %eflags, implicit-def %ssp, implicit %rsp, implicit %ssp
; X64-NEXT: RET 0
%val = load i8, i8* %addr
@@ -720,7 +720,7 @@ define void @test_variadic_call_1(i8** %addr_ptr, i32* %val_ptr) {
; X32-NEXT: %1:_(p0) = G_LOAD %3(p0) :: (invariant load 4 from %fixed-stack.0, align 0)
; X32-NEXT: %4:_(p0) = G_LOAD %0(p0) :: (load 4 from %ir.addr_ptr)
; X32-NEXT: %5:_(s32) = G_LOAD %1(p0) :: (load 4 from %ir.val_ptr)
-; X32-NEXT: ADJCALLSTACKDOWN32 8, 0, 0, implicit-def %esp, implicit-def %eflags, implicit %esp
+; X32-NEXT: ADJCALLSTACKDOWN32 8, 0, 0, implicit-def %esp, implicit-def %eflags, implicit-def %ssp, implicit %esp, implicit %ssp
; X32-NEXT: %6:_(p0) = COPY %esp
; X32-NEXT: %7:_(s32) = G_CONSTANT i32 0
; X32-NEXT: %8:_(p0) = G_GEP %6, %7(s32)
@@ -730,19 +730,19 @@ define void @test_variadic_call_1(i8** %addr_ptr, i32* %val_ptr) {
; X32-NEXT: %11:_(p0) = G_GEP %9, %10(s32)
; X32-NEXT: G_STORE %5(s32), %11(p0) :: (store 4 into stack + 4, align 0)
; X32-NEXT: CALLpcrel32 @variadic_callee, csr_32, implicit %esp
-; X32-NEXT: ADJCALLSTACKUP32 8, 0, implicit-def %esp, implicit-def %eflags, implicit %esp
+; X32-NEXT: ADJCALLSTACKUP32 8, 0, implicit-def %esp, implicit-def %eflags, implicit-def %ssp, implicit %esp, implicit %ssp
; X32-NEXT: RET 0
; X64: %0:_(p0) = COPY %rdi
; X64-NEXT: %1:_(p0) = COPY %rsi
; X64-NEXT: %2:_(p0) = G_LOAD %0(p0) :: (load 8 from %ir.addr_ptr)
; X64-NEXT: %3:_(s32) = G_LOAD %1(p0) :: (load 4 from %ir.val_ptr)
-; X64-NEXT: ADJCALLSTACKDOWN64 0, 0, 0, implicit-def %rsp, implicit-def %eflags, implicit %rsp
+; X64-NEXT: ADJCALLSTACKDOWN64 0, 0, 0, implicit-def %rsp, implicit-def %eflags, implicit-def %ssp, implicit %rsp, implicit %ssp
; X64-NEXT: %rdi = COPY %2(p0)
; X64-NEXT: %esi = COPY %3(s32)
; X64-NEXT: %al = MOV8ri 0
-; X64-NEXT: CALL64pcrel32 @variadic_callee, csr_64, implicit %rsp, implicit %rdi, implicit %esi, implicit %al
-; X64-NEXT: ADJCALLSTACKUP64 0, 0, implicit-def %rsp, implicit-def %eflags, implicit %rsp
+; X64-NEXT: CALL64pcrel32 @variadic_callee, csr_64, implicit %rsp, implicit %ssp, implicit %rdi, implicit %esi, implicit %al
+; X64-NEXT: ADJCALLSTACKUP64 0, 0, implicit-def %rsp, implicit-def %eflags, implicit-def %ssp, implicit %rsp, implicit %ssp
; X64-NEXT: RET 0
%addr = load i8*, i8** %addr_ptr
@@ -765,7 +765,7 @@ define void @test_variadic_call_2(i8** %addr_ptr, double* %val_ptr) {
; X32-NEXT: %1:_(p0) = G_LOAD %3(p0) :: (invariant load 4 from %fixed-stack.0, align 0)
; X32-NEXT: %4:_(p0) = G_LOAD %0(p0) :: (load 4 from %ir.addr_ptr)
; X32-NEXT: %5:_(s64) = G_LOAD %1(p0) :: (load 8 from %ir.val_ptr, align 4)
-; X32-NEXT: ADJCALLSTACKDOWN32 12, 0, 0, implicit-def %esp, implicit-def %eflags, implicit %esp
+; X32-NEXT: ADJCALLSTACKDOWN32 12, 0, 0, implicit-def %esp, implicit-def %eflags, implicit-def %ssp, implicit %esp, implicit %ssp
; X32-NEXT: %6:_(p0) = COPY %esp
; X32-NEXT: %7:_(s32) = G_CONSTANT i32 0
; X32-NEXT: %8:_(p0) = G_GEP %6, %7(s32)
@@ -775,18 +775,18 @@ define void @test_variadic_call_2(i8** %addr_ptr, double* %val_ptr) {
; X32-NEXT: %11:_(p0) = G_GEP %9, %10(s32)
; X32-NEXT: G_STORE %5(s64), %11(p0) :: (store 8 into stack + 4, align 0)
; X32-NEXT: CALLpcrel32 @variadic_callee, csr_32, implicit %esp
-; X32-NEXT: ADJCALLSTACKUP32 12, 0, implicit-def %esp, implicit-def %eflags, implicit %esp
+; X32-NEXT: ADJCALLSTACKUP32 12, 0, implicit-def %esp, implicit-def %eflags, implicit-def %ssp, implicit %esp, implicit %ssp
; X32-NEXT: RET 0
; X64: %1:_(p0) = COPY %rsi
; X64-NEXT: %2:_(p0) = G_LOAD %0(p0) :: (load 8 from %ir.addr_ptr)
; X64-NEXT: %3:_(s64) = G_LOAD %1(p0) :: (load 8 from %ir.val_ptr)
-; X64-NEXT: ADJCALLSTACKDOWN64 0, 0, 0, implicit-def %rsp, implicit-def %eflags, implicit %rsp
+; X64-NEXT: ADJCALLSTACKDOWN64 0, 0, 0, implicit-def %rsp, implicit-def %eflags, implicit-def %ssp, implicit %rsp, implicit %ssp
; X64-NEXT: %rdi = COPY %2(p0)
; X64-NEXT: %xmm0 = COPY %3(s64)
; X64-NEXT: %al = MOV8ri 1
-; X64-NEXT: CALL64pcrel32 @variadic_callee, csr_64, implicit %rsp, implicit %rdi, implicit %xmm0, implicit %al
-; X64-NEXT: ADJCALLSTACKUP64 0, 0, implicit-def %rsp, implicit-def %eflags, implicit %rsp
+; X64-NEXT: CALL64pcrel32 @variadic_callee, csr_64, implicit %rsp, implicit %ssp, implicit %rdi, implicit %xmm0, implicit %al
+; X64-NEXT: ADJCALLSTACKUP64 0, 0, implicit-def %rsp, implicit-def %eflags, implicit-def %ssp, implicit %rsp, implicit %ssp
; X64-NEXT: RET 0
%addr = load i8*, i8** %addr_ptr
diff --git a/test/CodeGen/X86/ipra-reg-usage.ll b/test/CodeGen/X86/ipra-reg-usage.ll
index ca97472bb82..6a84ab8ab75 100644
--- a/test/CodeGen/X86/ipra-reg-usage.ll
+++ b/test/CodeGen/X86/ipra-reg-usage.ll
@@ -3,7 +3,7 @@
target triple = "x86_64-unknown-unknown"
declare void @bar1()
define preserve_allcc void @foo()#0 {
-; CHECK: foo Clobbered Registers: CS DS EFLAGS EIP EIZ ES FPSW FS GS IP RIP RIZ SS BND0 BND1 BND2 BND3 CR0 CR1 CR2 CR3 CR4 CR5 CR6 CR7 CR8 CR9 CR10 CR11 CR12 CR13 CR14 CR15 DR0 DR1 DR2 DR3 DR4 DR5 DR6 DR7 DR8 DR9 DR10 DR11 DR12 DR13 DR14 DR15 FP0 FP1 FP2 FP3 FP4 FP5 FP6 FP7 K0 K1 K2 K3 K4 K5 K6 K7 MM0 MM1 MM2 MM3 MM4 MM5 MM6 MM7 R11 ST0 ST1 ST2 ST3 ST4 ST5 ST6 ST7 XMM16 XMM17 XMM18 XMM19 XMM20 XMM21 XMM22 XMM23 XMM24 XMM25 XMM26 XMM27 XMM28 XMM29 XMM30 XMM31 YMM0 YMM1 YMM2 YMM3 YMM4 YMM5 YMM6 YMM7 YMM8 YMM9 YMM10 YMM11 YMM12 YMM13 YMM14 YMM15 YMM16 YMM17 YMM18 YMM19 YMM20 YMM21 YMM22 YMM23 YMM24 YMM25 YMM26 YMM27 YMM28 YMM29 YMM30 YMM31 ZMM0 ZMM1 ZMM2 ZMM3 ZMM4 ZMM5 ZMM6 ZMM7 ZMM8 ZMM9 ZMM10 ZMM11 ZMM12 ZMM13 ZMM14 ZMM15 ZMM16 ZMM17 ZMM18 ZMM19 ZMM20 ZMM21 ZMM22 ZMM23 ZMM24 ZMM25 ZMM26 ZMM27 ZMM28 ZMM29 ZMM30 ZMM31 R11B R11D R11W
+; CHECK: foo Clobbered Registers: CS DS EFLAGS EIP EIZ ES FPSW FS GS IP RIP RIZ SS SSP BND0 BND1 BND2 BND3 CR0 CR1 CR2 CR3 CR4 CR5 CR6 CR7 CR8 CR9 CR10 CR11 CR12 CR13 CR14 CR15 DR0 DR1 DR2 DR3 DR4 DR5 DR6 DR7 DR8 DR9 DR10 DR11 DR12 DR13 DR14 DR15 FP0 FP1 FP2 FP3 FP4 FP5 FP6 FP7 K0 K1 K2 K3 K4 K5 K6 K7 MM0 MM1 MM2 MM3 MM4 MM5 MM6 MM7 R11 ST0 ST1 ST2 ST3 ST4 ST5 ST6 ST7 XMM16 XMM17 XMM18 XMM19 XMM20 XMM21 XMM22 XMM23 XMM24 XMM25 XMM26 XMM27 XMM28 XMM29 XMM30 XMM31 YMM0 YMM1 YMM2 YMM3 YMM4 YMM5 YMM6 YMM7 YMM8 YMM9 YMM10 YMM11 YMM12 YMM13 YMM14 YMM15 YMM16 YMM17 YMM18 YMM19 YMM20 YMM21 YMM22 YMM23 YMM24 YMM25 YMM26 YMM27 YMM28 YMM29 YMM30 YMM31 ZMM0 ZMM1 ZMM2 ZMM3 ZMM4 ZMM5 ZMM6 ZMM7 ZMM8 ZMM9 ZMM10 ZMM11 ZMM12 ZMM13 ZMM14 ZMM15 ZMM16 ZMM17 ZMM18 ZMM19 ZMM20 ZMM21 ZMM22 ZMM23 ZMM24 ZMM25 ZMM26 ZMM27 ZMM28 ZMM29 ZMM30 ZMM31 R11B R11D R11W
call void @bar1()
call void @bar2()
ret void
diff --git a/test/CodeGen/X86/movtopush.mir b/test/CodeGen/X86/movtopush.mir
index 4b8fac8d411..95ba9490c31 100644
--- a/test/CodeGen/X86/movtopush.mir
+++ b/test/CodeGen/X86/movtopush.mir
@@ -33,14 +33,14 @@
...
---
# CHECK-LABEL: test9
-# CHECK: ADJCALLSTACKDOWN32 16, 0, 16, implicit-def dead %esp, implicit-def dead %eflags, implicit %esp
+# CHECK: ADJCALLSTACKDOWN32 16, 0, 16, implicit-def dead %esp, implicit-def dead %eflags, implicit-def dead %ssp, implicit %esp, implicit %ssp
# CHECK-NEXT: PUSH32i8 4, implicit-def %esp, implicit %esp
# CHECK-NEXT: PUSH32i8 3, implicit-def %esp, implicit %esp
# CHECK-NEXT: PUSH32i8 2, implicit-def %esp, implicit %esp
# CHECK-NEXT: PUSH32i8 1, implicit-def %esp, implicit %esp
-# CHECK-NEXT: CALLpcrel32 @good, csr_32, implicit %esp, implicit-def %esp
-# CHECK-NEXT: ADJCALLSTACKUP32 16, 0, implicit-def dead %esp, implicit-def dead %eflags, implicit %esp
-# CHECK-NEXT: ADJCALLSTACKDOWN32 20, 0, 20, implicit-def dead %esp, implicit-def dead %eflags, implicit %esp
+# CHECK-NEXT: CALLpcrel32 @good, csr_32, implicit %esp, implicit %ssp, implicit-def %esp, implicit-def %ssp
+# CHECK-NEXT: ADJCALLSTACKUP32 16, 0, implicit-def dead %esp, implicit-def dead %eflags, implicit-def dead %ssp, implicit %esp, implicit %ssp
+# CHECK-NEXT: ADJCALLSTACKDOWN32 20, 0, 20, implicit-def dead %esp, implicit-def dead %eflags, implicit-def dead %ssp, implicit %esp, implicit %ssp
# CHECK-NEXT: %1:gr32 = MOV32rm %stack.2.s, 1, _, 0, _ :: (load 4 from %stack.2.s, align 8)
# CHECK-NEXT: %2:gr32 = MOV32rm %stack.2.s, 1, _, 4, _ :: (load 4 from %stack.2.s + 4)
# CHECK-NEXT: %4:gr32 = LEA32r %stack.0.p, 1, _, 0, _
@@ -50,8 +50,8 @@
# CHECK-NEXT: PUSH32i8 6, implicit-def %esp, implicit %esp
# CHECK-NEXT: PUSH32r %2, implicit-def %esp, implicit %esp
# CHECK-NEXT: PUSH32r %1, implicit-def %esp, implicit %esp
-# CHECK-NEXT: CALLpcrel32 @struct, csr_32, implicit %esp, implicit-def %esp
-# CHECK-NEXT: ADJCALLSTACKUP32 20, 0, implicit-def dead %esp, implicit-def dead %eflags, implicit %esp
+# CHECK-NEXT: CALLpcrel32 @struct, csr_32, implicit %esp, implicit %ssp, implicit-def %esp, implicit-def %ssp
+# CHECK-NEXT: ADJCALLSTACKUP32 20, 0, implicit-def dead %esp, implicit-def dead %eflags, implicit-def dead %ssp, implicit %esp, implicit %ssp
# CHECK-NEXT: RET 0
name: test9
alignment: 0
@@ -99,15 +99,15 @@ stack:
constants:
body: |
bb.0.entry:
- ADJCALLSTACKDOWN32 16, 0, 0, implicit-def dead %esp, implicit-def dead %eflags, implicit %esp
+ ADJCALLSTACKDOWN32 16, 0, 0, implicit-def dead %esp, implicit-def dead %eflags, implicit-def dead %ssp, implicit %esp, implicit %ssp
%0 = COPY %esp
MOV32mi %0, 1, _, 12, _, 4 :: (store 4 into stack + 12)
MOV32mi %0, 1, _, 8, _, 3 :: (store 4 into stack + 8)
MOV32mi %0, 1, _, 4, _, 2 :: (store 4 into stack + 4)
MOV32mi %0, 1, _, 0, _, 1 :: (store 4 into stack)
- CALLpcrel32 @good, csr_32, implicit %esp, implicit-def %esp
- ADJCALLSTACKUP32 16, 0, implicit-def dead %esp, implicit-def dead %eflags, implicit %esp
- ADJCALLSTACKDOWN32 20, 0, 0, implicit-def dead %esp, implicit-def dead %eflags, implicit %esp
+ CALLpcrel32 @good, csr_32, implicit %esp, implicit %ssp, implicit-def %esp, implicit-def %ssp
+ ADJCALLSTACKUP32 16, 0, implicit-def dead %esp, implicit-def dead %eflags, implicit-def dead %ssp, implicit %esp, implicit %ssp
+ ADJCALLSTACKDOWN32 20, 0, 0, implicit-def dead %esp, implicit-def dead %eflags, implicit-def dead %ssp, implicit %esp, implicit %ssp
%1 = MOV32rm %stack.2.s, 1, _, 0, _ :: (load 4 from %stack.2.s, align 8)
%2 = MOV32rm %stack.2.s, 1, _, 4, _ :: (load 4 from %stack.2.s + 4)
%3 = COPY %esp
@@ -118,8 +118,8 @@ body: |
%5 = LEA32r %stack.1.q, 1, _, 0, _
MOV32mr %3, 1, _, 12, _, killed %5 :: (store 4 into stack + 12)
MOV32mi %3, 1, _, 8, _, 6 :: (store 4 into stack + 8)
- CALLpcrel32 @struct, csr_32, implicit %esp, implicit-def %esp
- ADJCALLSTACKUP32 20, 0, implicit-def dead %esp, implicit-def dead %eflags, implicit %esp
+ CALLpcrel32 @struct, csr_32, implicit %esp, implicit %ssp, implicit-def %esp, implicit-def %ssp
+ ADJCALLSTACKUP32 20, 0, implicit-def dead %esp, implicit-def dead %eflags, implicit-def dead %ssp, implicit %esp, implicit %ssp
RET 0
...
diff --git a/test/CodeGen/X86/tail-call-conditional.mir b/test/CodeGen/X86/tail-call-conditional.mir
index e006138ba84..300b2734f52 100644
--- a/test/CodeGen/X86/tail-call-conditional.mir
+++ b/test/CodeGen/X86/tail-call-conditional.mir
@@ -48,7 +48,7 @@ body: |
; CHECK-NEXT: %rdi = COPY %rsi
; CHECK-NEXT: %rsi = COPY %rax
; CHECK-NEXT: CMP64ri8 %rax, 9, implicit-def %eflags
- ; CHECK-NEXT: TCRETURNdi64cc @f1, 0, 3, csr_64, implicit %rsp, implicit %eflags, implicit %rsp, implicit %rdi, implicit %rsi, implicit %rax, implicit-def %rax, implicit %sil, implicit-def %sil, implicit %si, implicit-def %si, implicit %esi, implicit-def %esi, implicit %rsi, implicit-def %rsi, implicit %dil, implicit-def %dil, implicit %di, implicit-def %di, implicit %edi, implicit-def %edi, implicit %rdi, implicit-def %rdi, implicit %ah, implicit-def %ah, implicit %al, implicit-def %al, implicit %ax, implicit-def %ax, implicit %eax, implicit-def %eax
+ ; CHECK-NEXT: TCRETURNdi64cc @f1, 0, 3, csr_64, implicit %rsp, implicit %eflags, implicit %ssp, implicit %rsp, implicit %rdi, implicit %rsi, implicit %rax, implicit-def %rax, implicit %sil, implicit-def %sil, implicit %si, implicit-def %si, implicit %esi, implicit-def %esi, implicit %rsi, implicit-def %rsi, implicit %dil, implicit-def %dil, implicit %di, implicit-def %di, implicit %edi, implicit-def %edi, implicit %rdi, implicit-def %rdi, implicit %ah, implicit-def %ah, implicit %al, implicit-def %al, implicit %ax, implicit-def %ax, implicit %eax, implicit-def %eax
bb.1:
successors: %bb.2, %bb.3
diff --git a/test/CodeGen/X86/x32-cet-intrinsics.ll b/test/CodeGen/X86/x32-cet-intrinsics.ll
new file mode 100644
index 00000000000..4d45014d18f
--- /dev/null
+++ b/test/CodeGen/X86/x32-cet-intrinsics.ll
@@ -0,0 +1,106 @@
+; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
+; RUN: llc < %s -mtriple=i386-apple-darwin -mattr=+shstk -mattr=+ibt | FileCheck %s
+
+define void @test_incsspd(i32 %a) local_unnamed_addr {
+; CHECK-LABEL: test_incsspd:
+; CHECK: ## BB#0: ## %entry
+; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax
+; CHECK-NEXT: incsspd %eax
+; CHECK-NEXT: retl
+entry:
+ tail call void @llvm.x86.incsspd(i32 %a)
+ ret void
+}
+
+declare void @llvm.x86.incsspd(i32)
+
+define i32 @test_rdsspd(i32 %a) {
+; CHECK-LABEL: test_rdsspd:
+; CHECK: ## BB#0: ## %entry
+; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax
+; CHECK-NEXT: rdsspd %eax
+; CHECK-NEXT: retl
+entry:
+ %0 = call i32 @llvm.x86.rdsspd(i32 %a)
+ ret i32 %0
+}
+
+declare i32 @llvm.x86.rdsspd(i32)
+
+define void @test_saveprevssp() {
+; CHECK-LABEL: test_saveprevssp:
+; CHECK: ## BB#0: ## %entry
+; CHECK-NEXT: saveprevssp
+; CHECK-NEXT: retl
+entry:
+ tail call void @llvm.x86.saveprevssp()
+ ret void
+}
+
+declare void @llvm.x86.saveprevssp()
+
+define void @test_rstorssp(i8* %__p) {
+; CHECK-LABEL: test_rstorssp:
+; CHECK: ## BB#0: ## %entry
+; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax
+; CHECK-NEXT: rstorssp (%eax)
+; CHECK-NEXT: retl
+entry:
+ tail call void @llvm.x86.rstorssp(i8* %__p)
+ ret void
+}
+
+declare void @llvm.x86.rstorssp(i8*)
+
+define void @test_wrssd(i32 %a, i8* %__p) {
+; CHECK-LABEL: test_wrssd:
+; CHECK: ## BB#0: ## %entry
+; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax
+; CHECK-NEXT: movl {{[0-9]+}}(%esp), %ecx
+; CHECK-NEXT: wrssd %eax, (%ecx)
+; CHECK-NEXT: retl
+entry:
+ tail call void @llvm.x86.wrssd(i32 %a, i8* %__p)
+ ret void
+}
+
+declare void @llvm.x86.wrssd(i32, i8*)
+
+define void @test_wrussd(i32 %a, i8* %__p) {
+; CHECK-LABEL: test_wrussd:
+; CHECK: ## BB#0: ## %entry
+; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax
+; CHECK-NEXT: movl {{[0-9]+}}(%esp), %ecx
+; CHECK-NEXT: wrussd %eax, (%ecx)
+; CHECK-NEXT: retl
+entry:
+ tail call void @llvm.x86.wrussd(i32 %a, i8* %__p)
+ ret void
+}
+
+declare void @llvm.x86.wrussd(i32, i8*)
+
+define void @test_setssbsy() {
+; CHECK-LABEL: test_setssbsy:
+; CHECK: ## BB#0: ## %entry
+; CHECK-NEXT: setssbsy
+; CHECK-NEXT: retl
+entry:
+ tail call void @llvm.x86.setssbsy()
+ ret void
+}
+
+declare void @llvm.x86.setssbsy()
+
+define void @test_clrssbsy(i8* %__p) {
+; CHECK-LABEL: test_clrssbsy:
+; CHECK: ## BB#0: ## %entry
+; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax
+; CHECK-NEXT: clrssbsy (%eax)
+; CHECK-NEXT: retl
+entry:
+ tail call void @llvm.x86.clrssbsy(i8* %__p)
+ ret void
+}
+
+declare void @llvm.x86.clrssbsy(i8* %__p)
diff --git a/test/CodeGen/X86/x64-cet-intrinsics.ll b/test/CodeGen/X86/x64-cet-intrinsics.ll
new file mode 100644
index 00000000000..f9cba0056db
--- /dev/null
+++ b/test/CodeGen/X86/x64-cet-intrinsics.ll
@@ -0,0 +1,150 @@
+; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
+; RUN: llc < %s -mtriple=x86_64-apple-darwin -mattr=+shstk -mattr=+ibt | FileCheck %s
+
+define void @test_incsspd(i32 %a) local_unnamed_addr {
+; CHECK-LABEL: test_incsspd:
+; CHECK: ## BB#0: ## %entry
+; CHECK-NEXT: incsspd %edi
+; CHECK-NEXT: retq
+entry:
+ tail call void @llvm.x86.incsspd(i32 %a)
+ ret void
+}
+
+declare void @llvm.x86.incsspd(i32)
+
+define void @test_incsspq(i32 %a) local_unnamed_addr {
+; CHECK-LABEL: test_incsspq:
+; CHECK: ## BB#0: ## %entry
+; CHECK-NEXT: movslq %edi, %rax
+; CHECK-NEXT: incsspq %rax
+; CHECK-NEXT: retq
+entry:
+ %conv.i = sext i32 %a to i64
+ tail call void @llvm.x86.incsspq(i64 %conv.i)
+ ret void
+}
+
+declare void @llvm.x86.incsspq(i64)
+
+define i32 @test_rdsspd(i32 %a) {
+; CHECK-LABEL: test_rdsspd:
+; CHECK: ## BB#0: ## %entry
+; CHECK-NEXT: rdsspd %edi
+; CHECK-NEXT: movl %edi, %eax
+; CHECK-NEXT: retq
+entry:
+ %0 = call i32 @llvm.x86.rdsspd(i32 %a)
+ ret i32 %0
+}
+
+declare i32 @llvm.x86.rdsspd(i32)
+
+define i64 @test_rdsspq(i64 %a) {
+; CHECK-LABEL: test_rdsspq:
+; CHECK: ## BB#0: ## %entry
+; CHECK-NEXT: rdsspq %rdi
+; CHECK-NEXT: movq %rdi, %rax
+; CHECK-NEXT: retq
+entry:
+ %0 = call i64 @llvm.x86.rdsspq(i64 %a)
+ ret i64 %0
+}
+
+declare i64 @llvm.x86.rdsspq(i64)
+
+define void @test_saveprevssp() {
+; CHECK-LABEL: test_saveprevssp:
+; CHECK: ## BB#0: ## %entry
+; CHECK-NEXT: saveprevssp
+; CHECK-NEXT: retq
+entry:
+ tail call void @llvm.x86.saveprevssp()
+ ret void
+}
+
+declare void @llvm.x86.saveprevssp()
+
+define void @test_rstorssp(i8* %__p) {
+; CHECK-LABEL: test_rstorssp:
+; CHECK: ## BB#0: ## %entry
+; CHECK-NEXT: rstorssp (%rdi)
+; CHECK-NEXT: retq
+entry:
+ tail call void @llvm.x86.rstorssp(i8* %__p)
+ ret void
+}
+
+declare void @llvm.x86.rstorssp(i8*)
+
+define void @test_wrssd(i32 %a, i8* %__p) {
+; CHECK-LABEL: test_wrssd:
+; CHECK: ## BB#0: ## %entry
+; CHECK-NEXT: wrssd %edi, (%rsi)
+; CHECK-NEXT: retq
+entry:
+ tail call void @llvm.x86.wrssd(i32 %a, i8* %__p)
+ ret void
+}
+
+declare void @llvm.x86.wrssd(i32, i8*)
+
+define void @test_wrssq(i64 %a, i8* %__p) {
+; CHECK-LABEL: test_wrssq:
+; CHECK: ## BB#0: ## %entry
+; CHECK-NEXT: wrssq %rdi, (%rsi)
+; CHECK-NEXT: retq
+entry:
+ tail call void @llvm.x86.wrssq(i64 %a, i8* %__p)
+ ret void
+}
+
+declare void @llvm.x86.wrssq(i64, i8*)
+
+define void @test_wrussd(i32 %a, i8* %__p) {
+; CHECK-LABEL: test_wrussd:
+; CHECK: ## BB#0: ## %entry
+; CHECK-NEXT: wrussd %edi, (%rsi)
+; CHECK-NEXT: retq
+entry:
+ tail call void @llvm.x86.wrussd(i32 %a, i8* %__p)
+ ret void
+}
+
+declare void @llvm.x86.wrussd(i32, i8*)
+
+define void @test_wrussq(i64 %a, i8* %__p) {
+; CHECK-LABEL: test_wrussq:
+; CHECK: ## BB#0: ## %entry
+; CHECK-NEXT: wrussq %rdi, (%rsi)
+; CHECK-NEXT: retq
+entry:
+ tail call void @llvm.x86.wrussq(i64 %a, i8* %__p)
+ ret void
+}
+
+declare void @llvm.x86.wrussq(i64, i8*)
+
+define void @test_setssbsy() {
+; CHECK-LABEL: test_setssbsy:
+; CHECK: ## BB#0: ## %entry
+; CHECK-NEXT: setssbsy
+; CHECK-NEXT: retq
+entry:
+ tail call void @llvm.x86.setssbsy()
+ ret void
+}
+
+declare void @llvm.x86.setssbsy()
+
+define void @test_clrssbsy(i8* %__p) {
+; CHECK-LABEL: test_clrssbsy:
+; CHECK: ## BB#0: ## %entry
+; CHECK-NEXT: clrssbsy (%rdi)
+; CHECK-NEXT: retq
+entry:
+ tail call void @llvm.x86.clrssbsy(i8* %__p)
+ ret void
+}
+
+declare void @llvm.x86.clrssbsy(i8* %__p)
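
The saveprevssp/rstorssp pair declared in these tests implements shadow-stack switching. A hedged sketch of how they combine, assuming %token points at a shadow-stack restore token prepared by the runtime (the token setup itself is elided here):

; Sketch only: switch to the shadow stack described by the restore token.
declare void @llvm.x86.rstorssp(i8*)
declare void @llvm.x86.saveprevssp()

define void @switch_shadow_stack(i8* %token) {
entry:
  ; rstorssp moves SSP to the shadow stack holding the token at %token.
  call void @llvm.x86.rstorssp(i8* %token)
  ; saveprevssp then records the outgoing shadow stack pointer so the
  ; previous shadow stack can be switched back to later.
  call void @llvm.x86.saveprevssp()
  ret void
}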