Diffstat (limited to 'lib/xray/xray_trampoline_x86_64.S')
-rw-r--r--  lib/xray/xray_trampoline_x86_64.S  |  97
1 file changed, 49 insertions, 48 deletions
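
Note on the change: the removed macro calls all come from ../sanitizer_common/sanitizer_asm.h, and the added lines are their ELF expansions written out by hand. A rough sketch of the relevant definitions, paraphrased rather than quoted verbatim from that header:

  // Approximate expansions provided by sanitizer_asm.h (sketch, not verbatim):
  #if !defined(__APPLE__)
  # define ASM_TSAN_SYMBOL(symbol)    symbol                  // ELF: name unchanged
  # define ASM_TYPE_FUNCTION(symbol)  .type symbol, @function
  # define ASM_SIZE(symbol)           .size symbol, .-symbol
  #else
  # define ASM_TSAN_SYMBOL(symbol)    _##symbol               // Mach-O wants a leading underscore
  # define ASM_TYPE_FUNCTION(symbol)                          // Mach-O has no .type
  # define ASM_SIZE(symbol)                                   // likewise no .size
  #endif
  // The CFI_* macros are thin wrappers over the matching .cfi_* directives:
  # define CFI_STARTPROC              .cfi_startproc
  # define CFI_ENDPROC                .cfi_endproc
  # define CFI_DEF_CFA_OFFSET(n)      .cfi_def_cfa_offset n

With the include gone the file is ELF-only again, hence the plain `.text` in place of the `__TEXT,__text` alternative. The 200 in `.cfi_def_cfa_offset 200` is the 8-byte return address pushed by the call into the trampoline plus the 192 bytes reserved by `subq`; because 192 is a multiple of 16, %rsp sits at 8 mod 16 after SAVE_REGISTERS, and the 8 bytes pushed by the later indirect call through %rax (elided from the hunks shown) restore the 16-byte alignment the SysV ABI requires at the handler's entry, which is what the comment inside SAVE_REGISTERS is describing.
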
diff --git a/lib/xray/xray_trampoline_x86_64.S b/lib/xray/xray_trampoline_x86_64.S
index bf8ca75ca..ffbfb5c7e 100644
--- a/lib/xray/xray_trampoline_x86_64.S
+++ b/lib/xray/xray_trampoline_x86_64.S
@@ -14,13 +14,10 @@
//===----------------------------------------------------------------------===//
#include "../builtins/assembly.h"
-#include "../sanitizer_common/sanitizer_asm.h"
-
-
.macro SAVE_REGISTERS
subq $192, %rsp
- CFI_DEF_CFA_OFFSET(200)
+ .cfi_def_cfa_offset 200
// At this point, the stack pointer should be aligned to an 8-byte boundary,
// because any call instructions that come after this will add another 8
// bytes and therefore align it to 16-bytes.
@@ -60,28 +57,25 @@
movq 8(%rsp), %r8
movq 0(%rsp), %r9
addq $192, %rsp
- CFI_DEF_CFA_OFFSET(8)
+ .cfi_def_cfa_offset 8
.endm
-#if !defined(__APPLE__)
- .section .text
-#else
- .section __TEXT,__text
-#endif
+ .text
.file "xray_trampoline_x86.S"
//===----------------------------------------------------------------------===//
- .globl ASM_TSAN_SYMBOL(__xray_FunctionEntry)
+ .globl __xray_FunctionEntry
.align 16, 0x90
- ASM_TYPE_FUNCTION(__xray_FunctionEntry)
-ASM_TSAN_SYMBOL(__xray_FunctionEntry):
- CFI_STARTPROC
+ .type __xray_FunctionEntry,@function
+
+__xray_FunctionEntry:
+ .cfi_startproc
SAVE_REGISTERS
// This load has to be atomic, it's concurrent with __xray_patch().
// On x86/amd64, a simple (type-aligned) MOV instruction is enough.
- movq ASM_TSAN_SYMBOL(_ZN6__xray19XRayPatchedFunctionE)(%rip), %rax
+ movq _ZN6__xray19XRayPatchedFunctionE(%rip), %rax
testq %rax, %rax
je .Ltmp0
@@ -92,27 +86,28 @@ ASM_TSAN_SYMBOL(__xray_FunctionEntry):
.Ltmp0:
RESTORE_REGISTERS
retq
- ASM_SIZE(__xray_FunctionEntry)
- CFI_ENDPROC
+.Ltmp1:
+ .size __xray_FunctionEntry, .Ltmp1-__xray_FunctionEntry
+ .cfi_endproc
//===----------------------------------------------------------------------===//
- .globl ASM_TSAN_SYMBOL(__xray_FunctionExit)
+ .globl __xray_FunctionExit
.align 16, 0x90
- ASM_TYPE_FUNCTION(__xray_FunctionExit)
-ASM_TSAN_SYMBOL(__xray_FunctionExit):
- CFI_STARTPROC
+ .type __xray_FunctionExit,@function
+__xray_FunctionExit:
+ .cfi_startproc
// Save the important registers first. Since we're assuming that this
// function is only jumped into, we only preserve the registers for
// returning.
subq $56, %rsp
- CFI_DEF_CFA_OFFSET(64)
+ .cfi_def_cfa_offset 64
movq %rbp, 48(%rsp)
movupd %xmm0, 32(%rsp)
movupd %xmm1, 16(%rsp)
movq %rax, 8(%rsp)
movq %rdx, 0(%rsp)
- movq ASM_TSAN_SYMBOL(_ZN6__xray19XRayPatchedFunctionE)(%rip), %rax
+ movq _ZN6__xray19XRayPatchedFunctionE(%rip), %rax
testq %rax,%rax
je .Ltmp2
@@ -127,21 +122,22 @@ ASM_TSAN_SYMBOL(__xray_FunctionExit):
movq 8(%rsp), %rax
movq 0(%rsp), %rdx
addq $56, %rsp
- CFI_DEF_CFA_OFFSET(8)
+ .cfi_def_cfa_offset 8
retq
- ASM_SIZE(__xray_FunctionExit)
- CFI_ENDPROC
+.Ltmp3:
+ .size __xray_FunctionExit, .Ltmp3-__xray_FunctionExit
+ .cfi_endproc
//===----------------------------------------------------------------------===//
- .globl ASM_TSAN_SYMBOL(__xray_FunctionTailExit)
+ .global __xray_FunctionTailExit
.align 16, 0x90
- ASM_TYPE_FUNCTION(__xray_FunctionTailExit)
-ASM_TSAN_SYMBOL(__xray_FunctionTailExit):
- CFI_STARTPROC
+ .type __xray_FunctionTailExit,@function
+__xray_FunctionTailExit:
+ .cfi_startproc
SAVE_REGISTERS
- movq ASM_TSAN_SYMBOL(_ZN6__xray19XRayPatchedFunctionE)(%rip), %rax
+ movq _ZN6__xray19XRayPatchedFunctionE(%rip), %rax
testq %rax,%rax
je .Ltmp4
@@ -152,25 +148,26 @@ ASM_TSAN_SYMBOL(__xray_FunctionTailExit):
.Ltmp4:
RESTORE_REGISTERS
retq
- ASM_SIZE(__xray_FunctionTailExit)
- CFI_ENDPROC
+.Ltmp5:
+ .size __xray_FunctionTailExit, .Ltmp5-__xray_FunctionTailExit
+ .cfi_endproc
//===----------------------------------------------------------------------===//
- .globl ASM_TSAN_SYMBOL(__xray_ArgLoggerEntry)
+ .globl __xray_ArgLoggerEntry
.align 16, 0x90
- ASM_TYPE_FUNCTION(__xray_ArgLoggerEntry)
-ASM_TSAN_SYMBOL(__xray_ArgLoggerEntry):
- CFI_STARTPROC
+ .type __xray_ArgLoggerEntry,@function
+__xray_ArgLoggerEntry:
+ .cfi_startproc
SAVE_REGISTERS
// Again, these function pointer loads must be atomic; MOV is fine.
- movq ASM_TSAN_SYMBOL(_ZN6__xray13XRayArgLoggerE)(%rip), %rax
+ movq _ZN6__xray13XRayArgLoggerE(%rip), %rax
testq %rax, %rax
jne .Larg1entryLog
// If [arg1 logging handler] not set, defer to no-arg logging.
- movq ASM_TSAN_SYMBOL(_ZN6__xray19XRayPatchedFunctionE)(%rip), %rax
+ movq _ZN6__xray19XRayPatchedFunctionE(%rip), %rax
testq %rax, %rax
je .Larg1entryFail
@@ -189,22 +186,24 @@ ASM_TSAN_SYMBOL(__xray_ArgLoggerEntry):
.Larg1entryFail:
RESTORE_REGISTERS
retq
- ASM_SIZE(__xray_ArgLoggerEntry)
- CFI_ENDPROC
+
+.Larg1entryEnd:
+ .size __xray_ArgLoggerEntry, .Larg1entryEnd-__xray_ArgLoggerEntry
+ .cfi_endproc
//===----------------------------------------------------------------------===//
- .global ASM_TSAN_SYMBOL(__xray_CustomEvent)
+ .global __xray_CustomEvent
.align 16, 0x90
- ASM_TYPE_FUNCTION(__xray_CustomEvent)
-ASM_TSAN_SYMBOL(__xray_CustomEvent):
- CFI_STARTPROC
+ .type __xray_CustomEvent,@function
+__xray_CustomEvent:
+ .cfi_startproc
SAVE_REGISTERS
// We take two arguments to this trampoline, which should be in rdi and rsi
// already. We also make sure that we stash %rax because we use that register
// to call the logging handler.
- movq ASM_TSAN_SYMBOL(_ZN6__xray22XRayPatchedCustomEventE)(%rip), %rax
+ movq _ZN6__xray22XRayPatchedCustomEventE(%rip), %rax
testq %rax,%rax
je .LcustomEventCleanup
@@ -224,7 +223,9 @@ ASM_TSAN_SYMBOL(__xray_CustomEvent):
.LcustomEventCleanup:
RESTORE_REGISTERS
retq
- ASM_SIZE(__xray_CustomEvent)
- CFI_ENDPROC
+
+.Ltmp8:
+ .size __xray_CustomEvent, .Ltmp8-__xray_CustomEvent
+ .cfi_endproc
NO_EXEC_STACK_DIRECTIVE
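
A note on the "// This load has to be atomic" comments: the pointers being loaded are globals defined in the C++ part of the runtime. _ZN6__xray19XRayPatchedFunctionE demangles to __xray::XRayPatchedFunction, while _ZN6__xray13XRayArgLoggerE and _ZN6__xray22XRayPatchedCustomEventE play the same role for the argument-logging and custom-event trampolines. On x86-64 a naturally aligned 8-byte load is atomic, so the rip-relative movq behaves like a relaxed atomic load and never observes a torn pointer while a handler is being installed or removed on another thread. A minimal sketch of the handler-pointer side, using std::atomic instead of compiler-rt's internal atomic types and with illustrative enumerator values (the real declarations live in xray_interface.cc and xray/xray_interface.h):

  #include <atomic>
  #include <cstdint>

  // Illustrative values only; the real enum is declared in xray/xray_interface.h.
  enum XRayEntryType { ENTRY = 0, EXIT = 1, TAIL = 2 };

  namespace __xray {
  // The pointer the trampolines load with a single movq; a null value is what
  // the `je .Ltmp0` / `.Ltmp2` / `.Ltmp4` checks above test for.
  std::atomic<void (*)(int32_t FuncId, XRayEntryType Kind)>
      XRayPatchedFunction{nullptr};
  } // namespace __xray

  // Installing a handler is just an atomic store; every later entry into a
  // patched function goes through a trampoline and picks up the new pointer.
  extern "C" int __xray_set_handler(void (*Handler)(int32_t, XRayEntryType)) {
    __xray::XRayPatchedFunction.store(Handler, std::memory_order_release);
    return 1;
  }
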