Index: lib/libthr/arch/amd64/amd64/_umtx_op_err.S =================================================================== --- lib/libthr/arch/amd64/amd64/_umtx_op_err.S +++ lib/libthr/arch/amd64/amd64/_umtx_op_err.S @@ -29,8 +29,11 @@ #include <machine/asm.h> #include <sys/syscall.h> -#define RSYSCALL_ERR(x) ENTRY(__CONCAT(x, _err)); \ - mov __CONCAT($SYS_,x),%rax; KERNCALL; ret; +#define RSYSCALL_ERR(x) ENTRY(__CONCAT(x, _err)); \ + mov __CONCAT($SYS_,x),%rax; \ + KERNCALL; \ + ret; \ + END(__CONCAT(x, _err)); #define KERNCALL movq %rcx, %r10; syscall Index: lib/msun/amd64/e_remainder.S =================================================================== --- lib/msun/amd64/e_remainder.S +++ lib/msun/amd64/e_remainder.S @@ -51,5 +51,6 @@ movsd -8(%rsp),%xmm0 fstp %st ret +END(remainder) .section .note.GNU-stack,"",%progbits Index: lib/msun/amd64/e_remainderf.S =================================================================== --- lib/msun/amd64/e_remainderf.S +++ lib/msun/amd64/e_remainderf.S @@ -21,5 +21,6 @@ movss -4(%rsp),%xmm0 fstp %st ret +END(remainderf) .section .note.GNU-stack,"",%progbits Index: lib/msun/amd64/e_remainderl.S =================================================================== --- lib/msun/amd64/e_remainderl.S +++ lib/msun/amd64/e_remainderl.S @@ -46,5 +46,6 @@ jne 1b fstp %st(1) ret +END(remainderl) .section .note.GNU-stack,"",%progbits Index: lib/msun/amd64/e_sqrtl.S =================================================================== --- lib/msun/amd64/e_sqrtl.S +++ lib/msun/amd64/e_sqrtl.S @@ -31,5 +31,6 @@ fldt 8(%rsp) fsqrt ret +END(sqrtl) .section .note.GNU-stack,"",%progbits Index: lib/msun/amd64/s_llrintl.S =================================================================== --- lib/msun/amd64/s_llrintl.S +++ lib/msun/amd64/s_llrintl.S @@ -33,5 +33,6 @@ fistpll (%rsp) popq %rax ret +END(llrintl) .section .note.GNU-stack,"",%progbits Index: lib/msun/amd64/s_logbl.S =================================================================== --- lib/msun/amd64/s_logbl.S +++ 
lib/msun/amd64/s_logbl.S @@ -41,5 +41,6 @@ fxtract fstp %st ret +END(logbl) .section .note.GNU-stack,"",%progbits Index: lib/msun/amd64/s_lrintl.S =================================================================== --- lib/msun/amd64/s_lrintl.S +++ lib/msun/amd64/s_lrintl.S @@ -33,5 +33,6 @@ fistpll (%rsp) popq %rax ret +END(lrintl) .section .note.GNU-stack,"",%progbits Index: lib/msun/amd64/s_remquol.S =================================================================== --- lib/msun/amd64/s_remquol.S +++ lib/msun/amd64/s_remquol.S @@ -60,5 +60,6 @@ /* Store the quotient and return. */ movl %eax,(%rdi) ret +END(remquol) .section .note.GNU-stack,"",%progbits Index: lib/msun/amd64/s_rintl.S =================================================================== --- lib/msun/amd64/s_rintl.S +++ lib/msun/amd64/s_rintl.S @@ -41,5 +41,6 @@ fldt 8(%rsp) frndint ret +END(rintl) .section .note.GNU-stack,"",%progbits Index: sys/amd64/include/asm.h =================================================================== --- sys/amd64/include/asm.h +++ sys/amd64/include/asm.h @@ -60,25 +60,36 @@ #define _START_ENTRY .text; .p2align 4,0x90 #define _ENTRY(x) _START_ENTRY; \ - .globl CNAME(x); .type CNAME(x),@function; CNAME(x): + .globl CNAME(x); .type CNAME(x),@function; CNAME(x):; \ + .cfi_startproc #ifdef PROF #define ALTENTRY(x) _ENTRY(x); \ - pushq %rbp; movq %rsp,%rbp; \ + pushq %rbp; \ + .cfi_def_cfa_offset 16; \ + .cfi_offset %rbp, -16; \ + movq %rsp,%rbp; \ call PIC_PLT(HIDENAME(mcount)); \ popq %rbp; \ + .cfi_restore %rbp; \ + .cfi_def_cfa_offset 8; \ jmp 9f #define ENTRY(x) _ENTRY(x); \ - pushq %rbp; movq %rsp,%rbp; \ + pushq %rbp; \ + .cfi_def_cfa_offset 16; \ + .cfi_offset %rbp, -16; \ + movq %rsp,%rbp; \ call PIC_PLT(HIDENAME(mcount)); \ popq %rbp; \ + .cfi_restore %rbp; \ + .cfi_def_cfa_offset 8; \ 9: #else #define ALTENTRY(x) _ENTRY(x) #define ENTRY(x) _ENTRY(x) #endif -#define END(x) .size x, . - x +#define END(x) .size x, . 
- x; .cfi_endproc /* * WEAK_REFERENCE(): create a weak reference alias from sym. * The macro is not a general asm macro that takes arbitrary names,