Page MenuHomeFreeBSD

D27387.id80041.diff
No One · Temporary

D27387.id80041.diff

Index: sys/arm64/include/asm.h
===================================================================
--- sys/arm64/include/asm.h
+++ sys/arm64/include/asm.h
@@ -39,10 +39,11 @@
#define _C_LABEL(x) x
#define ENTRY(sym) \
- .text; .globl sym; .align 2; .type sym,#function; sym:
+ .text; .globl sym; .align 2; .type sym,#function; sym: \
+ .cfi_startproc
#define EENTRY(sym) \
.globl sym; sym:
-#define END(sym) .size sym, . - sym
+#define END(sym) .cfi_endproc; .size sym, . - sym
#define EEND(sym)
#define WEAK_REFERENCE(sym, alias) \
Index: sys/arm64/linux/linux_locore.asm
===================================================================
--- sys/arm64/linux/linux_locore.asm
+++ sys/arm64/linux/linux_locore.asm
@@ -47,17 +47,21 @@
ENTRY(__kernel_rt_sigreturn)
brk #0 /* LINUXTODO: implement __kernel_rt_sigreturn */
ret
+END(__kernel_rt_sigreturn)
ENTRY(__kernel_gettimeofday)
ldr x8, =LINUX_SYS_gettimeofday
svc #0
ret
+END(__kernel_gettimeofday)
ENTRY(__kernel_clock_gettime)
ldr x8, =LINUX_SYS_linux_clock_gettime
svc #0
ret
+END(__kernel_clock_gettime)
ENTRY(__kernel_clock_getres)
brk #0 /* LINUXTODO: implement __kernel_clock_getres */
ret
+END(__kernel_clock_getres)
Index: sys/arm64/linux/linux_support.s
===================================================================
--- sys/arm64/linux/linux_support.s
+++ sys/arm64/linux/linux_support.s
@@ -39,19 +39,24 @@
ENTRY(futex_xchgl)
brk #0
ret
+END(futex_xchgl)
ENTRY(futex_addl)
brk #0
ret
+END(futex_addl)
ENTRY(futex_orl)
brk #0
ret
+END(futex_orl)
ENTRY(futex_andl)
brk #0
ret
+END(futex_andl)
ENTRY(futex_xorl)
brk #0
ret
+END(futex_xorl)
Index: sys/crypto/des/arch/i386/des_enc.S
===================================================================
--- sys/crypto/des/arch/i386/des_enc.S
+++ sys/crypto/des/arch/i386/des_enc.S
@@ -1354,8 +1354,7 @@
popl %edi
popl %esi
ret
-.L_des_encrypt1_end:
- .size _C_LABEL(des_encrypt1),.L_des_encrypt1_end-_C_LABEL(des_encrypt1)
+END(des_encrypt1)
ENTRY(des_encrypt2)
pushl %esi
@@ -2573,8 +2572,7 @@
popl %edi
popl %esi
ret
-.L_des_encrypt2_end:
- .size _C_LABEL(des_encrypt2),.L_des_encrypt2_end-_C_LABEL(des_encrypt2)
+END(des_encrypt2)
ENTRY(des_encrypt3)
pushl %ebx
@@ -2692,8 +2690,7 @@
popl %ebp
popl %ebx
ret
-.L_des_encrypt3_end:
- .size _C_LABEL(des_encrypt3),.L_des_encrypt3_end-_C_LABEL(des_encrypt3)
+END(des_encrypt3)
ENTRY(des_decrypt3)
pushl %ebx
@@ -2811,5 +2808,4 @@
popl %ebp
popl %ebx
ret
-.L_des_decrypt3_end:
- .size _C_LABEL(des_decrypt3),.L_des_decrypt3_end-_C_LABEL(des_decrypt3)
+END(des_decrypt3)
Index: sys/i386/bios/smapi_bios.S
===================================================================
--- sys/i386/bios/smapi_bios.S
+++ sys/i386/bios/smapi_bios.S
@@ -38,3 +38,4 @@
leave
ret
+END(smapi32)
Index: sys/i386/include/asm.h
===================================================================
--- sys/i386/include/asm.h
+++ sys/i386/include/asm.h
@@ -73,19 +73,30 @@
#define _START_ENTRY .text; .p2align 2,0x90
#define _ENTRY(x) _START_ENTRY; \
- .globl CNAME(x); .type CNAME(x),@function; CNAME(x):
-#define END(x) .size x, . - x
+ .globl CNAME(x); .type CNAME(x),@function; CNAME(x): \
+ .cfi_startproc
+#define END(x) .cfi_endproc; .size x, . - x
#ifdef PROF
#define ALTENTRY(x) _ENTRY(x); \
- pushl %ebp; movl %esp,%ebp; \
+ pushl %ebp; \
+ .cfi_def_cfa_offset 8; \
+ .cfi_offset %ebp, -8; \
+ movl %esp,%ebp; \
call PIC_PLT(HIDENAME(mcount)); \
popl %ebp; \
+ .cfi_restore %ebp; \
+ .cfi_def_cfa_offset 4; \
jmp 9f
#define ENTRY(x) _ENTRY(x); \
- pushl %ebp; movl %esp,%ebp; \
+ pushl %ebp; \
+ .cfi_def_cfa_offset 8; \
+ .cfi_offset %ebp, -8; \
+ movl %esp,%ebp; \
call PIC_PLT(HIDENAME(mcount)); \
popl %ebp; \
+ .cfi_restore %ebp; \
+ .cfi_def_cfa_offset 4; \
9:
#else
#define ALTENTRY(x) _ENTRY(x)
Index: sys/powerpc/aim/locore.S
===================================================================
--- sys/powerpc/aim/locore.S
+++ sys/powerpc/aim/locore.S
@@ -13,3 +13,4 @@
ENTRY(get_spr)
mfspr %r3, 0
blr
+END(get_spr)
Index: sys/powerpc/aim/locore64.S
===================================================================
--- sys/powerpc/aim/locore64.S
+++ sys/powerpc/aim/locore64.S
@@ -203,6 +203,7 @@
/* Unreachable */
b .
+_END(__start)
ASENTRY_NOPROF(__restartkernel_virtual)
/*
@@ -254,6 +255,7 @@
addi %r14, %r14, 1
cmpdi %r14, 16
blt 1b
+ASEND(__restartkernel_virtual)
ASENTRY_NOPROF(__restartkernel)
/*
@@ -270,5 +272,6 @@
rfid
2: bl __start
nop
+ASEND(__restartkernel)
#include <powerpc/aim/trap_subr64.S>
Index: sys/powerpc/aim/trap_subr64.S
===================================================================
--- sys/powerpc/aim/trap_subr64.S
+++ sys/powerpc/aim/trap_subr64.S
@@ -957,6 +957,7 @@
dbleave:
FRAME_LEAVE(PC_DBSAVE)
rfid
+ASEND(breakpoint)
/*
* In case of KDB we want a separate trap catcher for it
Index: sys/powerpc/booke/locore.S
===================================================================
--- sys/powerpc/booke/locore.S
+++ sys/powerpc/booke/locore.S
@@ -834,6 +834,7 @@
andi. %r3, %r3, L1CSR0_DCFI
bne 1b
blr
+END(dcache_inval)
ENTRY(dcache_disable)
/* Disable d-cache */
@@ -846,6 +847,7 @@
mtspr SPR_L1CSR0, %r3
isync
blr
+END(dcache_disable)
ENTRY(dcache_enable)
/* Enable d-cache */
@@ -857,6 +859,7 @@
mtspr SPR_L1CSR0, %r3
isync
blr
+END(dcache_enable)
ENTRY(icache_inval)
/* Invalidate i-cache */
@@ -869,6 +872,7 @@
andi. %r3, %r3, L1CSR1_ICFI
bne 1b
blr
+END(icache_inval)
ENTRY(icache_disable)
/* Disable i-cache */
@@ -880,6 +884,7 @@
mtspr SPR_L1CSR1, %r3
isync
blr
+END(icache_disable)
ENTRY(icache_enable)
/* Enable i-cache */
@@ -890,6 +895,7 @@
mtspr SPR_L1CSR1, %r3
isync
blr
+END(icache_enable)
/*
* L2 cache disable/enable/inval sequences for E500mc.
@@ -906,6 +912,7 @@
andis. %r3, %r3, L2CSR0_L2FI@h
bne 1b
blr
+END(l2cache_inval)
ENTRY(l2cache_enable)
mfspr %r3, SPR_L2CSR0
@@ -914,6 +921,7 @@
mtspr SPR_L2CSR0, %r3
isync
blr
+END(l2cache_enable)
/*
* Branch predictor setup.
@@ -929,6 +937,7 @@
mtspr SPR_BUCSR, %r3
isync
blr
+END(bpred_enable)
/*
* XXX: This should be moved to a shared AIM/booke asm file, if one ever is
@@ -938,6 +947,7 @@
/* Note: The spr number is patched at runtime */
mfspr %r3, 0
blr
+END(get_spr)
/************************************************************************/
/* Data section */
Index: sys/powerpc/booke/trap_subr.S
===================================================================
--- sys/powerpc/booke/trap_subr.S
+++ sys/powerpc/booke/trap_subr.S
@@ -1097,6 +1097,7 @@
dbleave:
FRAME_LEAVE(SPR_SRR0, SPR_SRR1)
rfi
+ASEND(breakpoint)
#endif /* KDB */
#ifdef SMP
@@ -1111,6 +1112,7 @@
isync
msync
blr
+END(tlb_lock)
ENTRY(tlb_unlock)
isync
@@ -1120,6 +1122,7 @@
isync
msync
blr
+END(tlb_unlock)
/*
* TLB miss spin locks. For each CPU we have a reservation granule (32 bytes);
Index: sys/powerpc/include/asm.h
===================================================================
--- sys/powerpc/include/asm.h
+++ sys/powerpc/include/asm.h
@@ -108,12 +108,13 @@
.globl name; \
.section ".opd","aw"; \
.p2align 3; \
- name: \
+name: \
.quad DOT_LABEL(name),.TOC.@tocbase,0; \
.previous; \
.p2align 4; \
TYPE_ENTRY(name) \
-DOT_LABEL(name):
+DOT_LABEL(name): \
+ .cfi_startproc
#define _NAKED_ENTRY(name) _ENTRY(name)
#else
#define _ENTRY(name) \
@@ -122,6 +123,7 @@
.globl name; \
.type name,@function; \
name: \
+ .cfi_startproc; \
addis %r2, %r12, (.TOC.-name)@ha; \
addi %r2, %r2, (.TOC.-name)@l; \
.localentry name, .-name;
@@ -133,10 +135,12 @@
.globl name; \
.type name,@function; \
name: \
+ .cfi_startproc; \
.localentry name, .-name;
#endif
#define _END(name) \
+ .cfi_endproc; \
.long 0; \
.byte 0,0,0,0,0,0,0,0; \
END_SIZE(name)
@@ -153,8 +157,11 @@
.p2align 4; \
.globl name; \
.type name,@function; \
- name:
-#define _END(name)
+name: \
+ .cfi_startproc
+#define _END(name) \
+ .cfi_endproc; \
+ .size name, . - name
#define _NAKED_ENTRY(name) _ENTRY(name)
@@ -186,6 +193,7 @@
# define _PROF_PROLOGUE
#endif
+#define ASEND(y) _END(ASMNAME(y))
#define ASENTRY(y) _ENTRY(ASMNAME(y)); _PROF_PROLOGUE
#define END(y) _END(CNAME(y))
#define ENTRY(y) _ENTRY(CNAME(y)); _PROF_PROLOGUE
Index: sys/powerpc/mambo/mambocall.S
===================================================================
--- sys/powerpc/mambo/mambocall.S
+++ sys/powerpc/mambo/mambocall.S
@@ -36,4 +36,4 @@
*/
.long 0x000EAEB0
blr
-
+ASEND(mambocall)
Index: sys/powerpc/ofw/ofwcall32.S
===================================================================
--- sys/powerpc/ofw/ofwcall32.S
+++ sys/powerpc/ofw/ofwcall32.S
@@ -120,6 +120,7 @@
lwz %r0,4(%r1)
mtlr %r0
blr
+ASEND(ofwcall)
/*
* RTAS Entry Point. Similar to the OF one, but simpler (no separate stack)
@@ -174,4 +175,4 @@
lwz %r0,4(%r1)
mtlr %r0
blr
-
+ASEND(rtascall)
Index: sys/powerpc/ofw/ofwcall64.S
===================================================================
--- sys/powerpc/ofw/ofwcall64.S
+++ sys/powerpc/ofw/ofwcall64.S
@@ -216,6 +216,7 @@
ld %r0,16(%r1)
mtlr %r0
blr
+ASEND(ofwcall)
/*
* RTAS 32-bit Entry Point. Similar to the OF one, but simpler (no separate
@@ -380,4 +381,4 @@
ld %r0,16(%r1)
mtlr %r0
blr
-
+ASEND(rtascall)
Index: sys/powerpc/powernv/opalcall.S
===================================================================
--- sys/powerpc/powernv/opalcall.S
+++ sys/powerpc/powernv/opalcall.S
@@ -129,4 +129,4 @@
/* And return */
blr
-
+ASEND(opal_call)
Index: sys/powerpc/powerpc/cpu_subr64.S
===================================================================
--- sys/powerpc/powerpc/cpu_subr64.S
+++ sys/powerpc/powerpc/cpu_subr64.S
@@ -96,3 +96,4 @@
bne 2b
nap
b .
+END(enter_idle_powerx)
Index: sys/powerpc/powerpc/setjmp.S
===================================================================
--- sys/powerpc/powerpc/setjmp.S
+++ sys/powerpc/powerpc/setjmp.S
@@ -75,6 +75,7 @@
/* f14-f31, fpscr */
li 3, 0
blr
+ASEND(setjmp)
.extern sigsetmask
@@ -112,3 +113,4 @@
/* f14-f31, fpscr */
mr 3, 4
blr
+ASEND(longjmp)
Index: sys/powerpc/powerpc/support.S
===================================================================
--- sys/powerpc/powerpc/support.S
+++ sys/powerpc/powerpc/support.S
@@ -73,9 +73,11 @@
#endif
#ifdef AIM
-#define ENTRY_DIRECT(x) ENTRY(x ## _direct)
+#define ENTRY_DIRECT(x) ENTRY(x ## _direct)
+#define END_DIRECT(x) END(x ## _direct)
#else
#define ENTRY_DIRECT(x) ENTRY(x)
+#define END_DIRECT(x) END(x)
#endif
#ifdef __powerpc64__
@@ -320,6 +322,7 @@
/* done */
.Lend:
blr
+END(bcopy_generic)
/*
* copyout(from_kernel, to_user, len)
@@ -332,6 +335,7 @@
nop
CLEAR_FAULT(%r7)
EPILOGUE
+END_DIRECT(copyout)
/*
* copyin(from_user, to_kernel, len)
@@ -344,6 +348,8 @@
nop
CLEAR_FAULT(%r7)
EPILOGUE
+END_DIRECT(copyin)
+
/*
* copyinstr(const void *udaddr, void *kaddr, size_t len, size_t *done)
* %r3 %r4 %r5 %r6
@@ -379,6 +385,7 @@
3:
CLEAR_FAULT_NO_CLOBBER(%r7)
EPILOGUE
+END_DIRECT(copyinstr)
ENTRY_DIRECT(subyte)
PROLOGUE
@@ -386,6 +393,7 @@
stb %r4, 0(%r3)
CLEAR_FAULT(%r7)
EPILOGUE
+END_DIRECT(subyte)
#ifndef __powerpc64__
ENTRY_DIRECT(suword)
@@ -394,6 +402,7 @@
stw %r4, 0(%r3)
CLEAR_FAULT(%r7)
EPILOGUE
+END_DIRECT(suword)
#endif
ENTRY_DIRECT(suword32)
@@ -402,6 +411,7 @@
stw %r4, 0(%r3)
CLEAR_FAULT(%r7)
EPILOGUE
+END_DIRECT(suword32)
#ifdef __powerpc64__
ENTRY_DIRECT(suword64)
@@ -410,12 +420,15 @@
std %r4, 0(%r3)
CLEAR_FAULT(%r7)
EPILOGUE
+END_DIRECT(suword64)
+
ENTRY_DIRECT(suword)
PROLOGUE
SET_FUSUFAULT(%r3, %r7)
std %r4, 0(%r3)
CLEAR_FAULT(%r7)
EPILOGUE
+END_DIRECT(suword)
#endif
ENTRY_DIRECT(fubyte)
@@ -424,6 +437,7 @@
lbz %r3, 0(%r3)
CLEAR_FAULT_NO_CLOBBER(%r7)
EPILOGUE
+END_DIRECT(fubyte)
ENTRY_DIRECT(fuword16)
PROLOGUE
@@ -431,6 +445,7 @@
lhz %r3, 0(%r3)
CLEAR_FAULT_NO_CLOBBER(%r7)
EPILOGUE
+END_DIRECT(fuword16)
#ifndef __powerpc64__
ENTRY_DIRECT(fueword)
@@ -440,6 +455,7 @@
stw %r0, 0(%r4)
CLEAR_FAULT(%r7)
EPILOGUE
+END_DIRECT(fueword)
#endif
ENTRY_DIRECT(fueword32)
PROLOGUE
@@ -448,6 +464,7 @@
stw %r0, 0(%r4)
CLEAR_FAULT(%r7)
EPILOGUE
+END_DIRECT(fueword32)
#ifdef __powerpc64__
ENTRY_DIRECT(fueword)
@@ -457,6 +474,7 @@
std %r0, 0(%r4)
CLEAR_FAULT(%r7)
EPILOGUE
+END_DIRECT(fueword)
ENTRY_DIRECT(fueword64)
PROLOGUE
@@ -465,6 +483,7 @@
std %r0, 0(%r4)
CLEAR_FAULT(%r7)
EPILOGUE
+END_DIRECT(fueword64)
#endif
/*
@@ -495,6 +514,7 @@
ENTRY_DIRECT(casueword32)
CASUEWORD32(%r3, %r7)
+END_DIRECT(casueword32)
#ifdef __powerpc64__
#define CASUEWORD64(raddr, rpcb) ;\
@@ -520,20 +540,25 @@
ENTRY_DIRECT(casueword)
CASUEWORD64(%r3, %r7)
+END_DIRECT(casueword)
ENTRY_DIRECT(casueword64)
CASUEWORD64(%r3, %r7)
+END_DIRECT(casueword64)
#else
ENTRY_DIRECT(casueword)
CASUEWORD32(%r3, %r7)
+END_DIRECT(casueword)
#endif
_NAKED_ENTRY(fusufault)
CLEAR_FAULT_NO_CLOBBER(%r7)
li %r3, -1
EPILOGUE
+_END(fusufault)
_NAKED_ENTRY(copy_fault)
CLEAR_FAULT_NO_CLOBBER(%r7)
li %r3, EFAULT
EPILOGUE
+_END(copy_fault)
Index: sys/powerpc/powerpc/swtch32.S
===================================================================
--- sys/powerpc/powerpc/swtch32.S
+++ sys/powerpc/powerpc/swtch32.S
@@ -74,6 +74,7 @@
li %r14,0 /* Tell cpu_switchin not to release a thread */
b cpu_switchin
+END(cpu_throw)
/*
* void cpu_switch(struct thread *old,
@@ -193,6 +194,7 @@
*/
stwcx. %r1, 0, %r3
blr
+END(cpu_switch)
/*
* savectx(pcb)
@@ -206,6 +208,7 @@
mflr %r4 /* Save the link register */
stw %r4,PCB_LR(%r3)
blr
+END(savectx)
/*
* fork_trampoline()
@@ -225,3 +228,4 @@
mtspr SPR_SPEFSCR, %r3
#endif
b trapexit
+END(fork_trampoline)
Index: sys/powerpc/powerpc/swtch64.S
===================================================================
--- sys/powerpc/powerpc/swtch64.S
+++ sys/powerpc/powerpc/swtch64.S
@@ -81,6 +81,7 @@
li %r18,0 /* No old pcb flags. The old thread is extinguished. */
b cpu_switchin
+END(cpu_throw)
/*
* void cpu_switch(struct thread *old,
@@ -304,6 +305,7 @@
*/
stdcx. %r1, 0, %r3
blr
+END(cpu_switch)
/*
* savectx(pcb)
@@ -338,6 +340,7 @@
mflr %r4 /* Save the link register */
std %r4,PCB_LR(%r3)
blr
+END(savectx)
/*
* fork_trampoline()
@@ -358,3 +361,4 @@
a frame pointer/saved LR */
bl trapexit
nop
+END(fork_trampoline)

File Metadata

Mime Type
text/plain
Expires
Fri, May 15, 2:06 PM (18 h, 26 m)
Storage Engine
blob
Storage Format
Raw Data
Storage Handle
33070758
Default Alt Text
D27387.id80041.diff (14 KB)

Event Timeline