author		David S. Miller <davem@davemloft.net>	2009-02-09 07:00:55 +0100
committer	David S. Miller <davem@davemloft.net>	2009-02-09 07:00:55 +0100
commit		40bdac7dbc161639a498697f34fbd1ee800e51f4
tree		5eef654722ba36d2921cafd8440d5e346f0fbdb5 /arch/sparc/kernel/head_64.S
parent		sparc64: Don't hook up pcr_ops on spitfire chips.
sparc64: Kill .fixup section bloat.
This is an implementation of a suggestion made by Chris Torek:

--------------------
Something else I noticed in passing: the EX and EX_LD/EX_ST macros
scattered throughout the various .S files make a fair bit of .fixup
code, all of which does the same thing.

At the cost of one symbol in copy_in_user.S, you could just have one
common two-instruction retl-and-mov-1 fixup that they all share.
--------------------

The following is with a defconfig build:

   text	   data	    bss	    dec	    hex	filename
3972767	 344024	 584449	4901240	 4ac978	vmlinux.orig
3968887	 344024	 584449	4897360	 4aba50	vmlinux

Signed-off-by: David S. Miller <davem@davemloft.net>
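For illustration, a rough sketch of what this enables on the consumer
side. The macro names and bodies below are not the verbatim ones from
arch/sparc/lib/*.S (those vary per file); they only show the shape of
the change: instead of each EX_LD/EX_ST site emitting its own private
.fixup stub, the exception-table entry can point straight at the
shared stub defined in head_64.S.

/* Old shape (sketch): every expansion emits a private two-instruction
 * .fixup stub and an exception-table entry pointing at it. */
#define EX_LD_OLD(x)			\
98:	x;				\
	.section .fixup;		\
	.align 4;			\
99:	retl;				\
	 mov	1, %o0;			\
	.section __ex_table,"a";	\
	.align 4;			\
	.word 98b, 99b;			\
	.text;				\
	.align 4;

/* New shape (sketch): the exception-table entry names the single
 * shared __retl_one stub, so no per-site .fixup code is generated. */
#define EX_LD_NEW(x)			\
98:	x;				\
	.section __ex_table,"a";	\
	.align 4;			\
	.word 98b, __retl_one;		\
	.text;				\
	.align 4;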
Diffstat (limited to 'arch/sparc/kernel/head_64.S')
-rw-r--r--	arch/sparc/kernel/head_64.S	31
1 file changed, 28 insertions(+), 3 deletions(-)
diff --git a/arch/sparc/kernel/head_64.S b/arch/sparc/kernel/head_64.S
index 8ffee714f932..a46c3a21e26d 100644
--- a/arch/sparc/kernel/head_64.S
+++ b/arch/sparc/kernel/head_64.S
@@ -891,10 +891,35 @@ prom_tba:	.xword	0
 tlb_type:	.word	0	/* Must NOT end up in BSS */
 	.section	".fixup",#alloc,#execinstr
 
-	.globl	__ret_efault, __retl_efault
-__ret_efault:
+	.globl	__ret_efault, __retl_efault, __ret_one, __retl_one
+ENTRY(__ret_efault)
 	ret
 	 restore %g0, -EFAULT, %o0
-__retl_efault:
+ENDPROC(__ret_efault)
+
+ENTRY(__retl_efault)
 	retl
 	 mov	-EFAULT, %o0
+ENDPROC(__retl_efault)
+
+ENTRY(__retl_one)
+	retl
+	 mov	1, %o0
+ENDPROC(__retl_one)
+
+ENTRY(__ret_one_asi)
+	wr	%g0, ASI_AIUS, %asi
+	ret
+	 restore %g0, 1, %o0
+ENDPROC(__ret_one_asi)
+
+ENTRY(__retl_one_asi)
+	wr	%g0, ASI_AIUS, %asi
+	retl
+	 mov	1, %o0
+ENDPROC(__retl_one_asi)
+
+ENTRY(__retl_o1)
+	retl
+	 mov	%o1, %o0
+ENDPROC(__retl_o1)
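The shared stubs above come in a few flavors: __retl_one returns 1 in
%o0 via retl; __ret_one_asi and __retl_one_asi first rewind %asi to
ASI_AIUS before returning 1, for copy routines that switch %asi while
copying; __retl_o1 returns whatever value is already sitting in %o1.
As a hypothetical example (not a macro from this patch), a wrapper for
an %asi-based user access could name the ASI-restoring stub in its
exception-table entry like this:

/* Sketch only: on a fault in the wrapped instruction, control goes to
 * the shared __retl_one_asi stub, which resets %asi to ASI_AIUS and
 * returns 1 in %o0. */
#define EX_LD_ASI(x)			\
98:	x;				\
	.section __ex_table,"a";	\
	.align 4;			\
	.word 98b, __retl_one_asi;	\
	.text;				\
	.align 4;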