Diffstat (limited to 'arch')
-rw-r--r-- | arch/x86/boot/compressed/head_32.S     | 24
-rw-r--r-- | arch/x86/boot/compressed/head_64.S     | 57
-rw-r--r-- | arch/x86/boot/compressed/vmlinux.lds.S |  4
3 files changed, 5 insertions, 80 deletions
diff --git a/arch/x86/boot/compressed/head_32.S b/arch/x86/boot/compressed/head_32.S
index 03557f2174bf..39f0bb43218f 100644
--- a/arch/x86/boot/compressed/head_32.S
+++ b/arch/x86/boot/compressed/head_32.S
@@ -49,16 +49,13 @@
  * Position Independent Executable (PIE) so that linker won't optimize
  * R_386_GOT32X relocation to its fixed symbol address. Older
  * linkers generate R_386_32 relocations against locally defined symbols,
- * _bss, _ebss, _got, _egot and _end, in PIE. It isn't wrong, just less
- * optimal than R_386_RELATIVE. But the x86 kernel fails to properly handle
- * R_386_32 relocations when relocating the kernel. To generate
- * R_386_RELATIVE relocations, we mark _bss, _ebss, _got, _egot and _end as
- * hidden:
+ * _bss, _ebss and _end, in PIE. It isn't wrong, just less optimal than
+ * R_386_RELATIVE. But the x86 kernel fails to properly handle R_386_32
+ * relocations when relocating the kernel. To generate R_386_RELATIVE
+ * relocations, we mark _bss, _ebss and _end as hidden:
  */
 	.hidden _bss
 	.hidden _ebss
-	.hidden _got
-	.hidden _egot
 	.hidden _end
 
 	__HEAD
@@ -193,19 +190,6 @@ SYM_FUNC_START_LOCAL_NOALIGN(.Lrelocated)
 	rep	stosl
 
 /*
- * Adjust our own GOT
- */
-	leal	_got(%ebx), %edx
-	leal	_egot(%ebx), %ecx
-1:
-	cmpl	%ecx, %edx
-	jae	2f
-	addl	%ebx, (%edx)
-	addl	$4, %edx
-	jmp	1b
-2:
-
-/*
  * Do the extraction, and jump to the new kernel..
  */
 				/* push arguments for extract_kernel: */
diff --git a/arch/x86/boot/compressed/head_64.S b/arch/x86/boot/compressed/head_64.S
index 97d37f0a34f5..bf1ab30acc5b 100644
--- a/arch/x86/boot/compressed/head_64.S
+++ b/arch/x86/boot/compressed/head_64.S
@@ -40,8 +40,6 @@
  */
 	.hidden _bss
 	.hidden _ebss
-	.hidden _got
-	.hidden _egot
 	.hidden _end
 
 	__HEAD
@@ -354,25 +352,6 @@ SYM_CODE_START(startup_64)
 	leaq	boot_stack_end(%rbx), %rsp
 
 	/*
-	 * paging_prepare() and cleanup_trampoline() below can have GOT
-	 * references. Adjust the table with address we are running at.
-	 *
-	 * Zero RAX for adjust_got: the GOT was not adjusted before;
-	 * there's no adjustment to undo.
-	 */
-	xorq	%rax, %rax
-
-	/*
-	 * Calculate the address the binary is loaded at and use it as
-	 * a GOT adjustment.
-	 */
-	call	1f
-1:	popq	%rdi
-	subq	$1b, %rdi
-
-	call	.Ladjust_got
-
-	/*
 	 * At this point we are in long mode with 4-level paging enabled,
 	 * but we might want to enable 5-level paging or vice versa.
 	 *
@@ -464,21 +443,6 @@ trampoline_return:
 	pushq	$0
 	popfq
-	/*
-	 * Previously we've adjusted the GOT with address the binary was
-	 * loaded at. Now we need to re-adjust for relocation address.
-	 *
-	 * Calculate the address the binary is loaded at, so that we can
-	 * undo the previous GOT adjustment.
-	 */
-	call	1f
-1:	popq	%rax
-	subq	$1b, %rax
-
-	/* The new adjustment is the relocation address */
-	movq	%rbx, %rdi
-	call	.Ladjust_got
-
 	/*
 	 * Copy the compressed kernel to the end of our buffer
 	 * where decompression in place becomes safe.
 	 *
@@ -556,27 +520,6 @@ SYM_FUNC_START_LOCAL_NOALIGN(.Lrelocated)
 	jmp	*%rax
 SYM_FUNC_END(.Lrelocated)
 
-/*
- * Adjust the global offset table
- *
- * RAX is the previous adjustment of the table to undo (use 0 if it's the
- * first time we touch GOT).
- * RDI is the new adjustment to apply.
- */
-.Ladjust_got:
-	/* Walk through the GOT adding the address to the entries */
-	leaq	_got(%rip), %rdx
-	leaq	_egot(%rip), %rcx
-1:
-	cmpq	%rcx, %rdx
-	jae	2f
-	subq	%rax, (%rdx)	/* Undo previous adjustment */
-	addq	%rdi, (%rdx)	/* Apply the new adjustment */
-	addq	$8, %rdx
-	jmp	1b
-2:
-	ret
-
 	.code32
 /*
  * This is the 32-bit trampoline that will be copied over to low memory.
diff --git a/arch/x86/boot/compressed/vmlinux.lds.S b/arch/x86/boot/compressed/vmlinux.lds.S
index 4bcc943842ab..a4a4a59a2628 100644
--- a/arch/x86/boot/compressed/vmlinux.lds.S
+++ b/arch/x86/boot/compressed/vmlinux.lds.S
@@ -43,9 +43,7 @@ SECTIONS
 		_erodata = . ;
 	}
 	.got : {
-		_got = .;
-		KEEP(*(.got))
-		_egot = .;
+		*(.got)
 	}
 	.got.plt : {
 		*(.got.plt)
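The comment kept in head_32.S explains why _bss, _ebss and _end stay marked hidden even after the GOT walk is removed. A minimal stand-alone sketch of that effect, not part of the patch and assuming a GNU toolchain and a 32-bit PIE link (the file name and the "probe" label are made up for illustration): an absolute reference to a default-visibility symbol defined by the linker script can come out as an R_386_32 dynamic relocation, which the kernel's relocation handling does not cope with, while the same reference to a hidden symbol is emitted as R_386_RELATIVE. Inspecting the linked image with "readelf -r" shows which relocation type was produced.

	/* reloc-probe.S: hypothetical illustration only, not from this commit */
	.hidden	_end		/* drop this line and older linkers emit R_386_32 instead */
	.section .data
probe:
	.long	_end		/* 32-bit absolute reference: needs a base relocation in PIE */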