
Towards making KEEP_IT_REAL work again.

Fix bug that caused over-allocation of .text16 and .data16 memory areas
by a factor of 16.
Michael Brown 2006-08-24 13:18:05 +00:00
parent cddf8df8d4
commit 6abfaa153b
7 changed files with 262 additions and 34 deletions

View File

@@ -213,6 +213,15 @@ virt_to_user ( void * virtual ) {
#define BASEMEM_PARAMETER_INIT BASEMEM_PARAMETER_INIT_LIBKIR
#define BASEMEM_PARAMETER_DONE BASEMEM_PARAMETER_DONE_LIBKIR
/* TEXT16_CODE: declare a fragment of code that resides in .text16 */
#define TEXT16_CODE( asm_code_str ) \
".section \".text16\", \"ax\", @progbits\n\t" \
".code16\n\t" \
".arch i386\n\t" \
asm_code_str "\n\t" \
".code16gcc\n\t" \
".previous\n\t"
/* REAL_CODE: declare a fragment of code that executes in real mode */
#define REAL_CODE( asm_code_str ) \
".code16\n\t" \

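Under KEEP_IT_REAL, TEXT16_CODE simply brackets the fragment with a section switch: the assembly string is emitted into .text16 and the assembler is returned to .code16gcc for the surrounding compiler-generated code. A minimal sketch of how such a macro is typically consumed from C inline assembly follows; the fragment, label and function name are invented for illustration and are not part of this commit.

/* Hypothetical example only: emit a small real-mode routine into .text16.
 * Assumes the libkir header above is included.
 */
static void declare_hello_rm ( void ) {
        __asm__ __volatile__ ( TEXT16_CODE ( "\nhello_rm:\n\t"
                                             "movw $0x0e48, %%ax\n\t" /* AH=0x0e, AL='H' */
                                             "int $0x10\n\t"          /* BIOS teletype output */
                                             "lret\n\t" )
                               : : );
}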
View File

@@ -177,19 +177,24 @@ extern void remove_from_rm_stack ( void *data, size_t size );
#define BASEMEM_PARAMETER_INIT BASEMEM_PARAMETER_INIT_LIBRM
#define BASEMEM_PARAMETER_DONE BASEMEM_PARAMETER_DONE_LIBRM
/* TEXT16_CODE: declare a fragment of code that resides in .text16 */
#define TEXT16_CODE( asm_code_str ) \
".section \".text16\", \"ax\", @progbits\n\t" \
".code16\n\t" \
".arch i386\n\t" \
asm_code_str "\n\t" \
".code32\n\t" \
".previous\n\t"
/* REAL_CODE: declare a fragment of code that executes in real mode */
#define REAL_CODE( asm_code_str ) \
"pushl $1f\n\t" \
"call real_call\n\t" \
"addl $4, %%esp\n\t" \
".section \".text16\", \"ax\", @progbits\n\t" \
".code16\n\t" \
".arch i386\n\t" \
"\n1:\n\t" \
asm_code_str "\n\t" \
"ret\n\t" \
".code32\n\t" \
".previous\n\t"
TEXT16_CODE ( "\n1:\n\t" \
asm_code_str \
"\n\t" \
"ret\n\t" )
#endif /* ASSEMBLY */
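The librm version of REAL_CODE differs in mechanism rather than in use: the 32-bit caller pushes the address of the 16-bit fragment (the local label 1:) and routes through real_call, which performs the protected-to-real-mode transition, while TEXT16_CODE restores .code32 because the surrounding compiler output is 32-bit. From C the macro is invoked the same way under either memory model; the fragment below is a hedged illustration only (the function name and register constraints are assumptions, not code from this commit).

#include <stdint.h>

/* Hypothetical example only: read the BIOS free base memory count (in KB)
 * via INT 12h from inside a REAL_CODE fragment.
 */
static unsigned int get_fbms_kb ( void ) {
        uint16_t fbms;

        __asm__ __volatile__ ( REAL_CODE ( "int $0x12\n\t" )
                               : "=a" ( fbms ) : );
        return fbms;
}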

View File

@@ -21,9 +21,6 @@ BIN = bin-kir
#
CFLAGS += -DKEEP_IT_REAL -include kir.h
# Link with _data_link_addr = 0; data symbols are relative to the data
# segment.
#
LDFLAGS += --defsym _data_link_addr=0
include Makefile
LDSCRIPT = arch/i386/scripts/i386-kir.lds

View File

@@ -99,11 +99,11 @@ alloc_basemem:
shlw $6, %ax
/* .data16 segment address */
subw $_data16_size, %ax
subw $_data16_size_pgh, %ax
pushw %ax
/* .text16 segment address */
subw $_text16_size, %ax
subw $_text16_size_pgh, %ax
pushw %ax
/* Update FBMS */
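This hunk is the heart of the fix: %ax holds a segment address in 16-byte paragraphs (the free-base-memory size in KB was converted with shlw $6), so subtracting the byte-valued _data16_size and _text16_size carved out sixteen times too much base memory. The new _*_size_pgh symbols are the same sizes rounded up to whole paragraphs. A minimal C sketch of the corrected arithmetic, not part of the diff and with all names invented:

#include <stdint.h>

/* Round a byte count up to whole 16-byte paragraphs, as the linker script
 * now does for _text16_size_pgh and _data16_size_pgh.
 */
static uint16_t bytes_to_pgh ( uint32_t bytes ) {
        return ( bytes + 15 ) / 16;
}

/* fbms_kb: free base memory in KB.  Subtracting paragraph counts (rather
 * than byte counts) from a paragraph-valued segment address yields the
 * intended allocation.
 */
static void place_segments ( uint16_t fbms_kb,
                             uint32_t text16_bytes, uint32_t data16_bytes,
                             uint16_t *text16_seg, uint16_t *data16_seg ) {
        uint16_t top = fbms_kb << 6;    /* KB -> paragraphs, cf. shlw $6 */

        *data16_seg = top - bytes_to_pgh ( data16_bytes );
        *text16_seg = *data16_seg - bytes_to_pgh ( text16_bytes );
}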
@@ -322,7 +322,14 @@ install_prealloc:
/* Install .text16 and .data16 */
call install_basemem
#ifndef KEEP_IT_REAL
#ifdef KEEP_IT_REAL
/* Preserve %ds, call init_libkir, restore registers */
pushw %ds
movw %bx, %ds
movw %ax, (init_libkir_vector+2)
lcall *init_libkir_vector
popw %ds
#else
/* Preserve registers and interrupt status, and disable interrupts */
pushfw
pushw %ds
@@ -379,9 +386,14 @@ install_prealloc:
ret
.size install_prealloc, . - install_prealloc
#ifndef KEEP_IT_REAL
/* Vectors for far calls to .text16 functions */
.section ".data16"
#ifdef KEEP_IT_REAL
init_libkir_vector:
.word init_libkir
.word 0
.size init_libkir_vector, . - init_libkir_vector
#else
init_librm_vector:
.word init_librm
.word 0
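Because the location of .text16 is only decided at run time, install_prealloc patches that segment into the second word of init_libkir_vector (the offset word is filled in at link time) before making the indirect far call. A C-level view of such a 4-byte far-call vector, purely as illustration (the struct and field names are invented):

#include <stdint.h>

/* Illustrative layout of a real-mode far-call vector such as
 * init_libkir_vector: "lcall *vector" transfers control to segment:offset.
 */
struct far_vector {
        uint16_t offset;        /* filled at link time, e.g. .word init_libkir */
        uint16_t segment;       /* patched at run time: movw %ax, (vector+2) */
};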

View File

@@ -0,0 +1,192 @@
/* -*- sh -*- */
/*
* Linker script for i386 images
*
*/
OUTPUT_FORMAT ( "elf32-i386", "elf32-i386", "elf32-i386" )
OUTPUT_ARCH ( i386 )
ENTRY ( _entry )
SECTIONS {
/* All sections in the resulting file have consecutive load
* addresses, but may have individual link addresses depending on
* the memory model being used.
*
* The linker symbols _prefix_link_addr, load_addr, and
* _max_align may be specified explicitly. If not specified, they
* will default to:
*
* _prefix_link_addr = 0
* _load_addr = 0
* _max_align = 16
*
* We guarantee alignment of virtual addresses to any alignment
* specified by the constituent object files (e.g. via
* __attribute__((aligned(x)))). Load addresses are guaranteed
* only up to _max_align. Provided that all loader and relocation
* code honours _max_align, this means that physical addresses are
* also guaranteed up to _max_align.
*
* Note that when using -DKEEP_IT_REAL, the UNDI segments are only
* guaranteed to be loaded on a paragraph boundary (i.e. 16-byte
* alignment). Using _max_align>16 will therefore not guarantee
* >16-byte alignment of physical addresses when -DKEEP_IT_REAL is
* used (though virtual addresses will still be fully aligned).
*
*/
/*
* The prefix
*/
_prefix_link_addr = DEFINED ( _prefix_link_addr ) ? _prefix_link_addr : 0;
. = _prefix_link_addr;
_prefix = .;
.prefix : AT ( _prefix_load_offset + __prefix ) {
__prefix = .;
_entry = .;
*(.prefix)
*(.prefix.*)
_eprefix_progbits = .;
}
_eprefix = .;
/*
* The 16-bit sections
*/
_text16_link_addr = 0;
. = _text16_link_addr;
_text16 = .;
.text16 : AT ( _text16_load_offset + __text16 ) {
__text16 = .;
*(.text.null_trap)
*(.text16)
*(.text16.*)
*(.text)
*(.text.*)
_etext16_progbits = .;
} = 0x9090
_etext16 = .;
_data16_link_addr = 0;
. = _data16_link_addr;
_data16 = .;
.rodata16 : AT ( _data16_load_offset + __rodata16 ) {
__rodata16 = .;
*(.rodata16)
*(.rodata16.*)
*(.rodata)
*(.rodata.*)
}
.data16 : AT ( _data16_load_offset + __data16 ) {
__data16 = .;
*(.data16)
*(.data16.*)
*(.data)
*(.data.*)
*(SORT(.tbl.*)) /* Various tables. See include/tables.h */
_edata16_progbits = .;
}
.bss16 : AT ( _data16_load_offset + __bss16 ) {
__bss16 = .;
_bss16 = .;
*(.bss16)
*(.bss16.*)
*(.bss)
*(.bss.*)
*(COMMON)
_ebss16 = .;
}
.stack16 : AT ( _data16_load_offset + __stack16 ) {
__stack16 = .;
*(.stack16)
*(.stack16.*)
*(.stack)
*(.stack.*)
}
_edata16 = .;
_end = .;
/*
* Dispose of the comment and note sections to make the link map
* easier to read
*/
/DISCARD/ : {
*(.comment)
*(.note)
}
/*
* Load address calculations. The slightly obscure nature of the
* calculations is because ALIGN(x) can only operate on the
* location counter.
*/
_max_align = DEFINED ( _max_align ) ? _max_align : 16;
_load_addr = DEFINED ( _load_addr ) ? _load_addr : 0;
. = _load_addr;
. -= _prefix_link_addr;
_prefix_load_offset = ALIGN ( _max_align );
_prefix_load_addr = _prefix_link_addr + _prefix_load_offset;
_prefix_size = _eprefix - _prefix;
_prefix_progbits_size = _eprefix_progbits - _prefix;
. = _prefix_load_addr + _prefix_progbits_size;
. -= _text16_link_addr;
_text16_load_offset = ALIGN ( _max_align );
_text16_load_addr = _text16_link_addr + _text16_load_offset;
_text16_size = _etext16 - _text16;
_text16_progbits_size = _etext16_progbits - _text16;
. = _text16_load_addr + _text16_progbits_size;
. -= _data16_link_addr;
_data16_load_offset = ALIGN ( _max_align );
_data16_load_addr = _data16_link_addr + _data16_load_offset;
_data16_size = _edata16 - _data16;
_data16_progbits_size = _edata16_progbits - _data16;
. = _data16_load_addr + _data16_progbits_size;
. = ALIGN ( _max_align );
_load_size = . - _load_addr;
/*
* Alignment checks. ALIGN() can only operate on the location
* counter, so we set the location counter to each value we want
* to check.
*/
. = _prefix_load_addr - _prefix_link_addr;
_assert = ASSERT ( ( . == ALIGN ( _max_align ) ),
"_prefix is badly aligned" );
. = _text16_load_addr - _text16_link_addr;
_assert = ASSERT ( ( . == ALIGN ( _max_align ) ),
"_text16 is badly aligned" );
. = _data16_load_addr - _data16_link_addr;
_assert = ASSERT ( ( . == ALIGN ( _max_align ) ),
"_data16 is badly aligned" );
/*
* Values calculated to save code from doing it
*/
_text16_size_pgh = ( ( _text16_size + 15 ) / 16 );
_data16_size_pgh = ( ( _data16_size + 15 ) / 16 );
_load_size_pgh = ( ( _load_size + 15 ) / 16 );
_rom_size = ( ( _load_size + 511 ) / 512 );
}
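The load-offset calculations above look obscure because ALIGN() in a linker script can only act on the location counter: for each section the script sets "." to the current load cursor minus the section's link address, aligns that to _max_align to obtain the load offset, and then advances the cursor past the section's progbits. A rough C restatement of that bookkeeping, purely illustrative (function and variable names are invented):

#include <stdint.h>

/* Round x up to a multiple of align (align must be a power of two). */
static uint32_t align_up ( uint32_t x, uint32_t align ) {
        return ( x + align - 1 ) & ~( align - 1 );
}

/* Mirror of the per-section pattern in the script:
 *   . -= _xxx_link_addr;
 *   _xxx_load_offset = ALIGN ( _max_align );
 *   _xxx_load_addr = _xxx_link_addr + _xxx_load_offset;
 *   . = _xxx_load_addr + _xxx_progbits_size;
 */
static uint32_t place_section ( uint32_t *cursor, uint32_t link_addr,
                                uint32_t progbits_size, uint32_t max_align ) {
        uint32_t load_offset = align_up ( *cursor - link_addr, max_align );
        uint32_t load_addr = link_addr + load_offset;

        *cursor = load_addr + progbits_size;
        return load_addr;
}

The _*_size_pgh and _rom_size values at the end of the script are simply the corresponding byte sizes rounded up to whole 16-byte paragraphs and 512-byte sectors, precomputed so the assembly code never has to repeat the division.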

View File

@@ -15,7 +15,7 @@ SECTIONS {
* addresses, but may have individual link addresses depending on
* the memory model being used.
*
* The linker symbols {prefix,text}_link_addr, load_addr, and
* The linker symbols _{prefix,textdata}_link_addr, load_addr, and
* _max_align may be specified explicitly. If not specified, they
* will default to:
*
@@ -236,6 +236,8 @@ SECTIONS {
/*
* Values calculated to save code from doing it
*/
_load_size_pgh = ( _load_size / 16 );
_text16_size_pgh = ( ( _text16_size + 15 ) / 16 );
_data16_size_pgh = ( ( _data16_size + 15 ) / 16 );
_load_size_pgh = ( ( _load_size + 15 ) / 16 );
_rom_size = ( ( _load_size + 511 ) / 512 );
}

View File

@@ -34,6 +34,23 @@
.section ".text16", "awx", @progbits
.code16
/****************************************************************************
* init_libkir (real-mode or 16:xx protected-mode far call)
*
* Initialise libkir ready for transitions to the kir environment
*
* Parameters:
* %cs : .text16 segment
* %ds : .data16 segment
****************************************************************************
*/
.globl init_libkir
init_libkir:
/* Record segment registers */
pushw %ds
popw %cs:kir_ds
lret
/****************************************************************************
* ext_to_kir (real-mode or 16:xx protected-mode near call)
*
@@ -45,10 +62,6 @@
* %cs:0000 must point to the start of the runtime image code segment
* on entry.
*
* Note that this routine can be called *without* having first set up
* a stored kir_ds and kir_sp. If you do this, ext_to_kir will return
* without altering the segment registers or stack pointer.
*
* Parameters: none
****************************************************************************
*/
@@ -73,10 +86,8 @@ ext_to_kir:
movw %ss, %ds:ext_ss
movl %esp, %ds:ext_esp
/* Load internal segment registers and stack pointer, if available */
/* Load internal segment registers and stack pointer */
movw %ds:kir_ds, %ax
testw %ax, %ax
jz 1f
movw %ax, %ss
movzwl %ds:kir_sp, %esp
movw %ax, %ds
@@ -144,12 +155,12 @@ kir_to_ext:
* will also be preserved.
*
* Parameters:
* function : (16-bit) virtual address of protected-mode function to call
* function : (32-bit) virtual address of C function to call
*
* Example usage:
* pushw $pxe_api_call
* pushl $pxe_api_call
* lcall $UNDI_CS, $kir_call
* addw $2, %sp
* addw $4, %sp
* to call in to the C function
* void pxe_api_call ( struct i386_all_regs *ix86 );
****************************************************************************
@@ -157,7 +168,6 @@ kir_to_ext:
.globl kir_call
kir_call:
/* Preserve flags. Must do this before any operation that may
* affect flags.
*/
@@ -174,8 +184,8 @@ kir_call:
* either a 16-bit or a 32-bit stack segment.
*/
popl %cs:save_retaddr /* Scratch location */
popw %cs:save_function
subl $6, %esp /* Restore %esp */
popl %cs:save_function
subl $8, %esp /* Restore %esp */
/* Switch to internal stack. Note that the external stack is
* inaccessible once we're running internally (since we have
@@ -191,6 +201,7 @@ kir_call:
pushl %cs:ext_cs_and_ss
/* Push &ix86 on stack and call function */
sti
pushl %esp
data32 call *%cs:save_function
popl %eax /* discard */
@@ -231,13 +242,13 @@ ext_esp: .long 0
.globl kir_ds
kir_ds: .word 0
.globl kir_sp
kir_sp: .word 0
kir_sp: .word _estack
/****************************************************************************
* Temporary variables
****************************************************************************
*/
save_ax: .word 0
save_retaddr: .word 0
save_retaddr: .long 0
save_flags: .long 0
save_function: .long 0