
LoongArch: Add la_abs macro implementation
Author:     Youling Tang <tangyouling@loongson.cn>
AuthorDate: Sat, 25 Feb 2023 07:52:56 +0000 (15:52 +0800)
Commit:     Huacai Chen <chenhuacai@loongson.cn>
CommitDate: Sat, 25 Feb 2023 14:12:16 +0000 (22:12 +0800)
Use the "la_abs macro" instead of the "la.abs pseudo instruction" to
prepare for the subsequent PIE kernel. When PIE is not enabled, la_abs
is equivalent to la.abs.

Signed-off-by: Youling Tang <tangyouling@loongson.cn>
Signed-off-by: Huacai Chen <chenhuacai@loongson.cn>
arch/loongarch/include/asm/asmmacro.h
arch/loongarch/include/asm/stackframe.h
arch/loongarch/kernel/genex.S
arch/loongarch/mm/tlbex.S
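
For context, here is a minimal sketch of how la_abs might later be
redefined once PIE kernel support lands. Everything below is an
assumption for illustration only: the CONFIG_RELOCATABLE guard, the
".la_abs" section name, and the boot-time fixup they imply are not part
of this patch. The idea is that a non-PIE build keeps the plain la.abs
expansion (exactly what the asmmacro.h hunk below adds), while a PIE
build would emit a patchable immediate-load sequence and record each
site in a table for early relocation code to fix up:

	/* Hypothetical PIE-aware variant; NOT what this patch adds. */
	.macro la_abs reg, sym
	#ifndef CONFIG_RELOCATABLE		/* assumed config symbol */
		la.abs	\reg, \sym		/* non-PIE: unchanged behaviour */
	#else
	766:	/* zero immediates are placeholders, patched at boot */
		lu12i.w	\reg, 0			/* bits 31..12 */
		ori	\reg, \reg, 0		/* bits 11..0  */
		lu32i.d	\reg, 0			/* bits 51..32 */
		lu52i.d	\reg, \reg, 0		/* bits 63..52 */
		/* record <site, symbol> so boot code can install the real address */
		.pushsection ".la_abs", "aw", %progbits
		.dword	766b
		.dword	\sym
		.popsection
	#endif
	.endm

Either way, call sites such as "la_abs t1, kernelsp" stay unchanged,
which is the point of introducing the macro now.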

diff --git a/arch/loongarch/include/asm/asmmacro.h b/arch/loongarch/include/asm/asmmacro.h
index be037a4..cdc9935 100644
--- a/arch/loongarch/include/asm/asmmacro.h
+++ b/arch/loongarch/include/asm/asmmacro.h
        nor     \dst, \src, zero
 .endm
 
+.macro la_abs reg, sym
+       la.abs  \reg, \sym
+.endm
+
 #endif /* _ASM_ASMMACRO_H */
diff --git a/arch/loongarch/include/asm/stackframe.h b/arch/loongarch/include/asm/stackframe.h
index 0274e70..7df80e6 100644
--- a/arch/loongarch/include/asm/stackframe.h
+++ b/arch/loongarch/include/asm/stackframe.h
@@ -86,7 +86,7 @@
  * new value in sp.
  */
        .macro  get_saved_sp docfi=0
-       la.abs    t1, kernelsp
+       la_abs    t1, kernelsp
 #ifdef CONFIG_SMP
        csrrd     t0, PERCPU_BASE_KS
        LONG_ADD  t1, t1, t0
diff --git a/arch/loongarch/kernel/genex.S b/arch/loongarch/kernel/genex.S
index 7e5c293..44ff1ff 100644
--- a/arch/loongarch/kernel/genex.S
+++ b/arch/loongarch/kernel/genex.S
@@ -34,7 +34,7 @@ SYM_FUNC_END(__arch_cpu_idle)
 SYM_FUNC_START(handle_vint)
        BACKUP_T0T1
        SAVE_ALL
-       la.abs  t1, __arch_cpu_idle
+       la_abs  t1, __arch_cpu_idle
        LONG_L  t0, sp, PT_ERA
        /* 32 byte rollback region */
        ori     t0, t0, 0x1f
@@ -43,7 +43,7 @@ SYM_FUNC_START(handle_vint)
        LONG_S  t0, sp, PT_ERA
 1:     move    a0, sp
        move    a1, sp
-       la.abs  t0, do_vint
+       la_abs  t0, do_vint
        jirl    ra, t0, 0
        RESTORE_ALL_AND_RET
 SYM_FUNC_END(handle_vint)
@@ -72,7 +72,7 @@ SYM_FUNC_END(except_vec_cex)
        SAVE_ALL
        build_prep_\prep
        move    a0, sp
-       la.abs  t0, do_\handler
+       la_abs  t0, do_\handler
        jirl    ra, t0, 0
        668:
        RESTORE_ALL_AND_RET
@@ -93,6 +93,6 @@ SYM_FUNC_END(except_vec_cex)
        BUILD_HANDLER reserved reserved none    /* others */
 
 SYM_FUNC_START(handle_sys)
-       la.abs  t0, handle_syscall
+       la_abs  t0, handle_syscall
        jr      t0
 SYM_FUNC_END(handle_sys)
diff --git a/arch/loongarch/mm/tlbex.S b/arch/loongarch/mm/tlbex.S
index 3dd2a96..244e2f5 100644
--- a/arch/loongarch/mm/tlbex.S
+++ b/arch/loongarch/mm/tlbex.S
@@ -39,7 +39,7 @@ SYM_FUNC_START(handle_tlb_protect)
        move            a1, zero
        csrrd           a2, LOONGARCH_CSR_BADV
        REG_S           a2, sp, PT_BVADDR
-       la.abs          t0, do_page_fault
+       la_abs          t0, do_page_fault
        jirl            ra, t0, 0
        RESTORE_ALL_AND_RET
 SYM_FUNC_END(handle_tlb_protect)
@@ -115,7 +115,7 @@ smp_pgtable_change_load:
 
 #ifdef CONFIG_64BIT
 vmalloc_load:
-       la.abs          t1, swapper_pg_dir
+       la_abs          t1, swapper_pg_dir
        b               vmalloc_done_load
 #endif
 
@@ -186,7 +186,7 @@ tlb_huge_update_load:
 nopage_tlb_load:
        dbar            0
        csrrd           ra, EXCEPTION_KS2
-       la.abs          t0, tlb_do_page_fault_0
+       la_abs          t0, tlb_do_page_fault_0
        jr              t0
 SYM_FUNC_END(handle_tlb_load)
 
@@ -262,7 +262,7 @@ smp_pgtable_change_store:
 
 #ifdef CONFIG_64BIT
 vmalloc_store:
-       la.abs          t1, swapper_pg_dir
+       la_abs          t1, swapper_pg_dir
        b               vmalloc_done_store
 #endif
 
@@ -335,7 +335,7 @@ tlb_huge_update_store:
 nopage_tlb_store:
        dbar            0
        csrrd           ra, EXCEPTION_KS2
-       la.abs          t0, tlb_do_page_fault_1
+       la_abs          t0, tlb_do_page_fault_1
        jr              t0
 SYM_FUNC_END(handle_tlb_store)
 
@@ -410,7 +410,7 @@ smp_pgtable_change_modify:
 
 #ifdef CONFIG_64BIT
 vmalloc_modify:
-       la.abs          t1, swapper_pg_dir
+       la_abs          t1, swapper_pg_dir
        b               vmalloc_done_modify
 #endif
 
@@ -482,7 +482,7 @@ tlb_huge_update_modify:
 nopage_tlb_modify:
        dbar            0
        csrrd           ra, EXCEPTION_KS2
-       la.abs          t0, tlb_do_page_fault_1
+       la_abs          t0, tlb_do_page_fault_1
        jr              t0
 SYM_FUNC_END(handle_tlb_modify)