is ELF" world. Eliminate use of them in m88k code.
ok aoyama@
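
For background, both macros date from the a.out-to-ELF transition, where a.out prefixed every C symbol with an underscore. A sketch of their historical shape (not the exact header text):

	#ifdef __ELF__
	#define _C_LABEL(x)	x		/* ELF: C symbols are unprefixed */
	#else
	#define _C_LABEL(x)	_ ## x		/* a.out: prepend an underscore */
	#endif
	#define _ASM_LABEL(x)	x		/* asm-only labels never got one */

On ELF both expand to their argument, so deleting them is a pure cleanup with no object-code change.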
-/* $OpenBSD: divsi3.S,v 1.3 2013/06/05 22:48:21 miod Exp $ */
+/* $OpenBSD: divsi3.S,v 1.4 2022/12/06 18:50:59 guenther Exp $ */
/*
* Copyright (c) 2013 Miodrag Vallat.
*/
#ifndef LDSO
-.weak _C_LABEL(__divsi3)
+.weak __divsi3
#endif
ENTRY(__divsi3)
/* if divider is zero, trap */
-/* $OpenBSD: modsi3.S,v 1.2 2013/06/05 22:48:21 miod Exp $ */
+/* $OpenBSD: modsi3.S,v 1.3 2022/12/06 18:50:59 guenther Exp $ */
/*
* Copyright (c) 2013 Miodrag Vallat.
*/
#ifndef LDSO
-.weak _C_LABEL(__modsi3)
+.weak __modsi3
#endif
ENTRY(__modsi3)
/* if divider is zero, trap */
-/* $OpenBSD: udivsi3.S,v 1.3 2013/06/05 22:48:21 miod Exp $ */
+/* $OpenBSD: udivsi3.S,v 1.4 2022/12/06 18:50:59 guenther Exp $ */
/*
* Copyright (c) 2013 Miodrag Vallat.
*/
#ifndef LDSO
-.weak _C_LABEL(__udivsi3)
+.weak __udivsi3
#endif
ENTRY(__udivsi3)
/* if divider is zero, trap */
-/* $OpenBSD: umodsi3.S,v 1.2 2013/06/05 22:48:21 miod Exp $ */
+/* $OpenBSD: umodsi3.S,v 1.3 2022/12/06 18:50:59 guenther Exp $ */
/*
* Copyright (c) 2013 Miodrag Vallat.
*/
#ifndef LDSO
-.weak _C_LABEL(__umodsi3)
+.weak __umodsi3
#endif
ENTRY(__umodsi3)
/* if divider is zero, trap */
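
Note that all four helpers stay .weak outside the LDSO build, so a strong definition of the same name elsewhere still wins at link time. A minimal C-level sketch of that override behaviour, using a hypothetical symbol name:

	/* divsi3_sketch is hypothetical; __attribute__((weak)) is the
	 * C-level spelling of the .weak directive (GCC/Clang assumed). */
	__attribute__((weak)) int
	divsi3_sketch(int n, int d)
	{
		return n / d;	/* fallback; a strong definition replaces it */
	}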
-/* $OpenBSD: locore.S,v 1.32 2017/07/23 09:17:17 aoyama Exp $ */
+/* $OpenBSD: locore.S,v 1.33 2022/12/06 18:50:59 guenther Exp $ */
/*
* Copyright (c) 1998 Steve Murphree, Jr.
* Copyright (c) 1996 Nivas Madhur
GLOBAL(delay)
bcnd eq0, %r2, 2f
- or.u %r3, %r0, %hi16(_C_LABEL(cpuspeed))
- ld %r3, %r3, %lo16(_C_LABEL(cpuspeed))
+ or.u %r3, %r0, %hi16(cpuspeed)
+ ld %r3, %r3, %lo16(cpuspeed)
mul %r4, %r2, %r3
subu %r4, %r4, 4 /* overhead of these instructions */
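
delay() scales the microsecond count by the CPU clock and subtracts the fixed setup cost before entering the (elided) busy loop. A rough C model, assuming cpuspeed is in MHz and the loop retires one count per cycle:

	extern int cpuspeed;			/* CPU clock rate, MHz */

	void
	delay_sketch(unsigned int usec)
	{
		volatile unsigned int n;

		if (usec == 0)
			return;			/* the bcnd eq0 early exit */
		for (n = usec * cpuspeed - 4; n != 0; n--)
			;			/* burn cycles */
	}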
-/* $OpenBSD: locore0.S,v 1.5 2022/10/14 20:53:18 aoyama Exp $ */
+/* $OpenBSD: locore0.S,v 1.6 2022/12/06 18:50:59 guenther Exp $ */
/*
* Copyright (c) 1998 Steve Murphree, Jr.
* Copyright (c) 1996 Nivas Madhur
* A few identical jump instructions to make sure the pipeline is
* in a good state. Probably overkill, but it's cheap.
*/
- br _ASM_LABEL(main_start)
- br _ASM_LABEL(main_start)
- br _ASM_LABEL(main_start)
- br _ASM_LABEL(main_start)
+ br main_start
+ br main_start
+ br main_start
+ br main_start
/*
* Startup code common to all processors.
cmp %r11, %r2, %r10
bb1 ne, %r11, 1f
- or.u %r10, %r0, %hi16(_C_LABEL(esym))
- st %r3, %r10, %lo16(_C_LABEL(esym))
+ or.u %r10, %r0, %hi16(esym)
+ st %r3, %r10, %lo16(esym)
1:
#endif
or %r24, %r0, %r4 /* save bootdev */
or %r25, %r0, %r5 /* save boothowto */
- bsr _ASM_LABEL(setup_psr)
+ bsr setup_psr
stcr %r0, VBR /* start with VBR set to zero */
FLUSH_PIPELINE
* Now we will compete with the other processors to see which one
* will be elected as the main one.
*/
- or.u %r11, %r0, %hi16(_C_LABEL(cpu_hatch_mutex))
- or %r11, %r11, %lo16(_C_LABEL(cpu_hatch_mutex))
+ or.u %r11, %r0, %hi16(cpu_hatch_mutex)
+ or %r11, %r11, %lo16(cpu_hatch_mutex)
1:
FLUSH_PIPELINE
or %r22, %r0, 1
4:
/* now try to grab the master_mpu prize */
FLUSH_PIPELINE
- or.u %r11, %r0, %hi16(_ASM_LABEL(master_mpu))
- or %r11, %r11, %lo16(_ASM_LABEL(master_mpu))
+ or.u %r11, %r0, %hi16(master_mpu)
+ or %r11, %r11, %lo16(master_mpu)
or %r22, %r0, 1
xmem %r22, %r11, %r0
* Note that we haven't released the interprocessor lock....
* We'll do that when we're ready for another CPU to go.
*/
- bcnd ne0, %r22, _ASM_LABEL(secondary_init)
+ bcnd ne0, %r22, secondary_init
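
xmem is the m88k atomic register/memory exchange, so the election is a one-instruction test-and-set: the first CPU to swap a 1 into master_mpu reads back 0 and becomes the main processor; everyone else reads back 1 and branches to secondary_init. As a C sketch, with a compiler builtin standing in for xmem:

	extern unsigned int master_mpu;

	int
	won_election_sketch(void)
	{
		return __atomic_exchange_n(&master_mpu, 1,
		    __ATOMIC_SEQ_CST) == 0;
	}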
/*
* Main processor specific initialization (with
*/
ASLOCAL(main_init)
/* clear BSS. PROM might have already done this... */
- or.u %r2, %r0, %hi16(_C_LABEL(edata))
- or %r2, %r2, %lo16(_C_LABEL(edata))
- or.u %r4, %r0, %hi16(_C_LABEL(end))
- or %r4, %r4, %lo16(_C_LABEL(end))
- bsr.n _C_LABEL(bzero) /* bzero(edata, end-edata) */
+ or.u %r2, %r0, %hi16(edata)
+ or %r2, %r2, %lo16(edata)
+ or.u %r4, %r0, %hi16(end)
+ or %r4, %r4, %lo16(end)
+ bsr.n bzero /* bzero(edata, end-edata) */
subu %r3, %r4, %r2
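
bsr.n is the delayed form of bsr: the subu in the delay slot computes the length argument before bzero gains control, so the pair amounts to:

	extern char edata[], end[];	/* linker-provided boundaries */
	void bzero(void *, unsigned long);

	void
	clear_bss_sketch(void)
	{
		bzero(edata, end - edata);	/* %r2 = edata, %r3 = len */
	}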
/* figure out the end of the kernel image. */
#if defined(DDB) || NKSYMS > 0
- or.u %r4, %r0, %hi16(_C_LABEL(esym))
- ld %r4, %r4, %lo16(_C_LABEL(esym))
+ or.u %r4, %r0, %hi16(esym)
+ ld %r4, %r4, %lo16(esym)
bcnd ne0, %r4, 1f
#endif
- or.u %r4, %r0, %hi16(_C_LABEL(end))
- or %r4, %r4, %lo16(_C_LABEL(end)) /* if no symbols */
+ or.u %r4, %r0, %hi16(end)
+ or %r4, %r4, %lo16(end) /* if no symbols */
1:
- or.u %r5, %r0, %hi16(_C_LABEL(first_addr))
- st %r4, %r5, %lo16(_C_LABEL(first_addr))
+ or.u %r5, %r0, %hi16(first_addr)
+ st %r4, %r5, %lo16(first_addr)
- or.u %r5, %r0, %hi16(_C_LABEL(bootdev))
- st %r24, %r5, %lo16(_C_LABEL(bootdev))
- or.u %r5, %r0, %hi16(_C_LABEL(boothowto))
- st %r25, %r5, %lo16(_C_LABEL(boothowto))
+ or.u %r5, %r0, %hi16(bootdev)
+ st %r24, %r5, %lo16(bootdev)
+ or.u %r5, %r0, %hi16(boothowto)
+ st %r25, %r5, %lo16(boothowto)
/*
* Have curcpu() point to a valid cpuinfo structure,
* mutex diagnostic code.
*/
#ifdef MULTIPROCESSOR
- or.u %r11, %r0, %hi16(_ASM_LABEL(dummy_cpu))
- or %r11, %r11, %lo16(_ASM_LABEL(dummy_cpu))
+ or.u %r11, %r0, %hi16(dummy_cpu)
+ or %r11, %r11, %lo16(dummy_cpu)
#else
/* Need to make sure this matches non-MP curcpu() */
- or.u %r11, %r0, %hi16(_C_LABEL(m88k_cpus))
- or %r11, %r11, %lo16(_C_LABEL(m88k_cpus))
+ or.u %r11, %r0, %hi16(m88k_cpus)
+ or %r11, %r11, %lo16(m88k_cpus)
#endif
stcr %r11, CPU
/* Switch to startup stack */
- or.u %r31, %r0, %hi16(_ASM_LABEL(initstack_end))
- or %r31, %r31, %lo16(_ASM_LABEL(initstack_end))
+ or.u %r31, %r0, %hi16(initstack_end)
+ or %r31, %r31, %lo16(initstack_end)
- or.u %r3, %r0, %hi16(_C_LABEL(vector_list))
- or %r3, %r3, %lo16(_C_LABEL(vector_list))
+ or.u %r3, %r0, %hi16(vector_list)
+ or %r3, %r3, %lo16(vector_list)
- bsr.n _C_LABEL(luna88k_vector_init)
+ bsr.n luna88k_vector_init
ldcr %r2, VBR
/* PIO stuff */
mak %r10, %r10, 0<8> /* shift left 8 bits */
ld.bu %r12, %r11, %lo16(OBIO_PIO0B) /* dipsw-2 (from portB) */
or %r10, %r10, %r12
- or.u %r11, %r0, %hi16(_C_LABEL(dipswitch))
- st.h %r10, %r11, %lo16(_C_LABEL(dipswitch))
+ or.u %r11, %r0, %hi16(dipswitch)
+ st.h %r10, %r11, %lo16(dipswitch)
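
The two PIO port reads assemble the 16-bit dipswitch value, dipsw-1 (port A) in the high byte and dipsw-2 (port B) in the low byte; a C sketch of the same assembly:

	extern unsigned short dipswitch;

	void
	read_dipsw_sketch(volatile unsigned char *pio0a,
	    volatile unsigned char *pio0b)
	{
		dipswitch = (unsigned short)((*pio0a << 8) | *pio0b);
	}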
/* read frame buffer depth from ROM work area */
ld %r12, %r0, %lo16(0x00001114) /* frame buffer depth */
- or.u %r13, %r0, %hi16(_C_LABEL(hwplanebits))
- st %r12, %r13, %lo16(_C_LABEL(hwplanebits))
+ or.u %r13, %r0, %hi16(hwplanebits)
+ st %r12, %r13, %lo16(hwplanebits)
/*
* By default, sysconsole is set to 0 (means using ttya). If
bb0 14, %r10, 1f /* if dipsw-1:2 is on, skip */
bcnd eq0, %r12, 1f /* if hwplanebits == 0, skip */
or %r10, %r0, 1 /* set sysconsole = 1 */
- or.u %r11, %r0, %hi16(_C_LABEL(sysconsole))
- st %r10, %r11, %lo16(_C_LABEL(sysconsole))
+ or.u %r11, %r0, %hi16(sysconsole)
+ st %r10, %r11, %lo16(sysconsole)
1:
or %r10, %r0, 0x84 /* initialize pio1 */
* We are still using the interrupt stack here, thus we are not
* affected...
*/
- bsr _C_LABEL(luna88k_bootstrap)
+ bsr luna88k_bootstrap
/*
* ...and we can switch to the u area stack now.
addu %r31, %r31, USPACE
/* call main() - no arguments although main() still defines one */
- bsr _C_LABEL(main)
+ bsr main
- or.u %r2, %r0, %hi16(_ASM_LABEL(main_panic))
- bsr.n _C_LABEL(panic)
- or %r2, %r2, %lo16(_ASM_LABEL(main_panic))
+ or.u %r2, %r0, %hi16(main_panic)
+ bsr.n panic
+ or %r2, %r2, %lo16(main_panic)
.data
.balign 4
* This is necessary for early spl*() usage, as well as
* mutex diagnostic code.
*/
- or.u %r11, %r0, %hi16(_ASM_LABEL(dummy_cpu))
- or %r11, %r11, %lo16(_ASM_LABEL(dummy_cpu))
+ or.u %r11, %r0, %hi16(dummy_cpu)
+ or %r11, %r11, %lo16(dummy_cpu)
st %r0, %r11, CI_FLAGS /* reset CIF_PRIMARY */
stcr %r11, CPU
* use and we'll switch to it.
*/
- or.u %r31, %r0, %hi16(_ASM_LABEL(slavestack_end))
- bsr.n _C_LABEL(secondary_pre_main) /* set cpu number */
- or %r31, %r31, %lo16(_ASM_LABEL(slavestack_end))
+ or.u %r31, %r0, %hi16(slavestack_end)
+ bsr.n secondary_pre_main /* set cpu number */
+ or %r31, %r31, %lo16(slavestack_end)
- bsr.n _C_LABEL(secondary_main)
+ bsr.n secondary_main
addu %r31, %r2, USPACE /* switch to idle stack */
#else
ASLOCAL(u0)
.space USPACE
GLOBAL(proc0paddr)
- .word _ASM_LABEL(u0) /* KVA of proc0 uarea */
+ .word u0 /* KVA of proc0 uarea */
/* The first processor to XMEM this becomes the master */
ASLOCAL(master_mpu)
.word 0 /* ci_curpcb */
.word 0 /* ci_curpmap */
.word 0 /* ci_cpuid */
- .word _ASM_LABEL(dummy_mplock) /* ci_mp_atomic_begin */
- .word _ASM_LABEL(dummy_mplock) /* ci_mp_atomic_end */
+ .word dummy_mplock /* ci_mp_atomic_begin */
+ .word dummy_mplock /* ci_mp_atomic_end */
.space CPU_INFO_SIZEOF - 7 * 4
#endif
-/* $OpenBSD: locore.S,v 1.3 2022/10/14 20:53:19 aoyama Exp $ */
+/* $OpenBSD: locore.S,v 1.4 2022/12/06 18:50:59 guenther Exp $ */
/*
* Copyright (c) 2013 Miodrag Vallat.
* They will then proceed to run the kernel, as if the kernel
* had been directly booted from the PROM.
*/
- or.u %r3, %r0, %hi16(_ASM_LABEL(cpu_park_address))
- or %r3, %r3, %lo16(_ASM_LABEL(cpu_park_address))
+ or.u %r3, %r0, %hi16(cpu_park_address)
+ or %r3, %r3, %lo16(cpu_park_address)
- or.u %r2, %r0, %hi16(_ASM_LABEL(cpu_park))
- or %r2, %r2, %lo16(_ASM_LABEL(cpu_park))
+ or.u %r2, %r0, %hi16(cpu_park)
+ or %r2, %r2, %lo16(cpu_park)
FLUSH_PIPELINE
xmem %r2, %r3, %r0
1:
/* clear BSS */
- or.u %r2, %r0, %hi16(_C_LABEL(edata))
- or %r2, %r2, %lo16(_C_LABEL(edata))
- or.u %r4, %r0, %hi16(_C_LABEL(end))
- or %r4, %r4, %lo16(_C_LABEL(end))
+ or.u %r2, %r0, %hi16(edata)
+ or %r2, %r2, %lo16(edata)
+ or.u %r4, %r0, %hi16(end)
+ or %r4, %r4, %lo16(end)
1: st %r0, %r2, %r0
addu %r2, %r2, 4
cmp %r3, %r2, %r4
bb1 ne, %r3, 1b
/* setup stack, below our image */
- or.u %r31, %r0, %hi16(_ASM_LABEL(__start))
- or %r31, %r31, %lo16(_ASM_LABEL(__start))
+ or.u %r31, %r0, %hi16(__start)
+ or %r31, %r31, %lo16(__start)
/* read dip switch settings */
or.u %r11, %r0, %hi16(OBIO_PIO0A)
ld.bu %r12, %r11, %lo16(OBIO_PIO0B)
or %r10, %r10, %r12
- or.u %r11, %r0, %hi16(_C_LABEL(dipswitch))
- st.h %r10, %r11, %lo16(_C_LABEL(dipswitch))
+ or.u %r11, %r0, %hi16(dipswitch)
+ st.h %r10, %r11, %lo16(dipswitch)
- bsr _C_LABEL(main)
- bsr _C_LABEL(_rtt)
+ bsr main
+ bsr _rtt
1: br 1b
ASLOCAL(cpu_park)
bcnd ne0, %r2, 9b
/* if kernel entry point is known, exit */
- or.u %r1, %r0, %hi16(_C_LABEL(cpu_boot))
- ld %r1, %r1, %lo16(_C_LABEL(cpu_boot))
- bcnd eq0, %r1, _ASM_LABEL(cpu_park)
+ or.u %r1, %r0, %hi16(cpu_boot)
+ ld %r1, %r1, %lo16(cpu_boot)
+ bcnd eq0, %r1, cpu_park
or.u %r2, %r0, 1
9:
subu %r2, %r2, 1
bcnd ne0, %r2, 9b
- or.u %r2, %r0, %hi16(_C_LABEL(cpu_bootarg1))
- ld %r2, %r2, %lo16(_C_LABEL(cpu_bootarg1))
- or.u %r3, %r0, %hi16(_C_LABEL(cpu_bootarg2))
- ld %r3, %r3, %lo16(_C_LABEL(cpu_bootarg2))
- or.u %r4, %r0, %hi16(_C_LABEL(cpu_bootarg3))
- ld %r4, %r4, %lo16(_C_LABEL(cpu_bootarg3))
- or.u %r5, %r0, %hi16(_C_LABEL(cpu_bootarg4))
- ld %r5, %r5, %lo16(_C_LABEL(cpu_bootarg4))
+ or.u %r2, %r0, %hi16(cpu_bootarg1)
+ ld %r2, %r2, %lo16(cpu_bootarg1)
+ or.u %r3, %r0, %hi16(cpu_bootarg2)
+ ld %r3, %r3, %lo16(cpu_bootarg2)
+ or.u %r4, %r0, %hi16(cpu_bootarg3)
+ ld %r4, %r4, %lo16(cpu_bootarg3)
+ or.u %r5, %r0, %hi16(cpu_bootarg4)
+ ld %r5, %r5, %lo16(cpu_bootarg4)
jmp %r1
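
The parking protocol: secondary processors spin until the kernel publishes a nonzero entry point in cpu_boot, then call it with the four boot argument words. In C terms:

	extern volatile unsigned int cpu_boot;	/* 0 while parked */
	extern unsigned int cpu_bootarg1, cpu_bootarg2,
	    cpu_bootarg3, cpu_bootarg4;

	void
	cpu_park_sketch(void)
	{
		void (*entry)(unsigned int, unsigned int, unsigned int,
		    unsigned int);

		while (cpu_boot == 0)
			;			/* stay parked */
		entry = (void (*)(unsigned int, unsigned int, unsigned int,
		    unsigned int))cpu_boot;
		(*entry)(cpu_bootarg1, cpu_bootarg2, cpu_bootarg3,
		    cpu_bootarg4);
	}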
GLOBAL(delay)
bcnd eq0, %r2, 2f
- or.u %r3, %r0, %hi16(_C_LABEL(cpuspeed))
- ld %r3, %r3, %lo16(_C_LABEL(cpuspeed))
+ or.u %r3, %r0, %hi16(cpuspeed)
+ ld %r3, %r3, %lo16(cpuspeed)
mul %r4, %r2, %r3
subu %r4, %r4, 4
1:
-/* $OpenBSD: asm.h,v 1.15 2016/05/27 16:32:38 deraadt Exp $ */
+/* $OpenBSD: asm.h,v 1.16 2022/12/06 18:50:59 guenther Exp $ */
/*
* Mach Operating System
#define _ENTRY(name) \
.text; .align 3; .globl name; .type name,@function; name:
-#define ENTRY(name) _ENTRY(_C_LABEL(name))
-#define ASENTRY(name) _ENTRY(_ASM_LABEL(name))
+#define ENTRY(name) _ENTRY(name)
+#define ASENTRY(name) _ENTRY(name)
#define END(name) \
.size name,.-name
#define GLOBAL(name) \
- .globl _C_LABEL(name); _C_LABEL(name):
+ .globl name; name:
#define ASGLOBAL(name) \
- .globl _ASM_LABEL(name); _ASM_LABEL(name):
+ .globl name; name:
#define LOCAL(name) \
- _C_LABEL(name):
+ name:
#define ASLOCAL(name) \
- _ASM_LABEL(name):
+ name:
#define BSS(name, size) \
- .comm _C_LABEL(name), size
+ .comm name, size
#define ASBSS(name, size) \
- .comm _ASM_LABEL(name), size
+ .comm name, size
#define STRONG_ALIAS(alias,sym) \
.global alias; \
#define RTE_ERROR_BIT 0
#define VECTOR(x) \
- .word _C_LABEL(x)
+ .word x
#endif /* _LOCORE */
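
With the indirection gone the expansions are direct; for example, ENTRY(__divsi3) from the first hunk now yields:

	.text; .align 3; .globl __divsi3; .type __divsi3,@function; __divsi3: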
-/* $OpenBSD: atomic.S,v 1.6 2014/07/15 16:26:28 miod Exp $ */
+/* $OpenBSD: atomic.S,v 1.7 2022/12/06 18:50:59 guenther Exp $ */
/*
* Copyright (c) 2009 Miodrag Vallat.
ENTRY(atomic_setbits_int)
or %r5, %r1, %r0 /* save return address */
- bsr _C_LABEL(__atomic_lock)
+ bsr __atomic_lock
ld %r4, %r2, %r0
or %r4, %r4, %r3
st %r4, %r2, %r0
- br _C_LABEL(__atomic_unlock)
+ br __atomic_unlock
ENTRY(atomic_clearbits_int)
or %r5, %r1, %r0 /* save return address */
- bsr _C_LABEL(__atomic_lock)
+ bsr __atomic_lock
ld %r4, %r2, %r0
or %r4, %r4, %r3
xor %r4, %r4, %r3 /* r4 &= ~r3 */
st %r4, %r2, %r0
- br _C_LABEL(__atomic_unlock)
+ br __atomic_unlock
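
The or/xor pair clears bits without a dedicated and-not instruction: setting the mask bits first guarantees the following xor clears exactly those bits.

	/* identity used by atomic_clearbits_int above */
	unsigned int
	clearbits_sketch(unsigned int x, unsigned int m)
	{
		return (x | m) ^ m;		/* == x & ~m */
	}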
ENTRY(atomic_add_int_nv_mp)
or %r5, %r1, %r0 /* save return address */
- bsr _C_LABEL(__atomic_lock)
+ bsr __atomic_lock
or %r9, %r2, %r0
ld %r2, %r9, %r0
addu %r2, %r2, %r3
st %r2, %r9, %r0
- br _C_LABEL(__atomic_unlock)
+ br __atomic_unlock
ENTRY(atomic_sub_int_nv_mp)
or %r5, %r1, %r0 /* save return address */
- bsr _C_LABEL(__atomic_lock)
+ bsr __atomic_lock
or %r9, %r2, %r0
ld %r2, %r9, %r0
subu %r2, %r2, %r3
st %r2, %r9, %r0
- br _C_LABEL(__atomic_unlock)
+ br __atomic_unlock
ENTRY(atomic_cas_uint_mp)
or %r5, %r1, %r0 /* save return address */
- bsr _C_LABEL(__atomic_lock)
+ bsr __atomic_lock
ld %r9, %r2, %r0
cmp %r3, %r3, %r9
1:
or %r2, %r9, %r0
- br _C_LABEL(__atomic_unlock)
+ br __atomic_unlock
ENTRY(atomic_swap_uint_mp)
or %r5, %r1, %r0 /* save return address */
- bsr _C_LABEL(__atomic_lock)
+ bsr __atomic_lock
ld %r4, %r2, %r0
st %r3, %r2, %r0
or %r2, %r4, %r0
- br _C_LABEL(__atomic_unlock)
+ br __atomic_unlock
GLOBAL(__atomic_lock)
* then grab the interlock.
*/
- or.u %r6, %r0, %hi16(_ASM_LABEL(__atomic_interlock))
- or %r6, %r6, %lo16(_ASM_LABEL(__atomic_interlock))
+ or.u %r6, %r0, %hi16(__atomic_interlock)
+ or %r6, %r6, %lo16(__atomic_interlock)
ldcr %r7, PSR
set %r8, %r7, 1<PSR_INTERRUPT_DISABLE_BIT>
* the interlock.
*/
- or.u %r6, %r0, %hi16(_ASM_LABEL(__atomic_interlock))
- or %r6, %r6, %lo16(_ASM_LABEL(__atomic_interlock))
+ or.u %r6, %r0, %hi16(__atomic_interlock)
+ or %r6, %r6, %lo16(__atomic_interlock)
ldcr %r7, PSR
set %r8, %r7, 1<PSR_INTERRUPT_DISABLE_BIT>
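
Both __atomic_lock and __atomic_unlock mask interrupts before touching the interlock, so the holder cannot be preempted mid-update; the xmem spin itself falls outside the quoted hunks. A hedged C sketch of the lock half:

	extern volatile unsigned int __atomic_interlock;

	static void
	disable_interrupts_sketch(void)
	{
		/* stand-in for the ldcr/set PSR sequence above */
	}

	void
	atomic_lock_sketch(void)
	{
		disable_interrupts_sketch();
		while (__atomic_exchange_n(&__atomic_interlock, 1,
		    __ATOMIC_SEQ_CST) != 0)
			;		/* spin until the interlock is ours */
	}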
-/* $OpenBSD: eh_common.S,v 1.63 2022/09/04 06:49:11 jsg Exp $ */
+/* $OpenBSD: eh_common.S,v 1.64 2022/12/06 18:50:59 guenther Exp $ */
/*
* Mach Operating System
* Copyright (c) 1993-1991 Carnegie Mellon University
/* Invoke a C function with 2 arguments */
#define CALL(NAME, ARG1, ARG2) \
or %r2, %r0, ARG1; \
- bsr.n _C_LABEL(NAME); \
+ bsr.n NAME; \
or %r3, %r0, ARG2
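
Both arguments are in place when NAME starts executing, because the delay slot of bsr.n runs first. An illustrative instantiation, CALL(interrupt, %r30, 0), expands to:

	or	%r2, %r0, %r30		/* first argument */
	bsr.n	interrupt		/* branch taken after... */
	or	%r3, %r0, 0		/* ...this delay-slot instruction */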
/* Invoke a function and return elsewhere */
clr FLAGS, FLAGS, 1<FLAG_FROM_KERNEL> ; \
set FLAGS, FLAGS, 1<FLAG_FROM_KERNEL> ; \
/* get a stack (exception frame) */ \
-1: bsr _ASM_LABEL(m88100_setup_phase_one) ; \
+1: bsr m88100_setup_phase_one ; \
/* TMP2 now free -- use to set EF_VECTOR */ \
or TMP2, %r0, NUM ; \
st TMP2, %r31, EF_VECTOR ; \
SSBR_STUFF \
/* call setup_phase_two to restart the FPU */ \
/* and to save all general registers. */ \
- bsr _ASM_LABEL(m88100_setup_phase_two)
+ bsr m88100_setup_phase_two
#endif
#ifdef M88110
bb0 PSR_SUPERVISOR_MODE_BIT, %r1, 1f ; \
set FLAGS, FLAGS, 1<FLAG_FROM_KERNEL> ; \
/* get a stack and an exception frame */ ; \
-1: bsr _ASM_LABEL(m88110_setup_phase_one) ; \
+1: bsr m88110_setup_phase_one ; \
/* TMP2 now free -- use to set EF_VECTOR */ ; \
or TMP2, %r0, NUM ; \
/* call setup_phase_two to save all general */ ; \
/* registers. */ ; \
st TMP2, %r30, EF_VECTOR ; \
- bsr _ASM_LABEL(m88110_setup_phase_two)
+ bsr m88110_setup_phase_two
#endif
/* Some defines for use with PREP88100() */
#define Clear_SSBR_Dest \
- bsr _ASM_LABEL(clear_dest_ssbr_bit);
+ bsr clear_dest_ssbr_bit;
#define M88100_Data_Precheck \
bb1.n FLAG_IGNORE_DATA_EXCEPTION, FLAGS, \
- _ASM_LABEL(m88100_ignore_data_exception);
+ m88100_ignore_data_exception;
#define M88110_Data_Precheck \
bb1.n FLAG_IGNORE_DATA_EXCEPTION, FLAGS, \
- _ASM_LABEL(m88110_ignore_data_exception);
+ m88110_ignore_data_exception;
#ifdef M88100
/*
PREP88100("unknown", 0,,)
or %r2, %r0, T_UNKNOWNFLT
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88100_trap), _ASM_LABEL(check_ast))
+ XCALL(m88100_trap, check_ast)
/* interrupt exception handler */
GLOBAL(interrupt_handler)
PREP88100("interrupt", 1,,)
or %r2, %r0, %r30
- XCALL(_C_LABEL(interrupt), _ASM_LABEL(check_ast))
+ XCALL(interrupt, check_ast)
/* instruction access exception handler */
GLOBAL(instruction_access_handler)
PREP88100("inst", 2,,)
or %r2, %r0, T_INSTFLT
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88100_trap), _ASM_LABEL(check_ast))
+ XCALL(m88100_trap, check_ast)
/*
* data access exception handler --
GLOBAL(data_exception_handler_bootstrap)
PREP88100("data", 3,, M88100_Data_Precheck)
/* No need to call m88100_trap(T_DATAFLT) as PREP will do this for us */
- br _ASM_LABEL(check_ast)
+ br check_ast
GLOBAL(data_exception_handler)
PREP88100("data", 3,,)
/* No need to call m88100_trap(T_DATAFLT) as PREP will do this for us */
- br _ASM_LABEL(check_ast)
+ br check_ast
/* misaligned access exception handler */
GLOBAL(misaligned_handler)
PREP88100("misalign", 4, Clear_SSBR_Dest,)
or %r2, %r0, T_MISALGNFLT
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88100_trap), _ASM_LABEL(check_ast))
+ XCALL(m88100_trap, check_ast)
/* unimplemented opcode exception handler */
GLOBAL(unimplemented_handler)
PREP88100("unimp", 5,,)
or %r2, %r0, T_ILLFLT
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88100_trap), _ASM_LABEL(check_ast))
+ XCALL(m88100_trap, check_ast)
/*
* Some versions of the chip have a bug whereby false privilege
1: PREP88100("privilege", 6, Clear_SSBR_Dest,)
or %r2, %r0, T_PRIVINFLT
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88100_trap), _ASM_LABEL(check_ast))
+ XCALL(m88100_trap, check_ast)
/* bounds checking exception handler */
GLOBAL(bounds_handler)
PREP88100("bounds", 7, Clear_SSBR_Dest,)
or %r2, %r0, T_BNDFLT
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88100_trap), _ASM_LABEL(check_ast))
+ XCALL(m88100_trap, check_ast)
/* integer divide-by-zero exception handler */
GLOBAL(divide_handler)
PREP88100("divide", 8, Clear_SSBR_Dest,)
or %r2, %r0, T_ZERODIV
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88100_trap), _ASM_LABEL(check_ast))
+ XCALL(m88100_trap, check_ast)
/* integer overflow exception handler */
GLOBAL(overflow_handler)
PREP88100("overflow", 9,,)
or %r2, %r0, T_OVFFLT
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88100_trap), _ASM_LABEL(check_ast))
+ XCALL(m88100_trap, check_ast)
/* Floating-point precise handler */
#define FPp_SSBR_STUFF \
- bsr _ASM_LABEL(clear_FPp_ssbr_bit);
+ bsr clear_FPp_ssbr_bit;
GLOBAL(fp_precise_handler)
PREP88100("FPU precise", 114, FPp_SSBR_STUFF,)
or %r2, %r0, T_FPEPFLT
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88100_trap), _ASM_LABEL(check_ast))
+ XCALL(m88100_trap, check_ast)
/* Floating-point imprecise handler */
#define FPi_SSBR_STUFF \
- bsr _ASM_LABEL(clear_FPi_ssbr_bit);
+ bsr clear_FPi_ssbr_bit;
GLOBAL(fp_imprecise_handler)
PREP88100("FPU imprecise", 115, FPi_SSBR_STUFF,)
or %r2, %r0, T_FPEIFLT
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88100_trap), _ASM_LABEL(check_ast))
+ XCALL(m88100_trap, check_ast)
/* trap 450: system calls */
GLOBAL(syscall_handler)
PREP88100("syscall", 450,,)
ld %r2, %r30, GENREG_OFF(13)
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88100_syscall), _ASM_LABEL(check_ast))
+ XCALL(m88100_syscall, check_ast)
/* trap 451: cache flush (necessary for trampolines) */
GLOBAL(cache_flush_handler)
PREP88100("cache_flush", 451,,)
or %r2, %r0, %r30
- XCALL(_C_LABEL(cache_flush), _ASM_LABEL(check_ast))
+ XCALL(cache_flush, check_ast)
GLOBAL(sigsys)
PREP88100("sigsys", 501,,)
or %r2, %r0, T_SIGSYS
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88100_trap), _ASM_LABEL(check_ast))
+ XCALL(m88100_trap, check_ast)
GLOBAL(stepbpt)
PREP88100("stepbpt", 504,,)
or %r2, %r0, T_STEPBPT
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88100_trap), _ASM_LABEL(check_ast))
+ XCALL(m88100_trap, check_ast)
GLOBAL(userbpt)
PREP88100("userbpt", 511,,)
or %r2, %r0, T_USERBPT
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88100_trap), _ASM_LABEL(check_ast))
+ XCALL(m88100_trap, check_ast)
#ifdef DDB
GLOBAL(break)
PREP88100("break", 130,,)
or %r2, %r0, T_KDB_BREAK
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88100_trap), _ASM_LABEL(check_ast))
+ XCALL(m88100_trap, check_ast)
GLOBAL(trace)
PREP88100("trace", 131,,)
or %r2, %r0, T_KDB_TRACE
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88100_trap), _ASM_LABEL(check_ast))
+ XCALL(m88100_trap, check_ast)
GLOBAL(entry)
PREP88100("kdb", 132,,)
or %r2, %r0, T_KDB_ENTRY
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88100_trap), _ASM_LABEL(check_ast))
+ XCALL(m88100_trap, check_ast)
#endif
/*
or %r29, %r0, 0
1:
or %r26, %r0, %r31 /* save old stack */
- or.u %r31, %r0, %hi16(_ASM_LABEL(initstack_end))
- or %r31, %r31, %lo16(_ASM_LABEL(initstack_end))
+ or.u %r31, %r0, %hi16(initstack_end)
+ or %r31, %r31, %lo16(initstack_end)
#ifdef DEBUG
/* zero the stack, so we'll know what we're lookin' at */
- or.u %r27, %r0, %hi16(_ASM_LABEL(initstack))
- or %r27, %r27, %lo16(_ASM_LABEL(initstack))
+ or.u %r27, %r0, %hi16(initstack)
+ or %r27, %r27, %lo16(initstack)
1: cmp %r28, %r27, %r31
bb1 ge, %r28, 2f /* branch if at the end of the stack */
st %r0, %r0, %r27
st %r20, %r31, 0x00
#endif
- bsr.n _C_LABEL(error_fatal)
+ bsr.n error_fatal
or %r2, %r0, %r30
/* turn interrupts back on */
*/
/* the "+2" below is to set the VALID bit. */
- or.u %r2, %r0, %hi16(_ASM_LABEL(badaddr__return_nonzero) + 2)
- or %r2, %r2, %lo16(_ASM_LABEL(badaddr__return_nonzero) + 2)
+ or.u %r2, %r0, %hi16(badaddr__return_nonzero + 2)
+ or %r2, %r2, %lo16(badaddr__return_nonzero + 2)
stcr %r2, SNIP /* Make it the next instruction to execute */
addu %r2, %r2, 4
stcr %r2, SFIP /* and the next one after that, too. */
* to blast r2..r9 as we see fit.
*/
- or.u %r2, %r0, %hi16(_ASM_LABEL(badaddr__return_nonzero))
- or %r2, %r2, %lo16(_ASM_LABEL(badaddr__return_nonzero))
+ or.u %r2, %r0, %hi16(badaddr__return_nonzero)
+ or %r2, %r2, %lo16(badaddr__return_nonzero)
stcr %r2, EXIP /* Make it the next instruction to execute */
stcr %r0, DSR /* Clear exception status */
RTE
/* make sure the upper 28 bits of the size are zero... */
ext %r6, %r3, 0<4>
- bcnd.n ne0, %r6, _ASM_LABEL(badaddr__return_nonzero)
+ bcnd.n ne0, %r6, badaddr__return_nonzero
stcr %r5, SR1
- or.u %r6, %r0, %hi16(_ASM_LABEL(badaddr_switch))
- or %r6, %r6, %lo16(_ASM_LABEL(badaddr_switch))
+ or.u %r6, %r0, %hi16(badaddr_switch)
+ or %r6, %r6, %lo16(badaddr_switch)
lda %r3, %r6[%r3]
jmp %r3
ASLOCAL(badaddr_switch)
- br _ASM_LABEL(badaddr__return_nonzero)
- br _ASM_LABEL(badaddr__b)
- br _ASM_LABEL(badaddr__h)
- br _ASM_LABEL(badaddr__return_nonzero)
- br _ASM_LABEL(badaddr__w)
- br _ASM_LABEL(badaddr__return_nonzero)
- br _ASM_LABEL(badaddr__return_nonzero)
- br _ASM_LABEL(badaddr__return_nonzero)
- br _ASM_LABEL(badaddr__d)
- br _ASM_LABEL(badaddr__return_nonzero)
- br _ASM_LABEL(badaddr__return_nonzero)
- br _ASM_LABEL(badaddr__return_nonzero)
- br _ASM_LABEL(badaddr__return_nonzero)
- br _ASM_LABEL(badaddr__return_nonzero)
- br _ASM_LABEL(badaddr__return_nonzero)
- br _ASM_LABEL(badaddr__return_nonzero)
+ br badaddr__return_nonzero
+ br badaddr__b
+ br badaddr__h
+ br badaddr__return_nonzero
+ br badaddr__w
+ br badaddr__return_nonzero
+ br badaddr__return_nonzero
+ br badaddr__return_nonzero
+ br badaddr__d
+ br badaddr__return_nonzero
+ br badaddr__return_nonzero
+ br badaddr__return_nonzero
+ br badaddr__return_nonzero
+ br badaddr__return_nonzero
+ br badaddr__return_nonzero
+ br badaddr__return_nonzero
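
The probe size indexes this 16-entry branch table (after the upper 28 bits were checked to be zero); only sizes 1, 2, 4 and 8 have load probes, and every other slot reports the address as bad. As a C sketch, with the probe functions as hypothetical stand-ins for badaddr__b/h/w/d:

	extern int probe_byte_sketch(void *);
	extern int probe_half_sketch(void *);
	extern int probe_word_sketch(void *);
	extern int probe_double_sketch(void *);

	int
	badaddr_dispatch_sketch(void *addr, unsigned int size)
	{
		switch (size) {
		case 1:	return probe_byte_sketch(addr);
		case 2:	return probe_half_sketch(addr);
		case 4:	return probe_word_sketch(addr);
		case 8:	return probe_double_sketch(addr);
		default: return 1;	/* unsupported size: "bad" */
		}
	}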
/*
* The load attempts below will either fault or not. If they fault,
FLUSH_PIPELINE
ld.b %r6, %r2, 0
FLUSH_PIPELINE
- br.n _ASM_LABEL(badaddr__return)
+ br.n badaddr__return
or %r2, %r0, %r0
ASLOCAL(badaddr__h)
/* It's a bad address if it's misaligned. */
- bb1 0, %r2, _ASM_LABEL(badaddr__return_nonzero)
+ bb1 0, %r2, badaddr__return_nonzero
FLUSH_PIPELINE
ld.h %r6, %r2, 0
FLUSH_PIPELINE
- br.n _ASM_LABEL(badaddr__return)
+ br.n badaddr__return
or %r2, %r0, %r0
ASLOCAL(badaddr__w)
/* It's a bad address if it's misaligned. */
- bb1 0, %r2, _ASM_LABEL(badaddr__return_nonzero)
- bb1 1, %r2, _ASM_LABEL(badaddr__return_nonzero)
+ bb1 0, %r2, badaddr__return_nonzero
+ bb1 1, %r2, badaddr__return_nonzero
FLUSH_PIPELINE
ld %r6, %r2, 0
FLUSH_PIPELINE
- br.n _ASM_LABEL(badaddr__return)
+ br.n badaddr__return
or %r2, %r0, %r0 /* indicate a zero (address not bad) return.*/
ASLOCAL(badaddr__d)
/* It's a bad address if it's misaligned. */
- bb1 0, %r2, _ASM_LABEL(badaddr__return_nonzero)
- bb1 1, %r2, _ASM_LABEL(badaddr__return_nonzero)
- bb1 2, %r2, _ASM_LABEL(badaddr__return_nonzero)
+ bb1 0, %r2, badaddr__return_nonzero
+ bb1 1, %r2, badaddr__return_nonzero
+ bb1 2, %r2, badaddr__return_nonzero
FLUSH_PIPELINE
ld.d %r6, %r2, 0
FLUSH_PIPELINE
- br.n _ASM_LABEL(badaddr__return)
+ br.n badaddr__return
or %r2, %r0, %r0 /* indicate a zero (address not bad) return.*/
ASLOCAL(badaddr__return_nonzero)
NOP
NOP
- bb1 FLAG_ENABLING_FPU, FLAGS, _ASM_LABEL(m88100_use_SR3_pcb)
+ bb1 FLAG_ENABLING_FPU, FLAGS, m88100_use_SR3_pcb
/* are we coming in from user mode? If so, pick up process pcb */
- bb0 FLAG_FROM_KERNEL, FLAGS, _ASM_LABEL(m88100_pickup_stack)
+ bb0 FLAG_FROM_KERNEL, FLAGS, m88100_pickup_stack
/* Interrupt in kernel mode, not FPU restart */
/*
st %r1, %r31, EF_SR3
addu %r1, %r31, TRAPFRAME_SIZEOF /* save previous r31 */
- br.n _ASM_LABEL(m88100_have_pcb)
+ br.n m88100_have_pcb
st %r1, %r31, GENREG_OFF(31)
ASLOCAL(m88100_use_SR3_pcb)
or %r31, %r0, %r30 /* make r31 our pointer. */
addu %r30, %r30, TRAPFRAME_SIZEOF /* r30 now has previous SR3 */
st %r30, %r31, EF_SR3 /* save previous SR3 */
- br.n _ASM_LABEL(m88100_have_pcb)
+ br.n m88100_have_pcb
xcr %r30, %r30, SR3 /* restore r30 */
1:
/* we took an exception while restarting the FPU from user space.
ld %r1, %r30, GENREG_OFF(0) /* restore old r1 */
st %r0, %r30, GENREG_OFF(0) /* repair that frame */
st %r1, %r31, GENREG_OFF(1) /* store r1 */
- br.n _ASM_LABEL(m88100_have_pcb)
+ br.n m88100_have_pcb
xcr %r30, %r30, SR3 /* restore r30 */
ASLOCAL(m88100_pickup_stack)
stcr TMP, EPSR
/* the "+2" below is to set the VALID_BIT */
- or.u TMP, %r0, %hi16(_ASM_LABEL(m88100_fpu_enable) + 2)
- or TMP, TMP, %lo16(_ASM_LABEL(m88100_fpu_enable) + 2)
+ or.u TMP, %r0, %hi16(m88100_fpu_enable + 2)
+ or TMP, TMP, %lo16(m88100_fpu_enable + 2)
stcr TMP, SNIP
addu TMP, TMP, 4
stcr TMP, SFIP
1:
/* get and save IPL */
- bsr _C_LABEL(getipl)
+ bsr getipl
st %r2, %r30, EF_MASK
/*
PREP88110("unknown", 0,)
or %r2, %r0, T_UNKNOWNFLT
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88110_trap), _ASM_LABEL(check_ast))
+ XCALL(m88110_trap, check_ast)
/* interrupt exception handler */
GLOBAL(m88110_interrupt_handler)
PREP88110("interrupt", 1,)
or %r2, %r0, %r30
- XCALL(_C_LABEL(interrupt), _ASM_LABEL(check_ast))
+ XCALL(interrupt, check_ast)
/* instruction access exception handler */
GLOBAL(m88110_instruction_access_handler)
PREP88110("inst", 2,)
or %r2, %r0, T_INSTFLT
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88110_trap), _ASM_LABEL(check_ast))
+ XCALL(m88110_trap, check_ast)
/*
* data access exception handler --
* See badaddr() below for info about Data_Precheck.
PREP88110("data", 3, M88110_Data_Precheck)
or %r2, %r0, T_DATAFLT
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88110_trap), _ASM_LABEL(check_ast))
+ XCALL(m88110_trap, check_ast)
GLOBAL(m88110_data_exception_handler)
PREP88110("data", 3,)
or %r2, %r0, T_DATAFLT
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88110_trap), _ASM_LABEL(check_ast))
+ XCALL(m88110_trap, check_ast)
/* misaligned access exception handler */
GLOBAL(m88110_misaligned_handler)
PREP88110("misalign", 4,)
or %r2, %r0, T_MISALGNFLT
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88110_trap), _ASM_LABEL(check_ast))
+ XCALL(m88110_trap, check_ast)
/* unimplemented opcode exception handler */
GLOBAL(m88110_unimplemented_handler)
PREP88110("unimp", 5,)
or %r2, %r0, T_ILLFLT
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88110_trap), _ASM_LABEL(check_ast))
+ XCALL(m88110_trap, check_ast)
/* privilege exception handler */
GLOBAL(m88110_privilege_handler)
PREP88110("privilege", 6,)
or %r2, %r0, T_PRIVINFLT
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88110_trap), _ASM_LABEL(check_ast))
+ XCALL(m88110_trap, check_ast)
/* bounds checking exception handler */
GLOBAL(m88110_bounds_handler)
PREP88110("bounds", 7,)
or %r2, %r0, T_BNDFLT
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88110_trap), _ASM_LABEL(check_ast))
+ XCALL(m88110_trap, check_ast)
/* integer divide-by-zero exception handler */
GLOBAL(m88110_divide_handler)
PREP88110("divide", 8,)
or %r2, %r0, T_ZERODIV
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88110_trap), _ASM_LABEL(check_ast))
+ XCALL(m88110_trap, check_ast)
/* integer overflow exception handler */
GLOBAL(m88110_overflow_handler)
PREP88110("overflow", 9,)
or %r2, %r0, T_OVFFLT
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88110_trap), _ASM_LABEL(check_ast))
+ XCALL(m88110_trap, check_ast)
/* Floating-point precise handler */
GLOBAL(m88110_fpu_handler)
PREP88110("FPU", 114,)
or %r2, %r0, T_FPEPFLT
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88110_trap), _ASM_LABEL(check_ast))
+ XCALL(m88110_trap, check_ast)
/* non-maskable interrupt handler (IPIs, ABORT button) */
GLOBAL(m88110_nonmaskable)
PREP88110("NMI", 11,)
or %r2, %r0, %r30
- XCALL(_C_LABEL(nmi), _ASM_LABEL(nmi_return))
+ XCALL(nmi, nmi_return)
/* software walk data MMU read miss handler */
GLOBAL(m88110_data_read_miss)
PREP88110("88110 data read miss", 12,)
or %r2, %r0, T_110_DRM
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88110_trap), _ASM_LABEL(check_ast))
+ XCALL(m88110_trap, check_ast)
/* software walk data MMU write miss handler */
GLOBAL(m88110_data_write_miss)
PREP88110("88110 data write miss", 13,)
or %r2, %r0, T_110_DWM
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88110_trap), _ASM_LABEL(check_ast))
+ XCALL(m88110_trap, check_ast)
/* software walk inst MMU ATC miss handler */
GLOBAL(m88110_inst_atc_miss)
PREP88110("88110 inst ATC miss", 14,)
or %r2, %r0, T_110_IAM
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88110_trap), _ASM_LABEL(check_ast))
+ XCALL(m88110_trap, check_ast)
/* trap 450: system calls */
GLOBAL(m88110_syscall_handler)
PREP88110("syscall", 450,)
ld %r2, %r30, GENREG_OFF(13)
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88110_syscall), _ASM_LABEL(check_ast))
+ XCALL(m88110_syscall, check_ast)
/* trap 451: cache flush (necessary for trampolines) */
GLOBAL(m88110_cache_flush_handler)
PREP88110("cache_flush", 451,)
or %r2, %r0, %r30
- XCALL(_C_LABEL(cache_flush), _ASM_LABEL(check_ast))
+ XCALL(cache_flush, check_ast)
GLOBAL(m88110_sigsys)
PREP88110("sigsys", 501,)
or %r2, %r0, T_SIGSYS
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88110_trap), _ASM_LABEL(check_ast))
+ XCALL(m88110_trap, check_ast)
GLOBAL(m88110_stepbpt)
PREP88110("stepbpt", 504,)
or %r2, %r0, T_STEPBPT
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88110_trap), _ASM_LABEL(check_ast))
+ XCALL(m88110_trap, check_ast)
GLOBAL(m88110_userbpt)
PREP88110("userbpt", 511,)
or %r2, %r0, T_USERBPT
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88110_trap), _ASM_LABEL(check_ast))
+ XCALL(m88110_trap, check_ast)
#ifdef DDB
GLOBAL(m88110_break)
PREP88110("break", 130,)
or %r2, %r0, T_KDB_BREAK
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88110_trap), _ASM_LABEL(check_ast))
+ XCALL(m88110_trap, check_ast)
GLOBAL(m88110_trace)
PREP88110("trace", 131,)
or %r2, %r0, T_KDB_TRACE
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88110_trap), _ASM_LABEL(check_ast))
+ XCALL(m88110_trap, check_ast)
GLOBAL(m88110_entry)
PREP88110("kdb", 132,)
or %r2, %r0, T_KDB_ENTRY
or %r3, %r0, %r30
- XCALL(_C_LABEL(m88110_trap), _ASM_LABEL(check_ast))
+ XCALL(m88110_trap, check_ast)
#endif
/*
or %r29, %r0, 0
1:
or %r26, %r0, %r31 /* save old stack */
- or.u %r31, %r0, %hi16(_ASM_LABEL(initstack_end))
- or %r31, %r31, %lo16(_ASM_LABEL(initstack_end))
+ or.u %r31, %r0, %hi16(initstack_end)
+ or %r31, %r31, %lo16(initstack_end)
#ifdef DEBUG
/* zero the stack, so we'll know what we're lookin' at */
- or.u %r27, %r0, %hi16(_ASM_LABEL(initstack))
- or %r27, %r27, %lo16(_ASM_LABEL(initstack))
+ or.u %r27, %r0, %hi16(initstack)
+ or %r27, %r27, %lo16(initstack)
1: cmp %r28, %r27, %r31
bb1 ge, %r28, 2f /* branch if at the end of the stack */
st %r0, %r0, %r27
#endif
or %r2, %r0, %r30
- bsr _C_LABEL(error_fatal)
+ bsr error_fatal
/* turn interrupts back on */
ldcr %r1, PSR
* with interrupts disabled, and we won't check for AST or soft
* interrupts before returning to the cpu_switchto kernel code.
*/
- bb1 FLAG_FROM_KERNEL, FLAGS, _ASM_LABEL(m88110_kernel_stack)
+ bb1 FLAG_FROM_KERNEL, FLAGS, m88110_kernel_stack
/*
* Otherwise, this is an exception in user mode, we'll use the PCB
ld %r31, %r1, CI_CURPCB
addu %r31, %r31, USPACE
- br _ASM_LABEL(m88110_have_stack)
+ br m88110_have_stack
ASLOCAL(m88110_kernel_stack) /* Exception in kernel mode */
clr TMP, TMP, 1<PSR_SHADOW_FREEZE_BIT> /* enable shadowing */
stcr TMP, EPSR
- or.u TMP, %r0, %hi16(_ASM_LABEL(m88110_shadow_enable))
- or TMP, TMP, %lo16(_ASM_LABEL(m88110_shadow_enable))
+ or.u TMP, %r0, %hi16(m88110_shadow_enable)
+ or TMP, TMP, %lo16(m88110_shadow_enable)
stcr TMP, EXIP
xcr FLAGS,FLAGS,SR1
*/
/* get and save IPL */
- bsr _C_LABEL(getipl)
+ bsr getipl
st %r2, %r30, EF_MASK
subu %r31, %r31, 8 /* make some breathing space */
/* do not service AST and soft interrupts if interrupts were disabled */
ld %r2, FPTR, EF_EPSR
- bb1 PSR_INTERRUPT_DISABLE_BIT, %r2, _ASM_LABEL(ast_done)
+ bb1 PSR_INTERRUPT_DISABLE_BIT, %r2, ast_done
/* ...or we were not at spl0 */
ld %r2, FPTR, EF_MASK
- bcnd ne0, %r2, _ASM_LABEL(ast_done)
+ bcnd ne0, %r2, ast_done
#ifdef MULTIPROCESSOR
/*
or %r15, %r0, %r0
addu %r3, %r3, CI_SOFTIPI_CB
xmem %r15, %r3, %r0
- bcnd eq0, %r15, _ASM_LABEL(do_softint)
+ bcnd eq0, %r15, do_softint
- bsr.n _C_LABEL(setipl)
+ bsr.n setipl
or %r2, %r0, IPL_NONE
jsr %r15
/*
* Process soft interrupts, if any.
*/
- bsr _C_LABEL(spl0)
+ bsr spl0
/* do not service AST if not returning to user mode */
ld %r2, FPTR, EF_EPSR
- bb1 PSR_SUPERVISOR_MODE_BIT, %r2, _ASM_LABEL(ast_done)
+ bb1 PSR_SUPERVISOR_MODE_BIT, %r2, ast_done
1:
ldcr %r2, CPU
ld %r3, %r2, CI_CURPROC
ld %r2, %r3, P_ASTPENDING
- bcnd eq0, %r2, _ASM_LABEL(ast_done)
+ bcnd eq0, %r2, ast_done
- bsr.n _C_LABEL(ast)
+ bsr.n ast
or %r2, %r0, FPTR
br 1b
ENTRY(proc_trampoline)
#ifdef MULTIPROCESSOR
- bsr _C_LABEL(proc_trampoline_mp)
+ bsr proc_trampoline_mp
#endif
- bsr.n _C_LABEL(setipl) /* setipl(IPL_NONE) */
+ bsr.n setipl /* setipl(IPL_NONE) */
or %r2, %r0, %r0
ld %r1, %r31, 0 /* load func */
FLUSH_PIPELINE
/* now ready to return....*/
- bsr.n _C_LABEL(setipl)
+ bsr.n setipl
ld %r2, FPTR, EF_MASK /* get pre-exception ipl */
#if defined(M88100) && defined(M88110)
ldcr %r2, PID
extu %r3, %r2, 8<8>
- bcnd ne0, %r3, _ASM_LABEL(m88110_user_rte)
+ bcnd ne0, %r3, m88110_user_rte
/* FALLTHROUGH */
#endif
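
The extu extracts bits 8..15 of the PID control register; per the branch above, a nonzero field identifies an 88110 and routes the user rte through the 88110 path. In C:

	/* sketch of the runtime CPU discrimination above */
	int
	is_88110_sketch(unsigned int pid)
	{
		return ((pid >> 8) & 0xff) != 0;
	}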
* or not.
*/
ASLOCAL(nmi_return)
- bcnd ne0, %r2, _ASM_LABEL(check_ast)
+ bcnd ne0, %r2, check_ast
ld FPTR, %r31, 0 /* grab exception frame pointer */
FLUSH_PIPELINE
/* now ready to return....*/
- bsr.n _C_LABEL(setipl)
+ bsr.n setipl
ld %r2, FPTR, EF_MASK /* get pre-exception ipl */
#ifdef MULTIPROCESSOR
* Reenable NMIs if necessary.
*/
or %r2, FPTR, %r0
- bsr _C_LABEL(nmi_wrapup)
+ bsr nmi_wrapup
#endif
- br _ASM_LABEL(m88110_restore)
+ br m88110_restore
#endif
-/* $OpenBSD: process.S,v 1.26 2013/05/17 22:35:19 miod Exp $ */
+/* $OpenBSD: process.S,v 1.27 2022/12/06 18:50:59 guenther Exp $ */
/*
* Copyright (c) 1996 Nivas Madhur
* All rights reserved.
ldcr %r2, CPU
ld %r2, %r2, CI_CURPCB
st %r1, %r2, PCB_PC /* save return address */
- bsr _ASM_LABEL(__savectx)
+ bsr __savectx
/* note that we don't need to recover r1 at this point */
/* inline pmap_deactivate(oldproc) */
ld %r2, %r3, P_ADDR
st %r2, %r11, CI_CURPCB /* curpcb = p->p_addr */
- bsr.n _C_LABEL(pmap_activate) /* pmap_activate(newproc) */
+ bsr.n pmap_activate /* pmap_activate(newproc) */
or %r2, %r0, %r3
ldcr %r2, PSR
-/* $OpenBSD: subr.S,v 1.27 2017/05/29 06:06:52 mpi Exp $ */
+/* $OpenBSD: subr.S,v 1.28 2022/12/06 18:50:59 guenther Exp $ */
/*
* Mach Operating System
* Copyright (c) 1993-1992 Carnegie Mellon University
/* set up fault handler */
ldcr %r5, CPU
ld %r6, %r5, CI_CURPCB
- or.u %r5, %r0, %hi16(_ASM_LABEL(Lciflt))
- or %r5, %r5, %lo16(_ASM_LABEL(Lciflt))
+ or.u %r5, %r0, %hi16(Lciflt)
+ or %r5, %r5, %lo16(Lciflt)
st %r5, %r6, PCB_ONFAULT /* pcb_onfault = Lciflt */
/*
* to save a hypothetical fraction of a cycle.
*/
cmp %r9, LEN, 8
- bb1 lt, %r9, _ASM_LABEL(copyin_byte_only)
+ bb1 lt, %r9, copyin_byte_only
/* If they're not aligned similarly, use byte only... */
xor %r9, SRC, DEST
mask %r8, %r9, 0x3
- bcnd ne0, %r8, _ASM_LABEL(copyin_byte_only)
+ bcnd ne0, %r8, copyin_byte_only
/*
* At this point, we don't know if they're word aligned or not,
* but we know that what needs to be done to one to align
* it is what's needed for the other.
*/
- bb1 0, SRC, _ASM_LABEL(copyin_left_align_to_halfword)
+ bb1 0, SRC, copyin_left_align_to_halfword
ASLOCAL(copyin_left_aligned_to_halfword)
- bb1 1, SRC, _ASM_LABEL(copyin_left_align_to_word)
+ bb1 1, SRC, copyin_left_align_to_word
ASLOCAL(copyin_left_aligned_to_word)
- bb1 0, LEN, _ASM_LABEL(copyin_right_align_to_halfword)
+ bb1 0, LEN, copyin_right_align_to_halfword
ASLOCAL(copyin_right_aligned_to_halfword)
- bb1 1, LEN, _ASM_LABEL(copyin_right_align_to_word)
+ bb1 1, LEN, copyin_right_align_to_word
ASLOCAL(copyin_right_aligned_to_word)
/*
* boundary, and LEN is a multiple of 4. We want it an even
* multiple of 4.
*/
- bb1.n 2, LEN, _ASM_LABEL(copyin_right_align_to_doubleword)
+ bb1.n 2, LEN, copyin_right_align_to_doubleword
or %r7, %r0, 4
ASLOCAL(copyin_right_aligned_to_doubleword)
st %r5, DEST, %r0
addu SRC, SRC, 8
st %r6, DEST, %r7
- bcnd.n ne0, LEN, _ASM_LABEL(copyin_right_aligned_to_doubleword)
+ bcnd.n ne0, LEN, copyin_right_aligned_to_doubleword
addu DEST, DEST, 8
- br.n _ASM_LABEL(Lcidone)
+ br.n Lcidone
or %r2, %r0, %r0 /* successful return */
ASLOCAL(copyin_left_align_to_halfword)
subu LEN, LEN, 1
st.b %r5, DEST, %r0
addu SRC, SRC, 1
- br.n _ASM_LABEL(copyin_left_aligned_to_halfword)
+ br.n copyin_left_aligned_to_halfword
addu DEST, DEST, 1
ASLOCAL(copyin_left_align_to_word)
subu LEN, LEN, 2
st.h %r5, DEST, %r0
addu SRC, SRC, 2
- br.n _ASM_LABEL(copyin_left_aligned_to_word)
+ br.n copyin_left_aligned_to_word
addu DEST, DEST, 2
ASLOCAL(copyin_right_align_to_halfword)
#else
ld.b.usr %r5, SRC, LEN
#endif
- br.n _ASM_LABEL(copyin_right_aligned_to_halfword)
+ br.n copyin_right_aligned_to_halfword
st.b %r5, DEST, LEN
ASLOCAL(copyin_right_align_to_word)
#else
ld.h.usr %r5, SRC, LEN
#endif
- br.n _ASM_LABEL(copyin_right_aligned_to_word)
+ br.n copyin_right_aligned_to_word
st.h %r5, DEST, LEN
ASLOCAL(copyin_right_align_to_doubleword)
#else
ld.usr %r5, SRC, LEN
#endif
- bcnd.n ne0, LEN, _ASM_LABEL(copyin_right_aligned_to_doubleword)
+ bcnd.n ne0, LEN, copyin_right_aligned_to_doubleword
st %r5, DEST, LEN
- br.n _ASM_LABEL(Lcidone)
+ br.n Lcidone
or %r2, %r0, %r0 /* successful return */
ASLOCAL(copyin_byte_only)
st %r0, %r6, PCB_ONFAULT
ASLOCAL(Lciflt)
- br.n _ASM_LABEL(Lcidone)
+ br.n Lcidone
or %r2, %r0, EFAULT /* return fault */
#undef SRC
ENTRY(copyin32)
/* check for source alignment */
mask %r8, SRC, 0x3
- bcnd ne0, %r8, _ASM_LABEL(copyin32_misaligned)
+ bcnd ne0, %r8, copyin32_misaligned
/* set up fault handler */
ldcr %r5, CPU
ld %r6, %r5, CI_CURPCB
- or.u %r5, %r0, %hi16(_ASM_LABEL(Lciflt))
- or %r5, %r5, %lo16(_ASM_LABEL(Lciflt))
+ or.u %r5, %r0, %hi16(Lciflt)
+ or %r5, %r5, %lo16(Lciflt)
st %r5, %r6, PCB_ONFAULT /* pcb_onfault = Lciflt */
#ifdef ERRATA__XXX_USR
ld.usr %r5, SRC, %r0
#endif
st %r5, DEST, %r0
- br.n _ASM_LABEL(Lcidone)
+ br.n Lcidone
or %r2, %r0, %r0 /* successful return */
ASLOCAL(copyin32_misaligned)
/* setup fault handler */
ldcr %r6, CPU
ld %r7, %r6, CI_CURPCB
- or.u %r6, %r0, %hi16(_ASM_LABEL(Lcisflt))
- or %r6, %r6, %lo16(_ASM_LABEL(Lcisflt))
+ or.u %r6, %r0, %hi16(Lcisflt)
+ or %r6, %r6, %lo16(Lcisflt)
st %r6, %r7, PCB_ONFAULT
or %r6, %r0, 0
- bcnd lt0, CNT, _ASM_LABEL(Lcisflt)
- bcnd eq0, CNT, _ASM_LABEL(Lcistoolong)
+ bcnd lt0, CNT, Lcisflt
+ bcnd eq0, CNT, Lcistoolong
1:
#ifdef ERRATA__XXX_USR
NOP
or %r2, %r0, ENAMETOOLONG /* overflow */
ASLOCAL(Lcisnull)
- bcnd eq0,%r6, _ASM_LABEL(Lcisdone) /* do not attempt to clear last byte */
+ bcnd eq0,%r6, Lcisdone /* do not attempt to clear last byte */
/* if we did not write to the string */
subu %r6, %r6, 1
st.b %r0, DEST, %r6 /* clear last byte */
- br.n _ASM_LABEL(Lcisdone)
+ br.n Lcisdone
addu %r6, %r6, 1
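
On fault or overflow, if any bytes were copied the last one written is replaced with a NUL so the destination stays a terminated string; a sketch:

	void
	terminate_partial_sketch(char *dst, unsigned int n)
	{
		if (n > 0)
			dst[n - 1] = '\0';
	}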
2: /* all done */
or %r2, %r0, 0
st %r0, %r6, PCB_ONFAULT /* clear the handler */
ASLOCAL(Lcisflt)
- br.n _ASM_LABEL(Lcisnull)
+ br.n Lcisnull
or %r2, %r0, EFAULT /* return fault */
#undef SRC
/* setup fault handler */
ldcr %r5, CPU
ld %r6, %r5, CI_CURPCB
- or.u %r5, %r0, %hi16(_ASM_LABEL(Lcoflt))
- or %r5, %r5, %lo16(_ASM_LABEL(Lcoflt))
+ or.u %r5, %r0, %hi16(Lcoflt)
+ or %r5, %r5, %lo16(Lcoflt)
st %r5, %r6, PCB_ONFAULT /* pcb_onfault = Lcoflt */
/*
* to save a hypothetical fraction of a cycle.
*/
cmp %r9, LEN, 8
- bb1 lt, %r9, _ASM_LABEL(copyout_byte_only)
+ bb1 lt, %r9, copyout_byte_only
/* If they're not aligned similarly, use byte only... */
xor %r9, SRC, DEST
mask %r8, %r9, 0x3
- bcnd ne0, %r8, _ASM_LABEL(copyout_byte_only)
+ bcnd ne0, %r8, copyout_byte_only
/*
* At this point, we don't know if they're word aligned or not,
* but we know that what needs to be done to one to align
* it is what's needed for the other.
*/
- bb1 0, SRC, _ASM_LABEL(copyout_left_align_to_halfword)
+ bb1 0, SRC, copyout_left_align_to_halfword
ASLOCAL(copyout_left_aligned_to_halfword)
- bb1 1, SRC, _ASM_LABEL(copyout_left_align_to_word)
+ bb1 1, SRC, copyout_left_align_to_word
ASLOCAL(copyout_left_aligned_to_word)
- bb1 0, LEN, _ASM_LABEL(copyout_right_align_to_halfword)
+ bb1 0, LEN, copyout_right_align_to_halfword
ASLOCAL(copyout_right_aligned_to_halfword)
- bb1 1, LEN, _ASM_LABEL(copyout_right_align_to_word)
+ bb1 1, LEN, copyout_right_align_to_word
ASLOCAL(copyout_right_aligned_to_word)
/*
* boundary, and LEN is a multiple of 4. We want it an even
* multiple of 4.
*/
- bb1.n 2, LEN, _ASM_LABEL(copyout_right_align_to_doubleword)
+ bb1.n 2, LEN, copyout_right_align_to_doubleword
or %r7, %r0, 4
ASLOCAL(copyout_right_aligned_to_doubleword)
#else
st.usr %r6, DEST, %r7
#endif
- bcnd.n ne0, LEN, _ASM_LABEL(copyout_right_aligned_to_doubleword)
+ bcnd.n ne0, LEN, copyout_right_aligned_to_doubleword
addu DEST, DEST, 8
or %r2, %r0, %r0 /* successful return */
- br _ASM_LABEL(Lcodone)
+ br Lcodone
/***************************************************/
ASLOCAL(copyout_left_align_to_halfword)
st.b.usr %r5, DEST, %r0
#endif
addu SRC, SRC, 1
- br.n _ASM_LABEL(copyout_left_aligned_to_halfword)
+ br.n copyout_left_aligned_to_halfword
addu DEST, DEST, 1
ASLOCAL(copyout_left_align_to_word)
st.h.usr %r5, DEST, %r0
#endif
addu SRC, SRC, 2
- br.n _ASM_LABEL(copyout_left_aligned_to_word)
+ br.n copyout_left_aligned_to_word
addu DEST, DEST, 2
ASLOCAL(copyout_right_align_to_halfword)
NOP
NOP
NOP
- br _ASM_LABEL(copyout_right_aligned_to_halfword)
+ br copyout_right_aligned_to_halfword
#else
- br.n _ASM_LABEL(copyout_right_aligned_to_halfword)
+ br.n copyout_right_aligned_to_halfword
st.b.usr %r5, DEST, LEN
#endif
NOP
NOP
NOP
- br _ASM_LABEL(copyout_right_aligned_to_word)
+ br copyout_right_aligned_to_word
#else
- br.n _ASM_LABEL(copyout_right_aligned_to_word)
+ br.n copyout_right_aligned_to_word
st.h.usr %r5, DEST, LEN
#endif
NOP
NOP
NOP
- bcnd ne0, LEN, _ASM_LABEL(copyout_right_aligned_to_doubleword)
+ bcnd ne0, LEN, copyout_right_aligned_to_doubleword
#else
- bcnd.n ne0, LEN, _ASM_LABEL(copyout_right_aligned_to_doubleword)
+ bcnd.n ne0, LEN, copyout_right_aligned_to_doubleword
st.usr %r5, DEST, LEN
#endif
- br.n _ASM_LABEL(Lcodone)
+ br.n Lcodone
or %r2, %r0, %r0 /* successful return */
ASLOCAL(copyout_byte_only)
st %r0, %r6, PCB_ONFAULT /* clear the handler */
ASLOCAL(Lcoflt)
- br.n _ASM_LABEL(Lcodone)
+ br.n Lcodone
or %r2, %r0, EFAULT /* return fault */
#undef SRC
/* setup fault handler */
ldcr %r6, CPU
ld %r7, %r6, CI_CURPCB
- or.u %r6, %r0, %hi16(_ASM_LABEL(Lcosflt))
- or %r6, %r6, %lo16(_ASM_LABEL(Lcosflt))
+ or.u %r6, %r0, %hi16(Lcosflt)
+ or %r6, %r6, %lo16(Lcosflt)
st %r6, %r7, PCB_ONFAULT
- bcnd lt0, CNT, _ASM_LABEL(Lcosflt)
- bcnd eq0, CNT, _ASM_LABEL(Lcosdone)
+ bcnd lt0, CNT, Lcosflt
+ bcnd eq0, CNT, Lcosdone
or %r6, %r0, 0
1:
ld.bu %r7, SRC, %r6
addu %r6, %r6, 1
cmp %r7, %r6, CNT
bb1 lt, %r7, 1b
- br.n _ASM_LABEL(Lcosdone)
+ br.n Lcosdone
or %r2, %r0, ENAMETOOLONG
2:
- br.n _ASM_LABEL(Lcosdone)
+ br.n Lcosdone
or %r2, %r0, 0
ASLOCAL(Lcosflt)
- br.n _ASM_LABEL(Lcosdone)
+ br.n Lcosdone
or %r2, %r0, EFAULT
ASLOCAL(Lcosdone)
subu %r31, %r31, 16
ldcr %r5, CPU
ld %r6, %r5, CI_CURPCB
- or.u %r5, %r0, %hi16(_ASM_LABEL(kcopy_fault))
+ or.u %r5, %r0, %hi16(kcopy_fault)
ld %r7, %r6, PCB_ONFAULT
- or %r5, %r5, %lo16(_ASM_LABEL(kcopy_fault))
+ or %r5, %r5, %lo16(kcopy_fault)
st %r7, %r31, 0 /* save old pcb_onfault */
st %r5, %r6, PCB_ONFAULT /* pcb_onfault = kcopy_fault */
- bcnd le0, %r4, _ASM_LABEL(kcopy_out) /* nothing to do if <= 0 */
+ bcnd le0, %r4, kcopy_out /* nothing to do if <= 0 */
/*
* check position of source and destination data
*/
cmp %r9, %r2, %r3 /* compare source address to destination */
- bb1 eq, %r9, _ASM_LABEL(kcopy_out) /* nothing to do if equal */
- bb1 lo, %r9, _ASM_LABEL(kcopy_reverse) /* reverse copy if src < dest */
+ bb1 eq, %r9, kcopy_out /* nothing to do if equal */
+ bb1 lo, %r9, kcopy_reverse /* reverse copy if src < dest */
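
kcopy picks its direction the way memmove does: equal pointers mean nothing to do, src < dest forces a backward copy so an overlapping tail is not clobbered, and anything else copies forward. A self-contained sketch, with plain byte loops standing in for the kf_ and kr_ strategy paths:

	#include <stddef.h>

	static void
	kcopy_sketch(const char *src, char *dst, size_t len)
	{
		size_t i;

		if (src == dst || len == 0)
			return;
		if (src < dst)			/* overlap: go backward */
			while (len--)
				dst[len] = src[len];
		else				/* safe: go forward */
			for (i = 0; i < len; i++)
				dst[i] = src[i];
	}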
/*
* source address is greater than destination address, copy forward
*/
cmp %r9, %r4, 16 /* see if we have at least 16 bytes */
- bb1 lt, %r9, _ASM_LABEL(kf_byte_copy) /* copy bytes for small length */
+ bb1 lt, %r9, kf_byte_copy /* copy bytes for small length */
/*
* determine copy strategy based on alignment of source and destination
*/
mask %r7, %r3, 3 /* get 2 low order bits of destination addr */
mak %r6, %r6, 0<4>/* convert source bits to table offset */
mak %r7, %r7, 0<2>/* convert destination bits to table offset */
- or.u %r12, %r0, %hi16(_ASM_LABEL(kf_strat))
- or %r12, %r12, %lo16(_ASM_LABEL(kf_strat))
+ or.u %r12, %r0, %hi16(kf_strat)
+ or %r12, %r12, %lo16(kf_strat)
addu %r6, %r6, %r7 /* compute final table offset for strategy */
ld %r12, %r12, %r6 /* load the strategy routine */
jmp %r12 /* branch to strategy routine */
st.b %r8, %r3, 2 /* store byte to destination */
addu %r2, %r2, 3 /* increment source pointer */
addu %r3, %r3, 3 /* increment destination pointer */
- br.n _ASM_LABEL(kf_word_copy)/* copy full words */
+ br.n kf_word_copy/* copy full words */
subu %r4, %r4, 3 /* decrement length */
/*
st.h %r6, %r3, 0 /* store half-word to destination */
addu %r2, %r2, 2 /* increment source pointer */
addu %r3, %r3, 2 /* increment destination pointer */
- br.n _ASM_LABEL(kf_word_copy)/* copy full words */
+ br.n kf_word_copy/* copy full words */
subu %r4, %r4, 2 /* decrement remaining length */
/*
*/
ASLOCAL(kf_word_copy)
cmp %r10, %r4, 16 /* see if we have 16 bytes remaining */
- bb1 lo, %r10, _ASM_LABEL(kf_byte_copy) /* not enough left, copy bytes */
+ bb1 lo, %r10, kf_byte_copy /* not enough left, copy bytes */
ld %r6, %r2, 0 /* load first word */
ld %r7, %r2, 4 /* load second word */
ld %r8, %r2, 8 /* load third word */
st %r9, %r3, 12 /* store fourth word */
addu %r2, %r2, 16 /* increment source pointer */
addu %r3, %r3, 16 /* increment destination pointer */
- br.n _ASM_LABEL(kf_word_copy)/* copy another block */
+ br.n kf_word_copy/* copy another block */
subu %r4, %r4, 16 /* decrement remaining length */
ASLOCAL(kf_1byte_half_copy)
ASLOCAL(kf_half_copy)
cmp %r10, %r4, 16 /* see if we have 16 bytes remaining */
- bb1 lo, %r10, _ASM_LABEL(kf_byte_copy) /* not enough left, copy bytes */
+ bb1 lo, %r10, kf_byte_copy /* not enough left, copy bytes */
ld.hu %r6, %r2, 0 /* load first half-word */
ld.hu %r7, %r2, 2 /* load second half-word */
ld.hu %r8, %r2, 4 /* load third half-word */
st.h %r13, %r3, 14 /* store eighth half-word */
addu %r2, %r2, 16 /* increment source pointer */
addu %r3, %r3, 16 /* increment destination pointer */
- br.n _ASM_LABEL(kf_half_copy)/* copy another block */
+ br.n kf_half_copy/* copy another block */
subu %r4, %r4, 16 /* decrement remaining length */
ASLOCAL(kf_byte_copy)
- bcnd eq0, %r4, _ASM_LABEL(kcopy_out) /* branch if nothing left to copy */
+ bcnd eq0, %r4, kcopy_out /* branch if nothing left to copy */
ld.bu %r6, %r2, 0 /* load byte from source */
st.b %r6, %r3, 0 /* store byte in destination */
addu %r2, %r2, 1 /* increment source pointer */
addu %r3, %r3, 1 /* increment destination pointer */
- br.n _ASM_LABEL(kf_byte_copy)/* branch for next byte */
+ br.n kf_byte_copy/* branch for next byte */
subu %r4, %r4, 1 /* decrement remaining length */
/*
* check for short data
*/
cmp %r9, %r4, 16 /* see if we have at least 16 bytes */
- bb1 lt, %r9, _ASM_LABEL(kr_byte_copy) /* copy bytes for small data length */
+ bb1 lt, %r9, kr_byte_copy /* copy bytes for small data length */
/*
* determine copy strategy based on alignment of source and destination
*/
mask %r7, %r3, 3 /* get 2 low order bits of destination addr */
mak %r6, %r6, 0<4>/* convert source bits to table offset */
mak %r7, %r7, 0<2>/* convert destination bits to table offset */
- or.u %r12, %r0, %hi16(_ASM_LABEL(kr_strat))
- or %r12, %r12, %lo16(_ASM_LABEL(kr_strat))
+ or.u %r12, %r0, %hi16(kr_strat)
+ or %r12, %r12, %lo16(kr_strat)
addu %r6, %r6, %r7 /* compute final table offset for strategy */
ld %r12, %r12, %r6 /* load the strategy routine */
jmp %r12 /* branch to strategy routine */
st.b %r6, %r3, 0 /* store byte to destination */
st.b %r7, %r3, 1 /* store byte to destination */
st.b %r8, %r3, 2 /* store byte to destination */
- br.n _ASM_LABEL(kr_word_copy)/* copy full words */
+ br.n kr_word_copy/* copy full words */
subu %r4, %r4, 3 /* decrement length */
/*
subu %r3, %r3, 2 /* decrement destination pointer */
ld.hu %r6, %r2, 0 /* load half-word from source */
st.h %r6, %r3, 0 /* store half-word to destination */
- br.n _ASM_LABEL(kr_word_copy)/* copy full words */
+ br.n kr_word_copy/* copy full words */
subu %r4, %r4, 2 /* decrement remaining length */
/*
*/
ASLOCAL(kr_word_copy)
cmp %r10, %r4, 16 /* see if we have 16 bytes remaining */
- bb1 lo, %r10, _ASM_LABEL(kr_byte_copy) /* not enough left, copy bytes */
+ bb1 lo, %r10, kr_byte_copy /* not enough left, copy bytes */
subu %r2, %r2, 16 /* decrement source pointer */
subu %r3, %r3, 16 /* decrement destination pointer */
ld %r6, %r2, 0 /* load first word */
st %r7, %r3, 4 /* store second word */
st %r8, %r3, 8 /* store third word */
st %r9, %r3, 12 /* store fourth word */
- br.n _ASM_LABEL(kr_word_copy)/* copy another block */
+ br.n kr_word_copy/* copy another block */
subu %r4, %r4, 16 /* decrement remaining length */
ASLOCAL(kr_1byte_half_copy)
ASLOCAL(kr_half_copy)
cmp %r10, %r4, 16 /* see if we have 16 bytes remaining */
- bb1 lo, %r10, _ASM_LABEL(kr_byte_copy) /* not enough left, copy bytes */
+ bb1 lo, %r10, kr_byte_copy /* not enough left, copy bytes */
subu %r2, %r2, 16 /* decrement source pointer */
subu %r3, %r3, 16 /* decrement destination pointer */
ld.hu %r6, %r2, 0 /* load first half-word */
st.h %r11, %r3, 10 /* store sixth half-word */
st.h %r12, %r3, 12 /* store seventh half-word */
st.h %r13, %r3, 14 /* store eighth half-word */
- br.n _ASM_LABEL(kr_half_copy)/* copy another block */
+ br.n kr_half_copy/* copy another block */
subu %r4, %r4, 16 /* decrement remaining length */
ASLOCAL(kr_byte_copy)
- bcnd eq0, %r4, _ASM_LABEL(kcopy_out) /* branch if nothing left to copy */
+ bcnd eq0, %r4, kcopy_out /* branch if nothing left to copy */
subu %r2, %r2, 1 /* decrement source pointer */
subu %r3, %r3, 1 /* decrement destination pointer */
ld.bu %r6, %r2, 0 /* load byte from source */
st.b %r6, %r3, 0 /* store byte in destination */
- br.n _ASM_LABEL(kr_byte_copy)/* branch for next byte */
+ br.n kr_byte_copy/* branch for next byte */
subu %r4, %r4, 1 /* decrement remaining length */
ASLOCAL(kcopy_out)
st %r7, %r6, PCB_ONFAULT /* restore previous pcb_onfault */
ASLOCAL(kcopy_fault)
- br.n _ASM_LABEL(kcopy_out_fault)
+ br.n kcopy_out_fault
or %r2, %r0, EFAULT /* return fault */
.data
.align 2
ASLOCAL(kf_strat)
- .word _ASM_LABEL(kf_word_copy)
- .word _ASM_LABEL(kf_byte_copy)
- .word _ASM_LABEL(kf_half_copy)
- .word _ASM_LABEL(kf_byte_copy)
- .word _ASM_LABEL(kf_byte_copy)
- .word _ASM_LABEL(kf_3byte_word_copy)
- .word _ASM_LABEL(kf_byte_copy)
- .word _ASM_LABEL(kf_1byte_half_copy)
- .word _ASM_LABEL(kf_half_copy)
- .word _ASM_LABEL(kf_byte_copy)
- .word _ASM_LABEL(kf_1half_word_copy)
- .word _ASM_LABEL(kf_byte_copy)
- .word _ASM_LABEL(kf_byte_copy)
- .word _ASM_LABEL(kf_1byte_half_copy)
- .word _ASM_LABEL(kf_byte_copy)
- .word _ASM_LABEL(kf_1byte_word_copy)
+ .word kf_word_copy
+ .word kf_byte_copy
+ .word kf_half_copy
+ .word kf_byte_copy
+ .word kf_byte_copy
+ .word kf_3byte_word_copy
+ .word kf_byte_copy
+ .word kf_1byte_half_copy
+ .word kf_half_copy
+ .word kf_byte_copy
+ .word kf_1half_word_copy
+ .word kf_byte_copy
+ .word kf_byte_copy
+ .word kf_1byte_half_copy
+ .word kf_byte_copy
+ .word kf_1byte_word_copy
ASLOCAL(kr_strat)
- .word _ASM_LABEL(kr_word_copy)
- .word _ASM_LABEL(kr_byte_copy)
- .word _ASM_LABEL(kr_half_copy)
- .word _ASM_LABEL(kr_byte_copy)
- .word _ASM_LABEL(kr_byte_copy)
- .word _ASM_LABEL(kr_1byte_word_copy)
- .word _ASM_LABEL(kr_byte_copy)
- .word _ASM_LABEL(kr_1byte_half_copy)
- .word _ASM_LABEL(kr_half_copy)
- .word _ASM_LABEL(kr_byte_copy)
- .word _ASM_LABEL(kr_1half_word_copy)
- .word _ASM_LABEL(kr_byte_copy)
- .word _ASM_LABEL(kr_byte_copy)
- .word _ASM_LABEL(kr_1byte_half_copy)
- .word _ASM_LABEL(kr_byte_copy)
- .word _ASM_LABEL(kr_3byte_word_copy)
+ .word kr_word_copy
+ .word kr_byte_copy
+ .word kr_half_copy
+ .word kr_byte_copy
+ .word kr_byte_copy
+ .word kr_1byte_word_copy
+ .word kr_byte_copy
+ .word kr_1byte_half_copy
+ .word kr_half_copy
+ .word kr_byte_copy
+ .word kr_1half_word_copy
+ .word kr_byte_copy
+ .word kr_byte_copy
+ .word kr_1byte_half_copy
+ .word kr_byte_copy
+ .word kr_3byte_word_copy
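
Each table is indexed by the two low bits of the source and destination addresses — the mak 0<4>/0<2> pair earlier computes (src & 3) * 16 + (dest & 3) * 4 as a byte offset — selecting one of 16 alignment-fixing prologues. Modeled as a 4x4 array:

	typedef void (*copy_strat_t)(void);
	extern copy_strat_t strat_table_sketch[4][4];	/* hypothetical */

	copy_strat_t
	pick_strategy_sketch(const char *src, char *dst)
	{
		return strat_table_sketch[(unsigned long)src & 3]
		    [(unsigned long)dst & 3];
	}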
#ifdef DDB
/*
GLOBAL(sigfill)
tb0 0, %r0, 130 /* breakpoint */
GLOBAL(sigfillsiz)
- .word _C_LABEL(sigfillsiz) - _C_LABEL(sigfill)
+ .word sigfillsiz - sigfill
/*
* Helper functions for pmap_copy_page() and pmap_zero_page().
-/* $OpenBSD: bzero.S,v 1.3 2022/01/11 09:21:35 jsg Exp $ */
+/* $OpenBSD: bzero.S,v 1.4 2022/12/06 18:51:00 guenther Exp $ */
/*
* Mach Operating System
* Copyright (c) 1993-1992 Carnegie Mellon University
* (of the destination address). If either are set, it's
* not word aligned.
*/
- bb1 0, R_dest, _ASM_LABEL(not_initially_word_aligned)
- bb1 1, R_dest, _ASM_LABEL(not_initially_word_aligned)
+ bb1 0, R_dest, not_initially_word_aligned
+ bb1 1, R_dest, not_initially_word_aligned
ASLOCAL(now_word_aligned)
/*
* before we get into the main loop, grab the
* address of the label "mark" below.
*/
- or.u R_mark_address, %r0, %hi16(_ASM_LABEL(mark))
- or R_mark_address, R_mark_address, %lo16(_ASM_LABEL(mark))
+ or.u R_mark_address, %r0, %hi16(mark)
+ or R_mark_address, R_mark_address, %lo16(mark)
ASLOCAL(top_of_main_loop)
#define MAX_AT_ONE_TIME 128
clr R_bytes, R_len, 2<0>
/* if we're done clearing WORDS, jump out */
- bcnd eq0, R_bytes, _ASM_LABEL(done_doing_words)
+ bcnd eq0, R_bytes, done_doing_words
/* if the number of bytes > MAX_AT_ONE_TIME, do only the max */
cmp R_temp, R_bytes, MAX_AT_ONE_TIME
st %r0, R_dest, 0x00 /* 0 */
ASLOCAL(mark)
- br.n _ASM_LABEL(top_of_main_loop)
+ br.n top_of_main_loop
addu R_dest, R_dest, R_bytes /* bump up the dest address */
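
The mark label closes a 128-byte run of st instructions; assuming the elided code jumps R_bytes back from mark (each st is four bytes long and clears four bytes), exactly R_bytes/4 stores execute per pass — the same trick as Duff's device. A scaled-down C sketch:

	/* scaled to 16 bytes; the real run clears up to 128 */
	static void
	clear_words_sketch(unsigned int *dest, unsigned int bytes)
	{
		switch (bytes >> 2) {	/* bytes: multiple of 4, <= 16 */
		case 4: dest[3] = 0;	/* FALLTHROUGH */
		case 3: dest[2] = 0;	/* FALLTHROUGH */
		case 2: dest[1] = 0;	/* FALLTHROUGH */
		case 1: dest[0] = 0;	/* FALLTHROUGH */
		case 0: break;
		}
	}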
ASLOCAL(done_doing_words)
st.b %r0, R_dest, 0
addu R_dest, R_dest, 1
mask R_temp, R_dest, 0x3
- bcnd.n eq0, R_temp, _ASM_LABEL(now_word_aligned)
+ bcnd.n eq0, R_temp, now_word_aligned
subu R_len, R_len, 1
- br _ASM_LABEL(not_initially_word_aligned)
+ br not_initially_word_aligned
#undef R_dest
#undef R_len
-/* $OpenBSD: copy_subr.S,v 1.6 2013/06/15 18:38:18 miod Exp $ */
+/* $OpenBSD: copy_subr.S,v 1.7 2022/12/06 18:51:00 guenther Exp $ */
/*
* Mach Operating System
* Copyright (c) 1993-1992 Carnegie Mellon University
or SAVE, DEST, %r0
#endif
- bcnd eq0,LEN,_ASM_LABEL(bcopy_out) /* nothing to do if == 0 */
+ bcnd eq0,LEN,bcopy_out /* nothing to do if == 0 */
/*
* check position of source and destination data
*/
cmp %r9,SRC,DEST /* compare source address to destination */
- bb1 eq,%r9,_ASM_LABEL(bcopy_out) /* nothing to do if equal */
+ bb1 eq,%r9,bcopy_out /* nothing to do if equal */
#if defined(MEMMOVE) || defined(OVBCOPY)
- bb1 lo,%r9,_ASM_LABEL(bcopy_reverse) /* reverse copy if src < dest */
+ bb1 lo,%r9,bcopy_reverse /* reverse copy if src < dest */
#endif
/*
* not have to care about overlapping areas: copy forward
*/
cmp %r9,LEN,16 /* see if we have at least 16 bytes */
- bb1 lt,%r9,_ASM_LABEL(f_byte_copy) /* copy bytes for small data length */
+ bb1 lt,%r9,f_byte_copy /* copy bytes for small data length */
/*
* determine copy strategy based on alignment of source and destination
*/
mask %r7,DEST,3 /* get 2 low order bits of destination addr */
mak %r6,%r6,0<4> /* convert source bits to table offset */
mak %r7,%r7,0<2> /* convert destination bits to table offset */
- or.u %r12,%r0,%hi16(_ASM_LABEL(f_strat))
- or %r12,%r12,%lo16(_ASM_LABEL(f_strat))
+ or.u %r12,%r0,%hi16(f_strat)
+ or %r12,%r12,%lo16(f_strat)
addu %r6,%r6,%r7 /* compute final table offset for strategy */
ld %r12,%r12,%r6 /* load the strategy routine */
jmp %r12 /* branch to strategy routine */
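The three instructions before the jmp turn the alignment bits into a byte offset: the two source bits are shifted left 4 (x16 bytes, i.e. four 4-byte entries) and the two destination bits left 2 (x4 bytes, one entry), so the table is effectively a 4x4 grid. A hedged C equivalent of the dispatch (the function and table names are hypothetical):

    typedef void (*copy_fn)(void);
    extern copy_fn f_strat_c[16];   /* the 16 f_strat entries, in table order */

    static copy_fn
    pick_strategy(unsigned long src, unsigned long dest)
    {
            /* mak %r6,%r6,0<4> and mak %r7,%r7,0<2>, then addu, as one index */
            return f_strat_c[((src & 3) << 2) | (dest & 3)];
    }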
st.b %r8,DEST,2 /* store byte to destination */
addu SRC,SRC,3 /* increment source pointer */
addu DEST,DEST,3 /* increment destination pointer */
- br.n _ASM_LABEL(f_word_copy) /* copy full words */
+ br.n f_word_copy /* copy full words */
subu LEN,LEN,3 /* decrement length */
/*
st.h %r6,DEST,0 /* store half-word to destination */
addu SRC,SRC,2 /* increment source pointer */
addu DEST,DEST,2 /* increment destination pointer */
- br.n _ASM_LABEL(f_word_copy) /* copy full words */
+ br.n f_word_copy /* copy full words */
subu LEN,LEN,2 /* decrement remaining length */
/*
*/
ASLOCAL(f_word_copy)
cmp %r10,LEN,16 /* see if we have 16 bytes remaining */
- bb1 lo,%r10,_ASM_LABEL(f_byte_copy) /* not enough left, copy bytes */
+ bb1 lo,%r10,f_byte_copy /* not enough left, copy bytes */
ld %r6,SRC,0 /* load first word */
ld %r7,SRC,4 /* load second word */
ld %r8,SRC,8 /* load third word */
st %r9,DEST,12 /* store fourth word */
addu SRC,SRC,16 /* increment source pointer */
addu DEST,DEST,16 /* increment destination pointer */
- br.n _ASM_LABEL(f_word_copy) /* branch to copy another block */
+ br.n f_word_copy /* branch to copy another block */
subu LEN,LEN,16 /* decrement remaining length */
ASLOCAL(f_1byte_half_copy)
ASLOCAL(f_half_copy)
cmp %r10,LEN,16 /* see if we have 16 bytes remaining */
- bb1 lo,%r10,_ASM_LABEL(f_byte_copy) /* not enough left, copy bytes */
+ bb1 lo,%r10,f_byte_copy /* not enough left, copy bytes */
ld.hu %r6,SRC,0 /* load first half-word */
ld.hu %r7,SRC,2 /* load second half-word */
ld.hu %r8,SRC,4 /* load third half-word */
st.h %r13,DEST,14 /* store eighth half-word */
addu SRC,SRC,16 /* increment source pointer */
addu DEST,DEST,16 /* increment destination pointer */
- br.n _ASM_LABEL(f_half_copy) /* branch to copy another block */
+ br.n f_half_copy /* branch to copy another block */
subu LEN,LEN,16 /* decrement remaining length */
ASLOCAL(f_byte_copy)
- bcnd eq0,LEN,_ASM_LABEL(bcopy_out) /* branch if nothing left to copy */
+ bcnd eq0,LEN,bcopy_out /* branch if nothing left to copy */
ld.bu %r6,SRC,0 /* load byte from source */
st.b %r6,DEST,0 /* store byte in destination */
addu SRC,SRC,1 /* increment source pointer */
addu DEST,DEST,1 /* increment destination pointer */
- br.n _ASM_LABEL(f_byte_copy) /* branch for next byte */
+ br.n f_byte_copy /* branch for next byte */
subu LEN,LEN,1 /* decrement remaining length */
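Note the br.n/bcnd.n forms throughout: the instruction in the branch delay slot executes before the branch takes effect, so br.n f_byte_copy followed by subu LEN,LEN,1 decrements the count on every iteration. The tail loop above therefore computes exactly this, restated in C:

    #include <stddef.h>

    /* Plain-C equivalent of the f_byte_copy tail loop above. */
    static void
    byte_copy(const unsigned char *src, unsigned char *dest, size_t len)
    {
            while (len != 0) {              /* bcnd eq0,LEN,bcopy_out */
                    *dest++ = *src++;       /* ld.bu / st.b pair */
                    len--;                  /* subu in the br.n delay slot */
            }
    }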
#if defined(MEMMOVE) || defined(OVBCOPY)
* check for short data
*/
cmp %r9,LEN,16 /* see if we have at least 16 bytes */
- bb1 lt,%r9,_ASM_LABEL(r_byte_copy) /* copy bytes for small data length */
+ bb1 lt,%r9,r_byte_copy /* copy bytes for small data length */
/*
* determine copy strategy based on alignment of source and destination
*/
mask %r7,DEST,3 /* get 2 low order bits of destination addr */
mak %r6,%r6,0<4> /* convert source bits to table offset */
mak %r7,%r7,0<2> /* convert destination bits to table offset */
- or.u %r12,%r0,%hi16(_ASM_LABEL(r_strat))
- or %r12,%r12,%lo16(_ASM_LABEL(r_strat))
+ or.u %r12,%r0,%hi16(r_strat)
+ or %r12,%r12,%lo16(r_strat)
addu %r6,%r6,%r7 /* compute final table offset for strategy */
ld %r12,%r12,%r6 /* load the strategy routine */
jmp %r12 /* branch to strategy routine */
st.b %r6,DEST,0 /* store byte to destination */
st.b %r7,DEST,1 /* store byte to destination */
st.b %r8,DEST,2 /* store byte to destination */
- br.n _ASM_LABEL(r_word_copy) /* copy full words */
+ br.n r_word_copy /* copy full words */
subu LEN,LEN,3 /* decrement length */
/*
subu DEST,DEST,2 /* decrement destination pointer */
ld.hu %r6,SRC,0 /* load half-word from source */
st.h %r6,DEST,0 /* store half-word to destination */
- br.n _ASM_LABEL(r_word_copy) /* copy full words */
+ br.n r_word_copy /* copy full words */
subu LEN,LEN,2 /* decrement remaining length */
/*
*/
ASLOCAL(r_word_copy)
cmp %r10,LEN,16 /* see if we have 16 bytes remaining */
- bb1 lo,%r10,_ASM_LABEL(r_byte_copy) /* not enough left, copy bytes */
+ bb1 lo,%r10,r_byte_copy /* not enough left, copy bytes */
subu SRC,SRC,16 /* decrement source pointer */
subu DEST,DEST,16 /* decrement destination pointer */
ld %r6,SRC,0 /* load first word */
st %r7,DEST,4 /* store second word */
st %r8,DEST,8 /* store third word */
st %r9,DEST,12 /* store fourth word */
- br.n _ASM_LABEL(r_word_copy) /* branch to copy another block */
+ br.n r_word_copy /* branch to copy another block */
subu LEN,LEN,16 /* decrement remaining length */
ASLOCAL(r_1byte_half_copy)
ASLOCAL(r_half_copy)
cmp %r10,LEN,16 /* see if we have 16 bytes remaining */
- bb1 lo,%r10,_ASM_LABEL(r_byte_copy) /* not enough left, copy bytes */
+ bb1 lo,%r10,r_byte_copy /* not enough left, copy bytes */
subu SRC,SRC,16 /* decrement source pointer */
subu DEST,DEST,16 /* decrement destination pointer */
ld.hu %r6,SRC,0 /* load first half-word */
st.h %r11,DEST,10 /* store sixth half-word */
st.h %r12,DEST,12 /* store seventh half-word */
st.h %r13,DEST,14 /* store eighth half-word */
- br.n _ASM_LABEL(r_half_copy) /* branch to copy another block */
+ br.n r_half_copy /* branch to copy another block */
subu LEN,LEN,16 /* decrement remaining length */
ASLOCAL(r_byte_copy)
- bcnd eq0,LEN,_ASM_LABEL(bcopy_out) /* branch if nothing left to copy */
+ bcnd eq0,LEN,bcopy_out /* branch if nothing left to copy */
subu SRC,SRC,1 /* decrement source pointer */
subu DEST,DEST,1 /* decrement destination pointer */
ld.bu %r6,SRC,0 /* load byte from source */
st.b %r6,DEST,0 /* store byte in destination */
- br.n _ASM_LABEL(r_byte_copy) /* branch for next byte */
+ br.n r_byte_copy /* branch for next byte */
subu LEN,LEN,1 /* decrement remaining length */
#endif /* MEMMOVE || OVBCOPY */
.data
.align 2
ASLOCAL(f_strat)
- .word _ASM_LABEL(f_word_copy)
- .word _ASM_LABEL(f_byte_copy)
- .word _ASM_LABEL(f_half_copy)
- .word _ASM_LABEL(f_byte_copy)
- .word _ASM_LABEL(f_byte_copy)
- .word _ASM_LABEL(f_3byte_word_copy)
- .word _ASM_LABEL(f_byte_copy)
- .word _ASM_LABEL(f_1byte_half_copy)
- .word _ASM_LABEL(f_half_copy)
- .word _ASM_LABEL(f_byte_copy)
- .word _ASM_LABEL(f_1half_word_copy)
- .word _ASM_LABEL(f_byte_copy)
- .word _ASM_LABEL(f_byte_copy)
- .word _ASM_LABEL(f_1byte_half_copy)
- .word _ASM_LABEL(f_byte_copy)
- .word _ASM_LABEL(f_1byte_word_copy)
+ .word f_word_copy
+ .word f_byte_copy
+ .word f_half_copy
+ .word f_byte_copy
+ .word f_byte_copy
+ .word f_3byte_word_copy
+ .word f_byte_copy
+ .word f_1byte_half_copy
+ .word f_half_copy
+ .word f_byte_copy
+ .word f_1half_word_copy
+ .word f_byte_copy
+ .word f_byte_copy
+ .word f_1byte_half_copy
+ .word f_byte_copy
+ .word f_1byte_word_copy
#if defined(MEMMOVE) || defined(OVBCOPY)
ASLOCAL(r_strat)
- .word _ASM_LABEL(r_word_copy)
- .word _ASM_LABEL(r_byte_copy)
- .word _ASM_LABEL(r_half_copy)
- .word _ASM_LABEL(r_byte_copy)
- .word _ASM_LABEL(r_byte_copy)
- .word _ASM_LABEL(r_1byte_word_copy)
- .word _ASM_LABEL(r_byte_copy)
- .word _ASM_LABEL(r_1byte_half_copy)
- .word _ASM_LABEL(r_half_copy)
- .word _ASM_LABEL(r_byte_copy)
- .word _ASM_LABEL(r_1half_word_copy)
- .word _ASM_LABEL(r_byte_copy)
- .word _ASM_LABEL(r_byte_copy)
- .word _ASM_LABEL(r_1byte_half_copy)
- .word _ASM_LABEL(r_byte_copy)
- .word _ASM_LABEL(r_3byte_word_copy)
+ .word r_word_copy
+ .word r_byte_copy
+ .word r_half_copy
+ .word r_byte_copy
+ .word r_byte_copy
+ .word r_1byte_word_copy
+ .word r_byte_copy
+ .word r_1byte_half_copy
+ .word r_half_copy
+ .word r_byte_copy
+ .word r_1half_word_copy
+ .word r_byte_copy
+ .word r_byte_copy
+ .word r_1byte_half_copy
+ .word r_byte_copy
+ .word r_3byte_word_copy
#endif
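The r_* routines exist because MEMMOVE/OVBCOPY must handle overlap: when the source lies below the destination, a forward copy would clobber source bytes before they are read, so the copy runs from the high end down, mirroring each forward strategy. A minimal sketch of that direction choice (the word/halfword strategy selection is elided here):

    #include <stddef.h>

    /* Direction choice behind the f_/r_ split: a minimal memmove sketch. */
    void *
    memmove_sketch(void *dst, const void *src, size_t len)
    {
            unsigned char *d = dst;
            const unsigned char *s = src;

            if (d == s || len == 0)         /* bcopy_out: nothing to do */
                    return dst;
            if (s < d) {                    /* bb1 lo: reverse copy if src < dest */
                    while (len--)
                            d[len] = s[len];
            } else {                        /* safe to copy forward */
                    while (len--)
                            *d++ = *s++;
            }
            return dst;
    }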