Prefer numeric labels over L<digit> labels, as the latter clutter
author: guenther <guenther@openbsd.org>
Wed, 7 Dec 2022 19:26:02 +0000 (19:26 +0000)
committer: guenther <guenther@openbsd.org>
Wed, 7 Dec 2022 19:26:02 +0000 (19:26 +0000)
the final kernel symbol table.
Add END()s to match ENTRY()s.

ok deraadt@

sys/lib/libkern/arch/amd64/bcmp.S
sys/lib/libkern/arch/amd64/bzero.S
sys/lib/libkern/arch/amd64/ffs.S
sys/lib/libkern/arch/amd64/memchr.S
sys/lib/libkern/arch/amd64/memcmp.S
sys/lib/libkern/arch/amd64/memset.S

index 2ef8087..573d7b1 100644 (file)
@@ -8,15 +8,16 @@ ENTRY(bcmp)
        shrq    $3,%rcx
        repe
        cmpsq
-       jne     L1
+       jne     1f
 
        movq    %rdx,%rcx               /* compare remainder by bytes */
        andq    $7,%rcx
        repe
        cmpsb
-       je      L2
+       je      2f
 
-L1:    incl    %eax
-L2:    RETGUARD_CHECK(bcmp, r11)
+1:     incl    %eax
+2:     RETGUARD_CHECK(bcmp, r11)
        ret
        lfence
+END(bcmp)
index db05dbf..5407498 100644 (file)
@@ -18,7 +18,7 @@ ENTRY(bzero)
         * unaligned set.
         */
        cmpq    $16,%rdx
-       jb      L1
+       jb      1f
 
        movq    %rdi,%rcx               /* compute misalignment */
        negq    %rcx
@@ -33,9 +33,10 @@ ENTRY(bzero)
        rep
        stosq
 
-L1:    movq    %rdx,%rcx               /* zero remainder by bytes */
+1:     movq    %rdx,%rcx               /* zero remainder by bytes */
        rep
        stosb
        RETGUARD_CHECK(bzero, r11)
        ret
        lfence
+END(bzero)
index 10154de..62689ca 100644 (file)
@@ -9,12 +9,13 @@
 ENTRY(ffs)
        RETGUARD_SETUP(ffs, r11)
        bsfl    %edi,%eax
-       jz      L1                      /* ZF is set if all bits are 0 */
+       jz      1f                      /* ZF is set if all bits are 0 */
        incl    %eax                    /* bits numbered from 1, not 0 */
-       jmp L2
+       jmp     2f
 
        _ALIGN_TRAPS
-L1:    xorl    %eax,%eax               /* clear result */
-L2:    RETGUARD_CHECK(ffs, r11)
+1:     xorl    %eax,%eax               /* clear result */
+2:     RETGUARD_CHECK(ffs, r11)
        ret
        lfence
+END(ffs)
index 6a5943e..063159b 100644 (file)
@@ -11,13 +11,14 @@ ENTRY(memchr)
        movb    %sil,%al                /* set character to search for */
        movq    %rdx,%rcx               /* set length of search */
        testq   %rcx,%rcx               /* test for len == 0 */
-       jz      L1
+       jz      1f
        repne                           /* search! */
        scasb
-       jne     L1                      /* scan failed, return null */
+       jne     1f                      /* scan failed, return null */
        leaq    -1(%rdi),%rax           /* adjust result of scan */
-       jmp L2
-L1:    xorq    %rax,%rax
-L2:    RETGUARD_CHECK(memchr, r11)
+       jmp     2f
+1:     xorq    %rax,%rax
+2:     RETGUARD_CHECK(memchr, r11)
        ret
        lfence
+END(memchr)
index c16879c..b994436 100644 (file)
@@ -12,27 +12,28 @@ ENTRY(memcmp)
        shrq    $3,%rcx
        repe
        cmpsq
-       jne     L5                      /* do we match so far? */
+       jne     5f                      /* do we match so far? */
 
        movq    %rdx,%rcx               /* compare remainder by bytes */
        andq    $7,%rcx
        repe
        cmpsb
-       jne     L6                      /* do we match? */
+       jne     6f                      /* do we match? */
 
        xorl    %eax,%eax               /* we match, return zero        */
-       jmp L7
+       jmp     7f
 
-L5:    movl    $8,%ecx                 /* We know that one of the next */
+5:     movl    $8,%ecx                 /* We know that one of the next */
        subq    %rcx,%rdi               /* eight pairs of bytes do not  */
        subq    %rcx,%rsi               /* match.                       */
        repe
        cmpsb
-L6:    xorl    %eax,%eax               /* Perform unsigned comparison  */
+6:     xorl    %eax,%eax               /* Perform unsigned comparison  */
        movb    -1(%rdi),%al
        xorl    %edx,%edx
        movb    -1(%rsi),%dl
        subl    %edx,%eax
-L7:    RETGUARD_CHECK(memcmp, r11)
+7:     RETGUARD_CHECK(memcmp, r11)
        ret
        lfence
+END(memcmp)
index e57a7c8..ffab4df 100644 (file)
@@ -19,7 +19,7 @@ ENTRY(memset)
         * unaligned set.
         */
        cmpq    $0x0f,%rcx
-       jle     L1
+       jle     1f
 
        movb    %al,%ah                 /* copy char to all bytes in word */
        movl    %eax,%edx
@@ -47,9 +47,10 @@ ENTRY(memset)
 
        movq    %r8,%rcx                /* set remainder by bytes */
        andq    $7,%rcx
-L1:    rep
+1:     rep
        stosb
        movq    %r11,%rax
        RETGUARD_CHECK(memset, r10)
        ret
        lfence
+END(memset)