From: "Edgar E. Iglesias" <edgar.iglesias@amd.com>

Use the generic xen/linkage.h macros to annotate code symbols.

Signed-off-by: Edgar E. Iglesias <edgar.iglesias@amd.com>
Reviewed-by: Stefano Stabellini <sstabellini@kernel.org>
---
 xen/arch/arm/arm64/lib/memchr.S  | 4 ++--
 xen/arch/arm/arm64/lib/memcmp.S  | 4 ++--
 xen/arch/arm/arm64/lib/memcpy.S  | 4 ++--
 xen/arch/arm/arm64/lib/memmove.S | 4 ++--
 xen/arch/arm/arm64/lib/memset.S  | 4 ++--
 xen/arch/arm/arm64/lib/strchr.S  | 4 ++--
 xen/arch/arm/arm64/lib/strcmp.S  | 4 ++--
 xen/arch/arm/arm64/lib/strlen.S  | 4 ++--
 xen/arch/arm/arm64/lib/strncmp.S | 4 ++--
 xen/arch/arm/arm64/lib/strnlen.S | 4 ++--
 xen/arch/arm/arm64/lib/strrchr.S | 4 ++--
 11 files changed, 22 insertions(+), 22 deletions(-)

diff --git a/xen/arch/arm/arm64/lib/memchr.S b/xen/arch/arm/arm64/lib/memchr.S
index 81f113bb1c..3d8aeca3ca 100644
--- a/xen/arch/arm/arm64/lib/memchr.S
+++ b/xen/arch/arm/arm64/lib/memchr.S
@@ -29,7 +29,7 @@
  * Returns:
  *     x0 - address of first occurrence of 'c' or 0
  */
-ENTRY(memchr)
+FUNC(memchr)
        and     w1, w1, #0xff
 1:     subs    x2, x2, #1
        b.mi    2f
@@ -40,4 +40,4 @@ ENTRY(memchr)
        ret
 2:     mov     x0, #0
        ret
-ENDPROC(memchr)
+END(memchr)
diff --git a/xen/arch/arm/arm64/lib/memcmp.S b/xen/arch/arm/arm64/lib/memcmp.S
index 87c2537ffe..d77dd4ce52 100644
--- a/xen/arch/arm/arm64/lib/memcmp.S
+++ b/xen/arch/arm/arm64/lib/memcmp.S
@@ -57,7 +57,7 @@ pos           .req    x11
 limit_wd       .req    x12
 mask           .req    x13
 
-ENTRY(memcmp)
+FUNC(memcmp)
        cbz     limit, .Lret0
        eor     tmp1, src1, src2
        tst     tmp1, #7
@@ -254,4 +254,4 @@ CPU_LE( rev data2, data2 )
 .Lret0:
        mov     result, #0
        ret
-ENDPROC(memcmp)
+END(memcmp)
diff --git a/xen/arch/arm/arm64/lib/memcpy.S b/xen/arch/arm/arm64/lib/memcpy.S
index d90d20ef3e..1e04b79010 100644
--- a/xen/arch/arm/arm64/lib/memcpy.S
+++ b/xen/arch/arm/arm64/lib/memcpy.S
@@ -55,7 +55,7 @@ C_h   .req    x12
 D_l    .req    x13
 D_h    .req    x14
 
-ENTRY(memcpy)
+FUNC(memcpy)
        mov     dst, dstin
        cmp     count, #16
        /*When memory length is less than 16, the accessed are not aligned.*/
@@ -197,4 +197,4 @@ ENTRY(memcpy)
        tst     count, #0x3f
        b.ne    .Ltail63
        ret
-ENDPROC(memcpy)
+END(memcpy)
diff --git a/xen/arch/arm/arm64/lib/memmove.S b/xen/arch/arm/arm64/lib/memmove.S
index a49de845d0..14438dbe9c 100644
--- a/xen/arch/arm/arm64/lib/memmove.S
+++ b/xen/arch/arm/arm64/lib/memmove.S
@@ -56,7 +56,7 @@ C_h   .req    x12
 D_l    .req    x13
 D_h    .req    x14
 
-ENTRY(memmove)
+FUNC(memmove)
        cmp     dstin, src
        b.lo    memcpy
        add     tmp1, src, count
@@ -193,4 +193,4 @@ ENTRY(memmove)
        tst     count, #0x3f
        b.ne    .Ltail63
        ret
-ENDPROC(memmove)
+END(memmove)
diff --git a/xen/arch/arm/arm64/lib/memset.S b/xen/arch/arm/arm64/lib/memset.S
index 5bf751521b..367fa60175 100644
--- a/xen/arch/arm/arm64/lib/memset.S
+++ b/xen/arch/arm/arm64/lib/memset.S
@@ -53,7 +53,7 @@ dst           .req    x8
 tmp3w          .req    w9
 tmp3           .req    x9
 
-ENTRY(memset)
+FUNC(memset)
        mov     dst, dstin      /* Preserve return value.  */
        and     A_lw, val, #255
        orr     A_lw, A_lw, A_lw, lsl #8
@@ -212,4 +212,4 @@ ENTRY(memset)
        ands    count, count, zva_bits_x
        b.ne    .Ltail_maybe_long
        ret
-ENDPROC(memset)
+END(memset)
diff --git a/xen/arch/arm/arm64/lib/strchr.S b/xen/arch/arm/arm64/lib/strchr.S
index 0506b0ff7f..83fd81e8ef 100644
--- a/xen/arch/arm/arm64/lib/strchr.S
+++ b/xen/arch/arm/arm64/lib/strchr.S
@@ -27,7 +27,7 @@
  * Returns:
  *     x0 - address of first occurrence of 'c' or 0
  */
-ENTRY(strchr)
+FUNC(strchr)
        and     w1, w1, #0xff
 1:     ldrb    w2, [x0], #1
        cmp     w2, w1
@@ -37,4 +37,4 @@ ENTRY(strchr)
        cmp     w2, w1
        csel    x0, x0, xzr, eq
        ret
-ENDPROC(strchr)
+END(strchr)
diff --git a/xen/arch/arm/arm64/lib/strcmp.S b/xen/arch/arm/arm64/lib/strcmp.S
index c6f42dd255..7677108e26 100644
--- a/xen/arch/arm/arm64/lib/strcmp.S
+++ b/xen/arch/arm/arm64/lib/strcmp.S
@@ -59,7 +59,7 @@ tmp3          .req    x9
 zeroones       .req    x10
 pos            .req    x11
 
-ENTRY(strcmp)
+FUNC(strcmp)
        eor     tmp1, src1, src2
        mov     zeroones, #REP8_01
        tst     tmp1, #7
@@ -230,4 +230,4 @@ CPU_BE(     orr     syndrome, diff, has_nul )
        lsr     data1, data1, #56
        sub     result, data1, data2, lsr #56
        ret
-ENDPROC(strcmp)
+END(strcmp)
diff --git a/xen/arch/arm/arm64/lib/strlen.S b/xen/arch/arm/arm64/lib/strlen.S
index fb6aaf1a6a..10feedaf81 100644
--- a/xen/arch/arm/arm64/lib/strlen.S
+++ b/xen/arch/arm/arm64/lib/strlen.S
@@ -56,7 +56,7 @@ pos           .req    x12
 #define REP8_7f 0x7f7f7f7f7f7f7f7f
 #define REP8_80 0x8080808080808080
 
-ENTRY(strlen)
+FUNC(strlen)
        mov     zeroones, #REP8_01
        bic     src, srcin, #15
        ands    tmp1, srcin, #15
@@ -123,4 +123,4 @@ CPU_LE( lsr tmp2, tmp2, tmp1 )      /* Shift (tmp1 & 63).  */
        csinv   data1, data1, xzr, le
        csel    data2, data2, data2a, le
        b       .Lrealigned
-ENDPROC(strlen)
+END(strlen)
diff --git a/xen/arch/arm/arm64/lib/strncmp.S b/xen/arch/arm/arm64/lib/strncmp.S
index a4a0f779f5..c00a641fc7 100644
--- a/xen/arch/arm/arm64/lib/strncmp.S
+++ b/xen/arch/arm/arm64/lib/strncmp.S
@@ -64,7 +64,7 @@ limit_wd      .req    x13
 mask           .req    x14
 endloop                .req    x15
 
-ENTRY(strncmp)
+FUNC(strncmp)
        cbz     limit, .Lret0
        eor     tmp1, src1, src2
        mov     zeroones, #REP8_01
@@ -307,4 +307,4 @@ CPU_BE( orr syndrome, diff, has_nul )
 .Lret0:
        mov     result, #0
        ret
-ENDPROC(strncmp)
+END(strncmp)
diff --git a/xen/arch/arm/arm64/lib/strnlen.S b/xen/arch/arm/arm64/lib/strnlen.S
index 81c8e8b54e..2059ba782c 100644
--- a/xen/arch/arm/arm64/lib/strnlen.S
+++ b/xen/arch/arm/arm64/lib/strnlen.S
@@ -59,7 +59,7 @@ limit_wd      .req    x14
 #define REP8_7f 0x7f7f7f7f7f7f7f7f
 #define REP8_80 0x8080808080808080
 
-ENTRY(strnlen)
+FUNC(strnlen)
        cbz     limit, .Lhit_limit
        mov     zeroones, #REP8_01
        bic     src, srcin, #15
@@ -168,4 +168,4 @@ CPU_LE( lsr tmp2, tmp2, tmp4 )      /* Shift (tmp1 & 63).  */
 .Lhit_limit:
        mov     len, limit
        ret
-ENDPROC(strnlen)
+END(strnlen)
diff --git a/xen/arch/arm/arm64/lib/strrchr.S b/xen/arch/arm/arm64/lib/strrchr.S
index 07059983f8..81033c0822 100644
--- a/xen/arch/arm/arm64/lib/strrchr.S
+++ b/xen/arch/arm/arm64/lib/strrchr.S
@@ -26,7 +26,7 @@
  * Returns:
  *     x0 - address of last occurrence of 'c' or 0
  */
-ENTRY(strrchr)
+FUNC(strrchr)
        mov     x3, #0
        and     w1, w1, #0xff
 1:     ldrb    w2, [x0], #1
@@ -37,4 +37,4 @@ ENTRY(strrchr)
        b       1b
 2:     mov     x0, x3
        ret
-ENDPROC(strrchr)
+END(strrchr)
-- 
2.40.1


Reply via email to