author		Leandro Lupori <luporl@FreeBSD.org>	2021-03-25 14:54:06 +0000
committer	Leandro Lupori <luporl@FreeBSD.org>	2021-03-25 16:07:01 +0000
commit		2f561284033c0f53d0911baf9056078e6026a278 (patch)
tree		b14e93b9f27f3ae1b8c31d39a6ebc2e8df4bc5f2 /lib/libc
parent		4e38478c595a9e6225b525890d7ee269a203c200 (diff)
[PowerPC64] Enforce natural alignment in bcopy
POWER architecture CPUs (Book-S) require natural alignment for
cache-inhibited storage accesses. Since we can't know the caching
model for a page ahead of time, always enforce natural alignment
in bcopy. This fixes a SIGBUS when calling the function with
misaligned pointers on POWER7.

Submitted by:	Bruno Larsen <bruno.larsen@eldorado.org.br>
Reviewed by:	luporl, bdragon (IRC)
MFC after:	1 week
Sponsored by:	Eldorado Research Institute (eldorado.org.br)
Differential Revision:	https://reviews.freebsd.org/D28776
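For illustration only (not part of the commit): a minimal C sketch of the
kind of call the message describes. The two pointers differ in their low
3 address bits, so no prefix of byte copies can make both 8-byte aligned
at once; on a cache-inhibited mapping (e.g. device memory on POWER7) the
pre-patch doubleword loads/stores could then raise SIGBUS. The buffer and
offsets here are hypothetical.

	#include <strings.h>

	int
	main(void)
	{
		static char buf[64];

		/*
		 * buf + 1 and buf + 34 differ in their low 3 bits, so the
		 * copy cannot be made 8-byte aligned for both pointers at
		 * once; on cache-inhibited pages the old doubleword path
		 * could fault here.  On ordinary cached memory this runs
		 * fine either way.
		 */
		bcopy(buf + 1, buf + 34, 16);
		return (0);
	}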
Diffstat (limited to 'lib/libc')
-rw-r--r--	lib/libc/powerpc64/string/bcopy.S	34
1 file changed, 34 insertions, 0 deletions
diff --git a/lib/libc/powerpc64/string/bcopy.S b/lib/libc/powerpc64/string/bcopy.S
index bb860c098feb..4dc80c264362 100644
--- a/lib/libc/powerpc64/string/bcopy.S
+++ b/lib/libc/powerpc64/string/bcopy.S
@@ -34,6 +34,11 @@ __FBSDID("$FreeBSD$");
#define BLOCK_SIZE (1 << BLOCK_SIZE_BITS)
#define BLOCK_SIZE_MASK (BLOCK_SIZE - 1)
+/* Minimum 8-byte alignment, to avoid cache-inhibited alignment faults. */
+#ifndef ALIGN_MASK
+#define ALIGN_MASK 0x7
+#endif
+
#define MULTI_PHASE_THRESHOLD 512
#ifndef FN_NAME
@@ -66,9 +71,38 @@ ENTRY(FN_NAME)
mr %r4, %r0
#endif
+ /* First check for relative alignment; if unaligned, copy one byte at a time */
+ andi. %r8, %r3, ALIGN_MASK
+ andi. %r7, %r4, ALIGN_MASK
+ cmpd %r7, %r8
+ bne .Lunaligned
+
+
cmpldi %r5, MULTI_PHASE_THRESHOLD
bge .Lmulti_phase
+ b .Lfast_copy
+
+.Lunaligned:
+ /* forward or backward copy? */
+ cmpd %r4, %r3
+ blt .Lbackward_unaligned
+
+ /* Just need to set up the increment and jump to the copy */
+ li %r0, 1
+ mtctr %r5
+ b .Lsingle_1_loop
+
+.Lbackward_unaligned:
+ /* advance src and dst to last byte, set decrement and jump to copy */
+ add %r3, %r3, %r5
+ addi %r3, %r3, -1
+ add %r4, %r4, %r5
+ addi %r4, %r4, -1
+ li %r0, -1
+ mtctr %r5
+ b .Lsingle_1_loop
+
+.Lfast_copy:
/* align src */
cmpd %r4, %r3 /* forward or backward copy? */
blt .Lbackward_align
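For readers who don't speak PPC64 assembly, a rough C rendering of the
control flow this patch adds (a sketch, assuming %r3/%r4/%r5 hold
dst/src/len at this point, which is how the surrounding file uses them;
the helper name is hypothetical, not the real implementation):

	#include <stddef.h>
	#include <stdint.h>

	#define ALIGN_MASK	0x7	/* minimum 8-byte alignment, as in the patch */

	/*
	 * Sketch of the added logic: if src and dst are not relatively
	 * aligned, copy one byte at a time instead of taking the
	 * doubleword fast path, walking backward when src sits below
	 * dst so overlapping regions stay correct (bcopy must behave
	 * like memmove).
	 */
	static void
	copy_sketch(char *dst, const char *src, size_t len)
	{
		if (((uintptr_t)src & ALIGN_MASK) !=
		    ((uintptr_t)dst & ALIGN_MASK)) {
			if ((uintptr_t)src < (uintptr_t)dst) {
				/* .Lbackward_unaligned: start at the last byte */
				while (len--)
					dst[len] = src[len];
			} else {
				/* forward copy, one byte per iteration */
				while (len--)
					*dst++ = *src++;
			}
			return;
		}
		/* relatively aligned: continue to the fast path (.Lfast_copy) */
	}

The assembly implements both directions with the single .Lsingle_1_loop,
loading %r0 with a +1 or -1 step and the count into CTR before jumping.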