svn commit: r278643 - stable/10/sys/arm/arm
Ian Lepore
ian at FreeBSD.org
Fri Feb 13 00:06:09 UTC 2015
Author: ian
Date: Fri Feb 13 00:06:07 2015
New Revision: 278643
URL: https://svnweb.freebsd.org/changeset/base/278643
Log:
MFC r276394, r276397:
Add armv6 implementations of the startup-time cache maintenance functions.
Create a new locore.S that includes locore-v4 or locore-v6 as needed.
Replaced:
stable/10/sys/arm/arm/locore.S
- copied unchanged from r276397, head/sys/arm/arm/locore.S
Modified:
stable/10/sys/arm/arm/cpu_asm-v6.S
Directory Properties:
stable/10/ (props changed)
Modified: stable/10/sys/arm/arm/cpu_asm-v6.S
==============================================================================
--- stable/10/sys/arm/arm/cpu_asm-v6.S Thu Feb 12 23:08:27 2015 (r278642)
+++ stable/10/sys/arm/arm/cpu_asm-v6.S Fri Feb 13 00:06:07 2015 (r278643)
@@ -33,8 +33,6 @@
#include <machine/armreg.h>
#include <machine/sysreg.h>
-#if __ARM_ARCH >= 7
-
/*
* Define cache functions used by startup code, which counts on the fact that
* only r0-r3,r12 (ip) are modified and no stack space is used. These functions
@@ -47,12 +45,18 @@
/* Invalidate D cache to PoC. (aka all cache levels)*/
ASENTRY_NP(dcache_inv_poc_all)
+#if __ARM_ARCH == 6
+ mcr CP15_DCIALL
+ DSB
+ bx lr
+#else
mrc CP15_CLIDR(r0)
ands r0, r0, #0x07000000
- mov r0, r0, lsr #23 /* Get LoC (naturally aligned) */
- beq 4f
+ mov r0, r0, lsr #23 /* Get LoC 'naturally' aligned for */
+ beq 4f /* use in the CSSELR register below */
-1: mcr CP15_CSSELR(r0) /* set cache level */
+1: sub r0, #2
+ mcr CP15_CSSELR(r0) /* set cache level */
isb
mrc CP15_CCSIDR(r0) /* read CCSIDR */
@@ -83,28 +87,31 @@ ASENTRY_NP(dcache_inv_poc_all)
3:
mrc CP15_CSSELR(r0) /* get cache level */
- add r0, r0, #2 /* next level */
- mrc CP15_CLIDR(r1)
- ands r1, r1, #0x07000000
- mov r1, r1, lsr #23 /* Get LoC (naturally aligned) */
- cmp r1, r0
- bgt 1b
+ teq r0, #0
+ bne 1b
4: dsb /* wait for stores to finish */
mov r0, #0
mcr CP15_CSSELR(r0)
isb
bx lr
+#endif /* __ARM_ARCH == 6 */
END(dcache_inv_poc_all)
/* Invalidate D cache to PoU. (aka L1 cache only)*/
ASENTRY_NP(dcache_inv_pou_all)
+#if __ARM_ARCH == 6
+ mcr CP15_DCIALL
+ DSB
+ bx lr
+#else
mrc CP15_CLIDR(r0)
ands r0, r0, #0x07000000
mov r0, r0, lsr #26 /* Get LoUU (naturally aligned) */
beq 4f
-1: mcr CP15_CSSELR(r0) /* set cache level */
+1: sub r0, #2
+ mcr CP15_CSSELR(r0) /* set cache level */
isb
mrc CP15_CCSIDR(r0) /* read CCSIDR */
@@ -125,7 +132,7 @@ ASENTRY_NP(dcache_inv_pou_all)
mov r2, ip /* r2 now contains set way decr */
/* r3 = ways/sets, r2 = way decr, r1 = set decr, r0 and ip are free */
-2: mcr CP15_DCISW(r3) /* clean & invalidate line */
+2: mcr CP15_DCISW(r3) /* invalidate line */
movs r0, r3 /* get current way/set */
beq 3f /* at 0 means we are done */
movs r0, r0, lsl #10 /* clear way bits leaving only set bits*/
@@ -135,25 +142,27 @@ ASENTRY_NP(dcache_inv_pou_all)
3:
mrc CP15_CSSELR(r0) /* get cache level */
- add r0, r0, #2 /* next level */
- mrc CP15_CLIDR(r1)
- ands r1, r1, #0x07000000
- mov r1, r1, lsr #26 /* Get LoUU (naturally aligned) */
- cmp r1, r0
- bgt 1b
+ teq r0, #0
+ bne 1b
4: dsb /* wait for stores to finish */
mov r0, #0
mcr CP15_CSSELR(r0)
bx lr
+#endif
END(dcache_inv_pou_all)
/* Write back and Invalidate D cache to PoC. */
ASENTRY_NP(dcache_wbinv_poc_all)
+#if __ARM_ARCH == 6
+ mcr CP15_DCCIALL
+ DSB
+ bx lr
+#else
mrc CP15_CLIDR(r0)
ands r0, r0, #0x07000000
- mov r0, r0, lsr #23 /* Get LoC (naturally aligned) */
beq 4f
+ mov r0, #0 /* Clean from inner to outer levels */
1: mcr CP15_CSSELR(r0) /* set cache level */
isb
@@ -176,7 +185,7 @@ ASENTRY_NP(dcache_wbinv_poc_all)
mov r2, ip /* r2 now contains set way decr */
/* r3 = ways/sets, r2 = way decr, r1 = set decr, r0 and ip are free */
-2: mcr CP15_DCCISW(r3) /* clean & invalidate line */
+2: mcr CP15_DCCISW(r3) /* clean and invalidate line */
movs r0, r3 /* get current way/set */
beq 3f /* at 0 means we are done */
movs r0, r0, lsl #10 /* clear way bits leaving only set bits*/
@@ -191,12 +200,11 @@ ASENTRY_NP(dcache_wbinv_poc_all)
ands r1, r1, #0x07000000
mov r1, r1, lsr #23 /* Get LoC (naturally aligned) */
cmp r1, r0
- bgt 1b
+ bne 1b
4: dsb /* wait for stores to finish */
mov r0, #0
mcr CP15_CSSELR(r0)
bx lr
+#endif /* __ARM_ARCH == 6 */
END(dcache_wbinv_poc_all)
-
-#endif /* __ARM_ARCH >= 7 */
Copied: stable/10/sys/arm/arm/locore.S (from r276397, head/sys/arm/arm/locore.S)
==============================================================================
--- /dev/null 00:00:00 1970 (empty, because file is newly added)
+++ stable/10/sys/arm/arm/locore.S Fri Feb 13 00:06:07 2015 (r278643, copy of r276397, head/sys/arm/arm/locore.S)
@@ -0,0 +1,41 @@
+/*-
+ * Copyright (c) 2014 Ian Lepore <ian at freebsd.org>
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * $FreeBSD$
+ */
+
+/*
+ * The kernel build machinery wants the file containing the entry point to be
+ * named locore.S, but we want separate files for v4 and v6 builds, so just
+ * include the arch-appropriate file from this properly-named file.
+ */
+
+#include <machine/acle-compat.h>
+
+#if __ARM_ARCH >= 6
+#include "locore-v6.S"
+#else
+#include "locore-v4.S"
+#endif
More information about the svn-src-all
mailing list