Module Name:	src
Committed By:	skrll
Date:		Tue Aug 29 06:28:27 UTC 2017
Modified Files:
	src/sys/arch/arm/include/arm32: pmap.h

Log Message:
Revert the *_PROT_RO_* define changes and do all the work in the
*_PROT() macros for RO kernel pages.

cats boots again - no idea how it slipped through my testing previously

To generate a diff of this commit:
cvs rdiff -u -r1.151 -r1.152 src/sys/arch/arm/include/arm32/pmap.h

Please note that diffs are not public domain; they are subject to the
copyright notices on the relevant files.
Modified files:

Index: src/sys/arch/arm/include/arm32/pmap.h
diff -u src/sys/arch/arm/include/arm32/pmap.h:1.151 src/sys/arch/arm/include/arm32/pmap.h:1.152
--- src/sys/arch/arm/include/arm32/pmap.h:1.151	Tue Jul 11 20:42:17 2017
+++ src/sys/arch/arm/include/arm32/pmap.h	Tue Aug 29 06:28:26 2017
@@ -1,4 +1,4 @@
-/*	$NetBSD: pmap.h,v 1.151 2017/07/11 20:42:17 skrll Exp $	*/
+/*	$NetBSD: pmap.h,v 1.152 2017/08/29 06:28:26 skrll Exp $	*/
 
 /*
  * Copyright (c) 2002, 2003 Wasabi Systems, Inc.
@@ -728,12 +728,12 @@ extern void (*pmap_zero_page_func)(paddr
  */
 #define	L1_S_PROT_U_generic	(L1_S_AP(AP_U))
 #define	L1_S_PROT_W_generic	(L1_S_AP(AP_W))
-#define	L1_S_PROT_RO_generic	(L1_S_AP(AP_R)) /* AP_W == AP_R */
+#define	L1_S_PROT_RO_generic	(0)
 #define	L1_S_PROT_MASK_generic	(L1_S_PROT_U|L1_S_PROT_W|L1_S_PROT_RO)
 
 #define	L1_S_PROT_U_xscale	(L1_S_AP(AP_U))
 #define	L1_S_PROT_W_xscale	(L1_S_AP(AP_W))
-#define	L1_S_PROT_RO_xscale	(L1_S_AP(AP_R)) /* AP_W == AP_R */
+#define	L1_S_PROT_RO_xscale	(0)
 #define	L1_S_PROT_MASK_xscale	(L1_S_PROT_U|L1_S_PROT_W|L1_S_PROT_RO)
 
 #define	L1_S_PROT_U_armv6	(L1_S_AP(AP_R) | L1_S_AP(AP_U))
@@ -754,12 +754,12 @@ extern void (*pmap_zero_page_func)(paddr
 
 #define	L2_L_PROT_U_generic	(L2_AP(AP_U))
 #define	L2_L_PROT_W_generic	(L2_AP(AP_W))
-#define	L2_L_PROT_RO_generic	(L2_AP(AP_R))
+#define	L2_L_PROT_RO_generic	(0)
 #define	L2_L_PROT_MASK_generic	(L2_L_PROT_U|L2_L_PROT_W|L2_L_PROT_RO)
 
 #define	L2_L_PROT_U_xscale	(L2_AP(AP_U))
 #define	L2_L_PROT_W_xscale	(L2_AP(AP_W))
-#define	L2_L_PROT_RO_xscale	(L2_AP(AP_R))
+#define	L2_L_PROT_RO_xscale	(0)
 #define	L2_L_PROT_MASK_xscale	(L2_L_PROT_U|L2_L_PROT_W|L2_L_PROT_RO)
 
 #define	L2_L_PROT_U_armv6n	(L2_AP0(AP_R) | L2_AP0(AP_U))
@@ -780,12 +780,12 @@ extern void (*pmap_zero_page_func)(paddr
 
 #define	L2_S_PROT_U_generic	(L2_AP(AP_U))
 #define	L2_S_PROT_W_generic	(L2_AP(AP_W))
-#define	L2_S_PROT_RO_generic	(L2_AP(AP_R))
+#define	L2_S_PROT_RO_generic	(0)
 #define	L2_S_PROT_MASK_generic	(L2_S_PROT_U|L2_S_PROT_W|L2_S_PROT_RO)
 
 #define	L2_S_PROT_U_xscale	(L2_AP0(AP_U))
 #define	L2_S_PROT_W_xscale	(L2_AP0(AP_W))
-#define	L2_S_PROT_RO_xscale	(L2_AP(AP_R))
+#define	L2_S_PROT_RO_xscale	(0)
 #define	L2_S_PROT_MASK_xscale	(L2_S_PROT_U|L2_S_PROT_W|L2_S_PROT_RO)
 
 #define	L2_S_PROT_U_armv6n	(L2_AP0(AP_R) | L2_AP0(AP_U))
@@ -1028,31 +1028,44 @@ extern void (*pmap_zero_page_func)(paddr
 #define	l1pte_set_writable(pte)	(((pte) & ~L1_S_PROT_RO) | L1_S_PROT_W)
 #define	l1pte_set_readonly(pte)	(((pte) & ~L1_S_PROT_W) | L1_S_PROT_RO)
 
-#define	l2pte_set_writable(pte)	(L2_S_PROT_W == L2_S_PROT_RO ? \
-    ((pte) | L2_S_PROT_W) : (((pte) & ~L2_S_PROT_RO) | L2_S_PROT_W))
-
-#define	l2pte_set_readonly(pte)	(L2_S_PROT_W == L2_S_PROT_RO ? \
-    ((pte) & ~L2_S_PROT_RO) : (((pte) & ~L2_S_PROT_W) | L2_S_PROT_RO))
+#define	l2pte_set_writable(pte)	(((pte) & ~L2_S_PROT_RO) | L2_S_PROT_W)
+#define	l2pte_set_readonly(pte)	(((pte) & ~L2_S_PROT_W) | L2_S_PROT_RO)
 
 #define	l2pte_writable_p(pte)	(((pte) & L2_S_PROT_W) == L2_S_PROT_W && \
-				 (L2_S_PROT_W == L2_S_PROT_RO || \
+				 (L2_S_PROT_RO == 0 || \
 				 ((pte) & L2_S_PROT_RO) != L2_S_PROT_RO))
 
 /*
  * These macros return various bits based on kernel/user and protection.
  * Note that the compiler will usually fold these at compile time.
  */
-#define	L1_S_PROT(ku, pr)	((((ku) == PTE_USER) ? L1_S_PROT_U : 0) | \
-				 (((pr) & VM_PROT_WRITE) ? L1_S_PROT_W : \
-				 (L1_S_PROT_W == L1_S_PROT_RO ? 0 : L1_S_PROT_RO)))
-
-#define	L2_L_PROT(ku, pr)	((((ku) == PTE_USER) ? L2_L_PROT_U : 0) | \
-				 (((pr) & VM_PROT_WRITE) ? L2_L_PROT_W : \
-				 (L2_L_PROT_W == L2_L_PROT_RO ? 0 : L2_L_PROT_RO)))
-
-#define	L2_S_PROT(ku, pr)	((((ku) == PTE_USER) ? L2_S_PROT_U : 0) | \
-				 (((pr) & VM_PROT_WRITE) ? L2_S_PROT_W : \
-				 (L2_S_PROT_W == L2_S_PROT_RO ? 0 : L2_S_PROT_RO)))
+
+#define	L1_S_PROT(ku, pr)	( \
+    (((ku) == PTE_USER) ? \
+	L1_S_PROT_U | (((pr) & VM_PROT_WRITE) ? L1_S_PROT_W : 0) \
+    : \
+	(((L1_S_PROT_RO && \
+	    ((pr) & (VM_PROT_READ | VM_PROT_WRITE)) == VM_PROT_READ) ? \
+	    L1_S_PROT_RO : L1_S_PROT_W))) \
+    )
+
+#define	L2_L_PROT(ku, pr)	( \
+    (((ku) == PTE_USER) ? \
+	L2_L_PROT_U | (((pr) & VM_PROT_WRITE) ? L2_L_PROT_W : 0) \
+    : \
+	(((L2_L_PROT_RO && \
+	    ((pr) & (VM_PROT_READ | VM_PROT_WRITE)) == VM_PROT_READ) ? \
+	    L2_L_PROT_RO : L2_L_PROT_W))) \
+    )
+
+#define	L2_S_PROT(ku, pr)	( \
+    (((ku) == PTE_USER) ? \
+	L2_S_PROT_U | (((pr) & VM_PROT_WRITE) ? L2_S_PROT_W : 0) \
+    : \
+	(((L2_S_PROT_RO && \
+	    ((pr) & (VM_PROT_READ | VM_PROT_WRITE)) == VM_PROT_READ) ? \
+	    L2_S_PROT_RO : L2_S_PROT_W))) \
+    )
 
 /*
  * Macros to test if a mapping is mappable with an L1 SuperSection,