svn commit: r357940 - in head/sys: amd64/amd64 sys x86/x86
Mateusz Guzik
mjg at FreeBSD.org
Fri Feb 14 23:15:43 UTC 2020
Author: mjg
Date: Fri Feb 14 23:15:41 2020
New Revision: 357940
URL: https://svnweb.freebsd.org/changeset/base/357940
Log:
Make atomic_load_ptr type-aware
The returned value now has a type derived from the argument, meaning consumers no longer
have to cast in the common case.
This commit keeps the kernel compilable without patching the rest.
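As a rough illustration of the consumer-side effect, here is a minimal userland sketch with simplified stand-in macros (old_atomic_load_ptr/new_atomic_load_ptr and the sample struct thread usage are placeholders, not the real sys/atomic_common.h definitions):

#include <stdint.h>
#include <stdio.h>

/* Simplified stand-ins for the kernel macro, before and after this commit. */
#define old_atomic_load_ptr(p)	(*(volatile uintptr_t *)(p))
#define new_atomic_load_ptr(p)	(*(volatile __typeof(p))(p))

struct thread {
	int td_tid;
};

int
main(void)
{
	struct thread t = { .td_tid = 100001 };
	struct thread *td_ptr = &t;

	/* Old macro: the load always yields uintptr_t, so the caller casts. */
	struct thread *a = (struct thread *)old_atomic_load_ptr(&td_ptr);

	/*
	 * New macro: __typeof(&td_ptr) is struct thread **, so the
	 * dereference yields a struct thread * and no cast is needed
	 * at the call site.
	 */
	struct thread *b = new_atomic_load_ptr(&td_ptr);

	printf("%d %d\n", a->td_tid, b->td_tid);
	return (0);
}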
Modified:
head/sys/amd64/amd64/pmap.c
head/sys/sys/_cscan_atomic.h
head/sys/sys/atomic_common.h
head/sys/x86/x86/mp_x86.c
Modified: head/sys/amd64/amd64/pmap.c
==============================================================================
--- head/sys/amd64/amd64/pmap.c Fri Feb 14 22:32:33 2020 (r357939)
+++ head/sys/amd64/amd64/pmap.c Fri Feb 14 23:15:41 2020 (r357940)
@@ -796,7 +796,7 @@ again:
PV_STAT(i = 0);
for (p = &pmap_invl_gen_head;; p = prev.next) {
PV_STAT(i++);
- prevl = atomic_load_ptr(&p->next);
+ prevl = (uintptr_t)atomic_load_ptr(&p->next);
if ((prevl & PMAP_INVL_GEN_NEXT_INVALID) != 0) {
PV_STAT(atomic_add_long(&invl_start_restart, 1));
lock_delay(&lda);
@@ -903,7 +903,7 @@ pmap_delayed_invl_finish_u(void)
again:
for (p = &pmap_invl_gen_head; p != NULL; p = (void *)prevl) {
- prevl = atomic_load_ptr(&p->next);
+ prevl = (uintptr_t)atomic_load_ptr(&p->next);
if ((prevl & PMAP_INVL_GEN_NEXT_INVALID) != 0) {
PV_STAT(atomic_add_long(&invl_finish_restart, 1));
lock_delay(&lda);
@@ -954,7 +954,7 @@ DB_SHOW_COMMAND(di_queue, pmap_di_queue)
for (p = &pmap_invl_gen_head, first = true; p != NULL; p = pn,
first = false) {
- nextl = atomic_load_ptr(&p->next);
+ nextl = (uintptr_t)atomic_load_ptr(&p->next);
pn = (void *)(nextl & ~PMAP_INVL_GEN_NEXT_INVALID);
td = first ? NULL : __containerof(p, struct thread,
td_md.md_invl_gen);
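These pmap.c call sites keep a uintptr_t on purpose: the next pointer doubles as a tagged value with the PMAP_INVL_GEN_NEXT_INVALID flag packed into it, which the walkers test and mask off, so with the type-aware macro the integer view now needs an explicit cast. A minimal sketch of that tagged-pointer walk under stand-in definitions (the flag value and struct node below are illustrative, not the real pmap structures):

#include <stdint.h>
#include <stdio.h>

/* Illustrative flag value; stands in for PMAP_INVL_GEN_NEXT_INVALID. */
#define NEXT_INVALID	((uintptr_t)0x1)

struct node {
	struct node *next;	/* the flag bit is packed into this pointer */
};

/* Simplified stand-in for the post-commit, type-aware macro. */
#define atomic_load_ptr(p)	(*(volatile __typeof(p))(p))

int
main(void)
{
	struct node tail = { .next = NULL };
	struct node head = { .next = &tail };
	uintptr_t prevl;

	/*
	 * The load now yields struct node *, but the walker wants the raw
	 * integer so it can test and strip the flag bit, hence the cast.
	 */
	prevl = (uintptr_t)atomic_load_ptr(&head.next);
	if ((prevl & NEXT_INVALID) != 0)
		printf("marked invalid, restart\n");
	else
		printf("next node at %p\n", (void *)(prevl & ~NEXT_INVALID));
	return (0);
}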
Modified: head/sys/sys/_cscan_atomic.h
==============================================================================
--- head/sys/sys/_cscan_atomic.h Fri Feb 14 22:32:33 2020 (r357939)
+++ head/sys/sys/_cscan_atomic.h Fri Feb 14 23:15:41 2020 (r357940)
@@ -170,7 +170,11 @@ void kcsan_atomic_thread_fence_seq_cst(void);
#define atomic_fcmpset_acq_ptr kcsan_atomic_fcmpset_acq_ptr
#define atomic_fcmpset_rel_ptr kcsan_atomic_fcmpset_rel_ptr
#define atomic_fetchadd_ptr kcsan_atomic_fetchadd_ptr
-#define atomic_load_ptr(x) kcsan_atomic_load_ptr((volatile uintptr_t *)(x))
+#define atomic_load_ptr(x) ({ \
+ __typeof(*x) __retptr; \
+ __retptr = (void *)kcsan_atomic_load_ptr((volatile uintptr_t *)(x)); \
+ __retptr; \
+})
#define atomic_load_acq_ptr kcsan_atomic_load_acq_ptr
#define atomic_readandclear_ptr kcsan_atomic_readandclear_ptr
#define atomic_set_ptr kcsan_atomic_set_ptr
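The KCSAN wrapper cannot simply dereference the argument, because the instrumented load has to go through kcsan_atomic_load_ptr(), which returns a plain uintptr_t; the statement expression above restores the caller's pointer type by assigning through a void * conversion into a __typeof(*x) temporary. A rough sketch of the same shape with a dummy hook (the kcsan_load() helper below is hypothetical, not the kernel's KCSAN runtime):

#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in for the sanitizer hook: record the access, do the load. */
static uintptr_t
kcsan_load(volatile uintptr_t *p)
{
	/* A real KCSAN runtime would log the access here. */
	return (*p);
}

/*
 * Same shape as the new _cscan_atomic.h macro: the hook returns uintptr_t,
 * so a __typeof(*x) temporary (assigned via void *) gives the statement
 * expression the caller's pointer type as its value.
 */
#define atomic_load_ptr(x) ({						\
	__typeof(*x) __retptr;						\
	__retptr = (void *)kcsan_load((volatile uintptr_t *)(x));	\
	__retptr;							\
})

int
main(void)
{
	int v = 42;
	int *ip = &v;
	int *loaded;

	loaded = atomic_load_ptr(&ip);	/* no cast needed at the call site */
	printf("%d\n", *loaded);
	return (0);
}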
Modified: head/sys/sys/atomic_common.h
==============================================================================
--- head/sys/sys/atomic_common.h Fri Feb 14 22:32:33 2020 (r357939)
+++ head/sys/sys/atomic_common.h Fri Feb 14 23:15:41 2020 (r357940)
@@ -41,7 +41,7 @@
#define atomic_load_short(p) (*(volatile u_short *)(p))
#define atomic_load_int(p) (*(volatile u_int *)(p))
#define atomic_load_long(p) (*(volatile u_long *)(p))
-#define atomic_load_ptr(p) (*(volatile uintptr_t*)(p))
+#define atomic_load_ptr(p) (*(volatile __typeof(p))(p))
#define atomic_load_8(p) (*(volatile uint8_t *)(p))
#define atomic_load_16(p) (*(volatile uint16_t *)(p))
#define atomic_load_32(p) (*(volatile uint32_t *)(p))
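To see what the new atomic_common.h definition yields for a concrete argument, here is a small compile-time check (a sketch only; the struct pcb placeholder echoes the mp_x86.c call site, and __builtin_types_compatible_p is the GCC/Clang extension used to assert the result type):

#include <stdint.h>

/* The new definition from this commit. */
#define atomic_load_ptr(p)	(*(volatile __typeof(p))(p))

struct pcb;

static struct pcb *curpcb_slot;

/*
 * For p == &curpcb_slot (type struct pcb **), the dereference in the macro
 * yields a struct pcb * value, unlike the old definition, which always
 * produced a uintptr_t regardless of the argument type.
 */
_Static_assert(__builtin_types_compatible_p(
    __typeof(atomic_load_ptr(&curpcb_slot)), struct pcb *),
    "atomic_load_ptr result should keep the pointer type");

int
main(void)
{
	(void)curpcb_slot;
	return (0);
}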
Modified: head/sys/x86/x86/mp_x86.c
==============================================================================
--- head/sys/x86/x86/mp_x86.c Fri Feb 14 22:32:33 2020 (r357939)
+++ head/sys/x86/x86/mp_x86.c Fri Feb 14 23:15:41 2020 (r357940)
@@ -1106,7 +1106,7 @@ smp_after_idle_runnable(void *arg __unused)
for (cpu = 1; cpu < mp_ncpus; cpu++) {
pc = pcpu_find(cpu);
- while (atomic_load_ptr(&pc->pc_curpcb) == (uintptr_t)NULL)
+ while (atomic_load_ptr(&pc->pc_curpcb) == NULL)
cpu_spinwait();
kmem_free((vm_offset_t)bootstacks[cpu], kstack_pages *
PAGE_SIZE);