From ac2dd8b794f18323edf3ffedbc2801a1c96ec278 Mon Sep 17 00:00:00 2001
From: dwhite
Date: Thu, 3 Mar 2005 02:41:37 +0000
Subject: Insert volatile cast to discourage gcc from optimizing the read
 outside of the while loop.

Suggested by:	alc
MFC after:	1 day
---
 sys/kern/uipc_mbuf.c | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

(limited to 'sys')

diff --git a/sys/kern/uipc_mbuf.c b/sys/kern/uipc_mbuf.c
index 4d832a8..1f6069e 100644
--- a/sys/kern/uipc_mbuf.c
+++ b/sys/kern/uipc_mbuf.c
@@ -234,9 +234,12 @@ mb_free_ext(struct mbuf *m)
 	 * This is tricky. We need to make sure to decrement the
 	 * refcount in a safe way but to also clean up if we're the
 	 * last reference. This method seems to do it without race.
+	 * The volatile cast is required to emit the proper load
+	 * instructions. Otherwise gcc will optimize the read outside
+	 * of the while loop.
 	 */
 	while (dofree == 0) {
-		cnt = *(m->m_ext.ref_cnt);
+		cnt = *(volatile u_int *)(m->m_ext.ref_cnt);
 		if (atomic_cmpset_int(m->m_ext.ref_cnt, cnt, cnt - 1)) {
 			if (cnt == 1)
 				dofree = 1;
-- 
cgit v1.1