Commit 3efd84bd authored by Poul-Henning Kamp

Revisit the memory barriers, and try to catch some common platforms
with inline assembler.

I cannot possibly express the magnitude of this particular POSIX FAIL!


git-svn-id: http://www.varnish-cache.org/svn/trunk/varnish-cache@4912 d4fa192b-c00b-0410-8231-f00ffab90ce4
parent 9303f55e
@@ -29,6 +29,10 @@
*
* Memory barriers
*
* XXX: It is utterly braindamaged, that no standard facility for this
* XXX: is available. The "just use pthreads locking" excuse does not
* XXX: make sense, and does not apply to two unthreaded programs sharing
* XXX: a memory segment.
*/
#ifndef VMB_H_INCLUDED
@@ -37,15 +41,43 @@
void vmb_pthread(void);
#if defined(__FreeBSD__)
#include <sys/types.h>
#include <machine/atomic.h>
#define VMB() mb()
#define VWMB() wmb()
#define VRMB() rmb()
#elif defined(__amd64__) && defined(__GNUC__)
#define VMB() __asm __volatile("mfence;" : : : "memory")
#define VWMB() __asm __volatile("sfence;" : : : "memory")
#define VRMB() __asm __volatile("lfence;" : : : "memory")
#elif defined(__arm__)
#define VMB()
#define VWMB()
#define VRMB()
#elif defined(__i386__) && defined(__GNUC__)
#define VMB() __asm __volatile("lock; addl $0,(%%esp)" : : : "memory")
#define VWMB() __asm __volatile("lock; addl $0,(%%esp)" : : : "memory")
#define VRMB() __asm __volatile("lock; addl $0,(%%esp)" : : : "memory")
#elif defined(__sparc64__) && defined(__GNUC__)
#define VMB() __asm__ __volatile__ ("membar #MemIssue": : :"memory")
#define VWMB() VMB()
#define VRMB() VMB()
#else
#define VMB() vmb_pthread()
#define VWMB() vmb_pthread()
#define VRMB() vmb_pthread()
#endif
#endif /* VMB_H_INCLUDED */
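
The header comment above motivates these macros with the case of two unthreaded programs sharing a memory segment. As a hedged illustration of that scenario (the struct and function names below are invented for this sketch and are not part of vmb.h), a producer process can publish a record by writing the payload, issuing VWMB(), and only then setting a ready flag; the consumer polls the flag, issues VRMB(), and only then reads the payload:

#include "vmb.h"

/* Illustrative layout of a record in the shared segment. */
struct shmrec {
	unsigned		payload;	/* data being published */
	volatile unsigned	ready;		/* 0 = empty, 1 = payload valid */
};

/* Producer: make the payload globally visible before the flag. */
static void
publish(struct shmrec *r, unsigned value)
{
	r->payload = value;
	VWMB();			/* order payload store before flag store */
	r->ready = 1;
}

/* Consumer: once the flag is seen, do not read the payload early. */
static int
consume(struct shmrec *r, unsigned *value)
{
	if (r->ready == 0)
		return (0);
	VRMB();			/* order flag load before payload load */
	*value = r->payload;
	return (1);
}

This is exactly the pattern where the "just use pthreads locking" excuse does not apply: the two sides may be separate single-threaded processes with no shared mutex between them.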
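
For platforms not caught by any of the inline-assembler cases, the macros fall back to vmb_pthread(), declared at the top of the hunk. One plausible way to build such a fallback, sketched below, is to rely on POSIX's guarantee that pthread_mutex_lock() and pthread_mutex_unlock() synchronize memory, so a lock/unlock round trip on a process-private mutex behaves as a (slow) full barrier. This is a sketch of the general technique under that assumption, not a quotation of Varnish's actual vmb.c.

#include <pthread.h>

/*
 * Hedged sketch of a portable full-barrier fallback: POSIX requires
 * mutex lock/unlock to synchronize memory, so the pair below acts as
 * a heavyweight VMB().  The static mutex is purely illustrative.
 */
void
vmb_pthread(void)
{
	static pthread_mutex_t mtx = PTHREAD_MUTEX_INITIALIZER;

	(void)pthread_mutex_lock(&mtx);
	(void)pthread_mutex_unlock(&mtx);
}

The detour through a mutex is also why the commit message calls this a POSIX FAIL: it is the only portable option, and it is far more expensive than a single fence instruction.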