00001
00002
00003
00004
00005
00006
00007
00008
00009
00010
00011
00012
00013
00014
00015
00016
00017
00018
00019
00020
00021 #ifndef __TBB_queuing_mutex_H
00022 #define __TBB_queuing_mutex_H
00023
00024 #include "tbb_config.h"
00025
00026 #if !TBB_USE_EXCEPTIONS && _MSC_VER
00027
00028 #pragma warning (push)
00029 #pragma warning (disable: 4530)
00030 #endif
00031
00032 #include <cstring>
00033
00034 #if !TBB_USE_EXCEPTIONS && _MSC_VER
00035 #pragma warning (pop)
00036 #endif
00037
00038 #include "atomic.h"
00039 #include "tbb_profiling.h"
00040
00041 namespace tbb {
00042
00044
//! Queuing mutex with local-only spinning.
/** A fair (FIFO) mutex: competing threads form a queue of scoped_lock nodes
    (see q_tail / scoped_lock::next) and each waiter spins on its own
    scoped_lock::going word rather than on a shared location.
    @ingroup synchronization */
class queuing_mutex {
public:
    //! Construct unacquired mutex.
    queuing_mutex() {
        q_tail = NULL;
#if TBB_USE_THREADING_TOOLS
        internal_construct();    // register with threading tools (ITT) when enabled
#endif
    }

    //! The scoped locking pattern.
    /** Helps avoid the common problem of forgetting to release the lock,
        and also serves as the per-thread "node" in the mutex's waiter queue. */
    class scoped_lock: internal::no_copy {
        //! Initialize fields to mean "this lock object holds no mutex".
        void initialize() {
            mutex = NULL;
#if TBB_USE_ASSERT
            internal::poison_pointer(next);    // catch use of 'next' before it is queued
#endif
        }

    public:
        //! Construct lock that has not acquired a mutex.
        scoped_lock() {initialize();}

        //! Construct lock and acquire the given mutex (blocking).
        scoped_lock( queuing_mutex& m ) {
            initialize();
            acquire(m);
        }

        //! Release the lock if it is currently held.
        ~scoped_lock() {
            if( mutex ) release();
        }

        //! Acquire lock on given mutex (blocking; implementation is out-of-line).
        void __TBB_EXPORTED_METHOD acquire( queuing_mutex& m );

        //! Try to acquire the given mutex without blocking.
        /** @return true if the lock was acquired, false otherwise. */
        bool __TBB_EXPORTED_METHOD try_acquire( queuing_mutex& m );

        //! Release the held lock.
        void __TBB_EXPORTED_METHOD release();

    private:
        //! The mutex currently owned, or NULL if this object holds no mutex.
        queuing_mutex* mutex;

        //! Pointer to the next competitor queued behind this one.
        /** Poisoned in debug builds until a real successor links itself in. */
        scoped_lock *next;

        //! Per-waiter spin-wait word used by the out-of-line acquire()/release().
        /** NOTE(review): exact value encoding is defined in the out-of-line
            implementation, not visible in this header — confirm there. */
        uintptr_t going;
    };

    //! Support routine for threading-tools instrumentation (out-of-line).
    void __TBB_EXPORTED_METHOD internal_construct();

    // Mutex traits (compile-time characteristics used by generic lock code).
    static const bool is_rw_mutex = false;
    static const bool is_recursive_mutex = false;
    static const bool is_fair_mutex = true;

private:
    //! Tail of the waiter queue: the last competitor requesting the lock.
    atomic<scoped_lock*> q_tail;

};
00118
00119 __TBB_DEFINE_PROFILING_SET_NAME(queuing_mutex)
00120
00121 }
00122
00123 #endif