recursive_mutex.h

/*
    Copyright 2005-2010 Intel Corporation.  All Rights Reserved.

    The source code contained or described herein and all documents related
    to the source code ("Material") are owned by Intel Corporation or its
    suppliers or licensors.  Title to the Material remains with Intel
    Corporation or its suppliers and licensors.  The Material is protected
    by worldwide copyright laws and treaty provisions.  No part of the
    Material may be used, copied, reproduced, modified, published, uploaded,
    posted, transmitted, distributed, or disclosed in any way without
    Intel's prior express written permission.

    No license under any patent, copyright, trade secret or other
    intellectual property right is granted to or conferred upon you by
    disclosure or delivery of the Materials, either expressly, by
    implication, inducement, estoppel or otherwise.  Any license under such
    intellectual property rights must be express and approved by Intel in
    writing.
*/

#ifndef __TBB_recursive_mutex_H
#define __TBB_recursive_mutex_H

#if _WIN32||_WIN64

#include <windows.h>
#if !defined(_WIN32_WINNT)
// The following Windows API function is declared explicitly;
// otherwise any user would have to specify /D_WIN32_WINNT=0x0400
extern "C" BOOL WINAPI TryEnterCriticalSection( LPCRITICAL_SECTION );
#endif

#else /* if not _WIN32||_WIN64 */

#include <pthread.h>
namespace tbb { namespace internal {
// Use this internal TBB function to throw an exception
  extern void handle_perror( int error_code, const char* what );
} } //namespaces

#endif /* _WIN32||_WIN64 */

#include <new>
#include "aligned_space.h"
#include "tbb_stddef.h"
#include "tbb_profiling.h"

namespace tbb {

//! Mutex that allows recursive acquisition by the same thread.
/** Wraps a CRITICAL_SECTION on Windows and a PTHREAD_MUTEX_RECURSIVE mutex elsewhere. */
class recursive_mutex {
public:
    //! Construct an unacquired mutex.
    recursive_mutex() {
#if TBB_USE_ASSERT || TBB_USE_THREADING_TOOLS
        internal_construct();
#else
  #if _WIN32||_WIN64
        InitializeCriticalSection(&impl);
  #else
        pthread_mutexattr_t mtx_attr;
        int error_code = pthread_mutexattr_init( &mtx_attr );
        if( error_code )
            tbb::internal::handle_perror(error_code,"recursive_mutex: pthread_mutexattr_init failed");

        pthread_mutexattr_settype( &mtx_attr, PTHREAD_MUTEX_RECURSIVE );
        error_code = pthread_mutex_init( &impl, &mtx_attr );
        if( error_code )
            tbb::internal::handle_perror(error_code,"recursive_mutex: pthread_mutex_init failed");

        pthread_mutexattr_destroy( &mtx_attr );
  #endif /* _WIN32||_WIN64 */
#endif /* TBB_USE_ASSERT */
    }

    //! Destroy an unacquired mutex.
    ~recursive_mutex() {
#if TBB_USE_ASSERT
        internal_destroy();
#else
  #if _WIN32||_WIN64
        DeleteCriticalSection(&impl);
  #else
        pthread_mutex_destroy(&impl);
  #endif /* _WIN32||_WIN64 */
#endif /* TBB_USE_ASSERT */
    }
    class scoped_lock;
    friend class scoped_lock;

    //! The scoped locking pattern.
    /** It helps to avoid the common problem of forgetting to release the lock. */
    class scoped_lock: internal::no_copy {
    public:
        //! Construct a lock that has not yet acquired a mutex.
        scoped_lock() : my_mutex(NULL) {}

        //! Construct a lock and acquire the given mutex.
        scoped_lock( recursive_mutex& mutex ) {
#if TBB_USE_ASSERT
            my_mutex = &mutex;
#endif /* TBB_USE_ASSERT */
            acquire( mutex );
        }

        //! Release the lock (if one is held).
        ~scoped_lock() {
            if( my_mutex )
                release();
        }

        //! Acquire a lock on the given mutex.
        void acquire( recursive_mutex& mutex ) {
#if TBB_USE_ASSERT
            internal_acquire( mutex );
#else
            my_mutex = &mutex;
            mutex.lock();
#endif /* TBB_USE_ASSERT */
        }

        //! Try to acquire a lock on the given mutex; return true on success.
        bool try_acquire( recursive_mutex& mutex ) {
#if TBB_USE_ASSERT
            return internal_try_acquire( mutex );
#else
            bool result = mutex.try_lock();
            if( result )
                my_mutex = &mutex;
            return result;
#endif /* TBB_USE_ASSERT */
        }

        //! Release the lock.
        void release() {
#if TBB_USE_ASSERT
            internal_release();
#else
            my_mutex->unlock();
            my_mutex = NULL;
#endif /* TBB_USE_ASSERT */
        }

    private:
        //! Pointer to the mutex currently held, or NULL if no lock is held.
        recursive_mutex* my_mutex;

        //! Out-of-line acquire, used when TBB_USE_ASSERT is enabled.
        void __TBB_EXPORTED_METHOD internal_acquire( recursive_mutex& m );

        //! Out-of-line try_acquire, used when TBB_USE_ASSERT is enabled.
        bool __TBB_EXPORTED_METHOD internal_try_acquire( recursive_mutex& m );

        //! Out-of-line release, used when TBB_USE_ASSERT is enabled.
        void __TBB_EXPORTED_METHOD internal_release();

        friend class recursive_mutex;
    };
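
    // Illustrative usage sketch (not part of the original header): scoped_lock
    // acquires in its constructor and releases in its destructor, so a function
    // that calls itself can safely re-acquire the same recursive_mutex at every
    // level of recursion.  The names `node`, `tree_mutex`, `visit` and `walk`
    // below are hypothetical.
    //
    //     tbb::recursive_mutex tree_mutex;
    //
    //     void walk( node* n ) {
    //         tbb::recursive_mutex::scoped_lock lock( tree_mutex );
    //         if( n ) {
    //             visit( n );
    //             walk( n->left );    // re-locks tree_mutex in the same thread
    //             walk( n->right );
    //         }
    //     }   // lock released here at each level of recursion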

    // Mutex traits
    static const bool is_rw_mutex = false;
    static const bool is_recursive_mutex = true;
    static const bool is_fair_mutex = false;

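    // Illustrative sketch (not part of the original header): generic code can
    // branch on these traits at compile time.  `require_recursive` below is a
    // hypothetical helper, not a TBB facility; it uses the classic pre-C++11
    // negative-array-size trick as a static assertion.
    //
    //     template<typename M>
    //     void require_recursive() {
    //         // Fails to compile for mutex types that are not recursive.
    //         typedef char check[ M::is_recursive_mutex ? 1 : -1 ];
    //     }
    //
    //     // require_recursive<tbb::recursive_mutex>();   // OK
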
    // C++0x compatibility interface

    //! Acquire the lock.
    void lock() {
#if TBB_USE_ASSERT
        aligned_space<scoped_lock,1> tmp;
        new(tmp.begin()) scoped_lock(*this);
#else
  #if _WIN32||_WIN64
        EnterCriticalSection(&impl);
  #else
        pthread_mutex_lock(&impl);
  #endif /* _WIN32||_WIN64 */
#endif /* TBB_USE_ASSERT */
    }

    //! Try acquiring the lock (non-blocking).
    /** Return true if the lock was acquired; false otherwise. */
    bool try_lock() {
#if TBB_USE_ASSERT
        aligned_space<scoped_lock,1> tmp;
        return (new(tmp.begin()) scoped_lock)->internal_try_acquire(*this);
#else
  #if _WIN32||_WIN64
        return TryEnterCriticalSection(&impl)!=0;
  #else
        return pthread_mutex_trylock(&impl)==0;
  #endif /* _WIN32||_WIN64 */
#endif /* TBB_USE_ASSERT */
    }

    //! Release the lock.
    void unlock() {
#if TBB_USE_ASSERT
        aligned_space<scoped_lock,1> tmp;
        scoped_lock& s = *tmp.begin();
        s.my_mutex = this;
        s.internal_release();
#else
  #if _WIN32||_WIN64
        LeaveCriticalSection(&impl);
  #else
        pthread_mutex_unlock(&impl);
  #endif /* _WIN32||_WIN64 */
#endif /* TBB_USE_ASSERT */
    }
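
    // Illustrative usage sketch (not part of the original header): lock(),
    // try_lock() and unlock() let recursive_mutex be used directly, or through
    // generic RAII wrappers that expect this interface (such as std::lock_guard
    // in C++11).  The names `counters_mutex` and `hits` below are hypothetical.
    //
    //     tbb::recursive_mutex counters_mutex;
    //
    //     void bump() {
    //         counters_mutex.lock();      // blocks; may be nested in the same thread
    //         ++hits;
    //         counters_mutex.unlock();    // one unlock per successful lock
    //     }
    //
    //     bool try_bump() {
    //         if( counters_mutex.try_lock() ) {   // non-blocking
    //             ++hits;
    //             counters_mutex.unlock();
    //             return true;
    //         }
    //         return false;
    //     }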

private:
#if _WIN32||_WIN64
    CRITICAL_SECTION impl;
    enum state_t {
        INITIALIZED=0x1234,
        DESTROYED=0x789A,
    } state;
#else
    pthread_mutex_t impl;
#endif /* _WIN32||_WIN64 */

    //! Out-of-line construction, used when TBB_USE_ASSERT or threading tools are enabled.
    void __TBB_EXPORTED_METHOD internal_construct();

    //! Out-of-line destruction, used when TBB_USE_ASSERT is enabled.
    void __TBB_EXPORTED_METHOD internal_destroy();
};

__TBB_DEFINE_PROFILING_SET_NAME(recursive_mutex)

} // namespace tbb

#endif /* __TBB_recursive_mutex_H */
