#ifndef _GLIBCXX_MUTEX
#define _GLIBCXX_MUTEX 1

#pragma GCC system_header

#ifndef __GXX_EXPERIMENTAL_CXX0X__
# include <bits/c++0x_warning.h>
#else

#include <tuple>
#include <chrono>
#include <exception>
#include <type_traits>
#include <functional>
#include <system_error>
#include <bits/functexcept.h>
#include <bits/gthr.h>
#include <bits/move.h> // for std::swap

#if defined(_GLIBCXX_HAS_GTHREADS) && defined(_GLIBCXX_USE_C99_STDINT_TR1)

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
  /// mutex
  class mutex
  {
    typedef __gthread_mutex_t  __native_type;
    __native_type  _M_mutex;

  public:
    typedef __native_type*  native_handle_type;

#ifdef __GTHREAD_MUTEX_INIT
    constexpr mutex() : _M_mutex(__GTHREAD_MUTEX_INIT) { }
#else
    mutex()
    { __GTHREAD_MUTEX_INIT_FUNCTION(&_M_mutex); }

    ~mutex() { __gthread_mutex_destroy(&_M_mutex); }
#endif

    mutex(const mutex&) = delete;
    mutex& operator=(const mutex&) = delete;

    void lock()
    {
      int __e = __gthread_mutex_lock(&_M_mutex);
      if (__e)
        __throw_system_error(__e);
    }

    bool try_lock()
    { return !__gthread_mutex_trylock(&_M_mutex); }

    void unlock()
    { __gthread_mutex_unlock(&_M_mutex); }

    native_handle_type native_handle()
    { return &_M_mutex; }
  };
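
  // Usage sketch (illustrative only, not part of this header): std::mutex
  // forwards lock/try_lock/unlock to the gthreads calls above.  The names in
  // this example are hypothetical.
  //
  //   std::mutex m;
  //   int counter = 0;
  //
  //   void increment()
  //   {
  //     m.lock();        // blocks until the mutex is acquired
  //     ++counter;       // critical section
  //     m.unlock();      // must run on every path; prefer a scoped guard
  //   }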
#ifndef __GTHREAD_RECURSIVE_MUTEX_INIT
  // Helper: gthreads does not define __gthread_recursive_mutex_destroy,
  // so obtain a plain __gthread_mutex_t to destroy instead.
  class __destroy_recursive_mutex
  {
    template<typename _Mx, typename _Rm>
      static void
      _S_destroy_win32(_Mx* __mx, _Rm const* __rmx)
      {
        __mx->counter = __rmx->counter;
        __mx->sema = __rmx->sema;
        __gthread_mutex_destroy(__mx);
      }

  public:
    // matches a gthr-win32.h recursive mutex
    template<typename _Rm>
      static typename enable_if<(bool)sizeof(&_Rm::sema), void>::type
      _S_destroy(_Rm* __mx)
      {
        __gthread_mutex_t __tmp;
        _S_destroy_win32(&__tmp, __mx);
      }

    // matches a recursive mutex with a member 'actual'
    template<typename _Rm>
      static typename enable_if<(bool)sizeof(&_Rm::actual), void>::type
      _S_destroy(_Rm* __mx)
      { __gthread_mutex_destroy(&__mx->actual); }

    // matches when there is only one mutex type
    template<typename _Rm>
      static typename
      enable_if<is_same<_Rm, __gthread_mutex_t>::value, void>::type
      _S_destroy(_Rm* __mx)
      { __gthread_mutex_destroy(__mx); }
  };
#endif
  /// recursive_mutex
  class recursive_mutex
  {
    typedef __gthread_recursive_mutex_t  __native_type;
    __native_type  _M_mutex;

  public:
    typedef __native_type*  native_handle_type;

#ifdef __GTHREAD_RECURSIVE_MUTEX_INIT
    recursive_mutex() : _M_mutex(__GTHREAD_RECURSIVE_MUTEX_INIT) { }
#else
    recursive_mutex()
    { __GTHREAD_RECURSIVE_MUTEX_INIT_FUNCTION(&_M_mutex); }

    ~recursive_mutex()
    { __destroy_recursive_mutex::_S_destroy(&_M_mutex); }
#endif

    void lock()
    {
      int __e = __gthread_recursive_mutex_lock(&_M_mutex);
      if (__e)
        __throw_system_error(__e);
    }

    bool try_lock()
    { return !__gthread_recursive_mutex_trylock(&_M_mutex); }

    void unlock()
    { __gthread_recursive_mutex_unlock(&_M_mutex); }

    native_handle_type native_handle()
    { return &_M_mutex; }
  };
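
  // Usage sketch (illustrative only, not part of this header): a
  // recursive_mutex may be re-locked by the thread that already owns it, so
  // one locking function can call another.  The names are hypothetical.
  //
  //   std::recursive_mutex rm;
  //
  //   void inner() { std::lock_guard<std::recursive_mutex> g(rm); /* ... */ }
  //
  //   void outer()
  //   {
  //     std::lock_guard<std::recursive_mutex> g(rm);
  //     inner();   // OK: the same thread locks rm a second time
  //   }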
  /// timed_mutex
  class timed_mutex
  {
    typedef __gthread_mutex_t  __native_type;

#ifdef _GLIBCXX_USE_CLOCK_MONOTONIC
    typedef chrono::monotonic_clock  __clock_t;
#else
    typedef chrono::high_resolution_clock  __clock_t;
#endif

    __native_type  _M_mutex;

  public:
    typedef __native_type*  native_handle_type;

#ifdef __GTHREAD_MUTEX_INIT
    timed_mutex() : _M_mutex(__GTHREAD_MUTEX_INIT) { }
#else
    timed_mutex()
    { __GTHREAD_MUTEX_INIT_FUNCTION(&_M_mutex); }

    ~timed_mutex() { __gthread_mutex_destroy(&_M_mutex); }
#endif

    void lock()
    {
      int __e = __gthread_mutex_lock(&_M_mutex);
      if (__e)
        __throw_system_error(__e);
    }

    bool try_lock()
    { return !__gthread_mutex_trylock(&_M_mutex); }

    template <class _Rep, class _Period>
      bool
      try_lock_for(const chrono::duration<_Rep, _Period>& __rtime)
      { return __try_lock_for_impl(__rtime); }

    template <class _Clock, class _Duration>
      bool
      try_lock_until(const chrono::time_point<_Clock, _Duration>& __atime)
      {
        chrono::time_point<_Clock, chrono::seconds> __s =
          chrono::time_point_cast<chrono::seconds>(__atime);

        chrono::nanoseconds __ns =
          chrono::duration_cast<chrono::nanoseconds>(__atime - __s);

        __gthread_time_t __ts = {
          static_cast<std::time_t>(__s.time_since_epoch().count()),
          static_cast<long>(__ns.count())
        };

        return !__gthread_mutex_timedlock(&_M_mutex, &__ts);
      }

    void unlock()
    { __gthread_mutex_unlock(&_M_mutex); }

    native_handle_type native_handle()
    { return &_M_mutex; }

  private:
    template<typename _Rep, typename _Period>
      typename enable_if<
        ratio_less_equal<__clock_t::period, _Period>::value, bool>::type
      __try_lock_for_impl(const chrono::duration<_Rep, _Period>& __rtime)
      {
        __clock_t::time_point __atime = __clock_t::now()
          + chrono::duration_cast<__clock_t::duration>(__rtime);

        return try_lock_until(__atime);
      }

    template <typename _Rep, typename _Period>
      typename enable_if<
        !ratio_less_equal<__clock_t::period, _Period>::value, bool>::type
      __try_lock_for_impl(const chrono::duration<_Rep, _Period>& __rtime)
      {
        __clock_t::time_point __atime = __clock_t::now() + __rtime;

        return try_lock_until(__atime);
      }
  };
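
  // Usage sketch (illustrative only, not part of this header): try_lock_for
  // gives up after a relative timeout instead of blocking indefinitely.  The
  // names are hypothetical.
  //
  //   std::timed_mutex tm;
  //
  //   bool do_work_with_timeout()
  //   {
  //     if (!tm.try_lock_for(std::chrono::milliseconds(100)))
  //       return false;              // could not acquire within 100 ms
  //     /* ... critical section ... */
  //     tm.unlock();
  //     return true;
  //   }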
  /// recursive_timed_mutex
  class recursive_timed_mutex
  {
    typedef __gthread_recursive_mutex_t  __native_type;

#ifdef _GLIBCXX_USE_CLOCK_MONOTONIC
    typedef chrono::monotonic_clock  __clock_t;
#else
    typedef chrono::high_resolution_clock  __clock_t;
#endif

    __native_type  _M_mutex;

  public:
    typedef __native_type*  native_handle_type;

#ifdef __GTHREAD_RECURSIVE_MUTEX_INIT
    recursive_timed_mutex() : _M_mutex(__GTHREAD_RECURSIVE_MUTEX_INIT) { }
#else
    recursive_timed_mutex()
    { __GTHREAD_RECURSIVE_MUTEX_INIT_FUNCTION(&_M_mutex); }

    ~recursive_timed_mutex()
    { __destroy_recursive_mutex::_S_destroy(&_M_mutex); }
#endif

    void lock()
    {
      int __e = __gthread_recursive_mutex_lock(&_M_mutex);
      if (__e)
        __throw_system_error(__e);
    }

    bool try_lock()
    { return !__gthread_recursive_mutex_trylock(&_M_mutex); }

    template <class _Rep, class _Period>
      bool
      try_lock_for(const chrono::duration<_Rep, _Period>& __rtime)
      { return __try_lock_for_impl(__rtime); }

    template <class _Clock, class _Duration>
      bool
      try_lock_until(const chrono::time_point<_Clock, _Duration>& __atime)
      {
        chrono::time_point<_Clock, chrono::seconds> __s =
          chrono::time_point_cast<chrono::seconds>(__atime);

        chrono::nanoseconds __ns =
          chrono::duration_cast<chrono::nanoseconds>(__atime - __s);

        __gthread_time_t __ts = {
          static_cast<std::time_t>(__s.time_since_epoch().count()),
          static_cast<long>(__ns.count())
        };

        return !__gthread_recursive_mutex_timedlock(&_M_mutex, &__ts);
      }

    void unlock()
    { __gthread_recursive_mutex_unlock(&_M_mutex); }

    native_handle_type native_handle()
    { return &_M_mutex; }

  private:
    template<typename _Rep, typename _Period>
      typename enable_if<
        ratio_less_equal<__clock_t::period, _Period>::value, bool>::type
      __try_lock_for_impl(const chrono::duration<_Rep, _Period>& __rtime)
      {
        __clock_t::time_point __atime = __clock_t::now()
          + chrono::duration_cast<__clock_t::duration>(__rtime);

        return try_lock_until(__atime);
      }

    template <typename _Rep, typename _Period>
      typename enable_if<
        !ratio_less_equal<__clock_t::period, _Period>::value, bool>::type
      __try_lock_for_impl(const chrono::duration<_Rep, _Period>& __rtime)
      {
        __clock_t::time_point __atime = __clock_t::now() + __rtime;

        return try_lock_until(__atime);
      }
  };
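
  // Usage sketch (illustrative only, not part of this header): try_lock_until
  // takes an absolute deadline, here computed from the system clock.  The
  // names are hypothetical.
  //
  //   std::recursive_timed_mutex rtm;
  //
  //   bool lock_before_deadline()
  //   {
  //     auto deadline = std::chrono::system_clock::now()
  //                     + std::chrono::seconds(1);
  //     if (!rtm.try_lock_until(deadline))
  //       return false;
  //     /* ... critical section ... */
  //     rtm.unlock();
  //     return true;
  //   }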
  /// Do not acquire ownership of the mutex.
  struct defer_lock_t { };

  /// Try to acquire ownership of the mutex without blocking.
  struct try_to_lock_t { };

  /// Assume the calling thread has already obtained mutex ownership
  /// and manage it.
  struct adopt_lock_t { };

  constexpr defer_lock_t   defer_lock { };
  constexpr try_to_lock_t  try_to_lock { };
  constexpr adopt_lock_t   adopt_lock { };
  /// @brief A simple scoped lock type that unlocks in its destructor.
  template<typename _Mutex>
    class lock_guard
    {
    public:
      typedef _Mutex mutex_type;

      explicit lock_guard(mutex_type& __m) : _M_device(__m)
      { _M_device.lock(); }

      lock_guard(mutex_type& __m, adopt_lock_t) : _M_device(__m)
      { } // calling thread owns mutex

      ~lock_guard()
      { _M_device.unlock(); }

      lock_guard(const lock_guard&) = delete;
      lock_guard& operator=(const lock_guard&) = delete;

    private:
      mutex_type&  _M_device;
    };
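
  // Usage sketch (illustrative only, not part of this header): lock_guard
  // locks in its constructor and unlocks in its destructor, so the mutex is
  // released on every exit path, including exceptions.  The names are
  // hypothetical.
  //
  //   std::mutex m;
  //   int total = 0;
  //
  //   void add(int v)
  //   {
  //     std::lock_guard<std::mutex> guard(m);  // m.lock() in the constructor
  //     total += v;                            // protected update
  //   }                                        // m.unlock() in the destructor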
  /// @brief A movable scoped lock type.
  template<typename _Mutex>
    class unique_lock
    {
    public:
      typedef _Mutex mutex_type;

      unique_lock() : _M_device(0), _M_owns(false) { }

      explicit unique_lock(mutex_type& __m)
      : _M_device(&__m), _M_owns(false)
      { lock(); _M_owns = true; }

      unique_lock(mutex_type& __m, defer_lock_t)
      : _M_device(&__m), _M_owns(false) { }

      unique_lock(mutex_type& __m, try_to_lock_t)
      : _M_device(&__m), _M_owns(_M_device->try_lock()) { }

      unique_lock(mutex_type& __m, adopt_lock_t)
      : _M_device(&__m), _M_owns(true) { }

      template<typename _Clock, typename _Duration>
        unique_lock(mutex_type& __m,
                    const chrono::time_point<_Clock, _Duration>& __atime)
        : _M_device(&__m), _M_owns(_M_device->try_lock_until(__atime)) { }

      template<typename _Rep, typename _Period>
        unique_lock(mutex_type& __m,
                    const chrono::duration<_Rep, _Period>& __rtime)
        : _M_device(&__m), _M_owns(_M_device->try_lock_for(__rtime)) { }

      ~unique_lock() { if (_M_owns) unlock(); }

      unique_lock(const unique_lock&) = delete;
      unique_lock& operator=(const unique_lock&) = delete;

      unique_lock(unique_lock&& __u)
      : _M_device(__u._M_device), _M_owns(__u._M_owns)
      { __u._M_device = 0; __u._M_owns = false; }

      void lock()
      {
        if (!_M_device)
          __throw_system_error(int(errc::operation_not_permitted));
        else if (_M_owns)
          __throw_system_error(int(errc::resource_deadlock_would_occur));
        else
          {
            _M_device->lock();
            _M_owns = true;
          }
      }

      bool try_lock()
      {
        if (!_M_device)
          __throw_system_error(int(errc::operation_not_permitted));
        else if (_M_owns)
          __throw_system_error(int(errc::resource_deadlock_would_occur));
        else
          {
            _M_owns = _M_device->try_lock();
            return _M_owns;
          }
      }

      template<typename _Clock, typename _Duration>
        bool
        try_lock_until(const chrono::time_point<_Clock, _Duration>& __atime)
        {
          if (!_M_device)
            __throw_system_error(int(errc::operation_not_permitted));
          else if (_M_owns)
            __throw_system_error(int(errc::resource_deadlock_would_occur));
          else
            {
              _M_owns = _M_device->try_lock_until(__atime);
              return _M_owns;
            }
        }

      template<typename _Rep, typename _Period>
        bool
        try_lock_for(const chrono::duration<_Rep, _Period>& __rtime)
        {
          if (!_M_device)
            __throw_system_error(int(errc::operation_not_permitted));
          else if (_M_owns)
            __throw_system_error(int(errc::resource_deadlock_would_occur));
          else
            {
              _M_owns = _M_device->try_lock_for(__rtime);
              return _M_owns;
            }
        }

      void unlock()
      {
        if (!_M_owns)
          __throw_system_error(int(errc::operation_not_permitted));
        else if (_M_device)
          {
            _M_device->unlock();
            _M_owns = false;
          }
      }

      void swap(unique_lock& __u)
      {
        std::swap(_M_device, __u._M_device);
        std::swap(_M_owns, __u._M_owns);
      }

      mutex_type* release()
      {
        mutex_type* __ret = _M_device;
        _M_device = 0;
        _M_owns = false;
        return __ret;
      }

      bool owns_lock() const
      { return _M_owns; }

      explicit operator bool() const
      { return owns_lock(); }

      mutex_type* mutex() const
      { return _M_device; }

    private:
      mutex_type*  _M_device;
      bool         _M_owns;
    };
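
  // Usage sketch (illustrative only, not part of this header): unlike
  // lock_guard, unique_lock can defer, try, time out, transfer and release
  // ownership; owns_lock() reports whether it currently holds the mutex.
  // The names are hypothetical.
  //
  //   std::mutex a, b;
  //
  //   void update_both()
  //   {
  //     std::unique_lock<std::mutex> la(a, std::defer_lock);
  //     std::unique_lock<std::mutex> lb(b, std::defer_lock);
  //     std::lock(la, lb);            // lock both without deadlock
  //     /* ... */
  //   }                               // both released by the destructors
  //
  //   bool try_it()
  //   {
  //     std::unique_lock<std::mutex> l(a, std::try_to_lock);
  //     return l.owns_lock();         // true only if the try_lock succeeded
  //   }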
  /// Swap overload for unique_lock objects.
  template<typename _Mutex>
    inline void
    swap(unique_lock<_Mutex>& __x, unique_lock<_Mutex>& __y)
    { __x.swap(__y); }

  template<int _Idx>
    struct __unlock_impl
    {
      template<typename... _Lock>
        static void
        __do_unlock(tuple<_Lock&...>& __locks)
        {
          std::get<_Idx>(__locks).unlock();
          __unlock_impl<_Idx - 1>::__do_unlock(__locks);
        }
    };

  template<>
    struct __unlock_impl<-1>
    {
      template<typename... _Lock>
        static void
        __do_unlock(tuple<_Lock&...>&)
        { }
    };

  template<typename _Lock>
    unique_lock<_Lock>
    __try_to_lock(_Lock& __l)
    { return unique_lock<_Lock>(__l, try_to_lock); }

  template<int _Idx, bool _Continue = true>
    struct __try_lock_impl
    {
      template<typename... _Lock>
        static void
        __do_try_lock(tuple<_Lock&...>& __locks, int& __idx)
        {
          __idx = _Idx;
          auto __lock = __try_to_lock(std::get<_Idx>(__locks));
          if (__lock.owns_lock())
            {
              __try_lock_impl<_Idx + 1, _Idx + 2 < sizeof...(_Lock)>::
                __do_try_lock(__locks, __idx);

              if (__idx == -1)
                __lock.release();
            }
        }
    };

  template<int _Idx>
    struct __try_lock_impl<_Idx, false>
    {
      template<typename... _Lock>
        static void
        __do_try_lock(tuple<_Lock&...>& __locks, int& __idx)
        {
          __idx = _Idx;
          auto __lock = __try_to_lock(std::get<_Idx>(__locks));
          if (__lock.owns_lock())
            {
              __idx = -1;
              __lock.release();
            }
        }
    };
  /** @brief Generic try_lock.
   *  @return -1 if every try_lock() call succeeded; otherwise the 0-based
   *          index of the argument that could not be locked.
   */
  template<typename _Lock1, typename _Lock2, typename... _Lock3>
    int
    try_lock(_Lock1& __l1, _Lock2& __l2, _Lock3&... __l3)
    {
      int __idx;
      auto __locks = std::tie(__l1, __l2, __l3...);
      __try
      { __try_lock_impl<0>::__do_try_lock(__locks, __idx); }
      __catch(...)
      { }
      return __idx;
    }
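
  // Usage sketch (illustrative only, not part of this header): the generic
  // try_lock returns -1 when every argument was locked, otherwise the 0-based
  // index of the argument that could not be locked (anything locked before it
  // is unlocked again).  The names are hypothetical.
  //
  //   std::mutex m1, m2;
  //
  //   void attempt()
  //   {
  //     int failed = std::try_lock(m1, m2);
  //     if (failed == -1)
  //       {
  //         /* ... both mutexes are held ... */
  //         m1.unlock();
  //         m2.unlock();
  //       }
  //   }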
736 template<
typename _L1,
typename _L2,
typename ..._L3>
738 lock(_L1& __l1, _L2& __l2, _L3&... __l3)
744 auto __locks = std::tie(__l2, __l3...);
745 __try_lock_impl<0,
sizeof...(_L3)>::__do_try_lock(__locks, __idx);
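
  // Usage sketch (illustrative only, not part of this header): std::lock
  // acquires several lockables without deadlock regardless of argument order;
  // adopt_lock then hands the already-held mutexes to RAII guards.  The names
  // are hypothetical.
  //
  //   std::mutex m1, m2;
  //
  //   void update_pair()
  //   {
  //     std::lock(m1, m2);                                   // deadlock-free
  //     std::lock_guard<std::mutex> g1(m1, std::adopt_lock);
  //     std::lock_guard<std::mutex> g2(m2, std::adopt_lock);
  //     /* ... */
  //   }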
  /// once_flag
  struct once_flag
  {
  private:
    typedef __gthread_once_t __native_type;
    __native_type  _M_once;

  public:
    constexpr once_flag() : _M_once(__GTHREAD_ONCE_INIT) { }

    once_flag(const once_flag&) = delete;
    once_flag& operator=(const once_flag&) = delete;

    template<typename _Callable, typename... _Args>
      friend void
      call_once(once_flag& __once, _Callable&& __f, _Args&&... __args);
  };
#ifdef _GLIBCXX_HAVE_TLS
  extern __thread void* __once_callable;
  extern __thread void (*__once_call)();

  template<typename _Callable>
    void
    __once_call_impl()
    { (*(_Callable*)__once_callable)(); }
#else
  extern function<void()> __once_functor;

  extern void
  __set_once_functor_lock_ptr(unique_lock<mutex>*);

  extern mutex&
  __get_once_mutex();
#endif

  extern "C" void __once_proxy();

  /// call_once
  template<typename _Callable, typename... _Args>
    void
    call_once(once_flag& __once, _Callable&& __f, _Args&&... __args)
    {
#ifdef _GLIBCXX_HAVE_TLS
      auto __bound_functor = std::bind<void>(std::forward<_Callable>(__f),
          std::forward<_Args>(__args)...);
      __once_callable = &__bound_functor;
      __once_call = &__once_call_impl<decltype(__bound_functor)>;
#else
      unique_lock<mutex> __functor_lock(__get_once_mutex());
      __once_functor = std::bind<void>(std::forward<_Callable>(__f),
          std::forward<_Args>(__args)...);
      __set_once_functor_lock_ptr(&__functor_lock);
#endif

      int __e = __gthread_once(&(__once._M_once), &__once_proxy);

#ifndef _GLIBCXX_HAVE_TLS
      if (__functor_lock)
        __set_once_functor_lock_ptr(0);
#endif

      if (__e)
        __throw_system_error(__e);
    }
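
  // Usage sketch (illustrative only, not part of this header): call_once runs
  // the callable exactly once per once_flag, even when several threads reach
  // it concurrently; later callers wait until that first call has finished.
  // The names are hypothetical.
  //
  //   std::once_flag flag;
  //   int* table = 0;
  //
  //   void init_table() { table = new int[256](); }
  //
  //   int* get_table()
  //   {
  //     std::call_once(flag, init_table);   // init_table runs exactly once
  //     return table;
  //   }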
_GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif // _GLIBCXX_HAS_GTHREADS && _GLIBCXX_USE_C99_STDINT_TR1

#endif // __GXX_EXPERIMENTAL_CXX0X__

#endif // _GLIBCXX_MUTEX