/* ***************************************************************************
 ********************************** SMutex ***********************************
 **************************************************************************** */
49
static SMap * _global_mutex_list = NULL;
52
static sulong _global_mutex_lock_id = 0;
55
static sulong _global_mutex_refcount = 0;
/* Take one reference on the global mutex registry.
 * FIX: the original expanded to 'global_mutex_refcount++' (missing the
 * leading underscore) — that identifier does not exist; the counter is
 * '_global_mutex_refcount'. */
#define _mutex_ref() _global_mutex_refcount++
61
_global_mutex_refcount--;
62
if (_global_mutex_refcount == 0) {
63
s_map_free (_global_mutex_list, TRUE);
67
#ifndef __STDC_NO_THREADS__
68
/* We have Std Thread support */
44
_Atomic(sboolean) locked;
75
_internal_s_mutex_free (SMutex * self) {
81
if (!_global_mutex_list) {
82
_global_mutex_list = s_map_new (FREEFUNC(_internal_s_mutex_free));
89
#else /* __STDC_NO_THREADS__ */
90
/* We do not have Std Thread support */
94
_Atomic sboolean is_locked;
50
SMutex * self = malloc (sizeof (SMutex));
52
atomic_init(self->locked, FALSE);
54
mtx_init (&(self->mutex), mtx_plain);
58
s_mutex_free (SMutex * self) {
59
_globa_mutex_unref ();
60
mtx_destroy (&(self->mutex));
65
s_mutex_lock (SMutex * self) {
66
mtx_lock (&(self->mutex));
67
atomic_store(self->locked, TRUE);
71
s_mutex_unlock (SMutex * self) {
72
mtx_unlock (&(self->mutex));
73
atomic_store(self->locked, FALSE);
77
s_mutex_check_lock (SMutex * self) {
78
return atomic_load(self->locked);