Diffstat (limited to 'absl/base/internal')
 absl/base/internal/low_level_alloc.cc | 12 ++++++------
 absl/base/internal/spinlock.h         | 14 +++++++-------
 2 files changed, 13 insertions(+), 13 deletions(-)
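
Both files switch from the unprefixed Clang thread-safety macros (GUARDED_BY, LOCKABLE, SCOPED_LOCKABLE, and friends) to the ABSL_-prefixed forms, whose names cannot collide with identically named macros in other projects' headers. A simplified sketch of what the prefixed macros expand to, condensed from absl/base/thread_annotations.h (the real header gates on compiler attribute support):

// Simplified sketch, not the verbatim header: under Clang the macro
// attaches a thread-safety attribute; on other compilers it is a no-op.
#if defined(__clang__)
#define ABSL_GUARDED_BY(x) __attribute__((guarded_by(x)))
#else
#define ABSL_GUARDED_BY(x)
#endif
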
diff --git a/absl/base/internal/low_level_alloc.cc b/absl/base/internal/low_level_alloc.cc
index 36e4f1bae03b..64d7aa80b131 100644
--- a/absl/base/internal/low_level_alloc.cc
+++ b/absl/base/internal/low_level_alloc.cc
@@ -203,9 +203,9 @@ struct LowLevelAlloc::Arena {
 
   base_internal::SpinLock mu;
   // Head of free list, sorted by address
-  AllocList freelist GUARDED_BY(mu);
+  AllocList freelist ABSL_GUARDED_BY(mu);
   // Count of allocated blocks
-  int32_t allocation_count GUARDED_BY(mu);
+  int32_t allocation_count ABSL_GUARDED_BY(mu);
   // flags passed to NewArena
   const uint32_t flags;
   // Result of sysconf(_SC_PAGESIZE)
@@ -215,7 +215,7 @@ struct LowLevelAlloc::Arena {
   // Smallest allocation block size
   const size_t min_size;
   // PRNG state
-  uint32_t random GUARDED_BY(mu);
+  uint32_t random ABSL_GUARDED_BY(mu);
 };
 
 namespace {
@@ -275,10 +275,10 @@ static const uintptr_t kMagicAllocated = 0x4c833e95U;
 static const uintptr_t kMagicUnallocated = ~kMagicAllocated;
 
 namespace {
-class SCOPED_LOCKABLE ArenaLock {
+class ABSL_SCOPED_LOCKABLE ArenaLock {
  public:
   explicit ArenaLock(LowLevelAlloc::Arena *arena)
-      EXCLUSIVE_LOCK_FUNCTION(arena->mu)
+      ABSL_EXCLUSIVE_LOCK_FUNCTION(arena->mu)
       : arena_(arena) {
 #ifndef ABSL_LOW_LEVEL_ALLOC_ASYNC_SIGNAL_SAFE_MISSING
     if ((arena->flags & LowLevelAlloc::kAsyncSignalSafe) != 0) {
@@ -290,7 +290,7 @@ class SCOPED_LOCKABLE ArenaLock {
     arena_->mu.Lock();
   }
   ~ArenaLock() { ABSL_RAW_CHECK(left_, "haven't left Arena region"); }
-  void Leave() UNLOCK_FUNCTION() {
+  void Leave() ABSL_UNLOCK_FUNCTION() {
     arena_->mu.Unlock();
 #ifndef ABSL_LOW_LEVEL_ALLOC_ASYNC_SIGNAL_SAFE_MISSING
     if (mask_valid_) {
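
The annotations above feed Clang's -Wthread-safety analysis. A minimal usage sketch with the public absl::Mutex API (not part of this commit; Counter and its members are illustrative names):

#include "absl/base/thread_annotations.h"
#include "absl/synchronization/mutex.h"

class Counter {
 public:
  void Increment() {
    absl::MutexLock lock(&mu_);  // MutexLock is itself ABSL_SCOPED_LOCKABLE.
    ++count_;                    // OK: mu_ is held here.
  }

  int Get() const {
    absl::MutexLock lock(&mu_);
    return count_;
  }

 private:
  mutable absl::Mutex mu_;
  // Clang warns on any access to count_ made without holding mu_.
  int count_ ABSL_GUARDED_BY(mu_) = 0;
};
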
diff --git a/absl/base/internal/spinlock.h b/absl/base/internal/spinlock.h
index 6ee60ac8c1ff..1d1bd6cfcf5a 100644
--- a/absl/base/internal/spinlock.h
+++ b/absl/base/internal/spinlock.h
@@ -48,7 +48,7 @@
 namespace absl {
 namespace base_internal {
 
-class LOCKABLE SpinLock {
+class ABSL_LOCKABLE SpinLock {
  public:
   SpinLock() : lockword_(kSpinLockCooperative) {
     ABSL_TSAN_MUTEX_CREATE(this, __tsan_mutex_not_static);
@@ -79,7 +79,7 @@ class LOCKABLE SpinLock {
   ~SpinLock() { ABSL_TSAN_MUTEX_DESTROY(this, __tsan_mutex_not_static); }
 
   // Acquire this SpinLock.
-  inline void Lock() EXCLUSIVE_LOCK_FUNCTION() {
+  inline void Lock() ABSL_EXCLUSIVE_LOCK_FUNCTION() {
     ABSL_TSAN_MUTEX_PRE_LOCK(this, 0);
     if (!TryLockImpl()) {
       SlowLock();
@@ -91,7 +91,7 @@ class LOCKABLE SpinLock {
   // acquisition was successful.  If the lock was not acquired, false is
   // returned.  If this SpinLock is free at the time of the call, TryLock
   // will return true with high probability.
-  inline bool TryLock() EXCLUSIVE_TRYLOCK_FUNCTION(true) {
+  inline bool TryLock() ABSL_EXCLUSIVE_TRYLOCK_FUNCTION(true) {
     ABSL_TSAN_MUTEX_PRE_LOCK(this, __tsan_mutex_try_lock);
     bool res = TryLockImpl();
     ABSL_TSAN_MUTEX_POST_LOCK(
@@ -101,7 +101,7 @@ class LOCKABLE SpinLock {
   }
 
   // Release this SpinLock, which must be held by the calling thread.
-  inline void Unlock() UNLOCK_FUNCTION() {
+  inline void Unlock() ABSL_UNLOCK_FUNCTION() {
     ABSL_TSAN_MUTEX_PRE_UNLOCK(this, 0);
     uint32_t lock_value = lockword_.load(std::memory_order_relaxed);
     lock_value = lockword_.exchange(lock_value & kSpinLockCooperative,
@@ -179,13 +179,13 @@ class LOCKABLE SpinLock {
 
 // Corresponding locker object that arranges to acquire a spinlock for
 // the duration of a C++ scope.
-class SCOPED_LOCKABLE SpinLockHolder {
+class ABSL_SCOPED_LOCKABLE SpinLockHolder {
  public:
-  inline explicit SpinLockHolder(SpinLock* l) EXCLUSIVE_LOCK_FUNCTION(l)
+  inline explicit SpinLockHolder(SpinLock* l) ABSL_EXCLUSIVE_LOCK_FUNCTION(l)
       : lock_(l) {
     l->Lock();
   }
-  inline ~SpinLockHolder() UNLOCK_FUNCTION() { lock_->Unlock(); }
+  inline ~SpinLockHolder() ABSL_UNLOCK_FUNCTION() { lock_->Unlock(); }
 
   SpinLockHolder(const SpinLockHolder&) = delete;
   SpinLockHolder& operator=(const SpinLockHolder&) = delete;
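
On the scoped holder, ABSL_EXCLUSIVE_LOCK_FUNCTION on the constructor and ABSL_UNLOCK_FUNCTION on the destructor tell the analysis that the lock is held for exactly the object's lifetime. A hedged illustration of the pattern (SpinLock is Abseil-internal, so code outside Abseil would use absl::Mutex instead; the names below are invented for the example):

#include "absl/base/internal/spinlock.h"
#include "absl/base/thread_annotations.h"

namespace {

absl::base_internal::SpinLock counter_lock;
int counter ABSL_GUARDED_BY(counter_lock) = 0;

void Increment() {
  absl::base_internal::SpinLockHolder holder(&counter_lock);
  ++counter;  // OK: counter_lock is held for the rest of the scope.
}  // ~SpinLockHolder() runs here and releases counter_lock.

}  // namespace
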