jfb created this revision.
jfb added reviewers: arphaman, EricWF.
Herald added subscribers: cfe-commits, christof, aheejin.

The atomic non-member functions accept pointers to std::atomic / 
std::atomic_flag as well as to the non-atomic value. These pointers are all 
dereferenced unconditionally when the calls are lowered, and will therefore 
fault if null. It's a small gotcha for new users, especially when they pass 
NULL as the expected value instead of passing a pointer to a value holding NULL 
(a minimal illustration follows the list below). We can therefore use the 
nonnull attribute to denote that:

- A warning should be generated if the argument is null
- It is undefined behavior if the argument is null (because a dereference will 
segfault)
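
A minimal illustration of the gotcha (hypothetical user code, not part of this 
patch): the expected value of a compare-exchange is passed by pointer, and 
passing null there compiles but faults at run time; with this patch clang 
diagnoses the call instead:

  #include <atomic>

  void cas_with_null_expected(std::atomic<int*>* a, int* desired) {
    // Correct: pass a pointer to an object whose value is null.
    int* expected = nullptr;
    std::atomic_compare_exchange_strong(a, &expected, desired);

    // Gotcha: passing null *as* the expected-value pointer. This used to
    // compile silently and fault at run time; with the nonnull attribute
    // clang emits:
    //   warning: null passed to a callee that requires a non-null argument
    // std::atomic_compare_exchange_strong(a, (int**)0, desired);
  }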

This patch adds support for this attribute for clang and GCC, and sticks to the 
subset of the syntax that both support. In particular, it works around this GCC 
oddity:

  https://gcc.gnu.org/bugzilla/show_bug.cgi?id=60625

The attributes are documented:

- https://gcc.gnu.org/onlinedocs/gcc-4.0.0/gcc/Function-Attributes.html
- https://clang.llvm.org/docs/AttributeReference.html#nullability-attributes
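
Concretely, the attribute is applied through a _LIBCPP_NONNULL macro placed 
before each declaration, with explicit 1-based parameter indices. For example 
(excerpted from the <atomic> changes below; the macro expands to 
__attribute__((nonnull(1))) when the attribute is available, and to nothing 
otherwise):

  template <class _Tp>
  _LIBCPP_NONNULL(1)
  inline _LIBCPP_INLINE_VISIBILITY
  void
  atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
  {
      __o->store(__d);
  }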

I'm authoring a companion clang patch for the __c11_* and __atomic_* builtins, 
which currently only warn on a subset of the pointer parameters.

In all cases the check needs to list the parameter indices explicitly rather 
than use the empty nonnull list, because some of the overloads are for 
atomic<T*> and the pointer values being stored or exchanged are themselves 
allowed to be null; see the sketch below.
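
A hedged example of valid user code that the bare (index-less) nonnull form 
would wrongly flag:

  #include <atomic>

  void store_null_value() {
    std::atomic<int*> p(nullptr);
    // Storing a null *value* is perfectly fine; only the pointer to the
    // atomic object itself must be non-null, hence nonnull(1) rather than
    // a bare nonnull attribute.
    std::atomic_store(&p, static_cast<int*>(nullptr));
  }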

rdar://problem/18473124


Repository:
  rCXX libc++

https://reviews.llvm.org/D47225

Files:
  include/__config
  include/atomic
  test/libcxx/atomics/diagnose_nonnull.fail.cpp

Index: test/libcxx/atomics/diagnose_nonnull.fail.cpp
===================================================================
--- /dev/null
+++ test/libcxx/atomics/diagnose_nonnull.fail.cpp
@@ -0,0 +1,92 @@
+//===----------------------------------------------------------------------===//
+//
+//                     The LLVM Compiler Infrastructure
+//
+// This file is dual licensed under the MIT and the University of Illinois Open
+// Source Licenses. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+
+// REQUIRES: verify-support
+// UNSUPPORTED: libcpp-has-no-threads
+
+// <atomic>
+
+// Test that null pointer parameters are diagnosed.
+
+#include <atomic>
+
+int main() {
+  std::atomic<int> ai = ATOMIC_VAR_INIT(0);
+  volatile std::atomic<int> vai = ATOMIC_VAR_INIT(0);
+  int i = 42;
+
+  atomic_is_lock_free((const volatile std::atomic<int>*)0); // expected-error {{null passed to a callee that requires a non-null argument}}
+  atomic_is_lock_free((const std::atomic<int>*)0); // expected-error {{null passed to a callee that requires a non-null argument}}
+  atomic_init((volatile std::atomic<int>*)0, 42); // expected-error {{null passed to a callee that requires a non-null argument}}
+  atomic_init((std::atomic<int>*)0, 42); // expected-error {{null passed to a callee that requires a non-null argument}}
+  atomic_store((volatile std::atomic<int>*)0, 42); // expected-error {{null passed to a callee that requires a non-null argument}}
+  atomic_store((std::atomic<int>*)0, 42); // expected-error {{null passed to a callee that requires a non-null argument}}
+  atomic_store_explicit((volatile std::atomic<int>*)0, 42, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  atomic_store_explicit((std::atomic<int>*)0, 42, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_load((const volatile std::atomic<int>*)0); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_load((const std::atomic<int>*)0); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_load_explicit((const volatile std::atomic<int>*)0, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_load_explicit((const std::atomic<int>*)0, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_exchange((volatile std::atomic<int>*)0, 42); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_exchange((std::atomic<int>*)0, 42); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_exchange_explicit((volatile std::atomic<int>*)0, 42, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_exchange_explicit((std::atomic<int>*)0, 42, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_compare_exchange_weak((volatile std::atomic<int>*)0, &i, 42); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_compare_exchange_weak((std::atomic<int>*)0, &i, 42); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_compare_exchange_strong((volatile std::atomic<int>*)0, &i, 42); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_compare_exchange_strong((std::atomic<int>*)0, &i, 42); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_compare_exchange_weak(&vai, (int*)0, 42); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_compare_exchange_weak(&ai, (int*)0, 42); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_compare_exchange_strong(&vai, (int*)0, 42); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_compare_exchange_strong(&ai, (int*)0, 42); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_compare_exchange_weak_explicit((volatile std::atomic<int>*)0, &i, 42, std::memory_order_relaxed, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_compare_exchange_weak_explicit((std::atomic<int>*)0, &i, 42, std::memory_order_relaxed, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_compare_exchange_strong_explicit((volatile std::atomic<int>*)0, &i, 42, std::memory_order_relaxed, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_compare_exchange_strong_explicit((std::atomic<int>*)0, &i, 42, std::memory_order_relaxed, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_compare_exchange_weak_explicit(&vai, (int*)0, 42, std::memory_order_relaxed, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_compare_exchange_weak_explicit(&ai, (int*)0, 42, std::memory_order_relaxed, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_compare_exchange_strong_explicit(&vai, (int*)0, 42, std::memory_order_relaxed, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_compare_exchange_strong_explicit(&ai, (int*)0, 42, std::memory_order_relaxed, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_fetch_add((volatile std::atomic<int>*)0, 42); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_fetch_add((std::atomic<int>*)0, 42); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_fetch_add((volatile std::atomic<int*>*)0, 42); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_fetch_add((std::atomic<int*>*)0, 42); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_fetch_add_explicit((volatile std::atomic<int>*)0, 42, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_fetch_add_explicit((std::atomic<int>*)0, 42, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_fetch_add_explicit((volatile std::atomic<int*>*)0, 42, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_fetch_add_explicit((std::atomic<int*>*)0, 42, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_fetch_sub((volatile std::atomic<int>*)0, 42); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_fetch_sub((std::atomic<int>*)0, 42); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_fetch_sub((volatile std::atomic<int*>*)0, 42); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_fetch_sub((std::atomic<int*>*)0, 42); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_fetch_sub_explicit((volatile std::atomic<int>*)0, 42, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_fetch_sub_explicit((std::atomic<int>*)0, 42, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_fetch_sub_explicit((volatile std::atomic<int*>*)0, 42, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_fetch_sub_explicit((std::atomic<int*>*)0, 42, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_fetch_and((volatile std::atomic<int>*)0, 42); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_fetch_and((std::atomic<int>*)0, 42); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_fetch_and_explicit((volatile std::atomic<int>*)0, 42, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_fetch_and_explicit((std::atomic<int>*)0, 42, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_fetch_or((volatile std::atomic<int>*)0, 42); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_fetch_or((std::atomic<int>*)0, 42); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_fetch_or_explicit((volatile std::atomic<int>*)0, 42, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_fetch_or_explicit((std::atomic<int>*)0, 42, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_fetch_xor((volatile std::atomic<int>*)0, 42); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_fetch_xor((std::atomic<int>*)0, 42); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_fetch_xor_explicit((volatile std::atomic<int>*)0, 42, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_fetch_xor_explicit((std::atomic<int>*)0, 42, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_flag_test_and_set((volatile std::atomic_flag*)0); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_flag_test_and_set((std::atomic_flag*)0); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_flag_test_and_set_explicit((volatile std::atomic_flag*)0, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_flag_test_and_set_explicit((std::atomic_flag*)0, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_flag_clear((volatile std::atomic_flag*)0); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_flag_clear((std::atomic_flag*)0); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_flag_clear_explicit((volatile std::atomic_flag*)0, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+  (void)atomic_flag_clear_explicit((std::atomic_flag*)0, std::memory_order_relaxed); // expected-error {{null passed to a callee that requires a non-null argument}}
+}
Index: include/atomic
===================================================================
--- include/atomic
+++ include/atomic
@@ -646,19 +646,23 @@
 } // namespace __gcc_atomic
 
 template <typename _Tp>
+_LIBCPP_NONNULL(1)
 static inline
 typename enable_if<
     __gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value>::type
-__c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
+__c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val)
+{
   __a->__a_value = __val;
 }
 
 template <typename _Tp>
+_LIBCPP_NONNULL(1)
 static inline
 typename enable_if<
     !__gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value &&
      __gcc_atomic::__can_assign<         _Atomic(_Tp)*, _Tp>::value>::type
-__c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
+__c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val)
+{
   // [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
   // the default operator= in an object is not volatile, a byte-by-byte copy
   // is required.
@@ -671,7 +675,9 @@
 }
 
 template <typename _Tp>
-static inline void __c11_atomic_init(_Atomic(_Tp)* __a,  _Tp __val) {
+_LIBCPP_NONNULL(1)
+static inline void __c11_atomic_init(_Atomic(_Tp)* __a,  _Tp __val)
+{
   __a->__a_value = __val;
 }
 
@@ -684,88 +690,108 @@
 }
 
 template <typename _Tp>
+_LIBCPP_NONNULL(1)
 static inline void __c11_atomic_store(volatile _Atomic(_Tp)* __a,  _Tp __val,
-                                      memory_order __order) {
+                                      memory_order __order)
+{
   return __atomic_store(&__a->__a_value, &__val,
                         __gcc_atomic::__to_gcc_order(__order));
 }
 
 template <typename _Tp>
+_LIBCPP_NONNULL(1)
 static inline void __c11_atomic_store(_Atomic(_Tp)* __a,  _Tp __val,
-                                      memory_order __order) {
+                                      memory_order __order)
+{
   __atomic_store(&__a->__a_value, &__val,
                  __gcc_atomic::__to_gcc_order(__order));
 }
 
 template <typename _Tp>
+_LIBCPP_NONNULL(1)
 static inline _Tp __c11_atomic_load(volatile _Atomic(_Tp)* __a,
-                                    memory_order __order) {
+                                    memory_order __order)
+{
   _Tp __ret;
   __atomic_load(&__a->__a_value, &__ret,
                 __gcc_atomic::__to_gcc_order(__order));
   return __ret;
 }
 
 template <typename _Tp>
-static inline _Tp __c11_atomic_load(_Atomic(_Tp)* __a, memory_order __order) {
+_LIBCPP_NONNULL(1)
+static inline _Tp __c11_atomic_load(_Atomic(_Tp)* __a, memory_order __order)
+{
   _Tp __ret;
   __atomic_load(&__a->__a_value, &__ret,
                 __gcc_atomic::__to_gcc_order(__order));
   return __ret;
 }
 
 template <typename _Tp>
+_LIBCPP_NONNULL(1)
 static inline _Tp __c11_atomic_exchange(volatile _Atomic(_Tp)* __a,
-                                        _Tp __value, memory_order __order) {
+                                        _Tp __value, memory_order __order)
+{
   _Tp __ret;
   __atomic_exchange(&__a->__a_value, &__value, &__ret,
                     __gcc_atomic::__to_gcc_order(__order));
   return __ret;
 }
 
 template <typename _Tp>
+_LIBCPP_NONNULL(1)
 static inline _Tp __c11_atomic_exchange(_Atomic(_Tp)* __a, _Tp __value,
-                                        memory_order __order) {
+                                        memory_order __order)
+{
   _Tp __ret;
   __atomic_exchange(&__a->__a_value, &__value, &__ret,
                     __gcc_atomic::__to_gcc_order(__order));
   return __ret;
 }
 
 template <typename _Tp>
+_LIBCPP_NONNULL(1, 2)
 static inline bool __c11_atomic_compare_exchange_strong(
     volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
-    memory_order __success, memory_order __failure) {
+    memory_order __success, memory_order __failure)
+{
   return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                    false,
                                    __gcc_atomic::__to_gcc_order(__success),
                                    __gcc_atomic::__to_gcc_failure_order(__failure));
 }
 
 template <typename _Tp>
+_LIBCPP_NONNULL(1, 2)
 static inline bool __c11_atomic_compare_exchange_strong(
     _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
-    memory_order __failure) {
+    memory_order __failure)
+{
   return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                    false,
                                    __gcc_atomic::__to_gcc_order(__success),
                                    __gcc_atomic::__to_gcc_failure_order(__failure));
 }
 
 template <typename _Tp>
+_LIBCPP_NONNULL(1, 2)
 static inline bool __c11_atomic_compare_exchange_weak(
     volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
-    memory_order __success, memory_order __failure) {
+    memory_order __success, memory_order __failure)
+{
   return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                    true,
                                    __gcc_atomic::__to_gcc_order(__success),
                                    __gcc_atomic::__to_gcc_failure_order(__failure));
 }
 
 template <typename _Tp>
+_LIBCPP_NONNULL(1, 2)
 static inline bool __c11_atomic_compare_exchange_weak(
     _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
-    memory_order __failure) {
+    memory_order __failure)
+{
   return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                    true,
                                    __gcc_atomic::__to_gcc_order(__success),
@@ -786,71 +812,91 @@
 struct __skip_amt<_Tp[n]> { };
 
 template <typename _Tp, typename _Td>
+_LIBCPP_NONNULL(1)
 static inline _Tp __c11_atomic_fetch_add(volatile _Atomic(_Tp)* __a,
-                                         _Td __delta, memory_order __order) {
+                                         _Td __delta, memory_order __order)
+{
   return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                             __gcc_atomic::__to_gcc_order(__order));
 }
 
 template <typename _Tp, typename _Td>
+_LIBCPP_NONNULL(1)
 static inline _Tp __c11_atomic_fetch_add(_Atomic(_Tp)* __a, _Td __delta,
-                                         memory_order __order) {
+                                         memory_order __order)
+{
   return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                             __gcc_atomic::__to_gcc_order(__order));
 }
 
 template <typename _Tp, typename _Td>
+_LIBCPP_NONNULL(1)
 static inline _Tp __c11_atomic_fetch_sub(volatile _Atomic(_Tp)* __a,
-                                         _Td __delta, memory_order __order) {
+                                         _Td __delta, memory_order __order)
+{
   return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                             __gcc_atomic::__to_gcc_order(__order));
 }
 
 template <typename _Tp, typename _Td>
+_LIBCPP_NONNULL(1)
 static inline _Tp __c11_atomic_fetch_sub(_Atomic(_Tp)* __a, _Td __delta,
-                                         memory_order __order) {
+                                         memory_order __order)
+{
   return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                             __gcc_atomic::__to_gcc_order(__order));
 }
 
 template <typename _Tp>
+_LIBCPP_NONNULL(1)
 static inline _Tp __c11_atomic_fetch_and(volatile _Atomic(_Tp)* __a,
-                                         _Tp __pattern, memory_order __order) {
+                                         _Tp __pattern, memory_order __order)
+{
   return __atomic_fetch_and(&__a->__a_value, __pattern,
                             __gcc_atomic::__to_gcc_order(__order));
 }
 
 template <typename _Tp>
+_LIBCPP_NONNULL(1)
 static inline _Tp __c11_atomic_fetch_and(_Atomic(_Tp)* __a,
-                                         _Tp __pattern, memory_order __order) {
+                                         _Tp __pattern, memory_order __order)
+{
   return __atomic_fetch_and(&__a->__a_value, __pattern,
                             __gcc_atomic::__to_gcc_order(__order));
 }
 
 template <typename _Tp>
+_LIBCPP_NONNULL(1)
 static inline _Tp __c11_atomic_fetch_or(volatile _Atomic(_Tp)* __a,
-                                        _Tp __pattern, memory_order __order) {
+                                        _Tp __pattern, memory_order __order)
+{
   return __atomic_fetch_or(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
 }
 
 template <typename _Tp>
+_LIBCPP_NONNULL(1)
 static inline _Tp __c11_atomic_fetch_or(_Atomic(_Tp)* __a, _Tp __pattern,
-                                        memory_order __order) {
+                                        memory_order __order)
+{
   return __atomic_fetch_or(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
 }
 
 template <typename _Tp>
+_LIBCPP_NONNULL(1)
 static inline _Tp __c11_atomic_fetch_xor(volatile _Atomic(_Tp)* __a,
-                                         _Tp __pattern, memory_order __order) {
+                                         _Tp __pattern, memory_order __order)
+{
   return __atomic_fetch_xor(&__a->__a_value, __pattern,
                             __gcc_atomic::__to_gcc_order(__order));
 }
 
 template <typename _Tp>
+_LIBCPP_NONNULL(1)
 static inline _Tp __c11_atomic_fetch_xor(_Atomic(_Tp)* __a, _Tp __pattern,
-                                         memory_order __order) {
+                                         memory_order __order)
+{
   return __atomic_fetch_xor(&__a->__a_value, __pattern,
                             __gcc_atomic::__to_gcc_order(__order));
 }
@@ -1164,14 +1210,16 @@
 // atomic_is_lock_free
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 bool
 atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
 {
     return __o->is_lock_free();
 }
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 bool
 atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
@@ -1182,14 +1230,16 @@
 // atomic_init
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 void
 atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
 {
     __c11_atomic_init(&__o->__a_, __d);
 }
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 void
 atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
@@ -1200,14 +1250,16 @@
 // atomic_store
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 void
 atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
 {
     __o->store(__d);
 }
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 void
 atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
@@ -1218,6 +1270,7 @@
 // atomic_store_explicit
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 void
 atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
@@ -1227,6 +1280,7 @@
 }
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 void
 atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
@@ -1238,14 +1292,16 @@
 // atomic_load
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 _Tp
 atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
 {
     return __o->load();
 }
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 _Tp
 atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
@@ -1256,6 +1312,7 @@
 // atomic_load_explicit
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 _Tp
 atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
@@ -1265,6 +1322,7 @@
 }
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 _Tp
 atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
@@ -1276,14 +1334,16 @@
 // atomic_exchange
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 _Tp
 atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
 {
     return __o->exchange(__d);
 }
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 _Tp
 atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
@@ -1294,14 +1354,16 @@
 // atomic_exchange_explicit
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 _Tp
 atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
 {
     return __o->exchange(__d, __m);
 }
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 _Tp
 atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
@@ -1312,14 +1374,16 @@
 // atomic_compare_exchange_weak
 
 template <class _Tp>
+_LIBCPP_NONNULL(1, 2)
 inline _LIBCPP_INLINE_VISIBILITY
 bool
 atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
 {
     return __o->compare_exchange_weak(*__e, __d);
 }
 
 template <class _Tp>
+_LIBCPP_NONNULL(1, 2)
 inline _LIBCPP_INLINE_VISIBILITY
 bool
 atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
@@ -1330,14 +1394,16 @@
 // atomic_compare_exchange_strong
 
 template <class _Tp>
+_LIBCPP_NONNULL(1, 2)
 inline _LIBCPP_INLINE_VISIBILITY
 bool
 atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
 {
     return __o->compare_exchange_strong(*__e, __d);
 }
 
 template <class _Tp>
+_LIBCPP_NONNULL(1, 2)
 inline _LIBCPP_INLINE_VISIBILITY
 bool
 atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
@@ -1348,6 +1414,7 @@
 // atomic_compare_exchange_weak_explicit
 
 template <class _Tp>
+_LIBCPP_NONNULL(1, 2)
 inline _LIBCPP_INLINE_VISIBILITY
 bool
 atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
@@ -1359,6 +1426,7 @@
 }
 
 template <class _Tp>
+_LIBCPP_NONNULL(1, 2)
 inline _LIBCPP_INLINE_VISIBILITY
 bool
 atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
@@ -1371,6 +1439,7 @@
 // atomic_compare_exchange_strong_explicit
 
 template <class _Tp>
+_LIBCPP_NONNULL(1, 2)
 inline _LIBCPP_INLINE_VISIBILITY
 bool
 atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
@@ -1382,6 +1451,7 @@
 }
 
 template <class _Tp>
+_LIBCPP_NONNULL(1, 2)
 inline _LIBCPP_INLINE_VISIBILITY
 bool
 atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
@@ -1395,6 +1465,7 @@
 // atomic_fetch_add
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 typename enable_if
 <
@@ -1407,6 +1478,7 @@
 }
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 typename enable_if
 <
@@ -1419,14 +1491,16 @@
 }
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 _Tp*
 atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
 {
     return __o->fetch_add(__op);
 }
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 _Tp*
 atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
@@ -1437,6 +1511,7 @@
 // atomic_fetch_add_explicit
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 typename enable_if
 <
@@ -1449,6 +1524,7 @@
 }
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 typename enable_if
 <
@@ -1461,6 +1537,7 @@
 }
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 _Tp*
 atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
@@ -1470,6 +1547,7 @@
 }
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 _Tp*
 atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
@@ -1480,6 +1558,7 @@
 // atomic_fetch_sub
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 typename enable_if
 <
@@ -1492,6 +1571,7 @@
 }
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 typename enable_if
 <
@@ -1504,14 +1584,16 @@
 }
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 _Tp*
 atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
 {
     return __o->fetch_sub(__op);
 }
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 _Tp*
 atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
@@ -1522,6 +1604,7 @@
 // atomic_fetch_sub_explicit
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 typename enable_if
 <
@@ -1534,6 +1617,7 @@
 }
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 typename enable_if
 <
@@ -1546,6 +1630,7 @@
 }
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 _Tp*
 atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
@@ -1555,6 +1640,7 @@
 }
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 _Tp*
 atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
@@ -1565,6 +1651,7 @@
 // atomic_fetch_and
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 typename enable_if
 <
@@ -1577,6 +1664,7 @@
 }
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 typename enable_if
 <
@@ -1591,6 +1679,7 @@
 // atomic_fetch_and_explicit
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 typename enable_if
 <
@@ -1603,6 +1692,7 @@
 }
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 typename enable_if
 <
@@ -1617,6 +1707,7 @@
 // atomic_fetch_or
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 typename enable_if
 <
@@ -1629,6 +1720,7 @@
 }
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 typename enable_if
 <
@@ -1643,6 +1735,7 @@
 // atomic_fetch_or_explicit
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 typename enable_if
 <
@@ -1655,6 +1748,7 @@
 }
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 typename enable_if
 <
@@ -1669,6 +1763,7 @@
 // atomic_fetch_xor
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 typename enable_if
 <
@@ -1681,6 +1776,7 @@
 }
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 typename enable_if
 <
@@ -1695,6 +1791,7 @@
 // atomic_fetch_xor_explicit
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 typename enable_if
 <
@@ -1707,6 +1804,7 @@
 }
 
 template <class _Tp>
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 typename enable_if
 <
@@ -1759,55 +1857,63 @@
 #endif
 } atomic_flag;
 
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 bool
 atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
 {
     return __o->test_and_set();
 }
 
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 bool
 atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
 {
     return __o->test_and_set();
 }
 
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 bool
 atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
 {
     return __o->test_and_set(__m);
 }
 
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 bool
 atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
 {
     return __o->test_and_set(__m);
 }
 
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 void
 atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
 {
     __o->clear();
 }
 
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 void
 atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
 {
     __o->clear();
 }
 
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 void
 atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
 {
     __o->clear(__m);
 }
 
+_LIBCPP_NONNULL(1)
 inline _LIBCPP_INLINE_VISIBILITY
 void
 atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
Index: include/__config
===================================================================
--- include/__config
+++ include/__config
@@ -1218,6 +1218,17 @@
 #  endif
 #endif
 
+#if __has_attribute(nonnull) || _GNUC_VER >= 400
+// The indicated pointer parameters must be non-null: passing null triggers a
+// warning and is undefined behavior. Omitting the parameter indices marks
+// every parameter of pointer type as non-null.
+//
+// Note: parameter indexing starts at 1.
+#  define _LIBCPP_NONNULL(...) __attribute__((nonnull(__VA_ARGS__)))
+#else
+#  define _LIBCPP_NONNULL(...)
+#endif
+
 // Define availability macros.
 #if defined(_LIBCPP_USE_AVAILABILITY_APPLE)
 #  define _LIBCPP_AVAILABILITY_SHARED_MUTEX                                    \