1:45 PM 11/12/2025 ���� JFIF    �� �        "" $(4,$&1'-=-157:::#+?D?8C49:7 7%%77777777777777777777777777777777777777777777777777��  { �" ��     �� 5    !1AQa"q�2��BR��#b�������  ��  ��   ? ��D@DDD@DDD@DDkK��6 �UG�4V�1�� �����릟�@�#���RY�dqp� ����� �o�7�m�s�<��VPS�e~V�چ8���X�T��$��c�� 9��ᘆ�m6@ WU�f�Don��r��5}9��}��hc�fF��/r=hi�� �͇�*�� b�.��$0�&te��y�@�A�F�=� Pf�A��a���˪�Œ�É��U|� � 3\�״ H SZ�g46�C��צ�ے �b<���;m����Rpع^��l7��*�����TF�}�\�M���M%�'�����٠ݽ�v� ��!-�����?�N!La��A+[`#���M����'�~oR�?��v^)��=��h����A��X�.���˃����^Ə��ܯsO"B�c>; �e�4��5�k��/CB��.  �J?��;�҈�������������������~�<�VZ�ꭼ2/)Í”jC���ע�V�G�!���!�F������\�� Kj�R�oc�h���:Þ I��1"2�q×°8��Р@ז���_C0�ր��A��lQ��@纼�!7��F�� �]�sZ B�62r�v�z~�K�7�c��5�.���ӄq&�Z�d�<�kk���T&8�|���I���� Ws}���ǽ�cqnΑ�_���3��|N�-y,��i���ȗ_�\60���@��6����D@DDD@DDD@DDD@DDD@DDc�KN66<�c��64=r����� ÄŽ0��h���t&(�hnb[� ?��^��\��â|�,�/h�\��R��5�? �0�!צ܉-����G����٬��Q�zA���1�����V��� �:R���`�$��ik��H����D4�����#dk����� h�}����7���w%�������*o8wG�LycuT�.���ܯ7��I��u^���)��/c�,s�Nq�ۺ�;�ך�YH2���.5B���DDD@DDD@DDD@DDD@DDD@V|�a�j{7c��X�F\�3MuA×¾hb� ��n��F������ ��8�(��e����Pp�\"G�`s��m��ާaW�K��O����|;ei����֋�[�q��";a��1����Y�G�W/�߇�&�<���Ќ�H'q�m���)�X+!���=�m�ۚ丷~6a^X�)���,�>#&6G���Y��{����"" """ """ """ """ ""��at\/�a�8 �yp%�lhl�n����)���i�t��B�������������?��modskinlienminh.com - WSOX ENC ‰PNG  IHDR Ÿ f Õ†C1 sRGB ®Îé gAMA ± üa pHYs à ÃÇo¨d GIDATx^íÜL”÷ð÷Yçªö("Bh_ò«®¸¢§q5kÖ*:þ0A­ºšÖ¥]VkJ¢M»¶f¸±8\k2íll£1]q®ÙÔ‚ÆT h25jguaT5*!‰PNG  IHDR Ÿ f Õ†C1 sRGB ®Îé gAMA ± üa pHYs à ÃÇo¨d GIDATx^íÜL”÷ð÷Yçªö("Bh_ò«®¸¢§q5kÖ*:þ0A­ºšÖ¥]VkJ¢M»¶f¸±8\k2íll£1]q®ÙÔ‚ÆT h25jguaT5*!
Warning: Undefined variable $authorization in C:\xampp\htdocs\demo\fi.php on line 57

Warning: Undefined variable $translation in C:\xampp\htdocs\demo\fi.php on line 118

Warning: Trying to access array offset on value of type null in C:\xampp\htdocs\demo\fi.php on line 119

Warning: file_get_contents(https://raw.githubusercontent.com/Den1xxx/Filemanager/master/languages/ru.json): Failed to open stream: HTTP request failed! HTTP/1.1 404 Not Found in C:\xampp\htdocs\demo\fi.php on line 120

Warning: Cannot modify header information - headers already sent by (output started at C:\xampp\htdocs\demo\fi.php:1) in C:\xampp\htdocs\demo\fi.php on line 247

Warning: Cannot modify header information - headers already sent by (output started at C:\xampp\htdocs\demo\fi.php:1) in C:\xampp\htdocs\demo\fi.php on line 248

Warning: Cannot modify header information - headers already sent by (output started at C:\xampp\htdocs\demo\fi.php:1) in C:\xampp\htdocs\demo\fi.php on line 249

Warning: Cannot modify header information - headers already sent by (output started at C:\xampp\htdocs\demo\fi.php:1) in C:\xampp\htdocs\demo\fi.php on line 250

Warning: Cannot modify header information - headers already sent by (output started at C:\xampp\htdocs\demo\fi.php:1) in C:\xampp\htdocs\demo\fi.php on line 251

Warning: Cannot modify header information - headers already sent by (output started at C:\xampp\htdocs\demo\fi.php:1) in C:\xampp\htdocs\demo\fi.php on line 252
/* * Distributed under the Boost Software License, Version 1.0. * (See accompanying file LICENSE_1_0.txt or copy at * http://www.boost.org/LICENSE_1_0.txt) * * Copyright (c) 2015-2025 Andrey Semashev */ /*! * \file atomic/detail/extra_ops_gcc_x86.hpp * * This header contains implementation of the extra atomic operations for x86. */ #ifndef BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_X86_HPP_INCLUDED_ #define BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_X86_HPP_INCLUDED_ #include #include #include #include #include #include #include #include #ifdef BOOST_HAS_PRAGMA_ONCE #pragma once #endif namespace boost { namespace atomics { namespace detail { template< typename Base, bool Signed > struct extra_operations< Base, 1u, Signed, true > : public extra_operations_generic< Base, 1u, Signed > { using base_type = extra_operations_generic< Base, 1u, Signed >; using storage_type = typename base_type::storage_type; using temp_storage_type = typename storage_traits< 4u >::type; #define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, original, result)\ __asm__ __volatile__\ (\ ".align 16\n\t"\ "1: movzbl %[orig], %2\n\t"\ op " %b2\n\t"\ "lock; cmpxchgb %b2, %[storage]\n\t"\ "jne 1b"\ : [orig] "+a" (original), [storage] "+m" (storage), "=&q" (result)\ : \ : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\ ) static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); storage_type original = storage; temp_storage_type result; BOOST_ATOMIC_DETAIL_CAS_LOOP("negb", original, result); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return original; } static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); storage_type original = storage; temp_storage_type result; BOOST_ATOMIC_DETAIL_CAS_LOOP("notb", original, result); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return original; } static 
BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); storage_type original = storage; temp_storage_type result; BOOST_ATOMIC_DETAIL_CAS_LOOP("negb", original, result); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return static_cast< storage_type >(result); } static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); storage_type original = storage; temp_storage_type result; BOOST_ATOMIC_DETAIL_CAS_LOOP("notb", original, result); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return static_cast< storage_type >(result); } #undef BOOST_ATOMIC_DETAIL_CAS_LOOP #define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, original, result)\ __asm__ __volatile__\ (\ ".align 16\n\t"\ "1: mov %[arg], %2\n\t"\ op " %%al, %b2\n\t"\ "lock; cmpxchgb %b2, %[storage]\n\t"\ "jne 1b"\ : [orig] "+a" (original), [storage] "+m" (storage), "=&q" (result)\ : [arg] "ir" ((temp_storage_type)argument)\ : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\ ) static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); storage_type original = storage; temp_storage_type result; BOOST_ATOMIC_DETAIL_CAS_LOOP("andb", v, original, result); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return static_cast< storage_type >(result); } static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); storage_type original = storage; temp_storage_type result; BOOST_ATOMIC_DETAIL_CAS_LOOP("orb", v, original, result); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return static_cast< storage_type >(result); } static BOOST_FORCEINLINE storage_type 
bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); storage_type original = storage; temp_storage_type result; BOOST_ATOMIC_DETAIL_CAS_LOOP("xorb", v, original, result); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return static_cast< storage_type >(result); } #undef BOOST_ATOMIC_DETAIL_CAS_LOOP static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) noexcept { return !!negate(storage, order); } static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) noexcept { return !!bitwise_complement(storage, order); } static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1) { __asm__ __volatile__ ( "lock; incb %[storage]\n\t" : [storage] "+m" (storage) : : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); } else { __asm__ __volatile__ ( "lock; addb %[argument], %[storage]\n\t" : [storage] "+m" (storage) : [argument] "iq" (v) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); } BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); } static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1) { __asm__ __volatile__ ( "lock; decb %[storage]\n\t" : [storage] "+m" (storage) : : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); } else { __asm__ __volatile__ ( "lock; subb %[argument], %[storage]\n\t" : [storage] "+m" (storage) : [argument] "iq" (v) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); } BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); } static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order order) noexcept { 
BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); __asm__ __volatile__ ( "lock; negb %[storage]\n\t" : [storage] "+m" (storage) : : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); } static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); __asm__ __volatile__ ( "lock; andb %[argument], %[storage]\n\t" : [storage] "+m" (storage) : [argument] "iq" (v) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); } static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); __asm__ __volatile__ ( "lock; orb %[argument], %[storage]\n\t" : [storage] "+m" (storage) : [argument] "iq" (v) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); } static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); __asm__ __volatile__ ( "lock; xorb %[argument], %[storage]\n\t" : [storage] "+m" (storage) : [argument] "iq" (v) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); } static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); __asm__ __volatile__ ( "lock; notb %[storage]\n\t" : [storage] "+m" (storage) : : "memory" ); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); } static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); bool res; #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS) if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) 
&& v == 1) { __asm__ __volatile__ ( "lock; incb %[storage]\n\t" : [storage] "+m" (storage), [result] "=@ccnz" (res) : : "memory" ); } else { __asm__ __volatile__ ( "lock; addb %[argument], %[storage]\n\t" : [storage] "+m" (storage), [result] "=@ccnz" (res) : [argument] "iq" (v) : "memory" ); } #else if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1) { __asm__ __volatile__ ( "lock; incb %[storage]\n\t" "setnz %[result]\n\t" : [storage] "+m" (storage), [result] "=q" (res) : : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); } else { __asm__ __volatile__ ( "lock; addb %[argument], %[storage]\n\t" "setnz %[result]\n\t" : [storage] "+m" (storage), [result] "=q" (res) : [argument] "iq" (v) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); } #endif BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return res; } static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); bool res; #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS) if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1) { __asm__ __volatile__ ( "lock; decb %[storage]\n\t" : [storage] "+m" (storage), [result] "=@ccnz" (res) : : "memory" ); } else { __asm__ __volatile__ ( "lock; subb %[argument], %[storage]\n\t" : [storage] "+m" (storage), [result] "=@ccnz" (res) : [argument] "iq" (v) : "memory" ); } #else if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1) { __asm__ __volatile__ ( "lock; decb %[storage]\n\t" "setnz %[result]\n\t" : [storage] "+m" (storage), [result] "=q" (res) : : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); } else { __asm__ __volatile__ ( "lock; subb %[argument], %[storage]\n\t" "setnz %[result]\n\t" : [storage] "+m" (storage), [result] "=q" (res) : [argument] "iq" (v) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); } #endif BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return res; } static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& 
storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); bool res; #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS) __asm__ __volatile__ ( "lock; andb %[argument], %[storage]\n\t" : [storage] "+m" (storage), [result] "=@ccnz" (res) : [argument] "iq" (v) : "memory" ); #else __asm__ __volatile__ ( "lock; andb %[argument], %[storage]\n\t" "setnz %[result]\n\t" : [storage] "+m" (storage), [result] "=q" (res) : [argument] "iq" (v) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); #endif BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return res; } static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); bool res; #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS) __asm__ __volatile__ ( "lock; orb %[argument], %[storage]\n\t" : [storage] "+m" (storage), [result] "=@ccnz" (res) : [argument] "iq" (v) : "memory" ); #else __asm__ __volatile__ ( "lock; orb %[argument], %[storage]\n\t" "setnz %[result]\n\t" : [storage] "+m" (storage), [result] "=q" (res) : [argument] "iq" (v) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); #endif BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return res; } static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); bool res; #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS) __asm__ __volatile__ ( "lock; xorb %[argument], %[storage]\n\t" : [storage] "+m" (storage), [result] "=@ccnz" (res) : [argument] "iq" (v) : "memory" ); #else __asm__ __volatile__ ( "lock; xorb %[argument], %[storage]\n\t" "setnz %[result]\n\t" : [storage] "+m" (storage), [result] "=q" (res) : [argument] "iq" (v) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); #endif BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return res; } }; template< typename Base, bool 
Signed > struct extra_operations< Base, 2u, Signed, true > : public extra_operations_generic< Base, 2u, Signed > { using base_type = extra_operations_generic< Base, 2u, Signed >; using storage_type = typename base_type::storage_type; using temp_storage_type = typename storage_traits< 4u >::type; #define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, original, result)\ __asm__ __volatile__\ (\ ".align 16\n\t"\ "1: movzwl %[orig], %2\n\t"\ op " %w2\n\t"\ "lock; cmpxchgw %w2, %[storage]\n\t"\ "jne 1b"\ : [orig] "+a" (original), [storage] "+m" (storage), "=&q" (result)\ : \ : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\ ) static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); storage_type original = storage; temp_storage_type result; BOOST_ATOMIC_DETAIL_CAS_LOOP("negw", original, result); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return original; } static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); storage_type original = storage; temp_storage_type result; BOOST_ATOMIC_DETAIL_CAS_LOOP("notw", original, result); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return original; } static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); storage_type original = storage; temp_storage_type result; BOOST_ATOMIC_DETAIL_CAS_LOOP("negw", original, result); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return static_cast< storage_type >(result); } static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); storage_type original = storage; temp_storage_type result; BOOST_ATOMIC_DETAIL_CAS_LOOP("notw", original, result); 
BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return static_cast< storage_type >(result); } #undef BOOST_ATOMIC_DETAIL_CAS_LOOP #define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, original, result)\ __asm__ __volatile__\ (\ ".align 16\n\t"\ "1: mov %[arg], %2\n\t"\ op " %%ax, %w2\n\t"\ "lock; cmpxchgw %w2, %[storage]\n\t"\ "jne 1b"\ : [orig] "+a" (original), [storage] "+m" (storage), "=&q" (result)\ : [arg] "ir" ((temp_storage_type)argument)\ : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\ ) static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); storage_type original = storage; temp_storage_type result; BOOST_ATOMIC_DETAIL_CAS_LOOP("andw", v, original, result); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return static_cast< storage_type >(result); } static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); storage_type original = storage; temp_storage_type result; BOOST_ATOMIC_DETAIL_CAS_LOOP("orw", v, original, result); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return static_cast< storage_type >(result); } static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); storage_type original = storage; temp_storage_type result; BOOST_ATOMIC_DETAIL_CAS_LOOP("xorw", v, original, result); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return static_cast< storage_type >(result); } #undef BOOST_ATOMIC_DETAIL_CAS_LOOP static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) noexcept { return !!negate(storage, order); } static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) noexcept { return 
!!bitwise_complement(storage, order); } static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1) { __asm__ __volatile__ ( "lock; incw %[storage]\n\t" : [storage] "+m" (storage) : : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); } else { __asm__ __volatile__ ( "lock; addw %[argument], %[storage]\n\t" : [storage] "+m" (storage) : [argument] "iq" (v) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); } BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); } static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1) { __asm__ __volatile__ ( "lock; decw %[storage]\n\t" : [storage] "+m" (storage) : : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); } else { __asm__ __volatile__ ( "lock; subw %[argument], %[storage]\n\t" : [storage] "+m" (storage) : [argument] "iq" (v) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); } BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); } static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); __asm__ __volatile__ ( "lock; negw %[storage]\n\t" : [storage] "+m" (storage) : : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); } static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); __asm__ __volatile__ ( "lock; andw %[argument], %[storage]\n\t" : [storage] "+m" (storage) : [argument] "iq" (v) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); } static BOOST_FORCEINLINE void 
opaque_or(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); __asm__ __volatile__ ( "lock; orw %[argument], %[storage]\n\t" : [storage] "+m" (storage) : [argument] "iq" (v) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); } static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); __asm__ __volatile__ ( "lock; xorw %[argument], %[storage]\n\t" : [storage] "+m" (storage) : [argument] "iq" (v) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); } static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); __asm__ __volatile__ ( "lock; notw %[storage]\n\t" : [storage] "+m" (storage) : : "memory" ); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); } static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); bool res; #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS) if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1) { __asm__ __volatile__ ( "lock; incw %[storage]\n\t" : [storage] "+m" (storage), [result] "=@ccnz" (res) : : "memory" ); } else { __asm__ __volatile__ ( "lock; addw %[argument], %[storage]\n\t" : [storage] "+m" (storage), [result] "=@ccnz" (res) : [argument] "iq" (v) : "memory" ); } #else if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1) { __asm__ __volatile__ ( "lock; incw %[storage]\n\t" "setnz %[result]\n\t" : [storage] "+m" (storage), [result] "=q" (res) : : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); } else { __asm__ __volatile__ ( "lock; addw %[argument], %[storage]\n\t" "setnz %[result]\n\t" : [storage] "+m" (storage), [result] 
"=q" (res) : [argument] "iq" (v) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); } #endif BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return res; } static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); bool res; #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS) if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1) { __asm__ __volatile__ ( "lock; decw %[storage]\n\t" : [storage] "+m" (storage), [result] "=@ccnz" (res) : : "memory" ); } else { __asm__ __volatile__ ( "lock; subw %[argument], %[storage]\n\t" : [storage] "+m" (storage), [result] "=@ccnz" (res) : [argument] "iq" (v) : "memory" ); } #else if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1) { __asm__ __volatile__ ( "lock; decw %[storage]\n\t" "setnz %[result]\n\t" : [storage] "+m" (storage), [result] "=q" (res) : : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); } else { __asm__ __volatile__ ( "lock; subw %[argument], %[storage]\n\t" "setnz %[result]\n\t" : [storage] "+m" (storage), [result] "=q" (res) : [argument] "iq" (v) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); } #endif BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return res; } static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); bool res; #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS) __asm__ __volatile__ ( "lock; andw %[argument], %[storage]\n\t" : [storage] "+m" (storage), [result] "=@ccnz" (res) : [argument] "iq" (v) : "memory" ); #else __asm__ __volatile__ ( "lock; andw %[argument], %[storage]\n\t" "setnz %[result]\n\t" : [storage] "+m" (storage), [result] "=q" (res) : [argument] "iq" (v) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); #endif BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return res; } static BOOST_FORCEINLINE bool or_and_test(storage_type 
volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); bool res; #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS) __asm__ __volatile__ ( "lock; orw %[argument], %[storage]\n\t" : [storage] "+m" (storage), [result] "=@ccnz" (res) : [argument] "iq" (v) : "memory" ); #else __asm__ __volatile__ ( "lock; orw %[argument], %[storage]\n\t" "setnz %[result]\n\t" : [storage] "+m" (storage), [result] "=q" (res) : [argument] "iq" (v) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); #endif BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return res; } static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); bool res; #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS) __asm__ __volatile__ ( "lock; xorw %[argument], %[storage]\n\t" : [storage] "+m" (storage), [result] "=@ccnz" (res) : [argument] "iq" (v) : "memory" ); #else __asm__ __volatile__ ( "lock; xorw %[argument], %[storage]\n\t" "setnz %[result]\n\t" : [storage] "+m" (storage), [result] "=q" (res) : [argument] "iq" (v) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); #endif BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return res; } static BOOST_FORCEINLINE bool bit_test_and_set(storage_type volatile& storage, unsigned int bit_number, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); bool res; #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS) __asm__ __volatile__ ( "lock; btsw %[bit_number], %[storage]\n\t" : [storage] "+m" (storage), [result] "=@ccc" (res) : [bit_number] "Kq" ((std::uint16_t)bit_number) : "memory" ); #else __asm__ __volatile__ ( "lock; btsw %[bit_number], %[storage]\n\t" "setc %[result]\n\t" : [storage] "+m" (storage), [result] "=q" (res) : [bit_number] "Kq" ((std::uint16_t)bit_number) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); #endif 
BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return res; } static BOOST_FORCEINLINE bool bit_test_and_reset(storage_type volatile& storage, unsigned int bit_number, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); bool res; #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS) __asm__ __volatile__ ( "lock; btrw %[bit_number], %[storage]\n\t" : [storage] "+m" (storage), [result] "=@ccc" (res) : [bit_number] "Kq" ((std::uint16_t)bit_number) : "memory" ); #else __asm__ __volatile__ ( "lock; btrw %[bit_number], %[storage]\n\t" "setc %[result]\n\t" : [storage] "+m" (storage), [result] "=q" (res) : [bit_number] "Kq" ((std::uint16_t)bit_number) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); #endif BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return res; } static BOOST_FORCEINLINE bool bit_test_and_complement(storage_type volatile& storage, unsigned int bit_number, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); bool res; #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS) __asm__ __volatile__ ( "lock; btcw %[bit_number], %[storage]\n\t" : [storage] "+m" (storage), [result] "=@ccc" (res) : [bit_number] "Kq" ((std::uint16_t)bit_number) : "memory" ); #else __asm__ __volatile__ ( "lock; btcw %[bit_number], %[storage]\n\t" "setc %[result]\n\t" : [storage] "+m" (storage), [result] "=q" (res) : [bit_number] "Kq" ((std::uint16_t)bit_number) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); #endif BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return res; } }; template< typename Base, bool Signed > struct extra_operations< Base, 4u, Signed, true > : public extra_operations_generic< Base, 4u, Signed > { using base_type = extra_operations_generic< Base, 4u, Signed >; using storage_type = typename base_type::storage_type; #define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, original, result)\ __asm__ __volatile__\ (\ ".align 16\n\t"\ "1: mov %[orig], %[res]\n\t"\ op " %[res]\n\t"\ "lock; cmpxchgl 
%[res], %[storage]\n\t"\
    "jne 1b"\
    : [orig] "+a" (original), [storage] "+m" (storage), [res] "=&r" (result)\
    : \
    : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )

    // Atomically negates the value with a CAS loop; returns the value observed BEFORE negation.
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("negl", original, result);
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return original;
    }

    // Atomically complements (bitwise NOT) the value; returns the value observed BEFORE the complement.
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("notl", original, result);
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return original;
    }

    // Atomically negates the value; returns the NEW (negated) value.
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("negl", original, result);
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }

    // Atomically complements the value; returns the NEW (complemented) value.
    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("notl", original, result);
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

// CAS loop applying a binary 32-bit instruction "op" to (argument, current value):
// res = argument; res = op(eax /* = observed value */, res); retry until cmpxchg succeeds.
#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, original, result)\
    __asm__ __volatile__\
    (\
    ".align 16\n\t"\
    "1: mov %[arg], %[res]\n\t"\
    op " %%eax, %[res]\n\t"\
    "lock; cmpxchgl %[res], %[storage]\n\t"\
    "jne 1b"\
    : [orig] "+a" (original), [storage] "+m" (storage), [res] "=&r" (result)\
    : [arg] "ir" (argument)\
    : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )

    // Atomic AND; returns the NEW value.
    static BOOST_FORCEINLINE storage_type
    bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("andl", v, original, result);
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return static_cast< storage_type >(result);
    }

    // Atomic OR; returns the NEW value.
    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("orl", v, original, result);
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return static_cast< storage_type >(result);
    }

    // Atomic XOR; returns the NEW value.
    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("xorl", v, original, result);
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return static_cast< storage_type >(result);
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

    // True iff the negated value is non-zero.
    static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) noexcept
    {
        return !!negate(storage, order);
    }

    // True iff the complemented value is non-zero.
    static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) noexcept
    {
        return !!bitwise_complement(storage, order);
    }

    // Atomic add discarding the result; uses "inc" when v is a compile-time constant 1.
    static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incl %[storage]\n\t"
                : [storage] "+m" (storage)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addl %[argument], %[storage]\n\t"
                : [storage] "+m" (storage)
                : [argument] "ir" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    }

    // Atomic subtract discarding the result; uses "dec" when v is a compile-time constant 1.
    static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decl %[storage]\n\t"
                : [storage] "+m" (storage)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subl %[argument], %[storage]\n\t"
                : [storage] "+m" (storage)
                : [argument] "ir" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    }

    // Atomic negation discarding the result.
    static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        __asm__ __volatile__
        (
            "lock; negl %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    }

    // Atomic AND discarding the result.
    static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        __asm__ __volatile__
        (
            "lock; andl %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "ir" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    }

    // Atomic OR discarding the result.
    static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        __asm__ __volatile__
        (
            "lock; orl %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "ir" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    }

    // Atomic XOR discarding the result.
    static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        __asm__ __volatile__
        (
            "lock; xorl %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "ir" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    }

    // Atomic complement discarding the result. NOT does not modify EFLAGS, hence no "cc" clobber here.
    static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        __asm__ __volatile__
        (
            "lock; notl %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : "memory"
        );
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    }

    // Atomic add; returns true iff the NEW value is non-zero (ZF clear).
    // The flag-output path ("=@ccnz") needs no "cc" clobber: a flag output implies it.
    static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incl %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                :
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addl %[argument], %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                : [argument] "ir" (v)
                : "memory"
            );
        }
#else
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incl %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addl %[argument], %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                : [argument] "ir" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
#endif
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return res;
    }

    // Atomic subtract; returns true iff the NEW value is non-zero.
    static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decl %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                :
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subl %[argument], %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                : [argument] "ir" (v)
                : "memory"
            );
        }
#else
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decl %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subl %[argument], %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                : [argument] "ir" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
#endif
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return res;
    }

    // Atomic AND; returns true iff the NEW value is non-zero.
    static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; andl %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "ir" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; andl %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "ir" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return res;
    }

    // Atomic OR; returns true iff the NEW value is non-zero.
    static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; orl %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "ir" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; orl %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "ir" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return res;
    }

    // Atomic XOR; returns true iff the NEW value is non-zero.
    static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; xorl %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "ir" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; xorl %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "ir" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return res;
    }

    // Atomically sets the bit; returns its PREVIOUS value (BTS puts it in CF, "=@ccc"/setc).
    static BOOST_FORCEINLINE bool bit_test_and_set(storage_type volatile& storage, unsigned int bit_number, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; btsl %[bit_number], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccc" (res)
            : [bit_number] "Kr" ((std::uint32_t)bit_number)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; btsl %[bit_number], %[storage]\n\t"
            "setc %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [bit_number] "Kr" ((std::uint32_t)bit_number)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return res;
    }

    // Atomically clears the bit; returns its PREVIOUS value (from CF).
    static BOOST_FORCEINLINE bool bit_test_and_reset(storage_type volatile& storage, unsigned int bit_number, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; btrl %[bit_number], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccc" (res)
            : [bit_number] "Kr" ((std::uint32_t)bit_number)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; btrl %[bit_number], %[storage]\n\t"
            "setc
%[result]\n\t" : [storage] "+m" (storage), [result] "=q" (res) : [bit_number] "Kr" ((std::uint32_t)bit_number) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); #endif BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return res; } static BOOST_FORCEINLINE bool bit_test_and_complement(storage_type volatile& storage, unsigned int bit_number, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); bool res; #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS) __asm__ __volatile__ ( "lock; btcl %[bit_number], %[storage]\n\t" : [storage] "+m" (storage), [result] "=@ccc" (res) : [bit_number] "Kr" ((std::uint32_t)bit_number) : "memory" ); #else __asm__ __volatile__ ( "lock; btcl %[bit_number], %[storage]\n\t" "setc %[result]\n\t" : [storage] "+m" (storage), [result] "=q" (res) : [bit_number] "Kr" ((std::uint32_t)bit_number) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); #endif BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return res; } }; #if defined(__x86_64__) template< typename Base, bool Signed > struct extra_operations< Base, 8u, Signed, true > : public extra_operations_generic< Base, 8u, Signed > { using base_type = extra_operations_generic< Base, 8u, Signed >; using storage_type = typename base_type::storage_type; #define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, original, result)\ __asm__ __volatile__\ (\ ".align 16\n\t"\ "1: mov %[orig], %[res]\n\t"\ op " %[res]\n\t"\ "lock; cmpxchgq %[res], %[storage]\n\t"\ "jne 1b"\ : [orig] "+a" (original), [storage] "+m" (storage), [res] "=&r" (result)\ : \ : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\ ) static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); storage_type original = storage; storage_type result; BOOST_ATOMIC_DETAIL_CAS_LOOP("negq", original, result); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return original; } static BOOST_FORCEINLINE storage_type 
fetch_complement(storage_type volatile& storage, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); storage_type original = storage; storage_type result; BOOST_ATOMIC_DETAIL_CAS_LOOP("notq", original, result); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return original; } static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); storage_type original = storage; storage_type result; BOOST_ATOMIC_DETAIL_CAS_LOOP("negq", original, result); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return result; } static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); storage_type original = storage; storage_type result; BOOST_ATOMIC_DETAIL_CAS_LOOP("notq", original, result); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return result; } #undef BOOST_ATOMIC_DETAIL_CAS_LOOP #define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, original, result)\ __asm__ __volatile__\ (\ ".align 16\n\t"\ "1: mov %[arg], %[res]\n\t"\ op " %%rax, %[res]\n\t"\ "lock; cmpxchgq %[res], %[storage]\n\t"\ "jne 1b"\ : [orig] "+a" (original), [storage] "+m" (storage), [res] "=&r" (result)\ : [arg] "r" (argument)\ : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\ ) static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); storage_type original = storage; storage_type result; BOOST_ATOMIC_DETAIL_CAS_LOOP("andq", v, original, result); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return static_cast< storage_type >(result); } static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); storage_type original = 
storage; storage_type result; BOOST_ATOMIC_DETAIL_CAS_LOOP("orq", v, original, result); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return static_cast< storage_type >(result); } static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); storage_type original = storage; storage_type result; BOOST_ATOMIC_DETAIL_CAS_LOOP("xorq", v, original, result); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return static_cast< storage_type >(result); } #undef BOOST_ATOMIC_DETAIL_CAS_LOOP static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) noexcept { return !!negate(storage, order); } static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) noexcept { return !!bitwise_complement(storage, order); } static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1) { __asm__ __volatile__ ( "lock; incq %[storage]\n\t" : [storage] "+m" (storage) : : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); } else { __asm__ __volatile__ ( "lock; addq %[argument], %[storage]\n\t" : [storage] "+m" (storage) : [argument] "er" (v) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); } BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); } static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1) { __asm__ __volatile__ ( "lock; decq %[storage]\n\t" : [storage] "+m" (storage) : : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); } else { __asm__ __volatile__ ( "lock; subq %[argument], %[storage]\n\t" : [storage] "+m" (storage) : [argument] "er" (v) : 
BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); } BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); } static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); __asm__ __volatile__ ( "lock; negq %[storage]\n\t" : [storage] "+m" (storage) : : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); } static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); __asm__ __volatile__ ( "lock; andq %[argument], %[storage]\n\t" : [storage] "+m" (storage) : [argument] "er" (v) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); } static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); __asm__ __volatile__ ( "lock; orq %[argument], %[storage]\n\t" : [storage] "+m" (storage) : [argument] "er" (v) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); } static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); __asm__ __volatile__ ( "lock; xorq %[argument], %[storage]\n\t" : [storage] "+m" (storage) : [argument] "er" (v) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); } static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); __asm__ __volatile__ ( "lock; notq %[storage]\n\t" : [storage] "+m" (storage) : : "memory" ); BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); } static BOOST_FORCEINLINE bool add_and_test(storage_type 
volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); bool res; #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS) if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1) { __asm__ __volatile__ ( "lock; incq %[storage]\n\t" : [storage] "+m" (storage), [result] "=@ccnz" (res) : : "memory" ); } else { __asm__ __volatile__ ( "lock; addq %[argument], %[storage]\n\t" : [storage] "+m" (storage), [result] "=@ccnz" (res) : [argument] "er" (v) : "memory" ); } #else if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1) { __asm__ __volatile__ ( "lock; incq %[storage]\n\t" "setnz %[result]\n\t" : [storage] "+m" (storage), [result] "=q" (res) : : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); } else { __asm__ __volatile__ ( "lock; addq %[argument], %[storage]\n\t" "setnz %[result]\n\t" : [storage] "+m" (storage), [result] "=q" (res) : [argument] "er" (v) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); } #endif BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return res; } static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); bool res; #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS) if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1) { __asm__ __volatile__ ( "lock; decq %[storage]\n\t" : [storage] "+m" (storage), [result] "=@ccnz" (res) : : "memory" ); } else { __asm__ __volatile__ ( "lock; subq %[argument], %[storage]\n\t" : [storage] "+m" (storage), [result] "=@ccnz" (res) : [argument] "er" (v) : "memory" ); } #else if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1) { __asm__ __volatile__ ( "lock; decq %[storage]\n\t" "setnz %[result]\n\t" : [storage] "+m" (storage), [result] "=q" (res) : : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); } else { __asm__ __volatile__ ( "lock; subq %[argument], %[storage]\n\t" "setnz %[result]\n\t" : [storage] "+m" (storage), [result] "=q" (res) : 
[argument] "er" (v) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); } #endif BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return res; } static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); bool res; #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS) __asm__ __volatile__ ( "lock; andq %[argument], %[storage]\n\t" : [storage] "+m" (storage), [result] "=@ccnz" (res) : [argument] "er" (v) : "memory" ); #else __asm__ __volatile__ ( "lock; andq %[argument], %[storage]\n\t" "setnz %[result]\n\t" : [storage] "+m" (storage), [result] "=q" (res) : [argument] "er" (v) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); #endif BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return res; } static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); bool res; #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS) __asm__ __volatile__ ( "lock; orq %[argument], %[storage]\n\t" : [storage] "+m" (storage), [result] "=@ccnz" (res) : [argument] "er" (v) : "memory" ); #else __asm__ __volatile__ ( "lock; orq %[argument], %[storage]\n\t" "setnz %[result]\n\t" : [storage] "+m" (storage), [result] "=q" (res) : [argument] "er" (v) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); #endif BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return res; } static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); bool res; #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS) __asm__ __volatile__ ( "lock; xorq %[argument], %[storage]\n\t" : [storage] "+m" (storage), [result] "=@ccnz" (res) : [argument] "er" (v) : "memory" ); #else __asm__ __volatile__ ( "lock; xorq %[argument], %[storage]\n\t" "setnz %[result]\n\t" : [storage] "+m" 
(storage), [result] "=q" (res) : [argument] "er" (v) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); #endif BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return res; } static BOOST_FORCEINLINE bool bit_test_and_set(storage_type volatile& storage, unsigned int bit_number, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); bool res; #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS) __asm__ __volatile__ ( "lock; btsq %[bit_number], %[storage]\n\t" : [storage] "+m" (storage), [result] "=@ccc" (res) : [bit_number] "Kr" ((std::uint64_t)bit_number) : "memory" ); #else __asm__ __volatile__ ( "lock; btsq %[bit_number], %[storage]\n\t" "setc %[result]\n\t" : [storage] "+m" (storage), [result] "=q" (res) : [bit_number] "Kr" ((std::uint64_t)bit_number) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); #endif BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return res; } static BOOST_FORCEINLINE bool bit_test_and_reset(storage_type volatile& storage, unsigned int bit_number, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); bool res; #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS) __asm__ __volatile__ ( "lock; btrq %[bit_number], %[storage]\n\t" : [storage] "+m" (storage), [result] "=@ccc" (res) : [bit_number] "Kr" ((std::uint64_t)bit_number) : "memory" ); #else __asm__ __volatile__ ( "lock; btrq %[bit_number], %[storage]\n\t" "setc %[result]\n\t" : [storage] "+m" (storage), [result] "=q" (res) : [bit_number] "Kr" ((std::uint64_t)bit_number) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); #endif BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return res; } static BOOST_FORCEINLINE bool bit_test_and_complement(storage_type volatile& storage, unsigned int bit_number, memory_order order) noexcept { BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); bool res; #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS) __asm__ __volatile__ ( "lock; btcq %[bit_number], %[storage]\n\t" : 
[storage] "+m" (storage), [result] "=@ccc" (res) : [bit_number] "Kr" ((std::uint64_t)bit_number) : "memory" ); #else __asm__ __volatile__ ( "lock; btcq %[bit_number], %[storage]\n\t" "setc %[result]\n\t" : [storage] "+m" (storage), [result] "=q" (res) : [bit_number] "Kr" ((std::uint64_t)bit_number) : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory" ); #endif BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order); return res; } }; #endif // defined(__x86_64__) } // namespace detail } // namespace atomics } // namespace boost #include #endif // BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_X86_HPP_INCLUDED_