/*-
 * Copyright (c) 2017 Taylor R. Campbell
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#ifndef UTILS_H
#define UTILS_H

#include <sys/atomic.h>		/* atomic_cas_ptr, membar_enter, membar_exit */

#include <assert.h>
#include <stdbool.h>
#include <stdlib.h>		/* abort */

#define	ASSERT		assert

/*
 * CAS-based compare-exchange: returns true if *ptr held old and was
 * atomically replaced by new.  Unlike the C11 operation of the same
 * name, old is not updated on failure.
 */
static inline bool
atomic_compare_exchange_weak(volatile void *ptr, void *old, void *new)
{
	return atomic_cas_ptr(ptr, old, new) == old;
}

enum memory_order {
	memory_order_acquire,
	memory_order_release,
};

/*
 * Map C11-style acquire/release fences onto the membar(3) operations.
 */
static inline void
atomic_thread_fence(enum memory_order mo)
{
	switch (mo) {
	case memory_order_acquire:
		membar_enter();
		break;
	case memory_order_release:
		membar_exit();
		break;
	default:
		abort();
	}
}

#define	CACHE_LINE_SIZE		64

/*
 * Exponential backoff for spin loops: issue `count' PAUSEs, then double
 * the count, bounded by SPINLOCK_BACKOFF_MAX.
 */
#define	SPINLOCK_BACKOFF_MIN	1
#define	SPINLOCK_BACKOFF_MAX	1000
#define	SPINLOCK_BACKOFF(count)						\
do {									\
	unsigned __i;							\
	for (__i = (count); __i != 0; __i--)				\
		asm volatile("pause");					\
	if ((count) < SPINLOCK_BACKOFF_MAX)				\
		(count) <<= 1;						\
} while (0)

#endif	/* UTILS_H */
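
/*
 * Usage sketch: a minimal spin lock built on the helpers above.  The
 * `struct spinlock' type, its `owner' field, and the function names are
 * hypothetical illustrations; only the backoff macros, the fence, and
 * the CAS wrapper come from this header.
 *
 *	struct spinlock { void *volatile owner; };
 *
 *	static void
 *	spinlock_acquire(struct spinlock *lock, void *self)
 *	{
 *		unsigned count = SPINLOCK_BACKOFF_MIN;
 *
 *		while (!atomic_compare_exchange_weak(&lock->owner, NULL, self))
 *			SPINLOCK_BACKOFF(count);
 *		atomic_thread_fence(memory_order_acquire);
 *	}
 *
 *	static void
 *	spinlock_release(struct spinlock *lock)
 *	{
 *		atomic_thread_fence(memory_order_release);
 *		lock->owner = NULL;
 *	}
 */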