Common byte and bitwise operations with optimised versions for various architectures.
Macros
#define BYTE_LOAD16_UNSIGNED_LE(ptr_)  (uint16)(((uint8 *)(ptr_))[0] | (((uint8 *)(ptr_))[1] << 8))
#define BYTE_LOAD16_UNSIGNED_BE(ptr_)  (uint16)(((uint8 *)(ptr_))[1] | (((uint8 *)(ptr_))[0] << 8))
#define BYTE_LOAD16_UNSIGNED_PLATFORM  BYTE_LOAD16_UNSIGNED_LE
#define BYTE_LOAD16_SIGNED_LE(ptr_)    (int16)BYTE_LOAD16_UNSIGNED_LE(ptr_)
#define BYTE_LOAD16_SIGNED_BE(ptr_)    (int16)BYTE_LOAD16_UNSIGNED_BE(ptr_)
#define BYTE_LOAD16_SIGNED_PLATFORM    BYTE_LOAD16_SIGNED_LE
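As an illustration of the 16-bit loads, a minimal sketch follows, assuming <stdint.h> equivalents for the project's uint8/uint16 typedefs (an assumption; the real typedefs are defined elsewhere in the codebase):

    #include <stdint.h>
    #include <stdio.h>

    typedef uint8_t  uint8;   /* assumed equivalents of the project typedefs */
    typedef uint16_t uint16;

    #define BYTE_LOAD16_UNSIGNED_LE(ptr_) (uint16)(((uint8 *)(ptr_))[0] | (((uint8 *)(ptr_))[1] << 8))
    #define BYTE_LOAD16_UNSIGNED_BE(ptr_) (uint16)(((uint8 *)(ptr_))[1] | (((uint8 *)(ptr_))[0] << 8))

    int main(void)
    {
      uint8 buf[2] = { 0x34, 0x12 };
      /* The same two bytes, read with either byte order. */
      printf("LE: 0x%04x\n", (unsigned)BYTE_LOAD16_UNSIGNED_LE(buf)); /* 0x1234 */
      printf("BE: 0x%04x\n", (unsigned)BYTE_LOAD16_UNSIGNED_BE(buf)); /* 0x3412 */
      return 0;
    }

The _PLATFORM aliases resolve to the LE forms in this listing, presumably reflecting the little-endian platform the documentation was generated for.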
#define BYTE_LOAD32_UNSIGNED_LE(ptr_)
#define BYTE_LOAD32_UNSIGNED_BE(ptr_)
#define BYTE_LOAD32_UNSIGNED_PLATFORM  BYTE_LOAD32_UNSIGNED_LE
#define BYTE_LOAD32_SIGNED_LE(ptr_)    (int32)BYTE_LOAD32_UNSIGNED_LE(ptr_)
#define BYTE_LOAD32_SIGNED_BE(ptr_)    (int32)BYTE_LOAD32_UNSIGNED_BE(ptr_)
#define BYTE_LOAD32_SIGNED_PLATFORM    BYTE_LOAD32_SIGNED_LE
#define BYTE_LOAD64_UNSIGNED_LE(ptr_)
#define BYTE_LOAD64_UNSIGNED_BE(ptr_)
#define BYTE_LOAD64_UNSIGNED_PLATFORM  BYTE_LOAD64_UNSIGNED_LE
#define BYTE_LOAD64_SIGNED_LE(ptr_)    (int64)BYTE_LOAD64_UNSIGNED_LE(ptr_)
#define BYTE_LOAD64_SIGNED_BE(ptr_)    (int64)BYTE_LOAD64_UNSIGNED_BE(ptr_)
#define BYTE_LOAD64_SIGNED_PLATFORM    BYTE_LOAD64_SIGNED_LE
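The 32- and 64-bit load bodies are elided in this listing. By analogy with the 16-bit forms they assemble the value a byte at a time; the sketch below shows the equivalent computation for a little-endian 32-bit load (a reconstruction, not the library's actual body):

    #include <stdint.h>
    #include <stdio.h>

    /* Reconstruction of what BYTE_LOAD32_UNSIGNED_LE presumably computes;
       the real macro body is elided in this listing. */
    static uint32_t load32_le(const void *p)
    {
      const uint8_t *b = (const uint8_t *)p;
      return (uint32_t)b[0] | ((uint32_t)b[1] << 8) |
             ((uint32_t)b[2] << 16) | ((uint32_t)b[3] << 24);
    }

    int main(void)
    {
      uint8_t buf[4] = { 0x78, 0x56, 0x34, 0x12 };
      printf("0x%08x\n", (unsigned)load32_le(buf)); /* 0x12345678 */
      return 0;
    }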
#define BYTE_STORE16_LE(ptr_, val_)
#define BYTE_STORE16_BE(ptr_, val_)
#define BYTE_STORE16_PLATFORM  BYTE_STORE16_LE
#define BYTE_STORE32_LE(ptr_, val_)
#define BYTE_STORE32_BE(ptr_, val_)
#define BYTE_STORE32_PLATFORM  BYTE_STORE32_LE
#define BYTE_STORE64_LE(ptr_, val_)
#define BYTE_STORE64_BE(ptr_, val_)
#define BYTE_STORE64_PLATFORM  BYTE_STORE64_LE
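The store bodies are likewise elided. A sketch of what a little-endian 16-bit store writes, as the mirror image of the corresponding load (again a reconstruction, not the library's body):

    #include <stdint.h>
    #include <stdio.h>

    /* Reconstruction of what BYTE_STORE16_LE presumably writes; the real
       macro body is elided in this listing. */
    static void store16_le(void *p, uint16_t v)
    {
      uint8_t *b = (uint8_t *)p;
      b[0] = (uint8_t)(v & 0xffu);  /* low byte first */
      b[1] = (uint8_t)(v >> 8);     /* then high byte */
    }

    int main(void)
    {
      uint8_t buf[2];
      store16_le(buf, 0x1234u);
      printf("0x%02x 0x%02x\n", buf[0], buf[1]); /* 0x34 0x12 */
      return 0;
    }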
#define MASK_BYTES_1(type_)          (type_)(~(type_)0 / 0xffu)
#define MASK_SHORTS_1(type_)         (type_)(~(type_)0 / 0xffffu)
#define MASK_WORDS_1(type_)          (type_)(~(type_)0 / 0xffffffffu)
#define MASK_ALTERNATE_BYTES(type_)  (type_)(MASK_SHORTS_1(type_) * 0xffu)
#define MASK_ALTERNATE_SHORTS(type_) (type_)(MASK_WORDS_1(type_) * 0xffffu)
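The mask macros exploit a division identity: for an unsigned type, ~0 divided by 0xff gives 0x01 repeated in every byte, and multiplying that pattern scales it into alternating-byte or alternating-short masks. A self-contained check:

    #include <stdint.h>
    #include <stdio.h>

    #define MASK_BYTES_1(type_)         (type_)(~(type_)0 / 0xffu)
    #define MASK_SHORTS_1(type_)        (type_)(~(type_)0 / 0xffffu)
    #define MASK_ALTERNATE_BYTES(type_) (type_)(MASK_SHORTS_1(type_) * 0xffu)

    int main(void)
    {
      printf("0x%08x\n", (unsigned)MASK_BYTES_1(uint32_t));         /* 0x01010101 */
      printf("0x%08x\n", (unsigned)MASK_ALTERNATE_BYTES(uint32_t)); /* 0x00ff00ff */
      printf("0x%016llx\n",
             (unsigned long long)MASK_BYTES_1(uint64_t));           /* 0x0101010101010101 */
      return 0;
    }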
#define BYTE_SWAP16_UNSIGNED(val_)  (uint16)((uint8)(val_) << 8 | (uint16)(val_) >> 8)
#define BYTE_SWAP16_SIGNED(val_)    (int16)BYTE_SWAP16_UNSIGNED(val_)
#define BYTE_SWAP16_PTR(ptr_)
#define BYTE_SWAP16_UNSIGNED_VAR(to_, from_)
#define BYTE_SWAP16_BUFFER(to_, from_, bytes_)
#define BYTE_SWAP32_UNSIGNED(val_)
#define BYTE_SWAP32_SIGNED(val_)    (int32)BYTE_SWAP32_UNSIGNED(val_)
#define BYTE_SWAP32_PTR(ptr_)
#define BYTE_SWAP32_UNSIGNED_VAR(to_, from_)
#define BYTE_SWAP32_BUFFER(to_, from_, bytes_)
#define BYTE_SWAP32_BUFFER_UNALIGNED(to_, from_, bytes_)
#define BYTE_SWAP64_UNSIGNED(val_)
#define BYTE_SWAP64_SIGNED(val_)    (int64)BYTE_SWAP64_UNSIGNED(val_)
#define BYTE_SWAP64_PTR(ptr_)
#define BYTE_SWAP64_UNSIGNED_VAR(to_, from_)
#define BYTE_SWAP64_BUFFER(to_, from_, bytes_)
#define BYTE_SWAP64_BUFFER_UNALIGNED(to_, from_, bytes_)
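The generic 16-bit swap is shown in full: the (uint8) cast isolates the low byte before it is shifted up, and the (uint16) cast brings the high byte down. A quick check, again assuming <stdint.h> equivalents for the project typedefs:

    #include <stdint.h>
    #include <stdio.h>

    typedef uint8_t  uint8;   /* assumed equivalents of the project typedefs */
    typedef uint16_t uint16;

    #define BYTE_SWAP16_UNSIGNED(val_) (uint16)((uint8)(val_) << 8 | (uint16)(val_) >> 8)

    int main(void)
    {
      printf("0x%04x\n", (unsigned)BYTE_SWAP16_UNSIGNED(0x1234)); /* 0x3412 */
      return 0;
    }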
Overrides using the MSVC byte-swap intrinsics:

#define BYTE_SWAP16_UNSIGNED(val_)  _byteswap_ushort((unsigned short)(val_))
#define BYTE_SWAP16_PTR(ptr_)       _swab((char *)(ptr_), (char *)(ptr_), 2)
#define BYTE_SWAP16_BUFFER(to_, from_, bytes_)  _swab((char *)(from_), (char *)(to_), (int)(bytes_))
#define BYTE_SWAP32_UNSIGNED(val_)  _byteswap_ulong((unsigned long)(val_))
#define BYTE_SWAP64_UNSIGNED(val_)  _byteswap_uint64((unsigned __int64)(val_))
Overrides using the GCC/Clang byte-swap builtins:

#define BYTE_SWAP16_UNSIGNED(val_)  __builtin_bswap16((unsigned short)(val_))
#define BYTE_SWAP32_UNSIGNED(val_)  __builtin_bswap32((unsigned long)(val_))
#define BYTE_SWAP64_UNSIGNED(val_)  __builtin_bswap64((unsigned long long)(val_))
#define BYTE_SWAP32_PTR(ptr_)
#define BYTE_SWAP32_BUFFER_UNALIGNED  BYTE_SWAP32_BUFFER
#define BYTE_SWAP64_PTR(ptr_)
#define BYTE_SWAP64_BUFFER_UNALIGNED  BYTE_SWAP64_BUFFER
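_byteswap_ushort, _byteswap_ulong, _byteswap_uint64 and _swab are MSVC CRT functions, while __builtin_bswap16/32/64 are GCC/Clang builtins, so these blocks presumably override the generic definitions under compiler-detection guards that this listing does not show. A sketch of how such selection is conventionally written; the guard conditions and the MY_BSWAP32 name are assumptions for illustration:

    /* Hypothetical guard structure; the header's real conditions are not
       shown in this listing. */
    #if defined(_MSC_VER)
    #  include <stdlib.h>  /* declares _byteswap_ulong */
    #  define MY_BSWAP32(v) _byteswap_ulong((unsigned long)(v))
    #elif defined(__GNUC__) || defined(__clang__)
    #  define MY_BSWAP32(v) __builtin_bswap32((unsigned int)(v))
    #else
       /* Generic shift/mask fallback when no intrinsic is available. */
    #  define MY_BSWAP32(v) \
         ((((unsigned int)(v) & 0x000000ffu) << 24) | \
          (((unsigned int)(v) & 0x0000ff00u) <<  8) | \
          (((unsigned int)(v) & 0x00ff0000u) >>  8) | \
          (((unsigned int)(v) & 0xff000000u) >> 24))
    #endif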
#define SIGN32_NEG(x)  -(int32)((uint32)((int32)(x)) >> 31)
#define SIGN64_NEG(x)  -(int64)((uint64)((int64)(x)) >> 63)
#define SIGN32(x)      (SIGN32_NEG(x) - SIGN32_NEG(-(x)))
#define SIGN64(x)      (SIGN64_NEG(x) - SIGN64_NEG(-(x)))

Simpler overrides for platforms where right-shifting a signed value is arithmetic, so the shift itself replicates the sign bit:

#define SIGN32_NEG(x)  ((int32)(x) >> 31)
#define SIGN64_NEG(x)  ((int64)(x) >> 63)
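SIGN32_NEG yields -1 for negative inputs and 0 otherwise; SIGN32 yields the familiar three-way sign of -1, 0 or 1. A check of the portable definitions, assuming <stdint.h> equivalents for the project typedefs:

    #include <stdint.h>
    #include <stdio.h>

    typedef uint32_t uint32;  /* assumed equivalents of the project typedefs */
    typedef int32_t  int32;

    #define SIGN32_NEG(x) -(int32)((uint32)((int32)(x)) >> 31)
    #define SIGN32(x)     (SIGN32_NEG(x) - SIGN32_NEG(-(x)))

    int main(void)
    {
      printf("%d %d %d\n", SIGN32(-7), SIGN32(0), SIGN32(42)); /* -1 0 1 */
      printf("%d %d\n", SIGN32_NEG(-7), SIGN32_NEG(7));        /* -1 0 */
      return 0;
    }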
#define BIT_ROTATE32_LEFT(to_, from_, shift_)
#define BIT_ROTATE32_RIGHT(to_, from_, shift_)
#define BIT_ROTATE32_LEFT_SAFE(to_, from_, shift_)
#define BIT_ROTATE32_RIGHT_SAFE(to_, from_, shift_)
#define BIT_ROTATE64_LEFT(to_, from_, shift_)
#define BIT_ROTATE64_RIGHT(to_, from_, shift_)
#define BIT_ROTATE64_LEFT_SAFE(to_, from_, shift_)
#define BIT_ROTATE64_RIGHT_SAFE(to_, from_, shift_)

A second, presumably optimised, set of definitions for the same rotations follows (bodies elided in this listing):

#define BIT_ROTATE32_LEFT(to_, from_, shift_)
#define BIT_ROTATE32_LEFT_SAFE(to_, from_, shift_)
#define BIT_ROTATE32_RIGHT(to_, from_, shift_)
#define BIT_ROTATE32_RIGHT_SAFE(to_, from_, shift_)
#define BIT_ROTATE64_LEFT(to_, from_, shift_)
#define BIT_ROTATE64_LEFT_SAFE(to_, from_, shift_)
#define BIT_ROTATE64_RIGHT(to_, from_, shift_)
#define BIT_ROTATE64_RIGHT_SAFE(to_, from_, shift_)
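The rotate bodies are elided. The sketch below shows the conventional shift-safe formulation of a 32-bit left rotate; masking the counts avoids the undefined full-width shift, which is presumably the guarantee the _SAFE variants add for out-of-range counts (a reconstruction, not the library's body):

    #include <stdint.h>
    #include <stdio.h>

    /* Conventional 32-bit left rotate; masking keeps both shift counts in
       0..31 so no undefined full-width shift can occur. */
    static uint32_t rot32_left(uint32_t x, unsigned s)
    {
      s &= 31u;
      return (x << s) | (x >> ((32u - s) & 31u));
    }

    int main(void)
    {
      printf("0x%08x\n", (unsigned)rot32_left(0x80000001u, 1)); /* 0x00000003 */
      printf("0x%08x\n", (unsigned)rot32_left(0x12345678u, 0)); /* unchanged */
      return 0;
    }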
#define BIT_SHIFT32_SIGNED_RIGHT_EXPR(val_, shift_)  ((int32)((uint32)(val_) >> (shift_)) | (SIGN32_NEG(val_) << (31 ^ (shift_))))
#define BIT_SHIFT32_SIGNED_RIGHT(to_, from_, shift_)
#define BIT_SHIFT64_SIGNED_RIGHT_EXPR(val_, shift_)  ((int64)((uint64)(val_) >> (shift_)) | (SIGN64_NEG(val_) << (63 ^ (shift_))))
#define BIT_SHIFT64_SIGNED_RIGHT(to_, from_, shift_)

Overrides for platforms where right-shifting a signed value is arithmetic:

#define BIT_SHIFT32_SIGNED_RIGHT_EXPR(val_, shift_)  ((int32)(val_) >> (shift_))
#define BIT_SHIFT64_SIGNED_RIGHT_EXPR(val_, shift_)  ((int64)(val_) >> (shift_))
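BIT_SHIFT32_SIGNED_RIGHT_EXPR builds an arithmetic right shift from an unsigned shift plus replicated sign bits: for counts in 0..31, 31 ^ shift_ equals 31 - shift_, so the sign word lands in exactly the vacated positions. A check of the portable form, using the macro as given (its left shift of a negative sign word relies on the usual two's-complement behaviour):

    #include <stdint.h>
    #include <stdio.h>

    typedef uint32_t uint32;  /* assumed equivalents of the project typedefs */
    typedef int32_t  int32;

    #define SIGN32_NEG(x) -(int32)((uint32)((int32)(x)) >> 31)
    #define BIT_SHIFT32_SIGNED_RIGHT_EXPR(val_, shift_) \
      ((int32)((uint32)(val_) >> (shift_)) | (SIGN32_NEG(val_) << (31 ^ (shift_))))

    int main(void)
    {
      printf("%d\n", BIT_SHIFT32_SIGNED_RIGHT_EXPR(-8, 2));  /* -2 */
      printf("%d\n", BIT_SHIFT32_SIGNED_RIGHT_EXPR(100, 2)); /* 25 */
      return 0;
    }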
#define INLINE_MIN32(min_, a_, b_)
#define INLINE_MAX32(max_, a_, b_)
#define INLINE_MINMAX32(min_, max_, a_, b_)
#define INLINE_ABS32(abs_, val_)
#define INLINE_RANGE32(to_, from_, min_, max_)
#define INLINE_RANGE32_0(to_, from_, max_)
#define INLINE_MIN64(min_, a_, b_)
#define INLINE_MAX64(max_, a_, b_)
#define INLINE_MINMAX64(min_, max_, a_, b_)
#define INLINE_ABS64(abs_, val_)
#define INLINE_RANGE64(to_, from_, min_, max_)
#define INLINE_RANGE64_0(to_, from_, max_)
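The INLINE_* bodies are elided; the argument shape suggests statement macros that assign into their leading output parameter(s), e.g. INLINE_MIN32(lo, a, b) leaving the smaller of a and b in lo. For flavour, a conventional branchless 32-bit min of the kind such macros often expand to (an illustration only, not necessarily the library's implementation):

    #include <stdint.h>
    #include <stdio.h>

    /* Conventional branchless min: when a < b the mask is all ones and the
       XOR chain selects a; otherwise the mask is zero and b survives. */
    static int32_t branchless_min32(int32_t a, int32_t b)
    {
      int32_t mask = -(int32_t)(a < b);
      return b ^ ((a ^ b) & mask);
    }

    int main(void)
    {
      printf("%d\n", branchless_min32(3, 7));  /* 3 */
      printf("%d\n", branchless_min32(7, -3)); /* -3 */
      return 0;
    }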
#define BIT_LOWEST_SET_32(_n_)       ((uint32)(_n_) & -(int32)(_n_))
#define BIT_LOWEST_CLEAR_32(_n_)     BIT_LOWEST_SET_32(~(uint32)(_n_))
#define BIT_AT_MOST_ONE_SET_32(_n_)  ((_n_) == BIT_LOWEST_SET_32(_n_))
#define BIT_EXACTLY_ONE_SET_32(_n_)  ((_n_) != 0 && BIT_AT_MOST_ONE_SET_32(_n_))
#define BIT_LOWEST_SET_64(_n_)       ((uint64)(_n_) & -(int64)(_n_))
#define BIT_LOWEST_CLEAR_64(_n_)     BIT_LOWEST_SET_64(~(uint64)(_n_))
#define BIT_AT_MOST_ONE_SET_64(_n_)  ((_n_) == BIT_LOWEST_SET_64(_n_))
#define BIT_EXACTLY_ONE_SET_64(_n_)  ((_n_) != 0 && BIT_AT_MOST_ONE_SET_64(_n_))
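These use the two's-complement identity that n & -n isolates the lowest set bit. A check of the 32-bit forms, assuming <stdint.h> equivalents for the project typedefs:

    #include <stdint.h>
    #include <stdio.h>

    typedef uint32_t uint32;  /* assumed equivalents of the project typedefs */
    typedef int32_t  int32;

    #define BIT_LOWEST_SET_32(_n_)      ((uint32)(_n_) & -(int32)(_n_))
    #define BIT_LOWEST_CLEAR_32(_n_)    BIT_LOWEST_SET_32(~(uint32)(_n_))
    #define BIT_AT_MOST_ONE_SET_32(_n_) ((_n_) == BIT_LOWEST_SET_32(_n_))
    #define BIT_EXACTLY_ONE_SET_32(_n_) ((_n_) != 0 && BIT_AT_MOST_ONE_SET_32(_n_))

    int main(void)
    {
      printf("0x%x\n", (unsigned)BIT_LOWEST_SET_32(0x2cu));   /* 0x4: 0b101100 -> 0b100 */
      printf("0x%x\n", (unsigned)BIT_LOWEST_CLEAR_32(0x2cu)); /* 0x1: bit 0 is clear */
      printf("%d\n", BIT_EXACTLY_ONE_SET_32(0x40u));          /* 1 */
      printf("%d\n", BIT_EXACTLY_ONE_SET_32(0x42u));          /* 0 */
      return 0;
    }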
#define BIT_COUNT_TRAILING_ZEROS_32(n_, i_)
#define BIT_LOWEST_SET_INDEX_32(n_, i_)
#define BIT_COUNT_LEADING_ZEROS_32(n_, i_)
#define BIT_HIGHEST_SET_INDEX_32(n_, i_)
#define BIT_COUNT_TRAILING_ZEROS_64(n_, i_)
#define BIT_LOWEST_SET_INDEX_64(n_, i_)
#define BIT_COUNT_LEADING_ZEROS_64(n_, i_)
#define BIT_HIGHEST_SET_INDEX_64(n_, i_)

Two further sets of definitions override these, presumably where compiler bit-scan intrinsics are available (bodies elided in this listing); the second set does not redefine the HIGHEST_SET_INDEX macros:

#define BIT_COUNT_TRAILING_ZEROS_32(n_, i_)
#define BIT_LOWEST_SET_INDEX_32(n_, i_)
#define BIT_COUNT_LEADING_ZEROS_32(n_, i_)
#define BIT_HIGHEST_SET_INDEX_32(n_, i_)
#define BIT_COUNT_TRAILING_ZEROS_64(n_, i_)
#define BIT_LOWEST_SET_INDEX_64(n_, i_)
#define BIT_COUNT_LEADING_ZEROS_64(n_, i_)
#define BIT_HIGHEST_SET_INDEX_64(n_, i_)

#define BIT_COUNT_TRAILING_ZEROS_32(n_, i_)
#define BIT_LOWEST_SET_INDEX_32(n_, i_)
#define BIT_COUNT_LEADING_ZEROS_32(n_, i_)
#define BIT_COUNT_TRAILING_ZEROS_64(n_, i_)
#define BIT_LOWEST_SET_INDEX_64(n_, i_)
#define BIT_COUNT_LEADING_ZEROS_64(n_, i_)
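Bodies for all of these sets are elided. The two-argument (n_, i_) shape suggests each macro writes its result into i_. A portable sketch of the trailing-zero count these macros compute (a reconstruction, not the library's body):

    #include <stdint.h>
    #include <stdio.h>

    /* Portable sketch of what BIT_COUNT_TRAILING_ZEROS_32 presumably
       computes; the library's generic and intrinsic bodies are elided
       in this listing. */
    static int count_trailing_zeros_32(uint32_t n)
    {
      int i = 0;
      if (n == 0)
        return 32;             /* convention assumed for a zero input */
      while ((n & 1u) == 0) {
        n >>= 1;
        ++i;
      }
      return i;
    }

    int main(void)
    {
      printf("%d\n", count_trailing_zeros_32(0x1000u)); /* 12 */
      printf("%d\n", count_trailing_zeros_32(1u));      /* 0 */
      return 0;
    }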
Copyright (C) 2023 Global Graphics Software Ltd. All rights reserved. This source code contains the confidential and trade secret information of Global Graphics Software Ltd. It may not be used, copied or distributed for any reason except as set forth in the applicable Global Graphics license agreement.