| #define | BIT(b) (1<<(b)) |
| |
| #define | BIT32(b) ((NvU32)1<<(b)) |
| |
| #define | BIT64(b) ((NvU64)1<<(b)) |
| |
| #define | NVBIT(b) (1<<(b)) |
| |
| #define | NVBIT32(b) ((NvU32)1<<(b)) |
| |
| #define | NVBIT64(b) ((NvU64)1<<(b)) |
| |
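The width-specific forms exist because plain `1<<(b)` is a signed `int` shift. Below is a minimal, self-contained sketch of the difference; `NvU32`/`NvU64` are replaced with `stdint.h` typedefs here, which is an assumption about what `nvtypes.h` provides.

```c
#include <stdint.h>
#include <stdio.h>

typedef uint32_t NvU32;   /* local stand-ins for the nvtypes.h definitions */
typedef uint64_t NvU64;

/* Copied from the listing. */
#define NVBIT(b)    (1<<(b))          /* int-width: fine for b < 31           */
#define NVBIT32(b)  ((NvU32)1<<(b))   /* unsigned 32-bit single-bit mask      */
#define NVBIT64(b)  ((NvU64)1<<(b))   /* required once b can reach 32..63     */

int main(void)
{
    printf("NVBIT32(5)  = 0x%08x\n", NVBIT32(5));    /* 0x00000020                      */
    printf("NVBIT32(31) = 0x%08x\n", NVBIT32(31));   /* 0x80000000, no signed overflow  */
    printf("NVBIT64(40) = 0x%016llx\n",
           (unsigned long long)NVBIT64(40));         /* bit 40 needs the 64-bit form    */
    return 0;
}
```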
| #define | BIT_IDX_32(n) |
| |
| #define | BIT_IDX_64(n) |
| |
| #define | IDX_32(n32) |
| |
| #define | DRF_ISBIT(bitval, drf) |
| |
| #define | DEVICE_BASE(d) (0?d) |
| |
| #define | DEVICE_EXTENT(d) (1?d) |
| |
| #define | DRF_BASE(drf) (0?drf) |
| |
| #define | DRF_EXTENT(drf) (1?drf) |
| |
| #define | DRF_SHIFT(drf) ((DRF_ISBIT(0,drf)) % 32) |
| |
| #define | DRF_SHIFT_RT(drf) ((DRF_ISBIT(1,drf)) % 32) |
| |
| #define | DRF_MASK(drf) (0xFFFFFFFF>>(31-((DRF_ISBIT(1,drf)) % 32)+((DRF_ISBIT(0,drf)) % 32))) |
| |
| #define | DRF_SHIFTMASK(drf) (DRF_MASK(drf)<<(DRF_SHIFT(drf))) |
| |
| #define | DRF_SIZE(drf) (DRF_EXTENT(drf)-DRF_BASE(drf)+1) |
| |
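A `drf` argument is the literal bit-range token `high:low`, so `(0?drf)` and `(1?drf)` resolve to the low and high bit via the conditional operator. The sketch below demonstrates this with a hypothetical field `NV_PHYPO_CTRL_MODE` (not a real manual define); because the body of `DRF_ISBIT` is not reproduced above, `DRF_SHIFT`/`DRF_MASK` are rewritten here in terms of `DRF_BASE`/`DRF_EXTENT`, which is assumed to be equivalent.

```c
#include <stdio.h>

/* Hypothetical 4-bit field spanning bits 11..8 of a register (illustration only). */
#define NV_PHYPO_CTRL_MODE  11:8

/* Copied from the listing: (0?hi:lo) picks the low bit, (1?hi:lo) the high bit. */
#define DRF_BASE(drf)    (0?drf)
#define DRF_EXTENT(drf)  (1?drf)
#define DRF_SIZE(drf)    (DRF_EXTENT(drf)-DRF_BASE(drf)+1)

/* Simplified stand-ins for DRF_SHIFT/DRF_MASK (the listing routes them through
 * DRF_ISBIT, whose body is not shown above). */
#define DRF_SHIFT(drf)   ((DRF_BASE(drf)) % 32)
#define DRF_MASK(drf)    (0xFFFFFFFFU>>(31-((DRF_EXTENT(drf)) % 32)+((DRF_BASE(drf)) % 32)))

int main(void)
{
    printf("shift = %d\n",  DRF_SHIFT(NV_PHYPO_CTRL_MODE));  /* 8   */
    printf("mask  = 0x%x\n", DRF_MASK(NV_PHYPO_CTRL_MODE));  /* 0xf */
    printf("size  = %d\n",  DRF_SIZE(NV_PHYPO_CTRL_MODE));   /* 4   */
    return 0;
}
```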
| #define | DRF_DEF(d, r, f, c) ((NV ## d ## r ## f ## c)<<DRF_SHIFT(NV ## d ## r ## f)) |
| |
| #define | DRF_NUM(d, r, f, n) (((n)&DRF_MASK(NV ## d ## r ## f))<<DRF_SHIFT(NV ## d ## r ## f)) |
| |
| #define | DRF_VAL(d, r, f, v) (((v)>>DRF_SHIFT(NV ## d ## r ## f))&DRF_MASK(NV ## d ## r ## f)) |
| |
| #define | DRF_VAL_SIGNED(d, r, f, v) (((DRF_VAL(d,r,f,v) ^ (NVBIT(DRF_SIZE(NV ## d ## r ## f)-1)))) - (NVBIT(DRF_SIZE(NV ## d ## r ## f)-1))) |
| |
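A sketch of packing and unpacking fields with `DRF_DEF`, `DRF_NUM` and `DRF_VAL`. The device/register/field defines (`NV_PHYPO_CTRL_*`) are hypothetical, the shift/mask helpers are the simplified stand-ins from the previous sketch, and the three macros under test are copied from the listing.

```c
#include <stdio.h>

/* Hypothetical manual defines (illustration only): a 4-bit MODE field at 11:8
 * with a named value _FAST, and an 8-bit COUNT field at 7:0. */
#define NV_PHYPO_CTRL_MODE        11:8
#define NV_PHYPO_CTRL_MODE_FAST   0x00000002
#define NV_PHYPO_CTRL_COUNT       7:0

/* Simplified stand-ins (DRF_ISBIT elided above). */
#define DRF_BASE(drf)    (0?drf)
#define DRF_EXTENT(drf)  (1?drf)
#define DRF_SHIFT(drf)   ((DRF_BASE(drf)) % 32)
#define DRF_MASK(drf)    (0xFFFFFFFFU>>(31-((DRF_EXTENT(drf)) % 32)+((DRF_BASE(drf)) % 32)))

/* Copied from the listing: build a field from a named value or a number,
 * and extract a field from a register value. */
#define DRF_DEF(d, r, f, c) ((NV ## d ## r ## f ## c)<<DRF_SHIFT(NV ## d ## r ## f))
#define DRF_NUM(d, r, f, n) (((n)&DRF_MASK(NV ## d ## r ## f))<<DRF_SHIFT(NV ## d ## r ## f))
#define DRF_VAL(d, r, f, v) (((v)>>DRF_SHIFT(NV ## d ## r ## f))&DRF_MASK(NV ## d ## r ## f))

int main(void)
{
    /* Compose a register value: MODE = _FAST, COUNT = 37. */
    unsigned int reg = DRF_DEF(_PHYPO, _CTRL, _MODE, _FAST) |
                       DRF_NUM(_PHYPO, _CTRL, _COUNT, 37);

    printf("reg   = 0x%08x\n", reg);                              /* 0x00000225 */
    printf("MODE  = %u\n", DRF_VAL(_PHYPO, _CTRL, _MODE, reg));   /* 2          */
    printf("COUNT = %u\n", DRF_VAL(_PHYPO, _CTRL, _COUNT, reg));  /* 37         */
    return 0;
}
```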
| #define | DRF_IDX_DEF(d, r, f, i, c) ((NV ## d ## r ## f ## c)<<DRF_SHIFT(NV##d##r##f(i))) |
| |
| #define | DRF_IDX_OFFSET_DEF(d, r, f, i, o, c) ((NV ## d ## r ## f ## c)<<DRF_SHIFT(NV##d##r##f(i,o))) |
| |
| #define | DRF_IDX_NUM(d, r, f, i, n) (((n)&DRF_MASK(NV##d##r##f(i)))<<DRF_SHIFT(NV##d##r##f(i))) |
| |
| #define | DRF_IDX_VAL(d, r, f, i, v) (((v)>>DRF_SHIFT(NV##d##r##f(i)))&DRF_MASK(NV##d##r##f(i))) |
| |
| #define | DRF_IDX_OFFSET_VAL(d, r, f, i, o, v) (((v)>>DRF_SHIFT(NV##d##r##f(i,o)))&DRF_MASK(NV##d##r##f(i,o))) |
| |
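The `_IDX_` forms forward an index to a function-like field macro before computing the shift and mask. Below is a sketch with a hypothetical indexed field (4-bit slots packed into one register); the helpers are the same simplified stand-ins used above.

```c
#include <stdio.h>

/* Hypothetical indexed field (illustration only): entry i of a packed table,
 * a 4-bit slot at bits (i*4+3):(i*4) of one 32-bit register. */
#define NV_PHYPO_TABLE_ENTRY(i)   ((i)*4+3):((i)*4)

/* Simplified stand-ins (DRF_ISBIT elided above). */
#define DRF_BASE(drf)    (0?drf)
#define DRF_EXTENT(drf)  (1?drf)
#define DRF_SHIFT(drf)   ((DRF_BASE(drf)) % 32)
#define DRF_MASK(drf)    (0xFFFFFFFFU>>(31-((DRF_EXTENT(drf)) % 32)+((DRF_BASE(drf)) % 32)))

/* Copied from the listing: the _IDX_ forms simply apply the index to the field
 * macro and then reuse the ordinary shift/mask machinery. */
#define DRF_IDX_NUM(d, r, f, i, n) (((n)&DRF_MASK(NV##d##r##f(i)))<<DRF_SHIFT(NV##d##r##f(i)))
#define DRF_IDX_VAL(d, r, f, i, v) (((v)>>DRF_SHIFT(NV##d##r##f(i)))&DRF_MASK(NV##d##r##f(i)))

int main(void)
{
    unsigned int reg = 0;

    /* Pack 0x5 into entry 2 (bits 11:8) and 0xA into entry 0 (bits 3:0). */
    reg |= DRF_IDX_NUM(_PHYPO, _TABLE, _ENTRY, 2, 0x5);
    reg |= DRF_IDX_NUM(_PHYPO, _TABLE, _ENTRY, 0, 0xA);

    printf("reg      = 0x%08x\n", reg);                                       /* 0x0000050a */
    printf("entry[2] = 0x%x\n", DRF_IDX_VAL(_PHYPO, _TABLE, _ENTRY, 2, reg)); /* 0x5        */
    return 0;
}
```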
| #define | DRF_VAL_FRAC(d, r, x, y, v, z) ((DRF_VAL(d,r,x,v)*z) + ((DRF_VAL(d,r,y,v)*z) / (1<<DRF_SIZE(NV##d##r##y)))) |
| |
| #define | DRF_SHIFT64(drf) ((DRF_ISBIT(0,drf)) % 64) |
| |
| #define | DRF_MASK64(drf) (NV_U64_MAX>>(63-((DRF_ISBIT(1,drf)) % 64)+((DRF_ISBIT(0,drf)) % 64))) |
| |
| #define | DRF_SHIFTMASK64(drf) (DRF_MASK64(drf)<<(DRF_SHIFT64(drf))) |
| |
| #define | DRF_DEF64(d, r, f, c) (((NvU64)(NV ## d ## r ## f ## c))<<DRF_SHIFT64(NV ## d ## r ## f)) |
| |
| #define | DRF_NUM64(d, r, f, n) ((((NvU64)(n))&DRF_MASK64(NV ## d ## r ## f))<<DRF_SHIFT64(NV ## d ## r ## f)) |
| |
| #define | DRF_VAL64(d, r, f, v) ((((NvU64)(v))>>DRF_SHIFT64(NV ## d ## r ## f))&DRF_MASK64(NV ## d ## r ## f)) |
| |
| #define | DRF_VAL_SIGNED64(d, r, f, v) (((DRF_VAL64(d,r,f,v) ^ (NVBIT64(DRF_SIZE(NV ## d ## r ## f)-1)))) - (NVBIT64(DRF_SIZE(NV ## d ## r ## f)-1))) |
| |
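The `*64` variants are needed once a field can sit above bit 31. A sketch with a hypothetical `NV_PHYPO_ADDR_HI` field at 47:32; the `NvU64` typedef and `NV_U64_MAX` value are local stand-ins for the nvtypes.h/nvmisc.h definitions, and the shift/mask helpers are simplified as before.

```c
#include <stdint.h>
#include <stdio.h>

typedef uint64_t NvU64;                   /* stand-in for the nvtypes.h definition */
#define NV_U64_MAX 0xFFFFFFFFFFFFFFFFULL  /* stand-in for the nvmisc.h constant    */

/* Hypothetical field (illustration only): a 16-bit field at bits 47:32. */
#define NV_PHYPO_ADDR_HI   47:32

/* Simplified stand-ins (DRF_ISBIT elided above). */
#define DRF_BASE(drf)       (0?drf)
#define DRF_EXTENT(drf)     (1?drf)
#define DRF_SHIFT64(drf)    ((DRF_BASE(drf)) % 64)
#define DRF_MASK64(drf)     (NV_U64_MAX>>(63-((DRF_EXTENT(drf)) % 64)+((DRF_BASE(drf)) % 64)))

/* Copied from the listing. */
#define DRF_NUM64(d, r, f, n) ((((NvU64)(n))&DRF_MASK64(NV ## d ## r ## f))<<DRF_SHIFT64(NV ## d ## r ## f))
#define DRF_VAL64(d, r, f, v) ((((NvU64)(v))>>DRF_SHIFT64(NV ## d ## r ## f))&DRF_MASK64(NV ## d ## r ## f))

int main(void)
{
    /* The 32-bit DRF_* forms cannot reach bits above 31; the *64 variants can. */
    NvU64 v = DRF_NUM64(_PHYPO, _ADDR, _HI, 0xBEEF);

    printf("v  = 0x%016llx\n", (unsigned long long)v);                              /* 0x0000beef00000000 */
    printf("HI = 0x%llx\n", (unsigned long long)DRF_VAL64(_PHYPO, _ADDR, _HI, v));  /* 0xbeef             */
    return 0;
}
```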
| #define | DRF_IDX_DEF64(d, r, f, i, c) (((NvU64)(NV ## d ## r ## f ## c))<<DRF_SHIFT64(NV##d##r##f(i))) |
| |
| #define | DRF_IDX_OFFSET_DEF64(d, r, f, i, o, c) ((NvU64)(NV ## d ## r ## f ## c)<<DRF_SHIFT64(NV##d##r##f(i,o))) |
| |
| #define | DRF_IDX_NUM64(d, r, f, i, n) ((((NvU64)(n))&DRF_MASK64(NV##d##r##f(i)))<<DRF_SHIFT64(NV##d##r##f(i))) |
| |
| #define | DRF_IDX_VAL64(d, r, f, i, v) ((((NvU64)(v))>>DRF_SHIFT64(NV##d##r##f(i)))&DRF_MASK64(NV##d##r##f(i))) |
| |
| #define | DRF_IDX_OFFSET_VAL64(d, r, f, i, o, v) (((NvU64)(v)>>DRF_SHIFT64(NV##d##r##f(i,o)))&DRF_MASK64(NV##d##r##f(i,o))) |
| |
| #define | FLD_SET_DRF64(d, r, f, c, v) (((NvU64)(v) & ~DRF_SHIFTMASK64(NV##d##r##f)) | DRF_DEF64(d,r,f,c)) |
| |
| #define | FLD_SET_DRF_NUM64(d, r, f, n, v) ((((NvU64)(v)) & ~DRF_SHIFTMASK64(NV##d##r##f)) | DRF_NUM64(d,r,f,n)) |
| |
| #define | FLD_IDX_SET_DRF64(d, r, f, i, c, v) (((NvU64)(v) & ~DRF_SHIFTMASK64(NV##d##r##f(i))) | DRF_IDX_DEF64(d,r,f,i,c)) |
| |
| #define | FLD_IDX_OFFSET_SET_DRF64(d, r, f, i, o, c, v) (((NvU64)(v) & ~DRF_SHIFTMASK64(NV##d##r##f(i,o))) | DRF_IDX_OFFSET_DEF64(d,r,f,i,o,c)) |
| |
| #define | FLD_IDX_SET_DRF_DEF64(d, r, f, i, c, v) (((NvU64)(v) & ~DRF_SHIFTMASK64(NV##d##r##f(i))) | DRF_IDX_DEF64(d,r,f,i,c)) |
| |
| #define | FLD_IDX_SET_DRF_NUM64(d, r, f, i, n, v) (((NvU64)(v) & ~DRF_SHIFTMASK64(NV##d##r##f(i))) | DRF_IDX_NUM64(d,r,f,i,n)) |
| |
| #define | FLD_SET_DRF_IDX64(d, r, f, c, i, v) (((NvU64)(v) & ~DRF_SHIFTMASK64(NV##d##r##f)) | DRF_DEF64(d,r,f,c(i))) |
| |
| #define | FLD_TEST_DRF64(d, r, f, c, v) (DRF_VAL64(d, r, f, v) == NV##d##r##f##c) |
| |
| #define | FLD_TEST_DRF_AND64(d, r, f, c, v) (DRF_VAL64(d, r, f, v) & NV##d##r##f##c) |
| |
| #define | FLD_TEST_DRF_NUM64(d, r, f, n, v) (DRF_VAL64(d, r, f, v) == n) |
| |
| #define | FLD_IDX_TEST_DRF64(d, r, f, i, c, v) (DRF_IDX_VAL64(d, r, f, i, v) == NV##d##r##f##c) |
| |
| #define | FLD_IDX_OFFSET_TEST_DRF64(d, r, f, i, o, c, v) (DRF_IDX_OFFSET_VAL64(d, r, f, i, o, v) == NV##d##r##f##c) |
| |
| #define | FLD_SET_DRF(d, r, f, c, v) ((v & ~DRF_SHIFTMASK(NV##d##r##f)) | DRF_DEF(d,r,f,c)) |
| |
| #define | FLD_SET_DRF_NUM(d, r, f, n, v) ((v & ~DRF_SHIFTMASK(NV##d##r##f)) | DRF_NUM(d,r,f,n)) |
| |
| #define | FLD_SET_DRF_DEF(d, r, f, c, v) (v = (v & ~DRF_SHIFTMASK(NV##d##r##f)) | DRF_DEF(d,r,f,c)) |
| |
| #define | FLD_IDX_SET_DRF(d, r, f, i, c, v) ((v & ~DRF_SHIFTMASK(NV##d##r##f(i))) | DRF_IDX_DEF(d,r,f,i,c)) |
| |
| #define | FLD_IDX_OFFSET_SET_DRF(d, r, f, i, o, c, v) ((v & ~DRF_SHIFTMASK(NV##d##r##f(i,o))) | DRF_IDX_OFFSET_DEF(d,r,f,i,o,c)) |
| |
| #define | FLD_IDX_SET_DRF_DEF(d, r, f, i, c, v) ((v & ~DRF_SHIFTMASK(NV##d##r##f(i))) | DRF_IDX_DEF(d,r,f,i,c)) |
| |
| #define | FLD_IDX_SET_DRF_NUM(d, r, f, i, n, v) ((v & ~DRF_SHIFTMASK(NV##d##r##f(i))) | DRF_IDX_NUM(d,r,f,i,n)) |
| |
| #define | FLD_SET_DRF_IDX(d, r, f, c, i, v) ((v & ~DRF_SHIFTMASK(NV##d##r##f)) | DRF_DEF(d,r,f,c(i))) |
| |
| #define | FLD_TEST_DRF(d, r, f, c, v) ((DRF_VAL(d, r, f, v) == NV##d##r##f##c)) |
| |
| #define | FLD_TEST_DRF_AND(d, r, f, c, v) ((DRF_VAL(d, r, f, v) & NV##d##r##f##c)) |
| |
| #define | FLD_TEST_DRF_NUM(d, r, f, n, v) ((DRF_VAL(d, r, f, v) == n)) |
| |
| #define | FLD_IDX_TEST_DRF(d, r, f, i, c, v) ((DRF_IDX_VAL(d, r, f, i, v) == NV##d##r##f##c)) |
| |
| #define | FLD_IDX_OFFSET_TEST_DRF(d, r, f, i, o, c, v) ((DRF_IDX_OFFSET_VAL(d, r, f, i, o, v) == NV##d##r##f##c)) |
| |
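The `FLD_SET_DRF*` forms perform a read-modify-write of a single field while leaving the other bits of `v` untouched, and `FLD_TEST_DRF*` compares an extracted field against a named value. A sketch with hypothetical `NV_PHYPO_CTRL_*` defines and the simplified helpers used above:

```c
#include <stdio.h>

/* Hypothetical fields (illustration only). */
#define NV_PHYPO_CTRL_MODE        11:8
#define NV_PHYPO_CTRL_MODE_FAST   0x00000002
#define NV_PHYPO_CTRL_COUNT       7:0

/* Simplified stand-ins (DRF_ISBIT elided above). */
#define DRF_BASE(drf)        (0?drf)
#define DRF_EXTENT(drf)      (1?drf)
#define DRF_SHIFT(drf)       ((DRF_BASE(drf)) % 32)
#define DRF_MASK(drf)        (0xFFFFFFFFU>>(31-((DRF_EXTENT(drf)) % 32)+((DRF_BASE(drf)) % 32)))

/* Copied from the listing. */
#define DRF_SHIFTMASK(drf)   (DRF_MASK(drf)<<(DRF_SHIFT(drf)))
#define DRF_DEF(d, r, f, c)  ((NV ## d ## r ## f ## c)<<DRF_SHIFT(NV ## d ## r ## f))
#define DRF_NUM(d, r, f, n)  (((n)&DRF_MASK(NV ## d ## r ## f))<<DRF_SHIFT(NV ## d ## r ## f))
#define DRF_VAL(d, r, f, v)  (((v)>>DRF_SHIFT(NV ## d ## r ## f))&DRF_MASK(NV ## d ## r ## f))
#define FLD_SET_DRF(d, r, f, c, v)     ((v & ~DRF_SHIFTMASK(NV##d##r##f)) | DRF_DEF(d,r,f,c))
#define FLD_SET_DRF_NUM(d, r, f, n, v) ((v & ~DRF_SHIFTMASK(NV##d##r##f)) | DRF_NUM(d,r,f,n))
#define FLD_TEST_DRF(d, r, f, c, v)    ((DRF_VAL(d, r, f, v) == NV##d##r##f##c))

int main(void)
{
    unsigned int reg = 0xDEAD0037;   /* pretend this was read from hardware */

    /* Read-modify-write one field at a time; the surrounding bits are preserved. */
    reg = FLD_SET_DRF(_PHYPO, _CTRL, _MODE, _FAST, reg);
    reg = FLD_SET_DRF_NUM(_PHYPO, _CTRL, _COUNT, 0x42, reg);

    printf("reg = 0x%08x\n", reg);                                                 /* 0xdead0242 */
    printf("MODE is FAST: %d\n", FLD_TEST_DRF(_PHYPO, _CTRL, _MODE, _FAST, reg));  /* 1          */
    return 0;
}
```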
| #define | REF_DEF(drf, d) (((drf ## d)&DRF_MASK(drf))<<DRF_SHIFT(drf)) |
| |
| #define | REF_VAL(drf, v) (((v)>>DRF_SHIFT(drf))&DRF_MASK(drf)) |
| |
| #define | REF_NUM(drf, n) (((n)&DRF_MASK(drf))<<DRF_SHIFT(drf)) |
| |
| #define | FLD_TEST_REF(drf, c, v) (REF_VAL(drf, v) == drf##c) |
| |
| #define | FLD_TEST_REF_AND(drf, c, v) (REF_VAL(drf, v) & drf##c) |
| |
| #define | FLD_SET_REF_NUM(drf, n, v) (((v) & ~DRF_SHIFTMASK(drf)) | REF_NUM(drf,n)) |
| |
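The `REF_*` forms take the fully spelled field name as a single argument instead of separate device/register/field tokens. A sketch with the same hypothetical field and simplified helpers:

```c
#include <stdio.h>

/* Hypothetical field (illustration only). */
#define NV_PHYPO_CTRL_MODE        11:8
#define NV_PHYPO_CTRL_MODE_FAST   0x00000002

/* Simplified stand-ins (DRF_ISBIT elided above). */
#define DRF_BASE(drf)        (0?drf)
#define DRF_EXTENT(drf)      (1?drf)
#define DRF_SHIFT(drf)       ((DRF_BASE(drf)) % 32)
#define DRF_MASK(drf)        (0xFFFFFFFFU>>(31-((DRF_EXTENT(drf)) % 32)+((DRF_BASE(drf)) % 32)))
#define DRF_SHIFTMASK(drf)   (DRF_MASK(drf)<<(DRF_SHIFT(drf)))

/* Copied from the listing: the whole field name is passed in directly. */
#define REF_DEF(drf, d)      (((drf ## d)&DRF_MASK(drf))<<DRF_SHIFT(drf))
#define REF_NUM(drf, n)      (((n)&DRF_MASK(drf))<<DRF_SHIFT(drf))
#define REF_VAL(drf, v)      (((v)>>DRF_SHIFT(drf))&DRF_MASK(drf))
#define FLD_SET_REF_NUM(drf, n, v) (((v) & ~DRF_SHIFTMASK(drf)) | REF_NUM(drf,n))

int main(void)
{
    unsigned int reg = REF_DEF(NV_PHYPO_CTRL_MODE, _FAST);   /* 0x00000200            */
    reg = FLD_SET_REF_NUM(NV_PHYPO_CTRL_MODE, 0x7, reg);     /* overwrite MODE with 7 */

    printf("reg  = 0x%08x\n", reg);                          /* 0x00000700 */
    printf("MODE = %u\n", REF_VAL(NV_PHYPO_CTRL_MODE, reg)); /* 7          */
    return 0;
}
```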
| #define | CR_DRF_DEF(d, r, f, c) ((CR ## d ## r ## f ## c)<<DRF_SHIFT(CR ## d ## r ## f)) |
| |
| #define | CR_DRF_NUM(d, r, f, n) (((n)&DRF_MASK(CR ## d ## r ## f))<<DRF_SHIFT(CR ## d ## r ## f)) |
| |
| #define | CR_DRF_VAL(d, r, f, v) (((v)>>DRF_SHIFT(CR ## d ## r ## f))&DRF_MASK(CR ## d ## r ## f)) |
| |
| #define | DRF_EXPAND_MW(drf) drf |
| |
| #define | DRF_PICK_MW(drf, v) (v?DRF_EXPAND_##drf) |
| |
| #define | DRF_WORD_MW(drf) (DRF_PICK_MW(drf,0)/32) |
| |
| #define | DRF_BASE_MW(drf) (DRF_PICK_MW(drf,0)%32) |
| |
| #define | DRF_EXTENT_MW(drf) (DRF_PICK_MW(drf,1)%32) |
| |
| #define | DRF_SHIFT_MW(drf) (DRF_PICK_MW(drf,0)%32) |
| |
| #define | DRF_MASK_MW(drf) (0xFFFFFFFF>>((31-(DRF_EXTENT_MW(drf))+(DRF_BASE_MW(drf)))%32)) |
| |
| #define | DRF_SHIFTMASK_MW(drf) ((DRF_MASK_MW(drf))<<(DRF_SHIFT_MW(drf))) |
| |
| #define | DRF_SIZE_MW(drf) (DRF_EXTENT_MW(drf)-DRF_BASE_MW(drf)+1) |
| |
| #define | DRF_DEF_MW(d, r, f, c) ((NV##d##r##f##c) << DRF_SHIFT_MW(NV##d##r##f)) |
| |
| #define | DRF_NUM_MW(d, r, f, n) (((n)&DRF_MASK_MW(NV##d##r##f))<<DRF_SHIFT_MW(NV##d##r##f)) |
| |
| #define | DRF_VAL_MW_1WORD(d, r, f, v) ((((v)[DRF_WORD_MW(NV##d##r##f)])>>DRF_SHIFT_MW(NV##d##r##f))&DRF_MASK_MW(NV##d##r##f)) |
| |
| #define | DRF_SPANS(drf) ((DRF_PICK_MW(drf,0)/32) != (DRF_PICK_MW(drf,1)/32)) |
| |
| #define | DRF_WORD_MW_LOW(drf) (DRF_PICK_MW(drf,0)/32) |
| |
| #define | DRF_WORD_MW_HIGH(drf) (DRF_PICK_MW(drf,1)/32) |
| |
| #define | DRF_MASK_MW_LOW(drf) (0xFFFFFFFF) |
| |
| #define | DRF_MASK_MW_HIGH(drf) (0xFFFFFFFF>>(31-(DRF_EXTENT_MW(drf)))) |
| |
| #define | DRF_SHIFT_MW_LOW(drf) (DRF_PICK_MW(drf,0)%32) |
| |
| #define | DRF_SHIFT_MW_HIGH(drf) (0) |
| |
| #define | DRF_MERGE_SHIFT(drf) ((32-((DRF_PICK_MW(drf,0)%32)))%32) |
| |
| #define | DRF_VAL_MW_2WORD(d, r, f, v) |
| |
| #define | DRF_VAL_MW(d, r, f, v) ( DRF_SPANS(NV##d##r##f) ? DRF_VAL_MW_2WORD(d,r,f,v) : DRF_VAL_MW_1WORD(d,r,f,v) ) |
| |
| #define | DRF_IDX_DEF_MW(d, r, f, i, c) ((NV##d##r##f##c)<<DRF_SHIFT_MW(NV##d##r##f(i))) |
| |
| #define | DRF_IDX_NUM_MW(d, r, f, i, n) (((n)&DRF_MASK_MW(NV##d##r##f(i)))<<DRF_SHIFT_MW(NV##d##r##f(i))) |
| |
| #define | DRF_IDX_VAL_MW(d, r, f, i, v) ((((v)[DRF_WORD_MW(NV##d##r##f(i))])>>DRF_SHIFT_MW(NV##d##r##f(i)))&DRF_MASK_MW(NV##d##r##f(i))) |
| |
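The `_MW` (multi-word) forms address a field by absolute bit position within an array of 32-bit words; `DRF_EXPAND_MW`/`DRF_PICK_MW` imply that such fields are spelled with an `MW(high:low)` wrapper, which is the assumption this sketch is built on. The field `NV_PHYPO_BLOB_TAG` is hypothetical, and only the non-spanning (single-word) subset of the macros is exercised.

```c
#include <stdint.h>
#include <stdio.h>

typedef uint32_t NvU32;   /* stand-in for the nvtypes.h definition */

/* Hypothetical multi-word field (illustration only): bits 39:32 of a structure
 * viewed as an array of 32-bit words, i.e. bits 7:0 of word 1. */
#define NV_PHYPO_BLOB_TAG   MW(39:32)

/* Copied from the listing (non-spanning subset). DRF_PICK_MW pastes DRF_EXPAND_
 * onto the leading MW token, unwrapping the range so the usual 0?/1? trick works. */
#define DRF_EXPAND_MW(drf)         drf
#define DRF_PICK_MW(drf, v)        (v?DRF_EXPAND_##drf)
#define DRF_WORD_MW(drf)           (DRF_PICK_MW(drf,0)/32)
#define DRF_BASE_MW(drf)           (DRF_PICK_MW(drf,0)%32)
#define DRF_EXTENT_MW(drf)         (DRF_PICK_MW(drf,1)%32)
#define DRF_SHIFT_MW(drf)          (DRF_PICK_MW(drf,0)%32)
#define DRF_MASK_MW(drf)           (0xFFFFFFFF>>((31-(DRF_EXTENT_MW(drf))+(DRF_BASE_MW(drf)))%32))
#define DRF_SHIFTMASK_MW(drf)      ((DRF_MASK_MW(drf))<<(DRF_SHIFT_MW(drf)))
#define DRF_NUM_MW(d, r, f, n)     (((n)&DRF_MASK_MW(NV##d##r##f))<<DRF_SHIFT_MW(NV##d##r##f))
#define DRF_VAL_MW_1WORD(d, r, f, v) ((((v)[DRF_WORD_MW(NV##d##r##f)])>>DRF_SHIFT_MW(NV##d##r##f))&DRF_MASK_MW(NV##d##r##f))
#define FLD_MERGE_MW(drf, n, v)    (((v)[DRF_WORD_MW(drf)] & ~DRF_SHIFTMASK_MW(drf)) | n)
#define FLD_ASSIGN_MW(drf, n, v)   ((v)[DRF_WORD_MW(drf)] = FLD_MERGE_MW(drf, n, v))
#define FLD_SET_DRF_NUM_MW(d, r, f, n, v) FLD_ASSIGN_MW(NV##d##r##f, DRF_NUM_MW(d,r,f,n), v)

int main(void)
{
    NvU32 blob[2] = { 0x11111111, 0x22222222 };   /* 64 bits as two 32-bit words */

    /* Write 0xAB into bits 39:32 (word 1, bits 7:0) and read it back. */
    FLD_SET_DRF_NUM_MW(_PHYPO, _BLOB, _TAG, 0xAB, blob);

    printf("blob[1] = 0x%08x\n", blob[1]);                                   /* 0x222222ab */
    printf("TAG     = 0x%x\n", DRF_VAL_MW_1WORD(_PHYPO, _BLOB, _TAG, blob)); /* 0xab       */
    return 0;
}
```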
| #define | FLD_IDX_OR_DRF_DEF(d, r, f, c, s, v) |
| |
| #define | FLD_MERGE_MW(drf, n, v) (((v)[DRF_WORD_MW(drf)] & ~DRF_SHIFTMASK_MW(drf)) | n) |
| |
| #define | FLD_ASSIGN_MW(drf, n, v) ((v)[DRF_WORD_MW(drf)] = FLD_MERGE_MW(drf, n, v)) |
| |
| #define | FLD_IDX_MERGE_MW(drf, i, n, v) (((v)[DRF_WORD_MW(drf(i))] & ~DRF_SHIFTMASK_MW(drf(i))) | n) |
| |
| #define | FLD_IDX_ASSIGN_MW(drf, i, n, v) ((v)[DRF_WORD_MW(drf(i))] = FLD_MERGE_MW(drf(i), n, v)) |
| |
| #define | FLD_SET_DRF_MW(d, r, f, c, v) FLD_MERGE_MW(NV##d##r##f, DRF_DEF_MW(d,r,f,c), v) |
| |
| #define | FLD_SET_DRF_NUM_MW(d, r, f, n, v) FLD_ASSIGN_MW(NV##d##r##f, DRF_NUM_MW(d,r,f,n), v) |
| |
| #define | FLD_SET_DRF_DEF_MW(d, r, f, c, v) FLD_ASSIGN_MW(NV##d##r##f, DRF_DEF_MW(d,r,f,c), v) |
| |
| #define | FLD_IDX_SET_DRF_MW(d, r, f, i, c, v) FLD_IDX_MERGE_MW(NV##d##r##f, i, DRF_IDX_DEF_MW(d,r,f,i,c), v) |
| |
| #define | FLD_IDX_SET_DRF_DEF_MW(d, r, f, i, c, v) FLD_IDX_MERGE_MW(NV##d##r##f, i, DRF_IDX_DEF_MW(d,r,f,i,c), v) |
| |
| #define | FLD_IDX_SET_DRF_NUM_MW(d, r, f, i, n, v) FLD_IDX_ASSIGN_MW(NV##d##r##f, i, DRF_IDX_NUM_MW(d,r,f,i,n), v) |
| |
| #define | FLD_TEST_DRF_MW(d, r, f, c, v) ((DRF_VAL_MW(d, r, f, v) == NV##d##r##f##c)) |
| |
| #define | FLD_TEST_DRF_NUM_MW(d, r, f, n, v) ((DRF_VAL_MW(d, r, f, v) == n)) |
| |
| #define | FLD_IDX_TEST_DRF_MW(d, r, f, i, c, v) ((DRF_IDX_VAL_MW(d, r, f, i, v) == NV##d##r##f##c)) |
| |
| #define | DRF_VAL_BS(d, r, f, v) ( DRF_SPANS(NV##d##r##f) ? DRF_VAL_BS_2WORD(d,r,f,v) : DRF_VAL_BS_1WORD(d,r,f,v) ) |
| |
| #define | ENG_RD_REG(g, o, d, r) GPU_REG_RD32(g, ENG_REG##d(o,d,r)) |
| |
| #define | ENG_WR_REG(g, o, d, r, v) GPU_REG_WR32(g, ENG_REG##d(o,d,r), v) |
| |
| #define | ENG_RD_DRF(g, o, d, r, f) ((GPU_REG_RD32(g, ENG_REG##d(o,d,r))>>GPU_DRF_SHIFT(NV ## d ## r ## f))&GPU_DRF_MASK(NV ## d ## r ## f)) |
| |
| #define | ENG_WR_DRF_DEF(g, o, d, r, f, c) GPU_REG_WR32(g, ENG_REG##d(o,d,r),(GPU_REG_RD32(g,ENG_REG##d(o,d,r))&~(GPU_DRF_MASK(NV##d##r##f)<<GPU_DRF_SHIFT(NV##d##r##f)))|GPU_DRF_DEF(d,r,f,c)) |
| |
| #define | ENG_WR_DRF_NUM(g, o, d, r, f, n) GPU_REG_WR32(g, ENG_REG##d(o,d,r),(GPU_REG_RD32(g,ENG_REG##d(o,d,r))&~(GPU_DRF_MASK(NV##d##r##f)<<GPU_DRF_SHIFT(NV##d##r##f)))|GPU_DRF_NUM(d,r,f,n)) |
| |
| #define | ENG_TEST_DRF_DEF(g, o, d, r, f, c) (ENG_RD_DRF(g, o, d, r, f) == NV##d##r##f##c) |
| |
| #define | ENG_RD_IDX_DRF(g, o, d, r, f, i) ((GPU_REG_RD32(g, ENG_REG##d(o,d,r(i)))>>GPU_DRF_SHIFT(NV ## d ## r ## f))&GPU_DRF_MASK(NV ## d ## r ## f)) |
| |
| #define | ENG_TEST_IDX_DRF_DEF(g, o, d, r, f, c, i) (ENG_RD_IDX_DRF(g, o, d, r, f, i) == NV##d##r##f##c) |
| |
| #define | ENG_IDX_RD_REG(g, o, d, i, r) GPU_REG_RD32(g, ENG_IDX_REG##d(o,d,i,r)) |
| |
| #define | ENG_IDX_WR_REG(g, o, d, i, r, v) GPU_REG_WR32(g, ENG_IDX_REG##d(o,d,i,r), v) |
| |
| #define | ENG_IDX_RD_DRF(g, o, d, i, r, f) ((GPU_REG_RD32(g, ENG_IDX_REG##d(o,d,i,r))>>GPU_DRF_SHIFT(NV ## d ## r ## f))&GPU_DRF_MASK(NV ## d ## r ## f)) |
| |
| #define | DRF_VAL_BS_1WORD(d, r, f, v) ((DRF_READ_1WORD_BS(d,r,f,v)>>DRF_SHIFT_MW(NV##d##r##f))&DRF_MASK_MW(NV##d##r##f)) |
| |
| #define | DRF_VAL_BS_2WORD(d, r, f, v) |
| |
| #define | DRF_READ_1BYTE_BS(drf, v) ((NvU32)(((const NvU8*)(v))[DRF_WORD_MW(drf)*4])) |
| |
| #define | DRF_READ_2BYTE_BS(drf, v) |
| |
| #define | DRF_READ_3BYTE_BS(drf, v) |
| |
| #define | DRF_READ_4BYTE_BS(drf, v) |
| |
| #define | DRF_READ_1BYTE_BS_HIGH(drf, v) ((NvU32)(((const NvU8*)(v))[DRF_WORD_MW_HIGH(drf)*4])) |
| |
| #define | DRF_READ_2BYTE_BS_HIGH(drf, v) |
| |
| #define | DRF_READ_3BYTE_BS_HIGH(drf, v) |
| |
| #define | DRF_READ_4BYTE_BS_HIGH(drf, v) |
| |
| #define | NV_TWO_N_MINUS_ONE(n) (((1ULL<<(n/2))<<((n+1)/2))-1) |
| |
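NV_TWO_N_MINUS_ONE splits the shift into `n/2` and `(n+1)/2` so that `n == 64` never shifts a 64-bit value by 64, which would be undefined behavior. A short check:

```c
#include <stdio.h>

/* Copied from the listing: (2^n)-1 without ever shifting a 64-bit value by 64. */
#define NV_TWO_N_MINUS_ONE(n) (((1ULL<<(n/2))<<((n+1)/2))-1)

int main(void)
{
    printf("n=10: 0x%llx\n", NV_TWO_N_MINUS_ONE(10));   /* 0x3ff                         */
    printf("n=64: 0x%llx\n", NV_TWO_N_MINUS_ONE(64));   /* 0xffffffffffffffff, no UB     */
    return 0;
}
```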
| #define | DRF_READ_1WORD_BS(d, r, f, v) |
| |
| #define | DRF_READ_1WORD_BS_HIGH(d, r, f, v) |
| |
| #define | BIN_2_GRAY(n) ((n)^((n)>>1)) |
| |
| #define | GRAY_2_BIN_64b(n) (n)^=(n)>>1; (n)^=(n)>>2; (n)^=(n)>>4; (n)^=(n)>>8; (n)^=(n)>>16; (n)^=(n)>>32; |
| |
| #define | LOWESTBIT(x) ( (x) & (((x)-1) ^ (x)) ) |
| |
| #define | HIGHESTBIT(n32) |
| |
| #define | ONEBITSET(x) ( (x) && (((x) & ((x)-1)) == 0) ) |
| |
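A short, self-contained check of the bit-twiddling helpers above: `BIN_2_GRAY` is an expression, while `GRAY_2_BIN_64b` is a statement sequence that converts its operand back in place (so it needs a 64-bit lvalue).

```c
#include <stdio.h>

/* Copied from the listing. */
#define BIN_2_GRAY(n)      ((n)^((n)>>1))
#define GRAY_2_BIN_64b(n)  (n)^=(n)>>1; (n)^=(n)>>2; (n)^=(n)>>4; (n)^=(n)>>8; (n)^=(n)>>16; (n)^=(n)>>32;
#define LOWESTBIT(x)       ( (x) & (((x)-1) ^ (x)) )
#define ONEBITSET(x)       ( (x) && (((x) & ((x)-1)) == 0) )

int main(void)
{
    unsigned long long gray = BIN_2_GRAY(13ULL);   /* 13 = 1101b -> Gray 1011b = 11 */
    printf("gray(13) = %llu\n", gray);

    GRAY_2_BIN_64b(gray);                          /* converts back in place */
    printf("binary   = %llu\n", gray);             /* 13 */

    printf("LOWESTBIT(0x58) = 0x%x\n", LOWESTBIT(0x58u));  /* 0x8 */
    printf("ONEBITSET(0x40) = %d\n", ONEBITSET(0x40));     /* 1   */
    return 0;
}
```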
| #define | NUMSETBITS_32(n32) |
| |
| #define | LOWESTBITIDX_32(n32) |
| |
| #define | HIGHESTBITIDX_32(n32) |
| |
| #define | ROUNDUP_POW2(n32) |
| |
| #define | ROUNDUP_POW2_U64(n64) |
| |
| #define | NV_SWAP_U8(a, b) |
| |
| #define | FOR_EACH_INDEX_IN_MASK(maskWidth, index, mask) |
| | Macros allowing simple iteration over bits set in a given mask. |
| |
| #define | FOR_EACH_INDEX_IN_MASK_END |
| |
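The bodies of these two macros are not reproduced in the listing, so the snippet below is a usage sketch only: it assumes the real nvmisc.h definitions (and its types) are available on the include path, and the engine-mask scenario is made up. The pair expands to a loop and its matching close, which is why every use must be terminated with `FOR_EACH_INDEX_IN_MASK_END`.

```c
#include <stdio.h>
#include "nvmisc.h"   /* assumed: provides FOR_EACH_INDEX_IN_MASK(_END) and NvU32 */

static void printEngineIds(NvU32 engineMask)
{
    NvU32 engineId;

    /* Visits only the set bit indices: for 0x8A this prints 1, 3, 7. */
    FOR_EACH_INDEX_IN_MASK(32, engineId, engineMask)
    {
        printf("engine %u is present\n", engineId);
    }
    FOR_EACH_INDEX_IN_MASK_END;
}

int main(void)
{
    printEngineIds(0x8A);
    return 0;
}
```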
| #define | NV_ANYSIZE_ARRAY 1 |
| |
| #define | NV_CEIL(a, b) (((a)+(b)-1)/(b)) |
| |
| #define | NV_DIV_AND_CEIL(a, b) NV_CEIL(a,b) |
| |
| #define | NV_MIN(a, b) (((a) < (b)) ? (a) : (b)) |
| |
| #define | NV_MAX(a, b) (((a) > (b)) ? (a) : (b)) |
| |
| #define | NV_ABS(a) ((a)>=0?(a):(-(a))) |
| |
| #define | NV_SIGN(s) ((NvS8)(((s) > 0) - ((s) < 0))) |
| |
| #define | NV_ZERO_SIGN(s) ((NvS8)((((s) >= 0) * 2) - 1)) |
| |
| #define | NV_OFFSETOF(type, member) ((NvU32)(NvU64)&(((type *)0)->member)) |
| |
| #define | NV_UNSIGNED_ROUNDED_DIV(a, b) (((a) + ((b) / 2)) / (b)) |
| |
| #define | NV_UNSIGNED_DIV_CEIL(a, b) (((a) + (b - 1)) / (b)) |
| | Performs an unsigned ceiling division of a by b. |
| |
| #define | NV_RIGHT_SHIFT_ROUNDED(a, shift) (((a) >> (shift)) + !!((NVBIT((shift) - 1) & (a)) == NVBIT((shift) - 1))) |
| | Performs a rounded right-shift of 32-bit unsigned value "a" by "shift" bits. |
| |
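A short check of the rounding helpers: `NV_CEIL` rounds a division up, `NV_UNSIGNED_ROUNDED_DIV` rounds to nearest, and `NV_RIGHT_SHIFT_ROUNDED` rounds a power-of-two division to nearest by inspecting bit `shift-1`.

```c
#include <stdio.h>

/* Copied from the listing. */
#define NVBIT(b)                         (1<<(b))
#define NV_CEIL(a, b)                    (((a)+(b)-1)/(b))
#define NV_UNSIGNED_ROUNDED_DIV(a, b)    (((a) + ((b) / 2)) / (b))
#define NV_RIGHT_SHIFT_ROUNDED(a, shift) (((a) >> (shift)) + !!((NVBIT((shift) - 1) & (a)) == NVBIT((shift) - 1)))

int main(void)
{
    printf("NV_CEIL(10, 4)                 = %u\n", NV_CEIL(10u, 4u));                 /* 3 (rounds up)            */
    printf("NV_UNSIGNED_ROUNDED_DIV(10, 4) = %u\n", NV_UNSIGNED_ROUNDED_DIV(10u, 4u)); /* 3 (2.5 rounds to 3)      */
    printf("NV_RIGHT_SHIFT_ROUNDED(6, 2)   = %u\n", NV_RIGHT_SHIFT_ROUNDED(6u, 2));    /* 2 (6/4 = 1.5 rounds up)  */
    printf("NV_RIGHT_SHIFT_ROUNDED(5, 2)   = %u\n", NV_RIGHT_SHIFT_ROUNDED(5u, 2));    /* 1 (5/4 = 1.25 rounds down) */
    return 0;
}
```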
| #define | NV_ALIGN_DOWN(v, gran) ((v) & ~((gran) - 1)) |
| |
| #define | NV_ALIGN_UP(v, gran) (((v) + ((gran) - 1)) & ~((gran)-1)) |
| |
| #define | NV_IS_ALIGNED(v, gran) (0 == ((v) & ((gran) - 1))) |
| |
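The alignment helpers rely on `gran` being a power of two, so that `(gran - 1)` forms a contiguous low-bit mask. A short check:

```c
#include <stdio.h>

/* Copied from the listing. */
#define NV_ALIGN_DOWN(v, gran)  ((v) & ~((gran) - 1))
#define NV_ALIGN_UP(v, gran)    (((v) + ((gran) - 1)) & ~((gran)-1))
#define NV_IS_ALIGNED(v, gran)  (0 == ((v) & ((gran) - 1)))

int main(void)
{
    unsigned int offset = 0x1234;

    printf("down to 0x1000: 0x%x\n", NV_ALIGN_DOWN(offset, 0x1000u));  /* 0x1000 */
    printf("up   to 0x1000: 0x%x\n", NV_ALIGN_UP(offset, 0x1000u));    /* 0x2000 */
    printf("aligned to 4:   %d\n",   NV_IS_ALIGNED(offset, 4u));       /* 1      */
    return 0;
}
```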