Lines Matching refs:relRow
843 FORCE_INLINE_TEMPLATE void ZSTD_row_prefetch(U32 const* hashTable, U16 const* tagTable, U32 const relRow, U32 const rowLog) in ZSTD_row_prefetch() argument
844 PREFETCH_L1(hashTable + relRow); in ZSTD_row_prefetch()
846 PREFETCH_L1(hashTable + relRow + 16); in ZSTD_row_prefetch()
849 PREFETCH_L1(tagTable + relRow); in ZSTD_row_prefetch()
851 PREFETCH_L1(tagTable + relRow + 32); in ZSTD_row_prefetch()
854 assert(ZSTD_isAligned(hashTable + relRow, 64)); /* prefetched hash row always 64-byte aligned */ in ZSTD_row_prefetch()
855 assert(ZSTD_isAligned(tagTable + relRow, (size_t)1 << rowLog)); /* prefetched tagRow sits on correct multiple of bytes (32,64,128) */ in ZSTD_row_prefetch()
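
The four PREFETCH_L1 calls above warm both the hash row and its tag row before they are probed; because relRow is already scaled by rowLog, the same element offset indexes both tables. Below is a minimal, self-contained sketch of that idea, not zstd's implementation: PREFETCH_L1 is approximated with GCC/Clang's __builtin_prefetch, and the function name row_prefetch_sketch is made up for illustration.

    /* Minimal sketch of the row-prefetch idea, assuming GCC/Clang __builtin_prefetch. */
    #include <stdint.h>

    #define PREFETCH_L1(ptr) __builtin_prefetch((ptr), 0 /* read */, 3 /* keep in cache */)

    static void row_prefetch_sketch(const uint32_t* hashTable,
                                    const uint16_t* tagTable,
                                    uint32_t relRow,
                                    uint32_t rowLog)
    {
        PREFETCH_L1(hashTable + relRow);           /* first 64-byte line of the hash row */
        if (rowLog >= 5)
            PREFETCH_L1(hashTable + relRow + 16);  /* 32+ U32 entries span a second cache line */
        PREFETCH_L1(tagTable + relRow);            /* tag row lives at the same relative offset */
        if (rowLog == 6)
            PREFETCH_L1(tagTable + relRow + 32);   /* largest rows: fetch the tag row's second line */
    }

Prefetching the extra line only for larger rows mirrors the listing above: a 32-entry row of U32s, or the tag region addressed 16 bits per entry at rowLog == 6, spills past a single 64-byte cache line.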
919 U32 const relRow = (hash >> ZSTD_ROW_HASH_TAG_BITS) << rowLog; in ZSTD_row_update_internalImpl() local
920 U32* const row = hashTable + relRow; in ZSTD_row_update_internalImpl()
921 BYTE* tagRow = (BYTE*)(tagTable + relRow); /* Though tagTable is laid out as a table of U16, each tag is only 1 byte. Explicit cast allows us to get exact desired position within each row */ in ZSTD_row_update_internalImpl()
1184 U32 const relRow = (hash >> ZSTD_ROW_HASH_TAG_BITS) << rowLog; in ZSTD_RowFindBestMatch() local
1186 U32* const row = hashTable + relRow; in ZSTD_RowFindBestMatch()
1187 BYTE* tagRow = (BYTE*)(tagTable + relRow); in ZSTD_RowFindBestMatch()
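
Both ZSTD_row_update_internalImpl and ZSTD_RowFindBestMatch derive relRow the same way: the low ZSTD_ROW_HASH_TAG_BITS of the hash become a one-byte tag, the remaining bits select a row, and shifting by rowLog turns that row number into an element offset that indexes the hash table and the tag table alike. The sketch below shows just that arithmetic under simplified assumptions: ROW_HASH_TAG_BITS, row_tables, and row_insert_sketch are hypothetical names, and the real zstd tag row additionally keeps a head counter and is matched with SIMD, which this sketch omits.

    /* Simplified sketch of relRow derivation and row/tag addressing; not zstd's exact layout. */
    #include <stdint.h>
    #include <stdlib.h>

    #define ROW_HASH_TAG_BITS 8                    /* stand-in for ZSTD_ROW_HASH_TAG_BITS (assumption) */

    typedef struct {
        uint32_t* hashTable;                       /* rows of (1 << rowLog) 32-bit match indices */
        uint16_t* tagTable;                        /* declared as U16, but tags are addressed per byte */
        uint32_t  rowLog;                          /* log2(entries per row), e.g. 4, 5, or 6 */
    } row_tables;                                  /* hypothetical container for this sketch */

    static void row_insert_sketch(row_tables* t, uint32_t hash, uint32_t matchIndex, uint32_t pos)
    {
        uint32_t const relRow = (hash >> ROW_HASH_TAG_BITS) << t->rowLog; /* element offset of the row */
        uint32_t* const row   = t->hashTable + relRow;                    /* start of the hash row */
        /* Though tagTable is laid out as U16, each tag is one byte; the cast gives
         * byte-granular addressing within the row, as in the listing above. */
        uint8_t*  const tagRow = (uint8_t*)(t->tagTable + relRow);

        row[pos]    = matchIndex;                                         /* where this hash occurred */
        tagRow[pos] = (uint8_t)(hash & ((1u << ROW_HASH_TAG_BITS) - 1));  /* cheap filter for lookups */
    }

    int main(void)
    {
        uint32_t const rowLog   = 5;               /* 32 entries per row */
        size_t const numRows    = (size_t)1 << 10; /* toy table: 1024 rows */
        size_t const numEntries = numRows << rowLog;
        row_tables t;
        t.hashTable = calloc(numEntries, sizeof(uint32_t));
        t.tagTable  = calloc(numEntries, sizeof(uint16_t));
        t.rowLog    = rowLog;
        if (!t.hashTable || !t.tagTable) return 1;
        /* hash chosen so (hash >> ROW_HASH_TAG_BITS) stays inside the 1024 toy rows */
        row_insert_sketch(&t, 0xABCDu, /*matchIndex*/ 42u, /*pos*/ 3u);
        free(t.hashTable);
        free(t.tagTable);
        return 0;
    }

The point of sharing one relRow between both tables is that a single shift per hash yields the starting offset of the hash row and, via the byte cast, of its tag row, which is what makes the prefetch in ZSTD_row_prefetch cheap to issue ahead of the actual search.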