Lines matching the full token "s64"

17 #define S64_MIN ((s64)INT64_MIN)
18 #define S64_MAX ((s64)INT64_MAX)
32 #define s64 ___s64 macro
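
Lines 17-18 and 32 above show the selftest's type scaffolding: s64 is remapped onto a locally defined typedef, and the signed 64-bit extremes are expressed through it. A minimal self-contained sketch of that pattern; the ___-prefixed typedef names follow line 32, while the exact typedef choices are assumptions by analogy:

    #include <stdint.h>

    /* Local 64-bit typedefs; the ___ prefix avoids clashing with kernel-style
     * u64/s64 typedefs that other headers may already provide. */
    typedef unsigned long long ___u64;
    typedef long long ___s64;

    /* Remap the short names onto the local typedefs (line 32). */
    #define u64 ___u64
    #define s64 ___s64

    /* Signed 64-bit extremes expressed through the remapped type (lines 17-18). */
    #define S64_MIN ((s64)INT64_MIN)
    #define S64_MAX ((s64)INT64_MAX)
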
65 enum num_t { U64, first_t = U64, U32, S64, S32, last_t = S32 }; enumerator
72 case S64: return (s64)x < (s64)y ? (s64)x : (s64)y; in min_t()
83 case S64: return (s64)x > (s64)y ? (s64)x : (s64)y; in max_t()
94 case S64: return (s64)x; in cast_t()
105 case S64: return "s64"; in t_str()
116 case S64: return false; in t_is_32()
125 case U64: return S64; in t_signed()
127 case S64: return S64; in t_signed()
138 case S64: return U64; in t_unsigned()
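
The case labels at lines 65-138 all come from small per-type helpers that switch over enum num_t and reinterpret a raw u64 payload in the chosen domain. A sketch of that shape for min_t(); only the S64 branch is taken verbatim from line 72, the remaining branches and the exact signature are assumptions by analogy:

    #include <stdio.h>
    #include <stdlib.h>

    typedef unsigned long long u64;   /* stand-ins for the remapped kernel-style types */
    typedef unsigned int u32;
    typedef long long s64;
    typedef int s32;

    enum num_t { U64, first_t = U64, U32, S64, S32, last_t = S32 };

    /* Pick the smaller of two raw 64-bit payloads after reinterpreting them in
     * the domain selected by t; the result is stored back as a raw u64 payload. */
    static u64 min_t(enum num_t t, u64 x, u64 y)
    {
        switch (t) {
        case U64: return x < y ? x : y;
        case U32: return (u32)x < (u32)y ? (u32)x : (u32)y;
        case S64: return (s64)x < (s64)y ? (s64)x : (s64)y;   /* line 72 */
        case S32: return (s32)x < (s32)y ? (s32)x : (s32)y;
        default:  fprintf(stderr, "min_t: unknown type\n"); exit(1);
        }
    }

Keeping every value as a raw u64 payload and casting only at the point of comparison is what lets one struct range (line 265) serve all four numeric domains.
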
153 case S64: return (s64)x >= SNUM_MIN_DECIMAL && (s64)x <= SNUM_MAX_DECIMAL; in num_is_small()
167 case S64: return snappendf(sb, "%lld", (s64)x); in snprintf_num()
187 case S64: in snprintf_num()
188 if ((s64)x == S64_MAX) in snprintf_num()
190 else if ((s64)x >= S64_MAX - 256) in snprintf_num()
191 return snappendf(sb, "S64_MAX-%lld", S64_MAX - (s64)x); in snprintf_num()
192 else if ((s64)x == S64_MIN) in snprintf_num()
194 else if ((s64)x <= S64_MIN + 256) in snprintf_num()
195 return snappendf(sb, "S64_MIN+%lld", (s64)x - S64_MIN); in snprintf_num()
197 return snappendf(sb, "%#llx", (s64)x); in snprintf_num()
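
Lines 187-197 print s64 values near the extremes symbolically so the test output stays readable. A sketch of that branch using plain snprintf() into a caller-provided buffer; the real code goes through the test's snappendf()/strbuf helpers, which are not part of this listing, and the small-decimal window mirrors the SNUM_*_DECIMAL check at line 153 with 256 assumed here:

    #include <stdint.h>
    #include <stdio.h>

    typedef long long s64;

    #define S64_MIN ((s64)INT64_MIN)
    #define S64_MAX ((s64)INT64_MAX)

    static int snprintf_s64(char *buf, size_t sz, s64 v)
    {
        if (v == S64_MAX)
            return snprintf(buf, sz, "S64_MAX");
        if (v >= S64_MAX - 256)                 /* close to the top: print as an offset */
            return snprintf(buf, sz, "S64_MAX-%lld", S64_MAX - v);
        if (v == S64_MIN)
            return snprintf(buf, sz, "S64_MIN");
        if (v <= S64_MIN + 256)                 /* close to the bottom: print as an offset */
            return snprintf(buf, sz, "S64_MIN+%lld", v - S64_MIN);
        if (v >= -256 && v <= 256)              /* small values read best in decimal */
            return snprintf(buf, sz, "%lld", v);
        return snprintf(buf, sz, "%#llx", (unsigned long long)v);
    }
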
245 [S64] = { (u64)S64_MIN, (u64)S64_MAX },
254 case S64: return unkn[U32]; in unkn_subreg()
265 case S64: return (struct range){ (s64)a, (s64)b }; in range()
318 case S64: in range_cast_u64()
320 return unkn[S64]; in range_cast_u64()
321 return range(S64, a, b); in range_cast_u64()
330 s64 a = (s64)x.a, b = (s64)x.b; in range_cast_s64()
334 /* equivalent to (s64)a <= (s64)b check */ in range_cast_s64()
342 case S64: in range_cast_s64()
356 case S64: in range_cast_u32()
357 /* u32 is always a valid zero-extended u64/s64 */ in range_cast_u32()
374 case S64: in range_cast_s32()
394 case S64: return range_cast_s64(to_t, from); in range_cast()
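
The range_cast_*() fragments at lines 245-394 convert a range tracked in one domain into another, falling back to the fully unknown range once the converted bounds stop being ordered. A sketch of the u64-to-s64 leg; the helper and variable names here are illustrative, only the a > b fallback and the unkn[] idea come from lines 318-334:

    #include <stdint.h>

    typedef unsigned long long u64;
    typedef long long s64;

    struct range { u64 a, b; };   /* bounds stored as raw u64 payloads (line 265) */

    /* Fully unconstrained S64 range, as in the unkn[] table at line 245. */
    static const struct range unkn_s64 = { (u64)INT64_MIN, (u64)INT64_MAX };

    /* Reinterpret u64 bounds as s64; if the signed view wraps (a > b), no single
     * s64 interval covers the original value set, so report "unknown". */
    static struct range s64_range_from_u64(struct range x)
    {
        s64 a = (s64)x.a, b = (s64)x.b;

        if (a > b)
            return unkn_s64;
        return (struct range){ (u64)a, (u64)b };
    }
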
405 case S64: return true; in is_valid_num()
419 case S64: return (s64)x.a <= (s64)x.b; in is_valid_range()
445 if (x_t == S64 && y_t == S32 && y_cast.a <= S32_MAX && y_cast.b <= S32_MAX && in range_refine()
446 (s64)x.a >= S32_MIN && (s64)x.b <= S32_MAX) in range_refine()
526 case S64: { range_canbe(s64); } in range_canbe_op()
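
Line 526 expands a range_canbe() helper macro in the S64 domain, answering whether a comparison outcome is at all possible for operands drawn from two ranges. A sketch for two operators; the operator set and the function shape are assumptions, only the idea of comparing range endpoints in the s64 domain comes from the listing:

    #include <stdbool.h>

    typedef unsigned long long u64;
    typedef long long s64;

    struct range { u64 a, b; };

    enum op { OP_LT, OP_GE };

    /* "Can x OP y hold for some x in X and some y in Y?" reduces to comparing
     * the extreme ends of the two ranges, here in the s64 domain. */
    static bool range_canbe_s64(enum op op, struct range x, struct range y)
    {
        switch (op) {
        case OP_LT: return (s64)x.a < (s64)y.b;   /* smallest x vs largest y */
        case OP_GE: return (s64)x.b >= (s64)y.a;  /* largest x vs smallest y */
        }
        return false;
    }
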
640 struct range r[4]; /* indexed by enum num_t: U64, U32, S64, S32 */
788 if (br_u == -1 && (t == U64 || t == S64)) { in reg_state_branch_taken_op()
1044 {"smin=", &reg->r[S64].a, S64_MIN}, in parse_reg_state()
1045 {"smax=", &reg->r[S64].b, S64_MAX}, in parse_reg_state()
1343 s64 svals[ARRAY_SIZE(upper_seeds) * ARRAY_SIZE(lower_seeds)];
1397 .setup_signed = (init_t == S64 || init_t == S32), in verify_case_op()
1399 .compare_signed = (cond_t == S64 || cond_t == S32), in verify_case_op()
1531 s64 x1 = *(const s64 *)p1, x2 = *(const s64 *)p2; in s64_cmp()
1564 /* we have exactly the same number of s64 values, they are just in in gen_vals()
1578 snprintf_num(S64, sb2, ctx->svals[i]); in gen_vals()
1579 printf("SEED #%d: u64=%-20s s64=%-20s\n", i, sb1->buf, sb2->buf); in gen_vals()
1629 snprintf_range(S64, sb2, range(S64, ctx->svals[i], ctx->svals[j])); in gen_ranges()
1630 printf("RANGE #%d: u64=%-40s s64=%-40s\n", cnt, sb1->buf, sb2->buf); in gen_ranges()
1648 ctx->sranges[cnt] = range(S64, ctx->svals[i], ctx->svals[j]); in gen_ranges()
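
gen_vals() and gen_ranges() (lines 1343-1648) build the exhaustive value and range sets: the same raw 64-bit values are kept in two orders, sorted once as u64 and once as s64, because the two orderings differ while the value set is identical (the comment at line 1564 makes the same point, and line 1531 shows the s64 comparator). A self-contained sketch of that idea; the seed values below are illustrative, not the test's actual upper_seeds/lower_seeds:

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    typedef unsigned long long u64;
    typedef long long s64;

    #define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))

    /* qsort comparator over raw 64-bit payloads interpreted as s64 (line 1531). */
    static int s64_cmp(const void *p1, const void *p2)
    {
        s64 x1 = *(const s64 *)p1, x2 = *(const s64 *)p2;

        return x1 != x2 ? (x1 < x2 ? -1 : 1) : 0;
    }

    static int u64_cmp(const void *p1, const void *p2)
    {
        u64 x1 = *(const u64 *)p1, x2 = *(const u64 *)p2;

        return x1 != x2 ? (x1 < x2 ? -1 : 1) : 0;
    }

    int main(void)
    {
        const u64 seeds[] = { 0, 1, 0x7fffffffULL, 0x80000000ULL,
                              0x7fffffffffffffffULL, 0x8000000000000000ULL,
                              0xffffffffffffffffULL };
        u64 uvals[ARRAY_SIZE(seeds)];
        s64 svals[ARRAY_SIZE(seeds)];

        memcpy(uvals, seeds, sizeof(seeds));
        memcpy(svals, seeds, sizeof(seeds));
        qsort(uvals, ARRAY_SIZE(uvals), sizeof(u64), u64_cmp);
        qsort(svals, ARRAY_SIZE(svals), sizeof(s64), s64_cmp);

        /* mirrors the "SEED #%d: u64=... s64=..." report at line 1579 */
        for (size_t i = 0; i < ARRAY_SIZE(seeds); i++)
            printf("SEED #%zu: u64=%-20llu s64=%-20lld\n", i, uvals[i], svals[i]);
        return 0;
    }
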
1776 /* (u64|s64)(<range> x <const>) */ in validate_gen_range_vs_const_64()
1779 /* (u64|s64)(<const> x <range>) */ in validate_gen_range_vs_const_64()
1849 case S64: in validate_gen_range_vs_range()
1893 void test_reg_bounds_gen_consts_u64_s64(void) { validate_gen_range_vs_const_64(U64, S64); } in test_reg_bounds_gen_consts_u64_s64()
1896 /* RANGE x CONST, S64 initial range */
1897 void test_reg_bounds_gen_consts_s64_u64(void) { validate_gen_range_vs_const_64(S64, U64); } in test_reg_bounds_gen_consts_s64_u64()
1898 void test_reg_bounds_gen_consts_s64_s64(void) { validate_gen_range_vs_const_64(S64, S64); } in test_reg_bounds_gen_consts_s64_s64()
1899 void test_reg_bounds_gen_consts_s64_u32(void) { validate_gen_range_vs_const_64(S64, U32); } in test_reg_bounds_gen_consts_s64_u32()
1900 void test_reg_bounds_gen_consts_s64_s32(void) { validate_gen_range_vs_const_64(S64, S32); } in test_reg_bounds_gen_consts_s64_s32()
1903 void test_reg_bounds_gen_consts_u32_s64(void) { validate_gen_range_vs_const_32(U32, S64); } in test_reg_bounds_gen_consts_u32_s64()
1908 void test_reg_bounds_gen_consts_s32_s64(void) { validate_gen_range_vs_const_32(S32, S64); } in test_reg_bounds_gen_consts_s32_s64()
1914 void test_reg_bounds_gen_ranges_u64_s64(void) { validate_gen_range_vs_range(U64, S64); } in test_reg_bounds_gen_ranges_u64_s64()
1917 /* RANGE x RANGE, S64 initial range */
1918 void test_reg_bounds_gen_ranges_s64_u64(void) { validate_gen_range_vs_range(S64, U64); } in test_reg_bounds_gen_ranges_s64_u64()
1919 void test_reg_bounds_gen_ranges_s64_s64(void) { validate_gen_range_vs_range(S64, S64); } in test_reg_bounds_gen_ranges_s64_s64()
1920 void test_reg_bounds_gen_ranges_s64_u32(void) { validate_gen_range_vs_range(S64, U32); } in test_reg_bounds_gen_ranges_s64_u32()
1921 void test_reg_bounds_gen_ranges_s64_s32(void) { validate_gen_range_vs_range(S64, S32); } in test_reg_bounds_gen_ranges_s64_s32()
1924 void test_reg_bounds_gen_ranges_u32_s64(void) { validate_gen_range_vs_range(U32, S64); } in test_reg_bounds_gen_ranges_u32_s64()
1929 void test_reg_bounds_gen_ranges_s32_s64(void) { validate_gen_range_vs_range(S32, S64); } in test_reg_bounds_gen_ranges_s32_s64()
2014 void test_reg_bounds_rand_consts_u64_s64(void) { validate_rand_ranges(U64, S64, true /* const */); } in test_reg_bounds_rand_consts_u64_s64()
2017 /* [RANDOM] RANGE x CONST, S64 initial range */
2018 void test_reg_bounds_rand_consts_s64_u64(void) { validate_rand_ranges(S64, U64, true /* const */); } in test_reg_bounds_rand_consts_s64_u64()
2019 void test_reg_bounds_rand_consts_s64_s64(void) { validate_rand_ranges(S64, S64, true /* const */); } in test_reg_bounds_rand_consts_s64_s64()
2020 void test_reg_bounds_rand_consts_s64_u32(void) { validate_rand_ranges(S64, U32, true /* const */); } in test_reg_bounds_rand_consts_s64_u32()
2021 void test_reg_bounds_rand_consts_s64_s32(void) { validate_rand_ranges(S64, S32, true /* const */); } in test_reg_bounds_rand_consts_s64_s32()
2024 void test_reg_bounds_rand_consts_u32_s64(void) { validate_rand_ranges(U32, S64, true /* const */); } in test_reg_bounds_rand_consts_u32_s64()
2029 void test_reg_bounds_rand_consts_s32_s64(void) { validate_rand_ranges(S32, S64, true /* const */); } in test_reg_bounds_rand_consts_s32_s64()
2035 void test_reg_bounds_rand_ranges_u64_s64(void) { validate_rand_ranges(U64, S64, false /* range */); } in test_reg_bounds_rand_ranges_u64_s64()
2038 /* [RANDOM] RANGE x RANGE, S64 initial range */
2039 void test_reg_bounds_rand_ranges_s64_u64(void) { validate_rand_ranges(S64, U64, false /* range */); } in test_reg_bounds_rand_ranges_s64_u64()
2040 void test_reg_bounds_rand_ranges_s64_s64(void) { validate_rand_ranges(S64, S64, false /* range */); } in test_reg_bounds_rand_ranges_s64_s64()
2041 void test_reg_bounds_rand_ranges_s64_u32(void) { validate_rand_ranges(S64, U32, false /* range */); } in test_reg_bounds_rand_ranges_s64_u32()
2042 void test_reg_bounds_rand_ranges_s64_s32(void) { validate_rand_ranges(S64, S32, false /* range */); } in test_reg_bounds_rand_ranges_s64_s32()
2045 void test_reg_bounds_rand_ranges_u32_s64(void) { validate_rand_ranges(U32, S64, false /* range */); } in test_reg_bounds_rand_ranges_u32_s64()
2050 void test_reg_bounds_rand_ranges_s32_s64(void) { validate_rand_ranges(S32, S64, false /* range */); } in test_reg_bounds_rand_ranges_s32_s64()
2069 {U64, S64, {0, 1}, {1, 0x80000000}},
2073 {U64, S64, {0, 0xffffffff00000000ULL}, {0, 0}},
2074 {U64, S64, {0x7fffffffffffffffULL, 0xffffffff00000000ULL}, {0, 0}},
2075 {U64, S64, {0x7fffffff00000001ULL, 0xffffffff00000000ULL}, {0, 0}},
2076 {U64, S64, {0, 0xffffffffULL}, {1, 1}},
2077 {U64, S64, {0, 0xffffffffULL}, {0x7fffffff, 0x7fffffff}},
2094 {S64, S64, {0xffffffffffffffffULL, 0}, {0xffffffff00000000ULL, 0xffffffff00000000ULL}},
2099 /* longer convergence case: learning from u64 -> s64 -> u64 -> u32,
2102 {S64, U64, {0xffffffff00000001ULL, 0}, {0xffffffff00000000ULL, 0xffffffff00000000ULL}},
2109 {S32, S64, {(u32)S32_MIN, (u32)(s32)-255}, {(u32)(s32)-2, 0}},
2110 {S32, S64, {0, 1}, {(u32)S32_MIN, (u32)S32_MIN}},
2116 {S64, U64, {S64_MIN, 0}, {S64_MIN, S64_MIN}},
2117 {S64, U64, {S64_MIN, 0}, {0, 0}},
2118 {S64, U64, {S64_MIN, S64_MAX}, {S64_MAX, S64_MAX}},
2124 {S64, U32, {0x0, 0x1f}, {0xffffffff80000000ULL, 0x000000007fffffffULL}},
2125 {S64, U32, {0x0, 0x1f}, {0xffffffffffff8000ULL, 0x0000000000007fffULL}},
2126 {S64, U32, {0x0, 0x1f}, {0xffffffffffffff80ULL, 0x000000000000007fULL}},
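
The crafted_cases entries at lines 2069-2126 each name the domain used to set up the initial range, the domain the conditional comparison is done in, and the two operand ranges as raw u64 payloads. A sketch of the table shape those initializers imply; the struct and field names are assumptions, and the two sample rows are copied from lines 2069 and 2126:

    typedef unsigned long long u64;

    enum num_t { U64, first_t = U64, U32, S64, S32, last_t = S32 };

    struct range { u64 a, b; };

    /* Assumed shape of a crafted case: which domain seeds the initial known
     * range, which domain the conditional jump compares in, and the two
     * operand ranges (raw u64 payloads, reinterpreted per domain). */
    struct crafted_case {
        enum num_t init_t;
        enum num_t cond_t;
        struct range x;
        struct range y;
    };

    static struct crafted_case crafted_cases[] = {
        {U64, S64, {0, 1}, {1, 0x80000000}},                                      /* line 2069 */
        {S64, U32, {0x0, 0x1f}, {0xffffffffffffff80ULL, 0x000000000000007fULL}},  /* line 2126 */
    };
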