Lines Matching refs:range

218 struct range {
222 static void snprintf_range(enum num_t t, struct strbuf *sb, struct range x)
234 static void print_range(enum num_t t, struct range x, const char *sfx)
242 static const struct range unkn[] = {
249 static struct range unkn_subreg(enum num_t t)
260 static struct range range(enum num_t t, u64 a, u64 b)
263 case U64: return (struct range){ (u64)a, (u64)b };
264 case U32: return (struct range){ (u32)a, (u32)b };
265 case S64: return (struct range){ (s64)a, (s64)b };
266 case S32: return (struct range){ (u32)(s32)a, (u32)(s32)b };
267 default: printf("range!\n"); exit(1);
276 static bool range_eq(struct range x, struct range y)
281 static struct range range_cast_to_s32(struct range x)
286 * s32 range to be correct
289 return range(S32, a, b);
293 * 0x00000000 is also valid), while lower bits form a proper s32 range
297 * over full 64-bit numbers range will form a proper [-16, 16]
298 * ([0xffffff00; 0x00000010]) range in its lower 32 bits.
301 return range(S32, a, b);
307 static struct range range_cast_u64(enum num_t to_t, struct range x)
317 return range(U32, a, b);
321 return range(S64, a, b);
328 static struct range range_cast_s64(enum num_t to_t, struct range x)
337 return range(U64, a, b);
341 return range(U32, a, b);
350 static struct range range_cast_u32(enum num_t to_t, struct range x)
358 return range(to_t, a, b);
362 return range_cast_to_s32(range(U32, a, b));
367 static struct range range_cast_s32(enum num_t to_t, struct range x)
377 return range(to_t, a, b);
384 /* Reinterpret range in *from_t* domain as a range in *to_t* domain preserving
385 * all possible information. Worst case, it will be unknown range within
389 static struct range range_cast(enum num_t from_t, enum num_t to_t, struct range from)
411 static bool is_valid_range(enum num_t t, struct range x)
425 static struct range range_improve(enum num_t t, struct range old, struct range new)
427 return range(t, max_t(t, old.a, new.a), min_t(t, old.b, new.b));
430 static struct range range_refine(enum num_t x_t, struct range x, enum num_t y_t, struct range y)
432 struct range y_cast;
436 /* the case when new range knowledge, *y*, is a 32-bit subregister
437 * range, while previous range knowledge, *x*, is a full register
438 * 64-bit range, needs special treatment to take into account upper 32
439 * bits of full register range
442 struct range x_swap;
449 x_swap = range(x_t, swap_low32(x.a, y_cast.a), swap_low32(x.b, y_cast.b));
455 /* otherwise, plain range cast and intersection works */
491 /* Can register with range [x.a, x.b] *EVER* satisfy
493 * a register with range [y.a, y.b]
496 static bool range_canbe_op(enum num_t t, struct range x, struct range y, enum op op)
520 /* Does register with range [x.a, x.b] *ALWAYS* satisfy
522 * a register with range [y.a, y.b]
525 static bool range_always_op(enum num_t t, struct range x, struct range y, enum op op)
531 /* Does register with range [x.a, x.b] *NEVER* satisfy
533 * a register with range [y.a, y.b]
536 static bool range_never_op(enum num_t t, struct range x, struct range y, enum op op)
546 static int range_branch_taken_op(enum num_t t, struct range x, struct range y, enum op op)
562 static void range_cond(enum num_t t, struct range x, struct range y,
563 enum op op, struct range *newx, struct range *newy)
573 *newx = range(t, x.a, min_t(t, x.b, y.b - 1));
574 *newy = range(t, max_t(t, x.a + 1, y.a), y.b);
577 *newx = range(t, x.a, min_t(t, x.b, y.b));
578 *newy = range(t, max_t(t, x.a, y.a), y.b);
581 *newx = range(t, max_t(t, x.a, y.a + 1), x.b);
582 *newy = range(t, y.a, min_t(t, x.b - 1, y.b));
585 *newx = range(t, max_t(t, x.a, y.a), x.b);
586 *newy = range(t, y.a, min_t(t, x.b, y.b));
589 *newx = range(t, max_t(t, x.a, y.a), min_t(t, x.b, y.b));
590 *newy = range(t, max_t(t, x.a, y.a), min_t(t, x.b, y.b));
596 *newx = range(t, x.a, x.b);
597 *newy = range(t, y.a + 1, y.b);
600 *newx = range(t, x.a, x.b);
601 *newy = range(t, y.a, y.b - 1);
604 *newx = range(t, x.a + 1, x.b);
605 *newy = range(t, y.a, y.b);
608 *newx = range(t, x.a, x.b - 1);
609 *newy = range(t, y.a, y.b);
612 *newx = range(t, x.a, x.b);
613 *newy = range(t, y.a, y.b);
627 struct range r[4]; /* indexed by enum num_t: U64, U32, S64, S32 */
652 static void print_refinement(enum num_t s_t, struct range src,
653 enum num_t d_t, struct range old, struct range new,
664 static void reg_state_refine(struct reg_state *r, enum num_t t, struct range x, const char *ctx)
667 struct range old;
671 /* try to derive new knowledge from just learned range x of type t */
708 rs->r[tt] = tt == t ? range(t, val, val) : unkn[tt];
720 struct range z1, z2;
807 /* whether to establish initial value range on full register (r1) or
811 /* whether to establish initial value range using signed or unsigned
824 static int load_range_cmp_prog(struct range x, struct range y, enum op op,
875 /* ; setup initial r6/w6 possible value range ([x.a, x.b])
896 /* ; setup initial r7/w7 possible value range ([y.a, y.b])
917 /* ; range test instruction
1003 * - range: R6_rwD=scalar(id=1,...), where "..." is a comma-separated
1004 * list of optional range specifiers:
1064 reg->r[t] = range(t, sval, sval);
1169 static bool assert_range_eq(enum num_t t, struct range x, struct range y,
1230 /* Simulate provided test case purely with our own range-based logic.
1235 struct range x, struct range y, enum op op,
1254 struct range z = t_is_32(init_t) ? unkn_subreg(t) : unkn[t];
1333 struct range *uranges, *sranges;
1334 struct range *usubranges, *ssubranges;
1354 struct range x;
1355 struct range y;
1373 struct range x, struct range y, enum op op)
1439 struct range x, struct range y, bool is_subtest)
1493 struct range x, struct range y)
1615 snprintf_range(U64, sb1, range(U64, ctx->uvals[i], ctx->uvals[j]));
1616 snprintf_range(S64, sb2, range(S64, ctx->svals[i], ctx->svals[j]));
1634 ctx->uranges[cnt] = range(U64, ctx->uvals[i], ctx->uvals[j]);
1635 ctx->sranges[cnt] = range(S64, ctx->svals[i], ctx->svals[j]);
1648 snprintf_range(U32, sb1, range(U32, ctx->usubvals[i], ctx->usubvals[j]));
1649 snprintf_range(S32, sb2, range(S32, ctx->ssubvals[i], ctx->ssubvals[j]));
1667 ctx->usubranges[cnt] = range(U32, ctx->usubvals[i], ctx->usubvals[j]);
1668 ctx->ssubranges[cnt] = range(S32, ctx->ssubvals[i], ctx->ssubvals[j]);
1740 struct range rconst;
1741 const struct range *ranges;
1761 rconst = range(init_t, vals[i], vals[i]);
1763 /* (u64|s64)(<range> x <const>) */
1766 /* (u64|s64)(<const> x <range>) */
1779 struct range rconst;
1780 const struct range *ranges;
1800 rconst = range(init_t, vals[i], vals[i]);
1802 /* (u32|s32)(<range> x <const>) */
1805 /* (u32|s32)(<const> x <range>) */
1818 const struct range *ranges;
1857 /* (<range> x <range>) */
1878 /* RANGE x CONST, U64 initial range */
1883 /* RANGE x CONST, S64 initial range */
1888 /* RANGE x CONST, U32 initial range */
1893 /* RANGE x CONST, S32 initial range */
1899 /* RANGE x RANGE, U64 initial range */
1904 /* RANGE x RANGE, S64 initial range */
1909 /* RANGE x RANGE, U32 initial range */
1914 /* RANGE x RANGE, S32 initial range */
1940 static struct range rand_range(enum num_t t)
1944 return range(t, min_t(t, x, y), max_t(t, x, y));
1950 struct range range1, range2;
1980 range2 = range(init_t, t, t);
1999 /* [RANDOM] RANGE x CONST, U64 initial range */
2004 /* [RANDOM] RANGE x CONST, S64 initial range */
2009 /* [RANDOM] RANGE x CONST, U32 initial range */
2014 /* [RANDOM] RANGE x CONST, S32 initial range */
2020 /* [RANDOM] RANGE x RANGE, U64 initial range */
2021 void test_reg_bounds_rand_ranges_u64_u64(void) { validate_rand_ranges(U64, U64, false /* range */); }
2022 void test_reg_bounds_rand_ranges_u64_s64(void) { validate_rand_ranges(U64, S64, false /* range */); }
2023 void test_reg_bounds_rand_ranges_u64_u32(void) { validate_rand_ranges(U64, U32, false /* range */); }
2024 void test_reg_bounds_rand_ranges_u64_s32(void) { validate_rand_ranges(U64, S32, false /* range */); }
2025 /* [RANDOM] RANGE x RANGE, S64 initial range */
2026 void test_reg_bounds_rand_ranges_s64_u64(void) { validate_rand_ranges(S64, U64, false /* range */); }
2027 void test_reg_bounds_rand_ranges_s64_s64(void) { validate_rand_ranges(S64, S64, false /* range */); }
2028 void test_reg_bounds_rand_ranges_s64_u32(void) { validate_rand_ranges(S64, U32, false /* range */); }
2029 void test_reg_bounds_rand_ranges_s64_s32(void) { validate_rand_ranges(S64, S32, false /* range */); }
2030 /* [RANDOM] RANGE x RANGE, U32 initial range */
2031 void test_reg_bounds_rand_ranges_u32_u64(void) { validate_rand_ranges(U32, U64, false /* range */); }
2032 void test_reg_bounds_rand_ranges_u32_s64(void) { validate_rand_ranges(U32, S64, false /* range */); }
2033 void test_reg_bounds_rand_ranges_u32_u32(void) { validate_rand_ranges(U32, U32, false /* range */); }
2034 void test_reg_bounds_rand_ranges_u32_s32(void) { validate_rand_ranges(U32, S32, false /* range */); }
2035 /* [RANDOM] RANGE x RANGE, S32 initial range */
2036 void test_reg_bounds_rand_ranges_s32_u64(void) { validate_rand_ranges(S32, U64, false /* range */); }
2037 void test_reg_bounds_rand_ranges_s32_s64(void) { validate_rand_ranges(S32, S64, false /* range */); }
2038 void test_reg_bounds_rand_ranges_s32_u32(void) { validate_rand_ranges(S32, U32, false /* range */); }
2039 void test_reg_bounds_rand_ranges_s32_s32(void) { validate_rand_ranges(S32, S32, false /* range */); }
2080 /* verifier knows about [-1, 0] range for s32 for this case already */