Lines Matching refs:ltbno
683 xfs_agblock_t ltbno; /* start bno of left side entry */
715 if ((error = xfs_alloc_ag_vextent_small(args, cnt_cur, &ltbno,
754 if ((error = xfs_alloc_get_rec(cnt_cur, &ltbno,
775 if ((error = xfs_alloc_get_rec(cnt_cur, &ltbno, &ltlen, &i)))
778 xfs_alloc_compute_aligned(ltbno, ltlen, args->alignment,
788 args->alignment, ltbno, ltlen, &ltnew);
807 if ((error = xfs_alloc_get_rec(cnt_cur, &ltbno, &ltlen, &i)))
810 ASSERT(ltbno + ltlen <= be32_to_cpu(XFS_BUF_TO_AGF(args->agbp)->agf_length));
822 ASSERT(bnew >= ltbno);
823 ASSERT(bnew + blen <= ltbno + ltlen);
832 if ((error = xfs_alloc_fixup_trees(cnt_cur, bno_cur_lt, ltbno,
896 if ((error = xfs_alloc_get_rec(bno_cur_lt, &ltbno, &ltlen, &i)))
899 xfs_alloc_compute_aligned(ltbno, ltlen, args->alignment,
943 args->alignment, ltbno, ltlen, &ltnew);
1051 bno_cur_lt, &ltbno,
1055 xfs_alloc_compute_aligned(ltbno, ltlen,
1080 ltbno, ltlen, &ltnew);
1142 ltbno = gtbno;
1161 (void)xfs_alloc_compute_diff(args->agbno, rlen, args->alignment, ltbno,
1163 ASSERT(ltnew >= ltbno);
1164 ASSERT(ltnew + rlen <= ltbno + ltlen);
1167 if ((error = xfs_alloc_fixup_trees(cnt_cur, bno_cur_lt, ltbno, ltlen,
1451 xfs_agblock_t ltbno; /* start of left neighbor block */
1473 if ((error = xfs_alloc_get_rec(bno_cur, &ltbno, &ltlen, &i)))
1479 if (ltbno + ltlen < bno)
1487 XFS_WANT_CORRUPTED_GOTO(ltbno + ltlen <= bno, error0);
1529 if ((error = xfs_alloc_lookup_eq(cnt_cur, ltbno, ltlen, &i)))
1569 i == 1 && xxbno == ltbno && xxlen == ltlen,
1576 nbno = ltbno;
1589 if ((error = xfs_alloc_lookup_eq(cnt_cur, ltbno, ltlen, &i)))
1602 nbno = ltbno;