Lines Matching defs:rl
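Every match below manipulates an ntfs-3g runlist: an array of runlist_element entries mapping ranges of an attribute's virtual cluster numbers (VCNs) to logical cluster numbers (LCNs) on the volume, terminated by an element whose length is 0. Negative lcn values are markers rather than real clusters. A stand-in declaration for reading the fragments; the field names and marker values mirror ntfs-3g's runlist layer as far as I know, but this is an illustrative sketch, not the library header:

    typedef long long VCN;    /* virtual cluster number within an attribute */
    typedef long long LCN;    /* logical cluster number on the volume */

    /* Stand-in for ntfs-3g's runlist_element: one contiguous run of clusters. */
    typedef struct {
        VCN vcn;               /* first VCN covered by this run */
        LCN lcn;               /* first backing LCN, or a negative marker */
        long long length;      /* run length in clusters; 0 terminates the list */
    } runlist_element;
    typedef runlist_element runlist;

    /* Negative lcn markers seen in the matches below. */
    #define LCN_HOLE          ((LCN)-1)   /* sparse run, no clusters allocated */
    #define LCN_RL_NOT_MAPPED ((LCN)-2)   /* mapping pairs not decoded yet */
    #define LCN_ENOENT        ((LCN)-3)   /* beyond the end of the runlist */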

151 	runlist *rl;
198 rl = ntfs_mapping_pairs_decompress(vol, a, NULL);
199 if (!rl) {
215 for (i = 0, total = 0; rl[i].length; i++) {
216 if (total + (rl[i].length << vol->cluster_size_bits) >=
226 * rl[i].length << vol->cluster_size_bits, do the
241 if (rl[i].length < intlth)
242 intlth = rl[i].length;
246 free(rl);
261 rl[i].lcn << vol->cluster_size_bits,
277 free(rl);
297 r = ntfs_pread(vol->dev, rl[i].lcn << vol->cluster_size_bits,
298 rl[i].length << vol->cluster_size_bits,
300 if (r != rl[i].length << vol->cluster_size_bits) {
304 else if (r < rl[i].length << vol->cluster_size_bits) {
312 free(rl);
317 free(rl);
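The fragments from lines 198–317 show a recurring pattern in this file: decode the on-disk mapping pairs into a runlist (ntfs_mapping_pairs_decompress), then walk the runs, converting clusters to byte positions with the cluster_size_bits shift and reading each run with ntfs_pread. A compressed sketch of that walk, using the stand-in type above and a hypothetical pread_fn callback in place of the real device read; error handling is trimmed:

    /* Hypothetical device-read callback standing in for ntfs_pread(). */
    typedef long long (*pread_fn)(void *dev, long long pos, long long count, void *buf);

    /* Read the data described by @rl into @buf (assumed large enough and
     * zero-filled, so holes can simply be skipped). Returns bytes read,
     * or -1 on a short or failed device read. */
    static long long read_runlist(void *dev, const runlist_element *rl,
                                  unsigned cluster_size_bits,
                                  pread_fn do_pread, char *buf)
    {
        long long total = 0;
        int i;

        for (i = 0; rl[i].length; i++) {          /* length == 0 terminates */
            long long bytes = rl[i].length << cluster_size_bits;

            if (rl[i].lcn < 0) {                  /* hole or unmapped: no disk data */
                total += bytes;
                continue;
            }
            if (do_pread(dev, rl[i].lcn << cluster_size_bits, bytes,
                         buf + total) != bytes)
                return -1;
            total += bytes;
        }
        return total;
    }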
336 na->rl = NULL;
593 if (NAttrNonResident(na) && na->rl)
594 free(na->rl);
619 lcn = ntfs_rl_vcn_to_lcn(na->rl, vcn);
630 runlist_element *rl;
633 rl = ntfs_mapping_pairs_decompress(na->ni->vol, ctx->attr,
634 na->rl);
635 if (rl) {
636 na->rl = rl;
660 runlist_element *rl;
681 rl = (runlist_element*)NULL;
690 if (ntfs_rl_vcn_to_lcn(na->rl, needed)
692 rl = ntfs_mapping_pairs_decompress(na->ni->vol,
693 a, na->rl);
696 rl = na->rl;
697 if (rl) {
698 na->rl = rl;
705 rl = (runlist_element*)NULL;
717 } while (rl && !done && (needed < last_vcn));
727 rl = (runlist_element*)NULL;
731 if (rl && startseen)
733 return (rl ? 0 : -1);
775 runlist_element *rl;
778 if (ntfs_rl_vcn_to_lcn(na->rl, next_vcn) == LCN_RL_NOT_MAPPED)
789 rl = ntfs_mapping_pairs_decompress(na->ni->vol,
790 a, na->rl);
791 if (!rl)
793 na->rl = rl;
863 * cluster number (lcn) of a device using the runlist @na->rl to map vcns to
890 lcn = ntfs_rl_vcn_to_lcn(na->rl, vcn);
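ntfs_rl_vcn_to_lcn(), called at lines 619, 690, 778, 890 and 6025, resolves one VCN through a runlist and hands back the negative markers when it cannot. A behavioural sketch of what those call sites rely on (my own reimplementation for illustration, not the library source; it assumes @vcn is not below rl[0].vcn):

    /* Map @vcn to an LCN through @rl. Returns the mapped LCN, LCN_HOLE for a
     * sparse run, LCN_RL_NOT_MAPPED when the range has not been decoded yet,
     * or LCN_ENOENT when @vcn lies beyond the end of the runlist. */
    static LCN rl_vcn_to_lcn(const runlist_element *rl, VCN vcn)
    {
        if (!rl)
            return LCN_RL_NOT_MAPPED;
        for (; rl->length; rl++) {
            if (vcn < rl->vcn + rl->length) {
                if (rl->lcn >= 0)
                    return rl->lcn + (vcn - rl->vcn);
                return rl->lcn;        /* pass the negative marker through */
            }
        }
        return LCN_ENOENT;
    }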
930 runlist_element *rl;
942 rl = na->rl;
943 if (!rl)
945 if (vcn < rl[0].vcn)
947 while (rl->length) {
948 if (vcn < rl[1].vcn) {
949 if (rl->lcn >= (LCN)LCN_HOLE)
950 return rl;
953 rl++;
955 switch (rl->lcn) {
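Lines 942–955 are the heart of ntfs_attr_find_vcn(): a linear scan for the element covering a requested VCN, where holes count as covered but LCN_RL_NOT_MAPPED does not. A self-contained sketch of that lookup with the stand-in type from the top:

    #include <stddef.h>

    /* Return the element of @rl covering @vcn, or NULL if it is outside the
     * runlist or falls in a range that still needs mapping. */
    static runlist_element *find_vcn(runlist_element *rl, VCN vcn)
    {
        if (!rl || vcn < rl[0].vcn)
            return NULL;
        while (rl->length) {
            if (vcn < rl[1].vcn) {                 /* @vcn falls inside this run */
                if (rl->lcn >= (LCN)LCN_HOLE)      /* real run or hole */
                    return rl;
                break;                             /* not mapped yet: caller must map */
            }
            rl++;
        }
        return NULL;
    }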
993 runlist_element *rl;
1108 rl = ntfs_attr_find_vcn(na, pos >> vol->cluster_size_bits);
1109 if (!rl) {
1126 ofs = pos - (rl->vcn << vol->cluster_size_bits);
1127 for (; count; rl++, ofs = 0) {
1128 if (rl->lcn == LCN_RL_NOT_MAPPED) {
1129 rl = ntfs_attr_find_vcn(na, rl->vcn);
1130 if (!rl) {
1139 ofs = pos + total - (rl->vcn << vol->cluster_size_bits);
1141 if (!rl->length) {
1146 if (rl->lcn < (LCN)0) {
1147 if (rl->lcn != (LCN)LCN_HOLE) {
1150 (long long)rl->lcn);
1154 to_read = min(count, (rl->length <<
1164 to_read = min(count, (rl->length << vol->cluster_size_bits) -
1168 " %lld.\n", (long long)to_read, (long long)rl->vcn,
1169 (long long )rl->lcn, (long long)ofs);
1170 br = ntfs_pread(vol->dev, (rl->lcn << vol->cluster_size_bits) +
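Lines 1108–1170 come from the ntfs_attr_pread() loop: locate the run covering the start position, then for each run re-map it if it is still LCN_RL_NOT_MAPPED, return zeros for holes, and clamp the device read to what remains of the run. A simplified sketch of one iteration, reusing pread_fn from the earlier sketch (stand-in types, no error handling):

    #include <string.h>

    /* One step of the read loop: @ofs is the byte offset into the current run.
     * Returns the number of bytes produced into @buf. */
    static long long read_step(void *dev, const runlist_element *rl, long long ofs,
                               long long count, unsigned cluster_size_bits,
                               pread_fn do_pread, char *buf)
    {
        long long run_bytes = rl->length << cluster_size_bits;
        long long to_read = count < run_bytes - ofs ? count : run_bytes - ofs;

        if (rl->lcn == LCN_HOLE) {           /* sparse run reads back as zeros */
            memset(buf, 0, to_read);
            return to_read;
        }
        /* ordinary run: read from (lcn << bits) + ofs on the device */
        return do_pread(dev, (rl->lcn << cluster_size_bits) + ofs, to_read, buf);
    }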
1261 rli = na->rl;
1302 runlist_element **rl, VCN *update_from)
1323 to_write = min(count, ((*rl)->length << vol->cluster_size_bits) - *ofs);
1325 cur_vcn = (*rl)->vcn;
1326 from_vcn = (*rl)->vcn + (*ofs >> vol->cluster_size_bits);
1334 if (!na->rl) {
1339 if ((*rl)->lcn == LCN_HOLE) {
1350 /* Restore @*rl, it probably got lost during runlist mapping. */
1351 *rl = ntfs_attr_find_vcn(na, cur_vcn);
1352 if (!*rl) {
1360 rlc = *rl;
1379 rlc = *rl;
1392 + 1 + (*rl)->vcn - from_vcn;
1404 if ((from_vcn & -na->compression_block_clusters) <= (*rl)->vcn)
1405 alloc_vcn = (*rl)->vcn;
1410 if (need > (*rl)->length) {
1414 (long long)(*rl)->length);
1428 *rl = ntfs_runlists_merge(na->rl, rlc);
1434 if (*rl && (na->data_flags & ATTR_COMPRESSION_MASK)) {
1435 runlist_element *oldrl = na->rl;
1436 na->rl = *rl;
1437 *rl = ntfs_rl_extend(na,*rl,2);
1438 if (!*rl) na->rl = oldrl; /* restore to original if failed */
1440 if (!*rl) {
1451 na->rl = *rl;
1454 *rl = ntfs_attr_find_vcn(na, cur_vcn);
1455 if (!*rl) {
1466 if ((*rl)->lcn < 0)
1467 (*rl)++;
1469 if ((*rl)->lcn < 0) {
1481 if ((*rl)->vcn < cur_vcn) {
1486 *ofs += (cur_vcn - (*rl)->vcn) << vol->cluster_size_bits;
1488 if ((*rl)->vcn > cur_vcn) {
1492 *ofs -= ((*rl)->vcn - cur_vcn) << vol->cluster_size_bits;
1522 runlist_element *rl = *prl;
1527 if (rl->length > na->compression_block_clusters) {
1532 rl = *prl;
1536 if (*prl && (rl->length > na->compression_block_clusters)) {
1541 int beginwrite = (pos >> cluster_size_bits) - rl->vcn;
1543 | (na->compression_block_clusters - 1)) + 1 - rl->vcn;
1546 - (rl->length & (na->compression_block_clusters - 1));
1553 if (endblock < rl[0].length) {
1568 xrl = rl;
1577 } while (xrl != rl);
1578 rl[1].length = na->compression_block_clusters;
1579 rl[2].length = rl[0].length - endblock;
1580 rl[0].length = endblock
1582 rl[1].lcn = LCN_HOLE;
1583 rl[2].lcn = LCN_HOLE;
1584 rl[1].vcn = rl[0].vcn + rl[0].length;
1585 rl[2].vcn = rl[1].vcn
1587 rl = ++(*prl);
1597 xrl = rl;
1606 } while (xrl != rl);
1609 rl[1].length = rl[0].length - endblock;
1610 rl[0].length = endblock;
1611 rl[1].vcn = rl[0].vcn + rl[0].length;
1612 rl[1].lcn = LCN_HOLE;
1616 rl[1].length = rl[0].length - endblock;
1617 rl[0].length = endblock;
1618 rl[1].vcn = rl[0].vcn + rl[0].length;
1619 rl[1].lcn = LCN_HOLE;
1620 rl = ++(*prl);
1624 if (rl[1].length) {
1636 xrl = rl;
1645 } while (xrl != rl);
1647 rl[2].lcn = rl[1].lcn;
1648 rl[2].vcn = rl[1].vcn;
1649 rl[2].length = rl[1].length;
1651 rl[1].vcn -= na->compression_block_clusters;
1652 rl[1].lcn = LCN_HOLE;
1653 rl[1].length = na->compression_block_clusters;
1654 rl[0].length -= na->compression_block_clusters;
1655 if (pos >= (rl[1].vcn << cluster_size_bits)) {
1656 rl = ++(*prl);
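Lines 1527–1656 carve an oversized run into compression-block-sized pieces: the tail of the runlist is shifted up to open a slot (the xrl do/while loops), then the vcn, length and LCN_HOLE fields of the new pieces are filled in. A minimal sketch of the underlying move, splitting one run in two at @split_vcn and marking the tail a hole; it assumes the array already has a spare slot, which the real code guarantees with ntfs_rl_extend():

    /* Split the run at @rl into [vcn, split_vcn) and [split_vcn, old end),
     * marking the second piece as a hole. @split_vcn must lie strictly
     * inside the run; one spare slot must exist after the terminator. */
    static void split_run_at(runlist_element *rl, VCN split_vcn)
    {
        runlist_element *xrl;

        for (xrl = rl; xrl->length; xrl++)   /* find the terminator */
            ;
        for ( ; xrl > rl; xrl--)             /* open a gap at rl[1] */
            xrl[1] = xrl[0];

        rl[1].vcn = split_vcn;
        rl[1].lcn = LCN_HOLE;
        rl[1].length = rl[0].vcn + rl[0].length - split_vcn;
        rl[0].length = split_vcn - rl[0].vcn;
    }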
1684 runlist_element *rl = *prl;
1693 endblock = (((pos + count - 1) >> cluster_size_bits) | (na->compression_block_clusters - 1)) + 1 - rl->vcn;
1695 zrl = rl;
1707 runlist_element *orl = na->rl;
1717 irl = *prl - na->rl;
1730 rl = (runlist_element*)NULL;
1739 if ((na->rl != orl) || ((*prl)->lcn != olcn)) {
1740 zrl = &na->rl[irl];
1750 rl = (runlist_element*)NULL;
1752 rl = ntfs_rl_extend(na,*prl,2);
1756 *prl = rl;
1757 if (rl && undecided) {
1759 zrl = rl;
1773 if (rl && (allocated < endblock) && (zrl->lcn == LCN_HOLE)) {
1784 /* beware : rl was reallocated */
1785 rl = *prl;
1786 zrl = &rl[irl];
1856 runlist_element *rl;
2043 na->rl = ntfs_rl_extend(na,na->rl,2);
2044 if (!na->rl)
2105 rl = ntfs_attr_find_vcn(na, pos >> vol->cluster_size_bits);
2106 if (!rl) {
2135 if ((rl->lcn == (LCN)LCN_HOLE)
2137 if (rl->length < na->compression_block_clusters)
2145 - rl->length;
2153 &rl, pos, count, &update_from);
2156 if (rl->lcn >= 0) {
2163 &rl, pos, count, &update_from,
2176 ofs = pos - (rl->vcn << vol->cluster_size_bits);
2182 for (hole_end = 0; count; rl++, ofs = 0) {
2183 if (rl->lcn == LCN_RL_NOT_MAPPED) {
2184 rl = ntfs_attr_find_vcn(na, rl->vcn);
2185 if (!rl) {
2194 ofs = pos + total - (rl->vcn << vol->cluster_size_bits);
2196 if (!rl->length) {
2201 if (rl->lcn < (LCN)0) {
2202 hole_end = rl->vcn + rl->length;
2204 if (rl->lcn != (LCN)LCN_HOLE) {
2208 (long long)rl->lcn);
2211 if (ntfs_attr_fill_hole(na, fullcount, &ofs, &rl,
2216 while (rl->length
2217 && (ofs >= (rl->length << vol->cluster_size_bits))) {
2218 ofs -= rl->length << vol->cluster_size_bits;
2219 rl++;
2224 to_write = min(count, (rl->length << vol->cluster_size_bits) - ofs);
2227 "%lld.\n", (long long)to_write, (long long)rl->vcn,
2228 (long long)rl->lcn, (long long)ofs);
2231 s64 wpos = (rl->lcn << vol->cluster_size_bits) + ofs;
2232 s64 wend = (rl->vcn << vol->cluster_size_bits) + ofs + to_write;
2262 rl, wpos, ofs, to_write,
2276 rl, wpos, ofs, to_write,
2434 runlist_element *rl;
2480 na->rl = ntfs_rl_extend(na,na->rl,2);
2481 if (!na->rl)
2485 rl = ntfs_attr_find_vcn(na, (na->initialized_size - 1) >> vol->cluster_size_bits);
2486 if (!rl) {
2504 if (rl->lcn >= 0) {
2507 xrl = rl;
2514 if (rl->lcn == (LCN)LCN_HOLE) {
2515 if (rl->length < na->compression_block_clusters)
2518 - rl->length;
2527 ofs = na->initialized_size - (rl->vcn << vol->cluster_size_bits);
2529 if (rl->lcn == LCN_RL_NOT_MAPPED) {
2530 rl = ntfs_attr_find_vcn(na, rl->vcn);
2531 if (!rl) {
2540 ofs = na->initialized_size - (rl->vcn << vol->cluster_size_bits);
2542 if (!rl->length) {
2547 if (rl->lcn < (LCN)0) {
2548 if (rl->lcn != (LCN)LCN_HOLE) {
2552 (long long)rl->lcn);
2556 if (ntfs_attr_fill_hole(na, (s64)0, &ofs, &rl, &update_from))
2559 while (rl->length
2560 && (ofs >= (rl->length << vol->cluster_size_bits))) {
2561 ofs -= rl->length << vol->cluster_size_bits;
2562 rl++;
2569 failed = ntfs_compressed_close(na, rl, ofs, &update_from);
5111 runlist *rl;
5141 rl = ntfs_cluster_alloc(vol, 0, new_allocated_size >>
5143 if (!rl)
5146 rl = NULL;
5153 na->rl = rl;
5169 if (rl) {
5184 mp_size = ntfs_get_size_for_mapping_pairs(vol, rl, 0, INT_MAX);
5254 rl, 0, NULL) < 0) {
5267 if (rl && ntfs_cluster_free(vol, na, 0, -1) < 0)
5273 na->rl = NULL;
5274 free(rl);
5722 bytes_read = ntfs_rl_pread(vol, na->rl, 0, na->initialized_size,
5753 free(na->rl);
5754 na->rl = NULL;
5789 sparse = ntfs_rl_sparse(na->rl);
5871 new_compr_size = ntfs_rl_get_compressed_size(na->ni->vol, na->rl);
5914 if (!na || !na->rl) {
5951 for (xrl = na->rl; xrl->length; xrl++) {
6001 stop_rl = na->rl;
6025 first_lcn = ntfs_rl_vcn_to_lcn(na->rl, stop_vcn);
6140 a->mapping_pairs_offset), mp_size, na->rl,
6221 na->rl, stop_vcn, INT_MAX);
6265 le16_to_cpu(a->mapping_pairs_offset), mp_size, na->rl,
6302 * Build mapping pairs from @na->rl and write them to the disk. Also, this
6404 if (ntfs_rl_truncate(&na->rl, first_free_vcn)) {
6409 free(na->rl);
6410 na->rl = NULL;
6506 runlist *rl, *rln;
6566 rl = ntfs_malloc(0x1000);
6567 if (!rl)
6570 rl[0].vcn = (na->allocated_size >>
6572 rl[0].lcn = LCN_HOLE;
6573 rl[0].length = first_free_vcn -
6575 rl[1].vcn = first_free_vcn;
6576 rl[1].lcn = LCN_ENOENT;
6577 rl[1].length = 0;
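Lines 6570–6577 build a small standalone runlist describing the newly added space as a hole, ready to be merged into na->rl by ntfs_runlists_merge() (line 6620): one LCN_HOLE entry followed by an LCN_ENOENT terminator. A sketch with the stand-in types (new_hole_rl() is an illustrative helper, not a library call; the real code over-allocates the buffer so the later merge has room):

    #include <stdlib.h>

    /* Build a freestanding two-element runlist covering [start_vcn, end_vcn)
     * as a hole. Returns NULL if the allocation fails. */
    static runlist_element *new_hole_rl(VCN start_vcn, VCN end_vcn)
    {
        runlist_element *rl = malloc(2 * sizeof(*rl));

        if (!rl)
            return NULL;
        rl[0].vcn = start_vcn;
        rl[0].lcn = LCN_HOLE;
        rl[0].length = end_vcn - start_vcn;
        rl[1].vcn = end_vcn;               /* terminator */
        rl[1].lcn = LCN_ENOENT;
        rl[1].length = 0;
        return rl;
    }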
6587 if (na->rl->length) {
6589 for (rl = na->rl; (rl + 1)->length; rl++)
6595 while (rl->lcn < 0 && rl != na->rl)
6596 rl--;
6600 if (rl->lcn >= 0)
6601 lcn_seek_from = rl->lcn + rl->length;
6604 rl = ntfs_cluster_alloc(vol, na->allocated_size >>
6609 if (!rl) {
6620 rln = ntfs_runlists_merge(na->rl, rl);
6625 ntfs_cluster_free_from_rl(vol, rl);
6626 free(rl);
6630 na->rl = rln;
6707 if (ntfs_rl_truncate(&na->rl, org_alloc_size >>
6713 free(na->rl);
6714 na->rl = NULL;