Lines Matching defs:rm (references to the raidz_map_t *rm in vdev_raidz.c)

248 static void vdev_raidz_generate_parity(raidz_map_t *rm);
270 vdev_raidz_map_free(raidz_map_t *rm)
275 for (c = 0; c < rm->rm_firstdatacol; c++) {
276 if (rm->rm_col[c].rc_abd != NULL)
277 abd_free(rm->rm_col[c].rc_abd);
279 if (rm->rm_col[c].rc_gdata != NULL)
280 zio_buf_free(rm->rm_col[c].rc_gdata,
281 rm->rm_col[c].rc_size);
285 for (c = rm->rm_firstdatacol; c < rm->rm_cols; c++) {
286 if (rm->rm_col[c].rc_abd != NULL)
287 abd_put(rm->rm_col[c].rc_abd);
288 size += rm->rm_col[c].rc_size;
291 if (rm->rm_abd_copy != NULL)
292 abd_free(rm->rm_abd_copy);
294 kmem_free(rm, offsetof(raidz_map_t, rm_col[rm->rm_scols]));
300 raidz_map_t *rm = zio->io_vsd;
302 ASSERT0(rm->rm_freed);
303 rm->rm_freed = 1;
305 if (rm->rm_reports == 0)
306 vdev_raidz_map_free(rm);
313 raidz_map_t *rm = arg;
315 ASSERT3U(rm->rm_reports, >, 0);
317 if (--rm->rm_reports == 0 && rm->rm_freed != 0)
318 vdev_raidz_map_free(rm);
324 raidz_map_t *rm = zcr->zcr_cbdata;
336 if (c < rm->rm_firstdatacol) {
342 if (rm->rm_col[0].rc_gdata == NULL) {
352 for (x = 0; x < rm->rm_firstdatacol; x++) {
353 bad_parity[x] = rm->rm_col[x].rc_abd;
354 rm->rm_col[x].rc_gdata =
355 zio_buf_alloc(rm->rm_col[x].rc_size);
356 rm->rm_col[x].rc_abd =
357 abd_get_from_buf(rm->rm_col[x].rc_gdata,
358 rm->rm_col[x].rc_size);
363 for (; x < rm->rm_cols; x++) {
364 abd_put(rm->rm_col[x].rc_abd);
365 rm->rm_col[x].rc_abd = abd_get_from_buf(buf,
366 rm->rm_col[x].rc_size);
367 buf += rm->rm_col[x].rc_size;
373 vdev_raidz_generate_parity(rm);
376 for (x = 0; x < rm->rm_firstdatacol; x++) {
377 abd_put(rm->rm_col[x].rc_abd);
378 rm->rm_col[x].rc_abd = bad_parity[x];
382 for (x = rm->rm_firstdatacol; x < rm->rm_cols; x++) {
383 abd_put(rm->rm_col[x].rc_abd);
384 rm->rm_col[x].rc_abd = abd_get_offset(
385 rm->rm_abd_copy, offset);
386 offset += rm->rm_col[x].rc_size;
390 ASSERT3P(rm->rm_col[c].rc_gdata, !=, NULL);
391 good = rm->rm_col[c].rc_gdata;
396 for (x = rm->rm_firstdatacol; x < c; x++)
397 good += rm->rm_col[x].rc_size;
400 bad = abd_borrow_buf_copy(rm->rm_col[c].rc_abd, rm->rm_col[c].rc_size);
403 abd_return_buf(rm->rm_col[c].rc_abd, bad, rm->rm_col[c].rc_size);
418 raidz_map_t *rm = zio->io_vsd;
422 zcr->zcr_cbdata = rm;
427 rm->rm_reports++;
428 ASSERT3U(rm->rm_reports, >, 0);
430 if (rm->rm_abd_copy != NULL)
443 for (c = rm->rm_firstdatacol; c < rm->rm_cols; c++)
444 size += rm->rm_col[c].rc_size;
446 rm->rm_abd_copy =
447 abd_alloc_sametype(rm->rm_col[rm->rm_firstdatacol].rc_abd, size);
449 for (offset = 0, c = rm->rm_firstdatacol; c < rm->rm_cols; c++) {
450 raidz_col_t *col = &rm->rm_col[c];
451 abd_t *tmp = abd_get_offset(rm->rm_abd_copy, offset);
475 raidz_map_t *rm;
521 rm = kmem_alloc(offsetof(raidz_map_t, rm_col[scols]), KM_SLEEP);
523 rm->rm_cols = acols;
524 rm->rm_scols = scols;
525 rm->rm_bigcols = bc;
526 rm->rm_skipstart = bc;
527 rm->rm_missingdata = 0;
528 rm->rm_missingparity = 0;
529 rm->rm_firstdatacol = nparity;
530 rm->rm_abd_copy = NULL;
531 rm->rm_reports = 0;
532 rm->rm_freed = 0;
533 rm->rm_ecksuminjected = 0;
544 rm->rm_col[c].rc_devidx = col;
545 rm->rm_col[c].rc_offset = coff;
546 rm->rm_col[c].rc_abd = NULL;
547 rm->rm_col[c].rc_gdata = NULL;
548 rm->rm_col[c].rc_error = 0;
549 rm->rm_col[c].rc_tried = 0;
550 rm->rm_col[c].rc_skipped = 0;
553 rm->rm_col[c].rc_size = 0;
555 rm->rm_col[c].rc_size = (q + 1) << unit_shift;
557 rm->rm_col[c].rc_size = q << unit_shift;
559 asize += rm->rm_col[c].rc_size;
563 rm->rm_asize = roundup(asize, (nparity + 1) << unit_shift);
564 rm->rm_nskip = roundup(tot, nparity + 1) - tot;
565 ASSERT3U(rm->rm_asize - asize, ==, rm->rm_nskip << unit_shift);
566 ASSERT3U(rm->rm_nskip, <=, nparity);
569 for (c = 0; c < rm->rm_firstdatacol; c++) {
570 rm->rm_col[c].rc_abd =
571 abd_alloc_linear(rm->rm_col[c].rc_size, B_TRUE);
574 rm->rm_col[c].rc_abd = abd_get_offset(abd, 0);
575 off = rm->rm_col[c].rc_size;
578 rm->rm_col[c].rc_abd = abd_get_offset(abd, off);
579 off += rm->rm_col[c].rc_size;
603 ASSERT(rm->rm_cols >= 2);
604 ASSERT(rm->rm_col[0].rc_size == rm->rm_col[1].rc_size);
606 if (rm->rm_firstdatacol == 1 && (offset & (1ULL << 20))) {
607 devidx = rm->rm_col[0].rc_devidx;
608 o = rm->rm_col[0].rc_offset;
609 rm->rm_col[0].rc_devidx = rm->rm_col[1].rc_devidx;
610 rm->rm_col[0].rc_offset = rm->rm_col[1].rc_offset;
611 rm->rm_col[1].rc_devidx = devidx;
612 rm->rm_col[1].rc_offset = o;
614 if (rm->rm_skipstart == 0)
615 rm->rm_skipstart = 1;
618 return (rm);
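The map-allocation lines above (544-566) split one I/O across the raidz columns. A minimal user-space sketch of that arithmetic follows, with hypothetical names; it illustrates how the first "big" columns get one extra sector and how skip sectors pad the stripe to a multiple of nparity + 1. It is an illustration only, not the kernel code.

/*
 * Sketch: per-column sizes and skip-sector count for an I/O of "size"
 * bytes over "dcols" children with "nparity" parity columns, where a
 * sector is 1 << unit_shift bytes.  Hypothetical standalone function.
 */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

static void
raidz_geometry(uint64_t size, uint64_t unit_shift, uint64_t dcols,
    uint64_t nparity)
{
	uint64_t s = size >> unit_shift;		/* data sectors */
	uint64_t q = s / (dcols - nparity);		/* full rows */
	uint64_t r = s - q * (dcols - nparity);		/* leftover sectors */
	/* columns holding one extra sector (parity columns included) */
	uint64_t bc = (r == 0 ? 0 : r + nparity);
	/* total sectors written, data plus parity */
	uint64_t tot = s + nparity * (q + (r == 0 ? 0 : 1));
	/* columns actually used by this I/O */
	uint64_t acols = (q == 0 ? bc : dcols);
	uint64_t unit = 1ULL << unit_shift;
	uint64_t asize = 0;

	for (uint64_t c = 0; c < acols; c++) {
		uint64_t colsize = (c < bc ? q + 1 : q) << unit_shift;
		asize += colsize;
		printf("col %llu: %llu bytes\n",
		    (unsigned long long)c, (unsigned long long)colsize);
	}

	/* round the allocation up to a whole number of parity groups */
	uint64_t rm_asize = ((asize + (nparity + 1) * unit - 1) /
	    ((nparity + 1) * unit)) * ((nparity + 1) * unit);
	/* skip sectors that pad the last partial parity group */
	uint64_t nskip = ((tot + nparity) / (nparity + 1)) *
	    (nparity + 1) - tot;

	/* same invariant the kernel asserts on line 565 */
	assert(rm_asize - asize == nskip << unit_shift);

	printf("asize %llu, skip sectors %llu\n",
	    (unsigned long long)rm_asize, (unsigned long long)nskip);
}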
683 vdev_raidz_generate_parity_p(raidz_map_t *rm)
689 for (c = rm->rm_firstdatacol; c < rm->rm_cols; c++) {
690 src = rm->rm_col[c].rc_abd;
691 p = abd_to_buf(rm->rm_col[VDEV_RAIDZ_P].rc_abd);
693 if (c == rm->rm_firstdatacol) {
694 abd_copy_to_buf(p, src, rm->rm_col[c].rc_size);
697 (void) abd_iterate_func(src, 0, rm->rm_col[c].rc_size,
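The P column is a plain XOR of the data columns: the first data column is copied in (line 694) and every later column is folded in by the abd_iterate_func() callback. A byte-level sketch of that fold step, as a hypothetical buffer-based helper rather than the ABD callback itself:

/* XOR one data column into the P parity buffer (sketch only). */
static void
parity_p_xor(uint8_t *p, const uint8_t *data, size_t len)
{
	for (size_t i = 0; i < len; i++)
		p[i] ^= data[i];
}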
704 vdev_raidz_generate_parity_pq(raidz_map_t *rm)
710 pcnt = rm->rm_col[VDEV_RAIDZ_P].rc_size / sizeof (p[0]);
711 ASSERT(rm->rm_col[VDEV_RAIDZ_P].rc_size ==
712 rm->rm_col[VDEV_RAIDZ_Q].rc_size);
714 for (c = rm->rm_firstdatacol; c < rm->rm_cols; c++) {
715 src = rm->rm_col[c].rc_abd;
716 p = abd_to_buf(rm->rm_col[VDEV_RAIDZ_P].rc_abd);
717 q = abd_to_buf(rm->rm_col[VDEV_RAIDZ_Q].rc_abd);
719 ccnt = rm->rm_col[c].rc_size / sizeof (p[0]);
721 if (c == rm->rm_firstdatacol) {
722 abd_copy_to_buf(p, src, rm->rm_col[c].rc_size);
723 (void) memcpy(q, p, rm->rm_col[c].rc_size);
726 (void) abd_iterate_func(src, 0, rm->rm_col[c].rc_size,
730 if (c == rm->rm_firstdatacol) {
748 vdev_raidz_generate_parity_pqr(raidz_map_t *rm)
754 pcnt = rm->rm_col[VDEV_RAIDZ_P].rc_size / sizeof (p[0]);
755 ASSERT(rm->rm_col[VDEV_RAIDZ_P].rc_size ==
756 rm->rm_col[VDEV_RAIDZ_Q].rc_size);
757 ASSERT(rm->rm_col[VDEV_RAIDZ_P].rc_size ==
758 rm->rm_col[VDEV_RAIDZ_R].rc_size);
760 for (c = rm->rm_firstdatacol; c < rm->rm_cols; c++) {
761 src = rm->rm_col[c].rc_abd;
762 p = abd_to_buf(rm->rm_col[VDEV_RAIDZ_P].rc_abd);
763 q = abd_to_buf(rm->rm_col[VDEV_RAIDZ_Q].rc_abd);
764 r = abd_to_buf(rm->rm_col[VDEV_RAIDZ_R].rc_abd);
766 ccnt = rm->rm_col[c].rc_size / sizeof (p[0]);
768 if (c == rm->rm_firstdatacol) {
769 abd_copy_to_buf(p, src, rm->rm_col[c].rc_size);
770 (void) memcpy(q, p, rm->rm_col[c].rc_size);
771 (void) memcpy(r, p, rm->rm_col[c].rc_size);
774 (void) abd_iterate_func(src, 0, rm->rm_col[c].rc_size,
778 if (c == rm->rm_firstdatacol) {
802 vdev_raidz_generate_parity(raidz_map_t *rm)
804 switch (rm->rm_firstdatacol) {
806 vdev_raidz_generate_parity_p(rm);
809 vdev_raidz_generate_parity_pq(rm);
812 vdev_raidz_generate_parity_pqr(rm);
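Q and R weight each data column by successive powers of 2 and 4 in GF(2^8), using the same generator polynomial as standard RAID-6 (0x11d). The kernel does this eight bytes at a time with the VDEV_RAIDZ_64MUL_2/_4 macros; the per-byte equivalent is sketched below with assumed helper names. The Horner-style update (multiply the running parity by the column weight, then XOR in the data) is what the pq/pqr loops above perform per column.

/* Multiply a byte by 2 in GF(2^8), polynomial x^8 + x^4 + x^3 + x^2 + 1. */
static uint8_t
gf_mul2(uint8_t x)
{
	return ((uint8_t)(x << 1) ^ ((x & 0x80) ? 0x1d : 0));
}

/*
 * Fold one more data column into Q and R (sketch): scale the running
 * parity by 2 (Q) or 4 (R), then XOR in the new column's bytes.
 */
static void
parity_qr_step(uint8_t *q, uint8_t *r, const uint8_t *data, size_t len)
{
	for (size_t i = 0; i < len; i++) {
		q[i] = gf_mul2(q[i]) ^ data[i];
		r[i] = gf_mul2(gf_mul2(r[i])) ^ data[i];
	}
}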
936 vdev_raidz_reconstruct_p(raidz_map_t *rm, int *tgts, int ntgts)
943 ASSERT(x >= rm->rm_firstdatacol);
944 ASSERT(x < rm->rm_cols);
946 ASSERT(rm->rm_col[x].rc_size <= rm->rm_col[VDEV_RAIDZ_P].rc_size);
947 ASSERT(rm->rm_col[x].rc_size > 0);
949 src = rm->rm_col[VDEV_RAIDZ_P].rc_abd;
950 dst = rm->rm_col[x].rc_abd;
952 abd_copy(dst, src, rm->rm_col[x].rc_size);
954 for (c = rm->rm_firstdatacol; c < rm->rm_cols; c++) {
955 uint64_t size = MIN(rm->rm_col[x].rc_size,
956 rm->rm_col[c].rc_size);
958 src = rm->rm_col[c].rc_abd;
959 dst = rm->rm_col[x].rc_abd;
972 vdev_raidz_reconstruct_q(raidz_map_t *rm, int *tgts, int ntgts)
980 ASSERT(rm->rm_col[x].rc_size <= rm->rm_col[VDEV_RAIDZ_Q].rc_size);
982 for (c = rm->rm_firstdatacol; c < rm->rm_cols; c++) {
983 uint64_t size = (c == x) ? 0 : MIN(rm->rm_col[x].rc_size,
984 rm->rm_col[c].rc_size);
986 src = rm->rm_col[c].rc_abd;
987 dst = rm->rm_col[x].rc_abd;
989 if (c == rm->rm_firstdatacol) {
991 if (rm->rm_col[x].rc_size > size)
993 rm->rm_col[x].rc_size - size);
995 ASSERT3U(size, <=, rm->rm_col[x].rc_size);
999 size, rm->rm_col[x].rc_size - size,
1004 src = rm->rm_col[VDEV_RAIDZ_Q].rc_abd;
1005 dst = rm->rm_col[x].rc_abd;
1006 exp = 255 - (rm->rm_cols - 1 - x);
1009 (void) abd_iterate_func(dst, 0, rm->rm_col[x].rc_size,
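Single-column recovery from Q, as the loop above does by skipping c == x: recompute a partial Q with the missing column treated as zero, XOR it with the stored Q, and divide out the column's weight 2^k (k being the number of data columns after x, which is what exp = 255 - (rm_cols - 1 - x) encodes). The kernel uses the vdev_raidz_pow2/log2 tables; a slow but self-contained sketch using gf_mul2() from the sketch above:

/*
 * Sketch: rebuild missing column x from Q.  qpart is Q recomputed over
 * the surviving data columns (missing column taken as zero), q is the
 * stored Q column, k is the count of data columns after x.
 */
static void
reconstruct_from_q(uint8_t *dst, const uint8_t *q, const uint8_t *qpart,
    size_t len, unsigned k)
{
	for (size_t i = 0; i < len; i++) {
		uint8_t v = q[i] ^ qpart[i];
		/* divide by 2^k: multiply by 2^(255 - k), since 2^255 == 1 */
		for (unsigned j = 0; j < 255 - k; j++)
			v = gf_mul2(v);
		dst[i] = v;
	}
}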
1016 vdev_raidz_reconstruct_pq(raidz_map_t *rm, int *tgts, int ntgts)
1027 ASSERT(x >= rm->rm_firstdatacol);
1028 ASSERT(y < rm->rm_cols);
1030 ASSERT(rm->rm_col[x].rc_size >= rm->rm_col[y].rc_size);
1039 pdata = rm->rm_col[VDEV_RAIDZ_P].rc_abd;
1040 qdata = rm->rm_col[VDEV_RAIDZ_Q].rc_abd;
1041 xsize = rm->rm_col[x].rc_size;
1042 ysize = rm->rm_col[y].rc_size;
1044 rm->rm_col[VDEV_RAIDZ_P].rc_abd =
1045 abd_alloc_linear(rm->rm_col[VDEV_RAIDZ_P].rc_size, B_TRUE);
1046 rm->rm_col[VDEV_RAIDZ_Q].rc_abd =
1047 abd_alloc_linear(rm->rm_col[VDEV_RAIDZ_Q].rc_size, B_TRUE);
1048 rm->rm_col[x].rc_size = 0;
1049 rm->rm_col[y].rc_size = 0;
1051 vdev_raidz_generate_parity_pq(rm);
1053 rm->rm_col[x].rc_size = xsize;
1054 rm->rm_col[y].rc_size = ysize;
1058 pxy = abd_to_buf(rm->rm_col[VDEV_RAIDZ_P].rc_abd);
1059 qxy = abd_to_buf(rm->rm_col[VDEV_RAIDZ_Q].rc_abd);
1060 xd = rm->rm_col[x].rc_abd;
1061 yd = rm->rm_col[y].rc_abd;
1079 b = vdev_raidz_pow2[255 - (rm->rm_cols - 1 - x)];
1092 abd_free(rm->rm_col[VDEV_RAIDZ_P].rc_abd);
1093 abd_free(rm->rm_col[VDEV_RAIDZ_Q].rc_abd);
1098 rm->rm_col[VDEV_RAIDZ_P].rc_abd = pdata;
1099 rm->rm_col[VDEV_RAIDZ_Q].rc_abd = qdata;
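With two data columns x and y missing, the code above regenerates P and Q with those columns zeroed (pxy, qxy) and solves a 2x2 system over GF(2^8). The kernel does this with the vdev_raidz_pow2/log2 tables and vdev_raidz_exp2(); below is a hedged sketch of the same algebra with assumed gf_* helpers, building on gf_mul2() from the earlier sketch. ex and ey are the distinct Q weights of columns x and y.

static uint8_t
gf_mul(uint8_t a, uint8_t b)		/* shift-and-add multiply */
{
	uint8_t p = 0;

	while (b != 0) {
		if (b & 1)
			p ^= a;
		a = gf_mul2(a);
		b >>= 1;
	}
	return (p);
}

static uint8_t
gf_pow2(unsigned e)			/* 2^e in GF(2^8) */
{
	uint8_t v = 1;

	while (e-- > 0)
		v = gf_mul2(v);
	return (v);
}

static uint8_t
gf_div(uint8_t a, uint8_t b)		/* a / b, b != 0; brute-force inverse */
{
	for (unsigned i = 1; i < 256; i++)
		if (gf_mul(b, (uint8_t)i) == 1)
			return (gf_mul(a, (uint8_t)i));
	return (0);
}

/*
 * Solve for the two missing bytes dx, dy given
 *   pd = P ^ Pxy = dx ^ dy
 *   qd = Q ^ Qxy = 2^ex * dx ^ 2^ey * dy
 */
static void
solve_pq(uint8_t pd, uint8_t qd, unsigned ex, unsigned ey,
    uint8_t *dx, uint8_t *dy)
{
	uint8_t gx = gf_pow2(ex), gy = gf_pow2(ey);

	*dx = gf_div(gf_mul(gy, pd) ^ qd, gx ^ gy);
	*dy = pd ^ *dx;
}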
1256 vdev_raidz_matrix_init(raidz_map_t *rm, int n, int nmap, int *map,
1262 ASSERT(n == rm->rm_cols - rm->rm_firstdatacol);
1286 vdev_raidz_matrix_invert(raidz_map_t *rm, int n, int nmissing, int *missing,
1298 ASSERT3S(used[i], <, rm->rm_firstdatacol);
1301 ASSERT3S(used[i], >=, rm->rm_firstdatacol);
1318 ASSERT3U(used[j], >=, rm->rm_firstdatacol);
1319 jj = used[j] - rm->rm_firstdatacol;
1380 vdev_raidz_matrix_reconstruct(raidz_map_t *rm, int n, int nmissing,
1412 ASSERT3U(c, <, rm->rm_cols);
1414 src = abd_to_buf(rm->rm_col[c].rc_abd);
1415 ccount = rm->rm_col[c].rc_size;
1417 cc = missing[j] + rm->rm_firstdatacol;
1418 ASSERT3U(cc, >=, rm->rm_firstdatacol);
1419 ASSERT3U(cc, <, rm->rm_cols);
1422 dst[j] = abd_to_buf(rm->rm_col[cc].rc_abd);
1423 dcount[j] = rm->rm_col[cc].rc_size;
1426 ASSERT(ccount >= rm->rm_col[missing[0]].rc_size || i > 0);
1456 vdev_raidz_reconstruct_general(raidz_map_t *rm, int *tgts, int ntgts)
1478 if (!abd_is_linear(rm->rm_col[rm->rm_firstdatacol].rc_abd)) {
1479 bufs = kmem_alloc(rm->rm_cols * sizeof (abd_t *), KM_PUSHPAGE);
1481 for (c = rm->rm_firstdatacol; c < rm->rm_cols; c++) {
1482 raidz_col_t *col = &rm->rm_col[c];
1490 n = rm->rm_cols - rm->rm_firstdatacol;
1497 if (tgts[t] >= rm->rm_firstdatacol) {
1499 tgts[t] - rm->rm_firstdatacol;
1509 ASSERT(c < rm->rm_firstdatacol);
1544 for (tt = 0, c = rm->rm_firstdatacol; c < rm->rm_cols; c++) {
1546 c == missing_rows[tt] + rm->rm_firstdatacol) {
1559 vdev_raidz_matrix_init(rm, n, nmissing_rows, parity_map, rows);
1564 vdev_raidz_matrix_invert(rm, n, nmissing_rows, missing_rows, rows,
1570 vdev_raidz_matrix_reconstruct(rm, n, nmissing_rows, missing_rows,
1579 for (c = rm->rm_firstdatacol; c < rm->rm_cols; c++) {
1580 raidz_col_t *col = &rm->rm_col[c];
1586 kmem_free(bufs, rm->rm_cols * sizeof (abd_t *));
1593 vdev_raidz_reconstruct(raidz_map_t *rm, int *t, int nt)
1609 nbadparity = rm->rm_firstdatacol;
1610 nbaddata = rm->rm_cols - nbadparity;
1612 for (i = 0, c = 0; c < rm->rm_cols; c++) {
1613 if (c < rm->rm_firstdatacol)
1619 } else if (rm->rm_col[c].rc_error != 0) {
1621 } else if (c >= rm->rm_firstdatacol) {
1642 return (vdev_raidz_reconstruct_p(rm, dt, 1));
1644 ASSERT(rm->rm_firstdatacol > 1);
1647 return (vdev_raidz_reconstruct_q(rm, dt, 1));
1649 ASSERT(rm->rm_firstdatacol > 2);
1653 ASSERT(rm->rm_firstdatacol > 1);
1657 return (vdev_raidz_reconstruct_pq(rm, dt, 2));
1659 ASSERT(rm->rm_firstdatacol > 2);
1665 code = vdev_raidz_reconstruct_general(rm, tgts, ntgts);
1795 raidz_map_t *rm;
1825 rm = vdev_raidz_map_alloc(abd,
1831 for (c = rm->rm_firstdatacol; c < rm->rm_cols;
1833 rc = &rm->rm_col[c];
1870 vdev_raidz_map_free(rm);
1904 vdev_raidz_io_verify(zio_t *zio, raidz_map_t *rm, int col)
1915 raidz_col_t *rc = &rm->rm_col[col];
1959 raidz_map_t *rm;
1963 rm = vdev_raidz_map_alloc(zio->io_abd, zio->io_size, zio->io_offset,
1968 zio->io_vsd = rm;
1971 ASSERT3U(rm->rm_asize, ==, vdev_psize_to_asize(vd, zio->io_size));
1974 for (c = 0; c < rm->rm_cols; c++) {
1975 rc = &rm->rm_col[c];
1988 vdev_raidz_generate_parity(rm);
1990 for (c = 0; c < rm->rm_cols; c++) {
1991 rc = &rm->rm_col[c];
1997 vdev_raidz_io_verify(zio, rm, c);
2009 for (c = rm->rm_skipstart, i = 0; i < rm->rm_nskip; c++, i++) {
2010 ASSERT(c <= rm->rm_scols);
2011 if (c == rm->rm_scols)
2013 rc = &rm->rm_col[c];
2032 for (c = rm->rm_cols - 1; c >= 0; c--) {
2033 rc = &rm->rm_col[c];
2036 if (c >= rm->rm_firstdatacol)
2037 rm->rm_missingdata++;
2039 rm->rm_missingparity++;
2046 if (c >= rm->rm_firstdatacol)
2047 rm->rm_missingdata++;
2049 rm->rm_missingparity++;
2054 if (c >= rm->rm_firstdatacol || rm->rm_missingdata > 0 ||
2078 raidz_map_t *rm = zio->io_vsd;
2085 zbc.zbc_injected = rm->rm_ecksuminjected;
2103 raidz_map_t *rm = zio->io_vsd;
2107 rm->rm_ecksuminjected = 1;
2119 raidz_parity_verify(zio_t *zio, raidz_map_t *rm)
2132 for (c = 0; c < rm->rm_firstdatacol; c++) {
2133 rc = &rm->rm_col[c];
2140 vdev_raidz_generate_parity(rm);
2142 for (c = 0; c < rm->rm_firstdatacol; c++) {
2143 rc = &rm->rm_col[c];
2163 vdev_raidz_worst_error(raidz_map_t *rm)
2167 for (int c = 0; c < rm->rm_cols; c++)
2168 error = zio_worst_error(error, rm->rm_col[c].rc_error);
2184 raidz_map_t *rm = zio->io_vsd;
2192 ASSERT(total_errors < rm->rm_firstdatacol);
2199 for (n = 1; n <= rm->rm_firstdatacol - total_errors; n++) {
2211 c < rm->rm_firstdatacol) {
2212 c = rm->rm_firstdatacol;
2215 while (rm->rm_col[c].rc_error != 0) {
2217 ASSERT3S(c, <, rm->rm_cols);
2226 tgts[n] = rm->rm_cols;
2235 orig[n - 1] = zio_buf_alloc(rm->rm_col[0].rc_size);
2252 ASSERT3S(c, <, rm->rm_cols);
2253 rc = &rm->rm_col[c];
2262 code = vdev_raidz_reconstruct(rm, tgts, n);
2268 rc = &rm->rm_col[c];
2285 rc = &rm->rm_col[c];
2296 next < rm->rm_cols &&
2297 rm->rm_col[next].rc_error != 0; next++)
2313 rm->rm_col[c].rc_error != 0; c++)
2325 zio_buf_free(orig[i], rm->rm_col[0].rc_size);
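The block above (lines 2184-2325) is the combinatorial retry: when the block checksum still fails but no column reported an error, the code assumes n columns were silently corrupt, reconstructs them, and re-verifies, for n = 1 up to the remaining parity. The kernel walks the target combinations with an odometer-style loop over tgts[]; a simplified standalone sketch of that enumeration (ignoring the skipping of columns with known errors) might look like this:

/*
 * Advance tgts[] (initially {0, 1, ..., n-1}) to the next combination of
 * n column indices out of ncols.  Returns 0 once all combinations of the
 * current size have been tried.  Illustration only.
 */
static int
next_combination(int *tgts, int n, int ncols)
{
	int i = n - 1;

	while (i >= 0 && tgts[i] == ncols - n + i)
		i--;
	if (i < 0)
		return (0);
	tgts[i]++;
	for (int j = i + 1; j < n; j++)
		tgts[j] = tgts[j - 1] + 1;
	return (1);
}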
2357 raidz_map_t *rm = zio->io_vsd;
2370 ASSERT(rm->rm_missingparity <= rm->rm_firstdatacol);
2371 ASSERT(rm->rm_missingdata <= rm->rm_cols - rm->rm_firstdatacol);
2373 for (c = 0; c < rm->rm_cols; c++) {
2374 rc = &rm->rm_col[c];
2379 if (c < rm->rm_firstdatacol)
2388 } else if (c < rm->rm_firstdatacol && !rc->rc_tried) {
2405 if (total_errors > rm->rm_firstdatacol)
2406 zio->io_error = vdev_raidz_worst_error(rm);
2431 if (total_errors <= rm->rm_firstdatacol - parity_untried) {
2443 rm->rm_firstdatacol ||
2445 n = raidz_parity_verify(zio, rm);
2448 rm->rm_firstdatacol);
2461 ASSERT(parity_errors < rm->rm_firstdatacol);
2467 for (c = rm->rm_firstdatacol; c < rm->rm_cols; c++) {
2468 rc = &rm->rm_col[c];
2475 ASSERT(rm->rm_firstdatacol >= n);
2477 code = vdev_raidz_reconstruct(rm, tgts, n);
2495 if (parity_errors < rm->rm_firstdatacol - n ||
2497 n = raidz_parity_verify(zio, rm);
2500 rm->rm_firstdatacol);
2516 rm->rm_missingdata = 0;
2517 rm->rm_missingparity = 0;
2519 for (c = 0; c < rm->rm_cols; c++) {
2520 if (rm->rm_col[c].rc_tried)
2525 rc = &rm->rm_col[c];
2533 } while (++c < rm->rm_cols);
2548 if (total_errors > rm->rm_firstdatacol) {
2549 zio->io_error = vdev_raidz_worst_error(rm);
2551 } else if (total_errors < rm->rm_firstdatacol &&
2558 if (code != (1 << rm->rm_firstdatacol) - 1)
2559 (void) raidz_parity_verify(zio, rm);
2576 for (c = 0; c < rm->rm_cols; c++) {
2577 rc = &rm->rm_col[c];
2582 rm->rm_ecksuminjected;
2602 for (c = 0; c < rm->rm_cols; c++) {
2603 rc = &rm->rm_col[c];