Lines Matching full:pick
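The hits below are grouped by the enclosing function (the trailing "in func()" annotation; "argument" marks hits that are function parameters). They appear to come from the bcachefs read path, most likely fs/bcachefs/io_read.c: "pick" is the struct extent_ptr_decoded that bch2_bkey_pick_read_device() fills in with the chosen device pointer (pick.ptr) and the unpacked checksum/compression state (pick.crc) for a given extent.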
160 bch2_data_update_read_done(&op->write, rbio->pick.crc); in promote_start()
167 struct extent_ptr_decoded *pick, in __promote_alloc() argument
275 struct extent_ptr_decoded *pick, in promote_alloc() argument
293 ? max(pick->crc.compressed_size, pick->crc.live_size) in promote_alloc()
309 k, pos, pick, opts, sectors, rbio, failed); in promote_alloc()
421 rbio->pick.ptr, in bch2_read_retry_nodecode()
423 rbio->pick.crc.offset)) { in bch2_read_retry_nodecode()
464 bch2_mark_io_failure(&failed, &rbio->pick); in bch2_rbio_retry()
506 u64 data_offset = rbio->data_pos.offset - rbio->pick.crc.offset; in __bch2_rbio_narrow_crcs()
513 if (crc_is_compressed(rbio->pick.crc)) in __bch2_rbio_narrow_crcs()
522 !bch2_bkey_matches_ptr(c, k, rbio->pick.ptr, data_offset)) in __bch2_rbio_narrow_crcs()
527 k.k->p.offset > data_offset + rbio->pick.crc.uncompressed_size) in __bch2_rbio_narrow_crcs()
531 rbio->pick.crc, NULL, &new_crc, in __bch2_rbio_narrow_crcs()
533 rbio->pick.crc.csum_type)) { in __bch2_rbio_narrow_crcs()
574 struct bch_extent_crc_unpacked crc = rbio->pick.crc; in __bch2_read_endio()
592 if (bch2_crc_cmp(csum, rbio->pick.crc.csum) && !c->opts.no_data_io) in __bch2_read_endio()
674 bch2_csum_err_msg(&buf, crc.csum_type, rbio->pick.crc.csum, csum); in __bch2_read_endio()
676 struct bch_dev *ca = rbio->have_ioref ? bch2_dev_have_ref(c, rbio->pick.ptr.dev) : NULL; in __bch2_read_endio()
706 struct bch_dev *ca = rbio->have_ioref ? bch2_dev_have_ref(c, rbio->pick.ptr.dev) : NULL; in bch2_read_endio()
732 (ca && dev_ptr_stale(ca, &rbio->pick.ptr))) { in bch2_read_endio()
744 crc_is_compressed(rbio->pick.crc) || in bch2_read_endio()
745 bch2_csum_type_is_encryption(rbio->pick.crc.csum_type)) in bch2_read_endio()
747 else if (rbio->pick.crc.csum_type) in bch2_read_endio()
846 struct extent_ptr_decoded pick; in __bch2_read_extent() local
865 pick_ret = bch2_bkey_pick_read_device(c, k, failed, &pick); in __bch2_read_extent()
884 struct bch_dev *ca = bch2_dev_get_ioref(c, pick.ptr.dev, READ); in __bch2_read_extent()
893 !pick.ptr.cached && in __bch2_read_extent()
895 unlikely(dev_ptr_stale(ca, &pick.ptr))) { in __bch2_read_extent()
896 read_from_stale_dirty_pointer(trans, ca, k, pick.ptr); in __bch2_read_extent()
897 bch2_mark_io_failure(failed, &pick); in __bch2_read_extent()
913 if (pick.crc.compressed_size > orig->bio.bi_vcnt * PAGE_SECTORS) { in __bch2_read_extent()
919 iter.bi_size = pick.crc.compressed_size << 9; in __bch2_read_extent()
928 bch2_can_narrow_extent_crcs(k, pick.crc); in __bch2_read_extent()
935 if (crc_is_compressed(pick.crc) || in __bch2_read_extent()
936 (pick.crc.csum_type != BCH_CSUM_none && in __bch2_read_extent()
937 (bvec_iter_sectors(iter) != pick.crc.uncompressed_size || in __bch2_read_extent()
938 (bch2_csum_type_is_encryption(pick.crc.csum_type) && in __bch2_read_extent()
946 promote = promote_alloc(trans, iter, k, &pick, orig->opts, flags, in __bch2_read_extent()
950 EBUG_ON(crc_is_compressed(pick.crc)); in __bch2_read_extent()
951 EBUG_ON(pick.crc.csum_type && in __bch2_read_extent()
952 (bvec_iter_sectors(iter) != pick.crc.uncompressed_size || in __bch2_read_extent()
953 bvec_iter_sectors(iter) != pick.crc.live_size || in __bch2_read_extent()
954 pick.crc.offset || in __bch2_read_extent()
958 pick.ptr.offset += pick.crc.offset + in __bch2_read_extent()
961 pick.crc.compressed_size = bvec_iter_sectors(iter); in __bch2_read_extent()
962 pick.crc.uncompressed_size = bvec_iter_sectors(iter); in __bch2_read_extent()
963 pick.crc.offset = 0; in __bch2_read_extent()
964 pick.crc.live_size = bvec_iter_sectors(iter); in __bch2_read_extent()
975 pick.crc.compressed_size << 9); in __bch2_read_extent()
977 pick.crc.compressed_size << 9; in __bch2_read_extent()
979 unsigned sectors = pick.crc.compressed_size; in __bch2_read_extent()
1011 EBUG_ON(bio_sectors(&rbio->bio) != pick.crc.compressed_size); in __bch2_read_extent()
1029 rbio->pick = pick; in __bch2_read_extent()
1039 orig->pick = pick; in __bch2_read_extent()
1042 rbio->bio.bi_iter.bi_sector = pick.ptr.offset; in __bch2_read_extent()
1055 if (ca && pick.ptr.cached && !(flags & BCH_READ_NODECODE)) in __bch2_read_extent()
1056 bch2_bucket_io_time_reset(trans, pick.ptr.dev, in __bch2_read_extent()
1057 PTR_BUCKET_NR(ca, &pick.ptr), READ); in __bch2_read_extent()
1064 if (!rbio->pick.idx) { in __bch2_read_extent()
1116 bch2_mark_io_failure(failed, &pick); in __bch2_read_extent()
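For orientation, here is a small, self-contained C model of one decision several of the hits above feed into: the check around line 935 that forces a full, bounced read when the extent is compressed, or is checksummed/encrypted but only partially requested. Only the fields actually referenced in the listing are modelled (crc.compressed_size, crc.uncompressed_size, crc.csum_type, ...); the type names and the simplified condition are stand-ins for illustration, not the kernel's definitions, and the listing truncates the real condition, which also consults read flags.

/*
 * Toy model of the bounce decision visible at lines 935-938 above.
 * Field names mirror the hits in the listing; everything else is a
 * simplified stand-in, not bcachefs' actual types or helpers.
 */
#include <stdbool.h>
#include <stdio.h>

struct toy_crc_unpacked {
	unsigned compressed_size;	/* sectors occupied on disk */
	unsigned uncompressed_size;	/* sectors after decompression */
	unsigned live_size;		/* sectors still referenced by the key */
	unsigned offset;		/* offset of live data within the extent */
	unsigned csum_type;		/* 0 stands in for BCH_CSUM_none */
	unsigned compression_type;	/* 0 means uncompressed */
};

struct toy_ptr_decoded {
	unsigned			idx;
	struct toy_crc_unpacked		crc;
	/* device pointer (pick.ptr) omitted in this model */
};

/* Mimics the crc_is_compressed() helper referenced in the hits. */
static bool crc_is_compressed(struct toy_crc_unpacked crc)
{
	return crc.compression_type != 0;
}

/*
 * A read must go through a private (bounce) buffer covering the whole
 * extent when the data is compressed, or when it is checksummed and the
 * request does not cover the full checksummed region (or is encrypted).
 */
static bool read_needs_bounce(struct toy_ptr_decoded pick,
			      unsigned request_sectors,
			      bool csum_is_encryption)
{
	return crc_is_compressed(pick.crc) ||
		(pick.crc.csum_type != 0 &&
		 (request_sectors != pick.crc.uncompressed_size ||
		  csum_is_encryption));
}

int main(void)
{
	struct toy_ptr_decoded pick = {
		.crc = {
			.compressed_size   = 8,
			.uncompressed_size = 16,
			.csum_type         = 1,	/* checksummed */
			.compression_type  = 1,	/* compressed */
		},
	};

	/* A compressed extent always needs the full, bounced read. */
	printf("needs bounce: %d\n", read_needs_bounce(pick, 8, false));
	return 0;
}

Lines 950-964 of the listing show the complementary fast path: when no bounce is needed, pick.ptr.offset is advanced by pick.crc.offset and the crc fields (compressed_size, uncompressed_size, live_size, offset) are collapsed to exactly the requested sector count, so the on-disk read maps one-to-one onto the caller's bio.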