Lines matching the search query +full:9 +full:k

39 static void trace_move_extent2(struct bch_fs *c, struct bkey_s_c k,  in trace_move_extent2()  argument
46 bch2_bkey_val_to_text(&buf, c, k); in trace_move_extent2()
54 static void trace_move_extent_read2(struct bch_fs *c, struct bkey_s_c k) in trace_move_extent_read2() argument
59 bch2_bkey_val_to_text(&buf, c, k); in trace_move_extent_read2()
124 bch2_bkey_val_to_text(&buf, c, bkey_i_to_s_c(io->write.k.k)); in move_write()
246 struct bkey_s_c k, in bch2_move_extent() argument
252 struct bkey_ptrs_c ptrs = bch2_bkey_ptrs_c(k); in bch2_move_extent()
256 unsigned sectors = k.k->size, pages; in bch2_move_extent()
259 trace_move_extent2(c, k, &io_opts, &data_opts); in bch2_move_extent()
264 bch2_data_update_opts_normalize(k, &data_opts); in bch2_move_extent()
269 return bch2_extent_drop_ptrs(trans, iter, k, &io_opts, &data_opts); in bch2_move_extent()
280 bkey_for_each_ptr_decode(k.k, ptrs, p, entry) in bch2_move_extent()
291 io->read_sectors = k.k->size; in bch2_move_extent()
292 io->write_sectors = k.k->size; in bch2_move_extent()
298 if (bch2_bio_alloc_pages(&io->write.op.wbio.bio, sectors << 9, in bch2_move_extent()
307 io->rbio.bio.bi_iter.bi_size = sectors << 9; in bch2_move_extent()
310 io->rbio.bio.bi_iter.bi_sector = bkey_start_offset(k.k); in bch2_move_extent()
314 io_opts, data_opts, iter->btree_id, k); in bch2_move_extent()
321 bch2_ratelimit_increment(ctxt->rate, k.k->size); in bch2_move_extent()
325 atomic64_add(k.k->size, &ctxt->stats->sectors_moved); in bch2_move_extent()
333 this_cpu_add(c->counters[BCH_COUNTER_io_move], k.k->size); in bch2_move_extent()
334 this_cpu_add(c->counters[BCH_COUNTER_move_extent_read], k.k->size); in bch2_move_extent()
335 trace_move_extent_read2(c, k); in bch2_move_extent()
351 bkey_start_pos(k.k), in bch2_move_extent()
352 iter->btree_id, k, 0, in bch2_move_extent()
373 bch2_bkey_val_to_text(&buf, c, k); in bch2_move_extent()
390 if (io_opts->cur_inum != extent_k.k->p.inode) { in bch2_move_get_io_opts()
393 ret = for_each_btree_key(trans, iter, BTREE_ID_inodes, POS(0, extent_k.k->p.inode), in bch2_move_get_io_opts()
394 BTREE_ITER_all_snapshots, k, ({ in bch2_move_get_io_opts()
395 if (k.k->p.offset != extent_k.k->p.inode) in bch2_move_get_io_opts()
398 if (!bkey_is_inode(k.k)) in bch2_move_get_io_opts()
402 BUG_ON(bch2_inode_unpack(k, &inode)); in bch2_move_get_io_opts()
404 struct snapshot_io_opts_entry e = { .snapshot = k.k->p.snapshot }; in bch2_move_get_io_opts()
409 io_opts->cur_inum = extent_k.k->p.inode; in bch2_move_get_io_opts()
416 if (extent_k.k->p.snapshot) in bch2_move_get_io_opts()
418 if (bch2_snapshot_is_ancestor(c, extent_k.k->p.snapshot, i->snapshot)) in bch2_move_get_io_opts()
429 struct bkey_s_c k; in bch2_move_get_io_opts_one() local
433 if (!extent_k.k->p.inode) { in bch2_move_get_io_opts_one()
438 k = bch2_bkey_get_iter(trans, &iter, BTREE_ID_inodes, in bch2_move_get_io_opts_one()
439 SPOS(0, extent_k.k->p.inode, extent_k.k->p.snapshot), in bch2_move_get_io_opts_one()
441 ret = bkey_err(k); in bch2_move_get_io_opts_one()
445 if (!ret && bkey_is_inode(k.k)) { in bch2_move_get_io_opts_one()
447 bch2_inode_unpack(k, &inode); in bch2_move_get_io_opts_one()
493 atomic_read(&ctxt->write_sectors) < c->opts.move_bytes_in_flight >> 9 && in bch2_move_ratelimit()
494 atomic_read(&ctxt->read_sectors) < c->opts.move_bytes_in_flight >> 9 && in bch2_move_ratelimit()
513 struct bkey_s_c k; in bch2_move_data_btree() local
536 k = bch2_btree_iter_peek(&iter); in bch2_move_data_btree()
537 if (!k.k) in bch2_move_data_btree()
540 ret = bkey_err(k); in bch2_move_data_btree()
546 if (bkey_ge(bkey_start_pos(k.k), end)) in bch2_move_data_btree()
552 if (!bkey_extent_is_direct_data(k.k)) in bch2_move_data_btree()
555 io_opts = bch2_move_get_io_opts(trans, &snapshot_io_opts, k); in bch2_move_data_btree()
561 if (!pred(c, arg, k, io_opts, &data_opts)) in bch2_move_data_btree()
566 * save a copy of @k elsewhere: in bch2_move_data_btree()
568 bch2_bkey_buf_reassemble(&sk, c, k); in bch2_move_data_btree()
569 k = bkey_i_to_s_c(sk.k); in bch2_move_data_btree()
571 ret2 = bch2_move_extent(ctxt, NULL, &iter, k, *io_opts, data_opts); in bch2_move_data_btree()
587 atomic64_add(k.k->size, &ctxt->stats->sectors_seen); in bch2_move_data_btree()
662 struct bkey_s_c k; in bch2_evacuate_bucket() local
685 bkey_err(k = bch2_btree_iter_peek_slot(&iter))); in bch2_evacuate_bucket()
692 a = bch2_alloc_to_v4(k, &a_convert); in bch2_evacuate_bucket()
719 k = bch2_backpointer_get_key(trans, &iter, bp_pos, bp, 0); in bch2_evacuate_bucket()
720 ret = bkey_err(k); in bch2_evacuate_bucket()
725 if (!k.k) in bch2_evacuate_bucket()
728 bch2_bkey_buf_reassemble(&sk, c, k); in bch2_evacuate_bucket()
729 k = bkey_i_to_s_c(sk.k); in bch2_evacuate_bucket()
731 ret = bch2_move_get_io_opts_one(trans, &io_opts, k); in bch2_evacuate_bucket()
742 bkey_for_each_ptr(bch2_bkey_ptrs_c(k), ptr) { in bch2_evacuate_bucket()
754 &iter, k, io_opts, data_opts); in bch2_evacuate_bucket()
768 atomic64_add(k.k->size, &ctxt->stats->sectors_seen); in bch2_evacuate_bucket()
857 bpos_cmp(b->key.k.p, end.pos)) > 0) in bch2_move_btree()
890 struct bkey_s_c k, in rereplicate_pred() argument
894 unsigned nr_good = bch2_bkey_durability(c, k); in rereplicate_pred()
895 unsigned replicas = bkey_is_btree_ptr(k.k) in rereplicate_pred()
900 struct bkey_ptrs_c ptrs = bch2_bkey_ptrs_c(k); in rereplicate_pred()
922 struct bkey_s_c k, in migrate_pred() argument
926 struct bkey_ptrs_c ptrs = bch2_bkey_ptrs_c(k); in migrate_pred()
1013 struct bkey_s_c k, in drop_extra_replicas_pred() argument
1017 unsigned durability = bch2_bkey_durability(c, k); in drop_extra_replicas_pred()
1018 unsigned replicas = bkey_is_btree_ptr(k.k) in drop_extra_replicas_pred()
1026 bkey_for_each_ptr_decode(k.k, bch2_bkey_ptrs_c(k), p, entry) { in drop_extra_replicas_pred()
1124 prt_human_readable_u64(out, atomic64_read(&stats->sectors_seen) << 9); in bch2_move_stats_to_text()
1128 prt_human_readable_u64(out, atomic64_read(&stats->sectors_moved) << 9); in bch2_move_stats_to_text()
1132 prt_human_readable_u64(out, atomic64_read(&stats->sectors_raced) << 9); in bch2_move_stats_to_text()
1149 c->opts.move_bytes_in_flight >> 9); in bch2_moving_ctxt_to_text()
1155 c->opts.move_bytes_in_flight >> 9); in bch2_moving_ctxt_to_text()
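
Judging by the enclosing function names (bch2_move_extent, bch2_evacuate_bucket, bch2_move_stats_to_text, ...), these hits appear to come from fs/bcachefs/move.c, the bcachefs data-move path. The recurring "9" in the matches is the sector shift: like the rest of the block layer, bcachefs accounts space in 512-byte sectors, so "sectors << 9" converts a sector count to bytes (as when sizing the read/write bios or printing human-readable stats) and "bytes >> 9" converts a byte budget such as move_bytes_in_flight back to sectors for comparison against the in-flight sector counters. Below is a minimal standalone sketch of that conversion; it is not bcachefs code, and the helper names are hypothetical.

/*
 * Minimal sketch (not bcachefs code) of the 9-bit shift pattern seen in the
 * matches above: 512-byte sectors, so sectors << 9 = bytes, bytes >> 9 = sectors.
 * Helper names are hypothetical, chosen only for illustration.
 */
#include <stdint.h>
#include <stdio.h>

#define SECTOR_SHIFT	9
#define SECTOR_SIZE	(1U << SECTOR_SHIFT)	/* 512 bytes */

static uint64_t sectors_to_bytes(uint64_t sectors)
{
	return sectors << SECTOR_SHIFT;
}

static uint64_t bytes_to_sectors(uint64_t bytes)
{
	return bytes >> SECTOR_SHIFT;
}

int main(void)
{
	uint64_t extent_sectors = 128;		/* e.g. an extent's k.k->size */
	uint64_t in_flight_bytes = 1024 * 1024;	/* e.g. a move_bytes_in_flight budget */

	/* Like "sectors << 9" when sizing a bio for an extent read/write. */
	printf("%llu sectors = %llu bytes\n",
	       (unsigned long long)extent_sectors,
	       (unsigned long long)sectors_to_bytes(extent_sectors));

	/* Like "move_bytes_in_flight >> 9" when comparing against sector counters. */
	printf("%llu bytes = %llu sectors\n",
	       (unsigned long long)in_flight_bytes,
	       (unsigned long long)bytes_to_sectors(in_flight_bytes));

	return 0;
}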
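
The rereplicate/drop_extra_replicas predicates in the hits above compare a key's durability (bch2_bkey_durability) against the replica count configured for that key type, then flag pointers to add or drop. The following sketch illustrates only the "drop pointers while the remaining durability still meets the target" decision; the types and names are hypothetical and this is not the bcachefs implementation.

/*
 * Illustration only (not bcachefs code): decide which replica pointers are
 * surplus by dropping pointers while the durability that remains still
 * meets the target replica count. Types and names are hypothetical.
 */
#include <stdbool.h>
#include <stdio.h>

struct repl_ptr {
	unsigned durability;	/* how many device failures this replica survives */
	bool	 drop;		/* marked surplus */
};

static unsigned mark_extra_replicas(struct repl_ptr *ptrs, unsigned nr,
				    unsigned target)
{
	unsigned total = 0, dropped = 0;

	for (unsigned i = 0; i < nr; i++)
		total += ptrs[i].durability;

	/* Keep dropping as long as what's left still satisfies the target. */
	for (unsigned i = 0; i < nr; i++)
		if (total - ptrs[i].durability >= target) {
			ptrs[i].drop = true;
			total -= ptrs[i].durability;
			dropped++;
		}

	return dropped;
}

int main(void)
{
	struct repl_ptr ptrs[] = { { 1, false }, { 1, false }, { 1, false } };

	/* With a target of 2 replicas, one of the three pointers is surplus. */
	unsigned dropped = mark_extra_replicas(ptrs, 3, 2);

	printf("dropped %u extra replica(s)\n", dropped);
	return 0;
}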