bkey_i_to_s_c - cross-reference for /linux-6.12.1/fs/bcachefs/
Each block below names a file in this directory and lists, for every use of bkey_i_to_s_c() found there, the line number, the source line, and the enclosing function. A trailing "[all …]" marks a file whose reference list is truncated.
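Almost every reference below follows one idiom: the caller owns a mutable struct bkey_i (frequently a btree node's embedded ->key, a journal key, or a stack copy) and wraps it with bkey_i_to_s_c() to obtain a read-only view before passing it to a printer, validator, or trigger. A minimal sketch of that idiom, assuming the bcachefs bkey types are in scope and using a hypothetical inspect_key() helper in place of real consumers such as bch2_bkey_val_to_text():

    /* inspect_key() is a hypothetical stand-in for any consumer that takes a
     * const key view (bch2_bkey_val_to_text(), bch2_bkey_validate(), ...). */
    static void inspect_key(struct bkey_s_c k);

    static void report_key(struct bkey_i *k)
    {
            /* No data is copied: bkey_i_to_s_c() only builds const pointers into *k. */
            inspect_key(bkey_i_to_s_c(k));
    }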

data_update.c
    112   struct bkey_s_c old = bkey_i_to_s_c(m->k.k);    in trace_move_extent_fail2()
    158   bch2_bkey_val_to_text(&buf, c, bkey_i_to_s_c(insert));    in trace_move_extent_fail2()
    186   struct bkey_s_c old = bkey_i_to_s_c(m->k.k);    in __bch2_data_update_index_update()
    209   trace_move_extent_fail2(m, k, bkey_i_to_s_c(&new->k_i),    in __bch2_data_update_index_update()
    249   trace_move_extent_fail2(m, k, bkey_i_to_s_c(&new->k_i), insert, "no rewrites found:");    in __bch2_data_update_index_update()
    259   if ((ptr_c = bch2_bkey_has_device_c(bkey_i_to_s_c(insert), ptr->dev)) &&    in __bch2_data_update_index_update()
    266   trace_move_extent_fail2(m, k, bkey_i_to_s_c(&new->k_i), insert, "new replicas conflicted:");    in __bch2_data_update_index_update()
    275   durability = bch2_bkey_durability(c, bkey_i_to_s_c(insert)) +    in __bch2_data_update_index_update()
    276   bch2_bkey_durability(c, bkey_i_to_s_c(&new->k_i));    in __bch2_data_update_index_update()
    326   int invalid = bch2_bkey_validate(c, bkey_i_to_s_c(insert), __btree_node_type(0, m->btree_id),    in __bch2_data_update_index_update()
    [all …]

btree_gc.c
    119   bch2_bkey_val_to_text(&buf, c, bkey_i_to_s_c(&b->key));    in set_node_min()
    155   bch2_bkey_val_to_text(&buf, c, bkey_i_to_s_c(&b->key));    in set_node_max()
    214   bch2_bkey_val_to_text(&buf, c, bkey_i_to_s_c(&b->key));    in btree_check_node_boundaries()
    218   bch2_bkey_val_to_text(&buf, c, bkey_i_to_s_c(&prev->key));    in btree_check_node_boundaries()
    222   bch2_bkey_val_to_text(&buf, c, bkey_i_to_s_c(&cur->key));    in btree_check_node_boundaries()
    282   bch2_bkey_val_to_text(&buf, c, bkey_i_to_s_c(&b->key));    in btree_repair_node_end()
    285   bch2_bkey_val_to_text(&buf, c, bkey_i_to_s_c(&child->key));    in btree_repair_node_end()
    344   bch2_bkey_val_to_text(&buf, c, bkey_i_to_s_c(cur_k.k));    in bch2_btree_repair_topology_recurse()
    481   bch2_bkey_val_to_text(&buf, c, bkey_i_to_s_c(&b->key));    in bch2_btree_repair_topology_recurse()
    697   struct bkey_s_c k = bkey_i_to_s_c(&b->key);    in bch2_gc_btree()

move.c
    124   bch2_bkey_val_to_text(&buf, c, bkey_i_to_s_c(io->write.k.k));    in move_write()
    569   k = bkey_i_to_s_c(sk.k);    in bch2_move_data_btree()
    729   k = bkey_i_to_s_c(sk.k);    in bch2_evacuate_bucket()
    783   unsigned sectors = btree_ptr_sectors_written(bkey_i_to_s_c(&b->key));    in bch2_evacuate_bucket()
    949   return rereplicate_pred(c, arg, bkey_i_to_s_c(&b->key), io_opts, data_opts);    in rereplicate_btree_pred()
    957   return migrate_pred(c, arg, bkey_i_to_s_c(&b->key), io_opts, data_opts);    in migrate_btree_pred()
    1046  return drop_extra_replicas_pred(c, arg, bkey_i_to_s_c(&b->key), io_opts, data_opts);    in drop_extra_replicas_btree_pred()

btree_update_interior.c
    103   bch2_bkey_val_to_text(&buf, c, bkey_i_to_s_c(&b->key));    in bch2_btree_node_check_topology()
    105   bch2_bkey_val_to_text(&buf, c, bkey_i_to_s_c(prev.k));    in bch2_btree_node_check_topology()
    124   bch2_bkey_val_to_text(&buf, c, bkey_i_to_s_c(&b->key));    in bch2_btree_node_check_topology()
    135   bch2_bkey_val_to_text(&buf, c, bkey_i_to_s_c(&b->key));    in bch2_btree_node_check_topology()
    137   bch2_bkey_val_to_text(&buf, c, bkey_i_to_s_c(prev.k));    in bch2_btree_node_check_topology()
    615   if (!bch2_dev_btree_bitmap_marked(as->c, bkey_i_to_s_c(k)))    in btree_update_new_nodes_marked_sb()
    626   bch2_dev_btree_bitmap_mark(c, bkey_i_to_s_c(k));    in btree_update_new_nodes_mark_sb()
    651   ret = bch2_key_trigger_old(trans, as->btree_id, level, bkey_i_to_s_c(k),    in btree_update_nodes_written_trans()
    1364  !btree_ptr_sectors_written(bkey_i_to_s_c(insert)));    in bch2_insert_fixup_btree_ptr()
    1369  if (bch2_bkey_validate(c, bkey_i_to_s_c(insert),    in bch2_insert_fixup_btree_ptr()
    [all …]

sb-clean.c
    116   bch2_bkey_val_to_text(&buf1, c, bkey_i_to_s_c(k1));    in bch2_verify_superblock_clean()
    121   bch2_bkey_val_to_text(&buf2, c, bkey_i_to_s_c(k2));    in bch2_verify_superblock_clean()

logged_ops.c
    111   bch2_bkey_val_to_text(&buf, c, bkey_i_to_s_c(k));    in bch2_logged_op_finish()

str_hash.h
    270   desc.hash_bkey(info, bkey_i_to_s_c(insert)),    in bch2_hash_set_or_get_in_snapshot()
    275   if (!desc.cmp_bkey(k, bkey_i_to_s_c(insert)))    in bch2_hash_set_or_get_in_snapshot()

backpointers.c
    346   return bkey_i_to_s_c(&b->key);    in bch2_backpointer_get_key()
    376   bkey_i_to_s_c(&b->key),    in bch2_backpointer_get_node()
    383   backpointer_not_found(trans, bp_pos, bp, bkey_i_to_s_c(&b->key));    in bch2_backpointer_get_node()
    652   bch2_bkey_val_to_text(&buf, c, bkey_i_to_s_c(&n_bp_k.k_i));    in check_bp_exists()
    721   k = bkey_i_to_s_c(&b->key);    in check_btree_root_to_backpointers()

bkey_methods.h
    115   return bch2_key_trigger(trans, btree_id, level, bkey_i_to_s_c(&deleted), new,    in bch2_key_trigger_new()

extent_update.c
    124   ret = count_iters_for_insert(trans, bkey_i_to_s_c(insert), 0, end,    in bch2_extent_atomic_end()

migrate.c
    128   if (!bch2_bkey_has_device_c(bkey_i_to_s_c(&b->key), dev_idx))    in bch2_dev_metadata_drop()

debug.c
    166   ptrs = bch2_bkey_ptrs_c(bkey_i_to_s_c(&b->key));    in __bch2_btree_verify()
    173   bch2_bkey_val_to_text(&buf, c, bkey_i_to_s_c(&b->key));    in __bch2_btree_verify()
    192   if (bch2_bkey_pick_read_device(c, bkey_i_to_s_c(&b->key), NULL, &pick) <= 0) {    in bch2_btree_node_ondisk_to_text()
    479   bch2_bkey_val_to_text(out, c, bkey_i_to_s_c(&b->key));    in bch2_cached_btree_node_to_text()

btree_io.c
    531   b->written, btree_ptr_sectors_written(bkey_i_to_s_c(&b->key)));    in btree_err_msg()
    686   unsigned ptr_written = btree_ptr_sectors_written(bkey_i_to_s_c(&b->key));    in validate_bset()
    986   unsigned ptr_written = btree_ptr_sectors_written(bkey_i_to_s_c(&b->key));    in bch2_btree_node_read_done()
    1319  bkey_i_to_s_c(&b->key),    in btree_node_read_work()
    1577  struct bkey_s_c k = bkey_i_to_s_c(&b->key);    in btree_node_read_all_replicas()
    1659  ret = bch2_bkey_pick_read_device(c, bkey_i_to_s_c(&b->key),    in bch2_btree_node_read()
    1860  if (!bch2_bkey_nr_ptrs(bkey_i_to_s_c(&wbio->key))) {    in btree_node_write_work()
    1935  int ret = bch2_bkey_validate(c, bkey_i_to_s_c(&b->key),    in validate_bset_for_write()
    2113  BUG_ON(btree_ptr_sectors_written(bkey_i_to_s_c(&b->key)) != sectors_to_write);    in __bch2_btree_node_write()

rebalance.c
    130   (void *) bch2_bkey_rebalance_opts(bkey_i_to_s_c(n)));    in bch2_bkey_clear_needs_rebalance()
    230   k = bkey_i_to_s_c(sk.k);    in do_rebalance_extent()

io_write.c
    157   unsigned new_replicas = bch2_bkey_replicas(c, bkey_i_to_s_c(new));    in bch2_sum_sector_overwrites()
    158   bool new_compressed = bch2_bkey_sectors_compressed(bkey_i_to_s_c(new));    in bch2_sum_sector_overwrites()
    177   *disk_sectors_delta += sectors * bch2_bkey_nr_ptrs_allocated(bkey_i_to_s_c(new));    in bch2_sum_sector_overwrites()
    403   struct bkey_ptrs_c ptrs = bch2_bkey_ptrs_c(bkey_i_to_s_c(k));    in bch2_submit_wbio_replicas()
    492   if (!bch2_bkey_nr_ptrs(bkey_i_to_s_c(src)))    in bch2_write_drop_io_error_ptrs()
    1132  if (!bch2_extents_match(bkey_i_to_s_c(orig), k)) {    in bch2_nocow_write_convert_one_unwritten()

bkey_types.h
    85    static inline struct bkey_s_c bkey_i_to_s_c(const struct bkey_i *k)    definition of bkey_i_to_s_c()
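For orientation, the definition at bkey_types.h:85 is a zero-cost view conversion: it copies no key data and only builds a pair of const pointers into the caller's struct bkey_i. The sketch below is illustrative rather than verbatim kernel code; the struct layouts are simplified stand-ins for the real bcachefs types, which carry more fields:

    /* Simplified stand-ins for the bcachefs types (illustrative only). */
    struct bkey     { unsigned long long u64s; /* plus format, position, size, ... */ };
    struct bch_val  { unsigned char start[1];  /* value bytes begin here */ };

    struct bkey_i {                    /* "i": key header with the value inline after it */
            struct bkey     k;
            struct bch_val  v;
    };

    struct bkey_s_c {                  /* "s_c": split, const - separate pointers to key and value */
            const struct bkey       *k;
            const struct bch_val    *v;
    };

    static inline struct bkey_s_c bkey_i_to_s_c(const struct bkey_i *k)
    {
            /* Re-point at the header and the inline value; nothing is copied. */
            return (struct bkey_s_c) { &k->k, &k->v };
    }

Because the conversion is just an address calculation, the references in this listing can cheaply wrap stack keys, btree node keys (&b->key), and journal keys alike.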

btree_cache.c
    914   bch2_bkey_val_to_text(&buf, c, bkey_i_to_s_c(k));    in bch2_btree_node_fill()
    923   bch2_bkey_val_to_text(&buf, c, bkey_i_to_s_c(k));    in bch2_btree_node_fill()
    1011  bch2_bkey_val_to_text(&buf, c, bkey_i_to_s_c(&b->key));    in btree_bad_header()
    1407  bch2_bkey_val_to_text(out, c, bkey_i_to_s_c(&b->key));    in bch2_btree_pos_to_text()
    1424  bch2_val_to_text(out, c, bkey_i_to_s_c(&b->key));    in bch2_btree_node_to_text()

btree_journal_iter.c
    329   return bkey_i_to_s_c(k->k);    in bch2_journal_iter_peek()
    631   bch2_bkey_val_to_text(&buf, c, bkey_i_to_s_c(i->k));    in bch2_journal_keys_dump()

btree_node_scan.c
    532   bch2_bkey_val_to_text(&buf, c, bkey_i_to_s_c(&tmp.k));    in bch2_get_scanned_nodes()
    536   BUG_ON(bch2_bkey_validate(c, bkey_i_to_s_c(&tmp.k), BKEY_TYPE_btree, 0));    in bch2_get_scanned_nodes()

btree_iter.c
    938   bch2_bkey_val_to_text(&buf, c, bkey_i_to_s_c(&l->b->key));    in btree_path_down()
    1463  bch2_bkey_val_to_text(buf, trans->c, bkey_i_to_s_c(i->k));    in bch2_trans_updates_to_text()
    1836  k = bkey_i_to_s_c(ck->k);    in bch2_btree_path_peek_slot()
    2064  *k = bkey_i_to_s_c(i->k);    in bch2_btree_trans_peek_prev_updates()
    2081  *k = bkey_i_to_s_c(i->k);    in bch2_btree_trans_peek_updates()
    2094  *k = bkey_i_to_s_c(i->k);    in bch2_btree_trans_peek_slot_updates()
    2120  return bkey_i_to_s_c(k);    in btree_trans_peek_slot_journal()
    2138  k = bkey_i_to_s_c(next_journal);    in btree_trans_peek_journal()

btree_update.h
    260   *k = bkey_i_to_s_c(mut);    in __bch2_bkey_make_mut()

buckets.c
    312   enum bch_data_type data_type = bch2_bkey_ptr_data_type(bkey_i_to_s_c(new), p, entry);    in bch2_check_fix_ptrs()
    361   bch2_bkey_val_to_text(&buf, c, bkey_i_to_s_c(new));    in bch2_check_fix_ptrs()
    663   bch2_bkey_to_replicas(&acc.replicas, bkey_i_to_s_c(&s->k_i));    in bch2_trigger_stripe_ptr()

journal_io.c
    370   ret = bch2_bkey_validate(c, bkey_i_to_s_c(k),    in journal_validate_key()
    425   bch2_bkey_val_to_text(out, c, bkey_i_to_s_c(k));    in journal_entry_btree_keys_to_text()
    1443  bch2_bkey_has_device_c(bkey_i_to_s_c(&w->key), ca->dev_idx) ||    in __journal_write_alloc()
    2055  w->devs_written = bch2_bkey_devs(bkey_i_to_s_c(&w->key));    in CLOSURE_CALLBACK()

btree_update.c
    49    if (!bch2_bkey_merge(c, bkey_i_to_s(update), bkey_i_to_s_c(*insert)))    in extent_front_merge()
    318   ret = bch2_trans_update_extent_overwrite(trans, &iter, flags, k, bkey_i_to_s_c(insert));    in bch2_trans_update_extent()

ec.c
    665   bch2_bkey_val_to_text(&err, c, bkey_i_to_s_c(&buf->key));    in ec_validate_checksums()
    1246  bch2_bkey_val_to_text(&buf, c, bkey_i_to_s_c(&new->k_i));    in ec_stripe_key_update()
    2272  bch2_bkey_to_replicas(&acc.replicas, bkey_i_to_s_c(&s->k_i));    in bch2_invalidate_stripe_to_dev()
    2285  bch2_bkey_to_replicas(&acc.replicas, bkey_i_to_s_c(&s->k_i));    in bch2_invalidate_stripe_to_dev()
    2425  bch2_bkey_val_to_text(out, c, bkey_i_to_s_c(&s->new_stripe.key));    in bch2_new_stripe_to_text()