/linux-6.12.1/fs/ntfs3/ — cross-references for the evcn field (end VCN of a non-resident attribute segment)
attrib.c
    65: CLST evcn = le64_to_cpu(attr->nres.evcn);  in attr_load_runs() [local]
    69: if (svcn >= evcn + 1 || run_is_mapped_full(run, svcn, evcn))  in attr_load_runs()
    72: if (vcn && (evcn < *vcn || *vcn < svcn))  in attr_load_runs()
    81: err = run_unpack_ex(run, ni->mi.sbi, ni->mi.rno, svcn, evcn,  in attr_load_runs()
   416: CLST alen, vcn, lcn, new_alen, old_alen, svcn, evcn;  in attr_set_size() [local]
   473: evcn = le64_to_cpu(attr_b->nres.evcn);  in attr_set_size()
   475: if (svcn <= vcn && vcn <= evcn) {  in attr_set_size()
   493: evcn = le64_to_cpu(attr->nres.evcn);  in attr_set_size()
   599: next_svcn = le64_to_cpu(attr->nres.evcn) + 1;  in attr_set_size()
   620: evcn = old_alen - 1;  in attr_set_size()
   [all …]
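
The guards at attrib.c lines 69 and 72 show the load-on-demand pattern: a segment's runs cover the VCN interval [svcn, evcn], and unpacking is skipped when that interval is empty or already mapped. A minimal standalone sketch of the check, with simplified types; the CLST typedef and run_is_mapped_full_model are stand-ins here, not the kernel's definitions:

    #include <errno.h>
    #include <stdbool.h>
    #include <stdint.h>

    typedef uint64_t CLST;   /* cluster number, as in the driver */

    /* Hypothetical stand-in for the kernel's run_is_mapped_full();
     * always reports "not mapped" so this sketch self-contains. */
    static bool run_is_mapped_full_model(CLST svcn, CLST evcn)
    {
        (void)svcn; (void)evcn;
        return false;
    }

    static int attr_load_runs_model(CLST svcn, CLST evcn, const CLST *vcn)
    {
        /* Empty segment (svcn == evcn + 1) or range already mapped. */
        if (svcn >= evcn + 1 || run_is_mapped_full_model(svcn, evcn))
            return 0;

        /* A caller-supplied VCN must lie inside [svcn, evcn]. */
        if (vcn && (evcn < *vcn || *vcn < svcn))
            return -EINVAL;

        /* ...unpack the on-disk mapping pairs for [svcn, evcn]... */
        return 0;
    }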
frecord.c
   232: *vcn > le64_to_cpu(attr->nres.evcn)) {  in ni_find_attr()
   330: vcn <= le64_to_cpu(attr->nres.evcn))  in ni_load_attr()
   533: CLST svcn, evcn = 0, svcn_p, evcn_p, next_svcn;  in ni_repack()
   553: } else if (svcn != evcn + 1) {  in ni_repack()
   558: evcn = le64_to_cpu(attr->nres.evcn);  in ni_repack()
   560: if (svcn > evcn + 1) {  in ni_repack()
   571: if (evcn + 1 == alloc)  in ni_repack()
   583: err = run_unpack(&run, sbi, ni->mi.rno, svcn, evcn, svcn,  in ni_repack()
   593: evcn_p = evcn;  in ni_repack()
   603: err = mi_pack_runs(mi_p, attr_p, &run, evcn + 1 - svcn_p);  in ni_repack()
   [all …]
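
The checks at frecord.c lines 553 and 560 encode the tiling rule for multi-segment attributes: each segment must start exactly at the previous segment's evcn + 1. A standalone sketch of that rule under the same assumption (not the kernel's ni_repack, which also merges and repacks the runs):

    #include <stdint.h>
    #include <stdio.h>

    typedef uint64_t CLST;

    struct seg { CLST svcn, evcn; };   /* one attribute segment */

    /* Segments must tile the VCN space with no gap or overlap. */
    static int segments_contiguous(const struct seg *s, int n)
    {
        for (int i = 1; i < n; i++)
            if (s[i].svcn != s[i - 1].evcn + 1)
                return 0;   /* gap or overlap: record is corrupt */
        return 1;
    }

    int main(void)
    {
        struct seg segs[] = { { 0, 15 }, { 16, 31 }, { 32, 40 } };
        printf("%s\n", segments_contiguous(segs, 3) ? "ok" : "corrupt");
        return 0;
    }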
run.c
   167: bool run_is_mapped_full(const struct runs_tree *run, CLST svcn, CLST evcn)  in run_is_mapped_full() [argument]
   181: if (next_vcn > evcn)  in run_is_mapped_full()
   921: CLST svcn, CLST evcn, CLST vcn, const u8 *run_buf,  in run_unpack() [argument]
   932: if (evcn + 1 == svcn)  in run_unpack()
   935: if (evcn < svcn)  in run_unpack()
   994: if (next_vcn > evcn + 1)  in run_unpack()
  1036: if (vcn64 != evcn + 1) {  in run_unpack()
  1053: CLST svcn, CLST evcn, CLST vcn, const u8 *run_buf,  in run_unpack_ex() [argument]
  1062: ret = run_unpack(run, sbi, ino, svcn, evcn, vcn, run_buf, run_buf_size);  in run_unpack_ex()
  1076: next_vcn <= evcn;  in run_unpack_ex()
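
run_unpack's entry checks (lines 932 and 935) split three cases: an empty segment (evcn + 1 == svcn) is legal and decodes nothing, evcn < svcn is corruption, and otherwise the mapping pairs must decode to exactly [svcn, evcn] (the vcn64 != evcn + 1 test at line 1036). A simplified model of just those bounds checks, with the actual mapping-pairs decoding omitted:

    #include <stdint.h>

    typedef uint64_t CLST;

    /* Returns <0 on corruption, 0 for an empty segment, 1 to decode. */
    static int segment_precheck(CLST svcn, CLST evcn)
    {
        if (evcn + 1 == svcn)
            return 0;    /* empty segment: nothing to unpack */
        if (evcn < svcn)
            return -1;   /* end before start: corrupt on-disk data */
        return 1;        /* decode runs covering [svcn, evcn] */
    }

    /* After decoding, the runs must land exactly on evcn + 1. */
    static int segment_postcheck(CLST decoded_end_vcn, CLST evcn)
    {
        return decoded_end_vcn == evcn + 1 ? 0 : -1;
    }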
record.c
   303: if (le64_to_cpu(attr->nres.svcn) > le64_to_cpu(attr->nres.evcn) + 1)  in mi_enum_attr()
   642: attr->nres.evcn = cpu_to_le64(svcn + plen - 1);  in mi_pack_runs()
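
mi_pack_runs (line 642) derives evcn from the number of clusters packed: a segment holding plen clusters starting at svcn ends at svcn + plen - 1. A sketch of that arithmetic with standalone types; the note on plen == 0 is an inference from the empty-segment convention, not stated in the source:

    #include <stdint.h>

    typedef uint64_t CLST;

    static CLST segment_evcn(CLST svcn, CLST plen)
    {
        /* plen == 0 gives evcn == svcn - 1, the empty-segment form,
         * which still passes mi_enum_attr's svcn <= evcn + 1 test. */
        return svcn + plen - 1;
    }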
fslog.c
  2695: u64 dsize, svcn, evcn;  in check_attr() [local]
  2720: evcn = le64_to_cpu(attr->nres.evcn);  in check_attr()
  2723: if (svcn > evcn + 1 || run_off >= asize ||  in check_attr()
  2732: if (run_unpack(NULL, sbi, 0, svcn, evcn, svcn,  in check_attr()
  3007: attr->nres.evcn = cpu_to_le64((u64)bytes_to_cluster(sbi, size) - 1);  in attr_create_nonres_log()
  3361: attr->nres.evcn = cpu_to_le64(t64);  in do_action()
  3364: oa2->attr->nres.evcn = attr->nres.evcn;  in do_action()
  4794: le64_to_cpu(attr->nres.evcn), svcn,  in log_replay()
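
check_attr (line 2732) passes a NULL runs tree to run_unpack, i.e., it parses and validates a logged segment without storing it. attr_create_nonres_log (line 3007) computes the end VCN of a fresh non-resident attribute from its byte size: cluster count, rounded up, minus one. A sketch of that computation, where bytes_to_cluster_model is a hypothetical stand-in for the kernel's bytes_to_cluster() helper:

    #include <stdint.h>

    typedef uint64_t CLST;

    /* Hypothetical stand-in for the kernel's bytes_to_cluster(). */
    static CLST bytes_to_cluster_model(uint64_t bytes, uint32_t cluster_size)
    {
        return (bytes + cluster_size - 1) / cluster_size;   /* round up */
    }

    static CLST last_vcn_for_size(uint64_t size, uint32_t cluster_size)
    {
        return bytes_to_cluster_model(size, cluster_size) - 1;
    }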
ntfs_fs.h
   808: bool run_is_mapped_full(const struct runs_tree *run, CLST svcn, CLST evcn);
   813: CLST svcn, CLST evcn, CLST vcn, const u8 *run_buf,
   818: CLST svcn, CLST evcn, CLST vcn, const u8 *run_buf,

attrlist.c
    78: 0, le64_to_cpu(attr->nres.evcn), 0,  in ntfs_load_attr_list()
inode.c
   401: err = run_unpack_ex(run, sbi, ino, t64, le64_to_cpu(attr->nres.evcn),  in ntfs_read_mft()
  1463: attr->nres.evcn = cpu_to_le64(-1ll);  in ntfs_create_inode()
  1534: attr->nres.evcn = cpu_to_le64(clst - 1);  in ntfs_create_inode()
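
The two stores in ntfs_create_inode (lines 1463 and 1534) are two ends of one convention: a zero-length non-resident attribute gets evcn = -1, so that evcn + 1 equals its starting VCN, while an attribute with clst allocated clusters ends at clst - 1. A sketch assuming the new attribute starts at VCN 0 (an inference from the empty-segment convention, not stated directly here):

    #include <stdint.h>

    typedef uint64_t CLST;

    /* evcn for a new non-resident attribute starting at VCN 0. */
    static CLST initial_evcn(CLST clst)
    {
        /* clst == 0: (CLST)-1, so evcn + 1 == svcn == 0 (empty). */
        return clst ? clst - 1 : (CLST)-1;
    }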
ntfs.h
   336: __le64 evcn; // 0x18: End VCN of this segment.  [member]
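
The member at line 336 lives in the non-resident part of the on-disk attribute record; the 0x18 in its comment is the offset from the start of the attribute record, where the starting VCN sits at 0x10. A trimmed sketch of those two fields (field set reduced to the VCN bounds; not the full kernel struct, and the offsets in comments are record-relative, not struct-relative):

    #include <stdint.h>

    typedef uint64_t __le64_model;   /* stand-in for the kernel's __le64 */

    struct nres_model {
        __le64_model svcn;   /* 0x10: starting VCN of this segment */
        __le64_model evcn;   /* 0x18: end VCN of this segment */
        /* ... mapping-pairs offset, allocated/valid/real sizes ... */
    };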