Searched refs:cpu_asid_mask (Results 1 – 12 of 12) sorted by relevance
 26  return ~(u64)(cpu_asid_mask(&cpu_data[cpu])); in asid_version_mask()
 31  return cpu_asid_mask(&cpu_data[cpu]) + 1; in asid_first_version()
 36  #define cpu_asid(cpu, mm) (cpu_context((cpu), (mm)) & cpu_asid_mask(&cpu_data[cpu]))
 56  if (!((++asid) & cpu_asid_mask(&cpu_data[cpu]))) in get_new_mmu_context()
148  asid = read_csr_asid() & cpu_asid_mask(&current_cpu_data); in drop_mmu_context()
113 static inline unsigned long cpu_asid_mask(struct cpuinfo_loongarch *cpuinfo) in cpu_asid_mask() function
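The definition hits (struct cpuinfo_loongarch here, struct cpuinfo_mips further down) only show the accessor's signature. As rough orientation, a minimal sketch of how such an accessor and the derived helpers from the mmu_context hits fit together might look as follows; the struct and field names below are illustrative stand-ins, not the verbatim kernel definitions:

/* Illustrative sketch only: cpuinfo_example and its asid_mask field are stand-ins. */
struct cpuinfo_example {
	unsigned long asid_mask;	/* low bits of cpu_context() that carry the ASID */
};

static inline unsigned long cpu_asid_mask(struct cpuinfo_example *cpuinfo)
{
	return cpuinfo->asid_mask;	/* e.g. 0xff for an 8-bit ASID field */
}

/* The helpers seen in the hits split cpu_context() into ASID bits and version bits: */
static inline unsigned long asid_version_mask(struct cpuinfo_example *c)
{
	return ~cpu_asid_mask(c);	/* high bits: generation/version counter */
}

static inline unsigned long asid_first_version(struct cpuinfo_example *c)
{
	return cpu_asid_mask(c) + 1;	/* smallest value with all ASID bits clear */
}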
 44  old_ctx = read_c0_entryhi() & cpu_asid_mask(&current_cpu_data); in local_flush_tlb_from()
 70  unsigned long asid_mask = cpu_asid_mask(&current_cpu_data); in local_flush_tlb_range()
149  unsigned long asid_mask = cpu_asid_mask(&current_cpu_data); in local_flush_tlb_page()
181  unsigned long asid_mask = cpu_asid_mask(&current_cpu_data); in __update_tlb()
220  unsigned long asid_mask = cpu_asid_mask(&current_cpu_data); in add_wired_entry()

 36  if (!((asid += cpu_asid_inc()) & cpu_asid_mask(&cpu_data[cpu]))) { in get_new_mmu_context()
 88  __set_bit(mmid & cpu_asid_mask(&cpu_data[cpu]), mmid_map); in flush_context()
131  mmid_mask = cpu_asid_mask(&boot_cpu_data); in get_new_mmid()
236  write_c0_memorymapid(ctx & cpu_asid_mask(&boot_cpu_data)); in check_switch_mmu_context()

316  pid = read_c0_entryhi() & cpu_asid_mask(&current_cpu_data); in __update_tlb()

 96  unsigned long asid_mask = cpu_asid_mask(&cpu_data[cpu]); in asid_version_mask()
125  (cpu_context((cpu), (mm)) & cpu_asid_mask(&cpu_data[cpu]))
211  write_c0_memorymapid(ctx & cpu_asid_mask(&cpu_data[cpu])); in drop_mmu_context()
203 static inline unsigned long cpu_asid_mask(struct cpuinfo_mips *cpuinfo) in cpu_asid_mask() function
269 #define KVM_ENTRYHI_ASID cpu_asid_mask(&boot_cpu_data)
 30  asid_mask = cpu_asid_mask(&current_cpu_data); in dump_tlb()

 72  unsigned long asidmask = cpu_asid_mask(&current_cpu_data); in dump_tlb()

 35  unsigned long asidmask = cpu_asid_mask(&current_cpu_data); in dump_tlb()
634 WARN_ON(asid_mask != cpu_asid_mask(c)); in decode_config4()
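Taken together, the get_new_mmu_context() and drop_mmu_context() hits show the usual ASID-rollover pattern: advance the ASID, and when the bits covered by cpu_asid_mask() wrap to zero, start a new version and flush the local TLB so translations tagged with the previous generation cannot match. A stand-alone, hedged sketch of that pattern follows; the names are hypothetical, not the kernel functions themselves:

#include <stdio.h>

#define EXAMPLE_ASID_MASK	0xffUL	/* stand-in for cpu_asid_mask(&cpu_data[cpu]) */

/* Sketch of the rollover logic seen in get_new_mmu_context(): bump the ASID and,
 * when the masked low bits wrap to zero, begin a new version (a real kernel would
 * flush the local TLB at that point). */
static unsigned long example_get_new_context(unsigned long asid_cache)
{
	unsigned long asid = asid_cache + 1;

	if (!(asid & EXAMPLE_ASID_MASK))
		printf("ASID space exhausted: flush TLB, version bits now %#lx\n",
		       asid & ~EXAMPLE_ASID_MASK);

	return asid;			/* version bits + fresh ASID */
}

int main(void)
{
	unsigned long ctx = EXAMPLE_ASID_MASK + 1;	/* first version, ASID 0 */
	int i;

	for (i = 0; i < 300; i++)			/* enough to force one rollover */
		ctx = example_get_new_context(ctx);

	printf("context %#lx -> asid %#lx, version %#lx\n",
	       ctx, ctx & EXAMPLE_ASID_MASK, ctx & ~EXAMPLE_ASID_MASK);
	return 0;
}

Roughly speaking, a per-mm context whose version bits no longer match the CPU's current asid_cache is the one that receives a fresh ASID on the next switch; that comparison is what the asid_version_mask() hits above exist for.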