Lines matching refs: smmu_domain — each entry shows the source line number, the matching line, and the enclosing function.
19 arm_smmu_update_s1_domain_cd_entry(struct arm_smmu_domain *smmu_domain) in arm_smmu_update_s1_domain_cd_entry() argument
25 spin_lock_irqsave(&smmu_domain->devices_lock, flags); in arm_smmu_update_s1_domain_cd_entry()
26 list_for_each_entry(master_domain, &smmu_domain->devices, devices_elm) { in arm_smmu_update_s1_domain_cd_entry()
34 arm_smmu_make_s1_cd(&target_cd, master, smmu_domain); in arm_smmu_update_s1_domain_cd_entry()
38 spin_unlock_irqrestore(&smmu_domain->devices_lock, flags); in arm_smmu_update_s1_domain_cd_entry()
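
The matches above show the CD-update path walking every master attached to the domain under devices_lock and rewriting its stage-1 CD. A minimal sketch of how the visible fragments fit together; the CD-slot lookup/write helpers (arm_smmu_get_cd_ptr, arm_smmu_write_cd_entry) and the master_domain->ssid field do not appear in the matches and are assumptions based on the mainline driver:

    static void
    arm_smmu_update_s1_domain_cd_entry(struct arm_smmu_domain *smmu_domain)
    {
    	struct arm_smmu_master_domain *master_domain;
    	struct arm_smmu_cd target_cd;
    	unsigned long flags;

    	/* Walk every master attached to this domain; devices_lock is taken
    	 * irqsave because attach/detach paths can race from atomic context. */
    	spin_lock_irqsave(&smmu_domain->devices_lock, flags);
    	list_for_each_entry(master_domain, &smmu_domain->devices, devices_elm) {
    		struct arm_smmu_master *master = master_domain->master;
    		struct arm_smmu_cd *cdptr;

    		/* Assumed helpers: find the CD slot for this SSID and rewrite
    		 * it with a freshly built stage-1 CD for the domain. */
    		cdptr = arm_smmu_get_cd_ptr(master, master_domain->ssid);
    		if (WARN_ON(!cdptr))
    			continue;
    		arm_smmu_make_s1_cd(&target_cd, master, smmu_domain);
    		arm_smmu_write_cd_entry(master, master_domain->ssid, cdptr,
    					&target_cd);
    	}
    	spin_unlock_irqrestore(&smmu_domain->devices_lock, flags);
    }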
132 struct arm_smmu_domain *smmu_domain = in arm_smmu_mm_arch_invalidate_secondary_tlbs() local
142 if (!(smmu_domain->smmu->features & ARM_SMMU_FEAT_RANGE_INV)) { in arm_smmu_mm_arch_invalidate_secondary_tlbs()
151 arm_smmu_tlb_inv_asid(smmu_domain->smmu, smmu_domain->cd.asid); in arm_smmu_mm_arch_invalidate_secondary_tlbs()
153 arm_smmu_tlb_inv_range_asid(start, size, smmu_domain->cd.asid, in arm_smmu_mm_arch_invalidate_secondary_tlbs()
154 PAGE_SIZE, false, smmu_domain); in arm_smmu_mm_arch_invalidate_secondary_tlbs()
156 arm_smmu_atc_inv_domain(smmu_domain, start, size); in arm_smmu_mm_arch_invalidate_secondary_tlbs()
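
These references show the notifier's two invalidation strategies: a full per-ASID flush and a leaf range invalidation, always followed by an ATC invalidation for ATS-capable endpoints. A sketch of that flow, assuming the smmu_domain is embedded in the mmu_notifier (container_of) and that size == 0 means "flush the whole ASID"; the clamp threshold below is an assumption, only the branch on ARM_SMMU_FEAT_RANGE_INV and the three invalidation calls are visible in the matches:

    static void arm_smmu_mm_arch_invalidate_secondary_tlbs(struct mmu_notifier *mn,
    							struct mm_struct *mm,
    							unsigned long start,
    							unsigned long end)
    {
    	struct arm_smmu_domain *smmu_domain =
    		container_of(mn, struct arm_smmu_domain, mmu_notifier);
    	size_t size = end - start;	/* mmu_notifier passes [start, end) */

    	if (!(smmu_domain->smmu->features & ARM_SMMU_FEAT_RANGE_INV)) {
    		/* Without range invalidation, large ranges are cheaper to
    		 * handle as one full ASID flush (assumed threshold). */
    		if (size >= CTXDESC_L2_ENTRIES * PAGE_SIZE)
    			size = 0;
    	} else {
    		/* ULONG_MAX is the notifier's "whole address space" signal. */
    		if (size == ULONG_MAX)
    			size = 0;
    	}

    	if (!size)
    		arm_smmu_tlb_inv_asid(smmu_domain->smmu, smmu_domain->cd.asid);
    	else
    		arm_smmu_tlb_inv_range_asid(start, size, smmu_domain->cd.asid,
    					    PAGE_SIZE, false, smmu_domain);

    	/* Endpoints with ATS cache translations too; invalidate their ATCs. */
    	arm_smmu_atc_inv_domain(smmu_domain, start, size);
    }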
161 struct arm_smmu_domain *smmu_domain = in arm_smmu_mm_release() local
170 spin_lock_irqsave(&smmu_domain->devices_lock, flags); in arm_smmu_mm_release()
171 list_for_each_entry(master_domain, &smmu_domain->devices, in arm_smmu_mm_release()
181 smmu_domain->cd.asid); in arm_smmu_mm_release()
185 spin_unlock_irqrestore(&smmu_domain->devices_lock, flags); in arm_smmu_mm_release()
187 arm_smmu_tlb_inv_asid(smmu_domain->smmu, smmu_domain->cd.asid); in arm_smmu_mm_release()
188 arm_smmu_atc_inv_domain(smmu_domain, 0, 0); in arm_smmu_mm_release()
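
mm_release runs when the address space dies while DMA may still be in flight: each attached CD is rewritten to a quiescent descriptor under devices_lock, then the ASID's TLB entries and the devices' ATCs are flushed. A sketch under the assumption that arm_smmu_make_sva_cd() with a NULL mm builds such a "quiet" CD; helper names not present in the matches are guesses from the mainline driver:

    static void arm_smmu_mm_release(struct mmu_notifier *mn, struct mm_struct *mm)
    {
    	struct arm_smmu_domain *smmu_domain =
    		container_of(mn, struct arm_smmu_domain, mmu_notifier);
    	struct arm_smmu_master_domain *master_domain;
    	unsigned long flags;

    	/* Keep each CD valid (to avoid C_BAD_CD events from in-flight DMA)
    	 * but stop it from walking the dying mm's page tables. */
    	spin_lock_irqsave(&smmu_domain->devices_lock, flags);
    	list_for_each_entry(master_domain, &smmu_domain->devices,
    			    devices_elm) {
    		struct arm_smmu_master *master = master_domain->master;
    		struct arm_smmu_cd target;
    		struct arm_smmu_cd *cdptr;

    		cdptr = arm_smmu_get_cd_ptr(master, master_domain->ssid);
    		if (WARN_ON(!cdptr))
    			continue;
    		/* NULL mm: assumed to build a CD with translation disabled. */
    		arm_smmu_make_sva_cd(&target, master, NULL,
    				     smmu_domain->cd.asid);
    		arm_smmu_write_cd_entry(master, master_domain->ssid, cdptr,
    					&target);
    	}
    	spin_unlock_irqrestore(&smmu_domain->devices_lock, flags);

    	/* Nothing may keep using the old translations after this point. */
    	arm_smmu_tlb_inv_asid(smmu_domain->smmu, smmu_domain->cd.asid);
    	arm_smmu_atc_inv_domain(smmu_domain, 0, 0);
    }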
337 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_sva_set_dev_pasid() local
350 arm_smmu_make_sva_cd(&target, master, domain->mm, smmu_domain->cd.asid); in arm_smmu_sva_set_dev_pasid()
351 ret = arm_smmu_set_pasid(master, smmu_domain, id, &target); in arm_smmu_sva_set_dev_pasid()
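
set_dev_pasid is comparatively small: build an SVA CD for the process mm with the domain's ASID, then install it at the requested PASID. A sketch, assuming dev_iommu_priv_get() for the master lookup and a simplified op signature (newer kernels also pass the previously attached domain); the mm refcounting shown is an assumption about how the release race is closed:

    static int arm_smmu_sva_set_dev_pasid(struct iommu_domain *domain,
    				      struct device *dev, ioasid_t id)
    {
    	struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain);
    	struct arm_smmu_master *master = dev_iommu_priv_get(dev);
    	struct arm_smmu_cd target;
    	int ret;

    	/* Hold a temporary mm reference so arm_smmu_mm_release() cannot run
    	 * while the CD is being installed (assumed guard). */
    	if (!mmget_not_zero(domain->mm))
    		return -EINVAL;

    	/* Build a stage-1 CD that walks domain->mm's page tables under this
    	 * domain's ASID, then install it for PASID 'id' on this master. */
    	arm_smmu_make_sva_cd(&target, master, domain->mm, smmu_domain->cd.asid);
    	ret = arm_smmu_set_pasid(master, smmu_domain, id, &target);

    	mmput(domain->mm);
    	return ret;
    }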
359 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_sva_domain_free() local
364 arm_smmu_tlb_inv_asid(smmu_domain->smmu, smmu_domain->cd.asid); in arm_smmu_sva_domain_free()
372 xa_erase(&arm_smmu_asid_xa, smmu_domain->cd.asid); in arm_smmu_sva_domain_free()
378 mmu_notifier_put(&smmu_domain->mmu_notifier); in arm_smmu_sva_domain_free()
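
Teardown ordering matters here: the ASID is flushed before it is released back to arm_smmu_asid_xa, so a later owner never inherits stale TLB entries, and the final free is deferred through mmu_notifier_put() because invalidation callbacks may still be running. A sketch of that ordering, built only from the calls visible above plus a deferred-free assumption:

    static void arm_smmu_sva_domain_free(struct iommu_domain *domain)
    {
    	struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain);

    	/* Make sure nothing tagged with this ASID survives in the TLB before
    	 * the ASID number can be handed to a new domain. */
    	arm_smmu_tlb_inv_asid(smmu_domain->smmu, smmu_domain->cd.asid);

    	/* Release the ASID; a racing invalidation callback at worst issues a
    	 * harmless extra flush on behalf of the new owner. */
    	xa_erase(&arm_smmu_asid_xa, smmu_domain->cd.asid);

    	/* The structure itself is assumed to be freed from the notifier's
    	 * free_notifier callback after an SRCU grace period, not here. */
    	mmu_notifier_put(&smmu_domain->mmu_notifier);
    }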
391 struct arm_smmu_domain *smmu_domain; in arm_smmu_sva_domain_alloc() local
395 smmu_domain = arm_smmu_domain_alloc(); in arm_smmu_sva_domain_alloc()
396 if (IS_ERR(smmu_domain)) in arm_smmu_sva_domain_alloc()
397 return ERR_CAST(smmu_domain); in arm_smmu_sva_domain_alloc()
398 smmu_domain->domain.type = IOMMU_DOMAIN_SVA; in arm_smmu_sva_domain_alloc()
399 smmu_domain->domain.ops = &arm_smmu_sva_domain_ops; in arm_smmu_sva_domain_alloc()
400 smmu_domain->smmu = smmu; in arm_smmu_sva_domain_alloc()
402 ret = xa_alloc(&arm_smmu_asid_xa, &asid, smmu_domain, in arm_smmu_sva_domain_alloc()
407 smmu_domain->cd.asid = asid; in arm_smmu_sva_domain_alloc()
408 smmu_domain->mmu_notifier.ops = &arm_smmu_mmu_notifier_ops; in arm_smmu_sva_domain_alloc()
409 ret = mmu_notifier_register(&smmu_domain->mmu_notifier, mm); in arm_smmu_sva_domain_alloc()
413 return &smmu_domain->domain; in arm_smmu_sva_domain_alloc()
416 xa_erase(&arm_smmu_asid_xa, smmu_domain->cd.asid); in arm_smmu_sva_domain_alloc()
418 kfree(smmu_domain); in arm_smmu_sva_domain_alloc()
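
Allocation is the mirror image: allocate the domain, mark it as an SVA domain, take an ASID from the global arm_smmu_asid_xa, and register the mmu notifier, with each failure point unwinding the previous step. A sketch with assumed error labels and an assumed XA_LIMIT derived from the SMMU's ASID width; the matches show the calls but not the limits or label names:

    struct iommu_domain *arm_smmu_sva_domain_alloc(struct device *dev,
    					       struct mm_struct *mm)
    {
    	struct arm_smmu_master *master = dev_iommu_priv_get(dev);
    	struct arm_smmu_device *smmu = master->smmu;
    	struct arm_smmu_domain *smmu_domain;
    	u32 asid;
    	int ret;

    	smmu_domain = arm_smmu_domain_alloc();
    	if (IS_ERR(smmu_domain))
    		return ERR_CAST(smmu_domain);
    	smmu_domain->domain.type = IOMMU_DOMAIN_SVA;
    	smmu_domain->domain.ops = &arm_smmu_sva_domain_ops;
    	smmu_domain->smmu = smmu;

    	/* Assumed limit: ASIDs are shared SMMU-wide, bounded by asid_bits. */
    	ret = xa_alloc(&arm_smmu_asid_xa, &asid, smmu_domain,
    		       XA_LIMIT(1, (1 << smmu->asid_bits) - 1), GFP_KERNEL);
    	if (ret)
    		goto err_free;

    	smmu_domain->cd.asid = asid;
    	smmu_domain->mmu_notifier.ops = &arm_smmu_mmu_notifier_ops;
    	ret = mmu_notifier_register(&smmu_domain->mmu_notifier, mm);
    	if (ret)
    		goto err_asid;

    	return &smmu_domain->domain;

    err_asid:
    	xa_erase(&arm_smmu_asid_xa, smmu_domain->cd.asid);
    err_free:
    	kfree(smmu_domain);
    	return ERR_PTR(ret);
    }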