Lines matching refs: smmu_domain (drivers/iommu/arm/arm-smmu/arm-smmu.c)
242 static void arm_smmu_tlb_sync_context(struct arm_smmu_domain *smmu_domain) in arm_smmu_tlb_sync_context() argument
244 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_tlb_sync_context()
247 spin_lock_irqsave(&smmu_domain->cb_lock, flags); in arm_smmu_tlb_sync_context()
248 __arm_smmu_tlb_sync(smmu, ARM_SMMU_CB(smmu, smmu_domain->cfg.cbndx), in arm_smmu_tlb_sync_context()
250 spin_unlock_irqrestore(&smmu_domain->cb_lock, flags); in arm_smmu_tlb_sync_context()
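Lines 242-250 above are the per-context TLB sync: cb_lock is held across __arm_smmu_tlb_sync() so the TLBSYNC write and the TLBSTATUS poll cannot interleave with another CPU invalidating on the same context bank. A minimal sketch of that shape, using stand-in types that mirror only the fields these listings touch; they are not the kernel's definitions, and the later sketches below reuse them:

	#include <linux/io-pgtable.h>
	#include <linux/iommu.h>
	#include <linux/mutex.h>
	#include <linux/spinlock.h>
	#include <linux/types.h>

	struct arm_smmu_device_sketch;                 /* opaque stand-in */

	struct arm_smmu_domain_sketch {
		struct arm_smmu_device_sketch *smmu;   /* NULL until first attach */
		struct io_pgtable_ops *pgtbl_ops;
		const struct iommu_flush_ops *flush_ops;
		struct { int cbndx; u16 asid; u16 vmid; } cfg;
		int stage;                             /* S1, S2 or nested */
		unsigned long pgtbl_quirks;
		struct mutex init_mutex;               /* guards the smmu pointer */
		spinlock_t cb_lock;                    /* serialises TLB sync/ATS */
		struct iommu_domain domain;
	};

	static void tlb_sync_context_sketch(struct arm_smmu_domain_sketch *d)
	{
		unsigned long flags;

		spin_lock_irqsave(&d->cb_lock, flags);
		/* __arm_smmu_tlb_sync(): write CB_TLBSYNC, poll CB_TLBSTATUS */
		spin_unlock_irqrestore(&d->cb_lock, flags);
	}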
255 struct arm_smmu_domain *smmu_domain = cookie; in arm_smmu_tlb_inv_context_s1() local
261 arm_smmu_cb_write(smmu_domain->smmu, smmu_domain->cfg.cbndx, in arm_smmu_tlb_inv_context_s1()
262 ARM_SMMU_CB_S1_TLBIASID, smmu_domain->cfg.asid); in arm_smmu_tlb_inv_context_s1()
263 arm_smmu_tlb_sync_context(smmu_domain); in arm_smmu_tlb_inv_context_s1()
268 struct arm_smmu_domain *smmu_domain = cookie; in arm_smmu_tlb_inv_context_s2() local
269 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_tlb_inv_context_s2()
273 arm_smmu_gr0_write(smmu, ARM_SMMU_GR0_TLBIVMID, smmu_domain->cfg.vmid); in arm_smmu_tlb_inv_context_s2()
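The two full-context invalidations differ only in the tag they target: stage 1 (lines 255-263) writes the per-context TLBIASID register with the domain's ASID and then syncs on that context bank, while stage 2 (lines 268-273) writes the global GR0 TLBIVMID register with the VMID, which in the full source is followed by a global sync rather than a per-context one. A hedged side-by-side sketch; the driver's own register helpers are shown only as comments:

	/* Stage 1: per-context invalidate by ASID, per-context sync. */
	static void tlb_inv_context_s1_sketch(struct arm_smmu_domain_sketch *d)
	{
		/* arm_smmu_cb_write(smmu, d->cfg.cbndx,
		 *                   ARM_SMMU_CB_S1_TLBIASID, d->cfg.asid); */
		tlb_sync_context_sketch(d);
	}

	/* Stage 2: global invalidate by VMID, global sync. */
	static void tlb_inv_context_s2_sketch(struct arm_smmu_domain_sketch *d)
	{
		/* arm_smmu_gr0_write(smmu, ARM_SMMU_GR0_TLBIVMID, d->cfg.vmid);
		 * arm_smmu_tlb_sync_global(smmu); */
		(void)d;
	}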
280 struct arm_smmu_domain *smmu_domain = cookie; in arm_smmu_tlb_inv_range_s1() local
281 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_tlb_inv_range_s1()
282 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in arm_smmu_tlb_inv_range_s1()
308 struct arm_smmu_domain *smmu_domain = cookie; in arm_smmu_tlb_inv_range_s2() local
309 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_tlb_inv_range_s2()
310 int idx = smmu_domain->cfg.cbndx; in arm_smmu_tlb_inv_range_s2()
317 if (smmu_domain->cfg.fmt == ARM_SMMU_CTX_FMT_AARCH64) in arm_smmu_tlb_inv_range_s2()
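Range invalidation loops over the region in granule-sized steps. The stage-1 variant folds the ASID into the invalidate value, while the stage-2 variant at lines 308-317 pre-shifts the IPA right by 12 bits and uses the AARCH64 format check only to pick a 64-bit versus 32-bit register write. A condensed sketch of the stage-2 loop, with the MMIO writes stubbed out as stand-ins:

	static void cb_write32_sketch(u64 v) { (void)v; }   /* stand-in MMIO */
	static void cb_write64_sketch(u64 v) { (void)v; }   /* stand-in MMIO */

	static void tlb_inv_range_s2_sketch(u64 iova, size_t size,
					    size_t granule, bool aarch64_fmt)
	{
		iova >>= 12;            /* TLBIIPAS2 takes IPA bits [..:12] */
		do {
			if (aarch64_fmt)
				cb_write64_sketch(iova);
			else
				cb_write32_sketch(iova);
			iova += granule >> 12;
		} while (size -= granule);
	}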
328 struct arm_smmu_domain *smmu_domain = cookie; in arm_smmu_tlb_inv_walk_s1() local
329 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in arm_smmu_tlb_inv_walk_s1()
380 struct arm_smmu_domain *smmu_domain = cookie; in arm_smmu_tlb_add_page_s2_v1() local
381 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_tlb_add_page_s2_v1()
386 arm_smmu_gr0_write(smmu, ARM_SMMU_GR0_TLBIVMID, smmu_domain->cfg.vmid); in arm_smmu_tlb_add_page_s2_v1()
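arm_smmu_tlb_inv_walk_s1 (lines 328-329) reuses the S1 range machinery with the ASID pulled from cfg. arm_smmu_tlb_add_page_s2_v1 (lines 380-386) is the SMMUv1 fallback: stage 2 has no per-page invalidate on that hardware, so every leaf op widens to the same full TLBIVMID flush as the context invalidate, preceded by a write barrier when table walks are coherent. A hedged sketch of that fallback:

	#include <asm/barrier.h>

	/* SMMUv1 stage 2: no per-page op, so a leaf unmap costs a full
	 * by-VMID flush; the barrier mirrors the COHERENT_WALK check. */
	static void tlb_add_page_s2_v1_sketch(struct arm_smmu_domain_sketch *d,
					      bool coherent_walk)
	{
		if (coherent_walk)
			wmb();  /* order the PTE update before the TLBI */
		/* arm_smmu_gr0_write(smmu, ARM_SMMU_GR0_TLBIVMID, d->cfg.vmid); */
		(void)d;
	}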
454 struct arm_smmu_domain *smmu_domain = dev; in arm_smmu_context_fault() local
455 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_context_fault()
458 int idx = smmu_domain->cfg.cbndx; in arm_smmu_context_fault()
466 ret = report_iommu_fault(&smmu_domain->domain, NULL, cfi.iova, in arm_smmu_context_fault()
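The context fault handler (lines 454-466) decodes the fault from the context bank and first offers it to any handler registered on the domain via report_iommu_fault(); when no handler is installed (-ENOSYS) the driver logs the decoded fault, then writes the sticky bits back to FSR to clear them. A sketch of that flow, with the fault decode and MMIO stubbed; dev_id really is the smmu_domain passed to devm_request_irq() at init time:

	#include <linux/errno.h>
	#include <linux/interrupt.h>

	static irqreturn_t context_fault_sketch(int irq, void *dev_id)
	{
		struct arm_smmu_domain_sketch *d = dev_id;
		unsigned long iova = 0;       /* decoded from CB_FAR in reality */
		int flags = IOMMU_FAULT_READ; /* IOMMU_FAULT_WRITE if FSR.WNR */

		/* Offer the fault to the domain's handler first. */
		if (report_iommu_fault(&d->domain, NULL, iova, flags) == -ENOSYS) {
			/* no handler: ratelimited dump of FSR/FAR in the driver */
		}

		/* write the fault bits back to CB_FSR to clear them */
		return IRQ_HANDLED;
	}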
509 static void arm_smmu_init_context_bank(struct arm_smmu_domain *smmu_domain, in arm_smmu_init_context_bank() argument
512 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in arm_smmu_init_context_bank()
513 struct arm_smmu_cb *cb = &smmu_domain->smmu->cbs[cfg->cbndx]; in arm_smmu_init_context_bank()
655 static int arm_smmu_alloc_context_bank(struct arm_smmu_domain *smmu_domain, in arm_smmu_alloc_context_bank() argument
660 return smmu->impl->alloc_context_bank(smmu_domain, smmu, dev, start); in arm_smmu_alloc_context_bank()
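arm_smmu_alloc_context_bank() (lines 655-660) is a thin indirection: if the implementation provides alloc_context_bank() it wins, which is how per-SoC quirks such as reserved banks slot in; otherwise the core scans the context-bank bitmap from `start` (via __arm_smmu_alloc_bitmap() in the full source). Sketch, with a hypothetical bitmap_alloc_sketch() standing in for that fallback and impl passed explicitly since the device stand-in is opaque:

	struct device;

	struct arm_smmu_impl_sketch {
		int (*alloc_context_bank)(struct arm_smmu_domain_sketch *d,
					  struct arm_smmu_device_sketch *smmu,
					  struct device *dev, unsigned int start);
	};

	static int bitmap_alloc_sketch(unsigned int start);    /* hypothetical */

	static int alloc_context_bank_sketch(struct arm_smmu_domain_sketch *d,
					     struct arm_smmu_device_sketch *smmu,
					     const struct arm_smmu_impl_sketch *impl,
					     struct device *dev, unsigned int start)
	{
		if (impl && impl->alloc_context_bank)
			return impl->alloc_context_bank(d, smmu, dev, start);

		return bitmap_alloc_sketch(start);
	}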
665 static int arm_smmu_init_domain_context(struct arm_smmu_domain *smmu_domain, in arm_smmu_init_domain_context() argument
674 struct iommu_domain *domain = &smmu_domain->domain; in arm_smmu_init_domain_context()
675 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in arm_smmu_init_domain_context()
678 mutex_lock(&smmu_domain->init_mutex); in arm_smmu_init_domain_context()
679 if (smmu_domain->smmu) in arm_smmu_init_domain_context()
701 smmu_domain->stage = ARM_SMMU_DOMAIN_S2; in arm_smmu_init_domain_context()
703 smmu_domain->stage = ARM_SMMU_DOMAIN_S1; in arm_smmu_init_domain_context()
718 (smmu_domain->stage == ARM_SMMU_DOMAIN_S1)) in arm_smmu_init_domain_context()
731 switch (smmu_domain->stage) { in arm_smmu_init_domain_context()
748 smmu_domain->flush_ops = &arm_smmu_s1_tlb_ops; in arm_smmu_init_domain_context()
768 smmu_domain->flush_ops = &arm_smmu_s2_tlb_ops_v2; in arm_smmu_init_domain_context()
770 smmu_domain->flush_ops = &arm_smmu_s2_tlb_ops_v1; in arm_smmu_init_domain_context()
777 ret = arm_smmu_alloc_context_bank(smmu_domain, smmu, dev, start); in arm_smmu_init_domain_context()
782 smmu_domain->smmu = smmu; in arm_smmu_init_domain_context()
792 if (smmu_domain->stage == ARM_SMMU_DOMAIN_S2) in arm_smmu_init_domain_context()
802 .tlb = smmu_domain->flush_ops, in arm_smmu_init_domain_context()
807 ret = smmu->impl->init_context(smmu_domain, &pgtbl_cfg, dev); in arm_smmu_init_domain_context()
812 if (smmu_domain->pgtbl_quirks) in arm_smmu_init_domain_context()
813 pgtbl_cfg.quirks |= smmu_domain->pgtbl_quirks; in arm_smmu_init_domain_context()
815 pgtbl_ops = alloc_io_pgtable_ops(fmt, &pgtbl_cfg, smmu_domain); in arm_smmu_init_domain_context()
834 arm_smmu_init_context_bank(smmu_domain, &pgtbl_cfg); in arm_smmu_init_domain_context()
853 smmu_domain); in arm_smmu_init_domain_context()
856 "arm-smmu-context-fault", smmu_domain); in arm_smmu_init_domain_context()
864 mutex_unlock(&smmu_domain->init_mutex); in arm_smmu_init_domain_context()
867 smmu_domain->pgtbl_ops = pgtbl_ops; in arm_smmu_init_domain_context()
872 smmu_domain->smmu = NULL; in arm_smmu_init_domain_context()
874 mutex_unlock(&smmu_domain->init_mutex); in arm_smmu_init_domain_context()
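arm_smmu_init_domain_context() (lines 665-874) is the heart of this listing: under init_mutex it picks a translation stage, selects flush_ops, allocates a context bank, builds an io_pgtable_cfg that impl->init_context() may adjust, allocates the page-table ops, programs the context bank, and requests the context IRQ. On failure smmu_domain->smmu is reset to NULL (line 872) so the domain stays retryable, and note that pgtbl_ops is published only after init_mutex is dropped (lines 864-867), as the final step that makes map/unmap live. A condensed sketch of that ordering, error paths collapsed:

	static int init_domain_context_sketch(struct arm_smmu_domain_sketch *d,
					      struct arm_smmu_device_sketch *smmu,
					      struct device *dev)
	{
		struct io_pgtable_ops *ops = NULL;
		int ret = 0;

		mutex_lock(&d->init_mutex);
		if (d->smmu)            /* already initialised: just exit */
			goto out_unlock;

		/* 1. choose stage (S1/S2/nested) and the matching flush_ops   */
		/* 2. allocate a context bank, then publish d->smmu = smmu     */
		/* 3. build io_pgtable_cfg; impl->init_context() may adjust it */
		/* 4. ops = alloc_io_pgtable_ops(); program bank; request IRQ  */

		if (ret) {
			d->smmu = NULL; /* failure leaves the domain retryable */
			goto out_unlock;
		}

		mutex_unlock(&d->init_mutex);
		d->pgtbl_ops = ops;     /* published last: map/unmap go live */
		return 0;

	out_unlock:
		mutex_unlock(&d->init_mutex);
		return ret;
	}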
878 static void arm_smmu_destroy_domain_context(struct arm_smmu_domain *smmu_domain) in arm_smmu_destroy_domain_context() argument
880 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_destroy_domain_context()
881 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in arm_smmu_destroy_domain_context()
900 devm_free_irq(smmu->dev, irq, smmu_domain); in arm_smmu_destroy_domain_context()
903 free_io_pgtable_ops(smmu_domain->pgtbl_ops); in arm_smmu_destroy_domain_context()
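Teardown (lines 878-903) runs init in reverse, with one ordering point worth noting: the context IRQ is released with devm_free_irq() before free_io_pgtable_ops(), so no context fault can be reported against tables that are already gone, and the bank index is returned to the allocator afterwards. Sketch:

	static void destroy_domain_context_sketch(struct arm_smmu_domain_sketch *d,
						  struct device *smmu_dev, int irq)
	{
		/* disable the context bank, then drop the fault IRQ first... */
		devm_free_irq(smmu_dev, irq, d);

		/* ...so no fault handler can run against freed page tables */
		free_io_pgtable_ops(d->pgtbl_ops);

		/* finally return d->cfg.cbndx to the context-bank bitmap */
	}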
911 struct arm_smmu_domain *smmu_domain; in arm_smmu_domain_alloc_paging() local
918 smmu_domain = kzalloc(sizeof(*smmu_domain), GFP_KERNEL); in arm_smmu_domain_alloc_paging()
919 if (!smmu_domain) in arm_smmu_domain_alloc_paging()
922 mutex_init(&smmu_domain->init_mutex); in arm_smmu_domain_alloc_paging()
923 spin_lock_init(&smmu_domain->cb_lock); in arm_smmu_domain_alloc_paging()
925 return &smmu_domain->domain; in arm_smmu_domain_alloc_paging()
930 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_domain_free() local
936 arm_smmu_destroy_domain_context(smmu_domain); in arm_smmu_domain_free()
937 kfree(smmu_domain); in arm_smmu_domain_free()
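Note what domain allocation (lines 911-925) does not do: it never touches hardware. Only the mutex and spinlock are initialised; binding to a specific SMMU instance is deferred to the first attach, which is why arm_smmu_domain_free() tears down a context only if one was actually created. Sketch of the allocation side:

	#include <linux/slab.h>

	static struct iommu_domain *domain_alloc_paging_sketch(struct device *dev)
	{
		struct arm_smmu_domain_sketch *d;

		d = kzalloc(sizeof(*d), GFP_KERNEL);
		if (!d)
			return NULL;

		/* No hardware touched: d->smmu stays NULL until first attach. */
		mutex_init(&d->init_mutex);
		spin_lock_init(&d->cb_lock);

		return &d->domain;      /* to_smmu_domain() is a container_of */
	}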
1157 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_attach_dev() local
1181 ret = arm_smmu_init_domain_context(smmu_domain, smmu, dev); in arm_smmu_attach_dev()
1189 if (smmu_domain->smmu != smmu) { in arm_smmu_attach_dev()
1196 smmu_domain->cfg.cbndx, fwspec); in arm_smmu_attach_dev()
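Attach (lines 1157-1196) is where the deferred initialisation lands: arm_smmu_init_domain_context() runs against the SMMU instance that owns the device, and the smmu_domain->smmu != smmu check that follows rejects sharing one domain across two different SMMUs before any stream-ID-to-context-bank mapping is written. Sketch of the guard; the error code matches the -EINVAL path in the full source:

	static int attach_dev_sketch(struct arm_smmu_domain_sketch *d,
				     struct arm_smmu_device_sketch *smmu,
				     struct device *dev)
	{
		int ret = init_domain_context_sketch(d, smmu, dev);

		if (ret)
			return ret;

		/* A live domain is bound to exactly one SMMU instance. */
		if (d->smmu != smmu)
			return -EINVAL;

		/* ...then steer the device's stream IDs at d->cfg.cbndx */
		return 0;
	}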
1293 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_flush_iotlb_all() local
1294 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_flush_iotlb_all()
1296 if (smmu_domain->flush_ops) { in arm_smmu_flush_iotlb_all()
1298 smmu_domain->flush_ops->tlb_flush_all(smmu_domain); in arm_smmu_flush_iotlb_all()
1306 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_iotlb_sync() local
1307 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_iotlb_sync()
1314 smmu_domain->stage == ARM_SMMU_DOMAIN_S1) in arm_smmu_iotlb_sync()
1315 arm_smmu_tlb_sync_context(smmu_domain); in arm_smmu_iotlb_sync()
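The two IOTLB callbacks split the work: flush_iotlb_all (lines 1293-1298) delegates to the stage-specific tlb_flush_all, guarded because flush_ops stays NULL until the first attach, while iotlb_sync (lines 1306-1315) uses the per-context sync when the hardware is SMMUv2 or the domain is stage 1, falling back to a global sync otherwise in the full source. Sketch:

	static void flush_iotlb_all_sketch(struct arm_smmu_domain_sketch *d)
	{
		if (d->flush_ops)       /* NULL until the first attach */
			d->flush_ops->tlb_flush_all(d);
	}

	static void iotlb_sync_sketch(struct arm_smmu_domain_sketch *d,
				      bool is_v2)
	{
		if (is_v2 || d->stage == 1 /* stand-in for ARM_SMMU_DOMAIN_S1 */)
			tlb_sync_context_sketch(d);
		/* else: arm_smmu_tlb_sync_global(smmu) in the full source */
	}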
1324 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_iova_to_phys_hard() local
1325 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_iova_to_phys_hard()
1326 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in arm_smmu_iova_to_phys_hard()
1327 struct io_pgtable_ops *ops = smmu_domain->pgtbl_ops; in arm_smmu_iova_to_phys_hard()
1340 spin_lock_irqsave(&smmu_domain->cb_lock, flags); in arm_smmu_iova_to_phys_hard()
1350 spin_unlock_irqrestore(&smmu_domain->cb_lock, flags); in arm_smmu_iova_to_phys_hard()
1359 spin_unlock_irqrestore(&smmu_domain->cb_lock, flags); in arm_smmu_iova_to_phys_hard()
1376 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_iova_to_phys() local
1377 struct io_pgtable_ops *ops = smmu_domain->pgtbl_ops; in arm_smmu_iova_to_phys()
1382 if (smmu_domain->smmu->features & ARM_SMMU_FEAT_TRANS_OPS && in arm_smmu_iova_to_phys()
1383 smmu_domain->stage == ARM_SMMU_DOMAIN_S1) in arm_smmu_iova_to_phys()
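iova_to_phys (lines 1376-1383) dispatches between two paths: when the SMMU advertises ARM_SMMU_FEAT_TRANS_OPS and the domain is stage 1, arm_smmu_iova_to_phys_hard() (lines 1324-1359) asks the hardware itself, writing the context bank's ATS1PR register and polling ATSR under cb_lock, which also exercises the TLB; otherwise the answer comes from a software walk through pgtbl_ops->iova_to_phys(). Sketch of the dispatch, with the feature test reduced to a boolean stand-in:

	static phys_addr_t
	iova_to_phys_hard_sketch(struct arm_smmu_domain_sketch *d,
				 dma_addr_t iova);      /* ATS path, stubbed */

	static phys_addr_t iova_to_phys_sketch(struct arm_smmu_domain_sketch *d,
					       dma_addr_t iova,
					       bool has_trans_ops)
	{
		if (!d->pgtbl_ops)
			return 0;

		/* Hardware translation when supported; stage 1 only. */
		if (has_trans_ops && d->stage == 1 /* S1 stand-in */)
			return iova_to_phys_hard_sketch(d, iova);

		/* Otherwise walk the tables in software. */
		return d->pgtbl_ops->iova_to_phys(d->pgtbl_ops, iova);
	}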
1563 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_enable_nesting() local
1566 mutex_lock(&smmu_domain->init_mutex); in arm_smmu_enable_nesting()
1567 if (smmu_domain->smmu) in arm_smmu_enable_nesting()
1570 smmu_domain->stage = ARM_SMMU_DOMAIN_NESTED; in arm_smmu_enable_nesting()
1571 mutex_unlock(&smmu_domain->init_mutex); in arm_smmu_enable_nesting()
1579 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_set_pgtable_quirks() local
1582 mutex_lock(&smmu_domain->init_mutex); in arm_smmu_set_pgtable_quirks()
1583 if (smmu_domain->smmu) in arm_smmu_set_pgtable_quirks()
1586 smmu_domain->pgtbl_quirks = quirks; in arm_smmu_set_pgtable_quirks()
1587 mutex_unlock(&smmu_domain->init_mutex); in arm_smmu_set_pgtable_quirks()
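The last two entries show the same configure-before-attach discipline: enable_nesting (lines 1563-1571) and set_pgtable_quirks (lines 1579-1587) both take init_mutex and, in the full source, fail with -EPERM once smmu_domain->smmu is non-NULL, i.e. once the domain has been initialised against hardware; before that point they set the stage or quirk bits that init_domain_context() will later consume. Sketch of the pattern:

	static int set_pgtable_quirks_sketch(struct arm_smmu_domain_sketch *d,
					     unsigned long quirks)
	{
		int ret = 0;

		mutex_lock(&d->init_mutex);
		if (d->smmu)
			ret = -EPERM;   /* too late: context already built */
		else
			d->pgtbl_quirks = quirks;
		mutex_unlock(&d->init_mutex);

		return ret;
	}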