Lines matching +full:fw +full:- +full:cfg in the Arm SMMU (SMMUv1/v2) driver header, drivers/iommu/arm/arm-smmu/arm-smmu.h in the Linux kernel.
/* SPDX-License-Identifier: GPL-2.0-only */

#include <linux/io-64-nonatomic-hi-lo.h>
#include <linux/io-pgtable.h>
/* Member of struct arm_smmu_cb, the driver's cached copy of one context bank: */
	struct arm_smmu_cfg		*cfg;

/* Member of struct arm_smmu_domain: */
	struct arm_smmu_cfg		cfg;
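For reference, struct arm_smmu_cfg describes how one context bank is bound to a domain. A condensed sketch (the exact field set varies across kernel versions):

struct arm_smmu_cfg {
	u8				cbndx;		/* context bank index */
	u8				irptndx;	/* context interrupt index */
	union {
		u16			asid;		/* stage 1 translations */
		u16			vmid;		/* stage 2 translations */
	};
	enum arm_smmu_cbar_type		cbar;
	enum arm_smmu_context_fmt	fmt;
};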
static inline u32 arm_smmu_lpae_tcr(const struct io_pgtable_cfg *cfg)
{
	u32 tcr = FIELD_PREP(ARM_SMMU_TCR_TG0, cfg->arm_lpae_s1_cfg.tcr.tg) |
		FIELD_PREP(ARM_SMMU_TCR_SH0, cfg->arm_lpae_s1_cfg.tcr.sh) |
		FIELD_PREP(ARM_SMMU_TCR_ORGN0, cfg->arm_lpae_s1_cfg.tcr.orgn) |
		FIELD_PREP(ARM_SMMU_TCR_IRGN0, cfg->arm_lpae_s1_cfg.tcr.irgn) |
		FIELD_PREP(ARM_SMMU_TCR_T0SZ, cfg->arm_lpae_s1_cfg.tcr.tsz);

	/*
	 * When TTBR1 is selected, shift the TCR fields by 16 bits and disable
	 * translation in TTBR0.
	 */
	if (cfg->quirks & IO_PGTABLE_QUIRK_ARM_TTBR1) {
		tcr = (tcr << 16) & ~ARM_SMMU_TCR_A1;
		tcr |= ARM_SMMU_TCR_EPD1;
	} else {
		tcr |= ARM_SMMU_TCR_EPD0;
	}

	return tcr;
}
static inline u32 arm_smmu_lpae_tcr2(const struct io_pgtable_cfg *cfg)
{
	return FIELD_PREP(ARM_SMMU_TCR2_PASIZE, cfg->arm_lpae_s1_cfg.tcr.ips) |
	       FIELD_PREP(ARM_SMMU_TCR2_SEP, ARM_SMMU_TCR2_SEP_UPSTREAM);
}
static inline u32 arm_smmu_lpae_vtcr(const struct io_pgtable_cfg *cfg)
{
	return ARM_SMMU_VTCR_RES1 |
	       FIELD_PREP(ARM_SMMU_VTCR_PS, cfg->arm_lpae_s2_cfg.vtcr.ps) |
	       FIELD_PREP(ARM_SMMU_VTCR_TG0, cfg->arm_lpae_s2_cfg.vtcr.tg) |
	       FIELD_PREP(ARM_SMMU_VTCR_SH0, cfg->arm_lpae_s2_cfg.vtcr.sh) |
	       FIELD_PREP(ARM_SMMU_VTCR_ORGN0, cfg->arm_lpae_s2_cfg.vtcr.orgn) |
	       FIELD_PREP(ARM_SMMU_VTCR_IRGN0, cfg->arm_lpae_s2_cfg.vtcr.irgn) |
	       FIELD_PREP(ARM_SMMU_VTCR_SL0, cfg->arm_lpae_s2_cfg.vtcr.sl) |
	       FIELD_PREP(ARM_SMMU_VTCR_T0SZ, cfg->arm_lpae_s2_cfg.vtcr.tsz);
}
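These three packers are consumed when a context bank is initialised. A sketch modelled on arm_smmu_init_context_bank() in arm-smmu.c (cb, stage1 and pgtbl_cfg are assumed locals):

	struct arm_smmu_cb *cb = &smmu->cbs[cfg->cbndx];

	if (stage1) {
		cb->tcr[0] = arm_smmu_lpae_tcr(pgtbl_cfg);
		cb->tcr[1] = arm_smmu_lpae_tcr2(pgtbl_cfg);
	} else {
		cb->tcr[0] = arm_smmu_lpae_vtcr(pgtbl_cfg);
	}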
/* Member of struct arm_smmu_impl, the hook table for SoC-specific behaviour: */
	int (*init_context)(struct arm_smmu_domain *smmu_domain,
			struct io_pgtable_cfg *cfg, struct device *dev);
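The shape of an init_context override, as a hypothetical sketch (the example_* names are invented for illustration; the hook lets an implementation adjust the page-table config before the tables are allocated):

static int example_init_context(struct arm_smmu_domain *smmu_domain,
				struct io_pgtable_cfg *pgtbl_cfg,
				struct device *dev)
{
	/* e.g. apply a SoC-specific page-table quirk */
	pgtbl_cfg->quirks |= IO_PGTABLE_QUIRK_ARM_TTBR1;
	return 0;
}

static const struct arm_smmu_impl example_impl = {
	.init_context = example_init_context,
};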
#define INVALID_SMENDX			-1
/* 'cfg' is a master's struct arm_smmu_master_cfg, 'fw' its struct iommu_fwspec */
#define cfg_smendx(cfg, fw, i) \
	(i >= fw->num_ids ? INVALID_SMENDX : cfg->smendx[i])
#define for_each_cfg_sme(cfg, fw, i, idx) \
	for (i = 0; idx = cfg_smendx(cfg, fw, i), i < fw->num_ids; ++i)
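Typical iteration over a master's stream-map entries, sketched after the driver's SME management code (the loop body here is illustrative, not the exact driver code):

	int i, idx;

	for_each_cfg_sme(cfg, fwspec, i, idx) {
		if (idx == INVALID_SMENDX)
			continue;	/* no SMR/S2CR slot behind this ID */
		arm_smmu_write_sme(smmu, idx);
	}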
/*
 * Tail of __arm_smmu_alloc_bitmap(), which claims a free slot in a
 * context-bank or SMR allocation bitmap:
 */
		return -ENOSPC;

static inline void __iomem *arm_smmu_page(struct arm_smmu_device *smmu, int n)
{
	return smmu->base + (n << smmu->pgshift);
}
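A sketch of how the bitmap allocator claims a context bank, following arm_smmu_init_domain_context() ('start' bounds the search and cfg->cbndx records the result):

	int ret = __arm_smmu_alloc_bitmap(smmu->context_map, start,
					  smmu->num_context_banks);
	if (ret < 0)
		return ret;	/* -ENOSPC: every context bank is in use */

	cfg->cbndx = ret;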
/*
 * Register accessors: an implementation may override each access; otherwise
 * fall back to relaxed MMIO on the page computed by arm_smmu_page(). The
 * io-64-nonatomic include above provides the 64-bit relaxed helpers on
 * 32-bit hosts.
 */
static inline u32 arm_smmu_readl(struct arm_smmu_device *smmu, int page, int offset)
{
	if (smmu->impl && unlikely(smmu->impl->read_reg))
		return smmu->impl->read_reg(smmu, page, offset);
	return readl_relaxed(arm_smmu_page(smmu, page) + offset);
}

static inline void arm_smmu_writel(struct arm_smmu_device *smmu, int page,
				   int offset, u32 val)
{
	if (smmu->impl && unlikely(smmu->impl->write_reg))
		smmu->impl->write_reg(smmu, page, offset, val);
	else
		writel_relaxed(val, arm_smmu_page(smmu, page) + offset);
}

static inline u64 arm_smmu_readq(struct arm_smmu_device *smmu, int page, int offset)
{
	if (smmu->impl && unlikely(smmu->impl->read_reg64))
		return smmu->impl->read_reg64(smmu, page, offset);
	return readq_relaxed(arm_smmu_page(smmu, page) + offset);
}

static inline void arm_smmu_writeq(struct arm_smmu_device *smmu, int page,
				   int offset, u64 val)
{
	if (smmu->impl && unlikely(smmu->impl->write_reg64))
		smmu->impl->write_reg64(smmu, page, offset, val);
	else
		writeq_relaxed(val, arm_smmu_page(smmu, page) + offset);
}
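Example use, sketched after the driver's TLB sync path ('sync' and 'status' are register offsets passed by the caller; the real loop also applies a timeout):

	arm_smmu_writel(smmu, page, sync, QCOM_DUMMY_VAL);
	while (arm_smmu_readl(smmu, page, status) &
	       ARM_SMMU_sTLBGSTATUS_GSACTIVE)
		cpu_relax();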
/* Page index of context bank n: the CBs follow the global register pages. */
#define ARM_SMMU_CB(s, n)	((s)->numpage + (n))
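ARM_SMMU_CB() composes with the accessors above; the header pairs it with thin wrapper macros along these lines (condensed):

#define ARM_SMMU_GR0			0
#define ARM_SMMU_GR1			1

#define arm_smmu_gr0_read(s, o)		arm_smmu_readl((s), ARM_SMMU_GR0, (o))
#define arm_smmu_gr0_write(s, o, v)	arm_smmu_writel((s), ARM_SMMU_GR0, (o), (v))

#define arm_smmu_cb_read(s, n, o)	arm_smmu_readl((s), ARM_SMMU_CB((s), (n)), (o))
#define arm_smmu_cb_write(s, n, o, v)	arm_smmu_writel((s), ARM_SMMU_CB((s), (n)), (o), (v))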