Lines Matching refs:smmu_domain

549 struct arm_smmu_domain *smmu_domain = cookie; in arm_smmu_tlb_sync() local
550 __arm_smmu_tlb_sync(smmu_domain->smmu); in arm_smmu_tlb_sync()
555 struct arm_smmu_domain *smmu_domain = cookie; in arm_smmu_tlb_inv_context() local
556 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in arm_smmu_tlb_inv_context()
557 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_tlb_inv_context()
577 struct arm_smmu_domain *smmu_domain = cookie; in arm_smmu_tlb_inv_range_nosync() local
578 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in arm_smmu_tlb_inv_range_nosync()
579 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_tlb_inv_range_nosync()
613 struct arm_smmu_domain *smmu_domain = cookie; in arm_smmu_flush_pgtable() local
614 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_flush_pgtable()
647 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_context_fault() local
648 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in arm_smmu_context_fault()
649 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_context_fault()
718 static void arm_smmu_init_context_bank(struct arm_smmu_domain *smmu_domain, in arm_smmu_init_context_bank() argument
723 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in arm_smmu_init_context_bank()
724 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_init_context_bank()
823 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_init_domain_context() local
824 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in arm_smmu_init_domain_context()
826 mutex_lock(&smmu_domain->init_mutex); in arm_smmu_init_domain_context()
827 if (smmu_domain->smmu) in arm_smmu_init_domain_context()
849 smmu_domain->stage = ARM_SMMU_DOMAIN_S2; in arm_smmu_init_domain_context()
851 smmu_domain->stage = ARM_SMMU_DOMAIN_S1; in arm_smmu_init_domain_context()
853 switch (smmu_domain->stage) { in arm_smmu_init_domain_context()
904 smmu_domain->smmu = smmu; in arm_smmu_init_domain_context()
905 pgtbl_ops = alloc_io_pgtable_ops(fmt, &pgtbl_cfg, smmu_domain); in arm_smmu_init_domain_context()
915 arm_smmu_init_context_bank(smmu_domain, &pgtbl_cfg); in arm_smmu_init_domain_context()
930 mutex_unlock(&smmu_domain->init_mutex); in arm_smmu_init_domain_context()
933 smmu_domain->pgtbl_ops = pgtbl_ops; in arm_smmu_init_domain_context()
937 smmu_domain->smmu = NULL; in arm_smmu_init_domain_context()
939 mutex_unlock(&smmu_domain->init_mutex); in arm_smmu_init_domain_context()
945 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_destroy_domain_context() local
946 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_destroy_domain_context()
947 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in arm_smmu_destroy_domain_context()
966 if (smmu_domain->pgtbl_ops) in arm_smmu_destroy_domain_context()
967 free_io_pgtable_ops(smmu_domain->pgtbl_ops); in arm_smmu_destroy_domain_context()
974 struct arm_smmu_domain *smmu_domain; in arm_smmu_domain_alloc() local
983 smmu_domain = kzalloc(sizeof(*smmu_domain), GFP_KERNEL); in arm_smmu_domain_alloc()
984 if (!smmu_domain) in arm_smmu_domain_alloc()
987 mutex_init(&smmu_domain->init_mutex); in arm_smmu_domain_alloc()
988 spin_lock_init(&smmu_domain->pgtbl_lock); in arm_smmu_domain_alloc()
990 return &smmu_domain->domain; in arm_smmu_domain_alloc()
995 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_domain_free() local
1002 kfree(smmu_domain); in arm_smmu_domain_free()
1080 static int arm_smmu_domain_add_master(struct arm_smmu_domain *smmu_domain, in arm_smmu_domain_add_master() argument
1084 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_domain_add_master()
1097 (smmu_domain->cfg.cbndx << S2CR_CBNDX_SHIFT); in arm_smmu_domain_add_master()
1104 static void arm_smmu_domain_remove_master(struct arm_smmu_domain *smmu_domain, in arm_smmu_domain_remove_master() argument
1108 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_domain_remove_master()
1132 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_attach_dev() local
1156 if (smmu_domain->smmu != smmu) { in arm_smmu_attach_dev()
1159 dev_name(smmu_domain->smmu->dev), dev_name(smmu->dev)); in arm_smmu_attach_dev()
1168 ret = arm_smmu_domain_add_master(smmu_domain, cfg); in arm_smmu_attach_dev()
1176 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_detach_dev() local
1184 arm_smmu_domain_remove_master(smmu_domain, cfg); in arm_smmu_detach_dev()
1192 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_map() local
1193 struct io_pgtable_ops *ops = smmu_domain->pgtbl_ops; in arm_smmu_map()
1198 spin_lock_irqsave(&smmu_domain->pgtbl_lock, flags); in arm_smmu_map()
1200 spin_unlock_irqrestore(&smmu_domain->pgtbl_lock, flags); in arm_smmu_map()
1209 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_unmap() local
1210 struct io_pgtable_ops *ops = smmu_domain->pgtbl_ops; in arm_smmu_unmap()
1215 spin_lock_irqsave(&smmu_domain->pgtbl_lock, flags); in arm_smmu_unmap()
1217 spin_unlock_irqrestore(&smmu_domain->pgtbl_lock, flags); in arm_smmu_unmap()
1224 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_iova_to_phys_hard() local
1225 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_iova_to_phys_hard()
1226 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in arm_smmu_iova_to_phys_hard()
1227 struct io_pgtable_ops *ops = smmu_domain->pgtbl_ops; in arm_smmu_iova_to_phys_hard()
1270 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_iova_to_phys() local
1271 struct io_pgtable_ops *ops = smmu_domain->pgtbl_ops; in arm_smmu_iova_to_phys()
1276 spin_lock_irqsave(&smmu_domain->pgtbl_lock, flags); in arm_smmu_iova_to_phys()
1277 if (smmu_domain->smmu->features & ARM_SMMU_FEAT_TRANS_OPS && in arm_smmu_iova_to_phys()
1278 smmu_domain->stage == ARM_SMMU_DOMAIN_S1) { in arm_smmu_iova_to_phys()
1284 spin_unlock_irqrestore(&smmu_domain->pgtbl_lock, flags); in arm_smmu_iova_to_phys()
1403 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_domain_get_attr() local
1407 *(int *)data = (smmu_domain->stage == ARM_SMMU_DOMAIN_NESTED); in arm_smmu_domain_get_attr()
1418 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_domain_set_attr() local
1420 mutex_lock(&smmu_domain->init_mutex); in arm_smmu_domain_set_attr()
1424 if (smmu_domain->smmu) { in arm_smmu_domain_set_attr()
1430 smmu_domain->stage = ARM_SMMU_DOMAIN_NESTED; in arm_smmu_domain_set_attr()
1432 smmu_domain->stage = ARM_SMMU_DOMAIN_S1; in arm_smmu_domain_set_attr()
1440 mutex_unlock(&smmu_domain->init_mutex); in arm_smmu_domain_set_attr()
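
Taken together, the fields referenced above imply roughly the following layout for struct arm_smmu_domain. This is a minimal sketch reconstructed only from the lines listed here, not the full definition in drivers/iommu/arm-smmu.c; type names that the listing does not spell out (such as the stage enum) are assumptions.

	/* Sketch: members inferred from the references above, not the authoritative definition. */
	struct arm_smmu_domain {
		struct arm_smmu_device		*smmu;		/* owning SMMU; NULL until the context is initialised */
		struct arm_smmu_cfg		cfg;		/* per-domain context-bank configuration */
		enum arm_smmu_domain_stage	stage;		/* ARM_SMMU_DOMAIN_S1, _S2 or _NESTED */
		struct io_pgtable_ops		*pgtbl_ops;	/* io-pgtable callbacks used by map/unmap */
		spinlock_t			pgtbl_lock;	/* protects page-table updates and lookups */
		struct mutex			init_mutex;	/* serialises context init and attribute changes */
		struct iommu_domain		domain;		/* generic IOMMU domain; to_smmu_domain() unwraps it */
	};

The two locks split cleanly in the listing: init_mutex is taken on the slow paths (arm_smmu_init_domain_context() and arm_smmu_domain_set_attr()), while pgtbl_lock guards the hot paths arm_smmu_map(), arm_smmu_unmap() and arm_smmu_iova_to_phys().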