dom 55 arch/arm/include/asm/domain.h #define domain_mask(dom) ((3) << (2 * (dom))) dom 56 arch/arm/include/asm/domain.h #define domain_val(dom,type) ((type) << (2 * (dom))) dom 116 arch/arm/include/asm/domain.h #define modify_domain(dom,type) \ dom 119 arch/arm/include/asm/domain.h domain &= ~domain_mask(dom); \ dom 120 arch/arm/include/asm/domain.h domain = domain | domain_val(dom, type); \ dom 125 arch/arm/include/asm/domain.h static inline void modify_domain(unsigned dom, unsigned type) { } dom 91 arch/s390/include/asm/trace/zcrypt.h TP_PROTO(void *ptr, u32 fc, u32 rc, u16 dev, u16 dom), dom 92 arch/s390/include/asm/trace/zcrypt.h TP_ARGS(ptr, fc, rc, dev, dom), dom 104 arch/s390/include/asm/trace/zcrypt.h __entry->domain = dom;), dom 440 arch/x86/include/asm/xen/hypercall.h domid_t dom, unsigned int nr_bufs, struct xen_dm_op_buf *bufs) dom 444 arch/x86/include/asm/xen/hypercall.h ret = _hypercall3(int, dm_op, dom, nr_bufs, bufs); dom 359 arch/x86/kernel/apic/vector.c static void x86_vector_deactivate(struct irq_domain *dom, struct irq_data *irqd) dom 439 arch/x86/kernel/apic/vector.c static int x86_vector_activate(struct irq_domain *dom, struct irq_data *irqd, dom 258 arch/x86/kernel/cpu/resctrl/ctrlmondata.c char *dom = NULL, *id; dom 271 arch/x86/kernel/cpu/resctrl/ctrlmondata.c dom = strsep(&line, ";"); dom 272 arch/x86/kernel/cpu/resctrl/ctrlmondata.c id = strsep(&dom, "="); dom 273 arch/x86/kernel/cpu/resctrl/ctrlmondata.c if (!dom || kstrtoul(id, 10, &dom_id)) { dom 277 arch/x86/kernel/cpu/resctrl/ctrlmondata.c dom = strim(dom); dom 280 arch/x86/kernel/cpu/resctrl/ctrlmondata.c data.buf = dom; dom 367 arch/x86/kernel/cpu/resctrl/ctrlmondata.c struct rdt_domain *dom; dom 397 arch/x86/kernel/cpu/resctrl/ctrlmondata.c list_for_each_entry(dom, &r->domains, list) dom 398 arch/x86/kernel/cpu/resctrl/ctrlmondata.c dom->have_new_ctrl = false; dom 442 arch/x86/kernel/cpu/resctrl/ctrlmondata.c struct rdt_domain *dom; dom 447 arch/x86/kernel/cpu/resctrl/ctrlmondata.c list_for_each_entry(dom, &r->domains, list) { dom 451 arch/x86/kernel/cpu/resctrl/ctrlmondata.c ctrl_val = (!is_mba_sc(r) ? 
dom->ctrl_val[closid] : dom 452 arch/x86/kernel/cpu/resctrl/ctrlmondata.c dom->mbps_val[closid]); dom 453 arch/x86/kernel/cpu/resctrl/ctrlmondata.c seq_printf(s, r->format_str, dom->id, max_data_width, dom 592 arch/x86/kernel/cpu/resctrl/internal.h void mbm_setup_overflow_handler(struct rdt_domain *dom, dom 598 arch/x86/kernel/cpu/resctrl/internal.h void cqm_setup_limbo_handler(struct rdt_domain *dom, unsigned long delay_ms); dom 496 arch/x86/kernel/cpu/resctrl/monitor.c void cqm_setup_limbo_handler(struct rdt_domain *dom, unsigned long delay_ms) dom 501 arch/x86/kernel/cpu/resctrl/monitor.c cpu = cpumask_any(&dom->cpu_mask); dom 502 arch/x86/kernel/cpu/resctrl/monitor.c dom->cqm_work_cpu = cpu; dom 504 arch/x86/kernel/cpu/resctrl/monitor.c schedule_delayed_work_on(cpu, &dom->cqm_limbo, delay); dom 541 arch/x86/kernel/cpu/resctrl/monitor.c void mbm_setup_overflow_handler(struct rdt_domain *dom, unsigned long delay_ms) dom 548 arch/x86/kernel/cpu/resctrl/monitor.c cpu = cpumask_any(&dom->cpu_mask); dom 549 arch/x86/kernel/cpu/resctrl/monitor.c dom->mbm_work_cpu = cpu; dom 550 arch/x86/kernel/cpu/resctrl/monitor.c schedule_delayed_work_on(cpu, &dom->mbm_over, delay); dom 803 arch/x86/kernel/cpu/resctrl/rdtgroup.c struct rdt_domain *dom; dom 811 arch/x86/kernel/cpu/resctrl/rdtgroup.c list_for_each_entry(dom, &r->domains, list) { dom 814 arch/x86/kernel/cpu/resctrl/rdtgroup.c ctrl = dom->ctrl_val; dom 817 arch/x86/kernel/cpu/resctrl/rdtgroup.c seq_printf(seq, "%d=", dom->id); dom 846 arch/x86/kernel/cpu/resctrl/rdtgroup.c pseudo_locked = dom->plr ? dom->plr->cbm : 0; dom 1960 arch/x86/kernel/cpu/resctrl/rdtgroup.c struct rdt_domain *dom; dom 2018 arch/x86/kernel/cpu/resctrl/rdtgroup.c list_for_each_entry(dom, &r->domains, list) dom 2019 arch/x86/kernel/cpu/resctrl/rdtgroup.c mbm_setup_overflow_handler(dom, MBM_OVERFLOW_INTERVAL); dom 2430 arch/x86/kernel/cpu/resctrl/rdtgroup.c struct rdt_domain *dom; dom 2433 arch/x86/kernel/cpu/resctrl/rdtgroup.c list_for_each_entry(dom, &r->domains, list) { dom 2434 arch/x86/kernel/cpu/resctrl/rdtgroup.c ret = mkdir_mondata_subdir(parent_kn, dom, r, prgrp); dom 38 drivers/edac/i10nm_base.c static struct pci_dev *pci_get_dev_wrapper(int dom, unsigned int bus, dom 43 drivers/edac/i10nm_base.c pdev = pci_get_domain_bus_and_slot(dom, bus, PCI_DEVFN(dev, fun)); dom 341 drivers/edac/sb_edac.c enum domain dom; dom 358 drivers/edac/sb_edac.c enum domain dom; dom 401 drivers/edac/sb_edac.c .dom = domain dom 732 drivers/edac/sb_edac.c static struct sbridge_dev *get_sbridge_dev(int seg, u8 bus, enum domain dom, dom 752 drivers/edac/sb_edac.c (dom == SOCK || dom == sbridge_dev->dom)) dom 759 drivers/edac/sb_edac.c static struct sbridge_dev *alloc_sbridge_dev(int seg, u8 bus, enum domain dom, dom 778 drivers/edac/sb_edac.c sbridge_dev->dom = dom; dom 1636 drivers/edac/sb_edac.c pvt->sbridge_dev->dom, i); dom 1657 drivers/edac/sb_edac.c pvt->sbridge_dev->mc, pvt->sbridge_dev->dom, i, j, dom 1668 drivers/edac/sb_edac.c pvt->sbridge_dev->source_id, pvt->sbridge_dev->dom, i, j); dom 1921 drivers/edac/sb_edac.c if (sbridge_dev->node_id == node_id && sbridge_dev->dom == ha) dom 2373 drivers/edac/sb_edac.c sbridge_dev = get_sbridge_dev(seg, bus, dev_descr->dom, dom 2377 drivers/edac/sb_edac.c if (dev_descr->dom == IMC1 && devno != 1) { dom 2384 drivers/edac/sb_edac.c if (dev_descr->dom == SOCK) dom 2387 drivers/edac/sb_edac.c sbridge_dev = alloc_sbridge_dev(seg, bus, dev_descr->dom, table); dom 2409 drivers/edac/sb_edac.c if (dev_descr->dom == SOCK && i < 
table->n_imcs_per_sock) dom 3291 drivers/edac/sb_edac.c pvt->sbridge_dev->source_id, pvt->sbridge_dev->dom); dom 3316 drivers/edac/sb_edac.c pvt->sbridge_dev->source_id, pvt->sbridge_dev->dom); dom 3341 drivers/edac/sb_edac.c pvt->sbridge_dev->source_id, pvt->sbridge_dev->dom); dom 3366 drivers/edac/sb_edac.c pvt->sbridge_dev->source_id, pvt->sbridge_dev->dom); dom 3390 drivers/edac/sb_edac.c pvt->sbridge_dev->source_id, pvt->sbridge_dev->dom); dom 358 drivers/firmware/arm_scmi/perf.c struct perf_dom_info *dom = pi->dom_info + domain; dom 360 drivers/firmware/arm_scmi/perf.c if (dom->fc_info && dom->fc_info->limit_set_addr) { dom 361 drivers/firmware/arm_scmi/perf.c iowrite32(max_perf, dom->fc_info->limit_set_addr); dom 362 drivers/firmware/arm_scmi/perf.c iowrite32(min_perf, dom->fc_info->limit_set_addr + 4); dom 363 drivers/firmware/arm_scmi/perf.c scmi_perf_fc_ring_db(dom->fc_info->limit_set_db); dom 400 drivers/firmware/arm_scmi/perf.c struct perf_dom_info *dom = pi->dom_info + domain; dom 402 drivers/firmware/arm_scmi/perf.c if (dom->fc_info && dom->fc_info->limit_get_addr) { dom 403 drivers/firmware/arm_scmi/perf.c *max_perf = ioread32(dom->fc_info->limit_get_addr); dom 404 drivers/firmware/arm_scmi/perf.c *min_perf = ioread32(dom->fc_info->limit_get_addr + 4); dom 438 drivers/firmware/arm_scmi/perf.c struct perf_dom_info *dom = pi->dom_info + domain; dom 440 drivers/firmware/arm_scmi/perf.c if (dom->fc_info && dom->fc_info->level_set_addr) { dom 441 drivers/firmware/arm_scmi/perf.c iowrite32(level, dom->fc_info->level_set_addr); dom 442 drivers/firmware/arm_scmi/perf.c scmi_perf_fc_ring_db(dom->fc_info->level_set_db); dom 475 drivers/firmware/arm_scmi/perf.c struct perf_dom_info *dom = pi->dom_info + domain; dom 477 drivers/firmware/arm_scmi/perf.c if (dom->fc_info && dom->fc_info->level_get_addr) { dom 478 drivers/firmware/arm_scmi/perf.c *level = ioread32(dom->fc_info->level_get_addr); dom 601 drivers/firmware/arm_scmi/perf.c struct perf_dom_info *dom; dom 608 drivers/firmware/arm_scmi/perf.c dom = pi->dom_info + domain; dom 610 drivers/firmware/arm_scmi/perf.c for (opp = dom->opp, idx = 0; idx < dom->opp_count; idx++, opp++) { dom 611 drivers/firmware/arm_scmi/perf.c freq = opp->perf * dom->mult_factor; dom 618 drivers/firmware/arm_scmi/perf.c freq = (--opp)->perf * dom->mult_factor; dom 630 drivers/firmware/arm_scmi/perf.c struct perf_dom_info *dom; dom 637 drivers/firmware/arm_scmi/perf.c dom = pi->dom_info + domain; dom 639 drivers/firmware/arm_scmi/perf.c return dom->opp[dom->opp_count - 1].trans_latency_us * 1000; dom 646 drivers/firmware/arm_scmi/perf.c struct perf_dom_info *dom = pi->dom_info + domain; dom 648 drivers/firmware/arm_scmi/perf.c return scmi_perf_level_set(handle, domain, freq / dom->mult_factor, dom 658 drivers/firmware/arm_scmi/perf.c struct perf_dom_info *dom = pi->dom_info + domain; dom 662 drivers/firmware/arm_scmi/perf.c *freq = level * dom->mult_factor; dom 671 drivers/firmware/arm_scmi/perf.c struct perf_dom_info *dom; dom 676 drivers/firmware/arm_scmi/perf.c dom = pi->dom_info + domain; dom 677 drivers/firmware/arm_scmi/perf.c if (!dom) dom 680 drivers/firmware/arm_scmi/perf.c for (opp = dom->opp, idx = 0; idx < dom->opp_count; idx++, opp++) { dom 681 drivers/firmware/arm_scmi/perf.c opp_freq = opp->perf * dom->mult_factor; dom 730 drivers/firmware/arm_scmi/perf.c struct perf_dom_info *dom = pinfo->dom_info + domain; dom 732 drivers/firmware/arm_scmi/perf.c scmi_perf_domain_attributes_get(handle, domain, dom); dom 733 drivers/firmware/arm_scmi/perf.c 
scmi_perf_describe_levels_get(handle, domain, dom); dom 735 drivers/firmware/arm_scmi/perf.c if (dom->perf_fastchannels) dom 736 drivers/firmware/arm_scmi/perf.c scmi_perf_domain_init_fc(handle, domain, &dom->fc_info); dom 170 drivers/firmware/arm_scmi/power.c struct power_dom_info *dom = pi->dom_info + domain; dom 172 drivers/firmware/arm_scmi/power.c return dom->name; dom 205 drivers/firmware/arm_scmi/power.c struct power_dom_info *dom = pinfo->dom_info + domain; dom 207 drivers/firmware/arm_scmi/power.c scmi_power_domain_attributes_get(handle, domain, dom); dom 120 drivers/firmware/arm_scmi/reset.c struct reset_dom_info *dom = pi->dom_info + domain; dom 122 drivers/firmware/arm_scmi/reset.c return dom->name; dom 128 drivers/firmware/arm_scmi/reset.c struct reset_dom_info *dom = pi->dom_info + domain; dom 130 drivers/firmware/arm_scmi/reset.c return dom->latency_us; dom 138 drivers/firmware/arm_scmi/reset.c struct scmi_msg_reset_domain_reset *dom; dom 146 drivers/firmware/arm_scmi/reset.c sizeof(*dom), 0, &t); dom 150 drivers/firmware/arm_scmi/reset.c dom = t->tx.buf; dom 151 drivers/firmware/arm_scmi/reset.c dom->domain_id = cpu_to_le32(domain); dom 152 drivers/firmware/arm_scmi/reset.c dom->flags = cpu_to_le32(flags); dom 153 drivers/firmware/arm_scmi/reset.c dom->reset_state = cpu_to_le32(state); dom 215 drivers/firmware/arm_scmi/reset.c struct reset_dom_info *dom = pinfo->dom_info + domain; dom 217 drivers/firmware/arm_scmi/reset.c scmi_reset_domain_attributes_get(handle, domain, dom); dom 2314 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c struct iommu_domain *dom; dom 2319 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c dom = iommu_get_domain_for_dev(adev->dev); dom 2335 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c addr = dom ? iommu_iova_to_phys(dom, addr) : addr; dom 2370 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c struct iommu_domain *dom; dom 2374 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c dom = iommu_get_domain_for_dev(adev->dev); dom 2386 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c addr = dom ? 
iommu_iova_to_phys(dom, addr) : addr; dom 471 drivers/gpu/drm/etnaviv/etnaviv_perfmon.c const struct etnaviv_pm_domain *dom; dom 476 drivers/gpu/drm/etnaviv/etnaviv_perfmon.c dom = pm_domain(gpu, domain->iter); dom 477 drivers/gpu/drm/etnaviv/etnaviv_perfmon.c if (!dom) dom 481 drivers/gpu/drm/etnaviv/etnaviv_perfmon.c domain->nr_signals = dom->nr_signals; dom 482 drivers/gpu/drm/etnaviv/etnaviv_perfmon.c strncpy(domain->name, dom->name, sizeof(domain->name)); dom 495 drivers/gpu/drm/etnaviv/etnaviv_perfmon.c const struct etnaviv_pm_domain *dom; dom 501 drivers/gpu/drm/etnaviv/etnaviv_perfmon.c dom = pm_domain(gpu, signal->domain); dom 502 drivers/gpu/drm/etnaviv/etnaviv_perfmon.c if (!dom) dom 505 drivers/gpu/drm/etnaviv/etnaviv_perfmon.c if (signal->iter >= dom->nr_signals) dom 508 drivers/gpu/drm/etnaviv/etnaviv_perfmon.c sig = &dom->signal[signal->iter]; dom 514 drivers/gpu/drm/etnaviv/etnaviv_perfmon.c if (signal->iter == dom->nr_signals) dom 524 drivers/gpu/drm/etnaviv/etnaviv_perfmon.c const struct etnaviv_pm_domain *dom; dom 529 drivers/gpu/drm/etnaviv/etnaviv_perfmon.c dom = meta->domains + r->domain; dom 531 drivers/gpu/drm/etnaviv/etnaviv_perfmon.c if (r->signal >= dom->nr_signals) dom 541 drivers/gpu/drm/etnaviv/etnaviv_perfmon.c const struct etnaviv_pm_domain *dom; dom 546 drivers/gpu/drm/etnaviv/etnaviv_perfmon.c dom = meta->domains + pmr->domain; dom 547 drivers/gpu/drm/etnaviv/etnaviv_perfmon.c sig = &dom->signal[pmr->signal]; dom 548 drivers/gpu/drm/etnaviv/etnaviv_perfmon.c val = sig->sample(gpu, dom, sig); dom 38 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c struct nvkm_perfdom *dom; dom 41 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c list_for_each_entry(dom, &pm->domains, head) dom 47 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c nvkm_perfdom_count_perfsig(struct nvkm_perfdom *dom) dom 52 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c if (dom) { dom 53 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c for (i = 0; i < dom->signal_nr; i++) { dom 54 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c if (dom->signal[i].name) dom 64 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c struct nvkm_perfdom *dom; dom 67 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c list_for_each_entry(dom, &pm->domains, head) { dom 69 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c return dom; dom 77 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c struct nvkm_perfdom *dom = *pdom; dom 79 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c if (dom == NULL) { dom 80 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c dom = nvkm_perfdom_find(pm, di); dom 81 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c if (dom == NULL) dom 83 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c *pdom = dom; dom 86 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c if (!dom->signal[si].name) dom 88 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c return &dom->signal[si]; dom 133 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c struct nvkm_perfdom *dom = NULL; dom 142 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c ctr->signal[i], &dom); dom 172 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c struct nvkm_perfdom *dom = NULL; dom 181 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c ctr->signal[i], &dom); dom 208 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c nvkm_perfdom_init(struct nvkm_perfdom *dom, void *data, u32 size) dom 213 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c struct nvkm_object *object = &dom->object; dom 214 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c struct nvkm_pm *pm = dom->perfmon->pm; dom 224 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c if 
(dom->ctr[i]) { dom 225 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c dom->func->init(pm, dom, dom->ctr[i]); dom 228 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c nvkm_perfsrc_enable(pm, dom->ctr[i]); dom 233 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c dom->func->next(pm, dom); dom 238 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c nvkm_perfdom_sample(struct nvkm_perfdom *dom, void *data, u32 size) dom 243 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c struct nvkm_object *object = &dom->object; dom 244 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c struct nvkm_pm *pm = dom->perfmon->pm; dom 255 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c list_for_each_entry(dom, &pm->domains, head) dom 256 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c dom->func->next(pm, dom); dom 262 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c nvkm_perfdom_read(struct nvkm_perfdom *dom, void *data, u32 size) dom 267 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c struct nvkm_object *object = &dom->object; dom 268 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c struct nvkm_pm *pm = dom->perfmon->pm; dom 278 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c if (dom->ctr[i]) dom 279 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c dom->func->read(pm, dom, dom->ctr[i]); dom 282 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c if (!dom->clk) dom 286 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c if (dom->ctr[i]) dom 287 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c args->v0.ctr[i] = dom->ctr[i]->ctr; dom 288 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c args->v0.clk = dom->clk; dom 295 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c struct nvkm_perfdom *dom = nvkm_perfdom(object); dom 298 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c return nvkm_perfdom_init(dom, data, size); dom 300 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c return nvkm_perfdom_sample(dom, data, size); dom 302 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c return nvkm_perfdom_read(dom, data, size); dom 312 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c struct nvkm_perfdom *dom = nvkm_perfdom(object); dom 313 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c struct nvkm_pm *pm = dom->perfmon->pm; dom 317 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c struct nvkm_perfctr *ctr = dom->ctr[i]; dom 326 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c return dom; dom 330 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c nvkm_perfctr_new(struct nvkm_perfdom *dom, int slot, u8 domain, dom 337 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c if (!dom) dom 349 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c ctr->signal[i] = signal[i] - dom->signal; dom 354 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c list_add_tail(&ctr->head, &dom->list); dom 377 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c struct nvkm_perfdom *dom; dom 416 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c if (!(dom = kzalloc(sizeof(*dom), GFP_KERNEL))) dom 418 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c nvkm_object_ctor(&nvkm_perfdom, oclass, &dom->object); dom 419 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c dom->perfmon = perfmon; dom 420 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c *pobject = &dom->object; dom 422 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c dom->func = sdom->func; dom 423 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c dom->addr = sdom->addr; dom 424 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c dom->mode = args->v0.mode; dom 426 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c dom->ctr[c] = ctr[c]; dom 442 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c struct nvkm_perfdom *dom; dom 459 
drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c dom = nvkm_perfdom_find(pm, di); dom 460 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c if (dom == NULL) dom 464 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c args->v0.signal_nr = nvkm_perfdom_count_perfsig(dom); dom 465 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c strncpy(args->v0.name, dom->name, sizeof(args->v0.name) - 1); dom 491 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c struct nvkm_perfdom *dom; dom 506 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c dom = nvkm_perfdom_find(pm, args->v0.domain); dom 507 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c if (dom == NULL || si >= (int)dom->signal_nr) dom 511 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c sig = &dom->signal[si]; dom 514 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c "/%s/%02x", dom->name, si); dom 524 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c while (++si < dom->signal_nr) { dom 525 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c if (all || dom->signal[si].name) { dom 544 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c struct nvkm_perfdom *dom = NULL; dom 560 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c sig = nvkm_perfsig_find(pm, args->v0.domain, args->v0.signal, &dom); dom 772 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c struct nvkm_perfdom *dom; dom 782 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c dom = kzalloc(struct_size(dom, signal, sdom->signal_nr), dom 784 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c if (!dom) dom 788 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c snprintf(dom->name, sizeof(dom->name), dom 792 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c snprintf(dom->name, sizeof(dom->name), dom 796 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c list_add_tail(&dom->head, &pm->domains); dom 797 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c INIT_LIST_HEAD(&dom->list); dom 798 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c dom->func = sdom->func; dom 799 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c dom->addr = addr; dom 800 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c dom->signal_nr = sdom->signal_nr; dom 805 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c &dom->signal[ssig->signal]; dom 835 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c struct nvkm_perfdom *dom, *next_dom; dom 838 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c list_for_each_entry_safe(dom, next_dom, &pm->domains, head) { dom 839 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c list_del(&dom->head); dom 840 drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c kfree(dom); dom 128 drivers/gpu/drm/nouveau/nvkm/engine/pm/gf100.c gf100_perfctr_init(struct nvkm_pm *pm, struct nvkm_perfdom *dom, dom 139 drivers/gpu/drm/nouveau/nvkm/engine/pm/gf100.c nvkm_wr32(device, dom->addr + 0x09c, 0x00040002 | (dom->mode << 3)); dom 140 drivers/gpu/drm/nouveau/nvkm/engine/pm/gf100.c nvkm_wr32(device, dom->addr + 0x100, 0x00000000); dom 141 drivers/gpu/drm/nouveau/nvkm/engine/pm/gf100.c nvkm_wr32(device, dom->addr + 0x040 + (ctr->slot * 0x08), src); dom 142 drivers/gpu/drm/nouveau/nvkm/engine/pm/gf100.c nvkm_wr32(device, dom->addr + 0x044 + (ctr->slot * 0x08), log); dom 146 drivers/gpu/drm/nouveau/nvkm/engine/pm/gf100.c gf100_perfctr_read(struct nvkm_pm *pm, struct nvkm_perfdom *dom, dom 152 drivers/gpu/drm/nouveau/nvkm/engine/pm/gf100.c case 0: ctr->ctr = nvkm_rd32(device, dom->addr + 0x08c); break; dom 153 drivers/gpu/drm/nouveau/nvkm/engine/pm/gf100.c case 1: ctr->ctr = nvkm_rd32(device, dom->addr + 0x088); break; dom 154 drivers/gpu/drm/nouveau/nvkm/engine/pm/gf100.c case 2: ctr->ctr = nvkm_rd32(device, dom->addr + 0x080); break; dom 
155 drivers/gpu/drm/nouveau/nvkm/engine/pm/gf100.c case 3: ctr->ctr = nvkm_rd32(device, dom->addr + 0x090); break; dom 157 drivers/gpu/drm/nouveau/nvkm/engine/pm/gf100.c dom->clk = nvkm_rd32(device, dom->addr + 0x070); dom 161 drivers/gpu/drm/nouveau/nvkm/engine/pm/gf100.c gf100_perfctr_next(struct nvkm_pm *pm, struct nvkm_perfdom *dom) dom 164 drivers/gpu/drm/nouveau/nvkm/engine/pm/gf100.c nvkm_wr32(device, dom->addr + 0x06c, dom->signal_nr - 0x40 + 0x27); dom 165 drivers/gpu/drm/nouveau/nvkm/engine/pm/gf100.c nvkm_wr32(device, dom->addr + 0x0ec, 0x00000011); dom 27 drivers/gpu/drm/nouveau/nvkm/engine/pm/nv40.c nv40_perfctr_init(struct nvkm_pm *pm, struct nvkm_perfdom *dom, dom 38 drivers/gpu/drm/nouveau/nvkm/engine/pm/nv40.c nvkm_wr32(device, 0x00a7c0 + dom->addr, 0x00000001 | (dom->mode << 4)); dom 39 drivers/gpu/drm/nouveau/nvkm/engine/pm/nv40.c nvkm_wr32(device, 0x00a400 + dom->addr + (ctr->slot * 0x40), src); dom 40 drivers/gpu/drm/nouveau/nvkm/engine/pm/nv40.c nvkm_wr32(device, 0x00a420 + dom->addr + (ctr->slot * 0x40), log); dom 44 drivers/gpu/drm/nouveau/nvkm/engine/pm/nv40.c nv40_perfctr_read(struct nvkm_pm *pm, struct nvkm_perfdom *dom, dom 50 drivers/gpu/drm/nouveau/nvkm/engine/pm/nv40.c case 0: ctr->ctr = nvkm_rd32(device, 0x00a700 + dom->addr); break; dom 51 drivers/gpu/drm/nouveau/nvkm/engine/pm/nv40.c case 1: ctr->ctr = nvkm_rd32(device, 0x00a6c0 + dom->addr); break; dom 52 drivers/gpu/drm/nouveau/nvkm/engine/pm/nv40.c case 2: ctr->ctr = nvkm_rd32(device, 0x00a680 + dom->addr); break; dom 53 drivers/gpu/drm/nouveau/nvkm/engine/pm/nv40.c case 3: ctr->ctr = nvkm_rd32(device, 0x00a740 + dom->addr); break; dom 55 drivers/gpu/drm/nouveau/nvkm/engine/pm/nv40.c dom->clk = nvkm_rd32(device, 0x00a600 + dom->addr); dom 59 drivers/gpu/drm/nouveau/nvkm/engine/pm/nv40.c nv40_perfctr_next(struct nvkm_pm *pm, struct nvkm_perfdom *dom) dom 274 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gf100.c calc_clk(struct gf100_clk *clk, struct nvkm_cstate *cstate, int idx, int dom) dom 277 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gf100.c u32 freq = cstate->domain[dom]; dom 288 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk104.c struct nvkm_cstate *cstate, int idx, int dom) dom 291 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk104.c u32 freq = cstate->domain[dom]; dom 275 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gt215.c int idx, u32 pll, int dom) dom 277 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gt215.c int ret = gt215_pll_info(&clk->base, idx, pll, cstate->domain[dom], dom 278 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gt215.c &clk->eng[dom]); dom 363 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gt215.c prog_pll(struct gt215_clk *clk, int idx, u32 pll, int dom) dom 365 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gt215.c struct gt215_clk_info *info = &clk->eng[dom]; dom 407 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gt215.c prog_clk(struct gt215_clk *clk, int idx, int dom) dom 409 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gt215.c struct gt215_clk_info *info = &clk->eng[dom]; dom 443 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gt215.c prog_core(struct gt215_clk *clk, int dom) dom 445 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gt215.c struct gt215_clk_info *info = &clk->eng[dom]; dom 452 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gt215.c prog_pll(clk, 0x00, 0x004200, dom); dom 51 drivers/input/rmi4/rmi_f01.c char dom[11]; /* YYYY/MM/DD + '\0' */ dom 172 drivers/input/rmi4/rmi_f01.c snprintf(props->dom, sizeof(props->dom), "20%02d/%02d/%02d", dom 281 drivers/input/rmi4/rmi_f01.c return scnprintf(buf, PAGE_SIZE, "%s\n", f01->properties.dom); dom 
185 drivers/iommu/amd_iommu.c static struct protection_domain *to_pdomain(struct iommu_domain *dom) dom 187 drivers/iommu/amd_iommu.c return container_of(dom, struct protection_domain, domain); dom 1658 drivers/iommu/amd_iommu.c static int iommu_map_page(struct protection_domain *dom, dom 1678 drivers/iommu/amd_iommu.c pte = alloc_pte(dom, bus_addr, page_size, NULL, gfp, &updated); dom 1710 drivers/iommu/amd_iommu.c spin_lock_irqsave(&dom->lock, flags); dom 1711 drivers/iommu/amd_iommu.c update_domain(dom); dom 1712 drivers/iommu/amd_iommu.c spin_unlock_irqrestore(&dom->lock, flags); dom 1721 drivers/iommu/amd_iommu.c static unsigned long iommu_unmap_page(struct protection_domain *dom, dom 1735 drivers/iommu/amd_iommu.c pte = fetch_pte(dom, bus_addr, &unmap_size); dom 1867 drivers/iommu/amd_iommu.c static void dma_ops_domain_flush_tlb(struct dma_ops_domain *dom) dom 1871 drivers/iommu/amd_iommu.c spin_lock_irqsave(&dom->domain.lock, flags); dom 1872 drivers/iommu/amd_iommu.c domain_flush_tlb(&dom->domain); dom 1873 drivers/iommu/amd_iommu.c domain_flush_complete(&dom->domain); dom 1874 drivers/iommu/amd_iommu.c spin_unlock_irqrestore(&dom->domain.lock, flags); dom 1879 drivers/iommu/amd_iommu.c struct dma_ops_domain *dom; dom 1881 drivers/iommu/amd_iommu.c dom = container_of(iovad, struct dma_ops_domain, iovad); dom 1883 drivers/iommu/amd_iommu.c dma_ops_domain_flush_tlb(dom); dom 1890 drivers/iommu/amd_iommu.c static void dma_ops_domain_free(struct dma_ops_domain *dom) dom 1892 drivers/iommu/amd_iommu.c if (!dom) dom 1895 drivers/iommu/amd_iommu.c put_iova_domain(&dom->iovad); dom 1897 drivers/iommu/amd_iommu.c free_pagetable(&dom->domain); dom 1899 drivers/iommu/amd_iommu.c if (dom->domain.id) dom 1900 drivers/iommu/amd_iommu.c domain_id_free(dom->domain.id); dom 1902 drivers/iommu/amd_iommu.c kfree(dom); dom 2999 drivers/iommu/amd_iommu.c static void amd_iommu_domain_free(struct iommu_domain *dom) dom 3004 drivers/iommu/amd_iommu.c domain = to_pdomain(dom); dom 3011 drivers/iommu/amd_iommu.c if (!dom) dom 3014 drivers/iommu/amd_iommu.c switch (dom->type) { dom 3032 drivers/iommu/amd_iommu.c static void amd_iommu_detach_device(struct iommu_domain *dom, dom 3055 drivers/iommu/amd_iommu.c (dom->type == IOMMU_DOMAIN_UNMANAGED)) dom 3062 drivers/iommu/amd_iommu.c static int amd_iommu_attach_device(struct iommu_domain *dom, dom 3065 drivers/iommu/amd_iommu.c struct protection_domain *domain = to_pdomain(dom); dom 3086 drivers/iommu/amd_iommu.c if (dom->type == IOMMU_DOMAIN_UNMANAGED) dom 3098 drivers/iommu/amd_iommu.c static int amd_iommu_map(struct iommu_domain *dom, unsigned long iova, dom 3101 drivers/iommu/amd_iommu.c struct protection_domain *domain = to_pdomain(dom); dom 3122 drivers/iommu/amd_iommu.c static size_t amd_iommu_unmap(struct iommu_domain *dom, unsigned long iova, dom 3126 drivers/iommu/amd_iommu.c struct protection_domain *domain = to_pdomain(dom); dom 3139 drivers/iommu/amd_iommu.c static phys_addr_t amd_iommu_iova_to_phys(struct iommu_domain *dom, dom 3142 drivers/iommu/amd_iommu.c struct protection_domain *domain = to_pdomain(dom); dom 3259 drivers/iommu/amd_iommu.c struct protection_domain *dom = to_pdomain(domain); dom 3262 drivers/iommu/amd_iommu.c spin_lock_irqsave(&dom->lock, flags); dom 3263 drivers/iommu/amd_iommu.c domain_flush_tlb_pde(dom); dom 3264 drivers/iommu/amd_iommu.c domain_flush_complete(dom); dom 3265 drivers/iommu/amd_iommu.c spin_unlock_irqrestore(&dom->lock, flags); dom 3318 drivers/iommu/amd_iommu.c void amd_iommu_domain_direct_map(struct 
iommu_domain *dom) dom 3320 drivers/iommu/amd_iommu.c struct protection_domain *domain = to_pdomain(dom); dom 3338 drivers/iommu/amd_iommu.c int amd_iommu_domain_enable_v2(struct iommu_domain *dom, int pasids) dom 3340 drivers/iommu/amd_iommu.c struct protection_domain *domain = to_pdomain(dom); dom 3451 drivers/iommu/amd_iommu.c int amd_iommu_flush_page(struct iommu_domain *dom, int pasid, dom 3454 drivers/iommu/amd_iommu.c struct protection_domain *domain = to_pdomain(dom); dom 3472 drivers/iommu/amd_iommu.c int amd_iommu_flush_tlb(struct iommu_domain *dom, int pasid) dom 3474 drivers/iommu/amd_iommu.c struct protection_domain *domain = to_pdomain(dom); dom 3551 drivers/iommu/amd_iommu.c int amd_iommu_domain_set_gcr3(struct iommu_domain *dom, int pasid, dom 3554 drivers/iommu/amd_iommu.c struct protection_domain *domain = to_pdomain(dom); dom 3566 drivers/iommu/amd_iommu.c int amd_iommu_domain_clear_gcr3(struct iommu_domain *dom, int pasid) dom 3568 drivers/iommu/amd_iommu.c struct protection_domain *domain = to_pdomain(dom); dom 44 drivers/iommu/amd_iommu_proto.h extern void amd_iommu_domain_direct_map(struct iommu_domain *dom); dom 45 drivers/iommu/amd_iommu_proto.h extern int amd_iommu_domain_enable_v2(struct iommu_domain *dom, int pasids); dom 46 drivers/iommu/amd_iommu_proto.h extern int amd_iommu_flush_page(struct iommu_domain *dom, int pasid, dom 48 drivers/iommu/amd_iommu_proto.h extern int amd_iommu_flush_tlb(struct iommu_domain *dom, int pasid); dom 49 drivers/iommu/amd_iommu_proto.h extern int amd_iommu_domain_set_gcr3(struct iommu_domain *dom, int pasid, dom 51 drivers/iommu/amd_iommu_proto.h extern int amd_iommu_domain_clear_gcr3(struct iommu_domain *dom, int pasid); dom 692 drivers/iommu/arm-smmu-v3.c static struct arm_smmu_domain *to_smmu_domain(struct iommu_domain *dom) dom 694 drivers/iommu/arm-smmu-v3.c return container_of(dom, struct arm_smmu_domain, domain); dom 128 drivers/iommu/arm-smmu.c static struct arm_smmu_domain *to_smmu_domain(struct iommu_domain *dom) dom 130 drivers/iommu/arm-smmu.c return container_of(dom, struct arm_smmu_domain, domain); dom 281 drivers/iommu/exynos-iommu.c static struct exynos_iommu_domain *to_exynos_domain(struct iommu_domain *dom) dom 283 drivers/iommu/exynos-iommu.c return container_of(dom, struct exynos_iommu_domain, domain); dom 26 drivers/iommu/fsl_pamu_domain.c static struct fsl_dma_domain *to_fsl_dma_domain(struct iommu_domain *dom) dom 28 drivers/iommu/fsl_pamu_domain.c return container_of(dom, struct fsl_dma_domain, iommu_domain); dom 432 drivers/iommu/intel-iommu.c static struct dmar_domain *to_dmar_domain(struct iommu_domain *dom) dom 434 drivers/iommu/intel-iommu.c return container_of(dom, struct dmar_domain, domain); dom 1378 drivers/iommu/iommu.c struct iommu_domain *dom; dom 1380 drivers/iommu/iommu.c dom = __iommu_domain_alloc(dev->bus, iommu_def_domain_type); dom 1381 drivers/iommu/iommu.c if (!dom && iommu_def_domain_type != IOMMU_DOMAIN_DMA) { dom 1382 drivers/iommu/iommu.c dom = __iommu_domain_alloc(dev->bus, IOMMU_DOMAIN_DMA); dom 1383 drivers/iommu/iommu.c if (dom) { dom 1390 drivers/iommu/iommu.c group->default_domain = dom; dom 1392 drivers/iommu/iommu.c group->domain = dom; dom 1394 drivers/iommu/iommu.c if (dom && !iommu_dma_strict) { dom 1396 drivers/iommu/iommu.c iommu_domain_set_attr(dom, dom 82 drivers/iommu/ipmmu-vmsa.c static struct ipmmu_vmsa_domain *to_vmsa_domain(struct iommu_domain *dom) dom 84 drivers/iommu/ipmmu-vmsa.c return container_of(dom, struct ipmmu_vmsa_domain, io_domain); dom 50 
drivers/iommu/msm_iommu.c static struct msm_priv *to_msm_priv(struct iommu_domain *dom) dom 52 drivers/iommu/msm_iommu.c return container_of(dom, struct msm_priv, domain); dom 159 drivers/iommu/mtk_iommu.c static struct mtk_iommu_domain *to_mtk_domain(struct iommu_domain *dom) dom 161 drivers/iommu/mtk_iommu.c return container_of(dom, struct mtk_iommu_domain, domain); dom 265 drivers/iommu/mtk_iommu.c struct mtk_iommu_domain *dom = data->m4u_dom; dom 288 drivers/iommu/mtk_iommu.c if (report_iommu_fault(&dom->domain, data->dev, fault_iova, dom 330 drivers/iommu/mtk_iommu.c static int mtk_iommu_domain_finalise(struct mtk_iommu_domain *dom) dom 334 drivers/iommu/mtk_iommu.c spin_lock_init(&dom->pgtlock); dom 336 drivers/iommu/mtk_iommu.c dom->cfg = (struct io_pgtable_cfg) { dom 348 drivers/iommu/mtk_iommu.c dom->iop = alloc_io_pgtable_ops(ARM_V7S, &dom->cfg, data); dom 349 drivers/iommu/mtk_iommu.c if (!dom->iop) { dom 355 drivers/iommu/mtk_iommu.c dom->domain.pgsize_bitmap = dom->cfg.pgsize_bitmap; dom 361 drivers/iommu/mtk_iommu.c struct mtk_iommu_domain *dom; dom 366 drivers/iommu/mtk_iommu.c dom = kzalloc(sizeof(*dom), GFP_KERNEL); dom 367 drivers/iommu/mtk_iommu.c if (!dom) dom 370 drivers/iommu/mtk_iommu.c if (iommu_get_dma_cookie(&dom->domain)) dom 373 drivers/iommu/mtk_iommu.c if (mtk_iommu_domain_finalise(dom)) dom 376 drivers/iommu/mtk_iommu.c dom->domain.geometry.aperture_start = 0; dom 377 drivers/iommu/mtk_iommu.c dom->domain.geometry.aperture_end = DMA_BIT_MASK(32); dom 378 drivers/iommu/mtk_iommu.c dom->domain.geometry.force_aperture = true; dom 380 drivers/iommu/mtk_iommu.c return &dom->domain; dom 383 drivers/iommu/mtk_iommu.c iommu_put_dma_cookie(&dom->domain); dom 385 drivers/iommu/mtk_iommu.c kfree(dom); dom 391 drivers/iommu/mtk_iommu.c struct mtk_iommu_domain *dom = to_mtk_domain(domain); dom 393 drivers/iommu/mtk_iommu.c free_io_pgtable_ops(dom->iop); dom 401 drivers/iommu/mtk_iommu.c struct mtk_iommu_domain *dom = to_mtk_domain(domain); dom 409 drivers/iommu/mtk_iommu.c data->m4u_dom = dom; dom 410 drivers/iommu/mtk_iommu.c writel(dom->cfg.arm_v7s_cfg.ttbr[0] & MMU_PT_ADDR_MASK, dom 432 drivers/iommu/mtk_iommu.c struct mtk_iommu_domain *dom = to_mtk_domain(domain); dom 441 drivers/iommu/mtk_iommu.c spin_lock_irqsave(&dom->pgtlock, flags); dom 442 drivers/iommu/mtk_iommu.c ret = dom->iop->map(dom->iop, iova, paddr, size, prot); dom 443 drivers/iommu/mtk_iommu.c spin_unlock_irqrestore(&dom->pgtlock, flags); dom 452 drivers/iommu/mtk_iommu.c struct mtk_iommu_domain *dom = to_mtk_domain(domain); dom 456 drivers/iommu/mtk_iommu.c spin_lock_irqsave(&dom->pgtlock, flags); dom 457 drivers/iommu/mtk_iommu.c unmapsz = dom->iop->unmap(dom->iop, iova, size, gather); dom 458 drivers/iommu/mtk_iommu.c spin_unlock_irqrestore(&dom->pgtlock, flags); dom 482 drivers/iommu/mtk_iommu.c struct mtk_iommu_domain *dom = to_mtk_domain(domain); dom 487 drivers/iommu/mtk_iommu.c spin_lock_irqsave(&dom->pgtlock, flags); dom 488 drivers/iommu/mtk_iommu.c pa = dom->iop->iova_to_phys(dom->iop, iova); dom 489 drivers/iommu/mtk_iommu.c spin_unlock_irqrestore(&dom->pgtlock, flags); dom 99 drivers/iommu/mtk_iommu_v1.c static struct mtk_iommu_domain *to_mtk_domain(struct iommu_domain *dom) dom 101 drivers/iommu/mtk_iommu_v1.c return container_of(dom, struct mtk_iommu_domain, domain); dom 163 drivers/iommu/mtk_iommu_v1.c struct mtk_iommu_domain *dom = data->m4u_dom; dom 181 drivers/iommu/mtk_iommu_v1.c if (report_iommu_fault(&dom->domain, data->dev, fault_iova, dom 223 drivers/iommu/mtk_iommu_v1.c struct 
mtk_iommu_domain *dom = data->m4u_dom; dom 225 drivers/iommu/mtk_iommu_v1.c spin_lock_init(&dom->pgtlock); dom 227 drivers/iommu/mtk_iommu_v1.c dom->pgt_va = dma_alloc_coherent(data->dev, M2701_IOMMU_PGT_SIZE, dom 228 drivers/iommu/mtk_iommu_v1.c &dom->pgt_pa, GFP_KERNEL); dom 229 drivers/iommu/mtk_iommu_v1.c if (!dom->pgt_va) dom 232 drivers/iommu/mtk_iommu_v1.c writel(dom->pgt_pa, data->base + REG_MMU_PT_BASE_ADDR); dom 234 drivers/iommu/mtk_iommu_v1.c dom->data = data; dom 241 drivers/iommu/mtk_iommu_v1.c struct mtk_iommu_domain *dom; dom 246 drivers/iommu/mtk_iommu_v1.c dom = kzalloc(sizeof(*dom), GFP_KERNEL); dom 247 drivers/iommu/mtk_iommu_v1.c if (!dom) dom 250 drivers/iommu/mtk_iommu_v1.c return &dom->domain; dom 255 drivers/iommu/mtk_iommu_v1.c struct mtk_iommu_domain *dom = to_mtk_domain(domain); dom 256 drivers/iommu/mtk_iommu_v1.c struct mtk_iommu_data *data = dom->data; dom 259 drivers/iommu/mtk_iommu_v1.c dom->pgt_va, dom->pgt_pa); dom 266 drivers/iommu/mtk_iommu_v1.c struct mtk_iommu_domain *dom = to_mtk_domain(domain); dom 274 drivers/iommu/mtk_iommu_v1.c data->m4u_dom = dom; dom 300 drivers/iommu/mtk_iommu_v1.c struct mtk_iommu_domain *dom = to_mtk_domain(domain); dom 304 drivers/iommu/mtk_iommu_v1.c u32 *pgt_base_iova = dom->pgt_va + (iova >> MT2701_IOMMU_PAGE_SHIFT); dom 308 drivers/iommu/mtk_iommu_v1.c spin_lock_irqsave(&dom->pgtlock, flags); dom 319 drivers/iommu/mtk_iommu_v1.c spin_unlock_irqrestore(&dom->pgtlock, flags); dom 321 drivers/iommu/mtk_iommu_v1.c mtk_iommu_tlb_flush_range(dom->data, iova, size); dom 330 drivers/iommu/mtk_iommu_v1.c struct mtk_iommu_domain *dom = to_mtk_domain(domain); dom 332 drivers/iommu/mtk_iommu_v1.c u32 *pgt_base_iova = dom->pgt_va + (iova >> MT2701_IOMMU_PAGE_SHIFT); dom 335 drivers/iommu/mtk_iommu_v1.c spin_lock_irqsave(&dom->pgtlock, flags); dom 337 drivers/iommu/mtk_iommu_v1.c spin_unlock_irqrestore(&dom->pgtlock, flags); dom 339 drivers/iommu/mtk_iommu_v1.c mtk_iommu_tlb_flush_range(dom->data, iova, size); dom 347 drivers/iommu/mtk_iommu_v1.c struct mtk_iommu_domain *dom = to_mtk_domain(domain); dom 351 drivers/iommu/mtk_iommu_v1.c spin_lock_irqsave(&dom->pgtlock, flags); dom 352 drivers/iommu/mtk_iommu_v1.c pa = *(dom->pgt_va + (iova >> MT2701_IOMMU_PAGE_SHIFT)); dom 354 drivers/iommu/mtk_iommu_v1.c spin_unlock_irqrestore(&dom->pgtlock, flags); dom 71 drivers/iommu/omap-iommu.c static struct omap_iommu_domain *to_omap_domain(struct iommu_domain *dom) dom 73 drivers/iommu/omap-iommu.c return container_of(dom, struct omap_iommu_domain, domain); dom 70 drivers/iommu/qcom_iommu.c static struct qcom_iommu_domain *to_qcom_iommu_domain(struct iommu_domain *dom) dom 72 drivers/iommu/qcom_iommu.c return container_of(dom, struct qcom_iommu_domain, domain); dom 120 drivers/iommu/rockchip-iommu.c static inline void rk_table_flush(struct rk_iommu_domain *dom, dma_addr_t dma, dom 128 drivers/iommu/rockchip-iommu.c static struct rk_iommu_domain *to_rk_domain(struct iommu_domain *dom) dom 130 drivers/iommu/rockchip-iommu.c return container_of(dom, struct rk_iommu_domain, domain); dom 37 drivers/iommu/s390-iommu.c static struct s390_domain *to_s390_domain(struct iommu_domain *dom) dom 39 drivers/iommu/s390-iommu.c return container_of(dom, struct s390_domain, domain); dom 60 drivers/iommu/tegra-smmu.c static struct tegra_smmu_as *to_smmu_as(struct iommu_domain *dom) dom 62 drivers/iommu/tegra-smmu.c return container_of(dom, struct tegra_smmu_as, domain); dom 54 drivers/irqchip/irq-aspeed-vic.c struct irq_domain *dom; dom 103 
drivers/irqchip/irq-aspeed-vic.c handle_domain_irq(vic->dom, irq, regs); dom 214 drivers/irqchip/irq-aspeed-vic.c vic->dom = irq_domain_add_simple(node, NUM_IRQS, 0, dom 144 drivers/irqchip/irq-sni-exiu.c static int exiu_domain_alloc(struct irq_domain *dom, unsigned int virq, dom 149 drivers/irqchip/irq-sni-exiu.c struct exiu_irq_data *info = dom->host_data; dom 153 drivers/irqchip/irq-sni-exiu.c if (is_of_node(dom->parent->fwnode)) { dom 165 drivers/irqchip/irq-sni-exiu.c irq_domain_set_hwirq_and_chip(dom, virq, hwirq, &exiu_irq_chip, info); dom 167 drivers/irqchip/irq-sni-exiu.c parent_fwspec.fwnode = dom->parent->fwnode; dom 168 drivers/irqchip/irq-sni-exiu.c return irq_domain_alloc_irqs_parent(dom, virq, nr_irqs, &parent_fwspec); dom 55 drivers/irqchip/irq-tango.c static void tangox_dispatch_irqs(struct irq_domain *dom, unsigned int status, dom 63 drivers/irqchip/irq-tango.c virq = irq_find_mapping(dom, base + hwirq); dom 72 drivers/irqchip/irq-tango.c struct irq_domain *dom = irq_desc_get_handler_data(desc); dom 74 drivers/irqchip/irq-tango.c struct tangox_irq_chip *chip = dom->host_data; dom 82 drivers/irqchip/irq-tango.c tangox_dispatch_irqs(dom, status_lo, 0); dom 83 drivers/irqchip/irq-tango.c tangox_dispatch_irqs(dom, status_hi, 32); dom 161 drivers/irqchip/irq-tango.c static void __init tangox_irq_domain_init(struct irq_domain *dom) dom 167 drivers/irqchip/irq-tango.c gc = irq_get_domain_generic_chip(dom, i * 32); dom 176 drivers/irqchip/irq-tango.c struct irq_domain *dom; dom 193 drivers/irqchip/irq-tango.c dom = irq_domain_add_linear(node, 64, &irq_generic_chip_ops, chip); dom 194 drivers/irqchip/irq-tango.c if (!dom) dom 197 drivers/irqchip/irq-tango.c err = irq_alloc_domain_generic_chips(dom, 32, 2, node->name, dom 202 drivers/irqchip/irq-tango.c tangox_irq_domain_init(dom); dom 204 drivers/irqchip/irq-tango.c irq_set_chained_handler_and_data(irq, tangox_irq_handler, dom); dom 349 drivers/lightnvm/core.c if ((tt->flags & NVM_TGT_F_HOST_L2P) != (dev->geo.dom & NVM_RSP_L2P)) { dom 216 drivers/media/platform/qcom/venus/hfi_parser.c u32 dom; dom 222 drivers/media/platform/qcom/venus/hfi_parser.c dom = inst->session_type; dom 226 drivers/media/platform/qcom/venus/hfi_parser.c if (cap->codec & codecs && cap->domain == dom) dom 1122 drivers/net/ethernet/emulex/benet/be_cmds.c int be_cmd_pmac_del(struct be_adapter *adapter, u32 if_id, int pmac_id, u32 dom) dom 1144 drivers/net/ethernet/emulex/benet/be_cmds.c req->hdr.domain = dom; dom 1691 drivers/net/ethernet/emulex/benet/be_cmds.c u8 *link_status, u32 dom) dom 1717 drivers/net/ethernet/emulex/benet/be_cmds.c req->hdr.domain = dom; dom 3861 drivers/net/ethernet/emulex/benet/be_cmds.c int be_cmd_set_mac(struct be_adapter *adapter, u8 *mac, int if_id, u32 dom) dom 3869 drivers/net/ethernet/emulex/benet/be_cmds.c &pmac_id, if_id, dom); dom 3872 drivers/net/ethernet/emulex/benet/be_cmds.c be_cmd_pmac_del(adapter, if_id, pmac_id, dom); dom 3874 drivers/net/ethernet/emulex/benet/be_cmds.c return be_cmd_set_mac_list(adapter, mac, mac ? 
1 : 0, dom); dom 2408 drivers/net/ethernet/emulex/benet/be_cmds.h u8 *link_status, u32 dom); dom 2474 drivers/net/ethernet/emulex/benet/be_cmds.h int be_cmd_set_mac(struct be_adapter *adapter, u8 *mac, int if_id, u32 dom); dom 156 drivers/nvme/host/lightnvm.c __le32 dom; dom 348 drivers/nvme/host/lightnvm.c geo->dom = le32_to_cpu(id->dom); dom 1044 drivers/nvme/host/lightnvm.c return scnprintf(page, PAGE_SIZE, "%u\n", geo->dom); dom 2830 drivers/pci/controller/pci-hyperv.c static u16 hv_get_dom_num(u16 dom) dom 2834 drivers/pci/controller/pci-hyperv.c if (test_and_set_bit(dom, hvpci_dom_map) == 0) dom 2835 drivers/pci/controller/pci-hyperv.c return dom; dom 2849 drivers/pci/controller/pci-hyperv.c static void hv_put_dom_num(u16 dom) dom 2851 drivers/pci/controller/pci-hyperv.c clear_bit(dom, hvpci_dom_map); dom 2865 drivers/pci/controller/pci-hyperv.c u16 dom_req, dom; dom 2894 drivers/pci/controller/pci-hyperv.c dom = hv_get_dom_num(dom_req); dom 2896 drivers/pci/controller/pci-hyperv.c if (dom == HVPCI_DOM_INVALID) { dom 2903 drivers/pci/controller/pci-hyperv.c if (dom != dom_req) dom 2906 drivers/pci/controller/pci-hyperv.c dom_req, dom); dom 2908 drivers/pci/controller/pci-hyperv.c hbus->sysdata.domain = dom; dom 22 drivers/pci/controller/pcie-tango.c struct irq_domain *dom; dom 38 drivers/pci/controller/pcie-tango.c virq = irq_find_mapping(pcie->dom, base + idx); dom 135 drivers/pci/controller/pcie-tango.c static int tango_irq_domain_alloc(struct irq_domain *dom, unsigned int virq, dom 138 drivers/pci/controller/pcie-tango.c struct tango_pcie *pcie = dom->host_data; dom 150 drivers/pci/controller/pcie-tango.c irq_domain_set_info(dom, virq, pos, &tango_chip, dom 156 drivers/pci/controller/pcie-tango.c static void tango_irq_domain_free(struct irq_domain *dom, unsigned int virq, dom 160 drivers/pci/controller/pcie-tango.c struct irq_data *d = irq_domain_get_irq_data(dom, virq); dom 294 drivers/pci/controller/pcie-tango.c pcie->dom = irq_dom; dom 1571 drivers/pci/msi.c struct irq_domain *dom; dom 1575 drivers/pci/msi.c dom = of_msi_map_get_device_domain(&pdev->dev, rid); dom 1576 drivers/pci/msi.c if (!dom) dom 1577 drivers/pci/msi.c dom = iort_get_device_domain(&pdev->dev, rid); dom 1578 drivers/pci/msi.c return dom; dom 1391 drivers/s390/crypto/ap_bus.c int rc, dom, depth, type, comp_type, borked; dom 1417 drivers/s390/crypto/ap_bus.c for (dom = 0; dom < AP_DOMAINS; dom++) { dom 1418 drivers/s390/crypto/ap_bus.c qid = AP_MKQID(id, dom); dom 1423 drivers/s390/crypto/ap_bus.c if (dom >= AP_DOMAINS) { dom 1443 drivers/s390/crypto/ap_bus.c if (dom >= AP_DOMAINS) dom 1454 drivers/s390/crypto/ap_bus.c for (dom = 0; dom < AP_DOMAINS; dom++) { dom 1455 drivers/s390/crypto/ap_bus.c qid = AP_MKQID(id, dom); dom 1460 drivers/s390/crypto/ap_bus.c if (!ap_test_config_usage_domain(dom)) { dom 1484 drivers/s390/crypto/ap_bus.c id, dom); dom 1521 drivers/s390/crypto/ap_bus.c dev_set_name(&aq->ap_dev.device, "%02x.%04x", id, dom); dom 393 drivers/s390/crypto/pkey_api.c int i, card, dom, rc; dom 421 drivers/s390/crypto/pkey_api.c dom = apqns[i].domain; dom 423 drivers/s390/crypto/pkey_api.c rc = cca_genseckey(card, dom, ksize, keybuf); dom 426 drivers/s390/crypto/pkey_api.c rc = cca_gencipherkey(card, dom, ksize, kflags, dom 440 drivers/s390/crypto/pkey_api.c int i, card, dom, rc; dom 468 drivers/s390/crypto/pkey_api.c dom = apqns[i].domain; dom 470 drivers/s390/crypto/pkey_api.c rc = cca_clr2seckey(card, dom, ksize, dom 474 drivers/s390/crypto/pkey_api.c rc = cca_clr2cipherkey(card, dom, ksize, kflags, 
dom 571 drivers/s390/crypto/pkey_api.c int i, card, dom, rc; dom 613 drivers/s390/crypto/pkey_api.c dom = apqns[i].domain; dom 615 drivers/s390/crypto/pkey_api.c rc = cca_sec2protkey(card, dom, key, pkey->protkey, dom 618 drivers/s390/crypto/pkey_api.c rc = cca_cipher2protkey(card, dom, key, pkey->protkey, dom 1063 drivers/s390/crypto/zcrypt_ccamisc.c int cca_clr2cipherkey(u16 card, u16 dom, u32 keybitsize, u32 keygenflags, dom 1098 drivers/s390/crypto/zcrypt_ccamisc.c rc = _ip_cprb_helper(card, dom, "AES ", "FIRST ", "MIN3PART", dom 1106 drivers/s390/crypto/zcrypt_ccamisc.c rc = _ip_cprb_helper(card, dom, "AES ", "ADD-PART", NULL, dom 1114 drivers/s390/crypto/zcrypt_ccamisc.c rc = _ip_cprb_helper(card, dom, "AES ", "ADD-PART", NULL, dom 1122 drivers/s390/crypto/zcrypt_ccamisc.c rc = _ip_cprb_helper(card, dom, "AES ", "COMPLETE", NULL, dom 1539 drivers/s390/crypto/zcrypt_ccamisc.c int cca_get_info(u16 card, u16 dom, struct cca_info *ci, int verify) dom 1543 drivers/s390/crypto/zcrypt_ccamisc.c rc = cca_info_cache_fetch(card, dom, ci); dom 1545 drivers/s390/crypto/zcrypt_ccamisc.c rc = fetch_cca_info(card, dom, ci); dom 1547 drivers/s390/crypto/zcrypt_ccamisc.c cca_info_cache_update(card, dom, ci); dom 1562 drivers/s390/crypto/zcrypt_ccamisc.c u16 card, dom; dom 1581 drivers/s390/crypto/zcrypt_ccamisc.c dom = AP_QID_QUEUE(device_status[i].qid); dom 1585 drivers/s390/crypto/zcrypt_ccamisc.c if (cca_info_cache_fetch(card, dom, &ci) == 0 && dom 1592 drivers/s390/crypto/zcrypt_ccamisc.c if (fetch_cca_info(card, dom, &ci) == 0) { dom 1593 drivers/s390/crypto/zcrypt_ccamisc.c cca_info_cache_update(card, dom, &ci); dom 1603 drivers/s390/crypto/zcrypt_ccamisc.c cca_info_cache_scrub(card, dom); dom 1613 drivers/s390/crypto/zcrypt_ccamisc.c dom = AP_QID_QUEUE(device_status[i].qid); dom 1615 drivers/s390/crypto/zcrypt_ccamisc.c if (fetch_cca_info(card, dom, &ci) == 0) { dom 1616 drivers/s390/crypto/zcrypt_ccamisc.c cca_info_cache_update(card, dom, &ci); dom 1631 drivers/s390/crypto/zcrypt_ccamisc.c dom = AP_QID_QUEUE(device_status[oi].qid); dom 1638 drivers/s390/crypto/zcrypt_ccamisc.c *pdomain = dom; dom 1680 drivers/s390/crypto/zcrypt_ccamisc.c int i, n, card, dom, curmatch, oldmatch, rc = 0; dom 1700 drivers/s390/crypto/zcrypt_ccamisc.c dom = AP_QID_QUEUE(device_status[i].qid); dom 1711 drivers/s390/crypto/zcrypt_ccamisc.c if (domain != 0xFFFF && dom != domain) dom 1714 drivers/s390/crypto/zcrypt_ccamisc.c if (cca_get_info(card, dom, &ci, verify)) dom 1736 drivers/s390/crypto/zcrypt_ccamisc.c (*apqns)[n] = (((u16)card) << 16) | ((u16) dom); dom 213 drivers/s390/crypto/zcrypt_ccamisc.h int cca_get_info(u16 card, u16 dom, struct cca_info *ci, int verify); dom 394 drivers/s390/crypto/zcrypt_msgtype6.c unsigned short **dom) dom 479 drivers/s390/crypto/zcrypt_msgtype6.c *dom = (unsigned short *)&msg->cprbx.domain; dom 1104 drivers/s390/crypto/zcrypt_msgtype6.c unsigned int *func_code, unsigned short **dom) dom 1119 drivers/s390/crypto/zcrypt_msgtype6.c return XCRB_msg_to_type6CPRB_msgX(ap_msg, xcRB, func_code, dom); dom 324 drivers/soc/amlogic/meson-ee-pwrc.c struct meson_ee_pwrc_domain *dom) dom 328 drivers/soc/amlogic/meson-ee-pwrc.c dom->pwrc = pwrc; dom 329 drivers/soc/amlogic/meson-ee-pwrc.c dom->num_rstc = dom->desc.reset_names_count; dom 330 drivers/soc/amlogic/meson-ee-pwrc.c dom->num_clks = dom->desc.clk_names_count; dom 332 drivers/soc/amlogic/meson-ee-pwrc.c if (dom->num_rstc) { dom 335 drivers/soc/amlogic/meson-ee-pwrc.c if (count != dom->num_rstc) dom 337 drivers/soc/amlogic/meson-ee-pwrc.c 
count, dom->desc.name); dom 339 drivers/soc/amlogic/meson-ee-pwrc.c dom->rstc = devm_reset_control_array_get(&pdev->dev, false, dom 341 drivers/soc/amlogic/meson-ee-pwrc.c if (IS_ERR(dom->rstc)) dom 342 drivers/soc/amlogic/meson-ee-pwrc.c return PTR_ERR(dom->rstc); dom 345 drivers/soc/amlogic/meson-ee-pwrc.c if (dom->num_clks) { dom 346 drivers/soc/amlogic/meson-ee-pwrc.c int ret = devm_clk_bulk_get_all(&pdev->dev, &dom->clks); dom 350 drivers/soc/amlogic/meson-ee-pwrc.c if (dom->num_clks != ret) { dom 352 drivers/soc/amlogic/meson-ee-pwrc.c ret, dom->desc.name); dom 353 drivers/soc/amlogic/meson-ee-pwrc.c dom->num_clks = ret; dom 357 drivers/soc/amlogic/meson-ee-pwrc.c dom->base.name = dom->desc.name; dom 358 drivers/soc/amlogic/meson-ee-pwrc.c dom->base.power_on = meson_ee_pwrc_on; dom 359 drivers/soc/amlogic/meson-ee-pwrc.c dom->base.power_off = meson_ee_pwrc_off; dom 372 drivers/soc/amlogic/meson-ee-pwrc.c if (dom->num_clks && dom->desc.get_power && !dom->desc.get_power(dom)) { dom 373 drivers/soc/amlogic/meson-ee-pwrc.c ret = clk_bulk_prepare_enable(dom->num_clks, dom->clks); dom 377 drivers/soc/amlogic/meson-ee-pwrc.c ret = pm_genpd_init(&dom->base, &pm_domain_always_on_gov, dom 382 drivers/soc/amlogic/meson-ee-pwrc.c ret = pm_genpd_init(&dom->base, NULL, dom 383 drivers/soc/amlogic/meson-ee-pwrc.c (dom->desc.get_power ? dom 384 drivers/soc/amlogic/meson-ee-pwrc.c dom->desc.get_power(dom) : true)); dom 441 drivers/soc/amlogic/meson-ee-pwrc.c struct meson_ee_pwrc_domain *dom = &pwrc->domains[i]; dom 443 drivers/soc/amlogic/meson-ee-pwrc.c memcpy(&dom->desc, &match->domains[i], sizeof(dom->desc)); dom 445 drivers/soc/amlogic/meson-ee-pwrc.c ret = meson_ee_pwrc_init_domain(pdev, pwrc, dom); dom 449 drivers/soc/amlogic/meson-ee-pwrc.c pwrc->xlate.domains[i] = &dom->base; dom 461 drivers/soc/amlogic/meson-ee-pwrc.c struct meson_ee_pwrc_domain *dom = &pwrc->domains[i]; dom 463 drivers/soc/amlogic/meson-ee-pwrc.c if (dom->desc.get_power && !dom->desc.get_power(dom)) dom 464 drivers/soc/amlogic/meson-ee-pwrc.c meson_ee_pwrc_off(&dom->base); dom 493 drivers/soc/bcm/bcm2835-power.c struct bcm2835_power_domain *dom = &power->domains[pd_xlate_index]; dom 495 drivers/soc/bcm/bcm2835-power.c dom->clk = devm_clk_get(dev->parent, name); dom 496 drivers/soc/bcm/bcm2835-power.c if (IS_ERR(dom->clk)) { dom 497 drivers/soc/bcm/bcm2835-power.c int ret = PTR_ERR(dom->clk); dom 505 drivers/soc/bcm/bcm2835-power.c dom->clk = NULL; dom 508 drivers/soc/bcm/bcm2835-power.c dom->base.name = name; dom 509 drivers/soc/bcm/bcm2835-power.c dom->base.power_on = bcm2835_power_pd_power_on; dom 510 drivers/soc/bcm/bcm2835-power.c dom->base.power_off = bcm2835_power_pd_power_off; dom 512 drivers/soc/bcm/bcm2835-power.c dom->domain = pd_xlate_index; dom 513 drivers/soc/bcm/bcm2835-power.c dom->power = power; dom 516 drivers/soc/bcm/bcm2835-power.c pm_genpd_init(&dom->base, NULL, true); dom 518 drivers/soc/bcm/bcm2835-power.c power->pd_xlate.domains[pd_xlate_index] = &dom->base; dom 672 drivers/soc/bcm/bcm2835-power.c struct generic_pm_domain *dom = &power->domains[i].base; dom 674 drivers/soc/bcm/bcm2835-power.c if (dom->name) dom 675 drivers/soc/bcm/bcm2835-power.c pm_genpd_remove(dom); dom 83 drivers/soc/bcm/raspberrypi-power.c struct rpi_power_domain *dom = &rpi_domains->domains[xlate_index]; dom 85 drivers/soc/bcm/raspberrypi-power.c dom->fw = rpi_domains->fw; dom 87 drivers/soc/bcm/raspberrypi-power.c dom->base.name = name; dom 88 drivers/soc/bcm/raspberrypi-power.c dom->base.power_on = rpi_domain_on; dom 89 
drivers/soc/bcm/raspberrypi-power.c dom->base.power_off = rpi_domain_off; dom 99 drivers/soc/bcm/raspberrypi-power.c pm_genpd_init(&dom->base, NULL, true); dom 101 drivers/soc/bcm/raspberrypi-power.c rpi_domains->xlate.domains[xlate_index] = &dom->base; dom 107 drivers/soc/bcm/raspberrypi-power.c struct rpi_power_domain *dom = &rpi_domains->domains[xlate_index]; dom 113 drivers/soc/bcm/raspberrypi-power.c dom->domain = xlate_index + 1; dom 122 drivers/soc/bcm/raspberrypi-power.c struct rpi_power_domain *dom = &rpi_domains->domains[xlate_index]; dom 124 drivers/soc/bcm/raspberrypi-power.c dom->old_interface = true; dom 125 drivers/soc/bcm/raspberrypi-power.c dom->domain = domain; dom 128 drivers/soc/dove/pmu.c #define to_pmu_domain(dom) container_of(dom, struct pmu_domain, base) dom 934 drivers/xen/events/events_base.c status.dom = DOMID_SELF; dom 530 drivers/xen/evtchn.c alloc_unbound.dom = DOMID_SELF; dom 707 drivers/xen/grant-table.c query.dom = DOMID_SELF; dom 1050 drivers/xen/grant-table.c foreign->domid = map_ops[i].dom; dom 1200 drivers/xen/grant-table.c getframes.dom = DOMID_SELF; dom 1272 drivers/xen/grant-table.c setup.dom = DOMID_SELF; dom 271 drivers/xen/privcmd.c if (data->domid != DOMID_INVALID && data->domid != mmapcmd.dom) dom 298 drivers/xen/privcmd.c state.domain = mmapcmd.dom; dom 478 drivers/xen/privcmd.c if (data->domid != DOMID_INVALID && data->domid != m.dom) dom 546 drivers/xen/privcmd.c state.domain = m.dom; dom 640 drivers/xen/privcmd.c if (data->domid != DOMID_INVALID && data->domid != kdata.dom) dom 698 drivers/xen/privcmd.c rc = HYPERVISOR_dm_op(kdata.dom, kdata.num, xbufs); dom 713 drivers/xen/privcmd.c domid_t dom; dom 715 drivers/xen/privcmd.c if (copy_from_user(&dom, udata, sizeof(dom))) dom 720 drivers/xen/privcmd.c data->domid = dom; dom 721 drivers/xen/privcmd.c else if (data->domid != dom) dom 741 drivers/xen/privcmd.c if (data->domid != DOMID_INVALID && data->domid != kdata.dom) dom 779 drivers/xen/privcmd.c xdata.domid = kdata.dom; dom 799 drivers/xen/privcmd.c DOMID_SELF : kdata.dom; dom 399 drivers/xen/xenbus/xenbus_client.c alloc_unbound.dom = DOMID_SELF; dom 53 drivers/xen/xenbus/xenbus_dev_backend.c arg.dom = DOMID_SELF; dom 725 drivers/xen/xenbus/xenbus_probe.c alloc_unbound.dom = DOMID_SELF; dom 77 fs/nfsd/export.c struct auth_domain *dom = NULL; dom 98 fs/nfsd/export.c dom = auth_domain_find(buf); dom 99 fs/nfsd/export.c if (!dom) dom 124 fs/nfsd/export.c key.ek_client = dom; dom 161 fs/nfsd/export.c if (dom) dom 162 fs/nfsd/export.c auth_domain_put(dom); dom 530 fs/nfsd/export.c struct auth_domain *dom = NULL; dom 549 fs/nfsd/export.c dom = auth_domain_find(buf); dom 550 fs/nfsd/export.c if (!dom) dom 562 fs/nfsd/export.c exp.ex_client = dom; dom 661 fs/nfsd/export.c auth_domain_put(dom); dom 357 fs/nfsd/nfsctl.c struct auth_domain *dom; dom 389 fs/nfsd/nfsctl.c dom = unix_domain_find(dname); dom 390 fs/nfsd/nfsctl.c if (!dom) dom 393 fs/nfsd/nfsctl.c len = exp_rootfh(netns(file), dom, path, &fh, maxsize); dom 394 fs/nfsd/nfsctl.c auth_domain_put(dom); dom 163 fs/proc_namespace.c int dom = get_dominating_id(r, &p->root); dom 165 fs/proc_namespace.c if (dom && dom != master) dom 166 fs/proc_namespace.c seq_printf(m, " propagate_from:%i", dom); dom 385 include/linux/lightnvm.h u32 dom; dom 162 include/linux/sunrpc/svcauth.h extern int auth_unix_add_addr(struct net *net, struct in6_addr *addr, struct auth_domain *dom); dom 166 include/linux/sunrpc/svcauth.h extern int auth_unix_forget_old(struct auth_domain *dom); dom 25 
include/linux/sunrpc/svcauth_gss.h u32 svcauth_gss_flavor(struct auth_domain *dom); dom 179 include/linux/writeback.h static inline void wb_domain_size_changed(struct wb_domain *dom) dom 181 include/linux/writeback.h spin_lock(&dom->lock); dom 182 include/linux/writeback.h dom->dirty_limit_tstamp = jiffies; dom 183 include/linux/writeback.h dom->dirty_limit = 0; dom 184 include/linux/writeback.h spin_unlock(&dom->lock); dom 346 include/linux/writeback.h int wb_domain_init(struct wb_domain *dom, gfp_t gfp); dom 348 include/linux/writeback.h void wb_domain_exit(struct wb_domain *dom); dom 44 include/uapi/linux/hsi/cs-protocol.h #define CS_CMD(id, dom) \ dom 45 include/uapi/linux/hsi/cs-protocol.h (((id) << CS_CMD_SHIFT) | ((dom) << CS_DOMAIN_SHIFT)) dom 58 include/uapi/xen/privcmd.h domid_t dom; /* target domain */ dom 64 include/uapi/xen/privcmd.h domid_t dom; /* target domain */ dom 75 include/uapi/xen/privcmd.h domid_t dom; /* target domain */ dom 87 include/uapi/xen/privcmd.h domid_t dom; dom 93 include/uapi/xen/privcmd.h domid_t dom; dom 159 include/xen/grant_table.h map->dom = domid; dom 29 include/xen/interface/event_channel.h domid_t dom, remote_dom; dom 130 include/xen/interface/event_channel.h domid_t dom; dom 143 include/xen/interface/event_channel.h domid_t dom; dom 146 include/xen/interface/event_channel.h domid_t dom; dom 190 include/xen/interface/event_channel.h domid_t dom; dom 267 include/xen/interface/grant_table.h domid_t dom; dom 309 include/xen/interface/grant_table.h domid_t dom; dom 324 include/xen/interface/grant_table.h domid_t dom; dom 400 include/xen/interface/grant_table.h domid_t dom; dom 460 include/xen/interface/grant_table.h domid_t dom; dom 474 include/xen/interface/grant_table.h domid_t dom; dom 131 mm/page-writeback.c struct wb_domain *dom; dom 159 mm/page-writeback.c .dom = &global_wb_domain, \ dom 162 mm/page-writeback.c #define GDTC_INIT_NO_WB .dom = &global_wb_domain dom 165 mm/page-writeback.c .dom = mem_cgroup_wb_domain(__wb), \ dom 171 mm/page-writeback.c return dtc->dom; dom 176 mm/page-writeback.c return dtc->dom; dom 577 mm/page-writeback.c static void wb_domain_writeout_inc(struct wb_domain *dom, dom 581 mm/page-writeback.c __fprop_inc_percpu_max(&dom->completions, completions, dom 584 mm/page-writeback.c if (unlikely(!dom->period_time)) { dom 591 mm/page-writeback.c dom->period_time = wp_next_time(jiffies); dom 592 mm/page-writeback.c mod_timer(&dom->period_timer, dom->period_time); dom 630 mm/page-writeback.c struct wb_domain *dom = from_timer(dom, t, period_timer); dom 631 mm/page-writeback.c int miss_periods = (jiffies - dom->period_time) / dom 634 mm/page-writeback.c if (fprop_new_period(&dom->completions, miss_periods + 1)) { dom 635 mm/page-writeback.c dom->period_time = wp_next_time(dom->period_time + dom 637 mm/page-writeback.c mod_timer(&dom->period_timer, dom->period_time); dom 643 mm/page-writeback.c dom->period_time = 0; dom 647 mm/page-writeback.c int wb_domain_init(struct wb_domain *dom, gfp_t gfp) dom 649 mm/page-writeback.c memset(dom, 0, sizeof(*dom)); dom 651 mm/page-writeback.c spin_lock_init(&dom->lock); dom 653 mm/page-writeback.c timer_setup(&dom->period_timer, writeout_period, TIMER_DEFERRABLE); dom 655 mm/page-writeback.c dom->dirty_limit_tstamp = jiffies; dom 657 mm/page-writeback.c return fprop_global_init(&dom->completions, gfp); dom 661 mm/page-writeback.c void wb_domain_exit(struct wb_domain *dom) dom 663 mm/page-writeback.c del_timer_sync(&dom->period_timer); dom 664 mm/page-writeback.c 
dom 722 mm/page-writeback.c static unsigned long hard_dirty_limit(struct wb_domain *dom,
dom 725 mm/page-writeback.c return max(thresh, dom->dirty_limit);
dom 766 mm/page-writeback.c struct wb_domain *dom = dtc_dom(dtc);
dom 775 mm/page-writeback.c fprop_fraction_percpu(&dom->completions, dtc->wb_completions,
dom 1135 mm/page-writeback.c struct wb_domain *dom = dtc_dom(dtc);
dom 1137 mm/page-writeback.c unsigned long limit = dom->dirty_limit;
dom 1159 mm/page-writeback.c dom->dirty_limit = limit;
dom 1165 mm/page-writeback.c struct wb_domain *dom = dtc_dom(dtc);
dom 1170 mm/page-writeback.c if (time_before(now, dom->dirty_limit_tstamp + BANDWIDTH_INTERVAL))
dom 1173 mm/page-writeback.c spin_lock(&dom->lock);
dom 1174 mm/page-writeback.c if (time_after_eq(now, dom->dirty_limit_tstamp + BANDWIDTH_INTERVAL)) {
dom 1176 mm/page-writeback.c dom->dirty_limit_tstamp = now;
dom 1178 mm/page-writeback.c spin_unlock(&dom->lock);
dom 2046 mm/page-writeback.c struct wb_domain *dom = &global_wb_domain;
dom 2051 mm/page-writeback.c dom->dirty_limit = dirty_thresh;
dom 794 net/sunrpc/auth_gss/svcauth_gss.c u32 svcauth_gss_flavor(struct auth_domain *dom)
dom 796 net/sunrpc/auth_gss/svcauth_gss.c struct gss_domain *gd = container_of(dom, struct gss_domain, h);
dom 1832 net/sunrpc/auth_gss/svcauth_gss.c struct auth_domain *dom = container_of(head, struct auth_domain, rcu_head);
dom 1833 net/sunrpc/auth_gss/svcauth_gss.c struct gss_domain *gd = container_of(dom, struct gss_domain, h);
dom 1835 net/sunrpc/auth_gss/svcauth_gss.c kfree(dom->name);
dom 1840 net/sunrpc/auth_gss/svcauth_gss.c svcauth_gss_domain_release(struct auth_domain *dom)
dom 1842 net/sunrpc/auth_gss/svcauth_gss.c call_rcu(&dom->rcu_head, svcauth_gss_domain_release_rcu);
dom 149 net/sunrpc/svcauth.c struct auth_domain *dom = container_of(kref, struct auth_domain, ref);
dom 151 net/sunrpc/svcauth.c hlist_del_rcu(&dom->hash);
dom 152 net/sunrpc/svcauth.c dom->flavour->domain_release(dom);
dom 156 net/sunrpc/svcauth.c void auth_domain_put(struct auth_domain *dom)
dom 158 net/sunrpc/svcauth.c kref_put_lock(&dom->ref, auth_domain_release, &auth_domain_lock);
dom 43 net/sunrpc/svcauth_unix.c struct auth_domain *dom = container_of(head, struct auth_domain, rcu_head);
dom 44 net/sunrpc/svcauth_unix.c struct unix_domain *ud = container_of(dom, struct unix_domain, h);
dom 46 net/sunrpc/svcauth_unix.c kfree(dom->name);
dom 50 net/sunrpc/svcauth_unix.c static void svcauth_unix_domain_release(struct auth_domain *dom)
dom 52 net/sunrpc/svcauth_unix.c call_rcu(&dom->rcu_head, svcauth_unix_domain_release_rcu);
dom 189 net/sunrpc/svcauth_unix.c struct auth_domain *dom;
dom 231 net/sunrpc/svcauth_unix.c dom = unix_domain_find(buf);
dom 232 net/sunrpc/svcauth_unix.c if (dom == NULL)
dom 235 net/sunrpc/svcauth_unix.c dom = NULL;
dom 241 net/sunrpc/svcauth_unix.c container_of(dom, struct unix_domain, h),
dom 246 net/sunrpc/svcauth_unix.c if (dom)
dom 247 net/sunrpc/svcauth_unix.c auth_domain_put(dom);
dom 259 net/sunrpc/svcauth_unix.c char *dom = "-no-domain-";
dom 271 net/sunrpc/svcauth_unix.c dom = im->m_client->h.name;
dom 275 net/sunrpc/svcauth_unix.c im->m_class, &addr.s6_addr32[3], dom);
dom 277 net/sunrpc/svcauth_unix.c seq_printf(m, "%s %pI6 %s\n", im->m_class, &addr, dom);
dom 109 net/tipc/monitor.c static int dom_rec_len(struct tipc_mon_domain *dom, u16 mcnt)
dom 111 net/tipc/monitor.c return ((void *)&dom->members - (void *)dom) + (mcnt * sizeof(u32));
dom 218 net/tipc/monitor.c struct tipc_mon_domain *dom = peer->domain;
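dom_rec_len() above sizes a record that ends in a flexible array of u32 member addresses; its pointer arithmetic is equivalent to the offset of the members field plus mcnt * sizeof(u32). A standalone illustration of the same idiom, using a hypothetical struct rather than the private tipc_mon_domain layout:

        #include <stddef.h>
        #include <stdint.h>

        /* Hedged sketch: "struct rec" is hypothetical and only mirrors
         * the shape implied by dom_rec_len() above. */
        struct rec {
                uint16_t len;
                uint16_t member_cnt;
                uint32_t members[];     /* flexible array member */
        };

        static size_t rec_len(uint16_t mcnt)
        {
                /* same result as the pointer arithmetic in dom_rec_len() */
                return offsetof(struct rec, members) + mcnt * sizeof(uint32_t);
        }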
dom 223 net/tipc/monitor.c if (!dom || !peer->is_up)
dom 229 net/tipc/monitor.c for (i = 0; i < dom->member_cnt; i++) {
dom 230 net/tipc/monitor.c addr = dom->members[i];
dom 244 net/tipc/monitor.c struct tipc_mon_domain *dom = self->domain;
dom 246 net/tipc/monitor.c u64 prev_up_map = dom->up_map;
dom 255 net/tipc/monitor.c dom->len = dom_rec_len(dom, member_cnt);
dom 256 net/tipc/monitor.c diff = dom->member_cnt != member_cnt;
dom 257 net/tipc/monitor.c dom->member_cnt = member_cnt;
dom 260 net/tipc/monitor.c diff |= dom->members[i] != peer->addr;
dom 261 net/tipc/monitor.c dom->members[i] = peer->addr;
dom 262 net/tipc/monitor.c map_set(&dom->up_map, i, peer->is_up);
dom 265 net/tipc/monitor.c diff |= dom->up_map != prev_up_map;
dom 268 net/tipc/monitor.c dom->gen = ++mon->dom_gen;
dom 269 net/tipc/monitor.c cache->len = htons(dom->len);
dom 270 net/tipc/monitor.c cache->gen = htons(dom->gen);
dom 272 net/tipc/monitor.c cache->up_map = cpu_to_be64(dom->up_map);
dom 412 net/tipc/monitor.c struct tipc_mon_domain *dom;
dom 423 net/tipc/monitor.c dom = peer->domain;
dom 426 net/tipc/monitor.c mon_identify_lost_members(peer, dom, applied);
dom 427 net/tipc/monitor.c kfree(dom);
dom 448 net/tipc/monitor.c struct tipc_mon_domain *dom;
dom 497 net/tipc/monitor.c dom = peer->domain;
dom 498 net/tipc/monitor.c if (dom)
dom 499 net/tipc/monitor.c memcpy(&dom_bef, dom, dom->len);
dom 502 net/tipc/monitor.c if (!dom || (dom->len < new_dlen)) {
dom 503 net/tipc/monitor.c kfree(dom);
dom 504 net/tipc/monitor.c dom = kmalloc(new_dlen, GFP_ATOMIC);
dom 505 net/tipc/monitor.c peer->domain = dom;
dom 506 net/tipc/monitor.c if (!dom)
dom 509 net/tipc/monitor.c dom->len = new_dlen;
dom 510 net/tipc/monitor.c dom->gen = new_gen;
dom 511 net/tipc/monitor.c dom->member_cnt = new_member_cnt;
dom 512 net/tipc/monitor.c dom->up_map = be64_to_cpu(arrv_dom->up_map);
dom 514 net/tipc/monitor.c dom->members[i] = ntohl(arrv_dom->members[i]);
dom 529 net/tipc/monitor.c struct tipc_mon_domain *dom = data;
dom 535 net/tipc/monitor.c dom->len = 0;
dom 541 net/tipc/monitor.c len = dom_rec_len(dom, 0);
dom 543 net/tipc/monitor.c dom->len = htons(len);
dom 544 net/tipc/monitor.c dom->gen = htons(gen);
dom 545 net/tipc/monitor.c dom->ack_gen = htons(state->peer_gen);
dom 546 net/tipc/monitor.c dom->member_cnt = 0;
dom 555 net/tipc/monitor.c dom->ack_gen = htons(state->peer_gen);
dom 611 net/tipc/monitor.c struct tipc_mon_domain *dom;
dom 618 net/tipc/monitor.c dom = kzalloc(sizeof(*dom), GFP_ATOMIC);
dom 619 net/tipc/monitor.c if (!mon || !self || !dom) {
dom 622 net/tipc/monitor.c kfree(dom);
dom 630 net/tipc/monitor.c self->domain = dom;
dom 705 net/tipc/monitor.c struct tipc_mon_domain *dom = peer->domain;
dom 733 net/tipc/monitor.c if (dom) {
dom 734 net/tipc/monitor.c if (nla_put_u32(msg->skb, TIPC_NLA_MON_PEER_DOMGEN, dom->gen))
dom 737 net/tipc/monitor.c dom->up_map, TIPC_NLA_MON_PEER_PAD))
dom 740 net/tipc/monitor.c dom->member_cnt * sizeof(u32), &dom->members))
dom 680 scripts/gcc-plugins/randomize_layout_plugin.c basic_block dom;
dom 688 scripts/gcc-plugins/randomize_layout_plugin.c dom = get_immediate_dominator(CDI_DOMINATORS, bb);
dom 689 scripts/gcc-plugins/randomize_layout_plugin.c if (!dom)
dom 692 scripts/gcc-plugins/randomize_layout_plugin.c dom_stmt = last_stmt(dom);
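The net/tipc/monitor.c receive-path entries above (lines 502-514) show a common pattern for cached wire records: reallocate the cached copy when the arriving record is larger, then convert the fields from network to host byte order. A hedged sketch of that flow, with struct mon_rec as a hypothetical stand-in for the private struct tipc_mon_domain:

        #include <linux/slab.h>
        #include <linux/types.h>
        #include <asm/byteorder.h>

        /* Hedged sketch: struct mon_rec is hypothetical; the realloc-and-
         * convert flow mirrors the net/tipc/monitor.c entries above. The
         * fields of *arrv are assumed to arrive in big-endian wire order. */
        struct mon_rec {
                u16 len;
                u16 member_cnt;
                u64 up_map;
                u32 members[];
        };

        static struct mon_rec *mon_rec_update(struct mon_rec *dom,
                                              const struct mon_rec *arrv,
                                              u16 new_dlen, u16 member_cnt)
        {
                int i;

                if (!dom || dom->len < new_dlen) {
                        kfree(dom);                     /* kfree(NULL) is a no-op */
                        dom = kmalloc(new_dlen, GFP_ATOMIC);
                        if (!dom)
                                return NULL;
                }
                dom->len = new_dlen;
                dom->member_cnt = member_cnt;
                dom->up_map = be64_to_cpu(arrv->up_map);
                for (i = 0; i < member_cnt; i++)
                        dom->members[i] = ntohl(arrv->members[i]);
                return dom;
        }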