umc               339 arch/x86/include/asm/mce.h int umc_normaddr_to_sysaddr(u64 norm_addr, u16 nid, u8 umc, u64 *sys_addr);
umc               348 arch/x86/include/asm/mce.h umc_normaddr_to_sysaddr(u64 norm_addr, u16 nid, u8 umc, u64 *sys_addr)	{ return -EINVAL; };
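
The two arch/x86/include/asm/mce.h hits above are the declaration and the fallback stub of umc_normaddr_to_sysaddr(), which translates a UMC-normalized DRAM address back to a system physical address for a given node and UMC instance. Below is a minimal sketch of a hypothetical caller; the reporting function and its messages are illustrative, not taken from the kernel.

#include <linux/printk.h>
#include <asm/mce.h>

/* Hypothetical caller: try to translate a normalized address reported by a
 * UMC before logging it.  When the translation code is not built in, the
 * stub above returns -EINVAL and we fall back to the raw address. */
static void report_umc_error(u64 norm_addr, u16 nid, u8 umc_inst)
{
	u64 sys_addr;

	if (umc_normaddr_to_sysaddr(norm_addr, nid, umc_inst, &sys_addr))
		pr_warn("UMC error, normalized addr 0x%llx (untranslated)\n",
			norm_addr);
	else
		pr_warn("UMC error at system addr 0x%llx\n", sys_addr);
}
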
umc               662 arch/x86/kernel/cpu/mce/amd.c int umc_normaddr_to_sysaddr(u64 norm_addr, u16 nid, u8 umc, u64 *sys_addr)
umc               679 arch/x86/kernel/cpu/mce/amd.c 	if (amd_df_indirect_read(nid, 0, 0x1B4, umc, &tmp))
umc               693 arch/x86/kernel/cpu/mce/amd.c 	if (amd_df_indirect_read(nid, 0, 0x110 + (8 * base), umc, &tmp))
umc               716 arch/x86/kernel/cpu/mce/amd.c 	if (amd_df_indirect_read(nid, 0, 0x114 + (8 * base), umc, &tmp))
umc               772 arch/x86/kernel/cpu/mce/amd.c 		if (amd_df_indirect_read(nid, 0, 0x50, umc, &tmp))
umc               789 arch/x86/kernel/cpu/mce/amd.c 			if (amd_df_indirect_read(nid, 1, 0x208, umc, &tmp))
umc               828 arch/x86/kernel/cpu/mce/amd.c 		if (amd_df_indirect_read(nid, 0, 0x104, umc, &tmp))
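
The arch/x86/kernel/cpu/mce/amd.c hits are the body of umc_normaddr_to_sysaddr(): each translation step fetches a Data Fabric register through amd_df_indirect_read(), a helper local to that file, and aborts on any failure. The fragment below is a hedged sketch of that read-and-bail pattern and only makes sense inside amd.c where the static helper is visible; the wrapper name and error message are illustrative.

/* Sketch (same-file style): wrap the indirect DF read used above so a
 * failed access is reported and turned into -EINVAL, mirroring how the
 * translation code bails out after each register fetch. */
static int read_df_reg(u16 nid, u8 func, u16 reg, u8 instance, u32 *val)
{
	if (amd_df_indirect_read(nid, func, reg, instance, val)) {
		pr_err("%s: error reading DF register 0x%x\n", __func__, reg);
		return -EINVAL;
	}

	return 0;
}
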
umc               729 drivers/edac/amd64_edac.c 	if (pvt->umc) {
umc               733 drivers/edac/amd64_edac.c 			if (!(pvt->umc[i].sdp_ctrl & UMC_SDP_INIT))
umc               739 drivers/edac/amd64_edac.c 			if (pvt->umc[i].umc_cfg & BIT(12))
umc               839 drivers/edac/amd64_edac.c 	struct amd64_umc *umc;
umc               844 drivers/edac/amd64_edac.c 		umc = &pvt->umc[i];
umc               846 drivers/edac/amd64_edac.c 		edac_dbg(1, "UMC%d DIMM cfg: 0x%x\n", i, umc->dimm_cfg);
umc               847 drivers/edac/amd64_edac.c 		edac_dbg(1, "UMC%d UMC cfg: 0x%x\n", i, umc->umc_cfg);
umc               848 drivers/edac/amd64_edac.c 		edac_dbg(1, "UMC%d SDP ctrl: 0x%x\n", i, umc->sdp_ctrl);
umc               849 drivers/edac/amd64_edac.c 		edac_dbg(1, "UMC%d ECC ctrl: 0x%x\n", i, umc->ecc_ctrl);
umc               856 drivers/edac/amd64_edac.c 		edac_dbg(1, "UMC%d UMC cap high: 0x%x\n", i, umc->umc_cap_hi);
umc               859 drivers/edac/amd64_edac.c 				i, (umc->umc_cap_hi & BIT(30)) ? "yes" : "no",
umc               860 drivers/edac/amd64_edac.c 				    (umc->umc_cap_hi & BIT(31)) ? "yes" : "no");
umc               862 drivers/edac/amd64_edac.c 				i, (umc->umc_cfg & BIT(12)) ? "yes" : "no");
umc               864 drivers/edac/amd64_edac.c 				i, (umc->dimm_cfg & BIT(6)) ? "yes" : "no");
umc               866 drivers/edac/amd64_edac.c 				i, (umc->dimm_cfg & BIT(7)) ? "yes" : "no");
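
The drivers/edac/amd64_edac.c hits above come from the UMC (Family 17h+, Data Fabric) paths: the capability check loops over UMCs with SdpInit set, and the debug dump prints each channel's dimm_cfg/umc_cfg/sdp_ctrl/ecc_ctrl/umc_cap_hi snapshot. The sketch below gathers the same bits into one struct; the bit positions are taken from the dump and capability code in this listing, the field labels follow the driver's debug strings, and the helper and struct names are illustrative.

#include "amd64_edac.h"		/* struct amd64_umc, UMC_ECC_* masks */

struct umc_caps {
	bool ecc_enabled;	/* umc_cap_hi, UMC_ECC_ENABLED (BIT(30)) */
	bool chipkill_capable;	/* umc_cap_hi, UMC_ECC_CHIPKILL_CAP (BIT(31)) */
	bool all_dimms_ecc;	/* umc_cfg, BIT(12) */
	bool x4_dimms;		/* dimm_cfg, BIT(6) */
	bool x16_dimms;		/* dimm_cfg, BIT(7) */
};

static void decode_umc_caps(const struct amd64_umc *umc, struct umc_caps *c)
{
	c->ecc_enabled      = !!(umc->umc_cap_hi & UMC_ECC_ENABLED);
	c->chipkill_capable = !!(umc->umc_cap_hi & UMC_ECC_CHIPKILL_CAP);
	c->all_dimms_ecc    = !!(umc->umc_cfg & BIT(12));
	c->x4_dimms         = !!(umc->dimm_cfg & BIT(6));
	c->x16_dimms        = !!(umc->dimm_cfg & BIT(7));
}
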
umc               918 drivers/edac/amd64_edac.c 	if (pvt->umc)
umc               940 drivers/edac/amd64_edac.c 		int umc;
umc               942 drivers/edac/amd64_edac.c 		for_each_umc(umc) {
umc               943 drivers/edac/amd64_edac.c 			pvt->csels[umc].b_cnt = 4;
umc               944 drivers/edac/amd64_edac.c 			pvt->csels[umc].m_cnt = 2;
umc               961 drivers/edac/amd64_edac.c 	int cs, umc;
umc               963 drivers/edac/amd64_edac.c 	for_each_umc(umc) {
umc               964 drivers/edac/amd64_edac.c 		umc_base_reg = get_umc_base(umc) + UMCCH_BASE_ADDR;
umc               965 drivers/edac/amd64_edac.c 		umc_base_reg_sec = get_umc_base(umc) + UMCCH_BASE_ADDR_SEC;
umc               967 drivers/edac/amd64_edac.c 		for_each_chip_select(cs, umc, pvt) {
umc               968 drivers/edac/amd64_edac.c 			base = &pvt->csels[umc].csbases[cs];
umc               969 drivers/edac/amd64_edac.c 			base_sec = &pvt->csels[umc].csbases_sec[cs];
umc               976 drivers/edac/amd64_edac.c 					 umc, cs, *base, base_reg);
umc               980 drivers/edac/amd64_edac.c 					 umc, cs, *base_sec, base_reg_sec);
umc               983 drivers/edac/amd64_edac.c 		umc_mask_reg = get_umc_base(umc) + UMCCH_ADDR_MASK;
umc               984 drivers/edac/amd64_edac.c 		umc_mask_reg_sec = get_umc_base(umc) + UMCCH_ADDR_MASK_SEC;
umc               986 drivers/edac/amd64_edac.c 		for_each_chip_select_mask(cs, umc, pvt) {
umc               987 drivers/edac/amd64_edac.c 			mask = &pvt->csels[umc].csmasks[cs];
umc               988 drivers/edac/amd64_edac.c 			mask_sec = &pvt->csels[umc].csmasks_sec[cs];
umc               995 drivers/edac/amd64_edac.c 					 umc, cs, *mask, mask_reg);
umc               999 drivers/edac/amd64_edac.c 					 umc, cs, *mask_sec, mask_reg_sec);
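
read_umc_base_mask() above builds each chip-select register address from the UMC channel base plus a block offset: get_umc_base(umc) selects the channel, UMCCH_BASE_ADDR / UMCCH_ADDR_MASK (and their _SEC variants) select the block, and consecutive chip selects sit 4 bytes apart. The sketch below reads a single chip-select base register that way; the helper name, the 4-byte stride and the use of pvt->mc_node_id as the SMN node are assumptions drawn from that function rather than from the lines quoted here.

/* Hedged sketch: SMN address of one chip-select base register, read via
 * amd_smn_read() as in read_umc_base_mask().  Returns 0 on success. */
static int read_cs_base(struct amd64_pvt *pvt, u8 umc, u8 cs, u32 *base)
{
	u32 reg = get_umc_base(umc) + UMCCH_BASE_ADDR + (cs * 4);

	return amd_smn_read(pvt->mc_node_id, reg, base);
}
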
umc              1013 drivers/edac/amd64_edac.c 	if (pvt->umc)
umc              1106 drivers/edac/amd64_edac.c 		if ((pvt->umc[0].dimm_cfg | pvt->umc[1].dimm_cfg) & BIT(5))
umc              1108 drivers/edac/amd64_edac.c 		else if ((pvt->umc[0].dimm_cfg | pvt->umc[1].dimm_cfg) & BIT(4))
umc              1449 drivers/edac/amd64_edac.c 		channels += !!(pvt->umc[i].sdp_ctrl & UMC_SDP_INIT);
umc              1583 drivers/edac/amd64_edac.c static int f17_addr_mask_to_cs_size(struct amd64_pvt *pvt, u8 umc,
umc              1611 drivers/edac/amd64_edac.c 		addr_mask_orig = pvt->csels[umc].csmasks_sec[dimm];
umc              1613 drivers/edac/amd64_edac.c 		addr_mask_orig = pvt->csels[umc].csmasks[dimm];
umc              2648 drivers/edac/amd64_edac.c 	if (pvt->umc) {
umc              2697 drivers/edac/amd64_edac.c 	if (pvt->umc) {
umc              2710 drivers/edac/amd64_edac.c 	if (pvt->umc) {
umc              2715 drivers/edac/amd64_edac.c 			if (pvt->umc[i].sdp_ctrl & UMC_SDP_INIT) {
umc              2716 drivers/edac/amd64_edac.c 				if (pvt->umc[i].ecc_ctrl & BIT(9)) {
umc              2719 drivers/edac/amd64_edac.c 				} else if (pvt->umc[i].ecc_ctrl & BIT(7)) {
umc              2745 drivers/edac/amd64_edac.c 	struct amd64_umc *umc;
umc              2752 drivers/edac/amd64_edac.c 		umc = &pvt->umc[i];
umc              2754 drivers/edac/amd64_edac.c 		amd_smn_read(nid, umc_base + UMCCH_DIMM_CFG, &umc->dimm_cfg);
umc              2755 drivers/edac/amd64_edac.c 		amd_smn_read(nid, umc_base + UMCCH_UMC_CFG, &umc->umc_cfg);
umc              2756 drivers/edac/amd64_edac.c 		amd_smn_read(nid, umc_base + UMCCH_SDP_CTRL, &umc->sdp_ctrl);
umc              2757 drivers/edac/amd64_edac.c 		amd_smn_read(nid, umc_base + UMCCH_ECC_CTRL, &umc->ecc_ctrl);
umc              2758 drivers/edac/amd64_edac.c 		amd_smn_read(nid, umc_base + UMCCH_UMC_CAP_HI, &umc->umc_cap_hi);
umc              2787 drivers/edac/amd64_edac.c 	if (pvt->umc) {
umc              2885 drivers/edac/amd64_edac.c 	if (!pvt->umc) {
umc              2909 drivers/edac/amd64_edac.c 	u8 umc, cs;
umc              2924 drivers/edac/amd64_edac.c 	for_each_umc(umc) {
umc              2925 drivers/edac/amd64_edac.c 		for_each_chip_select(cs, umc, pvt) {
umc              2926 drivers/edac/amd64_edac.c 			if (!csrow_enabled(cs, umc, pvt))
umc              2930 drivers/edac/amd64_edac.c 			dimm = mci->csrows[cs]->channels[umc]->dimm;
umc              2935 drivers/edac/amd64_edac.c 			dimm->nr_pages = get_csrow_nr_pages(pvt, umc, cs);
umc              2960 drivers/edac/amd64_edac.c 	if (pvt->umc)
umc              3258 drivers/edac/amd64_edac.c 		if (pvt->umc[i].sdp_ctrl & UMC_SDP_INIT) {
umc              3259 drivers/edac/amd64_edac.c 			ecc_en &= !!(pvt->umc[i].umc_cap_hi & UMC_ECC_ENABLED);
umc              3260 drivers/edac/amd64_edac.c 			cpk_en &= !!(pvt->umc[i].umc_cap_hi & UMC_ECC_CHIPKILL_CAP);
umc              3262 drivers/edac/amd64_edac.c 			dev_x4  &= !!(pvt->umc[i].dimm_cfg & BIT(6));
umc              3263 drivers/edac/amd64_edac.c 			dev_x16 &= !!(pvt->umc[i].dimm_cfg & BIT(7));
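
The capability lines above (ecc_en, cpk_en, dev_x4, dev_x16) use an AND-accumulate: each flag starts at 1 and is cleared as soon as any initialized UMC (SdpInit set) lacks the property, so a capability is only advertised when every active channel has it. A hedged sketch of that pattern for the ECC flag alone; the helper name is illustrative.

/* Hedged sketch: report ECC as enabled only if every UMC with SdpInit set
 * has UMC_ECC_ENABLED in its capability-high register, mirroring the
 * accumulation loop above. */
static bool all_active_umcs_ecc_enabled(struct amd64_pvt *pvt)
{
	u8 i, ecc_en = 1;

	for_each_umc(i) {
		if (!(pvt->umc[i].sdp_ctrl & UMC_SDP_INIT))
			continue;	/* channel has no DIMMs, ignore it */

		ecc_en &= !!(pvt->umc[i].umc_cap_hi & UMC_ECC_ENABLED);
	}

	return ecc_en;
}
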
umc              3291 drivers/edac/amd64_edac.c 	if (pvt->umc) {
umc              3446 drivers/edac/amd64_edac.c 		pvt->umc = kcalloc(num_umcs, sizeof(struct amd64_umc), GFP_KERNEL);
umc              3447 drivers/edac/amd64_edac.c 		if (!pvt->umc) {
umc              3523 drivers/edac/amd64_edac.c 		kfree(pvt->umc);
umc              3625 drivers/edac/amd64_edac.c 	if (pvt->umc)
umc               385 drivers/edac/amd64_edac.h 	struct amd64_umc *umc;	/* UMC registers */
umc               953 drivers/gpu/drm/amd/amdgpu/amdgpu.h 	struct amdgpu_umc		umc;
umc               599 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c 		if (adev->umc.funcs->query_ras_error_count)
umc               600 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c 			adev->umc.funcs->query_ras_error_count(adev, &err_data);
umc               604 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c 		if (adev->umc.funcs->query_ras_error_address)
umc               605 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c 			adev->umc.funcs->query_ras_error_address(adev, &err_data);
umc              1466 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c 	if (adev->umc.funcs->ras_init)
umc              1467 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c 		adev->umc.funcs->ras_init(adev);
umc                40 drivers/gpu/drm/amd/amdgpu/amdgpu_umc.h 	for (umc_inst = 0; umc_inst < adev->umc.umc_inst_num; umc_inst++) {	\
umc                42 drivers/gpu/drm/amd/amdgpu/amdgpu_umc.h 		adev->umc.funcs->enable_umc_index_mode(adev, umc_inst);	\
umc                44 drivers/gpu/drm/amd/amdgpu/amdgpu_umc.h 			channel_inst < adev->umc.channel_inst_num;	\
umc                47 drivers/gpu/drm/amd/amdgpu/amdgpu_umc.h 			umc_reg_offset = adev->umc.channel_offs * channel_inst;	\
umc                49 drivers/gpu/drm/amd/amdgpu/amdgpu_umc.h 			channel_index = adev->umc.channel_idx_tbl[	\
umc                50 drivers/gpu/drm/amd/amdgpu/amdgpu_umc.h 				umc_inst * adev->umc.channel_inst_num + channel_inst];	\
umc                54 drivers/gpu/drm/amd/amdgpu/amdgpu_umc.h 	adev->umc.funcs->disable_umc_index_mode(adev);
umc               251 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 	if (adev->umc.funcs->query_ras_error_count)
umc               252 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 		adev->umc.funcs->query_ras_error_count(adev, err_data);
umc               256 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 	if (adev->umc.funcs->query_ras_error_address)
umc               257 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 		adev->umc.funcs->query_ras_error_address(adev, err_data);
umc               741 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 		adev->umc.max_ras_err_cnt_per_query = UMC_V6_1_TOTAL_CHANNEL_NUM;
umc               742 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 		adev->umc.channel_inst_num = UMC_V6_1_CHANNEL_INSTANCE_NUM;
umc               743 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 		adev->umc.umc_inst_num = UMC_V6_1_UMC_INSTANCE_NUM;
umc               744 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 		adev->umc.channel_offs = UMC_V6_1_PER_CHANNEL_OFFSET;
umc               745 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 		adev->umc.channel_idx_tbl = &umc_v6_1_channel_idx_tbl[0][0];
umc               746 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 		adev->umc.funcs = &umc_v6_1_funcs;
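
The amdgpu hits show the RAS code calling through the adev->umc.funcs vtable only when a callback is implemented, the amdgpu_umc.h macro that walks every (UMC instance, channel) pair, and gmc_v9_0.c filling in the UMC 6.1 parameters that drive that walk. The helper below is an illustrative restatement of how the macro derives the per-channel register offset and logical channel index from those fields; the real driver does this inline in the macro.

/* Illustrative helper: compute the register offset and channel index for a
 * given (umc_inst, channel_inst) pair from the fields initialized in
 * gmc_v9_0.c above. */
static void umc_channel_coords(struct amdgpu_device *adev,
			       u32 umc_inst, u32 channel_inst,
			       u32 *reg_offset, u32 *channel_index)
{
	/* Registers of consecutive channel instances are channel_offs apart. */
	*reg_offset = adev->umc.channel_offs * channel_inst;

	/* channel_idx_tbl is a flat [umc_inst][channel_inst] lookup table. */
	*channel_index = adev->umc.channel_idx_tbl[
		umc_inst * adev->umc.channel_inst_num + channel_inst];
}
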
umc              3520 drivers/scsi/megaraid.c 	megacmd_t	__user *umc;
umc              3540 drivers/scsi/megaraid.c 			umc = MBOX_P(uiocp);
umc              3542 drivers/scsi/megaraid.c 			if (get_user(upthru, (mega_passthru __user * __user *)&umc->xferaddr))
umc              3557 drivers/scsi/megaraid.c 			umc = (megacmd_t __user *)uioc_mimd->mbox;
umc              3559 drivers/scsi/megaraid.c 			if (get_user(upthru, (mega_passthru __user * __user *)&umc->xferaddr))
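
The megaraid hits show the old ioctl path pulling a second user-space pointer (the passthru structure) out of a user-space mailbox with get_user() before anything is copied in. Below is a hedged, generic sketch of that double-indirection pattern; struct demo_cmd, its fields and the helper are hypothetical and only illustrate the uaccess calls.

#include <linux/uaccess.h>

/* Hypothetical command block living in user space that itself embeds a
 * pointer to a second user-space buffer. */
struct demo_cmd {
	void __user *data;
	u32 len;
};

static int fetch_payload(struct demo_cmd __user *ucmd, void *kbuf, size_t max)
{
	void __user *udata;
	u32 len;

	/* First hop: read the embedded pointer and length from user space. */
	if (get_user(udata, &ucmd->data) || get_user(len, &ucmd->len))
		return -EFAULT;

	if (len > max)
		return -EINVAL;

	/* Second hop: copy the payload the pointer refers to. */
	return copy_from_user(kbuf, udata, len) ? -EFAULT : 0;
}
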
umc               100 drivers/staging/uwb/include/umc.h int __must_check umc_device_register(struct umc_dev *umc);
umc               101 drivers/staging/uwb/include/umc.h void umc_device_unregister(struct umc_dev *umc);
umc               121 drivers/staging/uwb/include/umc.h int umc_match_pci_id(struct umc_driver *umc_drv, struct umc_dev *umc);
umc               190 drivers/staging/uwb/include/umc.h int umc_controller_reset(struct umc_dev *umc);
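
These drivers/staging/uwb/include/umc.h lines are the public surface of the UMC (UWB Multi-interface Controller) capability bus: device registration and removal, PCI-id based matching, and controller reset. Below is a minimal sketch of a capability driver built on it, modeled on the WHCI drivers later in this listing; the include path, cap_id value and callback bodies are placeholders, and umc_driver_register()/umc_driver_unregister() are assumed from the same header.

#include <linux/module.h>
#include "umc.h"	/* drivers/staging/uwb/include/umc.h; adjust path */

static int demo_probe(struct umc_dev *umc)
{
	/* Map umc->resource, request umc->irq, stash driver state with
	 * umc_set_drvdata()... */
	return 0;
}

static void demo_remove(struct umc_dev *umc)
{
	/* Undo whatever demo_probe() set up. */
}

static struct umc_driver demo_umc_driver = {
	.name   = "demo-umc",
	.cap_id = 0x01,		/* placeholder capability ID */
	.probe  = demo_probe,
	.remove = demo_remove,
};

static int __init demo_init(void)
{
	return umc_driver_register(&demo_umc_driver);
}
module_init(demo_init);

static void __exit demo_exit(void)
{
	umc_driver_unregister(&demo_umc_driver);
}
module_exit(demo_exit);
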
umc                19 drivers/staging/uwb/umc-bus.c 		struct umc_dev *umc = to_umc_dev(dev);
umc                23 drivers/staging/uwb/umc-bus.c 			ret = umc_drv->pre_reset(umc);
umc                35 drivers/staging/uwb/umc-bus.c 		struct umc_dev *umc = to_umc_dev(dev);
umc                39 drivers/staging/uwb/umc-bus.c 			ret = umc_drv->post_reset(umc);
umc                60 drivers/staging/uwb/umc-bus.c int umc_controller_reset(struct umc_dev *umc)
umc                62 drivers/staging/uwb/umc-bus.c 	struct device *parent = umc->dev.parent;
umc                82 drivers/staging/uwb/umc-bus.c int umc_match_pci_id(struct umc_driver *umc_drv, struct umc_dev *umc)
umc                87 drivers/staging/uwb/umc-bus.c 	if (!dev_is_pci(umc->dev.parent))
umc                90 drivers/staging/uwb/umc-bus.c 	pci = to_pci_dev(umc->dev.parent);
umc               121 drivers/staging/uwb/umc-bus.c 	struct umc_dev *umc = to_umc_dev(dev);
umc               124 drivers/staging/uwb/umc-bus.c 	if (umc->cap_id == umc_driver->cap_id) {
umc               126 drivers/staging/uwb/umc-bus.c 			return umc_driver->match(umc_driver, umc);
umc               135 drivers/staging/uwb/umc-bus.c 	struct umc_dev *umc;
umc               140 drivers/staging/uwb/umc-bus.c 	umc = to_umc_dev(dev);
umc               143 drivers/staging/uwb/umc-bus.c 	err = umc_driver->probe(umc);
umc               154 drivers/staging/uwb/umc-bus.c 	struct umc_dev *umc;
umc               158 drivers/staging/uwb/umc-bus.c 	umc = to_umc_dev(dev);
umc               160 drivers/staging/uwb/umc-bus.c 	umc_driver->remove(umc);
umc               167 drivers/staging/uwb/umc-bus.c 	struct umc_dev *umc = to_umc_dev(dev);
umc               169 drivers/staging/uwb/umc-bus.c 	return sprintf(buf, "0x%02x\n", umc->cap_id);
umc               175 drivers/staging/uwb/umc-bus.c 	struct umc_dev *umc = to_umc_dev(dev);
umc               177 drivers/staging/uwb/umc-bus.c 	return sprintf(buf, "0x%04x\n", umc->version);
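
umc-bus.c also supplies the stock match helper seen above: umc_match_pci_id() accepts a device only when its parent is a PCI device matching the driver's pci_device_id table, which the driver supplies through match_data. A hedged sketch of wiring that up; the IDs are placeholders and demo_probe/demo_remove refer to the sketch above.

#include <linux/pci.h>

/* Hedged sketch: restrict a UMC capability driver to specific parent PCI
 * devices via umc_match_pci_id().  Vendor/device IDs are placeholders. */
static const struct pci_device_id demo_umc_pci_ids[] = {
	{ PCI_DEVICE(0x1234, 0x5678) },
	{ /* sentinel */ }
};

static struct umc_driver demo_pci_umc_driver = {
	.name       = "demo-pci-umc",
	.cap_id     = 0x01,			/* placeholder */
	.match      = umc_match_pci_id,
	.match_data = demo_umc_pci_ids,
	.probe      = demo_probe,
	.remove     = demo_remove,
};
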
umc                14 drivers/staging/uwb/umc-dev.c 	struct umc_dev *umc = to_umc_dev(dev);
umc                16 drivers/staging/uwb/umc-dev.c 	kfree(umc);
umc                29 drivers/staging/uwb/umc-dev.c 	struct umc_dev *umc;
umc                31 drivers/staging/uwb/umc-dev.c 	umc = kzalloc(sizeof(struct umc_dev), GFP_KERNEL);
umc                32 drivers/staging/uwb/umc-dev.c 	if (umc) {
umc                33 drivers/staging/uwb/umc-dev.c 		dev_set_name(&umc->dev, "%s-%d", dev_name(parent), n);
umc                34 drivers/staging/uwb/umc-dev.c 		umc->dev.parent  = parent;
umc                35 drivers/staging/uwb/umc-dev.c 		umc->dev.bus     = &umc_bus_type;
umc                36 drivers/staging/uwb/umc-dev.c 		umc->dev.release = umc_device_release;
umc                38 drivers/staging/uwb/umc-dev.c 		umc->dev.dma_mask = parent->dma_mask;
umc                40 drivers/staging/uwb/umc-dev.c 	return umc;
umc                51 drivers/staging/uwb/umc-dev.c int umc_device_register(struct umc_dev *umc)
umc                55 drivers/staging/uwb/umc-dev.c 	err = request_resource(umc->resource.parent, &umc->resource);
umc                57 drivers/staging/uwb/umc-dev.c 		dev_err(&umc->dev, "can't allocate resource range %pR: %d\n",
umc                58 drivers/staging/uwb/umc-dev.c 			&umc->resource, err);
umc                62 drivers/staging/uwb/umc-dev.c 	err = device_register(&umc->dev);
umc                68 drivers/staging/uwb/umc-dev.c 	put_device(&umc->dev);
umc                69 drivers/staging/uwb/umc-dev.c 	release_resource(&umc->resource);
umc                84 drivers/staging/uwb/umc-dev.c void umc_device_unregister(struct umc_dev *umc)
umc                87 drivers/staging/uwb/umc-dev.c 	if (!umc)
umc                89 drivers/staging/uwb/umc-dev.c 	dev = get_device(&umc->dev);
umc                90 drivers/staging/uwb/umc-dev.c 	device_unregister(&umc->dev);
umc                91 drivers/staging/uwb/umc-dev.c 	release_resource(&umc->resource);
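
umc-dev.c is the device side of the bus: umc_device_create() allocates and pre-initializes a child device under the given parent, the caller fills in the MMIO resource, IRQ and capability data, umc_device_register() claims the resource and registers the device, and umc_device_unregister() tears both down again (whci.c below is the in-tree caller). A condensed, hedged sketch of that sequence, with the BAR/offset handling simplified and error reporting elided.

#include <linux/pci.h>
#include <linux/slab.h>
#include "umc.h"	/* adjust path as in the sketch above */

/* Condensed sketch of the create/fill/register sequence used by whci.c
 * below; capability ID, version and BAR selection are left out. */
static struct umc_dev *demo_add_capability(struct pci_dev *pci, int n,
					   resource_size_t offset,
					   resource_size_t size)
{
	struct resource *bar_res = &pci->resource[0];	/* BAR 0 for the demo */
	struct umc_dev *umc;

	umc = umc_device_create(&pci->dev, n);
	if (!umc)
		return NULL;

	umc->resource.start  = pci_resource_start(pci, 0) + offset;
	umc->resource.end    = umc->resource.start + size - 1;
	umc->resource.name   = dev_name(&umc->dev);
	umc->resource.flags  = bar_res->flags;
	umc->resource.parent = bar_res;
	umc->irq             = pci->irq;

	if (umc_device_register(umc)) {
		kfree(umc);	/* never registered, plain kfree as in whci.c */
		return NULL;
	}

	return umc;
}
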
umc               420 drivers/staging/uwb/whc-rc.c static int whcrc_pre_reset(struct umc_dev *umc)
umc               422 drivers/staging/uwb/whc-rc.c 	struct whcrc *whcrc = umc_get_drvdata(umc);
umc               429 drivers/staging/uwb/whc-rc.c static int whcrc_post_reset(struct umc_dev *umc)
umc               431 drivers/staging/uwb/whc-rc.c 	struct whcrc *whcrc = umc_get_drvdata(umc);
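
whc-rc.c implements the reset hooks that umc-bus.c invokes above: before the bus resets the controller it calls every bound driver's pre_reset(), and post_reset() afterwards, with driver state recovered via umc_get_drvdata(). A hedged sketch of a minimal handler pair; struct demo_priv and its flag are hypothetical stand-ins for real driver state.

/* Hedged sketch of pre/post reset callbacks for a UMC capability driver. */
struct demo_priv {
	bool running;
};

static int demo_pre_reset(struct umc_dev *umc)
{
	struct demo_priv *priv = umc_get_drvdata(umc);

	priv->running = false;		/* quiesce hardware access */
	return 0;
}

static int demo_post_reset(struct umc_dev *umc)
{
	struct demo_priv *priv = umc_get_drvdata(umc);

	priv->running = true;		/* reprogram and restart */
	return 0;
}
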
umc                93 drivers/staging/uwb/whci.c 	struct umc_dev *umc;
umc                97 drivers/staging/uwb/whci.c 	umc = umc_device_create(&card->pci->dev, n);
umc                98 drivers/staging/uwb/whci.c 	if (umc == NULL)
umc               108 drivers/staging/uwb/whci.c 	umc->version         = UWBCAPDATA_TO_VERSION(capdata);
umc               109 drivers/staging/uwb/whci.c 	umc->cap_id          = n == 0 ? 0 : UWBCAPDATA_TO_CAP_ID(capdata);
umc               110 drivers/staging/uwb/whci.c 	umc->bar	     = bar;
umc               111 drivers/staging/uwb/whci.c 	umc->resource.start  = pci_resource_start(card->pci, bar)
umc               113 drivers/staging/uwb/whci.c 	umc->resource.end    = umc->resource.start
umc               115 drivers/staging/uwb/whci.c 	umc->resource.name   = dev_name(&umc->dev);
umc               116 drivers/staging/uwb/whci.c 	umc->resource.flags  = card->pci->resource[bar].flags;
umc               117 drivers/staging/uwb/whci.c 	umc->resource.parent = &card->pci->resource[bar];
umc               118 drivers/staging/uwb/whci.c 	umc->irq             = card->pci->irq;
umc               120 drivers/staging/uwb/whci.c 	err = umc_device_register(umc);
umc               123 drivers/staging/uwb/whci.c 	card->devs[n] = umc;
umc               127 drivers/staging/uwb/whci.c 	kfree(umc);
umc               133 drivers/staging/uwb/whci.c 	struct umc_dev *umc = card->devs[n];
umc               135 drivers/staging/uwb/whci.c 	umc_device_unregister(umc);
umc               141 drivers/staging/wusbcore/host/whci/asl.c 	whci_wait_for(&whc->umc->dev, whc->base + WUSBSTS,
umc               149 drivers/staging/wusbcore/host/whci/asl.c 	whci_wait_for(&whc->umc->dev, whc->base + WUSBSTS,
umc                87 drivers/staging/wusbcore/host/whci/hcd.c 	whci_wait_for(&whc->umc->dev, whc->base + WUSBSTS,
umc               118 drivers/staging/wusbcore/host/whci/hcd.c 		dev_err(&whc->umc->dev, "isochronous transfers unsupported\n");
umc               226 drivers/staging/wusbcore/host/whci/hcd.c static int whc_probe(struct umc_dev *umc)
umc               232 drivers/staging/wusbcore/host/whci/hcd.c 	struct device *dev = &umc->dev;
umc               245 drivers/staging/wusbcore/host/whci/hcd.c 	whc->umc = umc;
umc               252 drivers/staging/wusbcore/host/whci/hcd.c 	wusbhc->uwb_rc = uwb_rc_get_by_grandpa(umc->dev.parent);
umc               280 drivers/staging/wusbcore/host/whci/hcd.c 	ret = usb_add_hcd(usb_hcd, whc->umc->irq, IRQF_SHARED);
umc               311 drivers/staging/wusbcore/host/whci/hcd.c static void whc_remove(struct umc_dev *umc)
umc               313 drivers/staging/wusbcore/host/whci/hcd.c 	struct usb_hcd *usb_hcd = dev_get_drvdata(&umc->dev);
umc                51 drivers/staging/wusbcore/host/whci/hw.c 		dev_err(&whc->umc->dev, "generic command timeout (%04x/%04x)\n",
umc                91 drivers/staging/wusbcore/host/whci/hw.c 	dev_err(&whc->umc->dev, "hardware error: %s\n", reason);
umc                22 drivers/staging/wusbcore/host/whci/init.c 	whci_wait_for(&whc->umc->dev, whc->base + WUSBCMD, WUSBCMD_WHCRESET, 0,
umc                57 drivers/staging/wusbcore/host/whci/init.c 	whc->workqueue = alloc_ordered_workqueue(dev_name(&whc->umc->dev), 0);
umc                74 drivers/staging/wusbcore/host/whci/init.c 	start = whc->umc->resource.start;
umc                75 drivers/staging/wusbcore/host/whci/init.c 	len   = whc->umc->resource.end - start + 1;
umc                77 drivers/staging/wusbcore/host/whci/init.c 		dev_err(&whc->umc->dev, "can't request HC region\n");
umc                84 drivers/staging/wusbcore/host/whci/init.c 		dev_err(&whc->umc->dev, "ioremap\n");
umc                97 drivers/staging/wusbcore/host/whci/init.c 	dev_dbg(&whc->umc->dev, "N_DEVICES = %d, N_KEYS = %d, N_MMC_IES = %d\n",
umc               100 drivers/staging/wusbcore/host/whci/init.c 	whc->qset_pool = dma_pool_create("qset", &whc->umc->dev,
umc               118 drivers/staging/wusbcore/host/whci/init.c 	whc->gen_cmd_buf = dma_alloc_coherent(&whc->umc->dev, WHC_GEN_CMD_DATA_LEN,
umc               125 drivers/staging/wusbcore/host/whci/init.c 	whc->dn_buf = dma_alloc_coherent(&whc->umc->dev,
umc               134 drivers/staging/wusbcore/host/whci/init.c 	whc->di_buf = dma_alloc_coherent(&whc->umc->dev,
umc               155 drivers/staging/wusbcore/host/whci/init.c 		dma_free_coherent(&whc->umc->dev, sizeof(struct di_buf_entry) * whc->n_devices,
umc               158 drivers/staging/wusbcore/host/whci/init.c 		dma_free_coherent(&whc->umc->dev, sizeof(struct dn_buf_entry) * WHC_N_DN_ENTRIES,
umc               161 drivers/staging/wusbcore/host/whci/init.c 		dma_free_coherent(&whc->umc->dev, WHC_GEN_CMD_DATA_LEN,
umc               169 drivers/staging/wusbcore/host/whci/init.c 	len   = resource_size(&whc->umc->resource);
umc                35 drivers/staging/wusbcore/host/whci/int.c 		dev_err(&whc->umc->dev, "FIXME: host system error\n");
umc               150 drivers/staging/wusbcore/host/whci/pzl.c 	whci_wait_for(&whc->umc->dev, whc->base + WUSBSTS,
umc               162 drivers/staging/wusbcore/host/whci/pzl.c 	whci_wait_for(&whc->umc->dev, whc->base + WUSBSTS,
umc               379 drivers/staging/wusbcore/host/whci/pzl.c 	whc->pz_list = dma_alloc_coherent(&whc->umc->dev, sizeof(u64) * 16,
umc               402 drivers/staging/wusbcore/host/whci/pzl.c 		dma_free_coherent(&whc->umc->dev,  sizeof(u64) * 16, whc->pz_list,
umc               610 drivers/staging/wusbcore/host/whci/qset.c 		std->dma_addr = dma_map_single(&whc->umc->dev, std->bounce_buf, std->len,
umc               612 drivers/staging/wusbcore/host/whci/qset.c 		if (dma_mapping_error(&whc->umc->dev, std->dma_addr))
umc                23 drivers/staging/wusbcore/host/whci/whcd.h 	struct umc_dev *umc;
umc                21 drivers/staging/wusbcore/host/whci/wusb.c 	return whci_wait_for(&whc->umc->dev,
umc               148 drivers/staging/wusbcore/host/whci/wusb.c 	ret = whci_wait_for(&whc->umc->dev, whc->base + WUSBSETSECKEYCMD,
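
Throughout the WHCI host controller code above, &whc->umc->dev (the struct device embedded in the UMC child) is the device handle used for dma_alloc_coherent(), dma_pool_create(), dma_map_single() and the dev_err()/dev_dbg() diagnostics. A hedged sketch of the allocate/free pairing against that device; the helper names are illustrative and the buffer is generic rather than one of the specific buffers above.

#include <linux/dma-mapping.h>
#include "whcd.h"	/* struct whc, which carries the umc pointer */

/* Illustrative helpers: coherent DMA buffers for this host controller are
 * allocated and freed against the UMC child device, as in whci/init.c. */
static void *demo_alloc_dma_buf(struct whc *whc, size_t len, dma_addr_t *dma)
{
	return dma_alloc_coherent(&whc->umc->dev, len, dma, GFP_KERNEL);
}

static void demo_free_dma_buf(struct whc *whc, size_t len, void *buf,
			      dma_addr_t dma)
{
	dma_free_coherent(&whc->umc->dev, len, buf, dma);
}
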