mmr 670 arch/x86/include/asm/uv/uv_bau.h void (*write_l_sw_ack)(unsigned long mmr);
mmr 671 arch/x86/include/asm/uv/uv_bau.h void (*write_g_sw_ack)(int pnode, unsigned long mmr);
mmr 672 arch/x86/include/asm/uv/uv_bau.h void (*write_payload_first)(int pnode, unsigned long mmr);
mmr 673 arch/x86/include/asm/uv/uv_bau.h void (*write_payload_last)(int pnode, unsigned long mmr);
mmr 70 arch/x86/kernel/apic/x2apic_uv_x.c unsigned long val, *mmr;
mmr 72 arch/x86/kernel/apic/x2apic_uv_x.c mmr = early_ioremap(UV_LOCAL_MMR_BASE | addr, sizeof(*mmr));
mmr 73 arch/x86/kernel/apic/x2apic_uv_x.c val = *mmr;
mmr 74 arch/x86/kernel/apic/x2apic_uv_x.c early_iounmap(mmr, sizeof(*mmr));
mmr 143 arch/x86/kernel/apic/x2apic_uv_x.c u64 mmr;
mmr 150 arch/x86/kernel/apic/x2apic_uv_x.c mmr = uv_early_read_mmr(UVH_TSC_SYNC_MMR);
mmr 155 arch/x86/kernel/apic/x2apic_uv_x.c sync_state = (mmr >> mmr_shift) & UVH_TSC_SYNC_MASK;
mmr 814 arch/x86/kernel/apic/x2apic_uv_x.c union uvh_rh_gam_mmr_overlay_config_mmr_u mmr;
mmr 817 arch/x86/kernel/apic/x2apic_uv_x.c mmr.v = uv_read_local_mmr(UVH_RH_GAM_MMR_OVERLAY_CONFIG_MMR);
mmr 818 arch/x86/kernel/apic/x2apic_uv_x.c if (mmr.s.enable)
mmr 819 arch/x86/kernel/apic/x2apic_uv_x.c map_high("MMR", mmr.s.base, shift, shift, max_pnode, map_uc);
mmr 828 arch/x86/kernel/apic/x2apic_uv_x.c unsigned long mmr;
mmr 841 arch/x86/kernel/apic/x2apic_uv_x.c mmr = UVH_RH_GAM_MMIOH_REDIRECT_CONFIG0_MMR;
mmr 852 arch/x86/kernel/apic/x2apic_uv_x.c mmr = UVH_RH_GAM_MMIOH_REDIRECT_CONFIG1_MMR;
mmr 871 arch/x86/kernel/apic/x2apic_uv_x.c unsigned long m_redirect = mmr + i * 8;
mmr 921 arch/x86/kernel/apic/x2apic_uv_x.c unsigned long mmr, base;
mmr 932 arch/x86/kernel/apic/x2apic_uv_x.c mmr = UV1H_RH_GAM_MMIOH_OVERLAY_CONFIG_MMR;
mmr 934 arch/x86/kernel/apic/x2apic_uv_x.c mmioh.v = uv_read_local_mmr(mmr);
mmr 940 arch/x86/kernel/apic/x2apic_uv_x.c mmr = UV2H_RH_GAM_MMIOH_OVERLAY_CONFIG_MMR;
mmr 942 arch/x86/kernel/apic/x2apic_uv_x.c mmioh.v = uv_read_local_mmr(mmr);
mmr 231 arch/x86/platform/uv/tlb_uv.c unsigned long mmr = 0;
mmr 252 arch/x86/platform/uv/tlb_uv.c mmr = ops.read_l_sw_ack();
mmr 260 arch/x86/platform/uv/tlb_uv.c if (mmr & (msg_res << UV_SW_ACK_NPENDING)) {
mmr 394 arch/x86/platform/uv/tlb_uv.c unsigned long mmr;
mmr 403 arch/x86/platform/uv/tlb_uv.c mmr = ops.read_l_sw_ack();
mmr 406 arch/x86/platform/uv/tlb_uv.c if (mmr & msg_res) {
mmr 686 arch/x86/platform/uv/tlb_uv.c u64 mmr = bcp->status_mmr;
mmr 690 arch/x86/platform/uv/tlb_uv.c descriptor_stat = read_status(mmr, index, desc);
mmr 717 arch/x86/platform/uv/tlb_uv.c descriptor_stat = read_status(mmr, index, desc);
mmr 2248 arch/x86/platform/uv/tlb_uv.c unsigned long mmr;
mmr 2253 arch/x86/platform/uv/tlb_uv.c mmr = 1; /* should be 1 to broadcast to both sockets */
mmr 2255 arch/x86/platform/uv/tlb_uv.c write_mmr_data_broadcast(pnode, mmr);
mmr 139 drivers/infiniband/hw/mlx4/mlx4_ib.h struct mlx4_mr mmr;
mmr 68 drivers/infiniband/hw/mlx4/mr.c ~0ull, convert_access(acc), 0, 0, &mr->mmr);
mmr 72 drivers/infiniband/hw/mlx4/mr.c err = mlx4_mr_enable(to_mdev(pd->device)->dev, &mr->mmr);
mmr 76 drivers/infiniband/hw/mlx4/mr.c mr->ibmr.rkey = mr->ibmr.lkey = mr->mmr.key;
mmr 82 drivers/infiniband/hw/mlx4/mr.c (void) mlx4_mr_free(to_mdev(pd->device)->dev, &mr->mmr);
mmr 428 drivers/infiniband/hw/mlx4/mr.c convert_access(access_flags), n, shift, &mr->mmr);
mmr 432 drivers/infiniband/hw/mlx4/mr.c err = mlx4_ib_umem_write_mtt(dev, &mr->mmr.mtt, mr->umem);
mmr 436 drivers/infiniband/hw/mlx4/mr.c err = mlx4_mr_enable(dev->dev, &mr->mmr);
mmr 440 drivers/infiniband/hw/mlx4/mr.c mr->ibmr.rkey = mr->ibmr.lkey = mr->mmr.key;
mmr 448 drivers/infiniband/hw/mlx4/mr.c (void) mlx4_mr_free(to_mdev(pd->device)->dev, &mr->mmr);
mmr 465 drivers/infiniband/hw/mlx4/mr.c struct mlx4_ib_mr *mmr = to_mmr(mr);
mmr 474 drivers/infiniband/hw/mlx4/mr.c err = mlx4_mr_hw_get_mpt(dev->dev, &mmr->mmr, &pmpt_entry);
mmr 489 drivers/infiniband/hw/mlx4/mr.c !mmr->umem->writable) {
mmr 505 drivers/infiniband/hw/mlx4/mr.c mlx4_mr_rereg_mem_cleanup(dev->dev, &mmr->mmr);
mmr 506 drivers/infiniband/hw/mlx4/mr.c ib_umem_release(mmr->umem);
mmr 507 drivers/infiniband/hw/mlx4/mr.c mmr->umem = mlx4_get_umem_mr(udata, start, length,
mmr 509 drivers/infiniband/hw/mlx4/mr.c if (IS_ERR(mmr->umem)) {
mmr 510 drivers/infiniband/hw/mlx4/mr.c err = PTR_ERR(mmr->umem);
mmr 512 drivers/infiniband/hw/mlx4/mr.c mmr->umem = NULL;
mmr 515 drivers/infiniband/hw/mlx4/mr.c n = ib_umem_page_count(mmr->umem);
mmr 518 drivers/infiniband/hw/mlx4/mr.c err = mlx4_mr_rereg_mem_write(dev->dev, &mmr->mmr,
mmr 522 drivers/infiniband/hw/mlx4/mr.c ib_umem_release(mmr->umem);
mmr 525 drivers/infiniband/hw/mlx4/mr.c mmr->mmr.iova = virt_addr;
mmr 526 drivers/infiniband/hw/mlx4/mr.c mmr->mmr.size = length;
mmr 528 drivers/infiniband/hw/mlx4/mr.c err = mlx4_ib_umem_write_mtt(dev, &mmr->mmr.mtt, mmr->umem);
mmr 530 drivers/infiniband/hw/mlx4/mr.c mlx4_mr_rereg_mem_cleanup(dev->dev, &mmr->mmr);
mmr 531 drivers/infiniband/hw/mlx4/mr.c ib_umem_release(mmr->umem);
mmr 539 drivers/infiniband/hw/mlx4/mr.c err = mlx4_mr_hw_write_mpt(dev->dev, &mmr->mmr, pmpt_entry);
mmr 541 drivers/infiniband/hw/mlx4/mr.c mmr->mmr.access = mr_access_flags;
mmr 604 drivers/infiniband/hw/mlx4/mr.c ret = mlx4_mr_free(to_mdev(ibmr->device)->dev, &mr->mmr);
mmr 673 drivers/infiniband/hw/mlx4/mr.c max_num_sg, 0, &mr->mmr);
mmr 682 drivers/infiniband/hw/mlx4/mr.c err = mlx4_mr_enable(dev->dev, &mr->mmr);
mmr 686 drivers/infiniband/hw/mlx4/mr.c mr->ibmr.rkey = mr->ibmr.lkey = mr->mmr.key;
mmr 695 drivers/infiniband/hw/mlx4/mr.c (void) mlx4_mr_free(dev->dev, &mr->mmr);
mmr 1630 drivers/infiniband/hw/mlx5/mr.c struct mlx5_ib_mr *mmr = to_mmr(ibmr);
mmr 1633 drivers/infiniband/hw/mlx5/mr.c dereg_mr(to_mdev(mmr->mtt_mr->ibmr.device), mmr->mtt_mr);
mmr 1634 drivers/infiniband/hw/mlx5/mr.c dereg_mr(to_mdev(mmr->klm_mr->ibmr.device), mmr->klm_mr);
mmr 1637 drivers/infiniband/hw/mlx5/mr.c dereg_mr(to_mdev(ibmr->device), mmr);
mmr 1989 drivers/infiniband/hw/mlx5/mr.c struct mlx5_ib_mr *mmr = to_mmr(ibmr);
mmr 2000 drivers/infiniband/hw/mlx5/mr.c if (!mmr->sig) {
mmr 2006 drivers/infiniband/hw/mlx5/mr.c mmr->sig->sig_status_checked = true;
mmr 2007 drivers/infiniband/hw/mlx5/mr.c if (!mmr->sig->sig_err_exists)
mmr 2010 drivers/infiniband/hw/mlx5/mr.c if (ibmr->lkey == mmr->sig->err_item.key)
mmr 2011 drivers/infiniband/hw/mlx5/mr.c memcpy(&mr_status->sig_err, &mmr->sig->err_item,
mmr 2016 drivers/infiniband/hw/mlx5/mr.c mr_status->sig_err.key = mmr->sig->err_item.key;
mmr 2019 drivers/infiniband/hw/mlx5/mr.c mmr->sig->sig_err_exists = false;
mmr 953 drivers/infiniband/hw/mthca/mthca_provider.c struct mthca_mr *mmr = to_mmr(mr);
mmr 955 drivers/infiniband/hw/mthca/mthca_provider.c mthca_free_mr(to_mdev(mr->device), mmr);
mmr 956 drivers/infiniband/hw/mthca/mthca_provider.c ib_umem_release(mmr->umem);
mmr 957 drivers/infiniband/hw/mthca/mthca_provider.c kfree(mmr);
mmr 143 drivers/infiniband/hw/vmw_pvrdma/pvrdma.h struct pvrdma_mr mmr;
mmr 93 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c mr->mmr.mr_handle = resp->mr_handle;
mmr 150 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c mr->mmr.iova = virt_addr;
mmr 151 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c mr->mmr.size = length;
mmr 181 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c mr->mmr.mr_handle = resp->mr_handle;
mmr 253 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c mr->mmr.mr_handle = resp->mr_handle;
mmr 286 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c cmd->mr_handle = mr->mmr.mr_handle;
mmr 312 drivers/misc/sgi-gru/grufile.c unsigned long mmr = 0;
mmr 326 drivers/misc/sgi-gru/grufile.c mmr = UVH_GR0_TLB_INT0_CONFIG +
mmr 329 drivers/misc/sgi-gru/grufile.c mmr = UVH_GR1_TLB_INT0_CONFIG +
mmr 336 drivers/misc/sgi-gru/grufile.c return mmr;
mmr 358 drivers/misc/sgi-gru/grufile.c unsigned long mmr;
mmr 362 drivers/misc/sgi-gru/grufile.c mmr = gru_chiplet_cpu_to_mmr(chiplet, cpu, &core);
mmr 363 drivers/misc/sgi-gru/grufile.c if (mmr == 0)
mmr 389 drivers/misc/sgi-gru/grufile.c unsigned long mmr;
mmr 395 drivers/misc/sgi-gru/grufile.c mmr = gru_chiplet_cpu_to_mmr(chiplet, cpu, &core);
mmr 396 drivers/misc/sgi-gru/grufile.c if (mmr == 0)
mmr 408 drivers/misc/sgi-gru/grufile.c unsigned long mmr;
mmr 412 drivers/misc/sgi-gru/grufile.c mmr = gru_chiplet_cpu_to_mmr(chiplet, cpu, &core);
mmr 413 drivers/misc/sgi-gru/grufile.c if (mmr == 0)
mmr 416 drivers/misc/sgi-gru/grufile.c irq = uv_setup_irq(irq_name, cpu, blade, mmr, UV_AFFINITY_CPU);
mmr 437 drivers/misc/sgi-gru/grufile.c unsigned long mmr;
mmr 439 drivers/misc/sgi-gru/grufile.c mmr = gru_chiplet_cpu_to_mmr(chiplet, cpu, &core);
mmr 440 drivers/misc/sgi-gru/grufile.c if (mmr) {
mmr 293 drivers/net/ethernet/mellanox/mlx4/mr.c int mlx4_mr_hw_get_mpt(struct mlx4_dev *dev, struct mlx4_mr *mmr,
mmr 297 drivers/net/ethernet/mellanox/mlx4/mr.c int key = key_to_hw_index(mmr->key) & (dev->caps.num_mpts - 1);
mmr 300 drivers/net/ethernet/mellanox/mlx4/mr.c if (mmr->enabled != MLX4_MPT_EN_HW)
mmr 310 drivers/net/ethernet/mellanox/mlx4/mr.c mmr->enabled = MLX4_MPT_EN_SW;
mmr 344 drivers/net/ethernet/mellanox/mlx4/mr.c int mlx4_mr_hw_write_mpt(struct mlx4_dev *dev, struct mlx4_mr *mmr,
mmr 360 drivers/net/ethernet/mellanox/mlx4/mr.c int key = key_to_hw_index(mmr->key) & (dev->caps.num_mpts - 1);
mmr 371 drivers/net/ethernet/mellanox/mlx4/mr.c mmr->pd = be32_to_cpu((*mpt_entry)->pd_flags) & MLX4_MPT_PD_MASK;
mmr 372 drivers/net/ethernet/mellanox/mlx4/mr.c mmr->enabled = MLX4_MPT_EN_HW;
mmr 46 drivers/video/fbdev/mb862xx/mb862xxfb.h unsigned long mmr; /* memory mode for SDRAM */
mmr 622 drivers/video/fbdev/mb862xx/mb862xxfbdrv.c unsigned long ccf, mmr;
mmr 648 drivers/video/fbdev/mb862xx/mb862xxfbdrv.c mmr = par->gc_mode ? par->gc_mode->mmr : 0x414fb7f2;
mmr 657 drivers/video/fbdev/mb862xx/mb862xxfbdrv.c outreg(host, GC_MMR, mmr);
mmr 1525 include/linux/mlx4/device.h int mlx4_mr_hw_get_mpt(struct mlx4_dev *dev, struct mlx4_mr *mmr,
mmr 1527 include/linux/mlx4/device.h int mlx4_mr_hw_write_mpt(struct mlx4_dev *dev, struct mlx4_mr *mmr,
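
Note: the x2apic_uv_x.c hits at lines 70-74 (with the caller at line 150) all come from the early UV MMR read helper. A minimal sketch reassembled from those fragments is shown below; the exact signature and __init placement are assumptions, not confirmed by the listing.

/*
 * Sketch of the early UV MMR read pattern: temporarily map the local MMR
 * space with early_ioremap(), read one register, and unmap again.
 */
static unsigned long __init uv_early_read_mmr(unsigned long addr)
{
	unsigned long val, *mmr;

	mmr = early_ioremap(UV_LOCAL_MMR_BASE | addr, sizeof(*mmr));
	val = *mmr;
	early_iounmap(mmr, sizeof(*mmr));

	return val;
}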
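
Note: the mlx4/mr.c hits at lines 68-82 revolve around the embedded mlx4_mr field (mr->mmr): allocate the hardware MR, enable it, publish its key as lkey/rkey, and free it on the error path. The sketch below illustrates that sequence only; the surrounding allocation, the function name example_get_dma_mr, and the exact mlx4_mr_alloc arguments are assumptions for illustration.

/*
 * Illustrative alloc/enable/publish/unwind sequence around mr->mmr,
 * pieced together from the fragments above (not verbatim driver code).
 */
static struct ib_mr *example_get_dma_mr(struct ib_pd *pd, int acc)
{
	struct mlx4_ib_dev *dev = to_mdev(pd->device);
	struct mlx4_ib_mr *mr;
	int err;

	mr = kzalloc(sizeof(*mr), GFP_KERNEL);
	if (!mr)
		return ERR_PTR(-ENOMEM);

	err = mlx4_mr_alloc(dev->dev, to_mpd(pd)->pdn, 0, ~0ull,
			    convert_access(acc), 0, 0, &mr->mmr);
	if (err)
		goto err_free;

	err = mlx4_mr_enable(dev->dev, &mr->mmr);
	if (err)
		goto err_mr;

	/* The hardware key serves as both local and remote key. */
	mr->ibmr.rkey = mr->ibmr.lkey = mr->mmr.key;

	return &mr->ibmr;

err_mr:
	(void) mlx4_mr_free(dev->dev, &mr->mmr);
err_free:
	kfree(mr);
	return ERR_PTR(err);
}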