Searched refs:vp_id (Results 1 - 12 of 12) sorted by relevance

/linux-4.4.14/arch/arm/mach-omap2/
prm.h
153 u32 (*vp_check_txdone)(u8 vp_id);
154 void (*vp_clear_txdone)(u8 vp_id);
181 u32 omap_prm_vp_check_txdone(u8 vp_id);
182 void omap_prm_vp_clear_txdone(u8 vp_id);
vp.h
34 u32 (*check_txdone)(u8 vp_id);
35 void (*clear_txdone)(u8 vp_id);
prm3xxx.c
100 static u32 omap3_prm_vp_check_txdone(u8 vp_id) omap3_prm_vp_check_txdone() argument
102 struct omap3_vp *vp = &omap3_vp[vp_id]; omap3_prm_vp_check_txdone()
110 static void omap3_prm_vp_clear_txdone(u8 vp_id) omap3_prm_vp_clear_txdone() argument
112 struct omap3_vp *vp = &omap3_vp[vp_id]; omap3_prm_vp_clear_txdone()
prm44xx.c
143 static u32 omap4_prm_vp_check_txdone(u8 vp_id) omap4_prm_vp_check_txdone() argument
145 struct omap4_vp *vp = &omap4_vp[vp_id]; omap4_prm_vp_check_txdone()
154 static void omap4_prm_vp_clear_txdone(u8 vp_id) omap4_prm_vp_clear_txdone() argument
156 struct omap4_vp *vp = &omap4_vp[vp_id]; omap4_prm_vp_clear_txdone()
prm_common.c
567 u32 omap_prm_vp_check_txdone(u8 vp_id) omap_prm_vp_check_txdone() argument
575 return prm_ll_data->vp_check_txdone(vp_id); omap_prm_vp_check_txdone()
584 void omap_prm_vp_clear_txdone(u8 vp_id) omap_prm_vp_clear_txdone() argument
592 prm_ll_data->vp_clear_txdone(vp_id); omap_prm_vp_clear_txdone()
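
The OMAP hits above show a dispatch pattern: the generic entry points in prm_common.c forward to SoC-specific handlers (prm3xxx.c, prm44xx.c) through the vp_check_txdone/vp_clear_txdone function pointers declared in prm.h. Below is a minimal standalone C sketch of that indirection; the names are hypothetical and a fake status array stands in for the real PRM register access.

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical ops table mirroring the vp_check_txdone / vp_clear_txdone
     * pointers declared in prm.h. */
    struct prm_ll_ops {
        uint32_t (*vp_check_txdone)(uint8_t vp_id);
        void (*vp_clear_txdone)(uint8_t vp_id);
    };

    /* Fake per-VP TX-done flags standing in for the real PRM register reads. */
    static uint32_t fake_txdone[2] = { 1, 0 };

    /* SoC-specific handlers (the prm3xxx.c / prm44xx.c role). */
    static uint32_t soc_vp_check_txdone(uint8_t vp_id)
    {
        return fake_txdone[vp_id];
    }

    static void soc_vp_clear_txdone(uint8_t vp_id)
    {
        fake_txdone[vp_id] = 0;
    }

    /* Table the SoC layer would register at init time. */
    static const struct prm_ll_ops soc_prm_ops = {
        .vp_check_txdone = soc_vp_check_txdone,
        .vp_clear_txdone = soc_vp_clear_txdone,
    };
    static const struct prm_ll_ops *prm_ll_data = &soc_prm_ops;

    /* Generic entry points (the prm_common.c role) just dispatch through it. */
    static uint32_t prm_vp_check_txdone(uint8_t vp_id)
    {
        return prm_ll_data->vp_check_txdone(vp_id);
    }

    static void prm_vp_clear_txdone(uint8_t vp_id)
    {
        prm_ll_data->vp_clear_txdone(vp_id);
    }

    int main(void)
    {
        if (prm_vp_check_txdone(0))
            prm_vp_clear_txdone(0);
        printf("vp 0 txdone is now %u\n", prm_vp_check_txdone(0));
        return 0;
    }
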
/linux-4.4.14/drivers/scsi/qla2xxx/
qla_mid.c
32 uint32_t vp_id; qla24xx_allocate_vp_id() local
36 /* Find an empty slot and assign a vp_id */ qla24xx_allocate_vp_id()
38 vp_id = find_first_zero_bit(ha->vp_idx_map, ha->max_npiv_vports + 1); qla24xx_allocate_vp_id()
39 if (vp_id > ha->max_npiv_vports) { qla24xx_allocate_vp_id()
41 "vp_id %d is bigger than max-supported %d.\n", qla24xx_allocate_vp_id()
42 vp_id, ha->max_npiv_vports); qla24xx_allocate_vp_id()
44 return vp_id; qla24xx_allocate_vp_id()
47 set_bit(vp_id, ha->vp_idx_map); qla24xx_allocate_vp_id()
49 vha->vp_idx = vp_id; qla24xx_allocate_vp_id()
59 return vp_id; qla24xx_allocate_vp_id()
65 uint16_t vp_id; qla24xx_deallocate_vp_id() local
89 vp_id = vha->vp_idx; qla24xx_deallocate_vp_id()
91 clear_bit(vp_id, ha->vp_idx_map); qla24xx_deallocate_vp_id()
468 "Couldn't allocate vp_id.\n"); qla24xx_create_vhost()
qla_def.h
2598 int vp_id; member in struct:vport_info
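
The qla_mid.c hits sketch an ID allocator built on a bitmap: find_first_zero_bit() picks the lowest free slot, set_bit() claims it, and clear_bit() releases it on teardown. Here is a userspace sketch of the same idea, using plain shift-and-mask arithmetic instead of the kernel bit helpers; the names and the MAX_VPORTS limit are hypothetical stand-ins.

    #include <stdint.h>
    #include <stdio.h>

    #define MAX_VPORTS 8 /* hypothetical stand-in for ha->max_npiv_vports */

    static uint32_t vp_idx_map; /* bit n set => vp_id n is in use */

    /* Return the lowest free vp_id, or -1 if every slot up to MAX_VPORTS is
     * taken (the driver logs "vp_id %d is bigger than max-supported" then). */
    static int allocate_vp_id(void)
    {
        for (int vp_id = 0; vp_id <= MAX_VPORTS; vp_id++) {
            if (!(vp_idx_map & (1u << vp_id))) {
                vp_idx_map |= 1u << vp_id; /* set_bit() equivalent */
                return vp_id;
            }
        }
        return -1;
    }

    static void deallocate_vp_id(int vp_id)
    {
        vp_idx_map &= ~(1u << vp_id); /* clear_bit() equivalent */
    }

    int main(void)
    {
        int a = allocate_vp_id();
        int b = allocate_vp_id();
        printf("allocated %d and %d\n", a, b);
        deallocate_vp_id(a);
        printf("after free, next is %d\n", allocate_vp_id());
        return 0;
    }
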
/linux-4.4.14/drivers/net/ethernet/neterion/vxge/
vxge-config.c
47 int vxge_hw_vpath_wait_receive_idle(struct __vxge_hw_device *hldev, u32 vp_id) vxge_hw_vpath_wait_receive_idle() argument
54 vpath = &hldev->virtual_paths[vp_id]; vxge_hw_vpath_wait_receive_idle()
2192 u32 vp_id; __vxge_hw_channel_allocate() local
2195 vp_id = vph->vpath->vp_id; __vxge_hw_channel_allocate()
2221 channel->vp_id = vp_id; __vxge_hw_channel_allocate()
2814 u32 vp_id; __vxge_hw_ring_create() local
2825 vp_id = vp->vpath->vp_id; __vxge_hw_ring_create()
2827 config = &hldev->config.vp_config[vp_id].ring; __vxge_hw_ring_create()
2843 ring->vp_id = vp_id; __vxge_hw_ring_create()
2932 device_config->vp_config[i].vp_id = i; vxge_hw_device_config_default_get()
3468 config = &vpath->hldev->config.vp_config[vpath->vp_id].fifo; __vxge_hw_fifo_create()
3487 fifo->vp_id = vpath->vp_id; __vxge_hw_fifo_create()
4023 1 << (16 - vpath->vp_id)), __vxge_hw_vpath_reset_check()
4034 __vxge_hw_vpath_reset(struct __vxge_hw_device *hldev, u32 vp_id) __vxge_hw_vpath_reset() argument
4038 val64 = VXGE_HW_CMN_RSTHDLR_CFG0_SW_RESET_VPATH(1 << (16 - vp_id)); __vxge_hw_vpath_reset()
4051 __vxge_hw_vpath_sw_reset(struct __vxge_hw_device *hldev, u32 vp_id) __vxge_hw_vpath_sw_reset() argument
4056 vpath = &hldev->virtual_paths[vp_id]; __vxge_hw_vpath_sw_reset()
4076 __vxge_hw_vpath_prc_configure(struct __vxge_hw_device *hldev, u32 vp_id) __vxge_hw_vpath_prc_configure() argument
4083 vpath = &hldev->virtual_paths[vp_id]; __vxge_hw_vpath_prc_configure()
4148 __vxge_hw_vpath_kdfc_configure(struct __vxge_hw_device *hldev, u32 vp_id) __vxge_hw_vpath_kdfc_configure() argument
4156 vpath = &hldev->virtual_paths[vp_id]; __vxge_hw_vpath_kdfc_configure()
4209 (hldev->kdfc + (vp_id * __vxge_hw_vpath_kdfc_configure()
4221 __vxge_hw_vpath_mac_configure(struct __vxge_hw_device *hldev, u32 vp_id) __vxge_hw_vpath_mac_configure() argument
4228 vpath = &hldev->virtual_paths[vp_id]; __vxge_hw_vpath_mac_configure()
4288 __vxge_hw_vpath_tim_configure(struct __vxge_hw_device *hldev, u32 vp_id) __vxge_hw_vpath_tim_configure() argument
4295 vpath = &hldev->virtual_paths[vp_id]; __vxge_hw_vpath_tim_configure()
4306 (vp_id * VXGE_HW_MAX_INTR_PER_VP) + __vxge_hw_vpath_tim_configure()
4406 val64 |= VXGE_HW_TIM_CFG3_INT_NUM_UTIL_SEL(vp_id); __vxge_hw_vpath_tim_configure()
4513 val64 |= VXGE_HW_TIM_CFG3_INT_NUM_UTIL_SEL(vp_id); __vxge_hw_vpath_tim_configure()
4549 __vxge_hw_vpath_initialize(struct __vxge_hw_device *hldev, u32 vp_id) __vxge_hw_vpath_initialize() argument
4557 vpath = &hldev->virtual_paths[vp_id]; __vxge_hw_vpath_initialize()
4559 if (!(hldev->vpath_assignments & vxge_mBIT(vp_id))) { __vxge_hw_vpath_initialize()
4569 status = __vxge_hw_vpath_mac_configure(hldev, vp_id); __vxge_hw_vpath_initialize()
4573 status = __vxge_hw_vpath_kdfc_configure(hldev, vp_id); __vxge_hw_vpath_initialize()
4577 status = __vxge_hw_vpath_tim_configure(hldev, vp_id); __vxge_hw_vpath_initialize()
4611 static void __vxge_hw_vp_terminate(struct __vxge_hw_device *hldev, u32 vp_id) __vxge_hw_vp_terminate() argument
4615 vpath = &hldev->virtual_paths[vp_id]; __vxge_hw_vp_terminate()
4621 vpath->hldev->tim_int_mask1, vpath->vp_id); __vxge_hw_vp_terminate()
4622 hldev->stats.hw_dev_info_stats.vpath_info[vpath->vp_id] = NULL; __vxge_hw_vp_terminate()
4655 __vxge_hw_vp_initialize(struct __vxge_hw_device *hldev, u32 vp_id, __vxge_hw_vp_initialize() argument
4661 if (!(hldev->vpath_assignments & vxge_mBIT(vp_id))) { __vxge_hw_vp_initialize()
4666 vpath = &hldev->virtual_paths[vp_id]; __vxge_hw_vp_initialize()
4669 vpath->vp_id = vp_id; __vxge_hw_vp_initialize()
4673 vpath->vp_reg = hldev->vpath_reg[vp_id]; __vxge_hw_vp_initialize()
4674 vpath->vpmgmt_reg = hldev->vpmgmt_reg[vp_id]; __vxge_hw_vp_initialize()
4676 __vxge_hw_vpath_reset(hldev, vp_id); __vxge_hw_vp_initialize()
4692 vpath->sw_stats = &hldev->stats.sw_dev_info_stats.vpath_info[vp_id]; __vxge_hw_vp_initialize()
4695 hldev->tim_int_mask1, vp_id); __vxge_hw_vp_initialize()
4697 status = __vxge_hw_vpath_initialize(hldev, vp_id); __vxge_hw_vp_initialize()
4699 __vxge_hw_vp_terminate(hldev, vp_id); __vxge_hw_vp_initialize()
4813 vpath = &hldev->virtual_paths[attr->vp_id]; vxge_hw_vpath_open()
4820 status = __vxge_hw_vp_initialize(hldev, attr->vp_id, vxge_hw_vpath_open()
4821 &hldev->config.vp_config[attr->vp_id]); vxge_hw_vpath_open()
4844 __vxge_hw_vpath_prc_configure(hldev, attr->vp_id); vxge_hw_vpath_open()
4848 (attr->vp_id * VXGE_HW_MAX_INTR_PER_VP) + vxge_hw_vpath_open()
4862 hldev->stats.hw_dev_info_stats.vpath_info[attr->vp_id] = vxge_hw_vpath_open()
4866 &hldev->stats.hw_dev_info_stats.vpath_info_sav[attr->vp_id]; vxge_hw_vpath_open()
4878 hldev->vpaths_deployed |= vxge_mBIT(vpath->vp_id); vxge_hw_vpath_open()
4896 __vxge_hw_vp_terminate(hldev, attr->vp_id); vxge_hw_vpath_open()
4977 u32 vp_id = vp->vpath->vp_id; vxge_hw_vpath_close() local
5001 devh->vpaths_deployed &= ~vxge_mBIT(vp_id); vxge_hw_vpath_close()
5014 __vxge_hw_vp_terminate(devh, vp_id); vxge_hw_vpath_close()
5027 u32 vp_id; vxge_hw_vpath_reset() local
5030 vp_id = vpath->vp_id; vxge_hw_vpath_reset()
5037 status = __vxge_hw_vpath_reset(vpath->hldev, vp_id); vxge_hw_vpath_reset()
5055 u32 vp_id; vxge_hw_vpath_recover_from_reset() local
5057 vp_id = vp->vpath->vp_id; vxge_hw_vpath_recover_from_reset()
5070 status = __vxge_hw_vpath_sw_reset(hldev, vp_id); vxge_hw_vpath_recover_from_reset()
5074 status = __vxge_hw_vpath_initialize(hldev, vp_id); vxge_hw_vpath_recover_from_reset()
5079 __vxge_hw_vpath_prc_configure(hldev, vp_id); vxge_hw_vpath_recover_from_reset()
5110 1 << (16 - vp->vpath->vp_id)); vxge_hw_vpath_enable()
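
In vxge-config.c, __vxge_hw_vp_initialize() checks the vpath_assignments bitmask, resets the vpath, and then __vxge_hw_vpath_initialize() walks the MAC, KDFC and TIM configure steps in order, with __vxge_hw_vp_terminate() undoing the setup on failure. Below is a condensed sketch of that staged bring-up with cleanup-on-failure; the names are hypothetical and the configure stages are stubbed out.

    #include <stdint.h>
    #include <stdio.h>

    enum status { STATUS_OK = 0, STATUS_FAIL = -1 };

    struct vpath {
        uint32_t vp_id;
        int configured;
    };

    /* Stubbed stages standing in for the mac/kdfc/tim configure helpers. */
    static enum status configure_mac(struct vpath *v)  { (void)v; return STATUS_OK; }
    static enum status configure_kdfc(struct vpath *v) { (void)v; return STATUS_OK; }
    static enum status configure_tim(struct vpath *v)  { (void)v; return STATUS_OK; }

    static void vpath_terminate(struct vpath *v)
    {
        v->configured = 0;
        printf("vpath %u terminated\n", v->vp_id);
    }

    /* Bring up one virtual path: skip it unless it is assigned to this device,
     * then run each stage in order and tear down on the first failure. */
    static enum status vpath_initialize(struct vpath *v, uint32_t assignments)
    {
        enum status status;

        if (!(assignments & (1u << v->vp_id)))
            return STATUS_FAIL; /* vpath not available to this device */

        status = configure_mac(v);
        if (status != STATUS_OK)
            goto fail;
        status = configure_kdfc(v);
        if (status != STATUS_OK)
            goto fail;
        status = configure_tim(v);
        if (status != STATUS_OK)
            goto fail;

        v->configured = 1;
        return STATUS_OK;

    fail:
        vpath_terminate(v);
        return status;
    }

    int main(void)
    {
        struct vpath v = { .vp_id = 3 };
        printf("init: %d\n", vpath_initialize(&v, 1u << 3));
        return 0;
    }
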
vxge-main.c
1354 * @vp_id: vpath for which to enable the interrupts
1358 static void vxge_vpath_intr_enable(struct vxgedev *vdev, int vp_id) vxge_vpath_intr_enable() argument
1360 struct vxge_vpath *vpath = &vdev->vpaths[vp_id]; vxge_vpath_intr_enable()
1387 * @vp_id: vpath for which to disable the interrupts
1391 static void vxge_vpath_intr_disable(struct vxgedev *vdev, int vp_id) vxge_vpath_intr_disable() argument
1393 struct vxge_vpath *vpath = &vdev->vpaths[vp_id]; vxge_vpath_intr_disable()
1505 * @vp_id: vpath to reset
1509 static int vxge_reset_vpath(struct vxgedev *vdev, int vp_id) vxge_reset_vpath() argument
1512 struct vxge_vpath *vpath = &vdev->vpaths[vp_id]; vxge_reset_vpath()
1530 "failed for vpath:%d", vp_id); vxge_reset_vpath()
1536 "vpath:%d", vp_id); vxge_reset_vpath()
1558 vxge_vpath_intr_enable(vdev, vp_id); vxge_reset_vpath()
1570 clear_bit(vp_id, &vdev->vp_reset); vxge_reset_vpath()
1612 int ret = 0, vp_id, i; do_vxge_reset() local
1630 for (vp_id = 0; vp_id < vdev->no_of_vpath; vp_id++) { do_vxge_reset()
1631 while (test_bit(vp_id, &vdev->vp_reset)) do_vxge_reset()
1748 for (vp_id = 0; vp_id < vdev->no_of_vpath; vp_id++) { do_vxge_reset()
1749 vxge_restore_vpath_mac_addr(&vdev->vpaths[vp_id]); do_vxge_reset()
1750 vxge_restore_vpath_vid_table(&vdev->vpaths[vp_id]); do_vxge_reset()
2030 u32 vp_id = 0; vxge_open_vpaths() local
2052 attr.vp_id = vpath->device_id; vxge_open_vpaths()
2106 vp_id = vpath->handle->vpath->vp_id; vxge_open_vpaths()
2107 vdev->vpaths_deployed |= vxge_mBIT(vp_id); vxge_open_vpaths()
2273 int msix_id = (vpath->handle->vpath->vp_id * vxge_alarm_msix_handle()
2572 msix_idx = (vdev->vpaths[0].handle->vpath->vp_id * vxge_add_isr()
3330 int vp_id; vxge_vlan_rx_add_vid() local
3333 for (vp_id = 0; vp_id < vdev->no_of_vpath; vp_id++) { vxge_vlan_rx_add_vid()
3334 vpath = &vdev->vpaths[vp_id]; vxge_vlan_rx_add_vid()
3356 int vp_id; vxge_vlan_rx_kill_vid() local
3361 for (vp_id = 0; vp_id < vdev->no_of_vpath; vp_id++) { vxge_vlan_rx_kill_vid()
3362 vpath = &vdev->vpaths[vp_id]; vxge_vlan_rx_kill_vid()
3566 enum vxge_hw_event type, u64 vp_id) vxge_callback_crit_err()
3583 if (vpath->device_id == vp_id) vxge_callback_crit_err()
3742 device_config->vp_config[i].vp_id = i; vxge_config_vpaths()
3565 vxge_callback_crit_err(struct __vxge_hw_device *hldev, enum vxge_hw_event type, u64 vp_id) vxge_callback_crit_err() argument
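
vxge-main.c tracks in-flight per-vpath resets in the vp_reset bitmask: vxge_reset_vpath() clears its vpath's bit when it finishes, and do_vxge_reset() loops with test_bit() until every vpath's bit drops before carrying on with the full device reset. A minimal sketch of that wait-then-proceed pattern follows; the names and the plain shift-and-mask helpers are stand-ins for the kernel's test_bit/set_bit/clear_bit, and the busy-wait replaces whatever blocking the driver actually does between polls.

    #include <stdio.h>

    #define NO_OF_VPATH 4 /* hypothetical vdev->no_of_vpath */

    static unsigned long vp_reset; /* bit n set => vpath n reset in progress */

    static int  test_bit_ul(int nr, const unsigned long *map) { return (*map >> nr) & 1; }
    static void set_bit_ul(int nr, unsigned long *map)        { *map |= 1ul << nr; }
    static void clear_bit_ul(int nr, unsigned long *map)      { *map &= ~(1ul << nr); }

    /* Per-vpath reset: do the work, then drop the "reset in progress" bit,
     * mirroring the clear_bit(vp_id, &vdev->vp_reset) at the end of
     * vxge_reset_vpath(). */
    static void reset_one_vpath(int vp_id)
    {
        set_bit_ul(vp_id, &vp_reset);
        printf("resetting vpath %d\n", vp_id);
        clear_bit_ul(vp_id, &vp_reset);
    }

    /* Full reset: wait for every per-vpath reset to finish first, as
     * do_vxge_reset() does with its test_bit() loop. */
    static void do_full_reset(void)
    {
        for (int vp_id = 0; vp_id < NO_OF_VPATH; vp_id++)
            while (test_bit_ul(vp_id, &vp_reset))
                ; /* the driver would sleep/yield here rather than spin */
        printf("all vpaths idle, full reset can proceed\n");
    }

    int main(void)
    {
        reset_one_vpath(2);
        do_full_reset();
        return 0;
    }
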
vxge-traffic.c
131 if (vpath->hldev->first_vp_id != vpath->vp_id) vxge_hw_vpath_intr_enable()
181 val64 = VXGE_HW_TIM_CLR_INT_EN_VP(1 << (16 - vpath->vp_id)); vxge_hw_vpath_intr_disable()
497 * @vp_id: Vpath Id
503 __vxge_hw_device_handle_error(struct __vxge_hw_device *hldev, u32 vp_id, __vxge_hw_device_handle_error() argument
537 type, vp_id); __vxge_hw_device_handle_error()
860 __vxge_hw_device_handle_error(hldev, vpath->vp_id, alarm_event); __vxge_hw_vpath_alarm_process()
2219 u32 vp_id = vp->vpath->vp_id; vxge_hw_vpath_msix_set() local
2222 (vp_id * 4) + tim_msix_id[0]) | vxge_hw_vpath_msix_set()
2224 (vp_id * 4) + tim_msix_id[1]); vxge_hw_vpath_msix_set()
2330 tim_int_mask1, vp->vpath->vp_id); vxge_hw_vpath_inta_mask_tx_rx()
2368 tim_int_mask1, vp->vpath->vp_id); vxge_hw_vpath_inta_unmask_tx_rx()
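
Two small encodings recur in the vxge hits: per-vpath control bits are written as 1 << (16 - vp_id), so vpath 0 maps to bit 16, vpath 1 to bit 15, and so on, and MSI-X vector numbers are derived as (vp_id * 4) + tim_msix_id[n] in vxge_hw_vpath_msix_set(). A tiny sketch of the arithmetic, with hypothetical helper names:

    #include <stdint.h>
    #include <stdio.h>

    /* Per-vpath bit as used with the reset and TIM interrupt registers:
     * vpath 0 -> bit 16, vpath 1 -> bit 15, ... */
    static uint64_t vpath_bit(uint32_t vp_id)
    {
        return 1ull << (16 - vp_id);
    }

    /* MSI-X vector for one of a vpath's TIM interrupts, following the
     * (vp_id * 4) + tim_msix_id[n] expression seen above. */
    static uint32_t vpath_msix_vector(uint32_t vp_id, uint32_t tim_msix_id)
    {
        return vp_id * 4 + tim_msix_id;
    }

    int main(void)
    {
        for (uint32_t vp_id = 0; vp_id < 3; vp_id++)
            printf("vpath %u: bit 0x%llx, vec[0] %u, vec[1] %u\n",
                   vp_id,
                   (unsigned long long)vpath_bit(vp_id),
                   vpath_msix_vector(vp_id, 0),
                   vpath_msix_vector(vp_id, 1));
        return 0;
    }
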
vxge-config.h
341 * @vp_id: Virtual Path Id
361 u32 vp_id; member in struct:vxge_hw_vp_config
631 u32 vp_id; member in struct:__vxge_hw_channel
652 * @vp_id: Virtual path id
675 u32 vp_id; member in struct:__vxge_hw_virtualpath
930 u32 vp_id; member in struct:__vxge_hw_ring
1014 u32 vp_id; member in struct:__vxge_hw_fifo
1836 * @vp_id: Identifier of Virtual Path
1844 u32 vp_id; member in struct:vxge_hw_vpath_attr
2110 int vxge_hw_vpath_wait_receive_idle(struct __vxge_hw_device *hldev, u32 vp_id);
vxge-traffic.h
2114 u32 vp_id);

Completed in 398 milliseconds