Lines Matching refs:bna
198 bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); in bna_bfi_ucast_req()
214 bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); in bna_bfi_mcast_add_req()
230 bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); in bna_bfi_mcast_del_req()
245 bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); in bna_bfi_mcast_filter_req()
260 bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); in bna_bfi_rx_promisc_req()
285 bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); in bna_bfi_rx_vlan_filter_set()
300 bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); in bna_bfi_vlan_strip_enable()
316 bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); in bna_bfi_rit_cfg()
336 bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); in bna_bfi_rss_cfg()
351 bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); in bna_bfi_rss_enable()
392 mchandle = bna_mcam_mod_handle_get(&rxf->rx->bna->mcam_mod); in bna_rxf_mchandle_attach()
419 bna_mcam_mod_handle_put(&rxf->rx->bna->mcam_mod, mchandle); in bna_rxf_mcast_del()
437 list_move_tail(&mac->qe, bna_mcam_mod_del_q(rxf->rx->bna)); in bna_rxf_mcast_cfg_apply()
485 list_move_tail(&mac->qe, bna_mcam_mod_del_q(rxf->rx->bna)); in bna_rxf_mcast_cfg_reset()
669 list_move_tail(&mac->qe, bna_ucam_mod_free_q(rxf->rx->bna)); in bna_rxf_uninit()
674 bna_ucam_mod_free_q(rxf->rx->bna)); in bna_rxf_uninit()
681 list_move_tail(&mac->qe, bna_mcam_mod_free_q(rxf->rx->bna)); in bna_rxf_uninit()
686 if (rxf->rx->bna->promisc_rid == rxf->rx->rid) in bna_rxf_uninit()
687 rxf->rx->bna->promisc_rid = BFI_INVALID_RID; in bna_rxf_uninit()
688 if (rxf->rx->bna->default_mode_rid == rxf->rx->rid) in bna_rxf_uninit()
689 rxf->rx->bna->default_mode_rid = BFI_INVALID_RID; in bna_rxf_uninit()
738 bna_cam_mod_mac_get(bna_ucam_mod_free_q(rxf->rx->bna)); in bna_rx_ucast_set()
746 rxf->cam_fltr_cbarg = rx->bna->bnad; in bna_rx_ucast_set()
764 cbfn(rx->bna->bnad, rx); in bna_rx_mcast_add()
768 mac = bna_cam_mod_mac_get(bna_mcam_mod_free_q(rxf->rx->bna)); in bna_rx_mcast_add()
775 rxf->cam_fltr_cbarg = rx->bna->bnad; in bna_rx_mcast_add()
785 struct bna_ucam_mod *ucam_mod = &rx->bna->ucam_mod; in bna_rx_ucast_listset()
843 struct bna_mcam_mod *mcam_mod = &rx->bna->mcam_mod; in bna_rx_mcast_listset()
911 list_move_tail(&mac->qe, bna_mcam_mod_free_q(rxf->rx->bna)); in bna_rx_mcast_delall()
919 del_mac = bna_cam_mod_mac_get(bna_mcam_mod_del_q(rxf->rx->bna)); in bna_rx_mcast_delall()
923 list_add_tail(&mac->qe, bna_mcam_mod_free_q(rxf->rx->bna)); in bna_rx_mcast_delall()
971 list_move_tail(&mac->qe, bna_ucam_mod_del_q(rxf->rx->bna)); in bna_rxf_ucast_cfg_apply()
1009 bna_ucam_mod_del_q(rxf->rx->bna)); in bna_rxf_ucast_cfg_reset()
1014 bna_ucam_mod_del_q(rxf->rx->bna)); in bna_rxf_ucast_cfg_reset()
1047 struct bna *bna = rxf->rx->bna; in bna_rxf_promisc_cfg_apply() local
1064 bna->promisc_rid = BFI_INVALID_RID; in bna_rxf_promisc_cfg_apply()
1075 struct bna *bna = rxf->rx->bna; in bna_rxf_promisc_cfg_reset() local
1083 bna->promisc_rid = BFI_INVALID_RID; in bna_rxf_promisc_cfg_reset()
1161 struct bna *bna = rxf->rx->bna; in bna_rxf_promisc_enable() local
1177 bna->promisc_rid = rxf->rx->rid; in bna_rxf_promisc_enable()
1187 struct bna *bna = rxf->rx->bna; in bna_rxf_promisc_disable() local
1199 bna->promisc_rid = BFI_INVALID_RID; in bna_rxf_promisc_disable()
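
The rxf matches above all reach device-wide state (the bfa message queue, the UCAM/MCAM modules, promisc_rid) through the rxf->rx->bna back-pointer chain. A minimal sketch of that chain, assuming simplified stand-in types rather than the driver's real structure layouts:

#include <stdio.h>

/* Simplified stand-ins for the driver's structures, for illustration only. */
struct bna {
        int promisc_rid;        /* placeholder for the real per-device field */
        const char *msgq_name;  /* placeholder for the real bfa_msgq */
};

struct bna_rx {
        struct bna *bna;        /* back-pointer to the per-device object */
        int rid;
};

struct bna_rxf {
        struct bna_rx *rx;      /* back-pointer to the parent Rx */
};

/* Mirrors the rxf->rx->bna dereference pattern seen in the matches above. */
static void post_via_backpointer(struct bna_rxf *rxf)
{
        struct bna *bna = rxf->rx->bna;

        printf("posting on %s for rid %d\n", bna->msgq_name, rxf->rx->rid);
}

int main(void)
{
        struct bna bna = { .promisc_rid = -1, .msgq_name = "msgq0" };
        struct bna_rx rx = { .bna = &bna, .rid = 3 };
        struct bna_rxf rxf = { .rx = &rx };

        post_via_backpointer(&rxf);
        return 0;
}
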
1294 (rx)->rx_stall_cbfn((rx)->bna->bnad, (rx)); \
1378 rx->rx_cleanup_cbfn(rx->bna->bnad, rx); in bna_rx_sm_stop_wait()
1415 rx->rx_post_cbfn(rx->bna->bnad, rx); in bna_rx_sm_rxf_start_wait_entry()
1432 rx->rx_cleanup_cbfn(rx->bna->bnad, rx); in bna_rx_sm_rxf_stop_wait()
1483 bna_ib_start(rx->bna, &rxp->cq.ib, is_regular); in bna_rx_sm_started_entry()
1485 bna_ethport_cb_rx_started(&rx->bna->ethport); in bna_rx_sm_started_entry()
1494 bna_ethport_cb_rx_stopped(&rx->bna->ethport); in bna_rx_sm_started()
1500 bna_ethport_cb_rx_stopped(&rx->bna->ethport); in bna_rx_sm_started()
1503 rx->rx_cleanup_cbfn(rx->bna->bnad, rx); in bna_rx_sm_started()
1524 rx->rx_cleanup_cbfn(rx->bna->bnad, rx); in bna_rx_sm_rxf_start_wait()
1633 cfg_req->rx_cfg.frame_size = bna_enet_mtu_get(&rx->bna->enet); in bna_bfi_rx_enet_start()
1663 bna_enet_mtu_get(&rx->bna->enet); in bna_bfi_rx_enet_start()
1719 bfa_msgq_cmd_post(&rx->bna->msgq, &rx->msgq_cmd); in bna_bfi_rx_enet_start()
1733 bfa_msgq_cmd_post(&rx->bna->msgq, &rx->msgq_cmd); in bna_bfi_rx_enet_stop()
1743 bna_ib_stop(rx->bna, &rxp->cq.ib); in bna_rx_enet_stop()
1951 rx_mod->stop_cbfn(&rx_mod->bna->enet); in bna_rx_mod_cb_rx_stopped_all()
1968 bna_rx_mod_cb_rx_stopped(&rx->bna->rx_mod, rx); in bna_rx_stop()
1971 rx->stop_cbarg = &rx->bna->rx_mod; in bna_rx_stop()
2031 void bna_rx_mod_init(struct bna_rx_mod *rx_mod, struct bna *bna, in bna_rx_mod_init() argument
2039 rx_mod->bna = bna; in bna_rx_mod_init()
2059 for (index = 0; index < bna->ioceth.attr.num_rxp; index++) { in bna_rx_mod_init()
2063 rx_ptr->bna = NULL; in bna_rx_mod_init()
2073 for (index = 0; index < bna->ioceth.attr.num_rxp; index++) { in bna_rx_mod_init()
2080 for (index = 0; index < (bna->ioceth.attr.num_rxp * 2); index++) { in bna_rx_mod_init()
2090 rx_mod->bna = NULL; in bna_rx_mod_uninit()
2101 bfa_msgq_rsp_copy(&rx->bna->msgq, (u8 *)cfg_rsp, in bna_bfi_rx_enet_start_rsp()
2112 rx->bna->pcidev.pci_bar_kva in bna_bfi_rx_enet_start_rsp()
2116 rx->bna->pcidev.pci_bar_kva in bna_bfi_rx_enet_start_rsp()
2121 rx->bna->pcidev.pci_bar_kva in bna_bfi_rx_enet_start_rsp()
2258 bna_rx_create(struct bna *bna, struct bnad *bnad, in bna_rx_create() argument
2264 struct bna_rx_mod *rx_mod = &bna->rx_mod; in bna_rx_create()
2316 rx->bna = bna; in bna_rx_create()
2332 if (rx->bna->rx_mod.flags & BNA_RX_MOD_F_ENET_STARTED) { in bna_rx_create()
2335 if (!(rx->bna->rx_mod.flags & in bna_rx_create()
2340 if (rx->bna->rx_mod.flags & BNA_RX_MOD_F_ENET_LOOPBACK) in bna_rx_create()
2399 q0->rcb->bnad = bna->bnad; in bna_rx_create()
2425 q1->rcb->bnad = bna->bnad; in bna_rx_create()
2469 rxp->cq.ccb->bnad = bna->bnad; in bna_rx_create()
2493 struct bna_rx_mod *rx_mod = &rx->bna->rx_mod; in bna_rx_destroy()
2506 rx->rcb_destroy_cbfn(rx->bna->bnad, q0->rcb); in bna_rx_destroy()
2514 rx->rcb_destroy_cbfn(rx->bna->bnad, q1->rcb); in bna_rx_destroy()
2524 rx->ccb_destroy_cbfn(rx->bna->bnad, rxp->cq.ccb); in bna_rx_destroy()
2538 rx->bna = NULL; in bna_rx_destroy()
2560 (*cbfn)(rx->bna->bnad, rx); in bna_rx_disable()
2563 rx->stop_cbarg = rx->bna->bnad; in bna_rx_disable()
2612 if ((rx->bna->promisc_rid != BFI_INVALID_RID) && in bna_rx_mode_set()
2613 (rx->bna->promisc_rid != rxf->rx->rid)) in bna_rx_mode_set()
2617 if (rx->bna->default_mode_rid != BFI_INVALID_RID) in bna_rx_mode_set()
2627 if ((rx->bna->default_mode_rid != BFI_INVALID_RID) && in bna_rx_mode_set()
2628 (rx->bna->default_mode_rid != rxf->rx->rid)) { in bna_rx_mode_set()
2633 if (rx->bna->promisc_rid != BFI_INVALID_RID) in bna_rx_mode_set()
2659 rxf->cam_fltr_cbarg = rx->bna->bnad; in bna_rx_mode_set()
2693 bna_rx_dim_reconfig(struct bna *bna, const u32 vector[][BNA_BIAS_T_MAX]) in bna_rx_dim_reconfig() argument
2699 bna->rx_mod.dim_vector[i][j] = vector[i][j]; in bna_rx_dim_reconfig()
2705 struct bna *bna = ccb->cq->rx->bna; in bna_rx_dim_update() local
2746 coalescing_timeo = bna->rx_mod.dim_vector[load][bias]; in bna_rx_dim_update()
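
The bna_rx_dim_reconfig()/bna_rx_dim_update() matches suggest a two-dimensional coalescing table indexed by a load category and a bias category. A minimal sketch of that lookup, with illustrative table sizes and values (LOAD_LEVELS/BIAS_LEVELS and the defaults below are placeholders, not the driver's real dimensions or tuned table; BNA_BIAS_T_MAX in the prototype above is one of the real bounds):

#include <stdio.h>

#define LOAD_LEVELS 4   /* illustrative; stands in for the real load dimension */
#define BIAS_LEVELS 2   /* illustrative; stands in for BNA_BIAS_T_MAX above */

static unsigned int dim_vector[LOAD_LEVELS][BIAS_LEVELS];

/* Copy a caller-supplied table, as bna_rx_dim_reconfig() does for rx_mod. */
static void dim_reconfig(const unsigned int vector[][BIAS_LEVELS])
{
        for (int i = 0; i < LOAD_LEVELS; i++)
                for (int j = 0; j < BIAS_LEVELS; j++)
                        dim_vector[i][j] = vector[i][j];
}

/* Pick a coalescing timeout for the current load/bias classification. */
static unsigned int dim_lookup(int load, int bias)
{
        return dim_vector[load][bias];
}

int main(void)
{
        static const unsigned int defaults[LOAD_LEVELS][BIAS_LEVELS] = {
                { 12, 20 }, { 8, 12 }, { 4, 8 }, { 2, 4 },
        };

        dim_reconfig(defaults);
        printf("coalescing_timeo = %u\n", dim_lookup(2, 1));
        return 0;
}
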
2884 bna_ib_start(tx->bna, &txq->ib, is_regular); in bna_tx_sm_started_entry()
2886 tx->tx_resume_cbfn(tx->bna->bnad, tx); in bna_tx_sm_started_entry()
2895 tx->tx_stall_cbfn(tx->bna->bnad, tx); in bna_tx_sm_started()
2901 tx->tx_stall_cbfn(tx->bna->bnad, tx); in bna_tx_sm_started()
2902 tx->tx_cleanup_cbfn(tx->bna->bnad, tx); in bna_tx_sm_started()
2926 tx->tx_cleanup_cbfn(tx->bna->bnad, tx); in bna_tx_sm_stop_wait()
2972 tx->tx_stall_cbfn(tx->bna->bnad, tx); in bna_tx_sm_prio_stop_wait_entry()
2986 tx->tx_cleanup_cbfn(tx->bna->bnad, tx); in bna_tx_sm_prio_stop_wait()
3005 tx->tx_cleanup_cbfn(tx->bna->bnad, tx); in bna_tx_sm_prio_cleanup_wait_entry()
3139 bfa_msgq_cmd_post(&tx->bna->msgq, &tx->msgq_cmd); in bna_bfi_tx_enet_start()
3153 bfa_msgq_cmd_post(&tx->bna->msgq, &tx->msgq_cmd); in bna_bfi_tx_enet_stop()
3163 bna_ib_stop(tx->bna, &txq->ib); in bna_tx_enet_stop()
3224 struct bna_tx_mod *tx_mod = &tx->bna->tx_mod; in bna_tx_free()
3242 tx->bna = NULL; in bna_tx_free()
3264 tx->stop_cbarg = &tx->bna->tx_mod; in bna_tx_stop()
3284 bfa_msgq_rsp_copy(&tx->bna->msgq, (u8 *)cfg_rsp, in bna_bfi_tx_enet_start_rsp()
3293 tx->bna->pcidev.pci_bar_kva in bna_bfi_tx_enet_start_rsp()
3296 tx->bna->pcidev.pci_bar_kva in bna_bfi_tx_enet_start_rsp()
3371 bna_tx_create(struct bna *bna, struct bnad *bnad, in bna_tx_create() argument
3377 struct bna_tx_mod *tx_mod = &bna->tx_mod; in bna_tx_create()
3399 tx->bna = bna; in bna_tx_create()
3432 if (tx->bna->tx_mod.flags & BNA_TX_MOD_F_ENET_STARTED) { in bna_tx_create()
3435 if (!(tx->bna->tx_mod.flags & in bna_tx_create()
3440 if (tx->bna->tx_mod.flags & BNA_TX_MOD_F_ENET_LOOPBACK) in bna_tx_create()
3495 (tx->tcb_setup_cbfn)(bna->bnad, txq->tcb); in bna_tx_create()
3525 (tx->tcb_destroy_cbfn)(tx->bna->bnad, txq->tcb); in bna_tx_destroy()
3527 tx->bna->tx_mod.rid_mask &= ~BIT(tx->rid); in bna_tx_destroy()
3548 (*cbfn)(tx->bna->bnad, tx); in bna_tx_disable()
3553 tx->stop_cbarg = tx->bna->bnad; in bna_tx_disable()
3580 tx_mod->stop_cbfn(&tx_mod->bna->enet); in bna_tx_mod_cb_tx_stopped_all()
3585 bna_tx_mod_init(struct bna_tx_mod *tx_mod, struct bna *bna, in bna_tx_mod_init() argument
3590 tx_mod->bna = bna; in bna_tx_mod_init()
3603 for (i = 0; i < bna->ioceth.attr.num_txq; i++) { in bna_tx_mod_init()
3618 tx_mod->bna = NULL; in bna_tx_mod_uninit()
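
The Tx matches mirror the Rx side: bna_tx_create() stores the struct bna back-pointer, callbacks are dispatched to the bnad through tx->bna->bnad, and teardown clears the pointer (tx->bna = NULL, tx_mod->bna = NULL). A minimal sketch of that lifecycle, again with simplified stand-in types and hypothetical helper names (tx_create/tx_free here are illustrative, not the driver's functions):

#include <stdio.h>
#include <stddef.h>

/* Simplified stand-ins for the driver's structures, for illustration only. */
struct bnad { const char *name; };

struct bna { struct bnad *bnad; };

struct bna_tx {
        struct bna *bna;        /* set on create, cleared on free */
        void (*tx_stall_cbfn)(struct bnad *, struct bna_tx *);
};

static void stall_cb(struct bnad *bnad, struct bna_tx *tx)
{
        (void)tx;
        printf("tx stalled, notified %s\n", bnad->name);
}

/* Illustrative create/free pair: attach and detach the device back-pointer. */
static void tx_create(struct bna_tx *tx, struct bna *bna)
{
        tx->bna = bna;
        tx->tx_stall_cbfn = stall_cb;
}

static void tx_free(struct bna_tx *tx)
{
        tx->bna = NULL;         /* mirrors tx->bna = NULL seen above */
}

int main(void)
{
        struct bnad bnad = { .name = "bnad0" };
        struct bna bna = { .bnad = &bnad };
        struct bna_tx tx;

        tx_create(&tx, &bna);
        tx.tx_stall_cbfn(tx.bna->bnad, &tx);    /* dispatch through the back-pointer */
        tx_free(&tx);
        return 0;
}
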