Searched refs:rxf (Results 1 - 11 of 11) sorted by relevance

/linux-4.4.14/drivers/net/ethernet/brocade/bna/
bna_tx_rx.c
33 #define bna_rxf_vlan_cfg_soft_reset(rxf) \
35 (rxf)->vlan_pending_bitmask = (u8)BFI_VLAN_BMASK_ALL; \
36 (rxf)->vlan_strip_pending = true; \
39 #define bna_rxf_rss_cfg_soft_reset(rxf) \
41 if ((rxf)->rss_status == BNA_STATUS_T_ENABLED) \
42 (rxf)->rss_pending = (BNA_RSS_F_RIT_PENDING | \
47 static int bna_rxf_cfg_apply(struct bna_rxf *rxf);
48 static void bna_rxf_cfg_reset(struct bna_rxf *rxf);
49 static int bna_rxf_ucast_cfg_apply(struct bna_rxf *rxf);
50 static int bna_rxf_promisc_cfg_apply(struct bna_rxf *rxf);
51 static int bna_rxf_allmulti_cfg_apply(struct bna_rxf *rxf);
52 static int bna_rxf_vlan_strip_cfg_apply(struct bna_rxf *rxf);
53 static int bna_rxf_ucast_cfg_reset(struct bna_rxf *rxf,
55 static int bna_rxf_promisc_cfg_reset(struct bna_rxf *rxf,
57 static int bna_rxf_allmulti_cfg_reset(struct bna_rxf *rxf,
70 bna_rxf_sm_stopped_entry(struct bna_rxf *rxf) bna_rxf_sm_stopped_entry() argument
72 call_rxf_stop_cbfn(rxf); bna_rxf_sm_stopped_entry()
76 bna_rxf_sm_stopped(struct bna_rxf *rxf, enum bna_rxf_event event) bna_rxf_sm_stopped() argument
80 bfa_fsm_set_state(rxf, bna_rxf_sm_cfg_wait); bna_rxf_sm_stopped()
84 call_rxf_stop_cbfn(rxf); bna_rxf_sm_stopped()
92 call_rxf_cam_fltr_cbfn(rxf); bna_rxf_sm_stopped()
101 bna_rxf_sm_cfg_wait_entry(struct bna_rxf *rxf) bna_rxf_sm_cfg_wait_entry() argument
103 if (!bna_rxf_cfg_apply(rxf)) { bna_rxf_sm_cfg_wait_entry()
105 bfa_fsm_set_state(rxf, bna_rxf_sm_started); bna_rxf_sm_cfg_wait_entry()
110 bna_rxf_sm_cfg_wait(struct bna_rxf *rxf, enum bna_rxf_event event) bna_rxf_sm_cfg_wait() argument
114 bfa_fsm_set_state(rxf, bna_rxf_sm_last_resp_wait); bna_rxf_sm_cfg_wait()
118 bna_rxf_cfg_reset(rxf); bna_rxf_sm_cfg_wait()
119 call_rxf_start_cbfn(rxf); bna_rxf_sm_cfg_wait()
120 call_rxf_cam_fltr_cbfn(rxf); bna_rxf_sm_cfg_wait()
121 bfa_fsm_set_state(rxf, bna_rxf_sm_stopped); bna_rxf_sm_cfg_wait()
129 if (!bna_rxf_cfg_apply(rxf)) { bna_rxf_sm_cfg_wait()
131 bfa_fsm_set_state(rxf, bna_rxf_sm_started); bna_rxf_sm_cfg_wait()
141 bna_rxf_sm_started_entry(struct bna_rxf *rxf) bna_rxf_sm_started_entry() argument
143 call_rxf_start_cbfn(rxf); bna_rxf_sm_started_entry()
144 call_rxf_cam_fltr_cbfn(rxf); bna_rxf_sm_started_entry()
148 bna_rxf_sm_started(struct bna_rxf *rxf, enum bna_rxf_event event) bna_rxf_sm_started() argument
153 bna_rxf_cfg_reset(rxf); bna_rxf_sm_started()
154 bfa_fsm_set_state(rxf, bna_rxf_sm_stopped); bna_rxf_sm_started()
158 bfa_fsm_set_state(rxf, bna_rxf_sm_cfg_wait); bna_rxf_sm_started()
167 bna_rxf_sm_last_resp_wait_entry(struct bna_rxf *rxf) bna_rxf_sm_last_resp_wait_entry() argument
172 bna_rxf_sm_last_resp_wait(struct bna_rxf *rxf, enum bna_rxf_event event) bna_rxf_sm_last_resp_wait() argument
177 bna_rxf_cfg_reset(rxf); bna_rxf_sm_last_resp_wait()
178 bfa_fsm_set_state(rxf, bna_rxf_sm_stopped); bna_rxf_sm_last_resp_wait()
187 bna_bfi_ucast_req(struct bna_rxf *rxf, struct bna_mac *mac, bna_bfi_ucast_req() argument
190 struct bfi_enet_ucast_req *req = &rxf->bfi_enet_cmd.ucast_req; bna_bfi_ucast_req()
192 bfi_msgq_mhdr_set(req->mh, BFI_MC_ENET, req_type, 0, rxf->rx->rid); bna_bfi_ucast_req()
196 bfa_msgq_cmd_set(&rxf->msgq_cmd, NULL, NULL, bna_bfi_ucast_req()
198 bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); bna_bfi_ucast_req()
202 bna_bfi_mcast_add_req(struct bna_rxf *rxf, struct bna_mac *mac) bna_bfi_mcast_add_req() argument
205 &rxf->bfi_enet_cmd.mcast_add_req; bna_bfi_mcast_add_req()
208 0, rxf->rx->rid); bna_bfi_mcast_add_req()
212 bfa_msgq_cmd_set(&rxf->msgq_cmd, NULL, NULL, bna_bfi_mcast_add_req()
214 bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); bna_bfi_mcast_add_req()
218 bna_bfi_mcast_del_req(struct bna_rxf *rxf, u16 handle) bna_bfi_mcast_del_req() argument
221 &rxf->bfi_enet_cmd.mcast_del_req; bna_bfi_mcast_del_req()
224 0, rxf->rx->rid); bna_bfi_mcast_del_req()
228 bfa_msgq_cmd_set(&rxf->msgq_cmd, NULL, NULL, bna_bfi_mcast_del_req()
230 bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); bna_bfi_mcast_del_req()
234 bna_bfi_mcast_filter_req(struct bna_rxf *rxf, enum bna_status status) bna_bfi_mcast_filter_req() argument
236 struct bfi_enet_enable_req *req = &rxf->bfi_enet_cmd.req; bna_bfi_mcast_filter_req()
239 BFI_ENET_H2I_MAC_MCAST_FILTER_REQ, 0, rxf->rx->rid); bna_bfi_mcast_filter_req()
243 bfa_msgq_cmd_set(&rxf->msgq_cmd, NULL, NULL, bna_bfi_mcast_filter_req()
245 bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); bna_bfi_mcast_filter_req()
249 bna_bfi_rx_promisc_req(struct bna_rxf *rxf, enum bna_status status) bna_bfi_rx_promisc_req() argument
251 struct bfi_enet_enable_req *req = &rxf->bfi_enet_cmd.req; bna_bfi_rx_promisc_req()
254 BFI_ENET_H2I_RX_PROMISCUOUS_REQ, 0, rxf->rx->rid); bna_bfi_rx_promisc_req()
258 bfa_msgq_cmd_set(&rxf->msgq_cmd, NULL, NULL, bna_bfi_rx_promisc_req()
260 bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); bna_bfi_rx_promisc_req()
264 bna_bfi_rx_vlan_filter_set(struct bna_rxf *rxf, u8 block_idx) bna_bfi_rx_vlan_filter_set() argument
266 struct bfi_enet_rx_vlan_req *req = &rxf->bfi_enet_cmd.vlan_req; bna_bfi_rx_vlan_filter_set()
271 BFI_ENET_H2I_RX_VLAN_SET_REQ, 0, rxf->rx->rid); bna_bfi_rx_vlan_filter_set()
277 if (rxf->vlan_filter_status == BNA_STATUS_T_ENABLED) bna_bfi_rx_vlan_filter_set()
279 htonl(rxf->vlan_filter_table[j]); bna_bfi_rx_vlan_filter_set()
283 bfa_msgq_cmd_set(&rxf->msgq_cmd, NULL, NULL, bna_bfi_rx_vlan_filter_set()
285 bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); bna_bfi_rx_vlan_filter_set()
289 bna_bfi_vlan_strip_enable(struct bna_rxf *rxf) bna_bfi_vlan_strip_enable() argument
291 struct bfi_enet_enable_req *req = &rxf->bfi_enet_cmd.req; bna_bfi_vlan_strip_enable()
294 BFI_ENET_H2I_RX_VLAN_STRIP_ENABLE_REQ, 0, rxf->rx->rid); bna_bfi_vlan_strip_enable()
297 req->enable = rxf->vlan_strip_status; bna_bfi_vlan_strip_enable()
298 bfa_msgq_cmd_set(&rxf->msgq_cmd, NULL, NULL, bna_bfi_vlan_strip_enable()
300 bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); bna_bfi_vlan_strip_enable()
304 bna_bfi_rit_cfg(struct bna_rxf *rxf) bna_bfi_rit_cfg() argument
306 struct bfi_enet_rit_req *req = &rxf->bfi_enet_cmd.rit_req; bna_bfi_rit_cfg()
309 BFI_ENET_H2I_RIT_CFG_REQ, 0, rxf->rx->rid); bna_bfi_rit_cfg()
312 req->size = htons(rxf->rit_size); bna_bfi_rit_cfg()
313 memcpy(&req->table[0], rxf->rit, rxf->rit_size); bna_bfi_rit_cfg()
314 bfa_msgq_cmd_set(&rxf->msgq_cmd, NULL, NULL, bna_bfi_rit_cfg()
316 bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); bna_bfi_rit_cfg()
320 bna_bfi_rss_cfg(struct bna_rxf *rxf) bna_bfi_rss_cfg() argument
322 struct bfi_enet_rss_cfg_req *req = &rxf->bfi_enet_cmd.rss_req; bna_bfi_rss_cfg()
326 BFI_ENET_H2I_RSS_CFG_REQ, 0, rxf->rx->rid); bna_bfi_rss_cfg()
329 req->cfg.type = rxf->rss_cfg.hash_type; bna_bfi_rss_cfg()
330 req->cfg.mask = rxf->rss_cfg.hash_mask; bna_bfi_rss_cfg()
333 htonl(rxf->rss_cfg.toeplitz_hash_key[i]); bna_bfi_rss_cfg()
334 bfa_msgq_cmd_set(&rxf->msgq_cmd, NULL, NULL, bna_bfi_rss_cfg()
336 bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); bna_bfi_rss_cfg()
340 bna_bfi_rss_enable(struct bna_rxf *rxf) bna_bfi_rss_enable() argument
342 struct bfi_enet_enable_req *req = &rxf->bfi_enet_cmd.req; bna_bfi_rss_enable()
345 BFI_ENET_H2I_RSS_ENABLE_REQ, 0, rxf->rx->rid); bna_bfi_rss_enable()
348 req->enable = rxf->rss_status; bna_bfi_rss_enable()
349 bfa_msgq_cmd_set(&rxf->msgq_cmd, NULL, NULL, bna_bfi_rss_enable()
351 bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); bna_bfi_rss_enable()
356 bna_rxf_mcmac_get(struct bna_rxf *rxf, const u8 *mac_addr) bna_rxf_mcmac_get() argument
360 list_for_each_entry(mac, &rxf->mcast_active_q, qe) bna_rxf_mcmac_get()
364 list_for_each_entry(mac, &rxf->mcast_pending_del_q, qe) bna_rxf_mcmac_get()
372 bna_rxf_mchandle_get(struct bna_rxf *rxf, int handle) bna_rxf_mchandle_get() argument
376 list_for_each_entry(mchandle, &rxf->mcast_handle_q, qe) bna_rxf_mchandle_get()
384 bna_rxf_mchandle_attach(struct bna_rxf *rxf, u8 *mac_addr, int handle) bna_rxf_mchandle_attach() argument
389 mcmac = bna_rxf_mcmac_get(rxf, mac_addr); bna_rxf_mchandle_attach()
390 mchandle = bna_rxf_mchandle_get(rxf, handle); bna_rxf_mchandle_attach()
392 mchandle = bna_mcam_mod_handle_get(&rxf->rx->bna->mcam_mod); bna_rxf_mchandle_attach()
395 list_add_tail(&mchandle->qe, &rxf->mcast_handle_q); bna_rxf_mchandle_attach()
402 bna_rxf_mcast_del(struct bna_rxf *rxf, struct bna_mac *mac, bna_rxf_mcast_del() argument
415 bna_bfi_mcast_del_req(rxf, mchandle->handle); bna_rxf_mcast_del()
419 bna_mcam_mod_handle_put(&rxf->rx->bna->mcam_mod, mchandle); bna_rxf_mcast_del()
427 bna_rxf_mcast_cfg_apply(struct bna_rxf *rxf) bna_rxf_mcast_cfg_apply() argument
433 while (!list_empty(&rxf->mcast_pending_del_q)) { bna_rxf_mcast_cfg_apply()
434 mac = list_first_entry(&rxf->mcast_pending_del_q, bna_rxf_mcast_cfg_apply()
436 ret = bna_rxf_mcast_del(rxf, mac, BNA_HARD_CLEANUP); bna_rxf_mcast_cfg_apply()
437 list_move_tail(&mac->qe, bna_mcam_mod_del_q(rxf->rx->bna)); bna_rxf_mcast_cfg_apply()
443 if (!list_empty(&rxf->mcast_pending_add_q)) { bna_rxf_mcast_cfg_apply()
444 mac = list_first_entry(&rxf->mcast_pending_add_q, bna_rxf_mcast_cfg_apply()
446 list_move_tail(&mac->qe, &rxf->mcast_active_q); bna_rxf_mcast_cfg_apply()
447 bna_bfi_mcast_add_req(rxf, mac); bna_rxf_mcast_cfg_apply()
455 bna_rxf_vlan_cfg_apply(struct bna_rxf *rxf) bna_rxf_vlan_cfg_apply() argument
460 if (rxf->vlan_pending_bitmask) { bna_rxf_vlan_cfg_apply()
461 vlan_pending_bitmask = rxf->vlan_pending_bitmask; bna_rxf_vlan_cfg_apply()
466 rxf->vlan_pending_bitmask &= ~BIT(block_idx); bna_rxf_vlan_cfg_apply()
467 bna_bfi_rx_vlan_filter_set(rxf, block_idx); bna_rxf_vlan_cfg_apply()
475 bna_rxf_mcast_cfg_reset(struct bna_rxf *rxf, enum bna_cleanup_type cleanup) bna_rxf_mcast_cfg_reset() argument
481 while (!list_empty(&rxf->mcast_pending_del_q)) { bna_rxf_mcast_cfg_reset()
482 mac = list_first_entry(&rxf->mcast_pending_del_q, bna_rxf_mcast_cfg_reset()
484 ret = bna_rxf_mcast_del(rxf, mac, cleanup); bna_rxf_mcast_cfg_reset()
485 list_move_tail(&mac->qe, bna_mcam_mod_del_q(rxf->rx->bna)); bna_rxf_mcast_cfg_reset()
491 while (!list_empty(&rxf->mcast_active_q)) { bna_rxf_mcast_cfg_reset()
492 mac = list_first_entry(&rxf->mcast_active_q, bna_rxf_mcast_cfg_reset()
494 list_move_tail(&mac->qe, &rxf->mcast_pending_add_q); bna_rxf_mcast_cfg_reset()
495 if (bna_rxf_mcast_del(rxf, mac, cleanup)) bna_rxf_mcast_cfg_reset()
503 bna_rxf_rss_cfg_apply(struct bna_rxf *rxf) bna_rxf_rss_cfg_apply() argument
505 if (rxf->rss_pending) { bna_rxf_rss_cfg_apply()
506 if (rxf->rss_pending & BNA_RSS_F_RIT_PENDING) { bna_rxf_rss_cfg_apply()
507 rxf->rss_pending &= ~BNA_RSS_F_RIT_PENDING; bna_rxf_rss_cfg_apply()
508 bna_bfi_rit_cfg(rxf); bna_rxf_rss_cfg_apply()
512 if (rxf->rss_pending & BNA_RSS_F_CFG_PENDING) { bna_rxf_rss_cfg_apply()
513 rxf->rss_pending &= ~BNA_RSS_F_CFG_PENDING; bna_rxf_rss_cfg_apply()
514 bna_bfi_rss_cfg(rxf); bna_rxf_rss_cfg_apply()
518 if (rxf->rss_pending & BNA_RSS_F_STATUS_PENDING) { bna_rxf_rss_cfg_apply()
519 rxf->rss_pending &= ~BNA_RSS_F_STATUS_PENDING; bna_rxf_rss_cfg_apply()
520 bna_bfi_rss_enable(rxf); bna_rxf_rss_cfg_apply()
529 bna_rxf_cfg_apply(struct bna_rxf *rxf) bna_rxf_cfg_apply() argument
531 if (bna_rxf_ucast_cfg_apply(rxf)) bna_rxf_cfg_apply()
534 if (bna_rxf_mcast_cfg_apply(rxf)) bna_rxf_cfg_apply()
537 if (bna_rxf_promisc_cfg_apply(rxf)) bna_rxf_cfg_apply()
540 if (bna_rxf_allmulti_cfg_apply(rxf)) bna_rxf_cfg_apply()
543 if (bna_rxf_vlan_cfg_apply(rxf)) bna_rxf_cfg_apply()
546 if (bna_rxf_vlan_strip_cfg_apply(rxf)) bna_rxf_cfg_apply()
549 if (bna_rxf_rss_cfg_apply(rxf)) bna_rxf_cfg_apply()
556 bna_rxf_cfg_reset(struct bna_rxf *rxf) bna_rxf_cfg_reset() argument
558 bna_rxf_ucast_cfg_reset(rxf, BNA_SOFT_CLEANUP); bna_rxf_cfg_reset()
559 bna_rxf_mcast_cfg_reset(rxf, BNA_SOFT_CLEANUP); bna_rxf_cfg_reset()
560 bna_rxf_promisc_cfg_reset(rxf, BNA_SOFT_CLEANUP); bna_rxf_cfg_reset()
561 bna_rxf_allmulti_cfg_reset(rxf, BNA_SOFT_CLEANUP); bna_rxf_cfg_reset()
562 bna_rxf_vlan_cfg_soft_reset(rxf); bna_rxf_cfg_reset()
563 bna_rxf_rss_cfg_soft_reset(rxf); bna_rxf_cfg_reset()
567 bna_rit_init(struct bna_rxf *rxf, int rit_size) bna_rit_init() argument
569 struct bna_rx *rx = rxf->rx; bna_rit_init()
573 rxf->rit_size = rit_size; bna_rit_init()
575 rxf->rit[offset] = rxp->cq.ccb->id; bna_rit_init()
581 bna_bfi_rxf_cfg_rsp(struct bna_rxf *rxf, struct bfi_msgq_mhdr *msghdr) bna_bfi_rxf_cfg_rsp() argument
583 bfa_fsm_send_event(rxf, RXF_E_FW_RESP); bna_bfi_rxf_cfg_rsp()
587 bna_bfi_rxf_ucast_set_rsp(struct bna_rxf *rxf, bna_bfi_rxf_ucast_set_rsp() argument
595 rxf->ucast_active_set = 0; bna_bfi_rxf_ucast_set_rsp()
598 bfa_fsm_send_event(rxf, RXF_E_FW_RESP); bna_bfi_rxf_ucast_set_rsp()
602 bna_bfi_rxf_mcast_add_rsp(struct bna_rxf *rxf, bna_bfi_rxf_mcast_add_rsp() argument
606 &rxf->bfi_enet_cmd.mcast_add_req; bna_bfi_rxf_mcast_add_rsp()
610 bna_rxf_mchandle_attach(rxf, (u8 *)&req->mac_addr, bna_bfi_rxf_mcast_add_rsp()
612 bfa_fsm_send_event(rxf, RXF_E_FW_RESP); bna_bfi_rxf_mcast_add_rsp()
616 bna_rxf_init(struct bna_rxf *rxf, bna_rxf_init() argument
621 rxf->rx = rx; bna_rxf_init()
623 INIT_LIST_HEAD(&rxf->ucast_pending_add_q); bna_rxf_init()
624 INIT_LIST_HEAD(&rxf->ucast_pending_del_q); bna_rxf_init()
625 rxf->ucast_pending_set = 0; bna_rxf_init()
626 rxf->ucast_active_set = 0; bna_rxf_init()
627 INIT_LIST_HEAD(&rxf->ucast_active_q); bna_rxf_init()
628 rxf->ucast_pending_mac = NULL; bna_rxf_init()
630 INIT_LIST_HEAD(&rxf->mcast_pending_add_q); bna_rxf_init()
631 INIT_LIST_HEAD(&rxf->mcast_pending_del_q); bna_rxf_init()
632 INIT_LIST_HEAD(&rxf->mcast_active_q); bna_rxf_init()
633 INIT_LIST_HEAD(&rxf->mcast_handle_q); bna_rxf_init()
635 rxf->rit = (u8 *) bna_rxf_init()
637 bna_rit_init(rxf, q_config->num_paths); bna_rxf_init()
639 rxf->rss_status = q_config->rss_status; bna_rxf_init()
640 if (rxf->rss_status == BNA_STATUS_T_ENABLED) { bna_rxf_init()
641 rxf->rss_cfg = q_config->rss_config; bna_rxf_init()
642 rxf->rss_pending |= BNA_RSS_F_CFG_PENDING; bna_rxf_init()
643 rxf->rss_pending |= BNA_RSS_F_RIT_PENDING; bna_rxf_init()
644 rxf->rss_pending |= BNA_RSS_F_STATUS_PENDING; bna_rxf_init()
647 rxf->vlan_filter_status = BNA_STATUS_T_DISABLED; bna_rxf_init()
648 memset(rxf->vlan_filter_table, 0, bna_rxf_init()
650 rxf->vlan_filter_table[0] |= 1; /* for pure priority tagged frames */ bna_rxf_init()
651 rxf->vlan_pending_bitmask = (u8)BFI_VLAN_BMASK_ALL; bna_rxf_init()
653 rxf->vlan_strip_status = q_config->vlan_strip_status; bna_rxf_init()
655 bfa_fsm_set_state(rxf, bna_rxf_sm_stopped); bna_rxf_init()
659 bna_rxf_uninit(struct bna_rxf *rxf) bna_rxf_uninit() argument
663 rxf->ucast_pending_set = 0; bna_rxf_uninit()
664 rxf->ucast_active_set = 0; bna_rxf_uninit()
666 while (!list_empty(&rxf->ucast_pending_add_q)) { bna_rxf_uninit()
667 mac = list_first_entry(&rxf->ucast_pending_add_q, bna_rxf_uninit()
669 list_move_tail(&mac->qe, bna_ucam_mod_free_q(rxf->rx->bna)); bna_rxf_uninit()
672 if (rxf->ucast_pending_mac) { bna_rxf_uninit()
673 list_add_tail(&rxf->ucast_pending_mac->qe, bna_rxf_uninit()
674 bna_ucam_mod_free_q(rxf->rx->bna)); bna_rxf_uninit()
675 rxf->ucast_pending_mac = NULL; bna_rxf_uninit()
678 while (!list_empty(&rxf->mcast_pending_add_q)) { bna_rxf_uninit()
679 mac = list_first_entry(&rxf->mcast_pending_add_q, bna_rxf_uninit()
681 list_move_tail(&mac->qe, bna_mcam_mod_free_q(rxf->rx->bna)); bna_rxf_uninit()
684 rxf->rxmode_pending = 0; bna_rxf_uninit()
685 rxf->rxmode_pending_bitmask = 0; bna_rxf_uninit()
686 if (rxf->rx->bna->promisc_rid == rxf->rx->rid) bna_rxf_uninit()
687 rxf->rx->bna->promisc_rid = BFI_INVALID_RID; bna_rxf_uninit()
688 if (rxf->rx->bna->default_mode_rid == rxf->rx->rid) bna_rxf_uninit()
689 rxf->rx->bna->default_mode_rid = BFI_INVALID_RID; bna_rxf_uninit()
691 rxf->rss_pending = 0; bna_rxf_uninit()
692 rxf->vlan_strip_pending = false; bna_rxf_uninit()
694 rxf->rx = NULL; bna_rxf_uninit()
704 bna_rxf_start(struct bna_rxf *rxf) bna_rxf_start() argument
706 rxf->start_cbfn = bna_rx_cb_rxf_started; bna_rxf_start()
707 rxf->start_cbarg = rxf->rx; bna_rxf_start()
708 bfa_fsm_send_event(rxf, RXF_E_START); bna_rxf_start()
718 bna_rxf_stop(struct bna_rxf *rxf) bna_rxf_stop() argument
720 rxf->stop_cbfn = bna_rx_cb_rxf_stopped; bna_rxf_stop()
721 rxf->stop_cbarg = rxf->rx; bna_rxf_stop()
722 bfa_fsm_send_event(rxf, RXF_E_STOP); bna_rxf_stop()
726 bna_rxf_fail(struct bna_rxf *rxf) bna_rxf_fail() argument
728 bfa_fsm_send_event(rxf, RXF_E_FAIL); bna_rxf_fail()
734 struct bna_rxf *rxf = &rx->rxf; bna_rx_ucast_set() local
736 if (rxf->ucast_pending_mac == NULL) { bna_rx_ucast_set()
737 rxf->ucast_pending_mac = bna_rx_ucast_set()
738 bna_cam_mod_mac_get(bna_ucam_mod_free_q(rxf->rx->bna)); bna_rx_ucast_set()
739 if (rxf->ucast_pending_mac == NULL) bna_rx_ucast_set()
743 ether_addr_copy(rxf->ucast_pending_mac->addr, ucmac); bna_rx_ucast_set()
744 rxf->ucast_pending_set = 1; bna_rx_ucast_set()
745 rxf->cam_fltr_cbfn = NULL; bna_rx_ucast_set()
746 rxf->cam_fltr_cbarg = rx->bna->bnad; bna_rx_ucast_set()
748 bfa_fsm_send_event(rxf, RXF_E_CONFIG); bna_rx_ucast_set()
757 struct bna_rxf *rxf = &rx->rxf; bna_rx_mcast_add() local
761 if (bna_mac_find(&rxf->mcast_active_q, addr) || bna_rx_mcast_add()
762 bna_mac_find(&rxf->mcast_pending_add_q, addr)) { bna_rx_mcast_add()
768 mac = bna_cam_mod_mac_get(bna_mcam_mod_free_q(rxf->rx->bna)); bna_rx_mcast_add()
772 list_add_tail(&mac->qe, &rxf->mcast_pending_add_q); bna_rx_mcast_add()
774 rxf->cam_fltr_cbfn = cbfn; bna_rx_mcast_add()
775 rxf->cam_fltr_cbarg = rx->bna->bnad; bna_rx_mcast_add()
777 bfa_fsm_send_event(rxf, RXF_E_CONFIG); bna_rx_mcast_add()
786 struct bna_rxf *rxf = &rx->rxf; bna_rx_ucast_listset() local
793 while (!list_empty(&rxf->ucast_pending_add_q)) { bna_rx_ucast_listset()
794 mac = list_first_entry(&rxf->ucast_pending_add_q, bna_rx_ucast_listset()
800 while (!list_empty(&rxf->ucast_active_q)) { bna_rx_ucast_listset()
801 mac = list_first_entry(&rxf->ucast_active_q, bna_rx_ucast_listset()
806 list_add_tail(&del_mac->qe, &rxf->ucast_pending_del_q); bna_rx_ucast_listset()
824 list_move_tail(&mac->qe, &rxf->ucast_pending_add_q); bna_rx_ucast_listset()
827 bfa_fsm_send_event(rxf, RXF_E_CONFIG); bna_rx_ucast_listset()
844 struct bna_rxf *rxf = &rx->rxf; bna_rx_mcast_listset() local
851 while (!list_empty(&rxf->mcast_pending_add_q)) { bna_rx_mcast_listset()
852 mac = list_first_entry(&rxf->mcast_pending_add_q, bna_rx_mcast_listset()
858 while (!list_empty(&rxf->mcast_active_q)) { bna_rx_mcast_listset()
859 mac = list_first_entry(&rxf->mcast_active_q, bna_rx_mcast_listset()
864 list_add_tail(&del_mac->qe, &rxf->mcast_pending_del_q); bna_rx_mcast_listset()
884 list_move_tail(&mac->qe, &rxf->mcast_pending_add_q); bna_rx_mcast_listset()
887 bfa_fsm_send_event(rxf, RXF_E_CONFIG); bna_rx_mcast_listset()
903 struct bna_rxf *rxf = &rx->rxf; bna_rx_mcast_delall() local
908 while (!list_empty(&rxf->mcast_pending_add_q)) { bna_rx_mcast_delall()
909 mac = list_first_entry(&rxf->mcast_pending_add_q, bna_rx_mcast_delall()
911 list_move_tail(&mac->qe, bna_mcam_mod_free_q(rxf->rx->bna)); bna_rx_mcast_delall()
915 while (!list_empty(&rxf->mcast_active_q)) { bna_rx_mcast_delall()
916 mac = list_first_entry(&rxf->mcast_active_q, bna_rx_mcast_delall()
919 del_mac = bna_cam_mod_mac_get(bna_mcam_mod_del_q(rxf->rx->bna)); bna_rx_mcast_delall()
921 list_add_tail(&del_mac->qe, &rxf->mcast_pending_del_q); bna_rx_mcast_delall()
923 list_add_tail(&mac->qe, bna_mcam_mod_free_q(rxf->rx->bna)); bna_rx_mcast_delall()
928 bfa_fsm_send_event(rxf, RXF_E_CONFIG); bna_rx_mcast_delall()
934 struct bna_rxf *rxf = &rx->rxf; bna_rx_vlan_add() local
939 rxf->vlan_filter_table[index] |= bit; bna_rx_vlan_add()
940 if (rxf->vlan_filter_status == BNA_STATUS_T_ENABLED) { bna_rx_vlan_add()
941 rxf->vlan_pending_bitmask |= BIT(group_id); bna_rx_vlan_add()
942 bfa_fsm_send_event(rxf, RXF_E_CONFIG); bna_rx_vlan_add()
949 struct bna_rxf *rxf = &rx->rxf; bna_rx_vlan_del() local
954 rxf->vlan_filter_table[index] &= ~bit; bna_rx_vlan_del()
955 if (rxf->vlan_filter_status == BNA_STATUS_T_ENABLED) { bna_rx_vlan_del()
956 rxf->vlan_pending_bitmask |= BIT(group_id); bna_rx_vlan_del()
957 bfa_fsm_send_event(rxf, RXF_E_CONFIG); bna_rx_vlan_del()
962 bna_rxf_ucast_cfg_apply(struct bna_rxf *rxf) bna_rxf_ucast_cfg_apply() argument
967 if (!list_empty(&rxf->ucast_pending_del_q)) { bna_rxf_ucast_cfg_apply()
968 mac = list_first_entry(&rxf->ucast_pending_del_q, bna_rxf_ucast_cfg_apply()
970 bna_bfi_ucast_req(rxf, mac, BFI_ENET_H2I_MAC_UCAST_DEL_REQ); bna_rxf_ucast_cfg_apply()
971 list_move_tail(&mac->qe, bna_ucam_mod_del_q(rxf->rx->bna)); bna_rxf_ucast_cfg_apply()
976 if (rxf->ucast_pending_set) { bna_rxf_ucast_cfg_apply()
977 rxf->ucast_pending_set = 0; bna_rxf_ucast_cfg_apply()
978 ether_addr_copy(rxf->ucast_active_mac.addr, bna_rxf_ucast_cfg_apply()
979 rxf->ucast_pending_mac->addr); bna_rxf_ucast_cfg_apply()
980 rxf->ucast_active_set = 1; bna_rxf_ucast_cfg_apply()
981 bna_bfi_ucast_req(rxf, &rxf->ucast_active_mac, bna_rxf_ucast_cfg_apply()
987 if (!list_empty(&rxf->ucast_pending_add_q)) { bna_rxf_ucast_cfg_apply()
988 mac = list_first_entry(&rxf->ucast_pending_add_q, bna_rxf_ucast_cfg_apply()
990 list_add_tail(&mac->qe, &rxf->ucast_active_q); bna_rxf_ucast_cfg_apply()
991 bna_bfi_ucast_req(rxf, mac, BFI_ENET_H2I_MAC_UCAST_ADD_REQ); bna_rxf_ucast_cfg_apply()
999 bna_rxf_ucast_cfg_reset(struct bna_rxf *rxf, enum bna_cleanup_type cleanup) bna_rxf_ucast_cfg_reset() argument
1004 while (!list_empty(&rxf->ucast_pending_del_q)) { bna_rxf_ucast_cfg_reset()
1005 mac = list_first_entry(&rxf->ucast_pending_del_q, bna_rxf_ucast_cfg_reset()
1009 bna_ucam_mod_del_q(rxf->rx->bna)); bna_rxf_ucast_cfg_reset()
1011 bna_bfi_ucast_req(rxf, mac, bna_rxf_ucast_cfg_reset()
1014 bna_ucam_mod_del_q(rxf->rx->bna)); bna_rxf_ucast_cfg_reset()
1020 while (!list_empty(&rxf->ucast_active_q)) { bna_rxf_ucast_cfg_reset()
1021 mac = list_first_entry(&rxf->ucast_active_q, bna_rxf_ucast_cfg_reset()
1023 list_move_tail(&mac->qe, &rxf->ucast_pending_add_q); bna_rxf_ucast_cfg_reset()
1025 bna_bfi_ucast_req(rxf, mac, bna_rxf_ucast_cfg_reset()
1031 if (rxf->ucast_active_set) { bna_rxf_ucast_cfg_reset()
1032 rxf->ucast_pending_set = 1; bna_rxf_ucast_cfg_reset()
1033 rxf->ucast_active_set = 0; bna_rxf_ucast_cfg_reset()
1035 bna_bfi_ucast_req(rxf, &rxf->ucast_active_mac, bna_rxf_ucast_cfg_reset()
1045 bna_rxf_promisc_cfg_apply(struct bna_rxf *rxf) bna_rxf_promisc_cfg_apply() argument
1047 struct bna *bna = rxf->rx->bna; bna_rxf_promisc_cfg_apply()
1050 if (is_promisc_enable(rxf->rxmode_pending, bna_rxf_promisc_cfg_apply()
1051 rxf->rxmode_pending_bitmask)) { bna_rxf_promisc_cfg_apply()
1053 promisc_inactive(rxf->rxmode_pending, bna_rxf_promisc_cfg_apply()
1054 rxf->rxmode_pending_bitmask); bna_rxf_promisc_cfg_apply()
1055 rxf->rxmode_active |= BNA_RXMODE_PROMISC; bna_rxf_promisc_cfg_apply()
1056 bna_bfi_rx_promisc_req(rxf, BNA_STATUS_T_ENABLED); bna_rxf_promisc_cfg_apply()
1058 } else if (is_promisc_disable(rxf->rxmode_pending, bna_rxf_promisc_cfg_apply()
1059 rxf->rxmode_pending_bitmask)) { bna_rxf_promisc_cfg_apply()
1061 promisc_inactive(rxf->rxmode_pending, bna_rxf_promisc_cfg_apply()
1062 rxf->rxmode_pending_bitmask); bna_rxf_promisc_cfg_apply()
1063 rxf->rxmode_active &= ~BNA_RXMODE_PROMISC; bna_rxf_promisc_cfg_apply()
1065 bna_bfi_rx_promisc_req(rxf, BNA_STATUS_T_DISABLED); bna_rxf_promisc_cfg_apply()
1073 bna_rxf_promisc_cfg_reset(struct bna_rxf *rxf, enum bna_cleanup_type cleanup) bna_rxf_promisc_cfg_reset() argument
1075 struct bna *bna = rxf->rx->bna; bna_rxf_promisc_cfg_reset()
1078 if (is_promisc_disable(rxf->rxmode_pending, bna_rxf_promisc_cfg_reset()
1079 rxf->rxmode_pending_bitmask)) { bna_rxf_promisc_cfg_reset()
1080 promisc_inactive(rxf->rxmode_pending, bna_rxf_promisc_cfg_reset()
1081 rxf->rxmode_pending_bitmask); bna_rxf_promisc_cfg_reset()
1082 rxf->rxmode_active &= ~BNA_RXMODE_PROMISC; bna_rxf_promisc_cfg_reset()
1085 bna_bfi_rx_promisc_req(rxf, BNA_STATUS_T_DISABLED); bna_rxf_promisc_cfg_reset()
1091 if (rxf->rxmode_active & BNA_RXMODE_PROMISC) { bna_rxf_promisc_cfg_reset()
1092 promisc_enable(rxf->rxmode_pending, bna_rxf_promisc_cfg_reset()
1093 rxf->rxmode_pending_bitmask); bna_rxf_promisc_cfg_reset()
1094 rxf->rxmode_active &= ~BNA_RXMODE_PROMISC; bna_rxf_promisc_cfg_reset()
1096 bna_bfi_rx_promisc_req(rxf, BNA_STATUS_T_DISABLED); bna_rxf_promisc_cfg_reset()
1105 bna_rxf_allmulti_cfg_apply(struct bna_rxf *rxf) bna_rxf_allmulti_cfg_apply() argument
1108 if (is_allmulti_enable(rxf->rxmode_pending, bna_rxf_allmulti_cfg_apply()
1109 rxf->rxmode_pending_bitmask)) { bna_rxf_allmulti_cfg_apply()
1111 allmulti_inactive(rxf->rxmode_pending, bna_rxf_allmulti_cfg_apply()
1112 rxf->rxmode_pending_bitmask); bna_rxf_allmulti_cfg_apply()
1113 rxf->rxmode_active |= BNA_RXMODE_ALLMULTI; bna_rxf_allmulti_cfg_apply()
1114 bna_bfi_mcast_filter_req(rxf, BNA_STATUS_T_DISABLED); bna_rxf_allmulti_cfg_apply()
1116 } else if (is_allmulti_disable(rxf->rxmode_pending, bna_rxf_allmulti_cfg_apply()
1117 rxf->rxmode_pending_bitmask)) { bna_rxf_allmulti_cfg_apply()
1119 allmulti_inactive(rxf->rxmode_pending, bna_rxf_allmulti_cfg_apply()
1120 rxf->rxmode_pending_bitmask); bna_rxf_allmulti_cfg_apply()
1121 rxf->rxmode_active &= ~BNA_RXMODE_ALLMULTI; bna_rxf_allmulti_cfg_apply()
1122 bna_bfi_mcast_filter_req(rxf, BNA_STATUS_T_ENABLED); bna_rxf_allmulti_cfg_apply()
1130 bna_rxf_allmulti_cfg_reset(struct bna_rxf *rxf, enum bna_cleanup_type cleanup) bna_rxf_allmulti_cfg_reset() argument
1133 if (is_allmulti_disable(rxf->rxmode_pending, bna_rxf_allmulti_cfg_reset()
1134 rxf->rxmode_pending_bitmask)) { bna_rxf_allmulti_cfg_reset()
1135 allmulti_inactive(rxf->rxmode_pending, bna_rxf_allmulti_cfg_reset()
1136 rxf->rxmode_pending_bitmask); bna_rxf_allmulti_cfg_reset()
1137 rxf->rxmode_active &= ~BNA_RXMODE_ALLMULTI; bna_rxf_allmulti_cfg_reset()
1139 bna_bfi_mcast_filter_req(rxf, BNA_STATUS_T_ENABLED); bna_rxf_allmulti_cfg_reset()
1145 if (rxf->rxmode_active & BNA_RXMODE_ALLMULTI) { bna_rxf_allmulti_cfg_reset()
1146 allmulti_enable(rxf->rxmode_pending, bna_rxf_allmulti_cfg_reset()
1147 rxf->rxmode_pending_bitmask); bna_rxf_allmulti_cfg_reset()
1148 rxf->rxmode_active &= ~BNA_RXMODE_ALLMULTI; bna_rxf_allmulti_cfg_reset()
1150 bna_bfi_mcast_filter_req(rxf, BNA_STATUS_T_ENABLED); bna_rxf_allmulti_cfg_reset()
1159 bna_rxf_promisc_enable(struct bna_rxf *rxf) bna_rxf_promisc_enable() argument
1161 struct bna *bna = rxf->rx->bna; bna_rxf_promisc_enable()
1164 if (is_promisc_enable(rxf->rxmode_pending, bna_rxf_promisc_enable()
1165 rxf->rxmode_pending_bitmask) || bna_rxf_promisc_enable()
1166 (rxf->rxmode_active & BNA_RXMODE_PROMISC)) { bna_rxf_promisc_enable()
1168 } else if (is_promisc_disable(rxf->rxmode_pending, bna_rxf_promisc_enable()
1169 rxf->rxmode_pending_bitmask)) { bna_rxf_promisc_enable()
1171 promisc_inactive(rxf->rxmode_pending, bna_rxf_promisc_enable()
1172 rxf->rxmode_pending_bitmask); bna_rxf_promisc_enable()
1175 promisc_enable(rxf->rxmode_pending, bna_rxf_promisc_enable()
1176 rxf->rxmode_pending_bitmask); bna_rxf_promisc_enable()
1177 bna->promisc_rid = rxf->rx->rid; bna_rxf_promisc_enable()
1185 bna_rxf_promisc_disable(struct bna_rxf *rxf) bna_rxf_promisc_disable() argument
1187 struct bna *bna = rxf->rx->bna; bna_rxf_promisc_disable()
1190 if (is_promisc_disable(rxf->rxmode_pending, bna_rxf_promisc_disable()
1191 rxf->rxmode_pending_bitmask) || bna_rxf_promisc_disable()
1192 (!(rxf->rxmode_active & BNA_RXMODE_PROMISC))) { bna_rxf_promisc_disable()
1194 } else if (is_promisc_enable(rxf->rxmode_pending, bna_rxf_promisc_disable()
1195 rxf->rxmode_pending_bitmask)) { bna_rxf_promisc_disable()
1197 promisc_inactive(rxf->rxmode_pending, bna_rxf_promisc_disable()
1198 rxf->rxmode_pending_bitmask); bna_rxf_promisc_disable()
1200 } else if (rxf->rxmode_active & BNA_RXMODE_PROMISC) { bna_rxf_promisc_disable()
1202 promisc_disable(rxf->rxmode_pending, bna_rxf_promisc_disable()
1203 rxf->rxmode_pending_bitmask); bna_rxf_promisc_disable()
1211 bna_rxf_allmulti_enable(struct bna_rxf *rxf) bna_rxf_allmulti_enable() argument
1215 if (is_allmulti_enable(rxf->rxmode_pending, bna_rxf_allmulti_enable()
1216 rxf->rxmode_pending_bitmask) || bna_rxf_allmulti_enable()
1217 (rxf->rxmode_active & BNA_RXMODE_ALLMULTI)) { bna_rxf_allmulti_enable()
1219 } else if (is_allmulti_disable(rxf->rxmode_pending, bna_rxf_allmulti_enable()
1220 rxf->rxmode_pending_bitmask)) { bna_rxf_allmulti_enable()
1222 allmulti_inactive(rxf->rxmode_pending, bna_rxf_allmulti_enable()
1223 rxf->rxmode_pending_bitmask); bna_rxf_allmulti_enable()
1226 allmulti_enable(rxf->rxmode_pending, bna_rxf_allmulti_enable()
1227 rxf->rxmode_pending_bitmask); bna_rxf_allmulti_enable()
1235 bna_rxf_allmulti_disable(struct bna_rxf *rxf) bna_rxf_allmulti_disable() argument
1239 if (is_allmulti_disable(rxf->rxmode_pending, bna_rxf_allmulti_disable()
1240 rxf->rxmode_pending_bitmask) || bna_rxf_allmulti_disable()
1241 (!(rxf->rxmode_active & BNA_RXMODE_ALLMULTI))) { bna_rxf_allmulti_disable()
1243 } else if (is_allmulti_enable(rxf->rxmode_pending, bna_rxf_allmulti_disable()
1244 rxf->rxmode_pending_bitmask)) { bna_rxf_allmulti_disable()
1246 allmulti_inactive(rxf->rxmode_pending, bna_rxf_allmulti_disable()
1247 rxf->rxmode_pending_bitmask); bna_rxf_allmulti_disable()
1248 } else if (rxf->rxmode_active & BNA_RXMODE_ALLMULTI) { bna_rxf_allmulti_disable()
1250 allmulti_disable(rxf->rxmode_pending, bna_rxf_allmulti_disable()
1251 rxf->rxmode_pending_bitmask); bna_rxf_allmulti_disable()
1259 bna_rxf_vlan_strip_cfg_apply(struct bna_rxf *rxf) bna_rxf_vlan_strip_cfg_apply() argument
1261 if (rxf->vlan_strip_pending) { bna_rxf_vlan_strip_cfg_apply()
1262 rxf->vlan_strip_pending = false; bna_rxf_vlan_strip_cfg_apply()
1263 bna_bfi_vlan_strip_enable(rxf); bna_rxf_vlan_strip_cfg_apply()
1416 bna_rxf_start(&rx->rxf); bna_rx_sm_rxf_start_wait_entry()
1430 bna_rxf_fail(&rx->rxf); bna_rx_sm_rxf_stop_wait()
1436 bna_rxf_stop(&rx->rxf); bna_rx_sm_rxf_stop_wait()
1495 bna_rxf_stop(&rx->rxf); bna_rx_sm_started()
1501 bna_rxf_fail(&rx->rxf); bna_rx_sm_started()
1522 bna_rxf_fail(&rx->rxf); bna_rx_sm_rxf_start_wait()
1715 cfg_req->rx_cfg.strip_vlan = rx->rxf.vlan_strip_status; bna_bfi_rx_enet_start()
2481 bna_rxf_init(&rx->rxf, rx, rx_cfg, res_info); bna_rx_create()
2499 bna_rxf_uninit(&rx->rxf); bna_rx_destroy()
2580 struct bna_rxf *rxf = &rx->rxf; bna_rx_vlan_strip_enable() local
2582 if (rxf->vlan_strip_status == BNA_STATUS_T_DISABLED) { bna_rx_vlan_strip_enable()
2583 rxf->vlan_strip_status = BNA_STATUS_T_ENABLED; bna_rx_vlan_strip_enable()
2584 rxf->vlan_strip_pending = true; bna_rx_vlan_strip_enable()
2585 bfa_fsm_send_event(rxf, RXF_E_CONFIG); bna_rx_vlan_strip_enable()
2592 struct bna_rxf *rxf = &rx->rxf; bna_rx_vlan_strip_disable() local
2594 if (rxf->vlan_strip_status != BNA_STATUS_T_DISABLED) { bna_rx_vlan_strip_disable()
2595 rxf->vlan_strip_status = BNA_STATUS_T_DISABLED; bna_rx_vlan_strip_disable()
2596 rxf->vlan_strip_pending = true; bna_rx_vlan_strip_disable()
2597 bfa_fsm_send_event(rxf, RXF_E_CONFIG); bna_rx_vlan_strip_disable()
2605 struct bna_rxf *rxf = &rx->rxf; bna_rx_mode_set() local
2613 (rx->bna->promisc_rid != rxf->rx->rid)) bna_rx_mode_set()
2628 (rx->bna->default_mode_rid != rxf->rx->rid)) { bna_rx_mode_set()
2640 if (bna_rxf_promisc_enable(rxf)) bna_rx_mode_set()
2643 if (bna_rxf_promisc_disable(rxf)) bna_rx_mode_set()
2648 if (bna_rxf_allmulti_enable(rxf)) bna_rx_mode_set()
2651 if (bna_rxf_allmulti_disable(rxf)) bna_rx_mode_set()
2658 rxf->cam_fltr_cbfn = NULL; bna_rx_mode_set()
2659 rxf->cam_fltr_cbarg = rx->bna->bnad; bna_rx_mode_set()
2660 bfa_fsm_send_event(rxf, RXF_E_CONFIG); bna_rx_mode_set()
2672 struct bna_rxf *rxf = &rx->rxf; bna_rx_vlanfilter_enable() local
2674 if (rxf->vlan_filter_status == BNA_STATUS_T_DISABLED) { bna_rx_vlanfilter_enable()
2675 rxf->vlan_filter_status = BNA_STATUS_T_ENABLED; bna_rx_vlanfilter_enable()
2676 rxf->vlan_pending_bitmask = (u8)BFI_VLAN_BMASK_ALL; bna_rx_vlanfilter_enable()
2677 bfa_fsm_send_event(rxf, RXF_E_CONFIG); bna_rx_vlanfilter_enable()
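
The bna_tx_rx.c matches above outline the rxf (receive-filter) configuration path: bna_rxf_cfg_apply() tries each pending change in turn (ucast, mcast, promisc, allmulti, VLAN, VLAN strip, RSS) and returns nonzero as soon as one firmware request has been posted, so the cfg_wait state re-runs apply on each RXF_E_FW_RESP and moves to started only when nothing is left. Below is a minimal stand-alone C sketch of that pattern; filter_state and post_fw_request are invented names for illustration, not the driver's structures.

#include <stdbool.h>

struct filter_state {
	bool ucast_pending;
	bool promisc_pending;
	bool vlan_pending;
};

/* Stand-in for posting one request over the firmware message queue. */
static void post_fw_request(const char *what)
{
	(void)what;
}

/*
 * Apply exactly one pending change per call.
 * Returns 1 if a request was posted (caller waits for the response),
 * 0 if nothing was pending (caller can enter the started state).
 */
static int filter_cfg_apply(struct filter_state *f)
{
	if (f->ucast_pending) {
		f->ucast_pending = false;
		post_fw_request("ucast");
		return 1;
	}
	if (f->promisc_pending) {
		f->promisc_pending = false;
		post_fw_request("promisc");
		return 1;
	}
	if (f->vlan_pending) {
		f->vlan_pending = false;
		post_fw_request("vlan");
		return 1;
	}
	return 0;
}

Posting at most one request per pass keeps a single outstanding msgq command per rxf, which is why every firmware response in the listing above is funneled back to the state machine as RXF_E_FW_RESP.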
bna.h
83 #define call_rxf_stop_cbfn(rxf) \
85 if ((rxf)->stop_cbfn) { \
88 cbfn = (rxf)->stop_cbfn; \
89 cbarg = (rxf)->stop_cbarg; \
90 (rxf)->stop_cbfn = NULL; \
91 (rxf)->stop_cbarg = NULL; \
96 #define call_rxf_start_cbfn(rxf) \
98 if ((rxf)->start_cbfn) { \
101 cbfn = (rxf)->start_cbfn; \
102 cbarg = (rxf)->start_cbarg; \
103 (rxf)->start_cbfn = NULL; \
104 (rxf)->start_cbarg = NULL; \
109 #define call_rxf_cam_fltr_cbfn(rxf) \
111 if ((rxf)->cam_fltr_cbfn) { \
114 cbfn = (rxf)->cam_fltr_cbfn; \
115 cbarg = (rxf)->cam_fltr_cbarg; \
116 (rxf)->cam_fltr_cbfn = NULL; \
117 (rxf)->cam_fltr_cbarg = NULL; \
118 cbfn(cbarg, rxf->rx); \
334 void bna_bfi_rxf_cfg_rsp(struct bna_rxf *rxf, struct bfi_msgq_mhdr *msghdr);
335 void bna_bfi_rxf_mcast_add_rsp(struct bna_rxf *rxf,
337 void bna_bfi_rxf_ucast_set_rsp(struct bna_rxf *rxf,
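
The bna.h matches show the call_rxf_stop_cbfn()/call_rxf_start_cbfn()/call_rxf_cam_fltr_cbfn() macros, which copy the stored callback and argument into locals, clear the fields, and only then invoke the callback. A small sketch of that one-shot completion idiom, using hypothetical names (struct completer, complete_once) rather than the driver's types:

#include <stddef.h>

typedef void (*done_fn)(void *arg);

struct completer {
	done_fn cbfn;
	void *cbarg;
};

static void complete_once(struct completer *c)
{
	if (c->cbfn) {
		done_fn fn = c->cbfn;
		void *arg = c->cbarg;

		/* Clear before calling, so a re-entered path sees no pending callback. */
		c->cbfn = NULL;
		c->cbarg = NULL;
		fn(arg);
	}
}

Clearing the pointers before the call is what lets the rxf start/stop/cam-filter callbacks fire at most once per request, even when several state-machine paths reach the same completion.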
bnad_ethtool.c
597 sprintf(string, "rxf%d_ucast_octets", i); bnad_get_strings()
599 sprintf(string, "rxf%d_ucast", i); bnad_get_strings()
601 sprintf(string, "rxf%d_ucast_vlan", i); bnad_get_strings()
603 sprintf(string, "rxf%d_mcast_octets", i); bnad_get_strings()
605 sprintf(string, "rxf%d_mcast", i); bnad_get_strings()
607 sprintf(string, "rxf%d_mcast_vlan", i); bnad_get_strings()
609 sprintf(string, "rxf%d_bcast_octets", i); bnad_get_strings()
611 sprintf(string, "rxf%d_bcast", i); bnad_get_strings()
613 sprintf(string, "rxf%d_bcast_vlan", i); bnad_get_strings()
615 sprintf(string, "rxf%d_frame_drops", i); bnad_get_strings()
891 /* Fill hardware stats excluding the rxf/txf into ethtool bufs */ bnad_get_ethtool_stats()
912 /* Fill rxf stats into ethtool buffers */ bnad_get_ethtool_stats()
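
The bnad_ethtool.c matches generate per-rxf statistic names with sprintf(). A rough sketch of that string-table loop follows; the format list mirrors the names shown above, while fill_rxf_strings, the loop bounds, and the use of snprintf are illustrative assumptions rather than the driver's exact code (the kernel's ETH_GSTRING_LEN is 32 bytes per name):

#include <stdio.h>

#define ETH_GSTRING_LEN 32

static const char * const rxf_stat_fmt[] = {
	"rxf%d_ucast_octets", "rxf%d_ucast", "rxf%d_ucast_vlan",
	"rxf%d_mcast_octets", "rxf%d_mcast", "rxf%d_mcast_vlan",
	"rxf%d_bcast_octets", "rxf%d_bcast", "rxf%d_bcast_vlan",
	"rxf%d_frame_drops",
};

/* Emit one fixed-width name per rxf statistic, advancing the buffer. */
static char *fill_rxf_strings(char *string, int num_rxf)
{
	size_t nfmt = sizeof(rxf_stat_fmt) / sizeof(rxf_stat_fmt[0]);
	size_t j;
	int i;

	for (i = 0; i < num_rxf; i++) {
		for (j = 0; j < nfmt; j++) {
			snprintf(string, ETH_GSTRING_LEN, rxf_stat_fmt[j], i);
			string += ETH_GSTRING_LEN;
		}
	}
	return string;
}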
bna_enet.c
314 bna_bfi_rxf_cfg_rsp(&rx->rxf, msghdr); bna_msgq_rsp_handler()
320 bna_bfi_rxf_ucast_set_rsp(&rx->rxf, msghdr); bna_msgq_rsp_handler()
326 bna_bfi_rxf_mcast_add_rsp(&rx->rxf, msghdr); bna_msgq_rsp_handler()
bna_types.h
795 struct bna_rxf rxf; member in struct:bna_rx
/linux-4.4.14/drivers/net/wireless/ath/wil6210/
debugfs.c
1216 ulong rxf = ndev->stats.rx_packets; wil_info_debugfs_show() local
1222 seq_printf(s, "Rx irqs:packets : %8d : %8ld\n", rx, rxf - rxf_old); wil_info_debugfs_show()
1224 rxf_old = rxf; wil_info_debugfs_show()
/linux-4.4.14/drivers/net/ethernet/tehuti/
tehuti.c
989 * It creates rxf and rxd fifos, update relevant HW registers, preallocate
1081 * bdx_rx_alloc_skbs - fill rxf fifo with new skbs
1085 * It allocates skbs, build rxf descs and push it (rxf descr) into rxf fifo.
1135 /*TBD: to do - delayed rxf wptr like in txd */ bdx_rx_alloc_skbs()
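
The tehuti.c comments describe refilling the rxf fifo: allocate buffers, build rxf descriptors for them, push the descriptors into the fifo, then publish the write pointer to hardware. A loose sketch of that refill loop, with entirely hypothetical structures (rxf_fifo_sketch, rxf_desc_sketch) standing in for the driver's skb and DMA handling:

#include <stdlib.h>

#define RXF_RING_SZ 256

struct rxf_desc_sketch {
	void *buf;
	unsigned int len;
};

struct rxf_fifo_sketch {
	struct rxf_desc_sketch ring[RXF_RING_SZ];
	unsigned int wptr;	/* producer index, published to HW after the batch */
	unsigned int in_hw;	/* descriptors currently owned by the NIC */
};

static void rxf_fifo_refill_sketch(struct rxf_fifo_sketch *f, unsigned int bufsz)
{
	while (f->in_hw < RXF_RING_SZ) {
		void *buf = malloc(bufsz);	/* the driver allocates an skb here */

		if (!buf)
			break;
		f->ring[f->wptr % RXF_RING_SZ].buf = buf;
		f->ring[f->wptr % RXF_RING_SZ].len = bufsz;
		f->wptr++;
		f->in_hw++;
	}
	/* The real driver would now write the updated wptr to the rxf fifo register. */
}

The "TBD: to do - delayed rxf wptr like in txd" comment at line 1135 suggests the driver intends to batch that write-pointer update rather than touch the register on every refill.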
/linux-4.4.14/drivers/net/ethernet/emulex/benet/
be_cmds.h
760 struct be_rxf_stats_v0 rxf; member in struct:be_hw_stats_v0
1965 struct be_rxf_stats_v1 rxf; member in struct:be_hw_stats_v1
1988 struct be_rxf_stats_v2 rxf; member in struct:be_hw_stats_v2
be_main.c
363 struct be_rxf_stats_v0 *rxf_stats = &hw_stats->rxf; populate_be_v0_stats()
412 struct be_rxf_stats_v1 *rxf_stats = &hw_stats->rxf; populate_be_v1_stats()
458 struct be_rxf_stats_v2 *rxf_stats = &hw_stats->rxf; populate_be_v2_stats()
/linux-4.4.14/drivers/video/fbdev/
ffb.c
195 u32 rxf; member in struct:ffb_fbc
/linux-4.4.14/drivers/net/ethernet/atheros/atl1e/
atl1e.h
426 u32 page_size; /* bytes length of rxf page */

Completed in 410 milliseconds