Lines Matching refs:lp
124 static inline u32 axienet_dma_in32(struct axienet_local *lp, off_t reg) in axienet_dma_in32() argument
126 return in_be32(lp->dma_regs + reg); in axienet_dma_in32()
138 static inline void axienet_dma_out32(struct axienet_local *lp, in axienet_dma_out32() argument
141 out_be32((lp->dma_regs + reg), value); in axienet_dma_out32()
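These two helpers are the big-endian MMIO accessors for the AXI DMA register block; every DMA register touch in the rest of this listing goes through them. A minimal usage sketch of the read-modify-write idiom they support (XAXIDMA_CR_RUNSTOP_MASK is assumed to be defined in the driver header):

	/* Hedged sketch: set the run/stop bit of the TX channel control
	 * register while leaving the remaining bits untouched. */
	u32 cr;

	cr = axienet_dma_in32(lp, XAXIDMA_TX_CR_OFFSET);
	cr |= XAXIDMA_CR_RUNSTOP_MASK;		/* assumed header constant */
	axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, cr);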
155 struct axienet_local *lp = netdev_priv(ndev); in axienet_dma_bd_release() local
158 dma_unmap_single(ndev->dev.parent, lp->rx_bd_v[i].phys, in axienet_dma_bd_release()
159 lp->max_frm_size, DMA_FROM_DEVICE); in axienet_dma_bd_release()
161 (lp->rx_bd_v[i].sw_id_offset)); in axienet_dma_bd_release()
164 if (lp->rx_bd_v) { in axienet_dma_bd_release()
166 sizeof(*lp->rx_bd_v) * RX_BD_NUM, in axienet_dma_bd_release()
167 lp->rx_bd_v, in axienet_dma_bd_release()
168 lp->rx_bd_p); in axienet_dma_bd_release()
170 if (lp->tx_bd_v) { in axienet_dma_bd_release()
172 sizeof(*lp->tx_bd_v) * TX_BD_NUM, in axienet_dma_bd_release()
173 lp->tx_bd_v, in axienet_dma_bd_release()
174 lp->tx_bd_p); in axienet_dma_bd_release()
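Pieced together, the release path above unmaps and frees every RX buffer, then returns both coherent descriptor rings. A hedged reconstruction of the loop (the dev_kfree_skb() call and the sk_buff cast of sw_id_offset are assumptions, since the listing only shows the lp-referencing lines):

	/* Sketch of axienet_dma_bd_release(), reconstructed from the
	 * fragments above; runs with ndev, lp and i already in scope. */
	for (i = 0; i < RX_BD_NUM; i++) {
		dma_unmap_single(ndev->dev.parent, lp->rx_bd_v[i].phys,
				 lp->max_frm_size, DMA_FROM_DEVICE);
		dev_kfree_skb((struct sk_buff *)
			      (lp->rx_bd_v[i].sw_id_offset));	/* assumed */
	}

	if (lp->rx_bd_v)
		dma_free_coherent(ndev->dev.parent,
				  sizeof(*lp->rx_bd_v) * RX_BD_NUM,
				  lp->rx_bd_v, lp->rx_bd_p);
	if (lp->tx_bd_v)
		dma_free_coherent(ndev->dev.parent,
				  sizeof(*lp->tx_bd_v) * TX_BD_NUM,
				  lp->tx_bd_v, lp->tx_bd_p);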
194 struct axienet_local *lp = netdev_priv(ndev); in axienet_dma_bd_init() local
197 lp->tx_bd_ci = 0; in axienet_dma_bd_init()
198 lp->tx_bd_tail = 0; in axienet_dma_bd_init()
199 lp->rx_bd_ci = 0; in axienet_dma_bd_init()
204 lp->tx_bd_v = dma_zalloc_coherent(ndev->dev.parent, in axienet_dma_bd_init()
205 sizeof(*lp->tx_bd_v) * TX_BD_NUM, in axienet_dma_bd_init()
206 &lp->tx_bd_p, GFP_KERNEL); in axienet_dma_bd_init()
207 if (!lp->tx_bd_v) in axienet_dma_bd_init()
210 lp->rx_bd_v = dma_zalloc_coherent(ndev->dev.parent, in axienet_dma_bd_init()
211 sizeof(*lp->rx_bd_v) * RX_BD_NUM, in axienet_dma_bd_init()
212 &lp->rx_bd_p, GFP_KERNEL); in axienet_dma_bd_init()
213 if (!lp->rx_bd_v) in axienet_dma_bd_init()
217 lp->tx_bd_v[i].next = lp->tx_bd_p + in axienet_dma_bd_init()
218 sizeof(*lp->tx_bd_v) * in axienet_dma_bd_init()
223 lp->rx_bd_v[i].next = lp->rx_bd_p + in axienet_dma_bd_init()
224 sizeof(*lp->rx_bd_v) * in axienet_dma_bd_init()
227 skb = netdev_alloc_skb_ip_align(ndev, lp->max_frm_size); in axienet_dma_bd_init()
231 lp->rx_bd_v[i].sw_id_offset = (u32) skb; in axienet_dma_bd_init()
232 lp->rx_bd_v[i].phys = dma_map_single(ndev->dev.parent, in axienet_dma_bd_init()
234 lp->max_frm_size, in axienet_dma_bd_init()
236 lp->rx_bd_v[i].cntrl = lp->max_frm_size; in axienet_dma_bd_init()
240 cr = axienet_dma_in32(lp, XAXIDMA_RX_CR_OFFSET); in axienet_dma_bd_init()
243 ((lp->coalesce_count_rx) << XAXIDMA_COALESCE_SHIFT)); in axienet_dma_bd_init()
250 axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, cr); in axienet_dma_bd_init()
253 cr = axienet_dma_in32(lp, XAXIDMA_TX_CR_OFFSET); in axienet_dma_bd_init()
256 ((lp->coalesce_count_tx) << XAXIDMA_COALESCE_SHIFT)); in axienet_dma_bd_init()
263 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, cr); in axienet_dma_bd_init()
267 axienet_dma_out32(lp, XAXIDMA_RX_CDESC_OFFSET, lp->rx_bd_p); in axienet_dma_bd_init()
268 cr = axienet_dma_in32(lp, XAXIDMA_RX_CR_OFFSET); in axienet_dma_bd_init()
269 axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, in axienet_dma_bd_init()
271 axienet_dma_out32(lp, XAXIDMA_RX_TDESC_OFFSET, lp->rx_bd_p + in axienet_dma_bd_init()
272 (sizeof(*lp->rx_bd_v) * (RX_BD_NUM - 1))); in axienet_dma_bd_init()
277 axienet_dma_out32(lp, XAXIDMA_TX_CDESC_OFFSET, lp->tx_bd_p); in axienet_dma_bd_init()
278 cr = axienet_dma_in32(lp, XAXIDMA_TX_CR_OFFSET); in axienet_dma_bd_init()
279 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, in axienet_dma_bd_init()
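axienet_dma_bd_init() zero-allocates both rings with dma_zalloc_coherent() and chains each descriptor's hardware "next" pointer to the bus address of the following descriptor, wrapping at the end so the rings are circular. A sketch of that chaining; the ((i + 1) % *_BD_NUM) wrap is an assumption based on the truncated lines above:

	/* Hedged sketch of the circular descriptor chaining. */
	for (i = 0; i < TX_BD_NUM; i++)
		lp->tx_bd_v[i].next = lp->tx_bd_p +
				      sizeof(*lp->tx_bd_v) *
				      ((i + 1) % TX_BD_NUM);

	for (i = 0; i < RX_BD_NUM; i++)
		lp->rx_bd_v[i].next = lp->rx_bd_p +
				      sizeof(*lp->rx_bd_v) *
				      ((i + 1) % RX_BD_NUM);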
298 struct axienet_local *lp = netdev_priv(ndev); in axienet_set_mac_address() local
306 axienet_iow(lp, XAE_UAW0_OFFSET, in axienet_set_mac_address()
311 axienet_iow(lp, XAE_UAW1_OFFSET, in axienet_set_mac_address()
312 (((axienet_ior(lp, XAE_UAW1_OFFSET)) & in axienet_set_mac_address()
351 struct axienet_local *lp = netdev_priv(ndev); in axienet_set_multicast_list() local
359 reg = axienet_ior(lp, XAE_FMI_OFFSET); in axienet_set_multicast_list()
361 axienet_iow(lp, XAE_FMI_OFFSET, reg); in axienet_set_multicast_list()
379 reg = axienet_ior(lp, XAE_FMI_OFFSET) & 0xFFFFFF00; in axienet_set_multicast_list()
382 axienet_iow(lp, XAE_FMI_OFFSET, reg); in axienet_set_multicast_list()
383 axienet_iow(lp, XAE_AF0_OFFSET, af0reg); in axienet_set_multicast_list()
384 axienet_iow(lp, XAE_AF1_OFFSET, af1reg); in axienet_set_multicast_list()
388 reg = axienet_ior(lp, XAE_FMI_OFFSET); in axienet_set_multicast_list()
391 axienet_iow(lp, XAE_FMI_OFFSET, reg); in axienet_set_multicast_list()
394 reg = axienet_ior(lp, XAE_FMI_OFFSET) & 0xFFFFFF00; in axienet_set_multicast_list()
397 axienet_iow(lp, XAE_FMI_OFFSET, reg); in axienet_set_multicast_list()
398 axienet_iow(lp, XAE_AF0_OFFSET, 0); in axienet_set_multicast_list()
399 axienet_iow(lp, XAE_AF1_OFFSET, 0); in axienet_set_multicast_list()
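In the multicast path, the lower byte of the filter-mask index register (XAE_FMI) selects an address-filter slot, and the two address-filter registers receive the packed MAC address. A hedged sketch of programming one slot (the byte packing and the source of mac[] are assumptions; only the register writes appear in the listing):

	/* Hedged sketch: fill address-filter slot i with a 6-byte address. */
	u32 af0reg = (mac[0]) | (mac[1] << 8) | (mac[2] << 16) | (mac[3] << 24);
	u32 af1reg = (mac[4]) | (mac[5] << 8);

	reg = axienet_ior(lp, XAE_FMI_OFFSET) & 0xFFFFFF00;
	reg |= i;				/* select filter slot i */
	axienet_iow(lp, XAE_FMI_OFFSET, reg);
	axienet_iow(lp, XAE_AF0_OFFSET, af0reg);
	axienet_iow(lp, XAE_AF1_OFFSET, af1reg);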
420 struct axienet_local *lp = netdev_priv(ndev); in axienet_setoptions() local
424 reg = ((axienet_ior(lp, tp->reg)) & ~(tp->m_or)); in axienet_setoptions()
427 axienet_iow(lp, tp->reg, reg); in axienet_setoptions()
431 lp->options |= options; in axienet_setoptions()
434 static void __axienet_device_reset(struct axienet_local *lp, in __axienet_device_reset() argument
442 axienet_dma_out32(lp, offset, XAXIDMA_CR_RESET_MASK); in __axienet_device_reset()
444 while (axienet_dma_in32(lp, offset) & XAXIDMA_CR_RESET_MASK) { in __axienet_device_reset()
468 struct axienet_local *lp = netdev_priv(ndev); in axienet_device_reset() local
470 __axienet_device_reset(lp, &ndev->dev, XAXIDMA_TX_CR_OFFSET); in axienet_device_reset()
471 __axienet_device_reset(lp, &ndev->dev, XAXIDMA_RX_CR_OFFSET); in axienet_device_reset()
473 lp->max_frm_size = XAE_MAX_VLAN_FRAME_SIZE; in axienet_device_reset()
474 lp->options &= (~XAE_OPTION_JUMBO); in axienet_device_reset()
478 (lp->jumbo_support)) { in axienet_device_reset()
479 lp->max_frm_size = ndev->mtu + XAE_HDR_VLAN_SIZE + in axienet_device_reset()
481 lp->options |= XAE_OPTION_JUMBO; in axienet_device_reset()
489 axienet_status = axienet_ior(lp, XAE_RCW1_OFFSET); in axienet_device_reset()
491 axienet_iow(lp, XAE_RCW1_OFFSET, axienet_status); in axienet_device_reset()
493 axienet_status = axienet_ior(lp, XAE_IP_OFFSET); in axienet_device_reset()
495 axienet_iow(lp, XAE_IS_OFFSET, XAE_INT_RXRJECT_MASK); in axienet_device_reset()
497 axienet_iow(lp, XAE_FCC_OFFSET, XAE_FCC_FCRX_MASK); in axienet_device_reset()
501 axienet_setoptions(ndev, lp->options & in axienet_device_reset()
505 axienet_setoptions(ndev, lp->options); in axienet_device_reset()
523 struct axienet_local *lp = netdev_priv(ndev); in axienet_adjust_link() local
524 struct phy_device *phy = lp->phy_dev; in axienet_adjust_link()
527 if (lp->last_link != link_state) { in axienet_adjust_link()
529 if (lp->phy_type == XAE_PHY_TYPE_1000BASE_X) in axienet_adjust_link()
533 (lp->phy_type == XAE_PHY_TYPE_MII)) in axienet_adjust_link()
538 emmc_reg = axienet_ior(lp, XAE_EMMC_OFFSET); in axienet_adjust_link()
557 axienet_iow(lp, XAE_EMMC_OFFSET, emmc_reg); in axienet_adjust_link()
558 lp->last_link = link_state; in axienet_adjust_link()
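Between the EMMC read and write-back above, axienet_adjust_link() rewrites the link-speed field to match what the PHY negotiated. A hedged sketch of that update; the XAE_EMMC_* field names are assumptions about the driver header:

	/* Hedged sketch: translate phy->speed into the EMMC speed field. */
	emmc_reg = axienet_ior(lp, XAE_EMMC_OFFSET);
	emmc_reg &= ~XAE_EMMC_LINKSPEED_MASK;	/* assumed mask name */
	switch (phy->speed) {
	case SPEED_1000:
		emmc_reg |= XAE_EMMC_LINKSPD_1000;
		break;
	case SPEED_100:
		emmc_reg |= XAE_EMMC_LINKSPD_100;
		break;
	case SPEED_10:
		emmc_reg |= XAE_EMMC_LINKSPD_10;
		break;
	}
	axienet_iow(lp, XAE_EMMC_OFFSET, emmc_reg);
	lp->last_link = link_state;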
582 struct axienet_local *lp = netdev_priv(ndev); in axienet_start_xmit_done() local
586 cur_p = &lp->tx_bd_v[lp->tx_bd_ci]; in axienet_start_xmit_done()
604 ++lp->tx_bd_ci; in axienet_start_xmit_done()
605 lp->tx_bd_ci %= TX_BD_NUM; in axienet_start_xmit_done()
606 cur_p = &lp->tx_bd_v[lp->tx_bd_ci]; in axienet_start_xmit_done()
628 static inline int axienet_check_tx_bd_space(struct axienet_local *lp, in axienet_check_tx_bd_space() argument
632 cur_p = &lp->tx_bd_v[(lp->tx_bd_tail + num_frag) % TX_BD_NUM]; in axienet_check_tx_bd_space()
659 struct axienet_local *lp = netdev_priv(ndev); in axienet_start_xmit() local
663 cur_p = &lp->tx_bd_v[lp->tx_bd_tail]; in axienet_start_xmit()
665 if (axienet_check_tx_bd_space(lp, num_frag)) { in axienet_start_xmit()
672 if (lp->features & XAE_FEATURE_FULL_TX_CSUM) { in axienet_start_xmit()
675 } else if (lp->features & XAE_FEATURE_PARTIAL_RX_CSUM) { in axienet_start_xmit()
691 ++lp->tx_bd_tail; in axienet_start_xmit()
692 lp->tx_bd_tail %= TX_BD_NUM; in axienet_start_xmit()
693 cur_p = &lp->tx_bd_v[lp->tx_bd_tail]; in axienet_start_xmit()
705 tail_p = lp->tx_bd_p + sizeof(*lp->tx_bd_v) * lp->tx_bd_tail; in axienet_start_xmit()
707 axienet_dma_out32(lp, XAXIDMA_TX_TDESC_OFFSET, tail_p); in axienet_start_xmit()
708 ++lp->tx_bd_tail; in axienet_start_xmit()
709 lp->tx_bd_tail %= TX_BD_NUM; in axienet_start_xmit()
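The end of axienet_start_xmit() advances the producer index modulo TX_BD_NUM and kicks the DMA engine by writing the tail-descriptor address; the fragments above show it directly, so this sketch only adds comments:

	/* Sketch of the transmit "kick" from the fragments above. */
	tail_p = lp->tx_bd_p + sizeof(*lp->tx_bd_v) * lp->tx_bd_tail;
	/* Writing the tail descriptor address starts (or resumes) the TX
	 * channel on all descriptors up to and including tail_p. */
	axienet_dma_out32(lp, XAXIDMA_TX_TDESC_OFFSET, tail_p);
	++lp->tx_bd_tail;
	lp->tx_bd_tail %= TX_BD_NUM;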
730 struct axienet_local *lp = netdev_priv(ndev); in axienet_recv() local
734 tail_p = lp->rx_bd_p + sizeof(*lp->rx_bd_v) * lp->rx_bd_ci; in axienet_recv()
735 cur_p = &lp->rx_bd_v[lp->rx_bd_ci]; in axienet_recv()
742 lp->max_frm_size, in axienet_recv()
751 if (lp->features & XAE_FEATURE_FULL_RX_CSUM) { in axienet_recv()
758 } else if ((lp->features & XAE_FEATURE_PARTIAL_RX_CSUM) != 0 && in axienet_recv()
770 new_skb = netdev_alloc_skb_ip_align(ndev, lp->max_frm_size); in axienet_recv()
775 lp->max_frm_size, in axienet_recv()
777 cur_p->cntrl = lp->max_frm_size; in axienet_recv()
781 ++lp->rx_bd_ci; in axienet_recv()
782 lp->rx_bd_ci %= RX_BD_NUM; in axienet_recv()
783 cur_p = &lp->rx_bd_v[lp->rx_bd_ci]; in axienet_recv()
789 axienet_dma_out32(lp, XAXIDMA_RX_TDESC_OFFSET, tail_p); in axienet_recv()
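For each completed RX descriptor, axienet_recv() unmaps the old buffer, passes its skb up the stack, maps a fresh skb back into the same descriptor and advances the consumer index; only then is the RX tail pointer rewritten to return the descriptors to hardware. A hedged sketch of the refill step (the status/sw_id_offset handling is assumed, since the listing elides it):

	/* Hedged sketch of the per-descriptor RX refill. */
	new_skb = netdev_alloc_skb_ip_align(ndev, lp->max_frm_size);
	if (!new_skb)
		return;		/* assumed: bail out and retry later */

	cur_p->phys = dma_map_single(ndev->dev.parent, new_skb->data,
				     lp->max_frm_size, DMA_FROM_DEVICE);
	cur_p->cntrl = lp->max_frm_size;
	cur_p->status = 0;			/* assumed field reset */
	cur_p->sw_id_offset = (u32) new_skb;	/* matches the cast above */

	++lp->rx_bd_ci;
	lp->rx_bd_ci %= RX_BD_NUM;
	cur_p = &lp->rx_bd_v[lp->rx_bd_ci];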
807 struct axienet_local *lp = netdev_priv(ndev); in axienet_tx_irq() local
809 status = axienet_dma_in32(lp, XAXIDMA_TX_SR_OFFSET); in axienet_tx_irq()
811 axienet_start_xmit_done(lp->ndev); in axienet_tx_irq()
819 (lp->tx_bd_v[lp->tx_bd_ci]).phys); in axienet_tx_irq()
821 cr = axienet_dma_in32(lp, XAXIDMA_TX_CR_OFFSET); in axienet_tx_irq()
825 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, cr); in axienet_tx_irq()
827 cr = axienet_dma_in32(lp, XAXIDMA_RX_CR_OFFSET); in axienet_tx_irq()
831 axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, cr); in axienet_tx_irq()
833 tasklet_schedule(&lp->dma_err_tasklet); in axienet_tx_irq()
836 axienet_dma_out32(lp, XAXIDMA_TX_SR_OFFSET, status); in axienet_tx_irq()
855 struct axienet_local *lp = netdev_priv(ndev); in axienet_rx_irq() local
857 status = axienet_dma_in32(lp, XAXIDMA_RX_SR_OFFSET); in axienet_rx_irq()
859 axienet_recv(lp->ndev); in axienet_rx_irq()
867 (lp->rx_bd_v[lp->rx_bd_ci]).phys); in axienet_rx_irq()
869 cr = axienet_dma_in32(lp, XAXIDMA_TX_CR_OFFSET); in axienet_rx_irq()
873 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, cr); in axienet_rx_irq()
875 cr = axienet_dma_in32(lp, XAXIDMA_RX_CR_OFFSET); in axienet_rx_irq()
879 axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, cr); in axienet_rx_irq()
881 tasklet_schedule(&lp->dma_err_tasklet); in axienet_rx_irq()
884 axienet_dma_out32(lp, XAXIDMA_RX_SR_OFFSET, status); in axienet_rx_irq()
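Both interrupt handlers follow the same shape: read the channel status register, run the completion path on a completion or delay interrupt, and on a DMA error mask the interrupts of both channels and defer recovery to the error tasklet; the raw status is always written back to acknowledge it. A hedged skeleton (the XAXIDMA_IRQ_* mask names are assumptions about the driver header):

	/* Hedged skeleton of axienet_rx_irq(); the TX handler mirrors it. */
	status = axienet_dma_in32(lp, XAXIDMA_RX_SR_OFFSET);
	if (status & (XAXIDMA_IRQ_IOC_MASK | XAXIDMA_IRQ_DELAY_MASK)) {
		axienet_recv(lp->ndev);
	} else if (status & XAXIDMA_IRQ_ERROR_MASK) {
		/* Mask IRQs on both channels; the tasklet resets the DMA
		 * engine and rebuilds the rings. */
		cr = axienet_dma_in32(lp, XAXIDMA_TX_CR_OFFSET);
		cr &= ~XAXIDMA_IRQ_ALL_MASK;
		axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, cr);
		cr = axienet_dma_in32(lp, XAXIDMA_RX_CR_OFFSET);
		cr &= ~XAXIDMA_IRQ_ALL_MASK;
		axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, cr);
		tasklet_schedule(&lp->dma_err_tasklet);
	}
	/* Writing the status bits back acknowledges them. */
	axienet_dma_out32(lp, XAXIDMA_RX_SR_OFFSET, status);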
906 struct axienet_local *lp = netdev_priv(ndev); in axienet_open() local
910 mdio_mcreg = axienet_ior(lp, XAE_MDIO_MC_OFFSET); in axienet_open()
911 ret = axienet_mdio_wait_until_ready(lp); in axienet_open()
918 axienet_iow(lp, XAE_MDIO_MC_OFFSET, in axienet_open()
922 axienet_iow(lp, XAE_MDIO_MC_OFFSET, mdio_mcreg); in axienet_open()
923 ret = axienet_mdio_wait_until_ready(lp); in axienet_open()
927 if (lp->phy_node) { in axienet_open()
928 lp->phy_dev = of_phy_connect(lp->ndev, lp->phy_node, in axienet_open()
931 if (!lp->phy_dev) { in axienet_open()
932 dev_err(lp->dev, "of_phy_connect() failed\n"); in axienet_open()
935 phy_start(lp->phy_dev); in axienet_open()
939 tasklet_init(&lp->dma_err_tasklet, axienet_dma_err_handler, in axienet_open()
940 (unsigned long) lp); in axienet_open()
943 ret = request_irq(lp->tx_irq, axienet_tx_irq, 0, ndev->name, ndev); in axienet_open()
947 ret = request_irq(lp->rx_irq, axienet_rx_irq, 0, ndev->name, ndev); in axienet_open()
954 free_irq(lp->tx_irq, ndev); in axienet_open()
956 if (lp->phy_dev) in axienet_open()
957 phy_disconnect(lp->phy_dev); in axienet_open()
958 lp->phy_dev = NULL; in axienet_open()
959 tasklet_kill(&lp->dma_err_tasklet); in axienet_open()
960 dev_err(lp->dev, "request_irq() failed\n"); in axienet_open()
977 struct axienet_local *lp = netdev_priv(ndev); in axienet_stop() local
981 cr = axienet_dma_in32(lp, XAXIDMA_RX_CR_OFFSET); in axienet_stop()
982 axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, in axienet_stop()
984 cr = axienet_dma_in32(lp, XAXIDMA_TX_CR_OFFSET); in axienet_stop()
985 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, in axienet_stop()
987 axienet_setoptions(ndev, lp->options & in axienet_stop()
990 tasklet_kill(&lp->dma_err_tasklet); in axienet_stop()
992 free_irq(lp->tx_irq, ndev); in axienet_stop()
993 free_irq(lp->rx_irq, ndev); in axienet_stop()
995 if (lp->phy_dev) in axienet_stop()
996 phy_disconnect(lp->phy_dev); in axienet_stop()
997 lp->phy_dev = NULL; in axienet_stop()
1016 struct axienet_local *lp = netdev_priv(ndev); in axienet_change_mtu() local
1020 if (lp->jumbo_support) { in axienet_change_mtu()
1043 struct axienet_local *lp = netdev_priv(ndev); in axienet_poll_controller() local
1044 disable_irq(lp->tx_irq); in axienet_poll_controller()
1045 disable_irq(lp->rx_irq); in axienet_poll_controller()
1046 axienet_rx_irq(lp->tx_irq, ndev); in axienet_poll_controller()
1047 axienet_tx_irq(lp->rx_irq, ndev); in axienet_poll_controller()
1048 enable_irq(lp->tx_irq); in axienet_poll_controller()
1049 enable_irq(lp->rx_irq); in axienet_poll_controller()
1079 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_get_settings() local
1080 struct phy_device *phydev = lp->phy_dev; in axienet_ethtools_get_settings()
1100 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_set_settings() local
1101 struct phy_device *phydev = lp->phy_dev; in axienet_ethtools_set_settings()
1151 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_get_regs() local
1157 data[0] = axienet_ior(lp, XAE_RAF_OFFSET); in axienet_ethtools_get_regs()
1158 data[1] = axienet_ior(lp, XAE_TPF_OFFSET); in axienet_ethtools_get_regs()
1159 data[2] = axienet_ior(lp, XAE_IFGP_OFFSET); in axienet_ethtools_get_regs()
1160 data[3] = axienet_ior(lp, XAE_IS_OFFSET); in axienet_ethtools_get_regs()
1161 data[4] = axienet_ior(lp, XAE_IP_OFFSET); in axienet_ethtools_get_regs()
1162 data[5] = axienet_ior(lp, XAE_IE_OFFSET); in axienet_ethtools_get_regs()
1163 data[6] = axienet_ior(lp, XAE_TTAG_OFFSET); in axienet_ethtools_get_regs()
1164 data[7] = axienet_ior(lp, XAE_RTAG_OFFSET); in axienet_ethtools_get_regs()
1165 data[8] = axienet_ior(lp, XAE_UAWL_OFFSET); in axienet_ethtools_get_regs()
1166 data[9] = axienet_ior(lp, XAE_UAWU_OFFSET); in axienet_ethtools_get_regs()
1167 data[10] = axienet_ior(lp, XAE_TPID0_OFFSET); in axienet_ethtools_get_regs()
1168 data[11] = axienet_ior(lp, XAE_TPID1_OFFSET); in axienet_ethtools_get_regs()
1169 data[12] = axienet_ior(lp, XAE_PPST_OFFSET); in axienet_ethtools_get_regs()
1170 data[13] = axienet_ior(lp, XAE_RCW0_OFFSET); in axienet_ethtools_get_regs()
1171 data[14] = axienet_ior(lp, XAE_RCW1_OFFSET); in axienet_ethtools_get_regs()
1172 data[15] = axienet_ior(lp, XAE_TC_OFFSET); in axienet_ethtools_get_regs()
1173 data[16] = axienet_ior(lp, XAE_FCC_OFFSET); in axienet_ethtools_get_regs()
1174 data[17] = axienet_ior(lp, XAE_EMMC_OFFSET); in axienet_ethtools_get_regs()
1175 data[18] = axienet_ior(lp, XAE_PHYC_OFFSET); in axienet_ethtools_get_regs()
1176 data[19] = axienet_ior(lp, XAE_MDIO_MC_OFFSET); in axienet_ethtools_get_regs()
1177 data[20] = axienet_ior(lp, XAE_MDIO_MCR_OFFSET); in axienet_ethtools_get_regs()
1178 data[21] = axienet_ior(lp, XAE_MDIO_MWD_OFFSET); in axienet_ethtools_get_regs()
1179 data[22] = axienet_ior(lp, XAE_MDIO_MRD_OFFSET); in axienet_ethtools_get_regs()
1180 data[23] = axienet_ior(lp, XAE_MDIO_MIS_OFFSET); in axienet_ethtools_get_regs()
1181 data[24] = axienet_ior(lp, XAE_MDIO_MIP_OFFSET); in axienet_ethtools_get_regs()
1182 data[25] = axienet_ior(lp, XAE_MDIO_MIE_OFFSET); in axienet_ethtools_get_regs()
1183 data[26] = axienet_ior(lp, XAE_MDIO_MIC_OFFSET); in axienet_ethtools_get_regs()
1184 data[27] = axienet_ior(lp, XAE_UAW0_OFFSET); in axienet_ethtools_get_regs()
1185 data[28] = axienet_ior(lp, XAE_UAW1_OFFSET); in axienet_ethtools_get_regs()
1186 data[29] = axienet_ior(lp, XAE_FMI_OFFSET); in axienet_ethtools_get_regs()
1187 data[30] = axienet_ior(lp, XAE_AF0_OFFSET); in axienet_ethtools_get_regs()
1188 data[31] = axienet_ior(lp, XAE_AF1_OFFSET); in axienet_ethtools_get_regs()
1205 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_get_pauseparam() local
1207 regval = axienet_ior(lp, XAE_FCC_OFFSET); in axienet_ethtools_get_pauseparam()
1227 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_set_pauseparam() local
1235 regval = axienet_ior(lp, XAE_FCC_OFFSET); in axienet_ethtools_set_pauseparam()
1244 axienet_iow(lp, XAE_FCC_OFFSET, regval); in axienet_ethtools_set_pauseparam()
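Between the FCC read and write above, the ethtool pause flags are mapped onto the flow-control bits. A hedged sketch; XAE_FCC_FCTX_MASK is assumed to exist alongside the XAE_FCC_FCRX_MASK used elsewhere in this listing, and epauseparm stands for the struct ethtool_pauseparam argument (name assumed):

	/* Hedged sketch: pack the requested pause settings into FCC. */
	regval = axienet_ior(lp, XAE_FCC_OFFSET);
	if (epauseparm->tx_pause)
		regval |= XAE_FCC_FCTX_MASK;	/* assumed mask name */
	else
		regval &= ~XAE_FCC_FCTX_MASK;
	if (epauseparm->rx_pause)
		regval |= XAE_FCC_FCRX_MASK;
	else
		regval &= ~XAE_FCC_FCRX_MASK;
	axienet_iow(lp, XAE_FCC_OFFSET, regval);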
1262 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_get_coalesce() local
1263 regval = axienet_dma_in32(lp, XAXIDMA_RX_CR_OFFSET); in axienet_ethtools_get_coalesce()
1266 regval = axienet_dma_in32(lp, XAXIDMA_TX_CR_OFFSET); in axienet_ethtools_get_coalesce()
1284 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_set_coalesce() local
1314 lp->coalesce_count_rx = ecoalesce->rx_max_coalesced_frames; in axienet_ethtools_set_coalesce()
1316 lp->coalesce_count_tx = ecoalesce->tx_max_coalesced_frames; in axienet_ethtools_set_coalesce()
1346 struct axienet_local *lp = (struct axienet_local *) data; in axienet_dma_err_handler() local
1347 struct net_device *ndev = lp->ndev; in axienet_dma_err_handler()
1350 axienet_setoptions(ndev, lp->options & in axienet_dma_err_handler()
1352 mdio_mcreg = axienet_ior(lp, XAE_MDIO_MC_OFFSET); in axienet_dma_err_handler()
1353 axienet_mdio_wait_until_ready(lp); in axienet_dma_err_handler()
1358 axienet_iow(lp, XAE_MDIO_MC_OFFSET, (mdio_mcreg & in axienet_dma_err_handler()
1361 __axienet_device_reset(lp, &ndev->dev, XAXIDMA_TX_CR_OFFSET); in axienet_dma_err_handler()
1362 __axienet_device_reset(lp, &ndev->dev, XAXIDMA_RX_CR_OFFSET); in axienet_dma_err_handler()
1364 axienet_iow(lp, XAE_MDIO_MC_OFFSET, mdio_mcreg); in axienet_dma_err_handler()
1365 axienet_mdio_wait_until_ready(lp); in axienet_dma_err_handler()
1368 cur_p = &lp->tx_bd_v[i]; in axienet_dma_err_handler()
1388 cur_p = &lp->rx_bd_v[i]; in axienet_dma_err_handler()
1397 lp->tx_bd_ci = 0; in axienet_dma_err_handler()
1398 lp->tx_bd_tail = 0; in axienet_dma_err_handler()
1399 lp->rx_bd_ci = 0; in axienet_dma_err_handler()
1402 cr = axienet_dma_in32(lp, XAXIDMA_RX_CR_OFFSET); in axienet_dma_err_handler()
1412 axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, cr); in axienet_dma_err_handler()
1415 cr = axienet_dma_in32(lp, XAXIDMA_TX_CR_OFFSET); in axienet_dma_err_handler()
1425 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, cr); in axienet_dma_err_handler()
1429 axienet_dma_out32(lp, XAXIDMA_RX_CDESC_OFFSET, lp->rx_bd_p); in axienet_dma_err_handler()
1430 cr = axienet_dma_in32(lp, XAXIDMA_RX_CR_OFFSET); in axienet_dma_err_handler()
1431 axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, in axienet_dma_err_handler()
1433 axienet_dma_out32(lp, XAXIDMA_RX_TDESC_OFFSET, lp->rx_bd_p + in axienet_dma_err_handler()
1434 (sizeof(*lp->rx_bd_v) * (RX_BD_NUM - 1))); in axienet_dma_err_handler()
1439 axienet_dma_out32(lp, XAXIDMA_TX_CDESC_OFFSET, lp->tx_bd_p); in axienet_dma_err_handler()
1440 cr = axienet_dma_in32(lp, XAXIDMA_TX_CR_OFFSET); in axienet_dma_err_handler()
1441 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, in axienet_dma_err_handler()
1444 axienet_status = axienet_ior(lp, XAE_RCW1_OFFSET); in axienet_dma_err_handler()
1446 axienet_iow(lp, XAE_RCW1_OFFSET, axienet_status); in axienet_dma_err_handler()
1448 axienet_status = axienet_ior(lp, XAE_IP_OFFSET); in axienet_dma_err_handler()
1450 axienet_iow(lp, XAE_IS_OFFSET, XAE_INT_RXRJECT_MASK); in axienet_dma_err_handler()
1451 axienet_iow(lp, XAE_FCC_OFFSET, XAE_FCC_FCRX_MASK); in axienet_dma_err_handler()
1455 axienet_setoptions(ndev, lp->options & in axienet_dma_err_handler()
1459 axienet_setoptions(ndev, lp->options); in axienet_dma_err_handler()
1480 struct axienet_local *lp; in axienet_of_probe() local
1484 ndev = alloc_etherdev(sizeof(*lp)); in axienet_of_probe()
1496 lp = netdev_priv(ndev); in axienet_of_probe()
1497 lp->ndev = ndev; in axienet_of_probe()
1498 lp->dev = &op->dev; in axienet_of_probe()
1499 lp->options = XAE_OPTION_DEFAULTS; in axienet_of_probe()
1501 lp->regs = of_iomap(op->dev.of_node, 0); in axienet_of_probe()
1502 if (!lp->regs) { in axienet_of_probe()
1508 lp->features = 0; in axienet_of_probe()
1514 lp->csum_offload_on_tx_path = in axienet_of_probe()
1516 lp->features |= XAE_FEATURE_PARTIAL_TX_CSUM; in axienet_of_probe()
1521 lp->csum_offload_on_tx_path = in axienet_of_probe()
1523 lp->features |= XAE_FEATURE_FULL_TX_CSUM; in axienet_of_probe()
1528 lp->csum_offload_on_tx_path = XAE_NO_CSUM_OFFLOAD; in axienet_of_probe()
1535 lp->csum_offload_on_rx_path = in axienet_of_probe()
1537 lp->features |= XAE_FEATURE_PARTIAL_RX_CSUM; in axienet_of_probe()
1540 lp->csum_offload_on_rx_path = in axienet_of_probe()
1542 lp->features |= XAE_FEATURE_FULL_RX_CSUM; in axienet_of_probe()
1545 lp->csum_offload_on_rx_path = XAE_NO_CSUM_OFFLOAD; in axienet_of_probe()
1557 lp->jumbo_support = 1; in axienet_of_probe()
1561 lp->phy_type = be32_to_cpup(p); in axienet_of_probe()
1570 lp->dma_regs = of_iomap(np, 0); in axienet_of_probe()
1571 if (lp->dma_regs) { in axienet_of_probe()
1572 dev_dbg(&op->dev, "MEM base: %p\n", lp->dma_regs); in axienet_of_probe()
1577 lp->rx_irq = irq_of_parse_and_map(np, 1); in axienet_of_probe()
1578 lp->tx_irq = irq_of_parse_and_map(np, 0); in axienet_of_probe()
1580 if ((lp->rx_irq <= 0) || (lp->tx_irq <= 0)) { in axienet_of_probe()
1595 lp->coalesce_count_rx = XAXIDMA_DFT_RX_THRESHOLD; in axienet_of_probe()
1596 lp->coalesce_count_tx = XAXIDMA_DFT_TX_THRESHOLD; in axienet_of_probe()
1598 lp->phy_node = of_parse_phandle(op->dev.of_node, "phy-handle", 0); in axienet_of_probe()
1599 ret = axienet_mdio_setup(lp, op->dev.of_node); in axienet_of_probe()
1603 ret = register_netdev(lp->ndev); in axienet_of_probe()
1605 dev_err(lp->dev, "register_netdev() error (%i)\n", ret); in axienet_of_probe()
1612 if (lp->dma_regs) in axienet_of_probe()
1613 iounmap(lp->dma_regs); in axienet_of_probe()
1615 iounmap(lp->regs); in axienet_of_probe()
1625 struct axienet_local *lp = netdev_priv(ndev); in axienet_of_remove() local
1627 axienet_mdio_teardown(lp); in axienet_of_remove()
1630 of_node_put(lp->phy_node); in axienet_of_remove()
1631 lp->phy_node = NULL; in axienet_of_remove()
1633 iounmap(lp->regs); in axienet_of_remove()
1634 if (lp->dma_regs) in axienet_of_remove()
1635 iounmap(lp->dma_regs); in axienet_of_remove()