Lines matching refs: val64
32 u64 val64; in vxge_hw_vpath_intr_enable() local
86 val64 = readq(&vp_reg->vpath_general_int_status); in vxge_hw_vpath_intr_enable()
159 u64 val64; in vxge_hw_vpath_intr_disable() local
181 val64 = VXGE_HW_TIM_CLR_INT_EN_VP(1 << (16 - vpath->vp_id)); in vxge_hw_vpath_intr_disable()
226 u64 val64; in vxge_hw_vpath_tti_ci_set() local
236 val64 = readq(&vp_reg->tim_cfg1_int_num[VXGE_HW_VPATH_INTR_TX]); in vxge_hw_vpath_tti_ci_set()
237 val64 |= VXGE_HW_TIM_CFG1_INT_NUM_TIMER_CI; in vxge_hw_vpath_tti_ci_set()
238 fifo->tim_tti_cfg1_saved = val64; in vxge_hw_vpath_tti_ci_set()
239 writeq(val64, &vp_reg->tim_cfg1_int_num[VXGE_HW_VPATH_INTR_TX]); in vxge_hw_vpath_tti_ci_set()
245 u64 val64 = ring->tim_rti_cfg1_saved; in vxge_hw_vpath_dynamic_rti_ci_set() local
247 val64 |= VXGE_HW_TIM_CFG1_INT_NUM_TIMER_CI; in vxge_hw_vpath_dynamic_rti_ci_set()
248 ring->tim_rti_cfg1_saved = val64; in vxge_hw_vpath_dynamic_rti_ci_set()
249 writeq(val64, &ring->vp_reg->tim_cfg1_int_num[VXGE_HW_VPATH_INTR_RX]); in vxge_hw_vpath_dynamic_rti_ci_set()
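The vxge_hw_vpath_tti_ci_set() and vxge_hw_vpath_dynamic_rti_ci_set() lines above follow one cached read-modify-write pattern: start from the register value (a fresh readq() in the TTI case, the driver's *_cfg1_saved copy in the RTI case), OR in the TIMER_CI bit, refresh the cached copy, and write the full 64-bit word back. A minimal sketch of that pattern, assuming the VXGE_HW_TIM_CFG1_INT_NUM_TIMER_CI definition from the driver's register header (vxge-reg.h); the demo_ring structure and its fields are placeholders, not the driver's real layout:

    #include <linux/types.h>
    #include <linux/io.h>

    /* Placeholder context standing in for the driver's ring structure. */
    struct demo_ring {
        u64 tim_rti_cfg1_saved;    /* last value written to tim_cfg1 */
        u64 __iomem *tim_cfg1_reg; /* mapped tim_cfg1_int_num[RX] slot */
    };

    static void demo_rti_ci_set(struct demo_ring *ring)
    {
        u64 val64 = ring->tim_rti_cfg1_saved;       /* start from the cached copy */

        val64 |= VXGE_HW_TIM_CFG1_INT_NUM_TIMER_CI; /* set the continuous-interrupt bit */
        ring->tim_rti_cfg1_saved = val64;           /* keep the cache in sync */
        writeq(val64, ring->tim_cfg1_reg);          /* push the whole word to hardware */
    }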
254 u64 val64 = fifo->tim_tti_cfg3_saved; in vxge_hw_vpath_dynamic_tti_rtimer_set() local
257 val64 &= ~VXGE_HW_TIM_CFG3_INT_NUM_RTIMER_VAL(0x3ffffff); in vxge_hw_vpath_dynamic_tti_rtimer_set()
259 val64 |= VXGE_HW_TIM_CFG3_INT_NUM_RTIMER_VAL(timer) | in vxge_hw_vpath_dynamic_tti_rtimer_set()
262 writeq(val64, &fifo->vp_reg->tim_cfg3_int_num[VXGE_HW_VPATH_INTR_TX]); in vxge_hw_vpath_dynamic_tti_rtimer_set()
270 u64 val64 = ring->tim_rti_cfg3_saved; in vxge_hw_vpath_dynamic_rti_rtimer_set() local
273 val64 &= ~VXGE_HW_TIM_CFG3_INT_NUM_RTIMER_VAL(0x3ffffff); in vxge_hw_vpath_dynamic_rti_rtimer_set()
275 val64 |= VXGE_HW_TIM_CFG3_INT_NUM_RTIMER_VAL(timer) | in vxge_hw_vpath_dynamic_rti_rtimer_set()
278 writeq(val64, &ring->vp_reg->tim_cfg3_int_num[VXGE_HW_VPATH_INTR_RX]); in vxge_hw_vpath_dynamic_rti_rtimer_set()
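The dynamic *_rtimer_set lines update a multi-bit field rather than a single flag: the 26-bit RTIMER value is cleared out of the cached tim_cfg3 word by applying the field macro to an all-ones argument, and the new timer value is OR-ed back in before the writeq(). A hedged sketch of just that clear-then-set step, reusing the placeholder style of the previous sketch (demo_fifo and its fields are illustrative):

    /* Placeholder standing in for the driver's fifo structure. */
    struct demo_fifo {
        u64 tim_tti_cfg3_saved;    /* last value written to tim_cfg3 */
        u64 __iomem *tim_cfg3_reg; /* mapped tim_cfg3_int_num[TX] slot */
    };

    static void demo_rtimer_set(struct demo_fifo *fifo, u64 timer)
    {
        u64 val64 = fifo->tim_tti_cfg3_saved;

        /* The field macro shifts its argument into the RTIMER bit positions,
         * so passing 0x3ffffff (all 26 field bits set) yields the clear mask. */
        val64 &= ~VXGE_HW_TIM_CFG3_INT_NUM_RTIMER_VAL(0x3ffffff);
        if (timer)
            val64 |= VXGE_HW_TIM_CFG3_INT_NUM_RTIMER_VAL(timer);

        writeq(val64, fifo->tim_cfg3_reg);
    }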
369 u64 val64; in vxge_hw_device_intr_enable() local
384 val64 = hldev->tim_int_mask0[VXGE_HW_VPATH_INTR_TX] | in vxge_hw_device_intr_enable()
387 if (val64 != 0) { in vxge_hw_device_intr_enable()
388 writeq(val64, &hldev->common_reg->tim_int_status0); in vxge_hw_device_intr_enable()
390 writeq(~val64, &hldev->common_reg->tim_int_mask0); in vxge_hw_device_intr_enable()
405 val64 = readq(&hldev->common_reg->titan_general_int_status); in vxge_hw_device_intr_enable()
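In vxge_hw_device_intr_enable() the combined TX/RX TIM bits are handled with a two-step write: the bits are written to tim_int_status0 to acknowledge anything already pending, and their complement is written to tim_int_mask0, which leaves only the selected bits cleared, i.e. unmasked. A small sketch of that step; the register pointers and function shape are illustrative:

    #include <linux/types.h>
    #include <linux/io.h>

    static void demo_tim_unmask(u64 __iomem *tim_int_status0,
                                u64 __iomem *tim_int_mask0,
                                u64 tx_bits, u64 rx_bits)
    {
        u64 val64 = tx_bits | rx_bits;     /* all TIM bits this device uses */

        if (val64 != 0) {
            writeq(val64, tim_int_status0);  /* ack anything already pending */
            writeq(~val64, tim_int_mask0);   /* 0 = unmasked, so write the complement */
        }
    }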
451 u64 val64; in vxge_hw_device_mask_all() local
453 val64 = VXGE_HW_TITAN_MASK_ALL_INT_ALARM | in vxge_hw_device_mask_all()
456 __vxge_hw_pio_mem_write32_upper((u32)vxge_bVALn(val64, 0, 32), in vxge_hw_device_mask_all()
470 u64 val64 = 0; in vxge_hw_device_unmask_all() local
473 val64 = VXGE_HW_TITAN_MASK_ALL_INT_TRAFFIC; in vxge_hw_device_unmask_all()
475 __vxge_hw_pio_mem_write32_upper((u32)vxge_bVALn(val64, 0, 32), in vxge_hw_device_unmask_all()
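vxge_hw_device_mask_all() and vxge_hw_device_unmask_all() build a 64-bit value but program the hardware through __vxge_hw_pio_mem_write32_upper(), so only the upper half is written, and vxge_bVALn(val64, 0, 32) is what extracts it: the driver numbers bits from the most significant end, so position 0, width 32 selects the top 32 bits. An illustrative re-derivation of that extraction; see the driver headers for the real macro:

    #include <linux/types.h>

    /* MSB-relative bit-field extraction, as the vxge macros use it:
     * loc counts down from bit 63, n is the field width. */
    static inline u64 demo_bVALn(u64 bits, unsigned int loc, unsigned int n)
    {
        return (bits >> (64 - (loc + n))) & ((0x1ULL << n) - 1);
    }

    /* demo_bVALn(val64, 0, 32) == val64 >> 32, i.e. the upper 32 bits. */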
605 u64 val64; in __vxge_hw_vpath_alarm_process() local
646 val64 = readq(&vp_reg->xgmac_vp_int_status); in __vxge_hw_vpath_alarm_process()
648 if (val64 & in __vxge_hw_vpath_alarm_process()
651 val64 = readq(&vp_reg->asic_ntwk_vp_err_reg); in __vxge_hw_vpath_alarm_process()
653 if (((val64 & in __vxge_hw_vpath_alarm_process()
655 (!(val64 & in __vxge_hw_vpath_alarm_process()
657 ((val64 & in __vxge_hw_vpath_alarm_process()
659 (!(val64 & in __vxge_hw_vpath_alarm_process()
673 if (((val64 & in __vxge_hw_vpath_alarm_process()
675 (!(val64 & in __vxge_hw_vpath_alarm_process()
677 ((val64 & in __vxge_hw_vpath_alarm_process()
679 (!(val64 & in __vxge_hw_vpath_alarm_process()
712 val64 = readq(&vp_reg->general_errors_reg); in __vxge_hw_vpath_alarm_process()
715 if ((val64 & in __vxge_hw_vpath_alarm_process()
724 if ((val64 & in __vxge_hw_vpath_alarm_process()
733 if ((val64 & in __vxge_hw_vpath_alarm_process()
738 if ((val64 & in __vxge_hw_vpath_alarm_process()
743 if ((val64 & in __vxge_hw_vpath_alarm_process()
760 val64 = readq(&vp_reg->kdfcctl_errors_reg); in __vxge_hw_vpath_alarm_process()
763 if ((val64 & in __vxge_hw_vpath_alarm_process()
773 if ((val64 & in __vxge_hw_vpath_alarm_process()
783 if ((val64 & in __vxge_hw_vpath_alarm_process()
806 val64 = readq(&vp_reg->wrdma_alarm_status); in __vxge_hw_vpath_alarm_process()
808 if (val64 & VXGE_HW_WRDMA_ALARM_STATUS_PRC_ALARM_PRC_INT) { in __vxge_hw_vpath_alarm_process()
810 val64 = readq(&vp_reg->prc_alarm_reg); in __vxge_hw_vpath_alarm_process()
813 if ((val64 & VXGE_HW_PRC_ALARM_REG_PRC_RING_BUMP)& in __vxge_hw_vpath_alarm_process()
817 if ((val64 & VXGE_HW_PRC_ALARM_REG_PRC_RXDCM_SC_ERR) & in __vxge_hw_vpath_alarm_process()
826 if ((val64 & VXGE_HW_PRC_ALARM_REG_PRC_RXDCM_SC_ABORT) in __vxge_hw_vpath_alarm_process()
835 if ((val64 & VXGE_HW_PRC_ALARM_REG_PRC_QUANTA_SIZE_ERR) in __vxge_hw_vpath_alarm_process()
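__vxge_hw_vpath_alarm_process() walks a hierarchy: block-level status registers (xgmac_vp_int_status, wrdma_alarm_status) say which unit raised the alarm, and each set bit directs the handler to a more specific error register (asic_ntwk_vp_err_reg, general_errors_reg, kdfcctl_errors_reg, prc_alarm_reg) whose individual bits are then tested. A condensed sketch of that dispatch shape; every demo_* register and bit name below is a placeholder, not the driver's real layout:

    #include <linux/types.h>
    #include <linux/io.h>
    #include <linux/bits.h>
    #include <linux/printk.h>

    #define DEMO_STATUS_XGMAC_INT  BIT_ULL(0)  /* placeholder bit positions */
    #define DEMO_STATUS_WRDMA_INT  BIT_ULL(1)
    #define DEMO_XGMAC_NET_FAULT   BIT_ULL(0)
    #define DEMO_WRDMA_RING_BUMP   BIT_ULL(0)

    struct demo_vp_reg {          /* stand-in for the vpath register block */
        u64 block_status;         /* which unit raised the alarm */
        u64 xgmac_err_reg;        /* network/XGMAC specific errors */
        u64 wrdma_err_reg;        /* receive-DMA specific errors */
    };

    static void demo_alarm_process(struct demo_vp_reg __iomem *vp_reg)
    {
        u64 val64 = readq(&vp_reg->block_status);

        if (val64 & DEMO_STATUS_XGMAC_INT) {
            u64 err = readq(&vp_reg->xgmac_err_reg);  /* drill down one level */

            if (err & DEMO_XGMAC_NET_FAULT)
                pr_warn("demo: network fault alarm\n");
        }

        if (val64 & DEMO_STATUS_WRDMA_INT) {
            u64 err = readq(&vp_reg->wrdma_err_reg);

            if (err & DEMO_WRDMA_RING_BUMP)
                pr_warn("demo: ring bump alarm\n");
        }
    }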
894 u64 val64; in vxge_hw_device_begin_irq() local
899 val64 = readq(&hldev->common_reg->titan_general_int_status); in vxge_hw_device_begin_irq()
901 if (unlikely(!val64)) { in vxge_hw_device_begin_irq()
908 if (unlikely(val64 == VXGE_HW_ALL_FOXES)) { in vxge_hw_device_begin_irq()
924 *reason = val64; in vxge_hw_device_begin_irq()
929 if (val64 & in vxge_hw_device_begin_irq()
938 if (unlikely(val64 & in vxge_hw_device_begin_irq()
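vxge_hw_device_begin_irq() performs two cheap sanity checks before doing any work: a zero titan_general_int_status means the interrupt did not come from this device (the IRQ line may be shared), and an all-ones readback (VXGE_HW_ALL_FOXES) is the usual symptom of a PCI read against an adapter that has been removed or stopped responding. A sketch of those early exits; the function shape and return codes are illustrative, only the two checks mirror the listing:

    #include <linux/types.h>
    #include <linux/io.h>
    #include <linux/errno.h>
    #include <linux/compiler.h>

    #define DEMO_ALL_FOXES  0xFFFFFFFFFFFFFFFFULL  /* stand-in for VXGE_HW_ALL_FOXES */

    static int demo_begin_irq(u64 __iomem *general_int_status, u64 *reason)
    {
        u64 val64 = readq(general_int_status);

        if (unlikely(!val64))
            return -EINVAL;          /* nothing set: not our interrupt */

        if (unlikely(val64 == DEMO_ALL_FOXES))
            return -EIO;             /* all-ones read: device likely gone from the bus */

        *reason = val64;             /* hand the raw cause bits to the caller */
        return 0;
    }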
2019 u64 val64; in vxge_hw_vpath_promisc_enable() local
2035 val64 = readq(&vpath->vp_reg->rxmac_vcfg0); in vxge_hw_vpath_promisc_enable()
2037 if (!(val64 & VXGE_HW_RXMAC_VCFG0_UCAST_ALL_ADDR_EN)) { in vxge_hw_vpath_promisc_enable()
2039 val64 |= VXGE_HW_RXMAC_VCFG0_UCAST_ALL_ADDR_EN | in vxge_hw_vpath_promisc_enable()
2044 writeq(val64, &vpath->vp_reg->rxmac_vcfg0); in vxge_hw_vpath_promisc_enable()
2061 u64 val64; in vxge_hw_vpath_promisc_disable() local
2072 val64 = readq(&vpath->vp_reg->rxmac_vcfg0); in vxge_hw_vpath_promisc_disable()
2074 if (val64 & VXGE_HW_RXMAC_VCFG0_UCAST_ALL_ADDR_EN) { in vxge_hw_vpath_promisc_disable()
2076 val64 &= ~(VXGE_HW_RXMAC_VCFG0_UCAST_ALL_ADDR_EN | in vxge_hw_vpath_promisc_disable()
2080 writeq(val64, &vpath->vp_reg->rxmac_vcfg0); in vxge_hw_vpath_promisc_disable()
2095 u64 val64; in vxge_hw_vpath_bcast_enable() local
2106 val64 = readq(&vpath->vp_reg->rxmac_vcfg0); in vxge_hw_vpath_bcast_enable()
2108 if (!(val64 & VXGE_HW_RXMAC_VCFG0_BCAST_EN)) { in vxge_hw_vpath_bcast_enable()
2109 val64 |= VXGE_HW_RXMAC_VCFG0_BCAST_EN; in vxge_hw_vpath_bcast_enable()
2110 writeq(val64, &vpath->vp_reg->rxmac_vcfg0); in vxge_hw_vpath_bcast_enable()
2127 u64 val64; in vxge_hw_vpath_mcast_enable() local
2138 val64 = readq(&vpath->vp_reg->rxmac_vcfg0); in vxge_hw_vpath_mcast_enable()
2140 if (!(val64 & VXGE_HW_RXMAC_VCFG0_MCAST_ALL_ADDR_EN)) { in vxge_hw_vpath_mcast_enable()
2141 val64 |= VXGE_HW_RXMAC_VCFG0_MCAST_ALL_ADDR_EN; in vxge_hw_vpath_mcast_enable()
2142 writeq(val64, &vpath->vp_reg->rxmac_vcfg0); in vxge_hw_vpath_mcast_enable()
2160 u64 val64; in vxge_hw_vpath_mcast_disable() local
2171 val64 = readq(&vpath->vp_reg->rxmac_vcfg0); in vxge_hw_vpath_mcast_disable()
2173 if (val64 & VXGE_HW_RXMAC_VCFG0_MCAST_ALL_ADDR_EN) { in vxge_hw_vpath_mcast_disable()
2174 val64 &= ~VXGE_HW_RXMAC_VCFG0_MCAST_ALL_ADDR_EN; in vxge_hw_vpath_mcast_disable()
2175 writeq(val64, &vpath->vp_reg->rxmac_vcfg0); in vxge_hw_vpath_mcast_disable()
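The promisc/bcast/mcast helpers all share one shape against rxmac_vcfg0: read the per-vpath RX MAC config, change the relevant *_ALL_ADDR_EN or BCAST_EN bit only if it is not already in the requested state, and write back, so the register is touched only when something actually changes. A minimal sketch of the enable direction, assuming the bit macro from the driver's register header:

    #include <linux/types.h>
    #include <linux/io.h>

    static void demo_mcast_enable(u64 __iomem *rxmac_vcfg0)
    {
        u64 val64 = readq(rxmac_vcfg0);

        if (!(val64 & VXGE_HW_RXMAC_VCFG0_MCAST_ALL_ADDR_EN)) {
            val64 |= VXGE_HW_RXMAC_VCFG0_MCAST_ALL_ADDR_EN;
            writeq(val64, rxmac_vcfg0);  /* write only when the bit actually flips */
        }
    }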
2221 u64 val64; in vxge_hw_vpath_msix_set() local
2226 val64 = VXGE_HW_INTERRUPT_CFG0_GROUP0_MSIX_FOR_TXTI( in vxge_hw_vpath_msix_set()
2231 writeq(val64, &vp_reg->interrupt_cfg0); in vxge_hw_vpath_msix_set()
2331 u64 val64; in vxge_hw_vpath_inta_mask_tx_rx() local
2337 val64 = readq(&hldev->common_reg->tim_int_mask0); in vxge_hw_vpath_inta_mask_tx_rx()
2342 tim_int_mask0[VXGE_HW_VPATH_INTR_RX] | val64), in vxge_hw_vpath_inta_mask_tx_rx()
2346 val64 = readl(&hldev->common_reg->tim_int_mask1); in vxge_hw_vpath_inta_mask_tx_rx()
2352 tim_int_mask1[VXGE_HW_VPATH_INTR_RX] | val64), in vxge_hw_vpath_inta_mask_tx_rx()
2369 u64 val64; in vxge_hw_vpath_inta_unmask_tx_rx() local
2375 val64 = readq(&hldev->common_reg->tim_int_mask0); in vxge_hw_vpath_inta_unmask_tx_rx()
2380 tim_int_mask0[VXGE_HW_VPATH_INTR_RX])) & val64, in vxge_hw_vpath_inta_unmask_tx_rx()
2388 tim_int_mask1[VXGE_HW_VPATH_INTR_RX])) & val64, in vxge_hw_vpath_inta_unmask_tx_rx()
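vxge_hw_vpath_inta_mask_tx_rx() and its unmask counterpart treat the shared tim_int_mask0/1 registers symmetrically: to mask one vpath's TX/RX interrupts the per-queue bits are OR-ed into the current mask value, and to unmask them the complement of those bits is AND-ed with it, leaving every other vpath's mask state untouched. A sketch of both directions with illustrative names:

    #include <linux/types.h>
    #include <linux/io.h>

    static void demo_inta_mask(u64 __iomem *tim_int_mask0, u64 tx_bits, u64 rx_bits)
    {
        u64 val64 = readq(tim_int_mask0);

        /* set our bits, preserve every other vpath's mask state */
        writeq(tx_bits | rx_bits | val64, tim_int_mask0);
    }

    static void demo_inta_unmask(u64 __iomem *tim_int_mask0, u64 tx_bits, u64 rx_bits)
    {
        u64 val64 = readq(tim_int_mask0);

        /* clear our bits, preserve every other vpath's mask state */
        writeq(~(tx_bits | rx_bits) & val64, tim_int_mask0);
    }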
2412 u64 val64 = 0; in vxge_hw_vpath_poll_rx() local
2440 val64 = in vxge_hw_vpath_poll_rx()