Lines matching refs: val64
32 u64 val64; in vxge_hw_vpath_intr_enable() local
86 val64 = readq(&vp_reg->vpath_general_int_status); in vxge_hw_vpath_intr_enable()
159 u64 val64; in vxge_hw_vpath_intr_disable() local
181 val64 = VXGE_HW_TIM_CLR_INT_EN_VP(1 << (16 - vpath->vp_id)); in vxge_hw_vpath_intr_disable()
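The interrupt enable/disable references above show only one complete expression: the per-vpath clear-enable flag built by shifting 1 left by (16 - vp_id) before it is wrapped in VXGE_HW_TIM_CLR_INT_EN_VP(). A minimal, runnable sketch of just that bit arithmetic (the macro itself is not reproduced here), assuming vpath ids run 0..16 as on Titan:

#include <stdio.h>

int main(void)
{
    /* The driver builds the per-vpath flag as 1 << (16 - vp_id), so vpath 0
     * lands on bit 16 and vpath 16 on bit 0 (Titan exposes 17 virtual paths). */
    for (unsigned vp_id = 0; vp_id <= 16; vp_id += 8)
        printf("vp_id %2u -> bit %#06x\n", vp_id, 1u << (16 - vp_id));
    return 0;
}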
226 u64 val64; in vxge_hw_vpath_tti_ci_set() local
236 val64 = readq(&vp_reg->tim_cfg1_int_num[VXGE_HW_VPATH_INTR_TX]); in vxge_hw_vpath_tti_ci_set()
237 val64 |= VXGE_HW_TIM_CFG1_INT_NUM_TIMER_CI; in vxge_hw_vpath_tti_ci_set()
238 fifo->tim_tti_cfg1_saved = val64; in vxge_hw_vpath_tti_ci_set()
239 writeq(val64, &vp_reg->tim_cfg1_int_num[VXGE_HW_VPATH_INTR_TX]); in vxge_hw_vpath_tti_ci_set()
245 u64 val64 = ring->tim_rti_cfg1_saved; in vxge_hw_vpath_dynamic_rti_ci_set() local
247 val64 |= VXGE_HW_TIM_CFG1_INT_NUM_TIMER_CI; in vxge_hw_vpath_dynamic_rti_ci_set()
248 ring->tim_rti_cfg1_saved = val64; in vxge_hw_vpath_dynamic_rti_ci_set()
249 writeq(val64, &ring->vp_reg->tim_cfg1_int_num[VXGE_HW_VPATH_INTR_RX]); in vxge_hw_vpath_dynamic_rti_ci_set()
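The references at 236-239 and 245-249 show the same read-modify-write idiom in two flavors: the TTI path reads tim_cfg1_int_num back from hardware before OR-ing in the TIMER_CI bit, while the RTI path starts from the cached tim_rti_cfg1_saved copy. A minimal user-space sketch of both flavors, with the register modeled as a plain variable and a placeholder bit value (the real one is VXGE_HW_TIM_CFG1_INT_NUM_TIMER_CI):

#include <stdint.h>
#include <stdio.h>

#define TIMER_CI (1ULL << 63)        /* placeholder bit position */

static uint64_t hw_reg;              /* stands in for tim_cfg1_int_num[...] */
static uint64_t cfg1_saved;          /* stands in for tim_*ti_cfg1_saved */

/* Flavor 1: read the live register, set the bit, remember it, write back. */
static void ci_set_from_hw(void)
{
    uint64_t val64 = hw_reg;         /* readq(...) in the driver */
    val64 |= TIMER_CI;
    cfg1_saved = val64;              /* keep a shadow copy for later updates */
    hw_reg = val64;                  /* writeq(...) in the driver */
}

/* Flavor 2: start from the shadow copy so no MMIO read is needed. */
static void ci_set_from_saved(void)
{
    uint64_t val64 = cfg1_saved;
    val64 |= TIMER_CI;
    cfg1_saved = val64;
    hw_reg = val64;
}

int main(void)
{
    ci_set_from_hw();
    ci_set_from_saved();
    printf("reg = %#llx\n", (unsigned long long)hw_reg);
    return 0;
}

Working from the saved copy on the dynamic path presumably avoids an extra MMIO read in interrupt context; the sketch only mirrors the data flow visible in the listing.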
254 u64 val64 = fifo->tim_tti_cfg3_saved; in vxge_hw_vpath_dynamic_tti_rtimer_set() local
257 val64 &= ~VXGE_HW_TIM_CFG3_INT_NUM_RTIMER_VAL(0x3ffffff); in vxge_hw_vpath_dynamic_tti_rtimer_set()
259 val64 |= VXGE_HW_TIM_CFG3_INT_NUM_RTIMER_VAL(timer) | in vxge_hw_vpath_dynamic_tti_rtimer_set()
262 writeq(val64, &fifo->vp_reg->tim_cfg3_int_num[VXGE_HW_VPATH_INTR_TX]); in vxge_hw_vpath_dynamic_tti_rtimer_set()
270 u64 val64 = ring->tim_rti_cfg3_saved; in vxge_hw_vpath_dynamic_rti_rtimer_set() local
273 val64 &= ~VXGE_HW_TIM_CFG3_INT_NUM_RTIMER_VAL(0x3ffffff); in vxge_hw_vpath_dynamic_rti_rtimer_set()
275 val64 |= VXGE_HW_TIM_CFG3_INT_NUM_RTIMER_VAL(timer) | in vxge_hw_vpath_dynamic_rti_rtimer_set()
278 writeq(val64, &ring->vp_reg->tim_cfg3_int_num[VXGE_HW_VPATH_INTR_RX]); in vxge_hw_vpath_dynamic_rti_rtimer_set()
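Lines 254-262 and 270-278 update a multi-bit restart-timer field rather than a single flag: the saved cfg3 value is taken, the whole RTIMER_VAL field is cleared by AND-ing with the inverse of the field macro applied to an all-ones pattern (0x3ffffff), and the new timer value is OR-ed in before the writeq. A small sketch under the assumption that the VAL macro simply shifts its argument into the field; the real bit position lives in the register header and is not shown in this listing:

#include <stdint.h>
#include <stdio.h>

/* Hypothetical layout: a 26-bit timer value at bit 10. The real placement is
 * whatever VXGE_HW_TIM_CFG3_INT_NUM_RTIMER_VAL() encodes. */
#define RTIMER_VAL(v)   (((uint64_t)(v) & 0x3ffffffULL) << 10)

static uint64_t set_rtimer(uint64_t cfg3_saved, uint32_t timer)
{
    uint64_t val64 = cfg3_saved;

    val64 &= ~RTIMER_VAL(0x3ffffff);   /* clear the whole field first */
    val64 |= RTIMER_VAL(timer);        /* then drop in the new value */
    return val64;                      /* caller would writeq() this */
}

int main(void)
{
    uint64_t cfg3 = RTIMER_VAL(0x123456) | 0x3;   /* old timer + unrelated bits */
    printf("%#llx\n", (unsigned long long)set_rtimer(cfg3, 0x42));
    return 0;
}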
369 u64 val64; in vxge_hw_device_intr_enable() local
384 val64 = hldev->tim_int_mask0[VXGE_HW_VPATH_INTR_TX] | in vxge_hw_device_intr_enable()
387 if (val64 != 0) { in vxge_hw_device_intr_enable()
388 writeq(val64, &hldev->common_reg->tim_int_status0); in vxge_hw_device_intr_enable()
390 writeq(~val64, &hldev->common_reg->tim_int_mask0); in vxge_hw_device_intr_enable()
405 val64 = readq(&hldev->common_reg->titan_general_int_status); in vxge_hw_device_intr_enable()
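At 384-390 the TX and RX tim_int_mask0 words are OR-ed together; the combined value is written to tim_int_status0 (apparently to acknowledge any stale events) and its complement is written to tim_int_mask0 so only those sources remain unmasked. A toy model of that sequence, assuming write-1-to-clear status semantics and "1 = masked" mask semantics, neither of which is stated in the listing itself:

#include <stdint.h>
#include <stdio.h>

static uint64_t status0, mask0 = ~0ULL;   /* toy registers */

static void intr_enable(uint64_t tx_bits, uint64_t rx_bits)
{
    uint64_t val64 = tx_bits | rx_bits;

    if (val64 != 0) {
        status0 &= ~val64;    /* writeq(val64, ...status0): clear stale events */
        mask0 = ~val64;       /* writeq(~val64, ...mask0): unmask only these */
    }
}

int main(void)
{
    status0 = 0x5;                        /* pretend two events were pending */
    intr_enable(0x1, 0x4);
    printf("status0=%#llx mask0=%#llx\n",
           (unsigned long long)status0, (unsigned long long)mask0);
    return 0;
}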
451 u64 val64; in vxge_hw_device_mask_all() local
453 val64 = VXGE_HW_TITAN_MASK_ALL_INT_ALARM | in vxge_hw_device_mask_all()
456 __vxge_hw_pio_mem_write32_upper((u32)vxge_bVALn(val64, 0, 32), in vxge_hw_device_mask_all()
470 u64 val64 = 0; in vxge_hw_device_unmask_all() local
473 val64 = VXGE_HW_TITAN_MASK_ALL_INT_TRAFFIC; in vxge_hw_device_unmask_all()
475 __vxge_hw_pio_mem_write32_upper((u32)vxge_bVALn(val64, 0, 32), in vxge_hw_device_unmask_all()
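The mask_all/unmask_all references (453-456, 470-475) push only the upper half of the 64-bit general mask value through __vxge_hw_pio_mem_write32_upper(), using vxge_bVALn(val64, 0, 32) to pick out those bits. A sketch assuming vxge_bVALn(bits, loc, n) extracts n bits starting at big-endian bit position loc, with bit 0 being the MSB (an assumption about the macro, not a quote of it):

#include <stdint.h>
#include <stdio.h>

/* Assumed semantics of vxge_bVALn(bits, loc, n). */
#define bVALn(bits, loc, n) \
    (((uint64_t)(bits) >> (64 - ((loc) + (n)))) & ((1ULL << (n)) - 1))

int main(void)
{
    uint64_t val64 = 0xA1B2C3D400000000ULL;   /* alarm/pic/traffic bits up top */
    uint32_t upper = (uint32_t)bVALn(val64, 0, 32);

    printf("upper 32 bits: %#x\n", upper);    /* 0xa1b2c3d4 */
    return 0;
}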
605 u64 val64; in __vxge_hw_vpath_alarm_process() local
646 val64 = readq(&vp_reg->xgmac_vp_int_status); in __vxge_hw_vpath_alarm_process()
648 if (val64 & in __vxge_hw_vpath_alarm_process()
651 val64 = readq(&vp_reg->asic_ntwk_vp_err_reg); in __vxge_hw_vpath_alarm_process()
653 if (((val64 & in __vxge_hw_vpath_alarm_process()
655 (!(val64 & in __vxge_hw_vpath_alarm_process()
657 ((val64 & in __vxge_hw_vpath_alarm_process()
659 (!(val64 & in __vxge_hw_vpath_alarm_process()
673 if (((val64 & in __vxge_hw_vpath_alarm_process()
675 (!(val64 & in __vxge_hw_vpath_alarm_process()
677 ((val64 & in __vxge_hw_vpath_alarm_process()
679 (!(val64 & in __vxge_hw_vpath_alarm_process()
712 val64 = readq(&vp_reg->general_errors_reg); in __vxge_hw_vpath_alarm_process()
715 if ((val64 & in __vxge_hw_vpath_alarm_process()
724 if ((val64 & in __vxge_hw_vpath_alarm_process()
733 if ((val64 & in __vxge_hw_vpath_alarm_process()
738 if ((val64 & in __vxge_hw_vpath_alarm_process()
743 if ((val64 & in __vxge_hw_vpath_alarm_process()
760 val64 = readq(&vp_reg->kdfcctl_errors_reg); in __vxge_hw_vpath_alarm_process()
763 if ((val64 & in __vxge_hw_vpath_alarm_process()
773 if ((val64 & in __vxge_hw_vpath_alarm_process()
783 if ((val64 & in __vxge_hw_vpath_alarm_process()
806 val64 = readq(&vp_reg->wrdma_alarm_status); in __vxge_hw_vpath_alarm_process()
808 if (val64 & VXGE_HW_WRDMA_ALARM_STATUS_PRC_ALARM_PRC_INT) { in __vxge_hw_vpath_alarm_process()
810 val64 = readq(&vp_reg->prc_alarm_reg); in __vxge_hw_vpath_alarm_process()
813 if ((val64 & VXGE_HW_PRC_ALARM_REG_PRC_RING_BUMP)& in __vxge_hw_vpath_alarm_process()
817 if ((val64 & VXGE_HW_PRC_ALARM_REG_PRC_RXDCM_SC_ERR) & in __vxge_hw_vpath_alarm_process()
826 if ((val64 & VXGE_HW_PRC_ALARM_REG_PRC_RXDCM_SC_ABORT) in __vxge_hw_vpath_alarm_process()
835 if ((val64 & VXGE_HW_PRC_ALARM_REG_PRC_QUANTA_SIZE_ERR) in __vxge_hw_vpath_alarm_process()
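The alarm-processing references at 806-835 show a two-level status walk: a summary register (wrdma_alarm_status) is read first, and only if its PRC_ALARM bit is set is the detailed prc_alarm_reg read and decoded bit by bit. A compact sketch of that structure with invented bit positions standing in for the real VXGE_HW_PRC_ALARM_REG_* definitions:

#include <stdint.h>
#include <stdio.h>

#define SUMMARY_PRC_ALARM   (1ULL << 0)   /* stand-in for ..._PRC_ALARM_PRC_INT */
#define PRC_RING_BUMP       (1ULL << 1)   /* stand-ins for the detail-reg bits  */
#define PRC_RXDCM_SC_ERR    (1ULL << 2)

static uint64_t wrdma_alarm_status = SUMMARY_PRC_ALARM;
static uint64_t prc_alarm_reg = PRC_RING_BUMP;

static void alarm_process(void)
{
    uint64_t val64 = wrdma_alarm_status;          /* readq() of the summary */

    if (val64 & SUMMARY_PRC_ALARM) {
        val64 = prc_alarm_reg;                    /* readq() of the detail reg */

        if (val64 & PRC_RING_BUMP)
            printf("ring bump alarm\n");
        if (val64 & PRC_RXDCM_SC_ERR)
            printf("RxDCM self-check error\n");
    }
}

int main(void)
{
    alarm_process();
    return 0;
}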
894 u64 val64; in vxge_hw_device_begin_irq() local
899 val64 = readq(&hldev->common_reg->titan_general_int_status); in vxge_hw_device_begin_irq()
901 if (unlikely(!val64)) { in vxge_hw_device_begin_irq()
908 if (unlikely(val64 == VXGE_HW_ALL_FOXES)) { in vxge_hw_device_begin_irq()
924 *reason = val64; in vxge_hw_device_begin_irq()
929 if (val64 & in vxge_hw_device_begin_irq()
938 if (unlikely(val64 & in vxge_hw_device_begin_irq()
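Lines 899-938 sketch the top of the interrupt handler: titan_general_int_status is read once; zero means the interrupt was not raised by this adapter, and VXGE_HW_ALL_FOXES means the device read back as all ones (typical of a surprise-removed PCI function); only then is the value returned through *reason for decoding. A simplified model, assuming ALL_FOXES is the all-ones pattern and folding both early exits into one error return:

#include <stdint.h>
#include <stdio.h>

#define ALL_FOXES  0xFFFFFFFFFFFFFFFFULL   /* assumed value of VXGE_HW_ALL_FOXES */

/* Returns 0 and fills *reason on a genuine interrupt, -1 otherwise. */
static int begin_irq(uint64_t general_int_status, uint64_t *reason)
{
    uint64_t val64 = general_int_status;   /* readq() in the driver */

    if (!val64)
        return -1;                         /* not our interrupt (shared line) */

    if (val64 == ALL_FOXES)
        return -1;                         /* device unplugged / bus error */

    *reason = val64;                       /* caller decodes traffic/alarm bits */
    return 0;
}

int main(void)
{
    uint64_t reason;
    printf("%d\n", begin_irq(0, &reason));
    printf("%d\n", begin_irq(0x8, &reason));
    return 0;
}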
2014 u64 val64; in vxge_hw_vpath_promisc_enable() local
2030 val64 = readq(&vpath->vp_reg->rxmac_vcfg0); in vxge_hw_vpath_promisc_enable()
2032 if (!(val64 & VXGE_HW_RXMAC_VCFG0_UCAST_ALL_ADDR_EN)) { in vxge_hw_vpath_promisc_enable()
2034 val64 |= VXGE_HW_RXMAC_VCFG0_UCAST_ALL_ADDR_EN | in vxge_hw_vpath_promisc_enable()
2039 writeq(val64, &vpath->vp_reg->rxmac_vcfg0); in vxge_hw_vpath_promisc_enable()
2056 u64 val64; in vxge_hw_vpath_promisc_disable() local
2067 val64 = readq(&vpath->vp_reg->rxmac_vcfg0); in vxge_hw_vpath_promisc_disable()
2069 if (val64 & VXGE_HW_RXMAC_VCFG0_UCAST_ALL_ADDR_EN) { in vxge_hw_vpath_promisc_disable()
2071 val64 &= ~(VXGE_HW_RXMAC_VCFG0_UCAST_ALL_ADDR_EN | in vxge_hw_vpath_promisc_disable()
2075 writeq(val64, &vpath->vp_reg->rxmac_vcfg0); in vxge_hw_vpath_promisc_disable()
2090 u64 val64; in vxge_hw_vpath_bcast_enable() local
2101 val64 = readq(&vpath->vp_reg->rxmac_vcfg0); in vxge_hw_vpath_bcast_enable()
2103 if (!(val64 & VXGE_HW_RXMAC_VCFG0_BCAST_EN)) { in vxge_hw_vpath_bcast_enable()
2104 val64 |= VXGE_HW_RXMAC_VCFG0_BCAST_EN; in vxge_hw_vpath_bcast_enable()
2105 writeq(val64, &vpath->vp_reg->rxmac_vcfg0); in vxge_hw_vpath_bcast_enable()
2122 u64 val64; in vxge_hw_vpath_mcast_enable() local
2133 val64 = readq(&vpath->vp_reg->rxmac_vcfg0); in vxge_hw_vpath_mcast_enable()
2135 if (!(val64 & VXGE_HW_RXMAC_VCFG0_MCAST_ALL_ADDR_EN)) { in vxge_hw_vpath_mcast_enable()
2136 val64 |= VXGE_HW_RXMAC_VCFG0_MCAST_ALL_ADDR_EN; in vxge_hw_vpath_mcast_enable()
2137 writeq(val64, &vpath->vp_reg->rxmac_vcfg0); in vxge_hw_vpath_mcast_enable()
2155 u64 val64; in vxge_hw_vpath_mcast_disable() local
2166 val64 = readq(&vpath->vp_reg->rxmac_vcfg0); in vxge_hw_vpath_mcast_disable()
2168 if (val64 & VXGE_HW_RXMAC_VCFG0_MCAST_ALL_ADDR_EN) { in vxge_hw_vpath_mcast_disable()
2169 val64 &= ~VXGE_HW_RXMAC_VCFG0_MCAST_ALL_ADDR_EN; in vxge_hw_vpath_mcast_disable()
2170 writeq(val64, &vpath->vp_reg->rxmac_vcfg0); in vxge_hw_vpath_mcast_disable()
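The promiscuous, broadcast, and multicast references (2030-2039 through 2166-2170) all follow one idiom on rxmac_vcfg0: read the register, test whether the feature bit already has the desired state, and only on a change flip it and write the value back. A minimal sketch of that conditional read-modify-write, with a placeholder bit position for the VXGE_HW_RXMAC_VCFG0_* flags:

#include <stdint.h>
#include <stdbool.h>
#include <stdio.h>

#define MCAST_ALL_ADDR_EN (1ULL << 7)   /* placeholder for a VXGE_HW_RXMAC_VCFG0_* bit */

static uint64_t rxmac_vcfg0;

/* Enable/disable one feature bit, writing back only when the state changes. */
static void set_feature(uint64_t bit, bool enable)
{
    uint64_t val64 = rxmac_vcfg0;               /* readq() in the driver */

    if (enable && !(val64 & bit))
        rxmac_vcfg0 = val64 | bit;              /* writeq() in the driver */
    else if (!enable && (val64 & bit))
        rxmac_vcfg0 = val64 & ~bit;
}

int main(void)
{
    set_feature(MCAST_ALL_ADDR_EN, true);
    set_feature(MCAST_ALL_ADDR_EN, false);
    printf("%#llx\n", (unsigned long long)rxmac_vcfg0);
    return 0;
}

Skipping the write when the bit is already in the requested state avoids a redundant MMIO access, which is all the guard conditions in these references appear to do.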
2216 u64 val64; in vxge_hw_vpath_msix_set() local
2221 val64 = VXGE_HW_INTERRUPT_CFG0_GROUP0_MSIX_FOR_TXTI( in vxge_hw_vpath_msix_set()
2226 writeq(val64, &vp_reg->interrupt_cfg0); in vxge_hw_vpath_msix_set()
2326 u64 val64; in vxge_hw_vpath_inta_mask_tx_rx() local
2332 val64 = readq(&hldev->common_reg->tim_int_mask0); in vxge_hw_vpath_inta_mask_tx_rx()
2337 tim_int_mask0[VXGE_HW_VPATH_INTR_RX] | val64), in vxge_hw_vpath_inta_mask_tx_rx()
2341 val64 = readl(&hldev->common_reg->tim_int_mask1); in vxge_hw_vpath_inta_mask_tx_rx()
2347 tim_int_mask1[VXGE_HW_VPATH_INTR_RX] | val64), in vxge_hw_vpath_inta_mask_tx_rx()
2364 u64 val64; in vxge_hw_vpath_inta_unmask_tx_rx() local
2370 val64 = readq(&hldev->common_reg->tim_int_mask0); in vxge_hw_vpath_inta_unmask_tx_rx()
2375 tim_int_mask0[VXGE_HW_VPATH_INTR_RX])) & val64, in vxge_hw_vpath_inta_unmask_tx_rx()
2383 tim_int_mask1[VXGE_HW_VPATH_INTR_RX])) & val64, in vxge_hw_vpath_inta_unmask_tx_rx()
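The INTA mask/unmask references (2332-2347, 2370-2383) are complementary: masking ORs the vpath's TX and RX mask words into the current tim_int_mask0/1 value, while unmasking ANDs the current value with the complement of those same words; the results are then pushed out 32 bits at a time via __vxge_hw_pio_mem_write32_upper(). A sketch of the two operations on a plain 64-bit variable, assuming "1 = masked" semantics:

#include <stdint.h>
#include <stdio.h>

static uint64_t tim_int_mask0;   /* toy register, 1 = source masked */

static void inta_mask(uint64_t tx, uint64_t rx)
{
    uint64_t val64 = tim_int_mask0;          /* readq() */
    tim_int_mask0 = tx | rx | val64;         /* set this vpath's TX/RX mask bits */
}

static void inta_unmask(uint64_t tx, uint64_t rx)
{
    uint64_t val64 = tim_int_mask0;          /* readq() */
    tim_int_mask0 = ~(tx | rx) & val64;      /* clear only this vpath's bits */
}

int main(void)
{
    inta_mask(0x2, 0x4);
    inta_unmask(0x2, 0x4);
    printf("%#llx\n", (unsigned long long)tim_int_mask0);
    return 0;
}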
2407 u64 val64 = 0; in vxge_hw_vpath_poll_rx() local
2435 val64 = in vxge_hw_vpath_poll_rx()