Lines Matching refs:tmp
173 u32 tmp = readl(&ep->dev->regs->pciirqenb0); in enable_pciirqenb() local
176 tmp |= BIT(ep->num); in enable_pciirqenb()
178 tmp |= BIT(ep_bit[ep->num]); in enable_pciirqenb()
179 writel(tmp, &ep->dev->regs->pciirqenb0); in enable_pciirqenb()
190 u32 tmp = 0; in net2280_enable() local
249 tmp = readl(&ep->cfg->ep_cfg); in net2280_enable()
251 if ((tmp & 0xf) != usb_endpoint_num(desc)) { in net2280_enable()
257 tmp &= ~USB3380_EP_CFG_MASK_IN; in net2280_enable()
259 tmp &= ~USB3380_EP_CFG_MASK_OUT; in net2280_enable()
282 tmp |= type << ENDPOINT_TYPE; in net2280_enable()
283 tmp |= desc->bEndpointAddress; in net2280_enable()
285 tmp |= (4 << ENDPOINT_BYTE_COUNT); in net2280_enable()
286 tmp |= BIT(ENDPOINT_ENABLE); in net2280_enable()
287 ep->is_in = (tmp & USB_DIR_IN) != 0; in net2280_enable()
291 tmp |= type << IN_ENDPOINT_TYPE; in net2280_enable()
292 tmp |= BIT(IN_ENDPOINT_ENABLE); in net2280_enable()
294 tmp |= type << OUT_ENDPOINT_TYPE; in net2280_enable()
295 tmp |= BIT(OUT_ENDPOINT_ENABLE); in net2280_enable()
296 tmp |= (ep->is_in << ENDPOINT_DIRECTION); in net2280_enable()
299 tmp |= (4 << ENDPOINT_BYTE_COUNT); in net2280_enable()
301 tmp |= usb_endpoint_num(desc); in net2280_enable()
302 tmp |= (ep->ep.maxburst << MAX_BURST_SIZE); in net2280_enable()
321 writel(tmp, &ep->cfg->ep_cfg); in net2280_enable()
327 tmp = BIT(DATA_PACKET_RECEIVED_INTERRUPT_ENABLE) | in net2280_enable()
330 tmp |= readl(&ep->regs->ep_irqenb); in net2280_enable()
331 writel(tmp, &ep->regs->ep_irqenb); in net2280_enable()
333 tmp = BIT((8 + ep->num)); /* completion */ in net2280_enable()
334 tmp |= readl(&dev->regs->pciirqenb1); in net2280_enable()
335 writel(tmp, &dev->regs->pciirqenb1); in net2280_enable()
342 tmp = BIT(SHORT_PACKET_TRANSFERRED_INTERRUPT_ENABLE); in net2280_enable()
343 writel(tmp, &ep->regs->ep_irqenb); in net2280_enable()
349 tmp = desc->bEndpointAddress; in net2280_enable()
351 _ep->name, tmp & 0x0f, DIR_STRING(tmp), in net2280_enable()
386 u32 tmp; in ep_reset_228x() local
402 tmp = readl(&regs->pciirqenb0); in ep_reset_228x()
403 tmp &= ~BIT(ep->num); in ep_reset_228x()
404 writel(tmp, &regs->pciirqenb0); in ep_reset_228x()
406 tmp = readl(&regs->pciirqenb1); in ep_reset_228x()
407 tmp &= ~BIT((8 + ep->num)); /* completion */ in ep_reset_228x()
408 writel(tmp, &regs->pciirqenb1); in ep_reset_228x()
416 tmp = BIT(SET_NAK_OUT_PACKETS_MODE) | in ep_reset_228x()
422 tmp = BIT(CLEAR_NAK_OUT_PACKETS_MODE) | in ep_reset_228x()
429 tmp |= BIT(CLEAR_ENDPOINT_TOGGLE) | in ep_reset_228x()
432 writel(tmp, &ep->regs->ep_rsp); in ep_reset_228x()
436 tmp = BIT(FIFO_OVERFLOW) | in ep_reset_228x()
439 tmp = 0; in ep_reset_228x()
441 writel(tmp | BIT(TIMEOUT) | in ep_reset_228x()
462 u32 tmp, dmastat; in ep_reset_338x() local
487 tmp = readl(&regs->pciirqenb0); in ep_reset_338x()
488 tmp &= ~BIT(ep_bit[ep->num]); in ep_reset_338x()
489 writel(tmp, &regs->pciirqenb0); in ep_reset_338x()
492 tmp = readl(&regs->pciirqenb1); in ep_reset_338x()
493 tmp &= ~BIT((8 + ep->num)); /* completion */ in ep_reset_338x()
494 writel(tmp, &regs->pciirqenb1); in ep_reset_338x()
507 tmp = readl(&ep->cfg->ep_cfg); in ep_reset_338x()
509 tmp &= ~USB3380_EP_CFG_MASK_IN; in ep_reset_338x()
511 tmp &= ~USB3380_EP_CFG_MASK_OUT; in ep_reset_338x()
512 writel(tmp, &ep->cfg->ep_cfg); in ep_reset_338x()
617 u32 tmp; in write_fifo() local
645 tmp = get_unaligned((u32 *)buf); in write_fifo()
646 cpu_to_le32s(&tmp); in write_fifo()
647 writel(tmp, &regs->ep_data); in write_fifo()
657 tmp = count ? get_unaligned((u32 *)buf) : count; in write_fifo()
658 cpu_to_le32s(&tmp); in write_fifo()
660 writel(tmp, &regs->ep_data); in write_fifo()
676 u32 tmp; in out_flush() local
680 tmp = readl(statp); in out_flush()
681 if (tmp & BIT(NAK_OUT_PACKETS)) { in out_flush()
683 ep->ep.name, __func__, tmp); in out_flush()
693 tmp = readl(statp); in out_flush()
694 if (tmp & BIT(DATA_OUT_PING_TOKEN_INTERRUPT) && in out_flush()
717 unsigned count, tmp, is_short; in read_fifo() local
726 tmp = readl(&ep->regs->ep_stat); in read_fifo()
727 if ((tmp & BIT(NAK_OUT_PACKETS))) in read_fifo()
729 else if ((tmp & BIT(FIFO_FULL))) { in read_fifo()
743 tmp = readl(&ep->regs->ep_stat); in read_fifo()
746 if (count == 0 && (tmp & BIT(NAK_OUT_PACKETS)) == 0) in read_fifo()
750 tmp = req->req.length - req->req.actual; in read_fifo()
751 if (count > tmp) { in read_fifo()
753 if ((tmp % ep->ep.maxpacket) != 0) { in read_fifo()
756 ep->ep.name, count, tmp); in read_fifo()
763 count = tmp; in read_fifo()
775 tmp = readl(&regs->ep_data); in read_fifo()
776 cpu_to_le32s(&tmp); in read_fifo()
777 put_unaligned(tmp, (u32 *)buf); in read_fifo()
782 tmp = readl(&regs->ep_data); in read_fifo()
785 *buf++ = (u8) tmp; in read_fifo()
786 tmp >>= 8; in read_fifo()
856 unsigned int tmp = BIT(VALID_BIT) | (ep->is_in << DMA_DIRECTION); in start_queue() local
859 tmp |= BIT(END_OF_CHAIN); in start_queue()
861 writel(tmp, &dma->dmacount); in start_queue()
880 u32 tmp; in start_dma() local
895 tmp = readl(&ep->regs->ep_avail); in start_dma()
896 if (tmp) { in start_dma()
901 tmp = min(tmp, req->req.length); in start_dma()
904 req->td->dmacount = cpu_to_le32(req->req.length - tmp); in start_dma()
905 writel(BIT(DMA_DONE_INTERRUPT_ENABLE) | tmp, in start_dma()
916 tmp = dmactl_default; in start_dma()
925 tmp |= BIT(DMA_FIFO_VALIDATE); in start_dma()
937 start_queue(ep, tmp, req->td_dma); in start_dma()
944 dma_addr_t tmp; in queue_dma() local
951 tmp = ep->td_dma; in queue_dma()
953 req->td_dma = tmp; in queue_dma()
1147 u32 tmp; in scan_dma_completions() local
1154 tmp = le32_to_cpup(&req->td->dmacount); in scan_dma_completions()
1155 if ((tmp & BIT(VALID_BIT)) != 0) in scan_dma_completions()
1164 tmp = readl(&ep->dma->dmacount); in scan_dma_completions()
1165 if (tmp & DMA_BYTE_COUNT_MASK) in scan_dma_completions()
1168 dma_done(ep, req, tmp, 0); in scan_dma_completions()
1174 tmp = readl(&ep->regs->ep_stat); in scan_dma_completions()
1179 if ((tmp & BIT(NAK_OUT_PACKETS)) == 0) { in scan_dma_completions()
1184 tmp = readl(&ep->regs->ep_avail); in scan_dma_completions()
1185 if (tmp) { in scan_dma_completions()
1190 ep->ep.name, tmp, in scan_dma_completions()
1196 dma_done(ep, req, tmp, 0); in scan_dma_completions()
1485 u32 tmp; in net2280_wakeup() local
1493 tmp = readl(&dev->usb->usbctl); in net2280_wakeup()
1494 if (tmp & BIT(DEVICE_REMOTE_WAKEUP_ENABLE)) in net2280_wakeup()
1505 u32 tmp; in net2280_set_selfpowered() local
1513 tmp = readl(&dev->usb->usbctl); in net2280_set_selfpowered()
1515 tmp |= BIT(SELF_POWERED_STATUS); in net2280_set_selfpowered()
1518 tmp &= ~BIT(SELF_POWERED_STATUS); in net2280_set_selfpowered()
1521 writel(tmp, &dev->usb->usbctl); in net2280_set_selfpowered()
1530 u32 tmp; in net2280_pullup() local
1538 tmp = readl(&dev->usb->usbctl); in net2280_pullup()
1542 writel(tmp | BIT(USB_DETECT_ENABLE), &dev->usb->usbctl); in net2280_pullup()
1544 writel(tmp & ~BIT(USB_DETECT_ENABLE), &dev->usb->usbctl); in net2280_pullup()
1944 u32 tmp = 0, tmp_reg; in defect7374_enable_data_eps_zero() local
1960 tmp = ((0 << ENDPOINT_NUMBER) | BIT(ENDPOINT_DIRECTION) | in defect7374_enable_data_eps_zero()
1967 writel(tmp, &dev->ep[i].cfg->ep_cfg); in defect7374_enable_data_eps_zero()
1970 tmp = ((0 << ENDPOINT_NUMBER) | BIT(ENDPOINT_ENABLE)); in defect7374_enable_data_eps_zero()
1971 writel(tmp, &dev->dep[1].dep_cfg); in defect7374_enable_data_eps_zero()
1972 writel(tmp, &dev->dep[3].dep_cfg); in defect7374_enable_data_eps_zero()
1973 writel(tmp, &dev->dep[4].dep_cfg); in defect7374_enable_data_eps_zero()
1974 writel(tmp, &dev->dep[5].dep_cfg); in defect7374_enable_data_eps_zero()
1985 tmp = in defect7374_enable_data_eps_zero()
1988 writel(tmp, &dev->plregs->pl_ep_ctrl); in defect7374_enable_data_eps_zero()
1996 tmp = (readl(&dev->plregs->pl_ep_cfg_4) | in defect7374_enable_data_eps_zero()
1998 writel(tmp, &dev->plregs->pl_ep_cfg_4); in defect7374_enable_data_eps_zero()
2000 tmp = readl(&dev->plregs->pl_ep_ctrl) & in defect7374_enable_data_eps_zero()
2002 writel(tmp, &dev->plregs->pl_ep_ctrl); in defect7374_enable_data_eps_zero()
2025 u32 tmp; in usb_reset_228x() local
2038 for (tmp = 0; tmp < 4; tmp++) { in usb_reset_228x()
2039 struct net2280_ep *ep = &dev->ep[tmp + 1]; in usb_reset_228x()
2048 tmp = readl(&dev->regs->devinit) | in usb_reset_228x()
2053 writel(tmp, &dev->regs->devinit); in usb_reset_228x()
2061 u32 tmp; in usb_reset_338x() local
2076 for (tmp = 0; tmp < 4; tmp++) { in usb_reset_338x()
2077 struct net2280_ep *ep = &dev->ep[tmp + 1]; in usb_reset_338x()
2083 dma = &dev->dma[tmp]; in usb_reset_338x()
2093 tmp = readl(&dev->regs->devinit) | in usb_reset_338x()
2099 writel(tmp, &dev->regs->devinit); in usb_reset_338x()
2105 for (tmp = 1; tmp < dev->n_ep; tmp++) in usb_reset_338x()
2106 list_add_tail(&dev->ep[tmp].ep.ep_list, &dev->gadget.ep_list); in usb_reset_338x()
2119 u32 tmp; in usb_reinit_228x() local
2122 for (tmp = 0; tmp < 7; tmp++) { in usb_reinit_228x()
2123 struct net2280_ep *ep = &dev->ep[tmp]; in usb_reinit_228x()
2125 ep->ep.name = ep_info_dft[tmp].name; in usb_reinit_228x()
2126 ep->ep.caps = ep_info_dft[tmp].caps; in usb_reinit_228x()
2128 ep->num = tmp; in usb_reinit_228x()
2130 if (tmp > 0 && tmp <= 4) { in usb_reinit_228x()
2132 ep->dma = &dev->dma[tmp - 1]; in usb_reinit_228x()
2135 ep->regs = &dev->epregs[tmp]; in usb_reinit_228x()
2136 ep->cfg = &dev->epregs[tmp]; in usb_reinit_228x()
2150 for (tmp = 0; tmp < 5; tmp++) in usb_reinit_228x()
2151 writel(EP_DONTUSE, &dev->dep[tmp].dep_cfg); in usb_reinit_228x()
2157 u32 tmp, val; in usb_reinit_338x() local
2203 tmp = readl(&dev->usb_ext->usbctl2) & in usb_reinit_338x()
2205 writel(tmp, &dev->usb_ext->usbctl2); in usb_reinit_338x()
2788 int tmp = 0; in handle_stat0_irqs_superspeed() local
2972 tmp = dev->driver->setup(&dev->gadget, &r); in handle_stat0_irqs_superspeed()
2976 if (tmp < 0) { in handle_stat0_irqs_superspeed()
2978 r.bRequestType, r.bRequest, tmp); in handle_stat0_irqs_superspeed()
3030 int tmp; in handle_stat0_irqs() local
3068 tmp = BIT(FIFO_OVERFLOW) | in handle_stat0_irqs()
3071 tmp = 0; in handle_stat0_irqs()
3073 writel(tmp | BIT(TIMEOUT) | in handle_stat0_irqs()
3096 tmp = 0; in handle_stat0_irqs()
3214 tmp = dev->driver->setup(&dev->gadget, &u.r); in handle_stat0_irqs()
3219 if (tmp < 0) { in handle_stat0_irqs()
3222 u.r.bRequestType, u.r.bRequest, tmp); in handle_stat0_irqs()
3282 u32 tmp, num, mask, scratch; in handle_stat1_irqs() local
3285 tmp = BIT(VBUS_INTERRUPT) | BIT(ROOT_PORT_RESET_INTERRUPT); in handle_stat1_irqs()
3293 if (stat & tmp) { in handle_stat1_irqs()
3301 writel(tmp, &dev->regs->irqstat1); in handle_stat1_irqs()
3331 stat &= ~tmp; in handle_stat1_irqs()
3343 tmp = BIT(SUSPEND_REQUEST_CHANGE_INTERRUPT); in handle_stat1_irqs()
3344 if (stat & tmp) { in handle_stat1_irqs()
3345 writel(tmp, &dev->regs->irqstat1); in handle_stat1_irqs()
3356 stat &= ~tmp; in handle_stat1_irqs()
3386 tmp = BIT(num); in handle_stat1_irqs()
3387 if ((tmp & scratch) == 0) in handle_stat1_irqs()
3389 scratch ^= tmp; in handle_stat1_irqs()
3398 tmp = readl(&dma->dmastat); in handle_stat1_irqs()
3399 writel(tmp, &dma->dmastat); in handle_stat1_irqs()
3405 (tmp & BIT(DMA_TRANSACTION_DONE_INTERRUPT))) in handle_stat1_irqs()
3409 if (!(tmp & BIT(DMA_TRANSACTION_DONE_INTERRUPT))) { in handle_stat1_irqs()
3411 ep->ep.name, tmp); in handle_stat1_irqs()
3430 tmp = readl(&dma->dmactl); in handle_stat1_irqs()
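
Nearly every match above is one instance of the same read-modify-write idiom: read a memory-mapped register into tmp with readl(), set or clear individual bits with BIT(), then write the result back with writel(). Below is a minimal sketch of that idiom, reconstructed from the enable_pciirqenb() matches at lines 173-179 of the listing; the quirks test standing in for the elided conditional (selecting BIT(ep->num) versus BIT(ep_bit[ep->num])) is an assumption, since the listing does not show those lines.

    /* Read-modify-write sketch modeled on the enable_pciirqenb() fragments.
     * The PLX_LEGACY quirks check is assumed: the listing elides the test
     * that picks the plain or the remapped interrupt-enable bit. */
    static void enable_pciirqenb(struct net2280_ep *ep)
    {
            u32 tmp = readl(&ep->dev->regs->pciirqenb0);    /* current enable mask */

            if (ep->dev->quirks & PLX_LEGACY)               /* assumed: 228x family */
                    tmp |= BIT(ep->num);                    /* enable bit == ep number */
            else                                            /* assumed: 338x family */
                    tmp |= BIT(ep_bit[ep->num]);            /* enable bit is remapped */

            writel(tmp, &ep->dev->regs->pciirqenb0);        /* write the mask back */
    }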