Lines Matching refs: zatm_dev
125 #define zin_n(r) inl(zatm_dev->base+r*4)
126 #define zin(r) inl(zatm_dev->base+uPD98401_##r*4)
127 #define zout(v,r) outl(v,zatm_dev->base+uPD98401_##r*4)
140 static void zpokel(struct zatm_dev *zatm_dev,u32 value,u32 addr) in zpokel() argument
149 static u32 zpeekl(struct zatm_dev *zatm_dev,u32 addr) in zpeekl() argument
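
The zin()/zout() macros at lines 125-127 token-paste the register name onto the uPD98401_ prefix and scale the 32-bit register index into a byte offset from the card's PCI I/O base, while zpokel()/zpeekl() at lines 140/149 take the zatm_dev pointer explicitly. A minimal stand-alone sketch of the macro pattern follows; the uPD98401_GMR index and the inl()/outl() stubs are assumptions added only so the example compiles outside the kernel.

/*
 * Sketch of the register-access pattern behind zin()/zout().
 * The uPD98401_GMR index and the port-I/O stubs are illustrative
 * assumptions; the driver uses the real <linux/io.h> accessors and
 * the offsets from uPD98401.h.
 */
#include <stdio.h>
#include <stdint.h>

static uint32_t io_space[64];                         /* fake I/O port space */
static uint32_t inl(unsigned long port)       { return io_space[port / 4]; }
static void outl(uint32_t v, unsigned long p) { io_space[p / 4] = v; }

#define uPD98401_GMR 0x00                     /* hypothetical register index */

struct zatm_dev { unsigned long base; };   /* only the field the macros need */
static struct zatm_dev *zatm_dev;          /* macros expect this identifier  */

/* Same shape as lines 125-127: ## pastes the register name, *4 scales
 * the 32-bit register index into a byte offset from the I/O base. */
#define zin_n(r)  inl(zatm_dev->base + (r) * 4)
#define zin(r)    inl(zatm_dev->base + uPD98401_##r * 4)
#define zout(v,r) outl(v, zatm_dev->base + uPD98401_##r * 4)

int main(void)
{
	struct zatm_dev dev = { .base = 0 };

	zatm_dev = &dev;
	zout(0x12345678, GMR);          /* expands to outl(..., base + 0 * 4) */
	printf("GMR readback: 0x%08x\n", (unsigned int) zin(GMR));
	return 0;
}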
180 struct zatm_dev *zatm_dev; in refill_pool() local
187 zatm_dev = ZATM_DEV(dev); in refill_pool()
196 offset = zatm_dev->pool_info[pool].offset+ in refill_pool()
200 spin_lock_irqsave(&zatm_dev->lock, flags); in refill_pool()
201 free = zpeekl(zatm_dev,zatm_dev->pool_base+2*pool) & in refill_pool()
203 spin_unlock_irqrestore(&zatm_dev->lock, flags); in refill_pool()
204 if (free >= zatm_dev->pool_info[pool].low_water) return; in refill_pool()
206 zpeekl(zatm_dev,zatm_dev->pool_base+2*pool), in refill_pool()
207 zpeekl(zatm_dev,zatm_dev->pool_base+2*pool+1)); in refill_pool()
211 while (free < zatm_dev->pool_info[pool].high_water) { in refill_pool()
232 spin_lock_irqsave(&zatm_dev->lock, flags); in refill_pool()
233 if (zatm_dev->last_free[pool]) in refill_pool()
234 ((struct rx_buffer_head *) (zatm_dev->last_free[pool]-> in refill_pool()
236 zatm_dev->last_free[pool] = skb; in refill_pool()
237 skb_queue_tail(&zatm_dev->pool[pool],skb); in refill_pool()
238 spin_unlock_irqrestore(&zatm_dev->lock, flags); in refill_pool()
242 spin_lock_irqsave(&zatm_dev->lock, flags); in refill_pool()
247 spin_unlock_irqrestore(&zatm_dev->lock, flags); in refill_pool()
249 zpeekl(zatm_dev,zatm_dev->pool_base+2*pool), in refill_pool()
250 zpeekl(zatm_dev,zatm_dev->pool_base+2*pool+1)); in refill_pool()
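
refill_pool() (lines 180-250) reads the pool's free-buffer count out of control memory under zatm_dev->lock and, only if it has fallen below low_water, allocates fresh sk_buffs until high_water is reached, remembering the newest one in last_free[pool] and queuing each on the per-pool list. A reduced sketch of that flow follows, assuming GFP_ATOMIC allocation and the uPD98401_RXFP_REMAIN mask name; linking each buffer to the previous one through the descriptor in its head room, and handing the chain to the SAR, are elided.

/*
 * Reduced sketch of the refill_pool() flow; the mask name and the
 * allocation flag are assumptions, and the buffer-chaining step that
 * the real driver performs is only indicated by a comment.
 */
#include <linux/skbuff.h>
#include <linux/spinlock.h>

static void refill_pool_sketch(struct zatm_dev *zatm_dev, int pool, int size)
{
	unsigned long flags;
	unsigned int free;

	/* Read the hardware's free-buffer count under the device lock. */
	spin_lock_irqsave(&zatm_dev->lock, flags);
	free = zpeekl(zatm_dev, zatm_dev->pool_base + 2 * pool) &
	    uPD98401_RXFP_REMAIN;                     /* mask name: assumption */
	spin_unlock_irqrestore(&zatm_dev->lock, flags);

	if (free >= zatm_dev->pool_info[pool].low_water)
		return;                               /* still enough buffers */

	while (free < zatm_dev->pool_info[pool].high_water) {
		struct sk_buff *skb = alloc_skb(size, GFP_ATOMIC);

		if (!skb)
			break;                        /* retry on the next pass */
		spin_lock_irqsave(&zatm_dev->lock, flags);
		/* ... chain skb to zatm_dev->last_free[pool] here ... */
		zatm_dev->last_free[pool] = skb;
		skb_queue_tail(&zatm_dev->pool[pool], skb);
		spin_unlock_irqrestore(&zatm_dev->lock, flags);
		free++;
	}
}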
280 struct zatm_dev *zatm_dev; in use_pool() local
284 zatm_dev = ZATM_DEV(dev); in use_pool()
285 if (!(zatm_dev->pool_info[pool].ref_count++)) { in use_pool()
286 skb_queue_head_init(&zatm_dev->pool[pool]); in use_pool()
290 spin_lock_irqsave(&zatm_dev->lock, flags); in use_pool()
291 zpokel(zatm_dev,((zatm_dev->pool_info[pool].low_water/4) << in use_pool()
295 zatm_dev->pool_base+pool*2); in use_pool()
296 zpokel(zatm_dev,(unsigned long) dummy,zatm_dev->pool_base+ in use_pool()
298 spin_unlock_irqrestore(&zatm_dev->lock, flags); in use_pool()
299 zatm_dev->last_free[pool] = NULL; in use_pool()
302 DPRINTK("pool %d: %d\n",pool,zatm_dev->pool_info[pool].ref_count); in use_pool()
319 struct zatm_dev *zatm_dev = ZATM_DEV(vcc->dev);
327 zpeekl(zatm_dev,zatm_vcc->tx_chan*VC_SIZE/4+i));
330 zpeekl(zatm_dev,uPD98401_IM(zatm_vcc->shaper)+16*i));
331 qrp = (unsigned long *) zpeekl(zatm_dev,zatm_vcc->tx_chan*VC_SIZE/4+
361 struct zatm_dev *zatm_dev; in poll_rx() local
367 zatm_dev = ZATM_DEV(dev); in poll_rx()
368 pos = (zatm_dev->mbx_start[mbx] & ~0xffffUL) | zin(MTA(mbx)); in poll_rx()
377 if (((pos += 16) & 0xffff) == zatm_dev->mbx_end[mbx]) in poll_rx()
378 pos = zatm_dev->mbx_start[mbx]; in poll_rx()
384 printk("POOL: 0x%08x, 0x%08x\n",zpeekl(zatm_dev, in poll_rx()
385 zatm_dev->pool_base), in poll_rx()
386 zpeekl(zatm_dev,zatm_dev->pool_base+1)); in poll_rx()
418 if (chan < zatm_dev->chans && zatm_dev->rx_map[chan]) { in poll_rx()
420 vcc = zatm_dev->rx_map[chan]; in poll_rx()
422 if (skb == zatm_dev->last_free[pos]) in poll_rx()
423 zatm_dev->last_free[pos] = NULL; in poll_rx()
424 skb_unlink(skb, zatm_dev->pool + pos); in poll_rx()
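
poll_rx() (lines 361-424) reconstructs the current mailbox position from the saved mbx_start[] base plus the low 16 bits returned by zin(MTA(mbx)), consumes 16-byte RX indications, and wraps back to mbx_start[] when the low half-word reaches mbx_end[]; for a matched channel the buffer is unlinked from its pool list, and last_free[] is cleared if it pointed at the same skb. The ring arithmetic can be sketched as below; mbx_write_pos() is a hypothetical stand-in for reading the chip's write pointer, and the final tail write-back is an assumption.

/*
 * Sketch of the RX mailbox ring walk in poll_rx().  The 16-byte entry
 * stride and the wrap test follow the lines above; mbx_write_pos() is a
 * hypothetical helper and the closing zout() is an assumption.
 */
static void poll_rx_ring_sketch(struct atm_dev *dev, int mbx)
{
	struct zatm_dev *zatm_dev = ZATM_DEV(dev);
	unsigned long pos;

	/* High bits come from the kernel-side base, low 16 bits from the chip. */
	pos = (zatm_dev->mbx_start[mbx] & ~0xffffUL) | zin(MTA(mbx));

	while (pos != mbx_write_pos(zatm_dev, mbx)) {  /* hypothetical helper */
		u32 *entry = (u32 *) pos;

		/* ... decode the RX indication in entry[0..3] here ... */
		(void) entry;

		/* Advance by one 16-byte entry, wrapping on the low half-word. */
		if (((pos += 16) & 0xffff) == zatm_dev->mbx_end[mbx])
			pos = zatm_dev->mbx_start[mbx];
	}

	/* Report how far we got by writing the low 16 bits back (assumption). */
	zout(pos & 0xffff, MTA(mbx));
}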
484 struct zatm_dev *zatm_dev; in open_rx_first() local
491 zatm_dev = ZATM_DEV(vcc->dev); in open_rx_first()
509 spin_lock_irqsave(&zatm_dev->lock, flags); in open_rx_first()
515 spin_unlock_irqrestore(&zatm_dev->lock, flags); in open_rx_first()
521 spin_lock_irqsave(&zatm_dev->lock, flags); in open_rx_first()
522 zpokel(zatm_dev,zatm_vcc->pool << uPD98401_RXVC_POOL_SHIFT, in open_rx_first()
524 zpokel(zatm_dev,uPD98401_RXVC_OD | (vcc->qos.aal == ATM_AAL5 ? in open_rx_first()
526 zpokel(zatm_dev,0,chan*VC_SIZE/4+2); in open_rx_first()
528 zatm_dev->rx_map[chan] = vcc; in open_rx_first()
529 spin_unlock_irqrestore(&zatm_dev->lock, flags); in open_rx_first()
536 struct zatm_dev *zatm_dev; in open_rx_second() local
542 zatm_dev = ZATM_DEV(vcc->dev); in open_rx_second()
545 spin_lock_irqsave(&zatm_dev->lock, flags); in open_rx_second()
549 zpokel(zatm_dev,(zpeekl(zatm_dev,pos) & ~(0xffff << shift)) | in open_rx_second()
551 spin_unlock_irqrestore(&zatm_dev->lock, flags); in open_rx_second()
558 struct zatm_dev *zatm_dev; in close_rx() local
564 zatm_dev = ZATM_DEV(vcc->dev); in close_rx()
569 spin_lock_irqsave(&zatm_dev->lock, flags); in close_rx()
572 zpokel(zatm_dev,zpeekl(zatm_dev,pos) & ~(0xffff << shift),pos); in close_rx()
577 spin_unlock_irqrestore(&zatm_dev->lock, flags); in close_rx()
579 spin_lock_irqsave(&zatm_dev->lock, flags); in close_rx()
591 spin_unlock_irqrestore(&zatm_dev->lock, flags); in close_rx()
592 zatm_dev->rx_map[zatm_vcc->rx_chan] = NULL; in close_rx()
600 struct zatm_dev *zatm_dev; in start_rx() local
604 zatm_dev = ZATM_DEV(dev); in start_rx()
605 size = sizeof(struct atm_vcc *)*zatm_dev->chans; in start_rx()
606 zatm_dev->rx_map = kzalloc(size,GFP_KERNEL); in start_rx()
607 if (!zatm_dev->rx_map) return -ENOMEM; in start_rx()
609 zpokel(zatm_dev,(1 << dev->ci_range.vci_bits)-1,uPD98401_VRR); in start_rx()
612 zatm_dev->pool_info[i].ref_count = 0; in start_rx()
613 zatm_dev->pool_info[i].rqa_count = 0; in start_rx()
614 zatm_dev->pool_info[i].rqu_count = 0; in start_rx()
615 zatm_dev->pool_info[i].low_water = LOW_MARK; in start_rx()
616 zatm_dev->pool_info[i].high_water = HIGH_MARK; in start_rx()
617 zatm_dev->pool_info[i].offset = 0; in start_rx()
618 zatm_dev->pool_info[i].next_off = 0; in start_rx()
619 zatm_dev->pool_info[i].next_cnt = 0; in start_rx()
620 zatm_dev->pool_info[i].next_thres = OFF_CNG_THRES; in start_rx()
632 struct zatm_dev *zatm_dev; in do_tx() local
640 zatm_dev = ZATM_DEV(vcc->dev); in do_tx()
643 spin_lock_irqsave(&zatm_dev->lock, flags); in do_tx()
646 spin_unlock_irqrestore(&zatm_dev->lock, flags); in do_tx()
698 DPRINTK("QRP=0x%08lx\n",zpeekl(zatm_dev,zatm_vcc->tx_chan*VC_SIZE/4+ in do_tx()
703 spin_unlock_irqrestore(&zatm_dev->lock, flags); in do_tx()
743 struct zatm_dev *zatm_dev; in poll_tx() local
748 zatm_dev = ZATM_DEV(dev); in poll_tx()
749 pos = (zatm_dev->mbx_start[mbx] & ~0xffffUL) | zin(MTA(mbx)); in poll_tx()
765 chan = (zatm_dev->mbx_start[mbx][pos >> 2] & uPD98401_TXI_CONN) in poll_tx()
768 if (chan < zatm_dev->chans && zatm_dev->tx_map[chan]) in poll_tx()
769 dequeue_tx(zatm_dev->tx_map[chan]); in poll_tx()
775 if (((pos += 4) & 0xffff) == zatm_dev->mbx_end[mbx]) in poll_tx()
776 pos = zatm_dev->mbx_start[mbx]; in poll_tx()
788 struct zatm_dev *zatm_dev; in alloc_shaper() local
794 zatm_dev = ZATM_DEV(dev); in alloc_shaper()
795 if (!zatm_dev->free_shapers) return -EAGAIN; in alloc_shaper()
796 for (shaper = 0; !((zatm_dev->free_shapers >> shaper) & 1); shaper++); in alloc_shaper()
797 zatm_dev->free_shapers &= ~1 << shaper; in alloc_shaper()
801 zatm_dev->ubr_ref_cnt++; in alloc_shaper()
802 zatm_dev->ubr = shaper; in alloc_shaper()
817 if (max > zatm_dev->tx_bw) max = zatm_dev->tx_bw; in alloc_shaper()
835 if (zatm_dev->tx_bw < *pcr) return -EAGAIN; in alloc_shaper()
836 zatm_dev->tx_bw -= *pcr; in alloc_shaper()
838 spin_lock_irqsave(&zatm_dev->lock, flags); in alloc_shaper()
840 zpokel(zatm_dev,(i << uPD98401_IM_I_SHIFT) | m,uPD98401_IM(shaper)); in alloc_shaper()
841 zpokel(zatm_dev,c << uPD98401_PC_C_SHIFT,uPD98401_PC(shaper)); in alloc_shaper()
842 zpokel(zatm_dev,0,uPD98401_X(shaper)); in alloc_shaper()
843 zpokel(zatm_dev,0,uPD98401_Y(shaper)); in alloc_shaper()
844 zpokel(zatm_dev,uPD98401_PS_E,uPD98401_PS(shaper)); in alloc_shaper()
845 spin_unlock_irqrestore(&zatm_dev->lock, flags); in alloc_shaper()
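
alloc_shaper() (lines 788-845) treats free_shapers as a bitmap: it fails with -EAGAIN when the bitmap is empty, picks the lowest set bit, clears it, and (for non-UBR traffic) subtracts the granted rate from tx_bw before programming the shaper's IM/PC/X/Y/PS registers under the lock; dealloc_shaper() (lines 852-864) disables the shaper and sets the bit again. A sketch of just the bitmap bookkeeping follows; the clear is written with explicit parentheses here, which in context has the same effect as line 797 because all bits below the chosen one are already zero.

/*
 * Sketch of the shaper bitmap handling in alloc_shaper()/dealloc_shaper().
 * Only the bookkeeping is shown; programming the uPD98401_IM/PC/X/Y/PS
 * registers and the bandwidth accounting are elided.
 */
#include <linux/errno.h>

static int alloc_shaper_sketch(struct zatm_dev *zatm_dev)
{
	int shaper;

	if (!zatm_dev->free_shapers)
		return -EAGAIN;                        /* all shapers in use */
	/* Find the lowest free shaper (lowest set bit in the bitmap). */
	for (shaper = 0; !((zatm_dev->free_shapers >> shaper) & 1); shaper++);
	zatm_dev->free_shapers &= ~(1 << shaper);      /* mark it allocated */
	return shaper;
}

static void dealloc_shaper_sketch(struct zatm_dev *zatm_dev, int shaper)
{
	zatm_dev->free_shapers |= 1 << shaper;         /* return it to the pool */
}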
852 struct zatm_dev *zatm_dev; in dealloc_shaper() local
855 zatm_dev = ZATM_DEV(dev); in dealloc_shaper()
856 if (shaper == zatm_dev->ubr) { in dealloc_shaper()
857 if (--zatm_dev->ubr_ref_cnt) return; in dealloc_shaper()
858 zatm_dev->ubr = -1; in dealloc_shaper()
860 spin_lock_irqsave(&zatm_dev->lock, flags); in dealloc_shaper()
861 zpokel(zatm_dev,zpeekl(zatm_dev,uPD98401_PS(shaper)) & ~uPD98401_PS_E, in dealloc_shaper()
863 spin_unlock_irqrestore(&zatm_dev->lock, flags); in dealloc_shaper()
864 zatm_dev->free_shapers |= 1 << shaper; in dealloc_shaper()
870 struct zatm_dev *zatm_dev; in close_tx() local
876 zatm_dev = ZATM_DEV(vcc->dev); in close_tx()
890 spin_lock_irqsave(&zatm_dev->lock, flags); in close_tx()
901 spin_unlock_irqrestore(&zatm_dev->lock, flags); in close_tx()
903 zatm_dev->tx_map[chan] = NULL; in close_tx()
904 if (zatm_vcc->shaper != zatm_dev->ubr) { in close_tx()
905 zatm_dev->tx_bw += vcc->qos.txtp.min_pcr; in close_tx()
914 struct zatm_dev *zatm_dev; in open_tx_first() local
922 zatm_dev = ZATM_DEV(vcc->dev); in open_tx_first()
926 spin_lock_irqsave(&zatm_dev->lock, flags); in open_tx_first()
932 spin_unlock_irqrestore(&zatm_dev->lock, flags); in open_tx_first()
938 if (unlimited && zatm_dev->ubr != -1) zatm_vcc->shaper = zatm_dev->ubr; in open_tx_first()
965 zpokel(zatm_dev,virt_to_bus(zatm_vcc->ring), in open_tx_first()
973 struct zatm_dev *zatm_dev; in open_tx_second() local
978 zatm_dev = ZATM_DEV(vcc->dev); in open_tx_second()
982 spin_lock_irqsave(&zatm_dev->lock, flags); in open_tx_second()
983 zpokel(zatm_dev,0,zatm_vcc->tx_chan*VC_SIZE/4); in open_tx_second()
984 zpokel(zatm_dev,uPD98401_TXVC_L | (zatm_vcc->shaper << in open_tx_second()
987 zpokel(zatm_dev,0,zatm_vcc->tx_chan*VC_SIZE/4+2); in open_tx_second()
988 spin_unlock_irqrestore(&zatm_dev->lock, flags); in open_tx_second()
989 zatm_dev->tx_map[zatm_vcc->tx_chan] = vcc; in open_tx_second()
996 struct zatm_dev *zatm_dev; in start_tx() local
1000 zatm_dev = ZATM_DEV(dev); in start_tx()
1001 zatm_dev->tx_map = kmalloc(sizeof(struct atm_vcc *)* in start_tx()
1002 zatm_dev->chans,GFP_KERNEL); in start_tx()
1003 if (!zatm_dev->tx_map) return -ENOMEM; in start_tx()
1004 zatm_dev->tx_bw = ATM_OC3_PCR; in start_tx()
1005 zatm_dev->free_shapers = (1 << NR_SHAPERS)-1; in start_tx()
1006 zatm_dev->ubr = -1; in start_tx()
1007 zatm_dev->ubr_ref_cnt = 0; in start_tx()
1009 for (i = 0; i < NR_SHAPERS; i++) zpokel(zatm_dev,0,uPD98401_PS(i)); in start_tx()
1020 struct zatm_dev *zatm_dev; in zatm_int() local
1025 zatm_dev = ZATM_DEV(dev); in zatm_int()
1042 zatm_dev->pool_info[i].rqa_count++; in zatm_int()
1057 zatm_dev->pool_info[i].rqu_count++; in zatm_int()
1097 static void eprom_set(struct zatm_dev *zatm_dev, unsigned long value, in eprom_set() argument
1102 if ((error = pci_write_config_dword(zatm_dev->pci_dev,cmd,value))) in eprom_set()
1108 static unsigned long eprom_get(struct zatm_dev *zatm_dev, unsigned short cmd) in eprom_get() argument
1113 if ((error = pci_read_config_dword(zatm_dev->pci_dev,cmd,&value))) in eprom_get()
1120 static void eprom_put_bits(struct zatm_dev *zatm_dev, unsigned long data, in eprom_put_bits() argument
1128 eprom_set(zatm_dev,value,cmd); in eprom_put_bits()
1129 eprom_set(zatm_dev,value | ZEPROM_SK,cmd); in eprom_put_bits()
1130 eprom_set(zatm_dev,value,cmd); in eprom_put_bits()
1135 static void eprom_get_byte(struct zatm_dev *zatm_dev, unsigned char *byte, in eprom_get_byte() argument
1142 eprom_set(zatm_dev,ZEPROM_CS,cmd); in eprom_get_byte()
1143 eprom_set(zatm_dev,ZEPROM_CS | ZEPROM_SK,cmd); in eprom_get_byte()
1145 if (eprom_get(zatm_dev,cmd) & ZEPROM_DO) *byte |= 1; in eprom_get_byte()
1146 eprom_set(zatm_dev,ZEPROM_CS,cmd); in eprom_get_byte()
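
The eprom_* helpers (lines 1097-1146) bit-bang a serial EEPROM through a PCI configuration-space dword: eprom_set()/eprom_get() are thin wrappers around pci_write_config_dword()/pci_read_config_dword(), eprom_put_bits() shifts a value out while pulsing ZEPROM_SK around each data bit, and eprom_get_byte() clocks in eight bits, sampling ZEPROM_DO after each rising edge. A sketch of the clocking pattern follows; the ZEPROM_DI bit and the MSB-first ordering are assumptions.

/*
 * Sketch of the bit-banged EEPROM access behind eprom_put_bits() and
 * eprom_get_byte().  ZEPROM_DI and the bit ordering are assumptions;
 * the config-space accessors are the standard <linux/pci.h> ones.
 */
#include <linux/pci.h>

static void eprom_put_bits_sketch(struct zatm_dev *zatm_dev,
    unsigned long data, int bits, unsigned short cmd)
{
	unsigned long value;
	int i;

	for (i = bits - 1; i >= 0; i--) {             /* MSB first (assumed) */
		value = ZEPROM_CS | (((data >> i) & 1) ? ZEPROM_DI : 0);
		eprom_set(zatm_dev, value, cmd);              /* data stable  */
		eprom_set(zatm_dev, value | ZEPROM_SK, cmd);  /* clock high   */
		eprom_set(zatm_dev, value, cmd);              /* clock low    */
	}
}

static void eprom_get_byte_sketch(struct zatm_dev *zatm_dev,
    unsigned char *byte, unsigned short cmd)
{
	int i;

	*byte = 0;
	for (i = 0; i < 8; i++) {
		*byte <<= 1;
		eprom_set(zatm_dev, ZEPROM_CS, cmd);
		eprom_set(zatm_dev, ZEPROM_CS | ZEPROM_SK, cmd);   /* clock */
		if (eprom_get(zatm_dev, cmd) & ZEPROM_DO)
			*byte |= 1;                   /* sample data-out bit */
		eprom_set(zatm_dev, ZEPROM_CS, cmd);
	}
}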
1155 struct zatm_dev *zatm_dev; in eprom_try_esi() local
1158 zatm_dev = ZATM_DEV(dev); in eprom_try_esi()
1160 eprom_set(zatm_dev,ZEPROM_CS,cmd); /* select EPROM */ in eprom_try_esi()
1161 eprom_put_bits(zatm_dev,ZEPROM_CMD_READ,ZEPROM_CMD_LEN,cmd); in eprom_try_esi()
1162 eprom_put_bits(zatm_dev,i >> 1,ZEPROM_ADDR_LEN,cmd); in eprom_try_esi()
1163 eprom_get_byte(zatm_dev,buf+i+swap,cmd); in eprom_try_esi()
1164 eprom_get_byte(zatm_dev,buf+i+1-swap,cmd); in eprom_try_esi()
1165 eprom_set(zatm_dev,0,cmd); /* deselect EPROM */ in eprom_try_esi()
1184 struct zatm_dev *zatm_dev; in zatm_init() local
1191 zatm_dev = ZATM_DEV(dev); in zatm_init()
1192 spin_lock_init(&zatm_dev->lock); in zatm_init()
1193 pci_dev = zatm_dev->pci_dev; in zatm_init()
1194 zatm_dev->base = pci_resource_start(pci_dev, 0); in zatm_init()
1195 zatm_dev->irq = pci_dev->irq; in zatm_init()
1209 dev->number,pci_dev->revision,zatm_dev->base,zatm_dev->irq); in zatm_init()
1216 zpokel(zatm_dev,0x55555555,i); in zatm_init()
1217 if (zpeekl(zatm_dev,i) != 0x55555555) last = i; in zatm_init()
1219 zpokel(zatm_dev,0xAAAAAAAA,i); in zatm_init()
1220 if (zpeekl(zatm_dev,i) != 0xAAAAAAAA) last = i; in zatm_init()
1221 else zpokel(zatm_dev,i,i); in zatm_init()
1225 if (zpeekl(zatm_dev,i) != i) break; in zatm_init()
1226 zatm_dev->mem = i << 2; in zatm_init()
1227 while (i) zpokel(zatm_dev,0,--i); in zatm_init()
1234 printk("mem=%dkB,%s (",zatm_dev->mem >> 10,zatm_dev->copper ? "UTP" : in zatm_init()
1241 spin_lock_irqsave(&zatm_dev->lock, flags); in zatm_init()
1242 t0 = zpeekl(zatm_dev,uPD98401_TSR); in zatm_init()
1244 t1 = zpeekl(zatm_dev,uPD98401_TSR); in zatm_init()
1246 t2 = zpeekl(zatm_dev,uPD98401_TSR); in zatm_init()
1247 spin_unlock_irqrestore(&zatm_dev->lock, flags); in zatm_init()
1250 zatm_dev->khz = t2-2*t1+t0; in zatm_init()
1254 zin(VER) & uPD98401_MINOR,zatm_dev->khz/1000,zatm_dev->khz % 1000); in zatm_init()
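
zatm_init() (lines 1184-1254) sizes the on-board control memory by writing 0x55555555 and 0xAAAAAAAA through zpokel() and reading them back, leaves each good word holding its own address, and takes the first readback mismatch as the end of memory (mem = words << 2); it then derives the chip clock in kHz from three uPD98401_TSR samples taken a fixed delay apart. A compressed sketch of the sizing probe follows; the single-pass granularity and the loop bounds are simplified assumptions.

/*
 * Compressed sketch of the control-memory sizing probe in zatm_init().
 * The real code probes in two stages and zeroes the memory afterwards;
 * the step and limit below are simplified assumptions.
 */
static unsigned int probe_mem_words_sketch(struct zatm_dev *zatm_dev,
    unsigned int max_words)
{
	unsigned int last, i;

	/* Pattern test: find how far both test patterns read back intact. */
	for (last = 0, i = 0; i < max_words; i++) {
		zpokel(zatm_dev, 0x55555555, i);
		if (zpeekl(zatm_dev, i) != 0x55555555)
			break;
		zpokel(zatm_dev, 0xAAAAAAAA, i);
		if (zpeekl(zatm_dev, i) != 0xAAAAAAAA)
			break;
		zpokel(zatm_dev, i, i);      /* leave each word's own address */
		last = i + 1;
	}
	/* Address test: the usable size ends at the first readback mismatch. */
	for (i = 0; i < last; i++)
		if (zpeekl(zatm_dev, i) != i)
			break;
	return i;                            /* zatm_dev->mem = i << 2 bytes */
}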
1261 struct zatm_dev *zatm_dev = ZATM_DEV(dev); in zatm_start() local
1262 struct pci_dev *pdev = zatm_dev->pci_dev; in zatm_start()
1268 zatm_dev->rx_map = zatm_dev->tx_map = NULL; in zatm_start()
1270 zatm_dev->mbx_start[i] = 0; in zatm_start()
1271 error = request_irq(zatm_dev->irq, zatm_int, IRQF_SHARED, DEV_LABEL, dev); in zatm_start()
1274 dev->number,zatm_dev->irq); in zatm_start()
1281 vccs = (zatm_dev->mem-NR_SHAPERS*SHAPER_SIZE-pools*POOL_SIZE)/ in zatm_start()
1288 zatm_dev->chans = vccs; /* ??? */ in zatm_start()
1291 zpokel(zatm_dev,curr,uPD98401_PMA); /* receive pool */ in zatm_start()
1292 zatm_dev->pool_base = curr; in zatm_start()
1295 zpokel(zatm_dev,curr,uPD98401_SMA); /* shapers */ in zatm_start()
1298 zpokel(zatm_dev,curr,uPD98401_TOS); /* free pool */ in zatm_start()
1301 (zatm_dev->mem-curr*4)/VC_SIZE); in zatm_start()
1327 zatm_dev->mbx_start[i] = (unsigned long)mbx; in zatm_start()
1328 zatm_dev->mbx_dma[i] = mbx_dma; in zatm_start()
1329 zatm_dev->mbx_end[i] = (zatm_dev->mbx_start[i] + MBX_SIZE(i)) & in zatm_start()
1333 zout(zatm_dev->mbx_end[i], MBA(i)); in zatm_start()
1353 kfree(zatm_dev->rx_map); in zatm_start()
1355 kfree(zatm_dev->tx_map); in zatm_start()
1359 (void *)zatm_dev->mbx_start[i], in zatm_start()
1360 zatm_dev->mbx_dma[i]); in zatm_start()
1362 free_irq(zatm_dev->irq, dev); in zatm_start()
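
zatm_start() (lines 1261-1362) requests the shared IRQ, carves the probed control memory into receive pools, shapers, the free-cell pool and per-VC storage, then sets up each mailbox: it records the kernel virtual start, the DMA handle and the wrapped end position, and programs the end into the chip via zout(..., MBA(i)); the error path frees rx_map/tx_map, the mailbox buffers and the IRQ in reverse order. A sketch of one mailbox's setup follows, assuming dma_alloc_coherent(), a 2*MBX_SIZE(i) allocation and a 16-bit wrap mask.

/*
 * Sketch of the per-mailbox setup in zatm_start(), assuming the standard
 * dma_alloc_coherent() API; the allocation size and the 0xffff wrap mask
 * are assumptions, and programming the mailbox base address is elided.
 */
#include <linux/dma-mapping.h>

static int setup_mbx_sketch(struct atm_dev *dev, int i)
{
	struct zatm_dev *zatm_dev = ZATM_DEV(dev);
	struct pci_dev *pdev = zatm_dev->pci_dev;
	dma_addr_t mbx_dma;
	void *mbx;

	mbx = dma_alloc_coherent(&pdev->dev, 2 * MBX_SIZE(i), &mbx_dma,
	    GFP_KERNEL);
	if (!mbx)
		return -ENOMEM;

	zatm_dev->mbx_start[i] = (unsigned long) mbx;
	zatm_dev->mbx_dma[i] = mbx_dma;
	zatm_dev->mbx_end[i] = (zatm_dev->mbx_start[i] + MBX_SIZE(i)) & 0xffff;

	/* The DMA base address is programmed separately (elided). */
	zout(zatm_dev->mbx_end[i], MBA(i));  /* tell the chip where it wraps */
	return 0;
}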
1385 struct zatm_dev *zatm_dev; in zatm_open() local
1392 zatm_dev = ZATM_DEV(vcc->dev); in zatm_open()
1441 struct zatm_dev *zatm_dev; in zatm_ioctl() local
1444 zatm_dev = ZATM_DEV(dev); in zatm_ioctl()
1459 spin_lock_irqsave(&zatm_dev->lock, flags); in zatm_ioctl()
1460 info = zatm_dev->pool_info[pool]; in zatm_ioctl()
1462 zatm_dev->pool_info[pool].rqa_count = 0; in zatm_ioctl()
1463 zatm_dev->pool_info[pool].rqu_count = 0; in zatm_ioctl()
1465 spin_unlock_irqrestore(&zatm_dev->lock, flags); in zatm_ioctl()
1485 info.low_water = zatm_dev-> in zatm_ioctl()
1488 info.high_water = zatm_dev-> in zatm_ioctl()
1491 info.next_thres = zatm_dev-> in zatm_ioctl()
1496 spin_lock_irqsave(&zatm_dev->lock, flags); in zatm_ioctl()
1497 zatm_dev->pool_info[pool].low_water = in zatm_ioctl()
1499 zatm_dev->pool_info[pool].high_water = in zatm_ioctl()
1501 zatm_dev->pool_info[pool].next_thres = in zatm_ioctl()
1503 spin_unlock_irqrestore(&zatm_dev->lock, flags); in zatm_ioctl()
1552 struct zatm_dev *zatm_dev; in zatm_phy_put() local
1554 zatm_dev = ZATM_DEV(dev); in zatm_phy_put()
1564 struct zatm_dev *zatm_dev; in zatm_phy_get() local
1566 zatm_dev = ZATM_DEV(dev); in zatm_phy_get()
1591 struct zatm_dev *zatm_dev; in zatm_init_one() local
1594 zatm_dev = kmalloc(sizeof(*zatm_dev), GFP_KERNEL); in zatm_init_one()
1595 if (!zatm_dev) { in zatm_init_one()
1616 zatm_dev->pci_dev = pci_dev; in zatm_init_one()
1617 dev->dev_data = zatm_dev; in zatm_init_one()
1618 zatm_dev->copper = (int)ent->driver_data; in zatm_init_one()
1623 zatm_dev->more = zatm_boards; in zatm_init_one()
1636 kfree(zatm_dev); in zatm_init_one()