Lines matching refs:unack — every use of the unacknowledged-packet queue (struct h5's unack member) in the Bluetooth Three-wire UART (H5) driver, drivers/bluetooth/hci_h5.c. Each match is shown as: source line, code, containing function.
68 struct sk_buff_head unack; /* Unack'ed packets queue */ member
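For orientation, a minimal sketch of the state that member lives in. Only unack and tx_win are confirmed by the matches on this page; the other fields (hu, rel, timer, tx_seq, rx_ack) are reconstructed from how the functions below use them and may not match the driver exactly. The later sketches build on this struct.

    #include <linux/skbuff.h>
    #include <linux/timer.h>

    struct h5 {
        struct hci_uart     *hu;     /* owning line discipline (assumed) */

        struct sk_buff_head unack;   /* sent, awaiting the peer's ACK; oldest at head */
        struct sk_buff_head rel;     /* reliable packets waiting to be sent (assumed) */

        struct timer_list   timer;   /* retransmission timer (assumed) */

        u8                  tx_seq;  /* 3-bit seq of next packet to send (assumed) */
        u8                  rx_ack;  /* last ack number received from the peer (assumed) */
        u8                  tx_win;  /* sliding-window size: max unacked packets */
    };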
156 BT_DBG("hu %p retransmitting %u pkts", hu, h5->unack.qlen); in h5_timed_event()
158 spin_lock_irqsave_nested(&h5->unack.lock, flags, SINGLE_DEPTH_NESTING); in h5_timed_event()
160 while ((skb = __skb_dequeue_tail(&h5->unack)) != NULL) { in h5_timed_event()
165 spin_unlock_irqrestore(&h5->unack.lock, flags); in h5_timed_event()
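Read together, the h5_timed_event() matches are the retransmission path: when the ACK timer fires, every packet still on unack is moved back onto the send queue, pulled from the tail so that requeueing each one at the head preserves the original transmit order. A hedged reconstruction of the full function (the tx_seq rewind and the rel queue are inferred from the three-wire protocol's 3-bit sequence numbers, not shown in the matches):

    static void h5_timed_event(struct timer_list *t)    /* signature assumed */
    {
        struct h5 *h5 = from_timer(h5, t, timer);
        struct sk_buff *skb;
        unsigned long flags;

        BT_DBG("hu %p retransmitting %u pkts", h5->hu, h5->unack.qlen);

        /*
         * _nested with SINGLE_DEPTH_NESTING: all sk_buff_head locks share
         * one lockdep class, and skb_queue_head() below takes rel.lock
         * while unack.lock is still held.
         */
        spin_lock_irqsave_nested(&h5->unack.lock, flags, SINGLE_DEPTH_NESTING);

        /* Drain from the tail (newest first) so queueing each packet at
         * the head of rel reproduces the original transmit order. */
        while ((skb = __skb_dequeue_tail(&h5->unack)) != NULL) {
            h5->tx_seq = (h5->tx_seq - 1) & 0x07;   /* rewind 3-bit seq (assumed) */
            skb_queue_head(&h5->rel, skb);
        }

        spin_unlock_irqrestore(&h5->unack.lock, flags);
    }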
183 skb_queue_purge(&h5->unack); in h5_peer_reset()
205 skb_queue_head_init(&h5->unack); in h5_open()
232 skb_queue_purge(&h5->unack); in h5_close()
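These three matches cover the queue's whole lifecycle: it is initialized once when the line discipline attaches, and purged whenever the peer resets the link or the driver is closed, so packets from a dead link are never retransmitted into a new one. A condensed sketch, with the allocation, the hu->priv wiring, and the reset handler's other duties assumed:

    static int h5_open(struct hci_uart *hu)
    {
        struct h5 *h5 = kzalloc(sizeof(*h5), GFP_KERNEL);

        if (!h5)
            return -ENOMEM;

        h5->hu = hu;
        hu->priv = h5;
        skb_queue_head_init(&h5->unack);    /* empty queue, spinlock initialized */
        return 0;
    }

    static void h5_peer_reset(struct hci_uart *hu)
    {
        struct h5 *h5 = hu->priv;

        /* Peer restarted the link: in-flight packets are void, drop them. */
        skb_queue_purge(&h5->unack);
    }

    static int h5_close(struct hci_uart *hu)
    {
        struct h5 *h5 = hu->priv;

        skb_queue_purge(&h5->unack);    /* kfree_skb()s everything still queued */
        kfree(h5);
        return 0;
    }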
248 spin_lock_irqsave(&h5->unack.lock, flags); in h5_pkt_cull()
250 to_remove = skb_queue_len(&h5->unack); in h5_pkt_cull()
268 skb_queue_walk_safe(&h5->unack, skb, tmp) { in h5_pkt_cull()
272 __skb_unlink(skb, &h5->unack); in h5_pkt_cull()
276 if (skb_queue_empty(&h5->unack)) in h5_pkt_cull()
280 spin_unlock_irqrestore(&h5->unack.lock, flags); in h5_pkt_cull()
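This is the ACK side: h5_pkt_cull() trims delivered packets off unack. The peer's acknowledgement is cumulative, so the driver computes how many packets at the head of the queue it covers, unlinks and frees exactly that many, and stops the retransmission timer once nothing is left outstanding. A reconstruction under those assumptions (rx_ack, the modulo-8 arithmetic, and the timer handling are inferred):

    static void h5_pkt_cull(struct h5 *h5)
    {
        struct sk_buff *skb, *tmp;
        unsigned long flags;
        int i, to_remove;
        u8 seq;

        spin_lock_irqsave(&h5->unack.lock, flags);

        to_remove = skb_queue_len(&h5->unack);
        if (to_remove == 0)
            goto unlock;

        /* Step back from the next-to-send sequence number; each step is
         * one packet still awaiting an ACK. When seq reaches rx_ack,
         * to_remove holds how many head-of-queue packets the ack covers. */
        seq = h5->tx_seq;
        while (to_remove > 0) {
            if (h5->rx_ack == seq)
                break;
            to_remove--;
            seq = (seq - 1) & 0x07;
        }

        if (seq != h5->rx_ack)
            BT_ERR("Peer acked an invalid packet");

        /* Unlink and free the acked packets from the head of the queue. */
        i = 0;
        skb_queue_walk_safe(&h5->unack, skb, tmp) {
            if (i++ >= to_remove)
                break;

            __skb_unlink(skb, &h5->unack);
            kfree_skb(skb);
        }

        /* Nothing left in flight: the retransmit timer can be stopped. */
        if (skb_queue_empty(&h5->unack))
            del_timer(&h5->timer);

    unlock:
        spin_unlock_irqrestore(&h5->unack.lock, flags);
    }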
711 spin_lock_irqsave_nested(&h5->unack.lock, flags, SINGLE_DEPTH_NESTING); in h5_dequeue()
713 if (h5->unack.qlen >= h5->tx_win) in h5_dequeue()
721 __skb_queue_tail(&h5->unack, skb); in h5_dequeue()
723 spin_unlock_irqrestore(&h5->unack.lock, flags); in h5_dequeue()
732 spin_unlock_irqrestore(&h5->unack.lock, flags); in h5_dequeue()
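Finally, the transmit path, where the sliding window is enforced: h5_dequeue() refuses to emit another reliable packet while tx_win packets are already outstanding, and otherwise parks the original skb on unack (arming the ACK timer) while a framed copy goes to the UART. A hedged sketch; the rel queue, the framing helper h5_prepare_pkt(), and H5_ACK_TIMEOUT are names assumed for illustration:

    static struct sk_buff *h5_dequeue(struct hci_uart *hu)
    {
        struct h5 *h5 = hu->priv;
        struct sk_buff *skb, *nskb;
        unsigned long flags;

        /* _nested again: rel.lock (same lockdep class) is taken below
         * while unack.lock is held. */
        spin_lock_irqsave_nested(&h5->unack.lock, flags, SINGLE_DEPTH_NESTING);

        /* Sliding window full: nothing more may be sent until an ACK
         * (or the retransmit timeout) drains the unack queue. */
        if (h5->unack.qlen >= h5->tx_win)
            goto unlock;

        skb = skb_dequeue(&h5->rel);    /* next reliable packet (assumed) */
        if (skb) {
            /* h5_prepare_pkt() stands in for the driver's framing and
             * encoding step; name and signature assumed. */
            nskb = h5_prepare_pkt(hu, hci_skb_pkt_type(skb),
                                  skb->data, skb->len);
            if (nskb) {
                /* Keep the original until the peer acks it. */
                __skb_queue_tail(&h5->unack, skb);
                mod_timer(&h5->timer, jiffies + H5_ACK_TIMEOUT);

                spin_unlock_irqrestore(&h5->unack.lock, flags);
                return nskb;
            }

            skb_queue_head(&h5->rel, skb);  /* framing failed: retry later */
        }

    unlock:
        spin_unlock_irqrestore(&h5->unack.lock, flags);
        return NULL;
    }

The two unlock matches at the end of the listing correspond to the two exit paths above: the early return once a packet has been queued on unack, and the shared unlock label for the window-full and empty-queue cases.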