/linux-4.1.27/drivers/tty/hvc/ |
D | hvc_beat.c | 46 static int qlen = 0; in hvc_beat_get_chars() local 50 if (qlen) { in hvc_beat_get_chars() 51 if (qlen > cnt) { in hvc_beat_get_chars() 53 qlen -= cnt; in hvc_beat_get_chars() 54 memmove(q + cnt, q, qlen); in hvc_beat_get_chars() 59 memcpy(buf, q, qlen); in hvc_beat_get_chars() 60 r = qlen; in hvc_beat_get_chars() 61 qlen = 0; in hvc_beat_get_chars() 67 qlen = got; in hvc_beat_get_chars()
|
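The hvc_beat_get_chars() hits above show a pattern that recurs throughout this listing: a small static staging buffer whose fill level lives in a qlen counter, drained either partially (copy cnt bytes, keep the rest, shrink qlen) or completely (copy qlen bytes, reset it to zero). Below is a minimal user-space sketch of that drain logic; the names beat_q, beat_qlen and drain_chars are illustrative, not the kernel's, and the compaction step here simply shifts the unread remainder to the front of the staging buffer.

#include <stdio.h>
#include <string.h>

/* Hypothetical stand-ins for the driver's static staging buffer and counter. */
static char beat_q[16];
static int  beat_qlen;

/* Drain up to cnt bytes into buf, mirroring the qlen bookkeeping that the
 * hvc_beat_get_chars() hits show: a partial read keeps the remainder queued. */
static int drain_chars(char *buf, int cnt)
{
    int r;

    if (!beat_qlen)
        return 0;

    if (beat_qlen > cnt) {             /* caller's buffer smaller than the queue */
        memcpy(buf, beat_q, cnt);
        beat_qlen -= cnt;
        memmove(beat_q, beat_q + cnt, beat_qlen);   /* compact the remainder */
        return cnt;
    }

    memcpy(buf, beat_q, beat_qlen);    /* hand over everything that is queued */
    r = beat_qlen;
    beat_qlen = 0;
    return r;
}

int main(void)
{
    char out[8];
    int n;

    memcpy(beat_q, "hello world", 11);
    beat_qlen = 11;

    n = drain_chars(out, 4);
    printf("first drain: %d bytes, %d still queued\n", n, beat_qlen);
    n = drain_chars(out, (int)sizeof(out));
    printf("second drain: %d bytes, %d still queued\n", n, beat_qlen);
    return 0;
}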
/linux-4.1.27/sound/oss/ |
D | pas2_midi.c | 30 static volatile int qlen; variable 89 qlen = qhead = qtail = 0; in pas_midi_open() 138 while (qlen && dump_to_midi(tmp_queue[qhead])) in pas_midi_out() 140 qlen--; in pas_midi_out() 150 if (!qlen) in pas_midi_out() 158 if (qlen >= 256) in pas_midi_out() 164 qlen++; in pas_midi_out() 188 return qlen; in pas_buffer_status() 249 while (qlen && dump_to_midi(tmp_queue[qhead])) in pas_midi_interrupt() 251 qlen--; in pas_midi_interrupt()
|
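pas2_midi.c above (and sequencer.c below) keep their output queues as a fixed array indexed by qhead/qtail plus an explicit qlen counter rather than deriving the length from the indices; pas_buffer_status() just returns qlen. A stand-alone sketch of that bookkeeping with illustrative names (midi_ring, ring_put, ring_get); the 256-entry capacity matches the qlen >= 256 test shown at line 158.

#include <stdio.h>

#define RING_SIZE 256              /* matches the qlen >= 256 check in pas_midi_out() */

static unsigned char midi_ring[RING_SIZE];
static volatile int qlen, qhead, qtail;    /* explicit length plus head/tail indices */

/* Queue one byte, refusing when the ring is full. */
static int ring_put(unsigned char c)
{
    if (qlen >= RING_SIZE)
        return 0;
    midi_ring[qtail] = c;
    qtail = (qtail + 1) % RING_SIZE;
    qlen++;
    return 1;
}

/* Dequeue one byte; callers test qlen first, as pas_midi_out() does. */
static int ring_get(unsigned char *c)
{
    if (!qlen)
        return 0;
    *c = midi_ring[qhead];
    qhead = (qhead + 1) % RING_SIZE;
    qlen--;
    return 1;
}

int main(void)
{
    unsigned char c;

    qlen = qhead = qtail = 0;      /* same reset as pas_midi_open() */
    ring_put(0x90);
    ring_put(0x3c);
    ring_put(0x7f);
    printf("buffered: %d\n", qlen);           /* pas_buffer_status() returns qlen */
    while (ring_get(&c))
        printf("sent 0x%02x, %d left\n", c, qlen);
    return 0;
}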
D | dmabuf.c | 172 dmap->qlen = dmap->qhead = dmap->qtail = dmap->user_counter = 0; in dma_init_buffers() 352 if (!signal_pending(current) && adev->dmap_out->qlen && in dma_reset_output() 376 dmap->qlen = dmap->qhead = dmap->qtail = dmap->user_counter = 0; in dma_reset_output() 393 dmap->qlen = dmap->qhead = dmap->qtail = dmap->user_counter = 0; in dma_reset_input() 440 if (dmap->qlen > 0 && !(dmap->flags & DMA_ACTIVE)) in DMAbuf_sync() 445 adev->dmap_out->qlen && adev->dmap_out->underrun_count == 0) { in DMAbuf_sync() 454 return adev->dmap_out->qlen; in DMAbuf_sync() 477 return adev->dmap_out->qlen; in DMAbuf_sync() 570 } else while (dmap->qlen <= 0 && n++ < 10) { in DMAbuf_getrdbuffer() 602 if (dmap->qlen <= 0) in DMAbuf_getrdbuffer() [all …]
|
D | sequencer.c | 68 static volatile int qhead, qtail, qlen; variable 334 if (qlen >= SEQ_MAX_QUEUE) in seq_queue() 340 if (!nonblock && qlen >= SEQ_MAX_QUEUE && !waitqueue_active(&seq_sleeper)) { in seq_queue() 346 if (qlen >= SEQ_MAX_QUEUE) in seq_queue() 355 qlen++; in seq_queue() 637 if ((SEQ_MAX_QUEUE - qlen) >= output_threshold) in seq_timing_event() 662 if ((SEQ_MAX_QUEUE - qlen) >= output_threshold) in seq_timing_event() 774 if ((SEQ_MAX_QUEUE - qlen) >= output_threshold) in play_event() 874 while (qlen > 0) in seq_startplay() 879 qlen--; in seq_startplay() [all …]
|
D | audio.c | 164 for (i = dmap->qlen + 1; i < dmap->nbufs; i++) in sync_output() 783 info.fragments = dmap->qlen; in dma_ioctl() 810 if (cmd == SNDCTL_DSP_GETISPACE && dmap->qlen) in dma_ioctl() 855 (dmap_out->mapping_flags & DMA_MAP_MAPPED || dmap_out->qlen > 0) && in dma_ioctl() 893 cinfo.blocks = dmap_in->qlen; in dma_ioctl() 896 dmap_in->qlen = 0; /* Reset interrupt counter */ in dma_ioctl() 911 cinfo.blocks = dmap_out->qlen; in dma_ioctl() 914 dmap_out->qlen = 0; /* Reset interrupt counter */ in dma_ioctl() 943 if (audio_devs[dev]->dmap_out->qlen > 0) in dma_ioctl()
|
D | dev_table.h | 111 int qlen; member
|
/linux-4.1.27/sound/core/seq/oss/ |
D | seq_oss_readq.c | 61 q->qlen = 0; in snd_seq_oss_readq_new() 89 if (q->qlen) { in snd_seq_oss_readq_clear() 90 q->qlen = 0; in snd_seq_oss_readq_clear() 131 if (q->qlen >= q->maxlen - 1) { in snd_seq_oss_readq_put_event() 138 q->qlen++; in snd_seq_oss_readq_put_event() 157 if (q->qlen == 0) in snd_seq_oss_readq_pick() 170 (q->qlen > 0 || q->head == q->tail), in snd_seq_oss_readq_wait() 181 if (q->qlen > 0) { in snd_seq_oss_readq_free() 183 q->qlen--; in snd_seq_oss_readq_free() 195 return q->qlen; in snd_seq_oss_readq_poll() [all …]
|
D | seq_oss_readq.h | 33 int qlen; member
|
D | seq_oss_ioctl.c | 117 return put_user(dp->readq->qlen, p) ? -EFAULT : 0; in snd_seq_oss_ioctl()
|
/linux-4.1.27/include/trace/events/ |
D | rcu.h | 434 long qlen), 436 TP_ARGS(rcuname, rhp, qlen_lazy, qlen), 443 __field(long, qlen) 451 __entry->qlen = qlen; 456 __entry->qlen_lazy, __entry->qlen) 470 long qlen_lazy, long qlen), 472 TP_ARGS(rcuname, rhp, offset, qlen_lazy, qlen), 479 __field(long, qlen) 487 __entry->qlen = qlen; 492 __entry->qlen_lazy, __entry->qlen) [all …]
|
/linux-4.1.27/net/sched/ |
D | sch_sfq.c | 107 sfq_index qlen; /* number of skbs in skblist */ member 229 int qlen = slot->qlen; in sfq_link() local 231 p = qlen + SFQ_MAX_FLOWS; in sfq_link() 232 n = q->dep[qlen].next; in sfq_link() 237 q->dep[qlen].next = x; /* sfq_dep_head(q, p)->next = x */ in sfq_link() 257 d = q->slots[x].qlen--; in sfq_dec() 270 d = ++q->slots[x].qlen; in sfq_inc() 333 sch->q.qlen--; in sfq_drop() 373 sfq_index x, qlen; in sfq_enqueue() local 447 if (slot->qlen >= q->maxdepth) { in sfq_enqueue() [all …]
|
D | sch_sfb.c | 44 u16 qlen; /* length of virtual queue */ member 135 if (b[hash].qlen < 0xFFFF) in increment_one_qlen() 136 b[hash].qlen++; in increment_one_qlen() 164 if (b[hash].qlen > 0) in decrement_one_qlen() 165 b[hash].qlen--; in decrement_one_qlen() 204 u32 qlen = 0, prob = 0, totalpm = 0; in sfb_compute_qlen() local 208 if (qlen < b->qlen) in sfb_compute_qlen() 209 qlen = b->qlen; in sfb_compute_qlen() 217 return qlen; in sfb_compute_qlen() 292 if (unlikely(sch->q.qlen >= q->limit)) { in sfb_enqueue() [all …]
|
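sch_sfb.c tracks a 16-bit virtual queue length per Blue bucket, updates it with saturation in increment_one_qlen()/decrement_one_qlen(), and reports the maximum across buckets from sfb_compute_qlen(). A small sketch of that aggregation, assuming a flat bucket array for simplicity (the real qdisc hashes each packet into one bucket per level):

#include <stdio.h>
#include <stdint.h>

#define NUM_BUCKETS 8              /* illustrative; SFB really uses levels * bins */

struct bucket {
    uint16_t qlen;                 /* length of the virtual queue */
};

static struct bucket buckets[NUM_BUCKETS];

/* Saturating update, as in increment_one_qlen() / decrement_one_qlen(). */
static void bucket_inc(struct bucket *b)
{
    if (b->qlen < 0xFFFF)
        b->qlen++;
}

static void bucket_dec(struct bucket *b)
{
    if (b->qlen > 0)
        b->qlen--;
}

/* Report the largest virtual qlen, mirroring sfb_compute_qlen(). */
static uint32_t compute_qlen(void)
{
    uint32_t qlen = 0;
    int i;

    for (i = 0; i < NUM_BUCKETS; i++)
        if (qlen < buckets[i].qlen)
            qlen = buckets[i].qlen;
    return qlen;
}

int main(void)
{
    bucket_inc(&buckets[1]);
    bucket_inc(&buckets[1]);
    bucket_inc(&buckets[5]);
    bucket_dec(&buckets[5]);
    printf("virtual qlen = %u\n", (unsigned)compute_qlen());   /* prints 2 */
    return 0;
}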
D | sch_drr.c | 55 unsigned int len = cl->qdisc->q.qlen; in drr_purge_queue() 248 if (cl->qdisc->q.qlen == 0) in drr_qlen_notify() 278 __u32 qlen = cl->qdisc->q.qlen; in drr_dump_class_stats() local 282 if (qlen) in drr_dump_class_stats() 287 gnet_stats_copy_queue(d, NULL, &cl->qdisc->qstats, qlen) < 0) in drr_dump_class_stats() 376 if (cl->qdisc->q.qlen == 1) { in drr_enqueue() 381 sch->q.qlen++; in drr_enqueue() 406 if (cl->qdisc->q.qlen == 0) in drr_dequeue() 411 sch->q.qlen--; in drr_dequeue() 432 sch->q.qlen--; in drr_drop() [all …]
|
D | sch_pie.c | 186 unsigned int qlen; in pie_change() local 231 qlen = sch->q.qlen; in pie_change() 232 while (sch->q.qlen > sch->limit) { in pie_change() 238 qdisc_tree_decrease_qlen(sch, qlen - sch->q.qlen); in pie_change() 248 int qlen = sch->qstats.backlog; /* current queue size in bytes */ in pie_process_dequeue() local 254 if (qlen >= QUEUE_THRESHOLD && q->vars.dq_count == DQCOUNT_INVALID) { in pie_process_dequeue() 293 if (qlen < QUEUE_THRESHOLD) in pie_process_dequeue() 313 u32 qlen = sch->qstats.backlog; /* queue size in bytes */ in calculate_probability() local 324 qdelay = (qlen << PIE_SCALE) / q->vars.avg_dq_rate; in calculate_probability() 331 if (qdelay == 0 && qlen != 0) in calculate_probability() [all …]
|
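In sch_pie.c the qlen fed into the drop-probability calculation is the backlog in bytes, and calculate_probability() estimates queueing delay in fixed point as that backlog divided by the averaged dequeue rate. A sketch of the estimate follows; PIE_SCALE is defined here as an assumed shift of 8 purely for illustration, and the special case at line 331 (a non-empty queue that computes zero delay) is handled separately in the real code and omitted.

#include <stdio.h>
#include <stdint.h>

#define PIE_SCALE 8                /* assumed fixed-point shift, for illustration only */

/*
 * Estimate queueing delay the way calculate_probability() does in sch_pie.c:
 * qlen is the backlog in bytes, avg_dq_rate the averaged dequeue rate kept in
 * the same fixed-point scale, and the quotient is the delay estimate.
 */
static uint32_t estimate_qdelay(uint32_t qlen_bytes, uint32_t avg_dq_rate)
{
    if (avg_dq_rate == 0)
        return 0;                  /* no dequeue-rate sample collected yet */
    return ((uint64_t)qlen_bytes << PIE_SCALE) / avg_dq_rate;
}

int main(void)
{
    /* 30 kB of backlog against a hypothetical averaged dequeue rate of 1500. */
    printf("qdelay = %u\n", (unsigned)estimate_qdelay(30000, 1500));
    return 0;
}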
D | sch_codel.c | 85 if (q->stats.drop_count && sch->q.qlen) { in codel_qdisc_dequeue() 118 unsigned int qlen; in codel_change() local 148 qlen = sch->q.qlen; in codel_change() 149 while (sch->q.qlen > sch->limit) { in codel_change() 155 qdisc_tree_decrease_qlen(sch, qlen - sch->q.qlen); in codel_change()
|
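codel_change() above, pie_change() just before it, and the hhf/fq_codel counterparts further down all trim the queue after a limit change with the same idiom: remember the old qlen, drop until the queue is back under the new limit, then report the difference up the hierarchy in a single qdisc_tree_decrease_qlen() call. A stand-alone model of that idiom, using a plain array for the queue and a stub in place of the tree update:

#include <stdio.h>

#define MAX_PKTS 64

static int queue[MAX_PKTS];
static unsigned int qlen;          /* models sch->q.qlen */

static void drop_head(void)
{
    unsigned int i;

    for (i = 1; i < qlen; i++)     /* an O(n) shift is fine for a model */
        queue[i - 1] = queue[i];
    qlen--;
}

/* Stand-in for qdisc_tree_decrease_qlen(sch, n): the real call walks up the
 * qdisc tree and subtracts n from each ancestor's q.qlen. */
static void tree_decrease_qlen(unsigned int n)
{
    printf("propagating a qlen decrease of %u to the parents\n", n);
}

/* The *_change() idiom: trim to the new limit, then report the delta once. */
static void change_limit(unsigned int new_limit)
{
    unsigned int old_qlen = qlen;

    while (qlen > new_limit)
        drop_head();

    tree_decrease_qlen(old_qlen - qlen);
}

int main(void)
{
    for (qlen = 0; qlen < 10; qlen++)
        queue[qlen] = (int)qlen;

    change_limit(4);               /* drops 6 packets and reports a delta of 6 */
    printf("qlen is now %u\n", qlen);
    return 0;
}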
D | sch_prio.c | 88 sch->q.qlen++; in prio_enqueue() 120 sch->q.qlen--; in prio_dequeue() 138 sch->q.qlen--; in prio_drop() 154 sch->q.qlen = 0; in prio_reset() 194 qdisc_tree_decrease_qlen(child, child->q.qlen); in prio_tune() 214 old->q.qlen); in prio_tune() 274 qdisc_tree_decrease_qlen(*old, (*old)->q.qlen); in prio_graft() 328 gnet_stats_copy_queue(d, NULL, &cl_q->qstats, cl_q->q.qlen) < 0) in prio_dump_class_stats()
|
D | sch_multiq.c | 86 sch->q.qlen++; in multiq_enqueue() 116 sch->q.qlen--; in multiq_dequeue() 166 sch->q.qlen--; in multiq_drop() 183 sch->q.qlen = 0; in multiq_reset() 221 qdisc_tree_decrease_qlen(child, child->q.qlen); in multiq_tune() 242 old->q.qlen); in multiq_tune() 309 qdisc_tree_decrease_qlen(*old, (*old)->q.qlen); in multiq_graft() 364 gnet_stats_copy_queue(d, NULL, &cl_q->qstats, cl_q->q.qlen) < 0) in multiq_dump_class_stats()
|
D | sch_red.c | 100 sch->q.qlen++; in red_enqueue() 121 sch->q.qlen--; in red_dequeue() 146 sch->q.qlen--; in red_drop() 161 sch->q.qlen = 0; in red_reset() 213 qdisc_tree_decrease_qlen(q->qdisc, q->qdisc->q.qlen); in red_change() 229 if (!q->qdisc->q.qlen) in red_change() 319 qdisc_tree_decrease_qlen(*old, (*old)->q.qlen); in red_graft()
|
D | sch_mqprio.c | 229 sch->q.qlen = 0; in mqprio_dump() 236 sch->q.qlen += qdisc->q.qlen; in mqprio_dump() 329 __u32 qlen = 0; in mqprio_dump_class_stats() local 347 qlen += qdisc->q.qlen; in mqprio_dump_class_stats() 359 gnet_stats_copy_queue(d, NULL, &qstats, qlen) < 0) in mqprio_dump_class_stats() 367 &sch->qstats, sch->q.qlen) < 0) in mqprio_dump_class_stats()
|
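mqprio_dump() above and mq_dump() further down never maintain a live qlen on the parent qdisc; at dump time they zero it and re-sum the per-transmit-queue children. A minimal sketch of that aggregation over hypothetical child counters:

#include <stdio.h>

/* Hypothetical child qdiscs: only their per-queue packet counts matter here. */
struct child_qdisc {
    unsigned int qlen;
};

struct parent_qdisc {
    unsigned int qlen;             /* models sch->q.qlen on the mq/mqprio parent */
    unsigned int num_tx_queues;
    struct child_qdisc *children;
};

/* Recompute the parent's qlen from scratch, as mqprio_dump()/mq_dump() do. */
static void refresh_parent_qlen(struct parent_qdisc *sch)
{
    unsigned int i;

    sch->qlen = 0;
    for (i = 0; i < sch->num_tx_queues; i++)
        sch->qlen += sch->children[i].qlen;
}

int main(void)
{
    struct child_qdisc kids[4] = { { 3 }, { 0 }, { 7 }, { 1 } };
    struct parent_qdisc sch = { 0, 4, kids };

    refresh_parent_qlen(&sch);
    printf("aggregate qlen = %u\n", sch.qlen);   /* prints 11 */
    return 0;
}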
D | sch_htb.c | 547 WARN_ON(cl->level || !cl->un.leaf.q || !cl->un.leaf.q->q.qlen); in htb_activate() 580 if (q->direct_queue.qlen < q->direct_qlen) { in htb_enqueue() 603 sch->q.qlen++; in htb_enqueue() 833 if (unlikely(cl->un.leaf.q->q.qlen == 0)) { in htb_dequeue_tree() 871 if (!cl->un.leaf.q->q.qlen) in htb_dequeue_tree() 892 sch->q.qlen--; in htb_dequeue() 896 if (!sch->q.qlen) in htb_dequeue() 958 sch->q.qlen--; in htb_drop() 959 if (!cl->un.leaf.q->q.qlen) in htb_drop() 992 sch->q.qlen = 0; in htb_reset() [all …]
|
D | sch_hhf.c | 378 sch->q.qlen--; in hhf_drop() 417 if (++sch->q.qlen <= sch->limit) in hhf_enqueue() 459 sch->q.qlen--; in hhf_dequeue() 538 unsigned int qlen; in hhf_change() local 587 qlen = sch->q.qlen; in hhf_change() 588 while (sch->q.qlen > sch->limit) { in hhf_change() 593 qdisc_tree_decrease_qlen(sch, qlen - sch->q.qlen); in hhf_change()
|
D | sch_choke.c | 133 --sch->q.qlen; in choke_drop_by_idx() 284 q->vars.qavg = red_calc_qavg(p, &q->vars, sch->q.qlen); in choke_enqueue() 331 if (sch->q.qlen < q->limit) { in choke_enqueue() 334 ++sch->q.qlen; in choke_enqueue() 367 --sch->q.qlen; in choke_dequeue() 451 unsigned int oqlen = sch->q.qlen, tail = 0; in choke_change() 464 --sch->q.qlen; in choke_change() 467 qdisc_tree_decrease_qlen(sch, oqlen - sch->q.qlen); in choke_change()
|
D | sch_tbf.c | 184 sch->q.qlen += nb; in tbf_segment() 208 sch->q.qlen++; in tbf_enqueue() 218 sch->q.qlen--; in tbf_drop() 264 sch->q.qlen--; in tbf_dequeue() 295 sch->q.qlen = 0; in tbf_reset() 402 qdisc_tree_decrease_qlen(q->qdisc, q->qdisc->q.qlen); in tbf_change() 508 qdisc_tree_decrease_qlen(*old, (*old)->q.qlen); in tbf_graft()
|
D | sch_mq.c | 105 sch->q.qlen = 0; in mq_dump() 112 sch->q.qlen += qdisc->q.qlen; in mq_dump() 203 gnet_stats_copy_queue(d, NULL, &sch->qstats, sch->q.qlen) < 0) in mq_dump_class_stats()
|
D | sch_cbq.c | 390 sch->q.qlen++; in cbq_enqueue() 547 cl->qdisc->q.qlen--; in cbq_ovl_drop() 647 sch->q.qlen++; in cbq_reshape_fail() 676 if (cl->q->q.qlen > 1) { in cbq_update_toplevel() 839 if (cl->q->q.qlen && in cbq_dequeue_prio() 883 if (cl->q->q.qlen == 0 || prio != cl->cpriority) { in cbq_dequeue_prio() 900 if (cl->q->q.qlen) in cbq_dequeue_prio() 907 if (cl->q->q.qlen) in cbq_dequeue_prio() 962 sch->q.qlen--; in cbq_dequeue() 997 if (sch->q.qlen) { in cbq_dequeue() [all …]
|
D | sch_qfq.c | 223 unsigned int len = cl->qdisc->q.qlen; in qfq_purge_queue() 330 if (cl->qdisc->q.qlen > 0) { /* adding an active class */ in qfq_add_to_agg() 380 if (cl->qdisc->q.qlen > 0) /* class is active */ in qfq_deact_rm_from_agg() 674 &cl->qdisc->qstats, cl->qdisc->q.qlen) < 0) in qfq_dump_class_stats() 1007 if (cl->qdisc->q.qlen == 0) /* no more packets, remove from list */ in agg_dequeue() 1143 } else if (sch->q.qlen == 0) { /* no aggregate to serve */ in qfq_dequeue() 1158 sch->q.qlen--; in qfq_dequeue() 1258 ++sch->q.qlen; in qfq_enqueue() 1262 if (cl->qdisc->q.qlen != 1) { in qfq_enqueue() 1424 if (cl->qdisc->q.qlen == 0) in qfq_qlen_notify() [all …]
|
D | sch_teql.c | 85 if (q->q.qlen < dev->tx_queue_len) { in teql_enqueue() 114 sch->q.qlen = dat->q.qlen + q->q.qlen; in teql_dequeue() 131 sch->q.qlen = 0; in teql_reset()
|
D | sch_dsmark.c | 73 qdisc_tree_decrease_qlen(*old, (*old)->q.qlen); in dsmark_graft() 265 sch->q.qlen++; in dsmark_enqueue() 287 sch->q.qlen--; in dsmark_dequeue() 337 sch->q.qlen--; in dsmark_drop() 402 sch->q.qlen = 0; in dsmark_reset()
|
D | sch_fq.c | 68 int qlen; /* number of packets in flow queue */ member 303 flow->qlen--; in fq_dequeue_head() 305 sch->q.qlen--; in fq_dequeue_head() 373 if (unlikely(sch->q.qlen >= sch->limit)) in fq_enqueue() 377 if (unlikely(f->qlen >= q->flow_plimit && f != &q->internal)) { in fq_enqueue() 382 f->qlen++; in fq_enqueue() 399 sch->q.qlen++; in fq_enqueue() 731 while (sch->q.qlen > sch->limit) { in fq_change()
|
D | sch_fq_codel.c | 165 sch->q.qlen--; in fq_codel_drop() 201 if (++sch->q.qlen <= sch->limit) in fq_codel_enqueue() 230 sch->q.qlen--; in dequeue() 280 if (q->cstats.drop_count && sch->q.qlen) { in fq_codel_dequeue() 347 while (sch->q.qlen > sch->limit) { in fq_codel_change() 547 qs.qlen++; in fq_codel_dump_class_stats()
|
D | sch_generic.c | 52 q->q.qlen++; /* it's still part of the queue */ in dev_requeue_skb() 95 q->q.qlen--; in dequeue_skb() 181 dev->name, ret, q->q.qlen); in sch_direct_xmit() 488 qdisc->q.qlen++; in pfifo_fast_enqueue() 504 qdisc->q.qlen--; in pfifo_fast_dequeue() 538 qdisc->q.qlen = 0; in pfifo_fast_reset() 660 qdisc->q.qlen = 0; in qdisc_reset()
|
D | sch_hfsc.c | 770 if (cl->qdisc->q.qlen == 0 && cl->cl_flags & HFSC_FSC) in update_vf() 897 unsigned int len = cl->qdisc->q.qlen; in hfsc_purge_queue() 1035 if (cl->qdisc->q.qlen != 0) { in hfsc_change_class() 1242 if (cl->qdisc->q.qlen == 0) { in hfsc_qlen_notify() 1381 gnet_stats_copy_queue(d, NULL, &cl->qstats, cl->qdisc->q.qlen) < 0) in hfsc_dump_class_stats() 1535 sch->q.qlen = 0; in hfsc_reset_qdisc() 1607 if (cl->qdisc->q.qlen == 1) in hfsc_enqueue() 1610 sch->q.qlen++; in hfsc_enqueue() 1625 if (sch->q.qlen == 0) in hfsc_dequeue() 1662 if (cl->qdisc->q.qlen != 0) { in hfsc_dequeue() [all …]
|
D | sch_atm.c | 436 sch->q.qlen++; in atm_tc_enqueue() 509 sch->q.qlen--; in atm_tc_dequeue() 566 sch->q.qlen = 0; in atm_tc_reset() 641 gnet_stats_copy_queue(d, NULL, &flow->qstats, flow->q->q.qlen) < 0) in atm_tc_dump_class_stats()
|
D | sch_netem.c | 395 sch->q.qlen++; in tfifo_enqueue() 536 sch->q.qlen--; in netem_drop() 580 sch->q.qlen--; in netem_dequeue() 1044 qdisc_tree_decrease_qlen(*old, (*old)->q.qlen); in netem_graft()
|
D | em_meta.c | 413 dst->value = skb->sk->sk_receive_queue.qlen; in META_COLLECTOR() 422 dst->value = skb->sk->sk_write_queue.qlen; in META_COLLECTOR() 485 dst->value = skb->sk->sk_error_queue.qlen; in META_COLLECTOR()
|
D | sch_api.c | 776 sch->q.qlen -= n; in qdisc_tree_decrease_qlen() 1341 __u32 qlen; in tc_fill_qdisc() local 1359 qlen = q->q.qlen; in tc_fill_qdisc() 1379 gnet_stats_copy_queue(&d, cpu_qstats, &q->qstats, qlen) < 0) in tc_fill_qdisc()
|
D | act_api.c | 629 p->tcfc_qstats.qlen) < 0) in tcf_action_copy_stats()
|
/linux-4.1.27/drivers/md/ |
D | dm-queue-length.c | 38 atomic_t qlen; /* the number of in-flight I/Os */ member 98 DMEMIT("%d ", atomic_read(&pi->qlen)); in ql_status() 141 atomic_set(&pi->qlen, 0); in ql_add_path() 185 (atomic_read(&pi->qlen) < atomic_read(&best->qlen))) in ql_select_path() 188 if (!atomic_read(&best->qlen)) in ql_select_path() 205 atomic_inc(&pi->qlen); in ql_start_io() 215 atomic_dec(&pi->qlen); in ql_end_io()
|
/linux-4.1.27/net/core/ |
D | gen_stats.c | 230 qstats->qlen = 0; in __gnet_stats_copy_queue_cpu() 241 __u32 qlen) in __gnet_stats_copy_queue() argument 246 qstats->qlen = q->qlen; in __gnet_stats_copy_queue() 253 qstats->qlen = qlen; in __gnet_stats_copy_queue() 273 struct gnet_stats_queue *q, __u32 qlen) in gnet_stats_copy_queue() argument 277 __gnet_stats_copy_queue(&qstats, cpu_q, q, qlen); in gnet_stats_copy_queue() 281 d->tc_stats.qlen = qstats.qlen; in gnet_stats_copy_queue()
|
D | request_sock.c | 181 fastopenq->qlen--; in reqsk_fastopen_remove() 209 fastopenq->qlen++; in reqsk_fastopen_remove()
|
D | dev.c | 3293 static bool skb_flow_limit(struct sk_buff *skb, unsigned int qlen) in skb_flow_limit() argument 3300 if (qlen < (netdev_max_backlog >> 1)) in skb_flow_limit() 3338 unsigned int qlen; in enqueue_to_backlog() local 3347 qlen = skb_queue_len(&sd->input_pkt_queue); in enqueue_to_backlog() 3348 if (qlen <= netdev_max_backlog && !skb_flow_limit(skb, qlen)) { in enqueue_to_backlog() 3349 if (qlen) { in enqueue_to_backlog()
|
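enqueue_to_backlog() above accepts a packet only while the per-CPU input queue is at or below netdev_max_backlog and skb_flow_limit() does not object, and the flow limiter itself bails out while the queue is still under half that limit. A sketch of the admission decision with the per-flow accounting reduced to a stub; 1000 is the usual sysctl default for netdev_max_backlog.

#include <stdio.h>
#include <stdbool.h>

static unsigned int netdev_max_backlog = 1000;   /* usual sysctl default */
static bool flow_dominates;        /* hypothetical "one flow owns the queue" flag */

/* Stub for skb_flow_limit(): the real code only starts per-flow accounting
 * once the queue has grown past half of netdev_max_backlog. */
static bool flow_limit(unsigned int qlen)
{
    if (qlen < (netdev_max_backlog >> 1))
        return false;              /* queue still short, never limit */
    return flow_dominates;         /* placeholder for the per-flow bucket test */
}

/* Admission test mirroring the qlen checks in enqueue_to_backlog(). */
static bool backlog_admit(unsigned int qlen)
{
    return qlen <= netdev_max_backlog && !flow_limit(qlen);
}

int main(void)
{
    unsigned int samples[] = { 0, 499, 500, 1000, 1001 };
    unsigned int i;

    for (i = 0; i < sizeof(samples) / sizeof(samples[0]); i++)
        printf("qlen=%u -> %s\n", samples[i],
               backlog_admit(samples[i]) ? "enqueue" : "drop");
    return 0;
}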
D | netpoll.c | 242 while (skb_pool.qlen < MAX_SKBS) { in refill_skbs()
|
D | neighbour.c | 1390 if (tbl->proxy_queue.qlen > NEIGH_VAR(p, PROXY_QLEN)) { in pneigh_enqueue() 1816 .ndtc_proxy_qlen = tbl->proxy_queue.qlen, in neightbl_fill_info()
|
/linux-4.1.27/sound/core/seq/ |
D | seq_midi_event.c | 63 int qlen; member 152 dev->qlen = 0; in reset_encode() 268 dev->qlen = status_event[dev->type].qlen; in snd_midi_event_encode_byte() 270 if (dev->qlen > 0) { in snd_midi_event_encode_byte() 274 dev->qlen--; in snd_midi_event_encode_byte() 278 dev->qlen = status_event[dev->type].qlen - 1; in snd_midi_event_encode_byte() 282 if (dev->qlen == 0) { in snd_midi_event_encode_byte() 387 int qlen; in snd_midi_event_decode() local 398 qlen = status_event[type].qlen + 1; in snd_midi_event_decode() 403 qlen = status_event[type].qlen; in snd_midi_event_decode() [all …]
|
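In seq_midi_event.c, qlen counts how many data bytes are still expected for the current MIDI status byte, looked up from the driver's status_event[] table. The sketch below uses the standard channel-voice byte counts from the MIDI specification rather than the driver's table, and illustrative names (data_bytes_for_status, encode_byte); it also shows why that countdown makes running status cheap to handle.

#include <stdio.h>

/*
 * How many data bytes follow a MIDI channel-voice status byte.  This plays
 * the role of status_event[].qlen in seq_midi_event.c, but the numbers below
 * come from the MIDI spec, not from the driver's table.
 */
static int data_bytes_for_status(unsigned char status)
{
    switch (status & 0xf0) {
    case 0x80:                 /* note off */
    case 0x90:                 /* note on */
    case 0xa0:                 /* poly aftertouch */
    case 0xb0:                 /* control change */
    case 0xe0:                 /* pitch bend */
        return 2;
    case 0xc0:                 /* program change */
    case 0xd0:                 /* channel pressure */
        return 1;
    default:
        return 0;              /* system messages: not handled in this sketch */
    }
}

/* Feed one byte at a time; print a line whenever a full message has arrived. */
static void encode_byte(unsigned char c)
{
    static unsigned char buf[3];
    static int qlen;           /* data bytes still expected, like dev->qlen */
    static int pos;

    if (c & 0x80) {            /* new status byte: restart the countdown */
        buf[0] = c;
        pos = 1;
        qlen = data_bytes_for_status(c);
        return;
    }

    if (qlen <= 0 || pos >= 3)
        return;                /* not collecting data bytes right now */

    buf[pos++] = c;
    if (--qlen == 0) {
        printf("message: %02x %02x %02x\n", buf[0], buf[1], pos > 2 ? buf[2] : 0);
        pos = 1;               /* running status: keep buf[0], expect fresh data */
        qlen = data_bytes_for_status(buf[0]);
    }
}

int main(void)
{
    /* A note on, then a second note on sent with running status. */
    unsigned char stream[] = { 0x90, 0x3c, 0x7f, 0x40, 0x7f };
    unsigned int i;

    for (i = 0; i < sizeof(stream); i++)
        encode_byte(stream[i]);
    return 0;
}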
/linux-4.1.27/kernel/rcu/ |
D | tiny_plugin.h | 35 RCU_TRACE(long qlen); /* Number of pending CBs. */ 80 rcp->qlen -= n; in rcu_trace_sub_qlen() 89 seq_printf(m, "rcu_sched: qlen: %ld\n", rcu_sched_ctrlblk.qlen); in show_tiny_stats() 90 seq_printf(m, "rcu_bh: qlen: %ld\n", rcu_bh_ctrlblk.qlen); in show_tiny_stats() 151 jiffies - rcp->gp_start, rcp->qlen); in check_cpu_stall()
|
D | tree.c | 1208 totqlen += per_cpu_ptr(rsp->rda, cpu)->qlen; in print_other_cpu_stall() 1255 totqlen += per_cpu_ptr(rsp->rda, cpu)->qlen; in print_cpu_stall() 2359 rsp->qlen += rdp->qlen; in rcu_send_cbs_to_orphanage() 2360 rdp->n_cbs_orphaned += rdp->qlen; in rcu_send_cbs_to_orphanage() 2362 ACCESS_ONCE(rdp->qlen) = 0; in rcu_send_cbs_to_orphanage() 2413 rdp->qlen += rsp->qlen; in rcu_adopt_orphan_cbs() 2414 rdp->n_cbs_adopted += rsp->qlen; in rcu_adopt_orphan_cbs() 2415 if (rsp->qlen_lazy != rsp->qlen) in rcu_adopt_orphan_cbs() 2418 rsp->qlen = 0; in rcu_adopt_orphan_cbs() 2544 WARN_ONCE(rdp->qlen != 0 || rdp->nxtlist != NULL, in rcu_cleanup_dead_cpu() [all …]
|
D | tiny.c | 178 RCU_TRACE(trace_rcu_batch_start(rcp->name, 0, rcp->qlen, -1)); in __rcu_process_callbacks() 251 RCU_TRACE(rcp->qlen++); in __call_rcu()
|
D | tree.h | 305 long qlen; /* # of queued callbacks, incl lazy */ member 456 long qlen; /* Total number of callbacks. */ member
|
D | tree_trace.c | 135 ql += rdp->qlen; in print_one_rcu_data() 280 ACCESS_ONCE(rsp->n_force_qs_lh), rsp->qlen_lazy, rsp->qlen); in print_one_rcu_state()
|
D | tree_plugin.h | 2067 long ql = rsp->qlen; in rcu_nocb_adopt_orphan_cbs() 2073 rsp->qlen = 0; in rcu_nocb_adopt_orphan_cbs() 2534 atomic_long_set(&rdp->nocb_q_count, rdp->qlen); in init_nocb_callback_list() 2537 rdp->qlen = 0; in init_nocb_callback_list()
|
/linux-4.1.27/drivers/usb/gadget/legacy/ |
D | gmidi.c | 64 static unsigned int qlen = 32; variable 65 module_param(qlen, uint, S_IRUGO); 66 MODULE_PARM_DESC(qlen, "USB read request queue length"); 168 midi_opts->qlen = qlen; in midi_bind()
|
D | zero.c | 70 .qlen = GZERO_QLEN, 272 module_param_named(qlen, gzero_options.qlen, uint, S_IRUGO|S_IWUSR); 273 MODULE_PARM_DESC(qlen, "depth of loopback queue"); 320 lb_opts->qlen = gzero_options.qlen; in zero_bind()
|
D | printer.c | 58 static unsigned qlen = 10; variable 59 module_param(qlen, uint, S_IRUGO|S_IWUSR); 61 #define QLEN qlen
|
/linux-4.1.27/drivers/usb/gadget/function/ |
D | f_loopback.c | 49 static unsigned qlen; variable 325 for (i = 0; i < qlen && result == 0; i++) { in enable_endpoint() 400 qlen = lb_opts->qlen; in loopback_alloc() 401 if (!qlen) in loopback_alloc() 402 qlen = 32; in loopback_alloc() 442 result = sprintf(page, "%d", opts->qlen); in f_lb_opts_qlen_show() 464 opts->qlen = num; in f_lb_opts_qlen_store() 472 __CONFIGFS_ATTR(qlen, S_IRUGO | S_IWUSR, 545 lb_opts->qlen = GZERO_QLEN; in loopback_alloc_instance()
|
D | g_zero.h | 21 unsigned qlen; member 46 unsigned qlen; member
|
D | u_midi.h | 29 unsigned int qlen; member
|
D | f_midi.c | 89 unsigned int buflen, qlen; member 360 for (i = 0; i < midi->qlen && err == 0; i++) { in f_midi_set_alt() 967 F_MIDI_OPT(qlen, false, 0); 1060 opts->qlen = 32; in f_midi_alloc_inst() 1154 midi->qlen = opts->qlen; in f_midi_alloc()
|
D | f_hid.c | 55 unsigned int qlen; member 555 for (i = 0; i < hidg->qlen && status == 0; i++) { in hidg_set_alt() 971 hidg->qlen = 4; in hidg_alloc()
|
D | u_ether.c | 91 static inline int qlen(struct usb_gadget *gadget, unsigned qmult) in qlen() function 1065 result = alloc_requests(dev, link, qlen(dev->gadget, in gether_connect() 1070 DBG(dev, "qlen %d\n", qlen(dev->gadget, dev->qmult)); in gether_connect()
|
/linux-4.1.27/drivers/nfc/st21nfcb/ |
D | ndlc.c | 94 if (ndlc->send_q.qlen) in llt_ndlc_send_queue() 96 ndlc->send_q.qlen, ndlc->ack_pending_q.qlen); in llt_ndlc_send_queue() 98 while (ndlc->send_q.qlen) { in llt_ndlc_send_queue() 153 if (ndlc->rcv_q.qlen) in llt_ndlc_rcv_queue() 154 pr_debug("rcvQlen=%d\n", ndlc->rcv_q.qlen); in llt_ndlc_rcv_queue()
|
/linux-4.1.27/net/nfc/hci/ |
D | llc_shdlc.c | 335 if (shdlc->send_q.qlen == 0) { in llc_shdlc_rcv_s_frame() 479 if (shdlc->rcv_q.qlen) in llc_shdlc_handle_rcv_queue() 480 pr_debug("rcvQlen=%d\n", shdlc->rcv_q.qlen); in llc_shdlc_handle_rcv_queue() 535 if (shdlc->send_q.qlen) in llc_shdlc_handle_send_queue() 538 shdlc->send_q.qlen, shdlc->ns, shdlc->dnr, in llc_shdlc_handle_send_queue() 541 shdlc->ack_pending_q.qlen); in llc_shdlc_handle_send_queue() 543 while (shdlc->send_q.qlen && shdlc->ack_pending_q.qlen < shdlc->w && in llc_shdlc_handle_send_queue()
|
/linux-4.1.27/drivers/net/wireless/brcm80211/include/ |
D | brcmu_utils.h | 89 return pq->q[prec].skblist.qlen; in pktq_plen() 94 return pq->q[prec].max - pq->q[prec].skblist.qlen; in pktq_pavail() 99 return pq->q[prec].skblist.qlen >= pq->q[prec].max; in pktq_pfull()
|
/linux-4.1.27/crypto/ |
D | chainiv.c | 128 if (!ctx->queue.qlen) { in async_chainiv_schedule_work() 132 if (!ctx->queue.qlen || in async_chainiv_schedule_work() 197 if (ctx->queue.qlen) { in async_chainiv_givencrypt() 280 BUG_ON(test_bit(CHAINIV_STATE_INUSE, &ctx->state) || ctx->queue.qlen); in async_chainiv_exit()
|
D | algapi.c | 855 queue->qlen = 0; in crypto_init_queue() 865 if (unlikely(queue->qlen >= queue->max_qlen)) { in crypto_enqueue_request() 873 queue->qlen++; in crypto_enqueue_request() 885 if (unlikely(!queue->qlen)) in __crypto_dequeue_request() 888 queue->qlen--; in __crypto_dequeue_request()
|
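crypto/algapi.c above bounds every crypto_queue by max_qlen: crypto_enqueue_request() refuses new work (or, with the may-backlog flag, parks it) once qlen reaches the cap, and __crypto_dequeue_request() simply decrements. A user-space model of that bounded queue; -EBUSY stands in for the real error return and the backlog list is left out.

#include <stdio.h>
#include <errno.h>

struct request {
    int id;
    struct request *next;
};

struct bounded_queue {
    struct request *head, *tail;
    unsigned int qlen;
    unsigned int max_qlen;
};

static void queue_init(struct bounded_queue *q, unsigned int max_qlen)
{
    q->head = q->tail = NULL;
    q->qlen = 0;                   /* crypto_init_queue() also starts at zero */
    q->max_qlen = max_qlen;
}

/* Refuse new work once qlen hits the cap, as crypto_enqueue_request() does;
 * the real API can still park the request on a backlog list, omitted here. */
static int queue_enqueue(struct bounded_queue *q, struct request *req)
{
    if (q->qlen >= q->max_qlen)
        return -EBUSY;

    req->next = NULL;
    if (q->tail)
        q->tail->next = req;
    else
        q->head = req;
    q->tail = req;
    q->qlen++;
    return 0;
}

static struct request *queue_dequeue(struct bounded_queue *q)
{
    struct request *req = q->head;

    if (!q->qlen)
        return NULL;               /* __crypto_dequeue_request() bails out early */

    q->head = req->next;
    if (!q->head)
        q->tail = NULL;
    q->qlen--;
    return req;
}

int main(void)
{
    struct bounded_queue q;
    struct request reqs[3] = { { 1, NULL }, { 2, NULL }, { 3, NULL } };
    int i;

    queue_init(&q, 2);
    for (i = 0; i < 3; i++) {
        int err = queue_enqueue(&q, &reqs[i]);
        printf("enqueue %d -> %d (qlen=%u)\n", reqs[i].id, err, q.qlen);
    }
    while (q.qlen) {
        struct request *req = queue_dequeue(&q);
        printf("dequeued %d (qlen=%u)\n", req->id, q.qlen);
    }
    return 0;
}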
D | mcryptd.c | 94 BUG_ON(cpu_queue->queue.qlen); in mcryptd_fini_queue() 186 if (!cpu_queue->queue.qlen) in mcryptd_queue_worker() 190 if (cpu_queue->queue.qlen) in mcryptd_queue_worker()
|
D | cryptd.c | 111 BUG_ON(cpu_queue->queue.qlen); in cryptd_fini_queue() 160 if (cpu_queue->queue.qlen) in cryptd_queue_worker()
|
/linux-4.1.27/drivers/net/ethernet/intel/i40e/ |
D | i40e_lan_hmc.h | 46 u16 qlen; member 86 u16 qlen; member
|
D | i40e_lan_hmc.c | 708 {I40E_HMC_STORE(i40e_hmc_obj_txq, qlen), 13, 33 + 128 }, 726 { I40E_HMC_STORE(i40e_hmc_obj_rxq, qlen), 13, 89 },
|
D | i40e_virtchnl_pf.c | 378 tx_ctx.qlen = info->ring_len; in i40e_config_vsi_tx_queue() 444 rx_ctx.qlen = info->ring_len; in i40e_config_vsi_rx_queue()
|
D | i40e_main.c | 2467 tx_ctx.qlen = ring->count; in i40e_configure_tx_ring() 2560 rx_ctx.qlen = ring->count; in i40e_configure_rx_ring()
|
/linux-4.1.27/drivers/net/ethernet/intel/i40evf/ |
D | i40e_lan_hmc.h | 46 u16 qlen; member 86 u16 qlen; member
|
/linux-4.1.27/drivers/tty/serial/ |
D | men_z135_uart.c | 303 int qlen; in men_z135_handle_tx() local 320 qlen = uart_circ_chars_pending(xmit); in men_z135_handle_tx() 321 if (qlen <= 0) in men_z135_handle_tx() 335 txfree, qlen); in men_z135_handle_tx() 342 if (align && qlen >= 3 && BYTES_TO_ALIGN(wptr)) in men_z135_handle_tx() 344 else if (qlen > txfree) in men_z135_handle_tx() 347 n = qlen; in men_z135_handle_tx()
|
/linux-4.1.27/net/caif/ |
D | caif_dev.c | 166 int err, high = 0, qlen = 0; in transmit() local 189 qlen = qdisc_qlen(rcu_dereference_bh(txq->qdisc)); in transmit() 191 if (likely(qlen == 0)) in transmit() 195 if (likely(qlen < high)) in transmit() 215 qlen, high); in transmit()
|
/linux-4.1.27/drivers/scsi/fcoe/ |
D | fcoe_transport.c | 398 while (port->fcoe_pending_queue.qlen) { in fcoe_check_wait_queue() 400 port->fcoe_pending_queue.qlen++; in fcoe_check_wait_queue() 410 port->fcoe_pending_queue.qlen--; in fcoe_check_wait_queue() 414 port->fcoe_pending_queue.qlen--; in fcoe_check_wait_queue() 417 if (port->fcoe_pending_queue.qlen < port->min_queue_depth) in fcoe_check_wait_queue() 419 if (port->fcoe_pending_queue.qlen && !timer_pending(&port->timer)) in fcoe_check_wait_queue() 423 if (port->fcoe_pending_queue.qlen > port->max_queue_depth) in fcoe_check_wait_queue()
|
D | fcoe.c | 539 if (port->fcoe_pending_queue.qlen) in fcoe_port_send() 2480 if (pp->fcoe_rx_list.qlen == 1) in fcoe_percpu_clean()
|
/linux-4.1.27/Documentation/ABI/testing/ |
D | configfs-usb-gadget-loopback | 7 qlen - depth of loopback queue
|
D | configfs-usb-gadget-midi | 10 qlen - USB read request queue length
|
/linux-4.1.27/net/ipv4/ |
D | tcp_fastopen.c | 146 queue->fastopenq->qlen++; in tcp_fastopen_create_child() 242 if (fastopenq->qlen >= fastopenq->max_qlen) { in tcp_fastopen_queue_check() 253 fastopenq->qlen--; in tcp_fastopen_queue_check()
|
D | inet_connection_sock.c | 609 int qlen, expire = 0, resend = 0; in reqsk_timer_handler() local 637 qlen = listen_sock_qlen(lopt); in reqsk_timer_handler() 638 if (qlen >> (lopt->max_qlen_log - 1)) { in reqsk_timer_handler() 642 if (qlen < young) in reqsk_timer_handler()
|
D | ipmr.c | 1096 if (c->mfc_un.unres.unresolved.qlen > 3) { in ipmr_cache_unresolved()
|
/linux-4.1.27/include/uapi/linux/ |
D | gen_stats.h | 60 __u32 qlen; member
|
D | i2o-dev.h | 91 unsigned int qlen; /* Length in bytes of query string buffer */ member
|
D | pkt_sched.h | 40 __u32 qlen; member
|
/linux-4.1.27/include/sound/ |
D | seq_midi_event.h | 31 int qlen; /* queue length */ member
|
/linux-4.1.27/drivers/net/usb/ |
D | usbnet.c | 434 if (dev->done.qlen == 1) in defer_bh() 1399 if (dev->txq.qlen >= TX_QLEN (dev)) in usbnet_start_xmit() 1432 for (i = 0; i < 10 && dev->rxq.qlen < RX_QLEN(dev); i++) { in rx_alloc_submit() 1482 if (dev->txq.qlen + dev->rxq.qlen + dev->done.qlen == 0) in usbnet_bh() 1491 int temp = dev->rxq.qlen; in usbnet_bh() 1496 if (temp != dev->rxq.qlen) in usbnet_bh() 1499 temp, dev->rxq.qlen); in usbnet_bh() 1500 if (dev->rxq.qlen < RX_QLEN(dev)) in usbnet_bh() 1503 if (dev->txq.qlen < TX_QLEN (dev)) in usbnet_bh() 1777 if (dev->txq.qlen && PMSG_IS_AUTO(message)) { in usbnet_suspend() [all …]
|
/linux-4.1.27/include/linux/ |
D | skbuff.h | 188 __u32 qlen; member 1307 return list_->qlen; in skb_queue_len() 1323 list->qlen = 0; in __skb_queue_head_init() 1362 list->qlen++; in __skb_insert() 1389 head->qlen += list->qlen; in skb_queue_splice() 1405 head->qlen += list->qlen; in skb_queue_splice_init() 1420 head->qlen += list->qlen; in skb_queue_splice_tail() 1437 head->qlen += list->qlen; in skb_queue_splice_tail_init() 1513 list->qlen--; in __skb_unlink()
|
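skbuff.h above is the reason qlen shows up all over this listing: every sk_buff_head carries a qlen that __skb_insert() and __skb_unlink() keep in step with the doubly linked list, and the splice helpers just add one head's qlen onto another's. A stand-alone list with the same counter discipline, using hypothetical node/list names and the same trick of letting the head double as a node:

#include <stdio.h>

struct node {
    struct node *next, *prev;
};

struct node_head {                 /* models sk_buff_head: next/prev first, then qlen */
    struct node *next, *prev;
    unsigned int qlen;
};

static void head_init(struct node_head *list)
{
    /* Same trick as sk_buff_head: the head doubles as a node in the ring. */
    list->prev = list->next = (struct node *)list;
    list->qlen = 0;                /* __skb_queue_head_init() */
}

/* Link newn between prev and next and bump the counter (__skb_insert()). */
static void insert_node(struct node *newn, struct node *prev, struct node *next,
                        struct node_head *list)
{
    newn->next = next;
    newn->prev = prev;
    next->prev = prev->next = newn;
    list->qlen++;
}

static void queue_tail(struct node_head *list, struct node *n)
{
    insert_node(n, list->prev, (struct node *)list, list);
}

/* Unlink and decrement, as __skb_unlink() does. */
static void unlink_node(struct node *n, struct node_head *list)
{
    n->next->prev = n->prev;
    n->prev->next = n->next;
    list->qlen--;
}

int main(void)
{
    struct node_head list;
    struct node a, b, c;

    head_init(&list);
    queue_tail(&list, &a);
    queue_tail(&list, &b);
    queue_tail(&list, &c);
    printf("qlen after three inserts: %u\n", list.qlen);
    unlink_node(&b, &list);
    printf("qlen after one unlink:    %u\n", list.qlen);
    return 0;
}

The skb_queue_splice() family visible in the hits follows the same discipline one level up: after relinking the nodes it simply does head->qlen += list->qlen in a single step.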
/linux-4.1.27/include/net/ |
D | gen_stats.h | 45 struct gnet_stats_queue *q, __u32 qlen);
|
D | sch_generic.h | 272 return q->q.qlen; in qdisc_qlen() 438 if (q->q.qlen) { in qdisc_all_tx_empty() 657 sch->q.qlen++; in qdisc_peek_dequeued() 670 sch->q.qlen--; in qdisc_dequeue_peeked()
|
D | request_sock.h | 162 int qlen; /* # of pending (TCP_SYN_RECV) reqs */ member
|
/linux-4.1.27/net/netfilter/ |
D | nfnetlink_log.c | 61 unsigned int qlen; /* number of nlmsgs in skb */ member 353 if (inst->qlen > 1) { in __nfulnl_send() 367 inst->qlen = 0; in __nfulnl_send() 717 inst->qlen++; in nfulnl_log_packet() 722 if (inst->qlen >= qthreshold) in nfulnl_log_packet() 1012 inst->peer_portid, inst->qlen, in seq_show()
|
D | Kconfig | 251 4: eth0: <BROADCAST,MULTICAST,UP> mtu 1500 qdisc pfifo_fast qlen 1000
|
/linux-4.1.27/drivers/net/wireless/brcm80211/brcmfmac/ |
D | bcmsdh.c | 522 if (!pktlist->qlen) in brcmf_sdiod_sglist_rw() 554 target_list->qlen); in brcmf_sdiod_sglist_rw() 555 seg_sz = target_list->qlen; in brcmf_sdiod_sglist_rw() 713 addr, pktq->qlen); in brcmf_sdiod_recv_chain() 719 if (pktq->qlen == 1) in brcmf_sdiod_recv_chain() 776 brcmf_dbg(SDIO, "addr = 0x%x, size = %d\n", addr, pktq->qlen); in brcmf_sdiod_send_pkt() 782 if (pktq->qlen == 1 || !sdiodev->sg_support) in brcmf_sdiod_send_pkt()
|
D | msgbuf.c | 1295 int qlen; in brcmf_proto_msgbuf_rx_trigger() local 1308 qlen = brcmf_flowring_qlen(msgbuf->flow, flowid); in brcmf_proto_msgbuf_rx_trigger() 1309 if ((qlen > BRCMF_MSGBUF_TRICKLE_TXWORKER_THRS) || in brcmf_proto_msgbuf_rx_trigger() 1310 ((qlen) && (atomic_read(&commonring->outstanding_tx) < in brcmf_proto_msgbuf_rx_trigger()
|
D | sdio.c | 1746 bus->glom.qlen, pfirst, pfirst->data, in brcmf_sdio_rxglom() 2246 if (bus->txglom && pktq->qlen > 1) { in brcmf_sdio_txpkt_prep() 2348 bus->tx_seq = (bus->tx_seq + pktq->qlen) % SDPCM_SEQ_WRAP; in brcmf_sdio_txpkt()
|
/linux-4.1.27/net/unix/ |
D | diag.c | 67 sk->sk_receive_queue.qlen * sizeof(u32)); in sk_diag_dump_icons() 102 rql.udiag_rqueue = sk->sk_receive_queue.qlen; in sk_diag_show_rqlen()
|
/linux-4.1.27/net/dccp/ |
D | qpolicy.c | 26 sk->sk_write_queue.qlen >= dccp_sk(sk)->dccps_tx_qlen; in qpolicy_simple_full()
|
D | input.c | 185 if (sk->sk_write_queue.qlen > 0 || !(sk->sk_shutdown & SEND_SHUTDOWN)) in dccp_deliver_input_to_ccids()
|
/linux-4.1.27/drivers/net/caif/ |
D | caif_serial.c | 269 if (ser->head.qlen <= SEND_QUEUE_LOW && in handle_tx() 288 if (ser->head.qlen > SEND_QUEUE_HIGH && in caif_xmit()
|
D | caif_spi.c | 443 if (cfspi->flow_off_sent && cfspi->qhead.qlen < cfspi->qd_low_mark && in cfspi_xmitlen() 509 cfspi->qhead.qlen > cfspi->qd_high_mark && in cfspi_xmit()
|
D | caif_hsi.c | 107 if (cfhsi->qhead[i].qlen) in cfhsi_can_send_aggregate() 112 if (cfhsi->qhead[CFHSI_PRIO_BEBK].qlen >= CFHSI_MAX_PKTS) in cfhsi_can_send_aggregate()
|
/linux-4.1.27/drivers/usb/host/ |
D | ohci-dbg.c | 530 unsigned qlen = 0; in fill_periodic_buffer() local 534 qlen++; in fill_periodic_buffer() 544 qlen, in fill_periodic_buffer()
|
/linux-4.1.27/drivers/tty/serial/jsm/ |
D | jsm_cls.c | 462 int qlen; in cls_copy_data_from_queue_to_uart() local 487 qlen = uart_circ_chars_pending(circ); in cls_copy_data_from_queue_to_uart() 490 n = min(n, qlen); in cls_copy_data_from_queue_to_uart()
|
D | jsm_neo.c | 492 int qlen; in neo_copy_data_from_queue_to_uart() local 538 qlen = uart_circ_chars_pending(circ); in neo_copy_data_from_queue_to_uart() 541 n = min(n, qlen); in neo_copy_data_from_queue_to_uart()
|
/linux-4.1.27/include/linux/sunrpc/ |
D | sched.h | 188 unsigned short qlen; /* total # tasks waiting in queue */ member
|
/linux-4.1.27/net/sunrpc/ |
D | sched.c | 182 queue->qlen++; in __rpc_add_wait_queue() 215 queue->qlen--; in __rpc_remove_wait_queue() 229 queue->qlen = 0; in __rpc_init_priority_wait_queue()
|
D | xprt.c | 984 xprt->stat.bklog_u += xprt->backlog.qlen; in xprt_transmit() 985 xprt->stat.sending_u += xprt->sending.qlen; in xprt_transmit() 986 xprt->stat.pending_u += xprt->pending.qlen; in xprt_transmit()
|
/linux-4.1.27/drivers/net/wireless/rtlwifi/ |
D | usb.c | 630 unsigned int qlen; in _rtl_rx_completed() local 641 qlen = skb_queue_len(&rtlusb->rx_queue); in _rtl_rx_completed() 642 if (qlen >= __RX_SKB_MAX_QUEUED) { in _rtl_rx_completed() 645 qlen); in _rtl_rx_completed()
|
/linux-4.1.27/drivers/staging/dgnc/ |
D | dgnc_tty.c | 910 int qlen = 0; in dgnc_wakeup_writes() local 921 qlen = ch->ch_w_head - ch->ch_w_tail; in dgnc_wakeup_writes() 922 if (qlen < 0) in dgnc_wakeup_writes() 923 qlen += WQUEUESIZE; in dgnc_wakeup_writes() 925 if (qlen >= (WQUEUESIZE - 256)) { in dgnc_wakeup_writes() 945 if ((qlen == 0) && (ch->ch_bd->bd_ops->get_uart_bytes_left(ch) == 0)) { in dgnc_wakeup_writes() 986 if ((qlen == 0) && (ch->ch_bd->bd_ops->get_uart_bytes_left(ch) == 0)) in dgnc_wakeup_writes()
|
D | dgnc_cls.c | 981 int qlen; in cls_copy_data_from_queue_to_uart() local 1007 qlen = (head - tail) & WQUEUEMASK; in cls_copy_data_from_queue_to_uart() 1010 n = min(n, qlen); in cls_copy_data_from_queue_to_uart()
|
D | dgnc_neo.c | 1413 int qlen; in neo_copy_data_from_queue_to_uart() local 1496 qlen = (head - tail) & WQUEUEMASK; in neo_copy_data_from_queue_to_uart() 1499 n = min(n, qlen); in neo_copy_data_from_queue_to_uart()
|
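dgnc_tty.c above computes the occupancy of its circular write queue with a signed head-minus-tail difference that is wrapped when negative, while dgnc_cls.c and dgnc_neo.c mask the same difference with WQUEUEMASK. Both forms side by side; the queue size below is an assumed power of two chosen for the demo, not the driver's real WQUEUESIZE.

#include <stdio.h>

#define WQUEUESIZE 8               /* assumed power-of-two size for the demo */
#define WQUEUEMASK (WQUEUESIZE - 1)

/* dgnc_wakeup_writes() style: signed difference, fixed up when it wraps. */
static int qlen_signed(int head, int tail)
{
    int qlen = head - tail;

    if (qlen < 0)
        qlen += WQUEUESIZE;
    return qlen;
}

/* cls/neo copy_data_from_queue_to_uart() style: mask the difference.
 * Equivalent to the signed form whenever WQUEUESIZE is a power of two. */
static int qlen_masked(int head, int tail)
{
    return (head - tail) & WQUEUEMASK;
}

int main(void)
{
    /* head has wrapped past the end of the buffer, tail has not. */
    int head = 2, tail = 6;

    printf("signed form: %d\n", qlen_signed(head, tail));
    printf("masked form: %d\n", qlen_masked(head, tail));
    return 0;
}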
/linux-4.1.27/drivers/bluetooth/ |
D | hci_bcsp.c | 310 if (bcsp->unack.qlen < BCSP_TXWINSIZE) { in bcsp_dequeue() 677 BT_DBG("hu %p retransmitting %u pkts", hu, bcsp->unack.qlen); in bcsp_timed_event()
|
D | hci_h5.c | 156 BT_DBG("hu %p retransmitting %u pkts", hu, h5->unack.qlen); in h5_timed_event() 713 if (h5->unack.qlen >= h5->tx_win) in h5_dequeue()
|
/linux-4.1.27/drivers/input/misc/ |
D | wistron_btns.c | 1160 u8 qlen; in poll_bios() local 1164 qlen = CMOS_READ(cmos_address); in poll_bios() 1165 if (qlen == 0) in poll_bios()
|
/linux-4.1.27/drivers/net/wireless/brcm80211/brcmutil/ |
D | utils.c | 224 len += pq->q[prec].skblist.qlen; in brcmu_pktq_mlen()
|
/linux-4.1.27/include/crypto/ |
D | algapi.h | 72 unsigned int qlen; member
|
/linux-4.1.27/drivers/misc/ |
D | hpilo.c | 48 int qlen = len & 7 ? (len >> 3) + 1 : len >> 3; in mk_entry() local 49 return id << ENTRY_BITPOS_DESCRIPTOR | qlen << ENTRY_BITPOS_QWORDS; in mk_entry()
|
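In hpilo.c, the qlen packed into a queue entry by mk_entry() is the payload length rounded up to whole 8-byte quadwords. A tiny check of that rounding; the descriptor/qword bit positions used for the actual packing are the driver's own constants and are not reproduced here.

#include <stdio.h>

/* Round a byte length up to 8-byte quadwords, as mk_entry() does in hpilo.c. */
static int bytes_to_qwords(int len)
{
    return len & 7 ? (len >> 3) + 1 : len >> 3;
}

int main(void)
{
    int lens[] = { 0, 1, 7, 8, 9, 64, 65 };
    unsigned int i;

    for (i = 0; i < sizeof(lens) / sizeof(lens[0]); i++)
        printf("%2d bytes -> %d qwords\n", lens[i], bytes_to_qwords(lens[i]));
    return 0;
}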
/linux-4.1.27/drivers/net/ethernet/toshiba/ |
D | tc35815.c | 1806 int qlen = (lp->tfd_start + TX_FD_NUM in tc35815_txdone() local 1816 if (lp->lstats.max_tx_qlen < qlen) in tc35815_txdone() 1817 lp->lstats.max_tx_qlen = qlen; in tc35815_txdone()
|
/linux-4.1.27/Documentation/RCU/ |
D | trace.txt | 545 rcu_sched: qlen: 0 546 rcu_bh: qlen: 0 551 o "qlen" is the number of RCU callbacks currently waiting either
|
/linux-4.1.27/drivers/net/ppp/ |
D | ppp_generic.c | 1291 if (ppp->file.rq.qlen > PPP_MAX_RQLEN) in ppp_send_frame() 1674 while (pch->file.rq.qlen > PPP_MAX_RQLEN && in ppp_input() 1824 while (ppp->file.rq.qlen > PPP_MAX_RQLEN && in ppp_receive_nonmp_frame()
|
/linux-4.1.27/drivers/crypto/ |
D | hifn_795x.c | 1929 dev->success, dev->queue.qlen, dev->queue.max_qlen, in hifn_work() 2177 if (dev->started < HIFN_QUEUE_LENGTH && dev->queue.qlen) in hifn_setup_crypto() 2560 if (dev->started < HIFN_QUEUE_LENGTH && dev->queue.qlen) in hifn_tasklet_callback()
|
/linux-4.1.27/Documentation/usb/ |
D | gadget_printer.txt | 70 qlen - The number of 8k buffers to use per endpoint. The default is 10, you
|
D | gadget-testing.txt | 233 qlen - depth of loopback queue 311 qlen - USB read request queue length
|
/linux-4.1.27/net/atm/ |
D | clip.c | 363 if (entry->neigh->arp_queue.qlen < ATMARP_MAX_UNRES_PACKETS) in clip_start_xmit()
|
D | lec.c | 284 if (entry && (entry->tx_wait.qlen < LEC_UNRES_QUE_LEN)) { in lec_start_xmit()
|
/linux-4.1.27/drivers/staging/i2o/ |
D | README.ioctl | 346 u32 qlen; /* Length in bytes of query string buffer */
|
/linux-4.1.27/drivers/scsi/bnx2fc/ |
D | bnx2fc_fcoe.c | 388 if (port->fcoe_pending_queue.qlen) in bnx2fc_xmit() 459 if (bg->fcoe_rx_list.qlen == 1) in bnx2fc_rcv()
|
/linux-4.1.27/drivers/net/ethernet/sun/ |
D | niu.c | 3699 int qlen, rcr_done = 0, work_done = 0; in niu_rx_work() local 3705 qlen = nr64(RCRSTAT_A(rp->rx_channel)) & RCRSTAT_A_QLEN; in niu_rx_work() 3708 qlen = (le64_to_cpup(&mbox->rcrstat_a) & RCRSTAT_A_QLEN); in niu_rx_work() 3715 __func__, rp->rx_channel, (unsigned long long)stat, qlen); in niu_rx_work() 3718 qlen = min(qlen, budget); in niu_rx_work() 3719 while (work_done < qlen) { in niu_rx_work() 3739 if (qlen > 10) in niu_rx_work()
|
/linux-4.1.27/net/ipv6/ |
D | ip6mr.c | 1288 if (c->mfc_un.unres.unresolved.qlen > 3) { in ip6mr_cache_unresolved()
|
/linux-4.1.27/drivers/net/ethernet/chelsio/cxgb4/ |
D | sge.c | 1601 if (q->sendq.qlen == 1) in ofld_xmit()
|
/linux-4.1.27/scripts/ |
D | checkpatch.pl | 919 my $qlen = 0;
|
/linux-4.1.27/net/mac80211/ |
D | cfg.c | 1511 pinfo->frame_qlen = mpath->frame_queue.qlen; in mpath_set_pinfo()
|
/linux-4.1.27/net/xfrm/ |
D | xfrm_policy.c | 1975 if (pq->hold_queue.qlen > XFRM_MAX_QUEUE_LEN) { in xdst_queue_output()
|
/linux-4.1.27/Documentation/networking/ |
D | can.txt | 1041 2: can0: <NOARP,UP,LOWER_UP,ECHO> mtu 16 qdisc pfifo_fast state UP qlen 10