
Searched refs:rb (Results 1 – 192 of 192) sorted by relevance

/linux-4.4.14/kernel/events/
ring_buffer.c
22 atomic_set(&handle->rb->poll, POLLIN); in perf_output_wakeup()
38 struct ring_buffer *rb = handle->rb; in perf_output_get_handle() local
41 local_inc(&rb->nest); in perf_output_get_handle()
42 handle->wakeup = local_read(&rb->wakeup); in perf_output_get_handle()
47 struct ring_buffer *rb = handle->rb; in perf_output_put_handle() local
51 head = local_read(&rb->head); in perf_output_put_handle()
57 if (!local_dec_and_test(&rb->nest)) in perf_output_put_handle()
87 rb->user_page->data_head = head; in perf_output_put_handle()
93 if (unlikely(head != local_read(&rb->head))) { in perf_output_put_handle()
94 local_inc(&rb->nest); in perf_output_put_handle()
[all …]
internal.h
58 extern void rb_free(struct ring_buffer *rb);
62 struct ring_buffer *rb; in rb_free_rcu() local
64 rb = container_of(rcu_head, struct ring_buffer, rcu_head); in rb_free_rcu()
65 rb_free(rb); in rb_free_rcu()
71 extern int rb_alloc_aux(struct ring_buffer *rb, struct perf_event *event,
73 extern void rb_free_aux(struct ring_buffer *rb);
75 extern void ring_buffer_put(struct ring_buffer *rb);
77 static inline bool rb_has_aux(struct ring_buffer *rb) in rb_has_aux() argument
79 return !!rb->aux_nr_pages; in rb_has_aux()
86 perf_mmap_to_page(struct ring_buffer *rb, unsigned long pgoff);
[all …]
core.c
3560 struct ring_buffer *rb);
3712 if (event->rb) { in _free_event()
4076 struct ring_buffer *rb; in perf_poll() local
4089 rb = event->rb; in perf_poll()
4090 if (rb) in perf_poll()
4091 events = atomic_xchg(&rb->poll, 0); in perf_poll()
4407 struct ring_buffer *rb; in perf_event_init_userpage() local
4410 rb = rcu_dereference(event->rb); in perf_event_init_userpage()
4411 if (!rb) in perf_event_init_userpage()
4414 userpg = rb->user_page; in perf_event_init_userpage()
[all …]
/linux-4.4.14/drivers/misc/mic/scif/
scif_rb.c
36 void scif_rb_init(struct scif_rb *rb, u32 *read_ptr, u32 *write_ptr, in scif_rb_init() argument
39 rb->rb_base = rb_base; in scif_rb_init()
40 rb->size = (1 << size); in scif_rb_init()
41 rb->read_ptr = read_ptr; in scif_rb_init()
42 rb->write_ptr = write_ptr; in scif_rb_init()
43 rb->current_read_offset = *read_ptr; in scif_rb_init()
44 rb->current_write_offset = *write_ptr; in scif_rb_init()
48 static void memcpy_torb(struct scif_rb *rb, void *header, in memcpy_torb() argument
53 if (header + size >= rb->rb_base + rb->size) { in memcpy_torb()
55 size1 = (u32)(rb->rb_base + rb->size - header); in memcpy_torb()
[all …]
scif_rb.h
83 void scif_rb_init(struct scif_rb *rb, u32 *read_ptr, u32 *write_ptr,
87 int scif_rb_write(struct scif_rb *rb, void *msg, u32 size);
89 void scif_rb_commit(struct scif_rb *rb);
91 u32 scif_rb_space(struct scif_rb *rb);
95 u32 scif_rb_get_next(struct scif_rb *rb, void *msg, u32 size);
97 void scif_rb_update_read_ptr(struct scif_rb *rb);
99 u32 scif_rb_count(struct scif_rb *rb, u32 size);
/linux-4.4.14/drivers/scsi/bfa/
bfa_ioc_ct.c
192 void __iomem *rb; in bfa_ioc_ct_reg_init() local
195 rb = bfa_ioc_bar0(ioc); in bfa_ioc_ct_reg_init()
197 ioc->ioc_regs.hfn_mbox = rb + ct_fnreg[pcifn].hfn_mbox; in bfa_ioc_ct_reg_init()
198 ioc->ioc_regs.lpu_mbox = rb + ct_fnreg[pcifn].lpu_mbox; in bfa_ioc_ct_reg_init()
199 ioc->ioc_regs.host_page_num_fn = rb + ct_fnreg[pcifn].hfn_pgn; in bfa_ioc_ct_reg_init()
202 ioc->ioc_regs.heartbeat = rb + BFA_IOC0_HBEAT_REG; in bfa_ioc_ct_reg_init()
203 ioc->ioc_regs.ioc_fwstate = rb + BFA_IOC0_STATE_REG; in bfa_ioc_ct_reg_init()
204 ioc->ioc_regs.alt_ioc_fwstate = rb + BFA_IOC1_STATE_REG; in bfa_ioc_ct_reg_init()
205 ioc->ioc_regs.hfn_mbox_cmd = rb + ct_p0reg[pcifn].hfn; in bfa_ioc_ct_reg_init()
206 ioc->ioc_regs.lpu_mbox_cmd = rb + ct_p0reg[pcifn].lpu; in bfa_ioc_ct_reg_init()
[all …]
bfa_ioc_cb.c
145 void __iomem *rb; in bfa_ioc_cb_reg_init() local
148 rb = bfa_ioc_bar0(ioc); in bfa_ioc_cb_reg_init()
150 ioc->ioc_regs.hfn_mbox = rb + iocreg_fnreg[pcifn].hfn_mbox; in bfa_ioc_cb_reg_init()
151 ioc->ioc_regs.lpu_mbox = rb + iocreg_fnreg[pcifn].lpu_mbox; in bfa_ioc_cb_reg_init()
152 ioc->ioc_regs.host_page_num_fn = rb + iocreg_fnreg[pcifn].hfn_pgn; in bfa_ioc_cb_reg_init()
155 ioc->ioc_regs.heartbeat = rb + BFA_IOC0_HBEAT_REG; in bfa_ioc_cb_reg_init()
156 ioc->ioc_regs.ioc_fwstate = rb + BFA_IOC0_STATE_REG; in bfa_ioc_cb_reg_init()
157 ioc->ioc_regs.alt_ioc_fwstate = rb + BFA_IOC1_STATE_REG; in bfa_ioc_cb_reg_init()
159 ioc->ioc_regs.heartbeat = (rb + BFA_IOC1_HBEAT_REG); in bfa_ioc_cb_reg_init()
160 ioc->ioc_regs.ioc_fwstate = (rb + BFA_IOC1_STATE_REG); in bfa_ioc_cb_reg_init()
[all …]
bfad_debugfs.c
259 void __iomem *rb, *reg_addr; in bfad_debugfs_write_regrd() local
289 rb = bfa_ioc_bar0(ioc); in bfad_debugfs_write_regrd()
303 reg_addr = rb + addr; in bfad_debugfs_write_regrd()
bfa_ioc.h
334 bfa_status_t (*ioc_pll_init) (void __iomem *rb, enum bfi_asic_mode m);
860 bfa_status_t bfa_ioc_cb_pll_init(void __iomem *rb, enum bfi_asic_mode mode);
861 bfa_status_t bfa_ioc_ct_pll_init(void __iomem *rb, enum bfi_asic_mode mode);
862 bfa_status_t bfa_ioc_ct2_pll_init(void __iomem *rb, enum bfi_asic_mode mode);
bfa_ioc.c
5520 void __iomem *rb; in bfa_phy_busy() local
5522 rb = bfa_ioc_bar0(ioc); in bfa_phy_busy()
5523 return readl(rb + BFA_PHY_LOCK_STATUS); in bfa_phy_busy()
/linux-4.4.14/drivers/net/ethernet/brocade/bna/
bfa_ioc_ct.c
57 static enum bfa_status bfa_ioc_ct_pll_init(void __iomem *rb,
59 static enum bfa_status bfa_ioc_ct2_pll_init(void __iomem *rb,
259 void __iomem *rb; in bfa_ioc_ct_reg_init() local
262 rb = bfa_ioc_bar0(ioc); in bfa_ioc_ct_reg_init()
264 ioc->ioc_regs.hfn_mbox = rb + ct_fnreg[pcifn].hfn_mbox; in bfa_ioc_ct_reg_init()
265 ioc->ioc_regs.lpu_mbox = rb + ct_fnreg[pcifn].lpu_mbox; in bfa_ioc_ct_reg_init()
266 ioc->ioc_regs.host_page_num_fn = rb + ct_fnreg[pcifn].hfn_pgn; in bfa_ioc_ct_reg_init()
269 ioc->ioc_regs.heartbeat = rb + BFA_IOC0_HBEAT_REG; in bfa_ioc_ct_reg_init()
270 ioc->ioc_regs.ioc_fwstate = rb + BFA_IOC0_STATE_REG; in bfa_ioc_ct_reg_init()
271 ioc->ioc_regs.alt_ioc_fwstate = rb + BFA_IOC1_STATE_REG; in bfa_ioc_ct_reg_init()
[all …]
bnad_debugfs.c
317 void __iomem *rb, *reg_addr; in bnad_debugfs_write_regrd() local
342 rb = bfa_ioc_bar0(ioc); in bnad_debugfs_write_regrd()
355 reg_addr = rb + addr; in bnad_debugfs_write_regrd()
bfa_ioc.h
203 enum bfa_status (*ioc_pll_init) (void __iomem *rb,
/linux-4.4.14/lib/
rbtree_test.c
12 struct rb_node rb; member
31 if (key < rb_entry(parent, struct test_node, rb)->key) in insert()
37 rb_link_node(&node->rb, parent, new); in insert()
38 rb_insert_color(&node->rb, root); in insert()
43 rb_erase(&node->rb, root); in erase()
49 if (node->rb.rb_left) { in augment_recompute()
50 child_augmented = rb_entry(node->rb.rb_left, struct test_node, in augment_recompute()
51 rb)->augmented; in augment_recompute()
55 if (node->rb.rb_right) { in augment_recompute()
56 child_augmented = rb_entry(node->rb.rb_right, struct test_node, in augment_recompute()
[all …]
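
The rbtree_test.c hits above illustrate the standard kernel red-black tree insert idiom: walk down the rb_node ** links to an empty slot, then rb_link_node() and rb_insert_color(). A minimal sketch of that pattern (mirroring lib/rbtree_test.c; the struct and key names are illustrative, not part of the search results):

#include <linux/rbtree.h>

struct test_node {
	u32 key;
	struct rb_node rb;
};

static void insert(struct test_node *node, struct rb_root *root)
{
	struct rb_node **new = &root->rb_node, *parent = NULL;
	u32 key = node->key;

	/* descend to the leaf slot that keeps the tree ordered by key */
	while (*new) {
		parent = *new;
		if (key < rb_entry(parent, struct test_node, rb)->key)
			new = &parent->rb_left;
		else
			new = &parent->rb_right;
	}

	/* link the new node as a red leaf, then rebalance/recolor */
	rb_link_node(&node->rb, parent, new);
	rb_insert_color(&node->rb, root);
}

Removal is the mirror image, rb_erase(&node->rb, root), as in the erase() hit above.
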
interval_tree.c
9 INTERVAL_TREE_DEFINE(struct interval_tree_node, rb,
rbtree.c
71 static inline void rb_set_black(struct rb_node *rb) in rb_set_black() argument
73 rb->__rb_parent_color |= RB_BLACK; in rb_set_black()
/linux-4.4.14/mm/
interval_tree.c
24 INTERVAL_TREE_DEFINE(struct vm_area_struct, shared.rb,
39 if (!prev->shared.rb.rb_right) { in vma_interval_tree_insert_after()
41 link = &prev->shared.rb.rb_right; in vma_interval_tree_insert_after()
43 parent = rb_entry(prev->shared.rb.rb_right, in vma_interval_tree_insert_after()
44 struct vm_area_struct, shared.rb); in vma_interval_tree_insert_after()
47 while (parent->shared.rb.rb_left) { in vma_interval_tree_insert_after()
48 parent = rb_entry(parent->shared.rb.rb_left, in vma_interval_tree_insert_after()
49 struct vm_area_struct, shared.rb); in vma_interval_tree_insert_after()
53 link = &parent->shared.rb.rb_left; in vma_interval_tree_insert_after()
57 rb_link_node(&node->shared.rb, &parent->shared.rb, link); in vma_interval_tree_insert_after()
[all …]
kmemleak.c
405 struct rb_node *rb = object_tree_root.rb_node; in lookup_object() local
407 while (rb) { in lookup_object()
409 rb_entry(rb, struct kmemleak_object, rb_node); in lookup_object()
411 rb = object->rb_node.rb_left; in lookup_object()
413 rb = object->rb_node.rb_right; in lookup_object()
nommu.c
1247 struct rb_node *rb; in do_mmap() local
1307 for (rb = rb_first(&nommu_region_tree); rb; rb = rb_next(rb)) { in do_mmap()
1308 pregion = rb_entry(rb, struct vm_region, vm_rb); in do_mmap()
/linux-4.4.14/drivers/block/drbd/
drbd_interval.c
11 struct drbd_interval *this = rb_entry(node, struct drbd_interval, rb); in interval_end()
27 if (node->rb.rb_left) { in compute_subtree_last()
28 sector_t left = interval_end(node->rb.rb_left); in compute_subtree_last()
32 if (node->rb.rb_right) { in compute_subtree_last()
33 sector_t right = interval_end(node->rb.rb_right); in compute_subtree_last()
40 RB_DECLARE_CALLBACKS(static, augment_callbacks, struct drbd_interval, rb,
56 rb_entry(*new, struct drbd_interval, rb); in drbd_insert_interval()
74 rb_link_node(&this->rb, parent, new); in drbd_insert_interval()
75 rb_insert_augmented(&this->rb, root, &augment_callbacks); in drbd_insert_interval()
97 rb_entry(node, struct drbd_interval, rb); in drbd_contains_interval()
[all …]
drbd_interval.h
8 struct rb_node rb; member
20 RB_CLEAR_NODE(&i->rb); in drbd_clear_interval()
25 return RB_EMPTY_NODE(&i->rb); in drbd_interval_empty()
/linux-4.4.14/drivers/target/iscsi/
iscsi_target_configfs.c
50 ssize_t rb; in lio_target_np_sctp_show() local
54 rb = sprintf(page, "1\n"); in lio_target_np_sctp_show()
56 rb = sprintf(page, "0\n"); in lio_target_np_sctp_show()
58 return rb; in lio_target_np_sctp_show()
118 ssize_t rb; in lio_target_np_iser_show() local
122 rb = sprintf(page, "1\n"); in lio_target_np_iser_show()
124 rb = sprintf(page, "0\n"); in lio_target_np_iser_show()
126 return rb; in lio_target_np_iser_show()
517 ssize_t rb; \
522 rb = snprintf(page, PAGE_SIZE, \
[all …]
/linux-4.4.14/fs/jffs2/
nodelist.h
230 struct rb_node rb; member
271 struct rb_node rb; member
334 return rb_entry(node, struct jffs2_node_frag, rb); in frag_first()
344 return rb_entry(node, struct jffs2_node_frag, rb); in frag_last()
347 #define frag_next(frag) rb_entry(rb_next(&(frag)->rb), struct jffs2_node_frag, rb)
348 #define frag_prev(frag) rb_entry(rb_prev(&(frag)->rb), struct jffs2_node_frag, rb)
349 #define frag_parent(frag) rb_entry(rb_parent(&(frag)->rb), struct jffs2_node_frag, rb)
350 #define frag_left(frag) rb_entry((frag)->rb.rb_left, struct jffs2_node_frag, rb)
351 #define frag_right(frag) rb_entry((frag)->rb.rb_right, struct jffs2_node_frag, rb)
352 #define frag_erase(frag, list) rb_erase(&frag->rb, list);
[all …]
nodelist.c
124 struct rb_node *parent = &base->rb; in jffs2_fragtree_insert()
131 base = rb_entry(parent, struct jffs2_node_frag, rb); in jffs2_fragtree_insert()
134 link = &base->rb.rb_right; in jffs2_fragtree_insert()
136 link = &base->rb.rb_left; in jffs2_fragtree_insert()
143 rb_link_node(&newfrag->rb, &base->rb, link); in jffs2_fragtree_insert()
189 rb_link_node(&holefrag->rb, &this->rb, &this->rb.rb_right); in no_overlapping_node()
193 rb_link_node(&holefrag->rb, NULL, &root->rb_node); in no_overlapping_node()
195 rb_insert_color(&holefrag->rb, root); in no_overlapping_node()
204 rb_link_node(&newfrag->rb, &this->rb, &this->rb.rb_right); in no_overlapping_node()
207 rb_link_node(&newfrag->rb, NULL, &root->rb_node); in no_overlapping_node()
[all …]
readinode.c
185 tn = rb_entry(next, struct jffs2_tmp_dnode_info, rb); in jffs2_lookup_tn()
188 next = tn->rb.rb_right; in jffs2_lookup_tn()
190 next = tn->rb.rb_left; in jffs2_lookup_tn()
286 rb_replace_node(&this->rb, &tn->rb, &rii->tn_root); in jffs2_add_tn_to_tree()
344 insert_point = rb_entry(parent, struct jffs2_tmp_dnode_info, rb); in jffs2_add_tn_to_tree()
346 link = &insert_point->rb.rb_right; in jffs2_add_tn_to_tree()
349 link = &insert_point->rb.rb_left; in jffs2_add_tn_to_tree()
351 link = &insert_point->rb.rb_right; in jffs2_add_tn_to_tree()
353 rb_link_node(&tn->rb, &insert_point->rb, link); in jffs2_add_tn_to_tree()
354 rb_insert_color(&tn->rb, &rii->tn_root); in jffs2_add_tn_to_tree()
[all …]
/linux-4.4.14/tools/include/linux/
rbtree_augmented.h
69 rbname ## _propagate(struct rb_node *rb, struct rb_node *stop) \
71 while (rb != stop) { \
72 rbstruct *node = rb_entry(rb, rbstruct, rbfield); \
77 rb = rb_parent(&node->rbfield); \
108 #define rb_color(rb) __rb_color((rb)->__rb_parent_color) argument
109 #define rb_is_red(rb) __rb_is_red((rb)->__rb_parent_color) argument
110 #define rb_is_black(rb) __rb_is_black((rb)->__rb_parent_color) argument
112 static inline void rb_set_parent(struct rb_node *rb, struct rb_node *p) in rb_set_parent() argument
114 rb->__rb_parent_color = rb_color(rb) | (unsigned long)p; in rb_set_parent()
117 static inline void rb_set_parent_color(struct rb_node *rb, in rb_set_parent_color() argument
[all …]
/linux-4.4.14/arch/x86/mm/
pat_rbtree.c
51 struct memtype *data = container_of(node, struct memtype, rb); in get_subtree_max_end()
61 child_max_end = get_subtree_max_end(data->rb.rb_right); in compute_subtree_max_end()
65 child_max_end = get_subtree_max_end(data->rb.rb_left); in compute_subtree_max_end()
72 RB_DECLARE_CALLBACKS(static, memtype_rb_augment_cb, struct memtype, rb, in RB_DECLARE_CALLBACKS() argument
83 struct memtype *data = container_of(node, struct memtype, rb); in RB_DECLARE_CALLBACKS()
113 node = rb_next(&match->rb); in memtype_rb_exact_match()
115 match = container_of(node, struct memtype, rb); in memtype_rb_exact_match()
142 node = rb_next(&match->rb); in memtype_rb_check_conflict()
144 match = container_of(node, struct memtype, rb); in memtype_rb_check_conflict()
154 node = rb_next(&match->rb); in memtype_rb_check_conflict()
[all …]
pat_internal.h
14 struct rb_node rb; member
/linux-4.4.14/include/linux/
rbtree_augmented.h
66 rbname ## _propagate(struct rb_node *rb, struct rb_node *stop) \
68 while (rb != stop) { \
69 rbstruct *node = rb_entry(rb, rbstruct, rbfield); \
74 rb = rb_parent(&node->rbfield); \
105 #define rb_color(rb) __rb_color((rb)->__rb_parent_color) argument
106 #define rb_is_red(rb) __rb_is_red((rb)->__rb_parent_color) argument
107 #define rb_is_black(rb) __rb_is_black((rb)->__rb_parent_color) argument
109 static inline void rb_set_parent(struct rb_node *rb, struct rb_node *p) in rb_set_parent() argument
111 rb->__rb_parent_color = rb_color(rb) | (unsigned long)p; in rb_set_parent()
114 static inline void rb_set_parent_color(struct rb_node *rb, in rb_set_parent_color() argument
[all …]
interval_tree_generic.h
158 struct rb_node *rb = node->ITRB.rb_right, *prev; \
168 if (rb) { \
169 ITSTRUCT *right = rb_entry(rb, ITSTRUCT, ITRB); \
177 rb = rb_parent(&node->ITRB); \
178 if (!rb) \
181 node = rb_entry(rb, ITSTRUCT, ITRB); \
182 rb = node->ITRB.rb_right; \
183 } while (prev == rb); \
interval_tree.h
7 struct rb_node rb; member
rmap.h
77 struct rb_node rb; /* locked by anon_vma->rwsem */ member
mm_types.h
328 struct rb_node rb; member
perf_event.h
545 struct ring_buffer *rb; member
668 struct ring_buffer *rb; member
kernfs.h
121 struct rb_node rb; member
fb.h
722 extern int fb_find_mode_cvt(struct fb_videomode *mode, int margins, int rb);
/linux-4.4.14/drivers/xen/xenbus/
xenbus_dev_frontend.c
126 struct read_buffer *rb; in xenbus_file_read() local
144 rb = list_entry(u->read_buffers.next, struct read_buffer, list); in xenbus_file_read()
147 unsigned sz = min((unsigned)len - i, rb->len - rb->cons); in xenbus_file_read()
149 ret = copy_to_user(ubuf + i, &rb->msg[rb->cons], sz); in xenbus_file_read()
152 rb->cons += sz - ret; in xenbus_file_read()
161 if (rb->cons == rb->len) { in xenbus_file_read()
162 list_del(&rb->list); in xenbus_file_read()
163 kfree(rb); in xenbus_file_read()
166 rb = list_entry(u->read_buffers.next, in xenbus_file_read()
187 struct read_buffer *rb; in queue_reply() local
[all …]
/linux-4.4.14/arch/arm/lib/
getuser.S
45 rb .req ip label
47 3: ldrbt rb, [r0], #0
49 rb .req r0 label
51 3: ldrb rb, [r0, #1]
54 orr r2, r2, rb, lsl #8
56 orr r2, rb, r2, lsl #8
105 rb .req ip label
107 10: ldrbt rb, [r0], #0
109 rb .req r0 label
111 10: ldrb rb, [r0, #1]
[all …]
/linux-4.4.14/arch/tile/kernel/
unaligned.c
180 uint64_t *rb, uint64_t *clob1, uint64_t *clob2, in find_regs() argument
208 *rb = reg; in find_regs()
209 alias_reg_map = (1ULL << *ra) | (1ULL << *rb); in find_regs()
260 *rb = reg; in find_regs()
261 alias_reg_map = (1ULL << *ra) | (1ULL << *rb); in find_regs()
316 static bool check_regs(uint64_t rd, uint64_t ra, uint64_t rb, in check_regs() argument
330 if ((rb >= 56) && (rb != TREG_ZERO)) in check_regs()
394 static tilegx_bundle_bits jit_x0_dblalign(int rd, int ra, int rb) in jit_x0_dblalign() argument
399 create_SrcB_X0(rb); in jit_x0_dblalign()
436 static tilegx_bundle_bits jit_x1_st1_add(int ra, int rb, int imm8) in jit_x1_st1_add() argument
[all …]
/linux-4.4.14/arch/sparc/kernel/
btext.c
22 static void draw_byte_32(unsigned char *bits, unsigned int *base, int rb);
23 static void draw_byte_16(unsigned char *bits, unsigned int *base, int rb);
24 static void draw_byte_8(unsigned char *bits, unsigned int *base, int rb);
197 int rb = dispDeviceRowBytes; in draw_byte() local
202 draw_byte_32(font, (unsigned int *)base, rb); in draw_byte()
206 draw_byte_16(font, (unsigned int *)base, rb); in draw_byte()
209 draw_byte_8(font, (unsigned int *)base, rb); in draw_byte()
241 static void draw_byte_32(unsigned char *font, unsigned int *base, int rb) in draw_byte_32() argument
258 base = (unsigned int *) ((char *)base + rb); in draw_byte_32()
262 static void draw_byte_16(unsigned char *font, unsigned int *base, int rb) in draw_byte_16() argument
[all …]
/linux-4.4.14/tools/perf/
builtin-lock.c
43 struct rb_node rb; /* used for sorting */ member
110 struct rb_node rb; member
125 st = container_of(node, struct thread_stat, rb); in thread_stat_find()
139 struct rb_node **rb = &thread_stats.rb_node; in thread_stat_insert() local
143 while (*rb) { in thread_stat_insert()
144 p = container_of(*rb, struct thread_stat, rb); in thread_stat_insert()
145 parent = *rb; in thread_stat_insert()
148 rb = &(*rb)->rb_left; in thread_stat_insert()
150 rb = &(*rb)->rb_right; in thread_stat_insert()
155 rb_link_node(&new->rb, parent, rb); in thread_stat_insert()
[all …]
builtin-kvm.c
479 struct rb_node **rb = &result->rb_node; in insert_to_result() local
483 while (*rb) { in insert_to_result()
484 p = container_of(*rb, struct kvm_event, rb); in insert_to_result()
485 parent = *rb; in insert_to_result()
488 rb = &(*rb)->rb_left; in insert_to_result()
490 rb = &(*rb)->rb_right; in insert_to_result()
493 rb_link_node(&event->rb, parent, rb); in insert_to_result()
494 rb_insert_color(&event->rb, result); in insert_to_result()
537 return container_of(node, struct kvm_event, rb); in pop_from_result()
/linux-4.4.14/arch/powerpc/include/asm/
kvm_book3s_64.h
137 unsigned long rb = 0, va_low, sllp; in compute_tlbie_rb() local
162 rb = (v & ~0x7fUL) << 16; /* AVA field */ in compute_tlbie_rb()
164 rb |= (v >> HPTE_V_SSIZE_SHIFT) << 8; /* B field */ in compute_tlbie_rb()
188 rb |= sllp << 5; /* AP field */ in compute_tlbie_rb()
189 rb |= (va_low & 0x7ff) << 12; /* remaining 11 bits of AVA */ in compute_tlbie_rb()
198 rb |= (va_low << mmu_psize_defs[b_psize].shift) & 0x7ff000; in compute_tlbie_rb()
202 rb &= ~((1ul << mmu_psize_defs[a_psize].shift) - 1); in compute_tlbie_rb()
209 rb |= ((va_low << aval_shift) & 0xfe); in compute_tlbie_rb()
211 rb |= 1; /* L field */ in compute_tlbie_rb()
213 rb |= penc << 12; /* LP field */ in compute_tlbie_rb()
[all …]
ppc_asm.h
27 #define ACCOUNT_CPU_USER_ENTRY(ra, rb) argument
28 #define ACCOUNT_CPU_USER_EXIT(ra, rb) argument
31 #define ACCOUNT_CPU_USER_ENTRY(ra, rb) \ argument
33 ld rb,PACA_STARTTIME_USER(r13); \
35 subf rb,rb,ra; /* subtract start value */ \
37 add ra,ra,rb; /* add on to user time */ \
40 #define ACCOUNT_CPU_USER_EXIT(ra, rb) \ argument
42 ld rb,PACA_STARTTIME(r13); \
44 subf rb,rb,ra; /* subtract start value */ \
46 add ra,ra,rb; /* add on to system time */ \
[all …]
asm-compat.h
76 #define PPC405_ERR77(ra,rb) stringify_in_c(dcbt ra, rb;) argument
79 #define PPC405_ERR77(ra,rb) argument
exception-64s.h
119 #define SAVE_PPR(area, ra, rb) \ argument
122 ld rb,area+EX_PPR(r13); /* Read PPR from paca */ \
123 std rb,TASKTHREADPPR(ra); \
kvm_ppc.h
665 static inline ulong kvmppc_get_ea_indexed(struct kvm_vcpu *vcpu, int ra, int rb) in kvmppc_get_ea_indexed() argument
670 ea = kvmppc_get_gpr(vcpu, rb); in kvmppc_get_ea_indexed()
kvm_host.h
350 void (*slbmte)(struct kvm_vcpu *vcpu, u64 rb, u64 rs);
/linux-4.4.14/fs/ocfs2/
refcounttree.c
84 struct ocfs2_refcount_block *rb = in ocfs2_validate_refcount_block() local
96 rc = ocfs2_validate_meta_ecc(sb, bh->b_data, &rb->rf_check); in ocfs2_validate_refcount_block()
104 if (!OCFS2_IS_VALID_REFCOUNT_BLOCK(rb)) { in ocfs2_validate_refcount_block()
108 rb->rf_signature); in ocfs2_validate_refcount_block()
112 if (le64_to_cpu(rb->rf_blkno) != bh->b_blocknr) { in ocfs2_validate_refcount_block()
116 (unsigned long long)le64_to_cpu(rb->rf_blkno)); in ocfs2_validate_refcount_block()
120 if (le32_to_cpu(rb->rf_fs_generation) != OCFS2_SB(sb)->fs_generation) { in ocfs2_validate_refcount_block()
124 le32_to_cpu(rb->rf_fs_generation)); in ocfs2_validate_refcount_block()
458 struct ocfs2_refcount_block *rb; in ocfs2_lock_refcount_tree() local
485 rb = (struct ocfs2_refcount_block *)ref_root_bh->b_data; in ocfs2_lock_refcount_tree()
[all …]
xattr.c
6275 struct ocfs2_refcount_block *rb = in ocfs2_reflink_lock_xattr_allocators() local
6299 if (le32_to_cpu(rb->rf_flags) & OCFS2_REFCOUNT_TREE_FL) in ocfs2_reflink_lock_xattr_allocators()
6300 *credits += le16_to_cpu(rb->rf_list.l_tree_depth) * in ocfs2_reflink_lock_xattr_allocators()
6301 le16_to_cpu(rb->rf_list.l_next_free_rec) + 1; in ocfs2_reflink_lock_xattr_allocators()
6743 struct ocfs2_refcount_block *rb; in ocfs2_lock_reflink_xattr_rec_allocators() local
6767 rb = (struct ocfs2_refcount_block *)args->reflink->ref_root_bh->b_data; in ocfs2_lock_reflink_xattr_rec_allocators()
6774 if (le32_to_cpu(rb->rf_flags) & OCFS2_REFCOUNT_TREE_FL) in ocfs2_lock_reflink_xattr_rec_allocators()
6775 *credits += le16_to_cpu(rb->rf_list.l_tree_depth) * in ocfs2_lock_reflink_xattr_rec_allocators()
6776 le16_to_cpu(rb->rf_list.l_next_free_rec) + 1; in ocfs2_lock_reflink_xattr_rec_allocators()
alloc.c
392 struct ocfs2_refcount_block *rb = et->et_object; in ocfs2_refcount_tree_fill_root_el() local
394 et->et_root_el = &rb->rf_list; in ocfs2_refcount_tree_fill_root_el()
400 struct ocfs2_refcount_block *rb = et->et_object; in ocfs2_refcount_tree_set_last_eb_blk() local
402 rb->rf_last_eb_blk = cpu_to_le64(blkno); in ocfs2_refcount_tree_set_last_eb_blk()
407 struct ocfs2_refcount_block *rb = et->et_object; in ocfs2_refcount_tree_get_last_eb_blk() local
409 return le64_to_cpu(rb->rf_last_eb_blk); in ocfs2_refcount_tree_get_last_eb_blk()
415 struct ocfs2_refcount_block *rb = et->et_object; in ocfs2_refcount_tree_update_clusters() local
417 le32_add_cpu(&rb->rf_clusters, clusters); in ocfs2_refcount_tree_update_clusters()
/linux-4.4.14/drivers/mtd/ubi/
attach.c
270 av = rb_entry(parent, struct ubi_ainf_volume, rb); in add_volume()
297 rb_link_node(&av->rb, parent, p); in add_volume()
298 rb_insert_color(&av->rb, &ai->volumes); in add_volume()
481 aeb = rb_entry(parent, struct ubi_ainf_peb, u.rb); in ubi_add_to_av()
592 rb_link_node(&aeb->u.rb, parent, p); in ubi_add_to_av()
593 rb_insert_color(&aeb->u.rb, &av->root); in ubi_add_to_av()
612 av = rb_entry(p, struct ubi_ainf_volume, rb); in ubi_find_av()
633 struct rb_node *rb; in ubi_remove_av() local
638 while ((rb = rb_first(&av->root))) { in ubi_remove_av()
639 aeb = rb_entry(rb, struct ubi_ainf_peb, u.rb); in ubi_remove_av()
[all …]
wl.c
161 e1 = rb_entry(parent, struct ubi_wl_entry, u.rb); in wl_tree_add()
176 rb_link_node(&e->u.rb, parent, p); in wl_tree_add()
177 rb_insert_color(&e->u.rb, root); in wl_tree_add()
257 e1 = rb_entry(p, struct ubi_wl_entry, u.rb); in in_wl_tree()
317 e = rb_entry(rb_first(root), struct ubi_wl_entry, u.rb); in find_wl_entry()
324 e1 = rb_entry(p, struct ubi_wl_entry, u.rb); in find_wl_entry()
358 first = rb_entry(rb_first(root), struct ubi_wl_entry, u.rb); in find_mean_wl_entry()
359 last = rb_entry(rb_last(root), struct ubi_wl_entry, u.rb); in find_mean_wl_entry()
362 e = rb_entry(root->rb_node, struct ubi_wl_entry, u.rb); in find_mean_wl_entry()
398 rb_erase(&e->u.rb, &ubi->free); in wl_get_wle()
[all …]
ubi.h
178 struct rb_node rb; member
200 struct rb_node rb; member
655 struct rb_node rb; member
690 struct rb_node rb; member
941 ubi_rb_for_each_entry((tmp_rb), (e), &(ubi)->free, u.rb)
950 ubi_rb_for_each_entry((tmp_rb), (e), &(ubi)->used, u.rb)
959 ubi_rb_for_each_entry((tmp_rb), (e), &(ubi)->scrub, u.rb)
978 #define ubi_rb_for_each_entry(rb, pos, root, member) \ argument
979 for (rb = rb_first(root), \
980 pos = (rb ? container_of(rb, typeof(*pos), member) : NULL); \
[all …]
fastmap-wl.c
41 ubi_rb_for_each_entry(p, e, root, u.rb) { in find_anchor_wl_entry()
74 ubi_rb_for_each_entry(p, e, root, u.rb) in anchor_pebs_avalible()
109 rb_erase(&e->u.rb, &ubi->free); in ubi_wl_get_fm_peb()
156 rb_erase(&e->u.rb, &ubi->free); in ubi_refill_pools()
388 struct ubi_wl_entry, u.rb); in may_reserve_for_fm()
eba.c
105 le = rb_entry(p, struct ubi_ltree_entry, rb); in ltree_lookup()
171 le1 = rb_entry(parent, struct ubi_ltree_entry, rb); in ltree_add_entry()
186 rb_link_node(&le->rb, parent, p); in ltree_add_entry()
187 rb_insert_color(&le->rb, &ubi->ltree); in ltree_add_entry()
232 rb_erase(&le->rb, &ubi->ltree); in leb_read_unlock()
284 rb_erase(&le->rb, &ubi->ltree); in leb_write_trylock()
308 rb_erase(&le->rb, &ubi->ltree); in leb_write_unlock()
1321 struct rb_node *rb; in self_check_eba() local
1361 ubi_rb_for_each_entry(rb, aeb, &av->root, u.rb) in self_check_eba()
1368 ubi_rb_for_each_entry(rb, aeb, &av->root, u.rb) in self_check_eba()
[all …]
fastmap.c
191 av = rb_entry(parent, struct ubi_ainf_volume, rb); in add_vol()
217 rb_link_node(&av->rb, parent, p); in add_vol()
218 rb_insert_color(&av->rb, &ai->volumes); in add_vol()
242 tmp_aeb = rb_entry(parent, struct ubi_ainf_peb, u.rb); in assign_aeb_to_av()
257 rb_link_node(&aeb->u.rb, parent, p); in assign_aeb_to_av()
258 rb_insert_color(&aeb->u.rb, &av->root); in assign_aeb_to_av()
281 aeb = rb_entry(parent, struct ubi_ainf_peb, u.rb); in update_vol()
352 rb_link_node(&new_aeb->u.rb, parent, p); in update_vol()
353 rb_insert_color(&new_aeb->u.rb, &av->root); in update_vol()
385 tmp_av = rb_entry(parent, struct ubi_ainf_volume, rb); in process_pool_aeb()
[all …]
vtbl.c
376 struct rb_node *rb; in process_lvol() local
409 ubi_rb_for_each_entry(rb, aeb, &av->root, u.rb) { in process_lvol()
/linux-4.4.14/arch/powerpc/lib/
sstep.c
131 int ra, rb; in xform_ea() local
135 rb = (instr >> 11) & 0x1f; in xform_ea()
136 ea = regs->gpr[rb]; in xform_ea()
645 unsigned int opcode, ra, rb, rd, spr, u; in analyse_instr() local
728 rb = (instr >> 11) & 0x1f; in analyse_instr()
731 rb = (regs->ccr >> (31 - rb)) & 1; in analyse_instr()
732 val = (instr >> (6 + ra * 2 + rb)) & 1; in analyse_instr()
769 rb = (instr >> 11) & 0x1f; in analyse_instr()
842 regs->gpr[ra] = (regs->gpr[ra] & ~imm) | (ROTATE(val, rb) & imm); in analyse_instr()
849 regs->gpr[ra] = ROTATE(val, rb) & MASK32(mb, me); in analyse_instr()
[all …]
/linux-4.4.14/drivers/tty/hvc/
hvc_iucv.c
221 struct iucv_tty_buffer *rb; in hvc_iucv_write() local
239 rb = list_first_entry(&priv->tty_inqueue, struct iucv_tty_buffer, list); in hvc_iucv_write()
242 if (!rb->mbuf) { /* message not yet received ... */ in hvc_iucv_write()
245 rb->mbuf = kmalloc(rb->msg.length, GFP_ATOMIC | GFP_DMA); in hvc_iucv_write()
246 if (!rb->mbuf) in hvc_iucv_write()
249 rc = __iucv_message_receive(priv->path, &rb->msg, 0, in hvc_iucv_write()
250 rb->mbuf, rb->msg.length, NULL); in hvc_iucv_write()
262 if (rc || (rb->mbuf->version != MSG_VERSION) || in hvc_iucv_write()
263 (rb->msg.length != MSG_SIZE(rb->mbuf->datalen))) in hvc_iucv_write()
267 switch (rb->mbuf->type) { in hvc_iucv_write()
[all …]
/linux-4.4.14/fs/nilfs2/
recovery.c
353 struct nilfs_recovery_block *rb; in nilfs_scan_dsync_log() local
361 rb = kmalloc(sizeof(*rb), GFP_NOFS); in nilfs_scan_dsync_log()
362 if (unlikely(!rb)) { in nilfs_scan_dsync_log()
366 rb->ino = ino; in nilfs_scan_dsync_log()
367 rb->blocknr = blocknr++; in nilfs_scan_dsync_log()
368 rb->vblocknr = le64_to_cpu(binfo->bi_vblocknr); in nilfs_scan_dsync_log()
369 rb->blkoff = le64_to_cpu(binfo->bi_blkoff); in nilfs_scan_dsync_log()
371 list_add_tail(&rb->list, head); in nilfs_scan_dsync_log()
390 struct nilfs_recovery_block *rb; in dispose_recovery_list() local
392 rb = list_first_entry(head, struct nilfs_recovery_block, list); in dispose_recovery_list()
[all …]
/linux-4.4.14/net/sunrpc/xprtrdma/
xprt_rdma.h
128 rdmab_addr(struct rpcrdma_regbuf *rb) in rdmab_addr() argument
130 return rb->rg_iov.addr; in rdmab_addr()
134 rdmab_length(struct rpcrdma_regbuf *rb) in rdmab_length() argument
136 return rb->rg_iov.length; in rdmab_length()
140 rdmab_lkey(struct rpcrdma_regbuf *rb) in rdmab_lkey() argument
142 return rb->rg_iov.lkey; in rdmab_lkey()
146 rdmab_to_msg(struct rpcrdma_regbuf *rb) in rdmab_to_msg() argument
148 return (struct rpcrdma_msg *)rb->rg_base; in rdmab_to_msg()
286 struct rpcrdma_regbuf *rb; in rpcr_to_rdmar() local
288 rb = container_of(buffer, struct rpcrdma_regbuf, rg_base); in rpcr_to_rdmar()
[all …]
backchannel.c
39 struct rpcrdma_regbuf *rb; in rpcrdma_bc_setup_rqst() local
50 rb = rpcrdma_alloc_regbuf(ia, size, GFP_KERNEL); in rpcrdma_bc_setup_rqst()
51 if (IS_ERR(rb)) in rpcrdma_bc_setup_rqst()
53 req->rl_rdmabuf = rb; in rpcrdma_bc_setup_rqst()
56 rb = rpcrdma_alloc_regbuf(ia, size, GFP_KERNEL); in rpcrdma_bc_setup_rqst()
57 if (IS_ERR(rb)) in rpcrdma_bc_setup_rqst()
59 rb->rg_owner = req; in rpcrdma_bc_setup_rqst()
60 req->rl_sendbuf = rb; in rpcrdma_bc_setup_rqst()
transport.c
493 struct rpcrdma_regbuf *rb; in xprt_rdma_allocate() local
520 rb = rpcrdma_alloc_regbuf(&r_xprt->rx_ia, min_size, flags); in xprt_rdma_allocate()
521 if (IS_ERR(rb)) in xprt_rdma_allocate()
523 req->rl_rdmabuf = rb; in xprt_rdma_allocate()
547 rb = rpcrdma_alloc_regbuf(&r_xprt->rx_ia, size, flags); in xprt_rdma_allocate()
548 if (IS_ERR(rb)) in xprt_rdma_allocate()
550 rb->rg_owner = req; in xprt_rdma_allocate()
554 req->rl_sendbuf = rb; in xprt_rdma_allocate()
571 struct rpcrdma_regbuf *rb; in xprt_rdma_free() local
577 rb = container_of(buffer, struct rpcrdma_regbuf, rg_base[0]); in xprt_rdma_free()
[all …]
verbs.c
1202 struct rpcrdma_regbuf *rb; in rpcrdma_alloc_regbuf() local
1205 rb = kmalloc(sizeof(*rb) + size, flags); in rpcrdma_alloc_regbuf()
1206 if (rb == NULL) in rpcrdma_alloc_regbuf()
1209 iov = &rb->rg_iov; in rpcrdma_alloc_regbuf()
1211 (void *)rb->rg_base, size, in rpcrdma_alloc_regbuf()
1218 rb->rg_size = size; in rpcrdma_alloc_regbuf()
1219 rb->rg_owner = NULL; in rpcrdma_alloc_regbuf()
1220 return rb; in rpcrdma_alloc_regbuf()
1223 kfree(rb); in rpcrdma_alloc_regbuf()
1234 rpcrdma_free_regbuf(struct rpcrdma_ia *ia, struct rpcrdma_regbuf *rb) in rpcrdma_free_regbuf() argument
[all …]
/linux-4.4.14/arch/powerpc/kvm/
book3s_pr_papr.c
90 unsigned long v = 0, pteg, rb; in kvmppc_h_pr_remove() local
108 rb = compute_tlbie_rb(pte[0], pte[1], pte_index); in kvmppc_h_pr_remove()
109 vcpu->arch.mmu.tlbie(vcpu, rb, rb & 1 ? true : false); in kvmppc_h_pr_remove()
150 unsigned long pteg, rb, flags; in kvmppc_h_pr_bulk_remove() local
189 rb = compute_tlbie_rb(pte[0], pte[1], in kvmppc_h_pr_bulk_remove()
191 vcpu->arch.mmu.tlbie(vcpu, rb, rb & 1 ? true : false); in kvmppc_h_pr_bulk_remove()
208 unsigned long rb, pteg, r, v; in kvmppc_h_pr_protect() local
233 rb = compute_tlbie_rb(v, r, pte_index); in kvmppc_h_pr_protect()
234 vcpu->arch.mmu.tlbie(vcpu, rb, rb & 1 ? true : false); in kvmppc_h_pr_protect()
e500_emulate.c
54 static int kvmppc_e500_emul_msgclr(struct kvm_vcpu *vcpu, int rb) in kvmppc_e500_emul_msgclr() argument
56 ulong param = vcpu->arch.gpr[rb]; in kvmppc_e500_emul_msgclr()
66 static int kvmppc_e500_emul_msgsnd(struct kvm_vcpu *vcpu, int rb) in kvmppc_e500_emul_msgsnd() argument
68 ulong param = vcpu->arch.gpr[rb]; in kvmppc_e500_emul_msgsnd()
69 int prio = dbell2prio(rb); in kvmppc_e500_emul_msgsnd()
136 int rb = get_rb(inst); in kvmppc_core_emulate_op_e500() local
150 emulated = kvmppc_e500_emul_msgsnd(vcpu, rb); in kvmppc_core_emulate_op_e500()
154 emulated = kvmppc_e500_emul_msgclr(vcpu, rb); in kvmppc_core_emulate_op_e500()
167 ea = kvmppc_get_ea_indexed(vcpu, ra, rb); in kvmppc_core_emulate_op_e500()
173 ea = kvmppc_get_ea_indexed(vcpu, ra, rb); in kvmppc_core_emulate_op_e500()
[all …]
book3s_hv_ras.c
54 unsigned long rb = be64_to_cpu(slb->save_area[i].esid); in reload_slb() local
57 rb = (rb & ~0xFFFul) | i; /* insert entry number */ in reload_slb()
58 asm volatile("slbmte %0,%1" : : "r" (rs), "r" (rb)); in reload_slb()
book3s_64_mmu.c
378 static void kvmppc_mmu_book3s_64_slbmte(struct kvm_vcpu *vcpu, u64 rs, u64 rb) in kvmppc_mmu_book3s_64_slbmte() argument
385 dprintk("KVM MMU: slbmte(0x%llx, 0x%llx)\n", rs, rb); in kvmppc_mmu_book3s_64_slbmte()
389 esid = GET_ESID(rb); in kvmppc_mmu_book3s_64_slbmte()
390 esid_1t = GET_ESID_1T(rb); in kvmppc_mmu_book3s_64_slbmte()
391 slb_nr = rb & 0xfff; in kvmppc_mmu_book3s_64_slbmte()
402 slbe->valid = (rb & SLB_ESID_V) ? 1 : 0; in kvmppc_mmu_book3s_64_slbmte()
423 slbe->orige = rb & (ESID_MASK | SLB_ESID_V); in kvmppc_mmu_book3s_64_slbmte()
497 u64 rb = 0, rs = 0; in kvmppc_mmu_book3s_64_mtsrin() local
520 rb |= (srnum & 0xf) << 28; in kvmppc_mmu_book3s_64_mtsrin()
522 rb |= 1 << 27; in kvmppc_mmu_book3s_64_mtsrin()
[all …]
book3s_emulate.c
97 int rb = get_rb(inst); in kvmppc_core_emulate_op_pr() local
167 srnum = (kvmppc_get_gpr(vcpu, rb) >> 28) & 0xf; in kvmppc_core_emulate_op_pr()
182 (kvmppc_get_gpr(vcpu, rb) >> 28) & 0xf, in kvmppc_core_emulate_op_pr()
189 ulong addr = kvmppc_get_gpr(vcpu, rb); in kvmppc_core_emulate_op_pr()
229 kvmppc_get_gpr(vcpu, rb)); in kvmppc_core_emulate_op_pr()
236 kvmppc_get_gpr(vcpu, rb)); in kvmppc_core_emulate_op_pr()
250 rb_val = kvmppc_get_gpr(vcpu, rb); in kvmppc_core_emulate_op_pr()
261 rb_val = kvmppc_get_gpr(vcpu, rb); in kvmppc_core_emulate_op_pr()
271 ulong rb_val = kvmppc_get_gpr(vcpu, rb); in kvmppc_core_emulate_op_pr()
674 ulong rb = get_rb(inst); in kvmppc_alignment_dar()
[all …]
book3s_hv_rm_mmu.c
436 unsigned long v, r, rb; in kvmppc_do_h_remove() local
457 rb = compute_tlbie_rb(v, be64_to_cpu(hpte[1]), pte_index); in kvmppc_do_h_remove()
458 do_tlbies(kvm, &rb, 1, global_invalidates(kvm, flags), true); in kvmppc_do_h_remove()
607 unsigned long v, r, rb, mask, bits; in kvmppc_h_protect() local
651 rb = compute_tlbie_rb(v, r, pte_index); in kvmppc_h_protect()
654 do_tlbies(kvm, &rb, 1, global_invalidates(kvm, flags), in kvmppc_h_protect()
800 unsigned long rb; in kvmppc_invalidate_hpte() local
803 rb = compute_tlbie_rb(be64_to_cpu(hptep[0]), be64_to_cpu(hptep[1]), in kvmppc_invalidate_hpte()
805 do_tlbies(kvm, &rb, 1, 1, true); in kvmppc_invalidate_hpte()
812 unsigned long rb; in kvmppc_clear_ref_hpte() local
[all …]
/linux-4.4.14/Documentation/
rbtree.txt
246 Interval tree is an example of augmented rb tree. Reference -
272 node = rb_entry(root->rb_node, struct interval_tree_node, rb);
275 if (node->rb.rb_left) {
277 rb_entry(node->rb.rb_left,
278 struct interval_tree_node, rb);
295 if (node->rb.rb_right) {
296 node = rb_entry(node->rb.rb_right,
297 struct interval_tree_node, rb);
312 if (node->rb.rb_left) {
313 subtree_last = rb_entry(node->rb.rb_left,
[all …]
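
The rbtree.txt excerpt above walks an augmented interval tree by hand; the INTERVAL_TREE_DEFINE() instantiations matched in lib/interval_tree.c and mm/interval_tree.c generate a ready-made query API instead. A minimal sketch of that generated interface (assuming the 4.4 include/linux/interval_tree.h helpers; the addresses are illustrative):

#include <linux/interval_tree.h>

static void interval_tree_example(struct rb_root *root,
				  struct interval_tree_node *n)
{
	struct interval_tree_node *it;

	n->start = 0x1000;
	n->last  = 0x1fff;		/* 'last' is inclusive */
	interval_tree_insert(n, root);

	/* visit every node whose [start, last] overlaps [0x1800, 0x2800] */
	for (it = interval_tree_iter_first(root, 0x1800, 0x2800); it;
	     it = interval_tree_iter_next(it, 0x1800, 0x2800))
		;	/* it->start and it->last describe the hit */

	interval_tree_remove(n, root);
}
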
kprobes.txt
649 r = unoptimized kretprobe, rb = boosted kretprobe, ro = optimized kretprobe.
652 k = 0.80 usec; b = 0.33; o = 0.05; r = 1.10; rb = 0.61; ro = 0.33
655 k = 0.99 usec; b = 0.43; o = 0.06; r = 1.24; rb = 0.68; ro = 0.30
java.txt
294 classfile = fopen(argv[1], "rb");
/linux-4.4.14/arch/powerpc/kernel/
mce_power.c
33 unsigned long rb; in flush_tlb_206() local
38 rb = TLBIEL_INVAL_SET; in flush_tlb_206()
41 rb = TLBIEL_INVAL_SET_LPID; in flush_tlb_206()
50 asm volatile("tlbiel %0" : : "r" (rb)); in flush_tlb_206()
51 rb += 1 << TLBIEL_INVAL_SET_SHIFT; in flush_tlb_206()
107 unsigned long rb = be64_to_cpu(slb->save_area[i].esid); in flush_and_reload_slb() local
110 rb = (rb & ~0xFFFul) | i; in flush_and_reload_slb()
111 asm volatile("slbmte %0,%1" : : "r" (rs), "r" (rb)); in flush_and_reload_slb()
btext.c
425 static void draw_byte_32(unsigned char *font, unsigned int *base, int rb) in draw_byte_32() argument
442 base = (unsigned int *) ((char *)base + rb); in draw_byte_32()
446 static inline void draw_byte_16(unsigned char *font, unsigned int *base, int rb) in draw_byte_16() argument
460 base = (unsigned int *) ((char *)base + rb); in draw_byte_16()
464 static inline void draw_byte_8(unsigned char *font, unsigned int *base, int rb) in draw_byte_8() argument
476 base = (unsigned int *) ((char *)base + rb); in draw_byte_8()
484 int rb = dispDeviceRowBytes; in draw_byte() local
490 draw_byte_32(font, (unsigned int *)base, rb); in draw_byte()
494 draw_byte_16(font, (unsigned int *)base, rb); in draw_byte()
497 draw_byte_8(font, (unsigned int *)base, rb); in draw_byte()
time.c
1082 u64 ra, rb, rc; in div128_by_32() local
1092 rb = ((u64) do_div(ra, divisor) << 32) + c; in div128_by_32()
1095 rc = ((u64) do_div(rb, divisor) << 32) + d; in div128_by_32()
1096 y = rb; in div128_by_32()
kvm.c
376 static void kvm_patch_ins_mtsrin(u32 *inst, u32 rt, u32 rb) in kvm_patch_ins_mtsrin() argument
401 p[kvm_emulate_mtsrin_reg1_offs] |= (rb << 10); in kvm_patch_ins_mtsrin()
traps.c
352 unsigned int rb; in check_io_access() local
355 rb = (*nip >> 11) & 0x1f; in check_io_access()
358 regs->gpr[rb] - _IO_BASE, nip); in check_io_access()
/linux-4.4.14/drivers/i2c/
i2c-stub.c
101 struct smbus_block_data *b, *rb = NULL; in stub_find_block() local
105 rb = b; in stub_find_block()
109 if (rb == NULL && create) { in stub_find_block()
110 rb = devm_kzalloc(dev, sizeof(*rb), GFP_KERNEL); in stub_find_block()
111 if (rb == NULL) in stub_find_block()
112 return rb; in stub_find_block()
113 rb->command = command; in stub_find_block()
114 list_add(&rb->node, &chip->smbus_blocks); in stub_find_block()
116 return rb; in stub_find_block()
/linux-4.4.14/drivers/gpu/drm/msm/adreno/
adreno_gpu.c
60 ret = msm_gem_get_iova(gpu->rb->bo, gpu->id, &gpu->rb_iova); in adreno_hw_init()
70 AXXX_CP_RB_CNTL_BUFSZ(ilog2(gpu->rb->size / 8)) | in adreno_hw_init()
107 gpu->rb->cur = gpu->rb->start; in adreno_recover()
127 struct msm_ringbuffer *ring = gpu->rb; in adreno_submit()
207 uint32_t wptr = get_wptr(gpu->rb); in adreno_flush()
218 uint32_t wptr = get_wptr(gpu->rb); in adreno_idle()
242 seq_printf(m, "rb wptr: %d\n", get_wptr(gpu->rb)); in adreno_show()
283 printk("rb wptr: %d\n", get_wptr(gpu->rb)); in adreno_dump_info()
314 uint32_t size = gpu->rb->size / 4; in ring_freewords()
315 uint32_t wptr = get_wptr(gpu->rb); in ring_freewords()
a4xx_gpu.c
112 struct msm_ringbuffer *ring = gpu->rb; in a4xx_me_init()
a3xx_gpu.c
46 struct msm_ringbuffer *ring = gpu->rb; in a3xx_me_init()
/linux-4.4.14/drivers/staging/wilc1000/
linux_wlan_spi.c
247 int linux_spi_read(u8 *rb, u32 rlen) in linux_spi_read() argument
265 .rx_buf = rb + (i * TXRX_PHASE_SIZE), in linux_spi_read()
289 .rx_buf = rb + (blk * TXRX_PHASE_SIZE), in linux_spi_read()
321 int linux_spi_read(u8 *rb, u32 rlen) in linux_spi_read() argument
329 .rx_buf = rb, in linux_spi_read()
366 int linux_spi_write_read(u8 *wb, u8 *rb, u32 rlen) in linux_spi_write_read() argument
374 .rx_buf = rb, in linux_spi_write_read()
linux_wlan_spi.h
11 int linux_spi_read(u8 *rb, u32 rlen);
12 int linux_spi_write_read(u8 *wb, u8 *rb, u32 rlen);
wilc_spi.c
273 u8 wb[32], rb[32]; in spi_cmd_complete() local
412 if (!g_spi.spi_trx(wb, rb, len2)) { in spi_cmd_complete()
428 rsp = rb[rix++]; in spi_cmd_complete()
442 rsp = rb[rix++]; in spi_cmd_complete()
462 rsp = rb[rix++]; in spi_cmd_complete()
483 b[0] = rb[rix++]; in spi_cmd_complete()
484 b[1] = rb[rix++]; in spi_cmd_complete()
485 b[2] = rb[rix++]; in spi_cmd_complete()
486 b[3] = rb[rix++]; in spi_cmd_complete()
498 crc[0] = rb[rix++]; in spi_cmd_complete()
[all …]
/linux-4.4.14/fs/ubifs/
orphan.c
85 o = rb_entry(parent, struct ubifs_orphan, rb); in ubifs_add_orphan()
99 rb_link_node(&orphan->rb, parent, p); in ubifs_add_orphan()
100 rb_insert_color(&orphan->rb, &c->orph_tree); in ubifs_add_orphan()
123 o = rb_entry(p, struct ubifs_orphan, rb); in ubifs_delete_orphan()
451 rb_erase(&orphan->rb, &c->orph_tree); in erase_deleted()
525 o = rb_entry(parent, struct ubifs_orphan, rb); in insert_dead_orphan()
537 rb_link_node(&orphan->rb, parent, p); in insert_dead_orphan()
538 rb_insert_color(&orphan->rb, &c->orph_tree); in insert_dead_orphan()
734 struct rb_node rb; member
755 o = rb_entry(p, struct ubifs_orphan, rb); in dbg_find_orphan()
[all …]
log.c
50 bud = rb_entry(p, struct ubifs_bud, rb); in ubifs_search_bud()
83 bud = rb_entry(p, struct ubifs_bud, rb); in ubifs_get_wbuf()
134 b = rb_entry(parent, struct ubifs_bud, rb); in ubifs_add_bud()
142 rb_link_node(&bud->rb, parent, p); in ubifs_add_bud()
143 rb_insert_color(&bud->rb, &c->buds); in ubifs_add_bud()
314 bud = rb_entry(p1, struct ubifs_bud, rb); in remove_buds()
540 struct rb_node rb; member
559 dr = rb_entry(parent, struct done_ref, rb); in done_already()
574 rb_link_node(&dr->rb, parent, p); in done_already()
575 rb_insert_color(&dr->rb, done_tree); in done_already()
[all …]
debug.c
580 struct rb_node *rb; in ubifs_dump_budg() local
622 for (rb = rb_first(&c->buds); rb; rb = rb_next(rb)) { in ubifs_dump_budg()
623 bud = rb_entry(rb, struct ubifs_bud, rb); in ubifs_dump_budg()
648 struct rb_node *rb; in ubifs_dump_lprop() local
710 for (rb = rb_first((struct rb_root *)&c->buds); rb; rb = rb_next(rb)) { in ubifs_dump_lprop()
711 bud = rb_entry(rb, struct ubifs_bud, rb); in ubifs_dump_lprop()
1760 struct rb_node rb; member
1807 fscki = rb_entry(parent, struct fsck_inode, rb); in add_inode()
1863 rb_link_node(&fscki->rb, parent, p); in add_inode()
1864 rb_insert_color(&fscki->rb, &fsckd->inodes); in add_inode()
[all …]
recovery.c
1242 struct rb_node rb; member
1266 e = rb_entry(parent, struct size_entry, rb); in add_ino()
1282 rb_link_node(&e->rb, parent, p); in add_ino()
1283 rb_insert_color(&e->rb, &c->size_tree); in add_ino()
1299 e = rb_entry(p, struct size_entry, rb); in find_ino()
1321 rb_erase(&e->rb, &c->size_tree); in remove_ino()
1333 rbtree_postorder_for_each_entry_safe(e, n, &c->size_tree, rb) { in ubifs_destroy_size_tree()
1482 e = rb_entry(this, struct size_entry, rb); in ubifs_recover_size()
1540 rb_erase(&e->rb, &c->size_tree); in ubifs_recover_size()
replay.c
279 struct replay_entry *ra, *rb; in replay_entries_cmp() local
286 rb = list_entry(b, struct replay_entry, list); in replay_entries_cmp()
287 ubifs_assert(ra->sqnum != rb->sqnum); in replay_entries_cmp()
288 if (ra->sqnum > rb->sqnum) in replay_entries_cmp()
ubifs.h
283 struct rb_node rb; member
723 struct rb_node rb; member
923 struct rb_node rb; member
tnc.c
91 o = rb_entry(parent, struct ubifs_old_idx, rb); in insert_old_idx()
106 rb_link_node(&old_idx->rb, parent, p); in insert_old_idx()
107 rb_insert_color(&old_idx->rb, &c->old_idx); in insert_old_idx()
183 rbtree_postorder_for_each_entry_safe(old_idx, n, &c->old_idx, rb) in destroy_old_idx()
tnc_commit.c
175 o = rb_entry(p, struct ubifs_old_idx, rb); in find_old_idx()
super.c
879 rbtree_postorder_for_each_entry_safe(bud, n, &c->buds, rb) in free_buds()
/linux-4.4.14/net/packet/
af_packet.c
195 struct packet_ring_buffer *rb,
492 struct packet_ring_buffer *rb, in packet_lookup_frame() argument
499 pg_vec_pos = position / rb->frames_per_block; in packet_lookup_frame()
500 frame_offset = position % rb->frames_per_block; in packet_lookup_frame()
502 h.raw = rb->pg_vec[pg_vec_pos].buffer + in packet_lookup_frame()
503 (frame_offset * rb->frame_size); in packet_lookup_frame()
512 struct packet_ring_buffer *rb, in packet_current_frame() argument
515 return packet_lookup_frame(po, rb, rb->head, status); in packet_current_frame()
605 struct packet_ring_buffer *rb, in init_prb_bdqc() argument
609 struct tpacket_kbdq_core *p1 = GET_PBDQC_FROM_RB(rb); in init_prb_bdqc()
[all …]
/linux-4.4.14/fs/btrfs/
extent_map.c
228 struct rb_node *rb; in try_merge_map() local
231 rb = rb_prev(&em->rb_node); in try_merge_map()
232 if (rb) in try_merge_map()
233 merge = rb_entry(rb, struct extent_map, rb_node); in try_merge_map()
234 if (rb && mergable_maps(merge, em)) { in try_merge_map()
250 rb = rb_next(&em->rb_node); in try_merge_map()
251 if (rb) in try_merge_map()
252 merge = rb_entry(rb, struct extent_map, rb_node); in try_merge_map()
253 if (rb && mergable_maps(em, merge)) { in try_merge_map()
raid56.c
1668 struct btrfs_raid_bio *rb = container_of(b, struct btrfs_raid_bio, in plug_cmp() local
1671 u64 b_sector = rb->bio_list.head->bi_iter.bi_sector; in plug_cmp()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/engine/dma/
base.c
37 container_of(node, typeof(*dmaobj), rb); in nvkm_dma_search()
70 struct nvkm_dmaobj *obj = container_of(*ptr, typeof(*obj), rb); in nvkm_dma_oclass_new()
81 rb_link_node(&dmaobj->rb, parent, ptr); in nvkm_dma_oclass_new()
82 rb_insert_color(&dmaobj->rb, &client->dmaroot); in nvkm_dma_oclass_new()
user.c
46 if (!RB_EMPTY_NODE(&dmaobj->rb)) in nvkm_dmaobj_dtor()
47 rb_erase(&dmaobj->rb, &dmaobj->object.client->dmaroot); in nvkm_dmaobj_dtor()
77 RB_CLEAR_NODE(&dmaobj->rb); in nvkm_dmaobj_ctor()
/linux-4.4.14/drivers/staging/lustre/lnet/lnet/
router.c
1220 lnet_destroy_rtrbuf(lnet_rtrbuf_t *rb, int npages) in lnet_destroy_rtrbuf() argument
1225 __free_page(rb->rb_kiov[npages].kiov_page); in lnet_destroy_rtrbuf()
1227 LIBCFS_FREE(rb, sz); in lnet_destroy_rtrbuf()
1236 lnet_rtrbuf_t *rb; in lnet_new_rtrbuf() local
1239 LIBCFS_CPT_ALLOC(rb, lnet_cpt_table(), cpt, sz); in lnet_new_rtrbuf()
1240 if (rb == NULL) in lnet_new_rtrbuf()
1243 rb->rb_pool = rbp; in lnet_new_rtrbuf()
1251 __free_page(rb->rb_kiov[i].kiov_page); in lnet_new_rtrbuf()
1253 LIBCFS_FREE(rb, sz); in lnet_new_rtrbuf()
1257 rb->rb_kiov[i].kiov_len = PAGE_CACHE_SIZE; in lnet_new_rtrbuf()
[all …]
lib-move.c
896 lnet_rtrbuf_t *rb; in lnet_post_routed_recv_locked() local
947 rb = list_entry(rbp->rbp_bufs.next, lnet_rtrbuf_t, rb_list); in lnet_post_routed_recv_locked()
948 list_del(&rb->rb_list); in lnet_post_routed_recv_locked()
951 msg->msg_kiov = &rb->rb_kiov[0]; in lnet_post_routed_recv_locked()
1030 lnet_rtrbuf_t *rb; in lnet_return_rx_credits_locked() local
1038 rb = list_entry(msg->msg_kiov, lnet_rtrbuf_t, rb_kiov[0]); in lnet_return_rx_credits_locked()
1039 rbp = rb->rb_pool; in lnet_return_rx_credits_locked()
1050 list_add(&rb->rb_list, &rbp->rbp_bufs); in lnet_return_rx_credits_locked()
/linux-4.4.14/tools/testing/selftests/powerpc/primitives/asm/
asm-compat.h
76 #define PPC405_ERR77(ra,rb) stringify_in_c(dcbt ra, rb;) argument
79 #define PPC405_ERR77(ra,rb) argument
/linux-4.4.14/Documentation/devicetree/bindings/mtd/
sunxi-nand.txt
18 - allwinner,rb : shall contain the native Ready/Busy ids.
20 - rb-gpios : shall contain the gpios used as R/B pins.
42 allwinner,rb = <0>;
/linux-4.4.14/crypto/
tgr192.c
401 static void tgr192_round(u64 * ra, u64 * rb, u64 * rc, u64 x, int mul) in tgr192_round() argument
404 u64 b = *rb; in tgr192_round()
415 *rb = b; in tgr192_round()
420 static void tgr192_pass(u64 * ra, u64 * rb, u64 * rc, u64 * x, int mul) in tgr192_pass() argument
423 u64 b = *rb; in tgr192_pass()
436 *rb = b; in tgr192_pass()
/linux-4.4.14/sound/hda/
hdac_controller.c
45 bus->corb.addr = bus->rb.addr; in snd_hdac_bus_init_cmd_io()
46 bus->corb.buf = (__le32 *)bus->rb.area; in snd_hdac_bus_init_cmd_io()
64 bus->rirb.addr = bus->rb.addr + 2048; in snd_hdac_bus_init_cmd_io()
65 bus->rirb.buf = (__le32 *)(bus->rb.area + 2048); in snd_hdac_bus_init_cmd_io()
485 PAGE_SIZE, &bus->rb); in snd_hdac_bus_alloc_stream_pages()
502 if (bus->rb.area) in snd_hdac_bus_free_stream_pages()
503 bus->io_ops->dma_free_pages(bus, &bus->rb); in snd_hdac_bus_free_stream_pages()
/linux-4.4.14/fs/kernfs/
dir.c
25 #define rb_to_kn(X) rb_entry((X), struct kernfs_node, rb)
270 node = &pos->rb.rb_left; in kernfs_link_sibling()
272 node = &pos->rb.rb_right; in kernfs_link_sibling()
278 rb_link_node(&kn->rb, parent, node); in kernfs_link_sibling()
279 rb_insert_color(&kn->rb, &kn->parent->dir.children); in kernfs_link_sibling()
301 if (RB_EMPTY_NODE(&kn->rb)) in kernfs_unlink_sibling()
307 rb_erase(&kn->rb, &kn->parent->dir.children); in kernfs_unlink_sibling()
308 RB_CLEAR_NODE(&kn->rb); in kernfs_unlink_sibling()
558 RB_CLEAR_NODE(&kn->rb); in __kernfs_new_node()
1008 rbn = rb_next(&pos->rb); in kernfs_next_descendant_post()
[all …]
/linux-4.4.14/drivers/spi/
spi-mpc52xx-psc.c
137 unsigned rb = 0; /* number of bytes receieved */ in mpc52xx_psc_spi_transfer_rxtx() local
151 while (rb < t->len) { in mpc52xx_psc_spi_transfer_rxtx()
152 if (t->len - rb > MPC52xx_PSC_BUFSIZE) { in mpc52xx_psc_spi_transfer_rxtx()
157 rfalarm = MPC52xx_PSC_BUFSIZE - (t->len - rb); in mpc52xx_psc_spi_transfer_rxtx()
179 if (t->len - rb == 1) { in mpc52xx_psc_spi_transfer_rxtx()
192 for (; recv_at_once; rb++, recv_at_once--) in mpc52xx_psc_spi_transfer_rxtx()
193 rx_buf[rb] = in_8(&psc->mpc52xx_psc_buffer_8); in mpc52xx_psc_spi_transfer_rxtx()
195 for (; recv_at_once; rb++, recv_at_once--) in mpc52xx_psc_spi_transfer_rxtx()
/linux-4.4.14/drivers/i2c/busses/
i2c-cpm.c
194 u_char *rb; in cpm_i2c_parse_message() local
205 rb = cpm->rxbuf[rx]; in cpm_i2c_parse_message()
208 rb = (u_char *) (((ulong) rb + 1) & ~1); in cpm_i2c_parse_message()
254 u_char *rb; in cpm_i2c_check_message() local
261 rb = cpm->rxbuf[rx]; in cpm_i2c_check_message()
264 rb = (u_char *) (((uint) rb + 1) & ~1); in cpm_i2c_check_message()
284 memcpy(pmsg->buf, rb, pmsg->len); in cpm_i2c_check_message()
/linux-4.4.14/drivers/media/dvb-frontends/
dib3000mb.c
56 u8 rb[2]; in dib3000_read_reg() local
59 { .addr = state->config.demod_address, .flags = I2C_M_RD, .buf = rb, .len = 2 }, in dib3000_read_reg()
66 (rb[0] << 8) | rb[1],(rb[0] << 8) | rb[1]); in dib3000_read_reg()
68 return (rb[0] << 8) | rb[1]; in dib3000_read_reg()
dib3000mc.c
53 u8 rb[2]; in dib3000mc_read_word() local
56 { .addr = state->i2c_addr >> 1, .flags = I2C_M_RD, .buf = rb, .len = 2 }, in dib3000mc_read_word()
62 return (rb[0] << 8) | rb[1]; in dib3000mc_read_word()
/linux-4.4.14/drivers/mtd/nand/
sunxi_nand.c
207 struct sunxi_nand_rb rb; member
356 struct sunxi_nand_rb *rb; in sunxi_nfc_dev_ready() local
363 rb = &sunxi_nand->sels[sunxi_nand->selected].rb; in sunxi_nfc_dev_ready()
365 switch (rb->type) { in sunxi_nfc_dev_ready()
368 NFC_RB_STATE(rb->info.nativeid)); in sunxi_nfc_dev_ready()
374 NFC_RB_STATE(rb->info.nativeid)); in sunxi_nfc_dev_ready()
377 ret = gpio_get_value(rb->info.gpio); in sunxi_nfc_dev_ready()
411 if (sel->rb.type == RB_NONE) { in sunxi_nfc_select_chip()
415 if (sel->rb.type == RB_NATIVE) in sunxi_nfc_select_chip()
416 ctl |= NFC_RB_SEL(sel->rb.info.nativeid); in sunxi_nfc_select_chip()
[all …]
/linux-4.4.14/drivers/iio/
industrialio-buffer.c
109 struct iio_buffer *rb = indio_dev->buffer; in iio_buffer_read_first_n_outer() local
117 if (!rb || !rb->access->read_first_n) in iio_buffer_read_first_n_outer()
120 datum_size = rb->bytes_per_datum; in iio_buffer_read_first_n_outer()
132 to_wait = min_t(size_t, n / datum_size, rb->watermark); in iio_buffer_read_first_n_outer()
135 ret = wait_event_interruptible(rb->pollq, in iio_buffer_read_first_n_outer()
136 iio_buffer_ready(indio_dev, rb, to_wait, n / datum_size)); in iio_buffer_read_first_n_outer()
143 ret = rb->access->read_first_n(rb, n, buf); in iio_buffer_read_first_n_outer()
164 struct iio_buffer *rb = indio_dev->buffer; in iio_buffer_poll() local
169 poll_wait(filp, &rb->pollq, wait); in iio_buffer_poll()
170 if (iio_buffer_ready(indio_dev, rb, rb->watermark, 0)) in iio_buffer_poll()
/linux-4.4.14/arch/mips/alchemy/common/
usb.c
391 static inline int au1000_usb_init(unsigned long rb, int reg) in au1000_usb_init() argument
393 void __iomem *base = (void __iomem *)KSEG1ADDR(rb + reg); in au1000_usb_init()
424 static inline void __au1xx0_ohci_control(int enable, unsigned long rb, int creg) in __au1xx0_ohci_control() argument
426 void __iomem *base = (void __iomem *)KSEG1ADDR(rb); in __au1xx0_ohci_control()
457 static inline int au1000_usb_control(int block, int enable, unsigned long rb, in au1000_usb_control() argument
464 __au1xx0_ohci_control(enable, rb, creg); in au1000_usb_control()
/linux-4.4.14/arch/powerpc/sysdev/
mpic.c
174 struct mpic_reg_bank *rb, in _mpic_read() argument
180 return dcr_read(rb->dhost, reg); in _mpic_read()
183 return in_be32(rb->base + (reg >> 2)); in _mpic_read()
186 return in_le32(rb->base + (reg >> 2)); in _mpic_read()
191 struct mpic_reg_bank *rb, in _mpic_write() argument
197 dcr_write(rb->dhost, reg, value); in _mpic_write()
201 out_be32(rb->base + (reg >> 2), value); in _mpic_write()
205 out_le32(rb->base + (reg >> 2), value); in _mpic_write()
315 struct mpic_reg_bank *rb, unsigned int offset, in _mpic_map_mmio() argument
318 rb->base = ioremap(phys_addr + offset, size); in _mpic_map_mmio()
[all …]
fsl_pci.c
902 unsigned int rd, ra, rb, d; in mcheck_handle_load() local
906 rb = get_rb(inst); in mcheck_handle_load()
919 regs->gpr[ra] += regs->gpr[rb]; in mcheck_handle_load()
928 regs->gpr[ra] += regs->gpr[rb]; in mcheck_handle_load()
938 regs->gpr[ra] += regs->gpr[rb]; in mcheck_handle_load()
947 regs->gpr[ra] += regs->gpr[rb]; in mcheck_handle_load()
/linux-4.4.14/drivers/usb/class/
cdc-acm.c
413 struct acm_rb *rb = urb->context; in acm_read_bulk_callback() local
414 struct acm *acm = rb->instance; in acm_read_bulk_callback()
419 rb->index, urb->actual_length); in acm_read_bulk_callback()
422 set_bit(rb->index, &acm->read_urbs_free); in acm_read_bulk_callback()
428 set_bit(rb->index, &acm->read_urbs_free); in acm_read_bulk_callback()
443 set_bit(rb->index, &acm->read_urbs_free); in acm_read_bulk_callback()
450 acm_submit_read_urb(acm, rb->index, GFP_ATOMIC); in acm_read_bulk_callback()
1361 struct acm_rb *rb = &(acm->read_buffers[i]); in acm_probe() local
1364 rb->base = usb_alloc_coherent(acm->dev, readsize, GFP_KERNEL, in acm_probe()
1365 &rb->dma); in acm_probe()
[all …]
/linux-4.4.14/drivers/mtd/
mtdswap.c
83 struct rb_node rb; member
93 rb)->erase_count)
95 rb)->erase_count)
214 rb_erase(&eb->rb, eb->root); in mtdswap_eb_detach()
226 cur = rb_entry(parent, struct swap_eb, rb); in __mtdswap_rb_add()
233 rb_link_node(&eb->rb, parent, p); in __mtdswap_rb_add()
234 rb_insert_color(&eb->rb, root); in __mtdswap_rb_add()
453 median = rb_entry(medrb, struct swap_eb, rb)->erase_count; in mtdswap_check_counts()
466 rb_erase(&eb->rb, &hist_root); in mtdswap_check_counts()
625 eb = rb_entry(rb_first(clean_root), struct swap_eb, rb); in mtdswap_map_free_block()
[all …]
/linux-4.4.14/fs/xfs/
xfs_rtalloc.h
127 # define xfs_rtallocate_extent(t,b,min,max,l,a,f,p,rb) (ENOSYS) argument
129 # define xfs_rtpick_extent(m,t,l,rb) (ENOSYS) argument
/linux-4.4.14/drivers/media/platform/exynos4-is/
fimc-isp-video.c
536 struct v4l2_requestbuffers *rb) in isp_video_reqbufs() argument
541 ret = vb2_ioctl_reqbufs(file, priv, rb); in isp_video_reqbufs()
545 if (rb->count && rb->count < FIMC_ISP_REQ_BUFS_MIN) { in isp_video_reqbufs()
546 rb->count = 0; in isp_video_reqbufs()
547 vb2_ioctl_reqbufs(file, priv, rb); in isp_video_reqbufs()
551 isp->video_capture.reqbufs_count = rb->count; in isp_video_reqbufs()
/linux-4.4.14/drivers/gpu/drm/msm/
msm_gpu.c
670 gpu->rb = msm_ringbuffer_new(gpu, ringsz); in msm_gpu_init()
672 if (IS_ERR(gpu->rb)) { in msm_gpu_init()
673 ret = PTR_ERR(gpu->rb); in msm_gpu_init()
674 gpu->rb = NULL; in msm_gpu_init()
695 if (gpu->rb) { in msm_gpu_cleanup()
697 msm_gem_put_iova(gpu->rb->bo, gpu->id); in msm_gpu_cleanup()
698 msm_ringbuffer_destroy(gpu->rb); in msm_gpu_cleanup()
msm_gpu.h
80 struct msm_ringbuffer *rb; member
/linux-4.4.14/drivers/media/pci/ngene/
ngene-core.c
777 static void free_ringbuffer(struct ngene *dev, struct SRingBufferDescriptor *rb) in free_ringbuffer() argument
779 struct SBufferHeader *Cur = rb->Head; in free_ringbuffer()
785 for (j = 0; j < rb->NumBuffers; j++, Cur = Cur->Next) { in free_ringbuffer()
788 rb->Buffer1Length, in free_ringbuffer()
794 rb->Buffer2Length, in free_ringbuffer()
799 if (rb->SCListMem) in free_ringbuffer()
800 pci_free_consistent(dev->pci_dev, rb->SCListMemSize, in free_ringbuffer()
801 rb->SCListMem, rb->PASCListMem); in free_ringbuffer()
803 pci_free_consistent(dev->pci_dev, rb->MemSize, rb->Head, rb->PAHead); in free_ringbuffer()
807 struct SRingBufferDescriptor *rb, in free_idlebuffer() argument
[all …]
/linux-4.4.14/drivers/of/
Dof_reserved_mem.c209 const struct reserved_mem *ra = a, *rb = b; in __rmem_cmp() local
211 if (ra->base < rb->base) in __rmem_cmp()
214 if (ra->base > rb->base) in __rmem_cmp()
/linux-4.4.14/drivers/usb/gadget/function/
Duvc_queue.c154 struct v4l2_requestbuffers *rb) in uvcg_alloc_buffers() argument
158 ret = vb2_reqbufs(&queue->queue, rb); in uvcg_alloc_buffers()
160 return ret ? ret : rb->count; in uvcg_alloc_buffers()
Duvc_queue.h65 struct v4l2_requestbuffers *rb);
/linux-4.4.14/sound/pci/lola/
Dlola.c367 PAGE_SIZE, &chip->rb); in setup_corb_rirb()
371 chip->corb.addr = chip->rb.addr; in setup_corb_rirb()
372 chip->corb.buf = (u32 *)chip->rb.area; in setup_corb_rirb()
373 chip->rirb.addr = chip->rb.addr + 2048; in setup_corb_rirb()
374 chip->rirb.buf = (u32 *)(chip->rb.area + 2048); in setup_corb_rirb()
556 if (chip->rb.area) in lola_free()
557 snd_dma_free_pages(&chip->rb); in lola_free()
Dlola.h344 struct snd_dma_buffer rb; member
/linux-4.4.14/arch/arm/boot/compressed/
Dhead.S30 .macro loadsp, rb, tmp
32 .macro writeb, ch, rb
36 .macro loadsp, rb, tmp
38 .macro writeb, ch, rb
42 .macro loadsp, rb, tmp
44 .macro writeb, ch, rb
53 .macro writeb, ch, rb
54 senduart \ch, \rb
58 .macro loadsp, rb, tmp
59 mov \rb, #0x80000000 @ physical base address
[all …]
/linux-4.4.14/drivers/crypto/vmx/
Dppc-xlate.pl144 my ($f, $vrt, $ra, $rb, $op) = @_;
145 " .long ".sprintf "0x%X",(31<<26)|($vrt<<21)|($ra<<16)|($rb<<11)|($op*2+1);
/linux-4.4.14/drivers/gpu/drm/nouveau/include/nvkm/engine/
Ddma.h16 struct rb_node rb; member
/linux-4.4.14/drivers/media/usb/gspca/
Dgspca.c1371 struct v4l2_requestbuffers *rb) in vidioc_reqbufs() argument
1376 i = rb->memory; /* (avoid compilation warning) */ in vidioc_reqbufs()
1390 && gspca_dev->memory != rb->memory) { in vidioc_reqbufs()
1424 if (rb->count == 0) /* unrequest */ in vidioc_reqbufs()
1426 ret = frame_alloc(gspca_dev, file, rb->memory, rb->count); in vidioc_reqbufs()
1428 rb->count = gspca_dev->nframes; in vidioc_reqbufs()
1434 PDEBUG(D_STREAM, "reqbufs st:%d c:%d", ret, rb->count); in vidioc_reqbufs()
1823 struct v4l2_requestbuffers rb; in read_alloc() local
1825 memset(&rb, 0, sizeof rb); in read_alloc()
1826 rb.count = gspca_dev->nbufread; in read_alloc()
[all …]
/linux-4.4.14/drivers/infiniband/hw/usnic/
Dusnic_uiom_interval_tree.h40 struct rb_node rb; member
Dusnic_uiom_interval_tree.c267 INTERVAL_TREE_DEFINE(struct usnic_uiom_interval_node, rb,
/linux-4.4.14/drivers/media/usb/uvc/
Duvc_queue.c233 struct v4l2_requestbuffers *rb) in uvc_request_buffers() argument
238 ret = vb2_reqbufs(&queue->queue, rb); in uvc_request_buffers()
241 return ret ? ret : rb->count; in uvc_request_buffers()
Duvcvideo.h627 struct v4l2_requestbuffers *rb);
Duvc_v4l2.c678 struct v4l2_requestbuffers *rb) in uvc_ioctl_reqbufs() argument
689 ret = uvc_request_buffers(&stream->queue, rb); in uvc_ioctl_reqbufs()
/linux-4.4.14/drivers/gpu/drm/
Ddrm_modes.c1231 bool yres_specified = false, cvt = false, rb = false; in drm_mode_parse_command_line_for_connector() local
1252 !yres_specified && !cvt && !rb && was_digit) { in drm_mode_parse_command_line_for_connector()
1261 !rb && was_digit) { in drm_mode_parse_command_line_for_connector()
1285 if (yres_specified || cvt || rb || was_digit) in drm_mode_parse_command_line_for_connector()
1287 rb = true; in drm_mode_parse_command_line_for_connector()
1364 mode->rb = rb; in drm_mode_parse_command_line_for_connector()
1392 cmd->rb, cmd->interlace, in drm_mode_create_from_cmdline_mode()
Ddrm_edid.c571 short rb; member
1523 bool rb) in drm_mode_find_dmt() argument
1535 if (rb != mode_is_rb(ptr)) in drm_mode_find_dmt()
2132 bool rb = drm_monitor_supports_rb(edid); in drm_cvt_modes_for_range() local
2136 newmode = drm_cvt_mode(dev, m->w, m->h, m->r, rb, 0, 0); in drm_cvt_modes_for_range()
2224 est3_modes[m].rb); in drm_est3_modes()
Ddrm_fb_helper.c1646 if (cmdline_mode->rb || cmdline_mode->margins) in drm_pick_cmdline_mode()
Ddrm_crtc.c853 mode->rb ? " reduced blanking" : "", in drm_connector_get_cmdline_mode()
/linux-4.4.14/drivers/video/fbdev/core/
Dmodedb.c700 int yres_specified = 0, cvt = 0, rb = 0, interlace = 0; in fb_find_mode() local
713 if (cvt || rb) in fb_find_mode()
724 if (cvt || rb) in fb_find_mode()
745 rb = 1; in fb_find_mode()
770 (rb) ? " reduced blanking" : "", in fb_find_mode()
784 ret = fb_find_mode_cvt(&cvt_mode, margins, rb); in fb_find_mode()
Dfbcvt.c305 int fb_find_mode_cvt(struct fb_videomode *mode, int margins, int rb) in fb_find_mode_cvt() argument
314 if (rb) in fb_find_mode_cvt()
/linux-4.4.14/drivers/infiniband/core/
Dumem_rbtree.c69 INTERVAL_TREE_DEFINE(struct umem_odp_node, rb, u64, __subtree_last,
/linux-4.4.14/drivers/media/platform/s3c-camif/
Dcamif-capture.c907 struct v4l2_requestbuffers *rb) in s3c_camif_reqbufs() argument
913 vp->id, rb->count, vp->owner, priv); in s3c_camif_reqbufs()
918 if (rb->count) in s3c_camif_reqbufs()
919 rb->count = max_t(u32, CAMIF_REQ_BUFS_MIN, rb->count); in s3c_camif_reqbufs()
923 ret = vb2_reqbufs(&vp->vb_queue, rb); in s3c_camif_reqbufs()
927 if (rb->count && rb->count < CAMIF_REQ_BUFS_MIN) { in s3c_camif_reqbufs()
928 rb->count = 0; in s3c_camif_reqbufs()
929 vb2_reqbufs(&vp->vb_queue, rb); in s3c_camif_reqbufs()
933 vp->reqbufs_count = rb->count; in s3c_camif_reqbufs()
934 if (vp->owner == NULL && rb->count > 0) in s3c_camif_reqbufs()
/linux-4.4.14/drivers/media/usb/stkwebcam/
Dstk-webcam.c1041 void *priv, struct v4l2_requestbuffers *rb) in stk_vidioc_reqbufs() argument
1047 if (rb->memory != V4L2_MEMORY_MMAP) in stk_vidioc_reqbufs()
1053 if (rb->count == 0) { in stk_vidioc_reqbufs()
1062 if (rb->count < 3) in stk_vidioc_reqbufs()
1063 rb->count = 3; in stk_vidioc_reqbufs()
1065 else if (rb->count > 5) in stk_vidioc_reqbufs()
1066 rb->count = 5; in stk_vidioc_reqbufs()
1068 stk_allocate_buffers(dev, rb->count); in stk_vidioc_reqbufs()
1069 rb->count = dev->n_sbufs; in stk_vidioc_reqbufs()
/linux-4.4.14/include/rdma/
Dib_umem_odp.h42 struct rb_node rb; member
/linux-4.4.14/drivers/video/fbdev/omap2/dss/
Dmanager-sysfs.c382 info.cpr_coefs.rb, in manager_cpr_coef_show()
403 &coefs.rr, &coefs.rg, &coefs.rb, in manager_cpr_coef_store()
408 arr = (s16[]){ coefs.rr, coefs.rg, coefs.rb, in manager_cpr_coef_store()
Ddispc.c1093 FLD_VAL(coefs->rb, 9, 0); in dispc_mgr_set_cpr_coef()
/linux-4.4.14/tools/perf/util/
Dkvm-stat.h25 struct rb_node rb; member
/linux-4.4.14/arch/arm/common/
Ddmabounce.c170 struct safe_buffer *b, *rb = NULL; in find_safe_buffer() local
178 rb = b; in find_safe_buffer()
183 return rb; in find_safe_buffer()
/linux-4.4.14/arch/x86/crypto/
Daes-x86_64-asm_64.S83 #define round(TAB,OFFSET,r1,r2,r3,r4,r5,r6,r7,r8,ra,rb,rc,rd) \ argument
94 xorl OFFSET+4(r8),rb ## E; \
/linux-4.4.14/tools/lib/
Drbtree.c46 static inline void rb_set_black(struct rb_node *rb) in rb_set_black() argument
48 rb->__rb_parent_color |= RB_BLACK; in rb_set_black()
/linux-4.4.14/drivers/misc/
Dsram.c173 struct sram_reserve *rb = list_entry(b, struct sram_reserve, list); in sram_reserve_cmp() local
175 return ra->start - rb->start; in sram_reserve_cmp()
/linux-4.4.14/include/media/
Dv4l2-mem2mem.h263 struct v4l2_requestbuffers *rb);
/linux-4.4.14/drivers/net/ethernet/ti/
Dnetcp_ethss.c162 #define GBE_SET_REG_OFS(p, rb, rn) p->rb##_ofs.rn = \ argument
163 offsetof(struct gbe##_##rb, rn)
164 #define GBENU_SET_REG_OFS(p, rb, rn) p->rb##_ofs.rn = \ argument
165 offsetof(struct gbenu##_##rb, rn)
166 #define XGBE_SET_REG_OFS(p, rb, rn) p->rb##_ofs.rn = \ argument
167 offsetof(struct xgbe##_##rb, rn)
168 #define GBE_REG_ADDR(p, rb, rn) (p->rb + p->rb##_ofs.rn) argument
/linux-4.4.14/include/drm/
Ddrm_modes.h160 bool rb; member
Ddrm_crtc.h1471 bool rb);
/linux-4.4.14/block/
Dcfq-iosched.c90 struct rb_root rb; member
96 #define CFQ_RB_ROOT (struct cfq_rb_root) { .rb = RB_ROOT, \
1172 root->left = rb_first(&root->rb); in cfq_rb_first()
1183 root->left = rb_first(&root->rb); in cfq_rb_first_group()
1201 rb_erase_init(n, &root->rb); in cfq_rb_erase()
1251 struct rb_node **node = &st->rb.rb_node; in __cfq_group_service_tree_add()
1273 rb_insert_color(&cfqg->rb_node, &st->rb); in __cfq_group_service_tree_add()
1366 n = rb_last(&st->rb); in cfq_group_notify_queue_add()
2212 parent = rb_last(&st->rb); in cfq_service_tree_add()
2249 p = &st->rb.rb_node; in cfq_service_tree_add()
[all …]
/linux-4.4.14/drivers/media/platform/coda/
Dcoda.h178 int (*reqbufs)(struct coda_ctx *ctx, struct v4l2_requestbuffers *rb);
Dcoda-bit.c778 struct v4l2_requestbuffers *rb) in coda_encoder_reqbufs() argument
783 if (rb->type != V4L2_BUF_TYPE_VIDEO_OUTPUT) in coda_encoder_reqbufs()
786 if (rb->count) { in coda_encoder_reqbufs()
1484 struct v4l2_requestbuffers *rb) in coda_decoder_reqbufs() argument
1489 if (rb->type != V4L2_BUF_TYPE_VIDEO_OUTPUT) in coda_decoder_reqbufs()
1492 if (rb->count) { in coda_decoder_reqbufs()
Dcoda-common.c659 struct v4l2_requestbuffers *rb) in coda_reqbufs() argument
664 ret = v4l2_m2m_reqbufs(file, ctx->fh.m2m_ctx, rb); in coda_reqbufs()
672 if (rb->type == V4L2_BUF_TYPE_VIDEO_OUTPUT && ctx->ops->reqbufs) in coda_reqbufs()
673 return ctx->ops->reqbufs(ctx, rb); in coda_reqbufs()
/linux-4.4.14/arch/sh/kernel/
Ddisassemble.c304 int rb = 0; in print_sh_insn() local
372 rb = nibs[n] & 0x07; in print_sh_insn()
424 printk("r%d_bank", rb); in print_sh_insn()
/linux-4.4.14/drivers/lightnvm/
Drrpc.c392 struct rrpc_block *rb) in rblock_max_invalid()
394 if (ra->nr_invalid_pages == rb->nr_invalid_pages) in rblock_max_invalid()
397 return (ra->nr_invalid_pages < rb->nr_invalid_pages) ? rb : ra; in rblock_max_invalid()
/linux-4.4.14/drivers/gpu/drm/radeon/
Dradeon_mn.c79 it.rb) { in radeon_mn_destroy()
Dradeon_vm.c1237 rbtree_postorder_for_each_entry_safe(bo_va, tmp, &vm->va, it.rb) { in radeon_vm_fini()
/linux-4.4.14/drivers/gpu/drm/amd/amdgpu/
Damdgpu_mn.c79 it.rb) { in amdgpu_mn_destroy()
Damdgpu_vm.c1317 rbtree_postorder_for_each_entry_safe(mapping, tmp, &vm->va, it.rb) { in amdgpu_vm_fini()
/linux-4.4.14/drivers/media/v4l2-core/
Dv4l2-mem2mem.c791 struct v4l2_requestbuffers *rb) in v4l2_m2m_ioctl_reqbufs() argument
795 return v4l2_m2m_reqbufs(file, fh->m2m_ctx, rb); in v4l2_m2m_ioctl_reqbufs()
/linux-4.4.14/drivers/media/pci/ttpci/
Dav7110_av.c443 #define FREE_COND_TS (dvb_ringbuffer_free(rb) >= 4096)
448 struct dvb_ringbuffer *rb; in ts_play() local
454 rb = (type) ? &av7110->avout : &av7110->aout; in ts_play()
467 if (wait_event_interruptible(rb->queue, FREE_COND_TS)) in ts_play()
/linux-4.4.14/drivers/media/usb/tm6000/
Dtm6000.h370 struct v4l2_requestbuffers *rb);
/linux-4.4.14/drivers/net/wireless/iwlwifi/pcie/
Dtrans.c2313 struct iwl_fw_error_dump_rb *rb; in iwl_trans_pcie_dump_rbs() local
2318 rb_len += sizeof(**data) + sizeof(*rb) + max_len; in iwl_trans_pcie_dump_rbs()
2321 (*data)->len = cpu_to_le32(sizeof(*rb) + max_len); in iwl_trans_pcie_dump_rbs()
2322 rb = (void *)(*data)->data; in iwl_trans_pcie_dump_rbs()
2323 rb->index = cpu_to_le32(i); in iwl_trans_pcie_dump_rbs()
2324 memcpy(rb->data, page_address(rxb->page), max_len); in iwl_trans_pcie_dump_rbs()
/linux-4.4.14/Documentation/devicetree/bindings/pinctrl/
Dmarvell,armada-xp-pinctrl.txt76 mpp48 48 gpio, dev(clkout), dev(burst/last), nand(rb)
/linux-4.4.14/net/sched/
Dsch_htb.c350 static void htb_safe_rb_erase(struct rb_node *rb, struct rb_root *root) in htb_safe_rb_erase() argument
352 if (RB_EMPTY_NODE(rb)) { in htb_safe_rb_erase()
355 rb_erase(rb, root); in htb_safe_rb_erase()
356 RB_CLEAR_NODE(rb); in htb_safe_rb_erase()
Dsch_netem.c153 static struct sk_buff *netem_rb_to_skb(struct rb_node *rb) in netem_rb_to_skb() argument
155 return container_of(rb, struct sk_buff, rbnode); in netem_rb_to_skb()
/linux-4.4.14/drivers/scsi/
Dinitio.c369 u8 instr, rb; in initio_se2_rd() local
382 rb = inb(base + TUL_NVRAM); in initio_se2_rd()
383 rb &= SE2DI; in initio_se2_rd()
384 val += (rb << i); in initio_se2_rd()
404 u8 rb; in initio_se2_wr() local
433 if ((rb = inb(base + TUL_NVRAM)) & SE2DI) in initio_se2_wr()
/linux-4.4.14/drivers/media/platform/
Dtimblogiw.c285 struct v4l2_requestbuffers *rb) in timblogiw_reqbufs() argument
292 return videobuf_reqbufs(&fh->vb_vidq, rb); in timblogiw_reqbufs()
Dvia-camera.c1003 struct v4l2_requestbuffers *rb) in viacam_reqbufs() argument
1007 return videobuf_reqbufs(&cam->vb_queue, rb); in viacam_reqbufs()
/linux-4.4.14/arch/alpha/kernel/
Dsmc37c669.c945 #define rb( _x_ ) inb( (unsigned int)((unsigned long)_x_) ) macro
2013 data = rb( &SMC37c669->data_port ); in SMC37c669_read_config()
/linux-4.4.14/drivers/isdn/i4l/
Disdn_tty.c2676 char rb[100]; in isdn_tty_cmd_ATand() local
2678 #define MAXRB (sizeof(rb) - 1) in isdn_tty_cmd_ATand()
2812 sprintf(rb, "S%02d=%03d%s", i, in isdn_tty_cmd_ATand()
2814 isdn_tty_at_cout(rb, info); in isdn_tty_cmd_ATand()
2816 sprintf(rb, "\r\nEAZ/MSN: %.50s\r\n", in isdn_tty_cmd_ATand()
2818 isdn_tty_at_cout(rb, info); in isdn_tty_cmd_ATand()
/linux-4.4.14/drivers/staging/media/omap4iss/
Diss_video.c678 iss_video_reqbufs(struct file *file, void *fh, struct v4l2_requestbuffers *rb) in iss_video_reqbufs() argument
682 return vb2_reqbufs(&vfh->queue, rb); in iss_video_reqbufs()
/linux-4.4.14/drivers/xen/
Dxen-scsiback.c1446 ssize_t rb; in scsiback_tpg_param_alias_show() local
1449 rb = snprintf(page, PAGE_SIZE, "%s\n", tpg->param_alias); in scsiback_tpg_param_alias_show()
1452 return rb; in scsiback_tpg_param_alias_show()
/linux-4.4.14/drivers/media/pci/cx18/
Dcx18-ioctl.c872 struct v4l2_requestbuffers *rb) in cx18_reqbufs() argument
882 return videobuf_reqbufs(cx18_vb_queue(id), rb); in cx18_reqbufs()
/linux-4.4.14/include/sound/
Dhdaudio.h281 struct snd_dma_buffer rb; member
/linux-4.4.14/drivers/media/usb/dvb-usb/
Ddib0700_devices.c2207 u8 rb[2]; in dib01x0_pmu_update() local
2210 {.addr = 0x1e >> 1, .flags = I2C_M_RD, .buf = rb, .len = 2}, in dib01x0_pmu_update()
2219 switch (rb[0] << 8 | rb[1]) { in dib01x0_pmu_update()
2244 wb[2] |= rb[0]; in dib01x0_pmu_update()
2245 wb[3] |= rb[1] & ~(3 << 4); in dib01x0_pmu_update()
/linux-4.4.14/drivers/media/platform/omap3isp/
Dispvideo.c824 isp_video_reqbufs(struct file *file, void *fh, struct v4l2_requestbuffers *rb) in isp_video_reqbufs() argument
831 ret = vb2_reqbufs(&vfh->queue, rb); in isp_video_reqbufs()
/linux-4.4.14/fs/reiserfs/
Dfix_node.c624 int rnum, int blk_num, short *s012, int lb, int rb) in set_parameters() argument
641 tb->rbytes = rb; in set_parameters()
647 PROC_INFO_ADD(tb->tb_sb, rbytes[h], rb); in set_parameters()
/linux-4.4.14/arch/m68k/ifpsp060/src/
Dilsp.S301 mov.w %d6, %d5 # rb + u3
307 mov.w %d6, %d5 # rb + u4
/linux-4.4.14/include/video/
Domapdss.h380 s16 rr, rg, rb; member
/linux-4.4.14/Documentation/filesystems/
Df2fs.txt144 extent_cache Enable an extent cache based on rb-tree, it can cache
148 noextent_cache Disable an extent cache based on rb-tree explicitly, see
/linux-4.4.14/drivers/media/usb/cx231xx/
Dcx231xx-video.c1612 struct v4l2_requestbuffers *rb) in vidioc_reqbufs() argument
1622 return videobuf_reqbufs(&fh->vb_vidq, rb); in vidioc_reqbufs()
/linux-4.4.14/Documentation/timers/
Dhighres.txt38 - time ordered enqueueing into a rb-tree