Searched refs:chunk_list (Results 1 - 14 of 14) sorted by relevance

/linux-4.1.27/drivers/net/ethernet/mellanox/mlx4/
icm.h
    58  struct list_head chunk_list;  [member in struct:mlx4_icm]
    91  iter->chunk = list_empty(&icm->chunk_list) ?  [mlx4_icm_first()]
    92      NULL : list_entry(icm->chunk_list.next,  [mlx4_icm_first()]
   105  if (iter->chunk->list.next == &iter->icm->chunk_list) {  [mlx4_icm_next()]
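The icm.h hits are the chunk iterator helpers: mlx4_icm_first() takes the first entry off chunk_list (or NULL when the list is empty) and mlx4_icm_next() stops once the next pointer wraps back to the list head. A minimal sketch of that pattern, using hypothetical my_icm* types rather than the real mlx4 definitions:

#include <linux/list.h>

struct my_icm_chunk {
	struct list_head list;		/* linked into my_icm.chunk_list */
	int npages;			/* pages covered by this chunk */
};

struct my_icm {
	struct list_head chunk_list;	/* list of my_icm_chunk */
};

struct my_icm_iter {
	struct my_icm *icm;
	struct my_icm_chunk *chunk;
};

/* Start iteration: a NULL chunk means the ICM has no chunks at all. */
static void my_icm_first(struct my_icm *icm, struct my_icm_iter *iter)
{
	iter->icm   = icm;
	iter->chunk = list_empty(&icm->chunk_list) ?
		NULL : list_entry(icm->chunk_list.next,
				  struct my_icm_chunk, list);
}

/* Advance: iteration ends when the next node is the list head itself. */
static void my_icm_next(struct my_icm_iter *iter)
{
	if (iter->chunk->list.next == &iter->icm->chunk_list) {
		iter->chunk = NULL;
		return;
	}
	iter->chunk = list_entry(iter->chunk->list.next,
				 struct my_icm_chunk, list);
}
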
icm.c
    85  list_for_each_entry_safe(chunk, tmp, &icm->chunk_list, list) {  [mlx4_free_icm()]
   149  INIT_LIST_HEAD(&icm->chunk_list);  [mlx4_alloc_icm()]
   170  list_add_tail(&chunk->list, &icm->chunk_list);  [mlx4_alloc_icm()]
   327  list_for_each_entry(chunk, &icm->chunk_list, list) {  [mlx4_table_find()]
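The icm.c hits cover the list's lifecycle: INIT_LIST_HEAD() at allocation, list_add_tail() per chunk, and a list_for_each_entry_safe() walk to tear everything down. A rough sketch of that alloc/free pattern under the same hypothetical types as above, not the actual mlx4 allocator:

#include <linux/list.h>
#include <linux/slab.h>

/* Tear down: the _safe variant allows unlinking while walking. */
static void my_free_icm(struct my_icm *icm)
{
	struct my_icm_chunk *chunk, *tmp;

	if (!icm)
		return;

	list_for_each_entry_safe(chunk, tmp, &icm->chunk_list, list) {
		list_del(&chunk->list);
		kfree(chunk);
	}
	kfree(icm);
}

/* Build: init the head once, then append each chunk at the tail. */
static struct my_icm *my_alloc_icm(int nchunks, gfp_t gfp_mask)
{
	struct my_icm *icm;
	struct my_icm_chunk *chunk;
	int i;

	icm = kmalloc(sizeof(*icm), gfp_mask);
	if (!icm)
		return NULL;

	INIT_LIST_HEAD(&icm->chunk_list);

	for (i = 0; i < nchunks; i++) {
		chunk = kzalloc(sizeof(*chunk), gfp_mask);
		if (!chunk) {
			my_free_icm(icm);
			return NULL;
		}
		list_add_tail(&chunk->list, &icm->chunk_list);
	}
	return icm;
}
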
/linux-4.1.27/drivers/infiniband/hw/mthca/
mthca_memfree.h
    59  struct list_head chunk_list;  [member in struct:mthca_icm]
   103  iter->chunk = list_empty(&icm->chunk_list) ?  [mthca_icm_first()]
   104      NULL : list_entry(icm->chunk_list.next,  [mthca_icm_first()]
   117  if (iter->chunk->list.next == &iter->icm->chunk_list) {  [mthca_icm_next()]
mthca_memfree.c
    95  list_for_each_entry_safe(chunk, tmp, &icm->chunk_list, list) {  [mthca_free_icm()]
   153  INIT_LIST_HEAD(&icm->chunk_list);  [mthca_alloc_icm()]
   167  list_add_tail(&chunk->list, &icm->chunk_list);  [mthca_alloc_icm()]
   297  list_for_each_entry(chunk, &icm->chunk_list, list) {  [mthca_table_find()]
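The mthca hits mirror the mlx4 ones; the only pattern not sketched yet is the table lookup (icm.c:327 and mthca_memfree.c:297), which walks chunk_list with a plain list_for_each_entry() to find the chunk backing a given page. A simplified sketch, assuming the hypothetical npages field above and ignoring the drivers' scatterlist handling:

#include <linux/list.h>

/*
 * Walk the chunk list until the cumulative page count passes the
 * requested index; the matching chunk is the one that covers it.
 */
static struct my_icm_chunk *my_table_find(struct my_icm *icm, int page)
{
	struct my_icm_chunk *chunk;
	int seen = 0;

	list_for_each_entry(chunk, &icm->chunk_list, list) {
		if (page < seen + chunk->npages)
			return chunk;
		seen += chunk->npages;
	}
	return NULL;	/* page not backed by this ICM */
}
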
/linux-4.1.27/arch/powerpc/platforms/ps3/
mm.c
   373  * @link: A struct list_head used with struct ps3_dma_region.chunk_list, the
   413  list_for_each_entry(c, &r->chunk_list.head, link) {  [dma_find_chunk()]
   442  list_for_each_entry(c, &r->chunk_list.head, link) {  [dma_find_chunk_lpar()]
   549  list_add(&c->link, &r->chunk_list.head);  [dma_sb_map_pages()]
   584  if (list_empty(&r->chunk_list.head)) {  [dma_ioc0_map_pages()]
   589  last = list_entry(r->chunk_list.head.next,  [dma_ioc0_map_pages()]
   620  list_add(&c->link, &r->chunk_list.head);  [dma_ioc0_map_pages()]
   670  INIT_LIST_HEAD(&r->chunk_list.head);  [dma_sb_region_create()]
   671  spin_lock_init(&r->chunk_list.lock);  [dma_sb_region_create()]
   692  INIT_LIST_HEAD(&r->chunk_list.head);  [dma_ioc0_region_create()]
   693  spin_lock_init(&r->chunk_list.lock);  [dma_ioc0_region_create()]
   732  list_for_each_entry_safe(c, tmp, &r->chunk_list.head, link) {  [dma_sb_region_free()]
   755  list_for_each_entry_safe(c, n, &r->chunk_list.head, link) {  [dma_ioc0_region_free()]
   811  spin_lock_irqsave(&r->chunk_list.lock, flags);  [dma_sb_map_area()]
   818  spin_unlock_irqrestore(&r->chunk_list.lock, flags);  [dma_sb_map_area()]
   828  spin_unlock_irqrestore(&r->chunk_list.lock, flags);  [dma_sb_map_area()]
   834  spin_unlock_irqrestore(&r->chunk_list.lock, flags);  [dma_sb_map_area()]
   856  spin_lock_irqsave(&r->chunk_list.lock, flags);  [dma_ioc0_map_area()]
   864  spin_unlock_irqrestore(&r->chunk_list.lock, flags);  [dma_ioc0_map_area()]
   875  spin_unlock_irqrestore(&r->chunk_list.lock, flags);  [dma_ioc0_map_area()]
   883  spin_unlock_irqrestore(&r->chunk_list.lock, flags);  [dma_ioc0_map_area()]
   902  spin_lock_irqsave(&r->chunk_list.lock, flags);  [dma_sb_unmap_area()]
   928  spin_unlock_irqrestore(&r->chunk_list.lock, flags);  [dma_sb_unmap_area()]
   939  spin_lock_irqsave(&r->chunk_list.lock, flags);  [dma_ioc0_unmap_area()]
   966  spin_unlock_irqrestore(&r->chunk_list.lock, flags);  [dma_ioc0_unmap_area()]
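The ps3 code pairs the list head with a spinlock inside an embedded chunk_list struct, and the map/unmap hits take chunk_list.lock with spin_lock_irqsave() around every traversal or insertion. A condensed sketch of that arrangement with hypothetical my_dma_* types; here the lookup takes the lock itself, whereas in mm.c the locking sits in the map/unmap callers:

#include <linux/list.h>
#include <linux/spinlock.h>

struct my_dma_chunk {
	struct list_head link;		/* linked into my_dma_region.chunk_list.head */
	unsigned long bus_addr;
	unsigned long len;
};

struct my_dma_region {
	struct {			/* head + lock pair, as in ps3_dma_region */
		struct list_head head;
		spinlock_t lock;
	} chunk_list;
};

static void my_region_init(struct my_dma_region *r)
{
	INIT_LIST_HEAD(&r->chunk_list.head);
	spin_lock_init(&r->chunk_list.lock);
}

/* Look up the chunk covering bus_addr with the list lock held. */
static struct my_dma_chunk *my_find_chunk(struct my_dma_region *r,
					  unsigned long bus_addr)
{
	struct my_dma_chunk *c, *found = NULL;
	unsigned long flags;

	spin_lock_irqsave(&r->chunk_list.lock, flags);
	list_for_each_entry(c, &r->chunk_list.head, link) {
		if (bus_addr >= c->bus_addr &&
		    bus_addr < c->bus_addr + c->len) {
			found = c;
			break;
		}
	}
	spin_unlock_irqrestore(&r->chunk_list.lock, flags);

	return found;
}
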
/linux-4.1.27/drivers/infiniband/hw/usnic/
H A Dusnic_uiom.c79 static void usnic_uiom_put_pages(struct list_head *chunk_list, int dirty) usnic_uiom_put_pages() argument
87 list_for_each_entry_safe(chunk, tmp, chunk_list, list) { list_for_each_entry_safe()
101 int dmasync, struct list_head *chunk_list) usnic_uiom_get_pages()
123 INIT_LIST_HEAD(chunk_list); usnic_uiom_get_pages()
180 list_add_tail(&chunk->list, chunk_list); usnic_uiom_get_pages()
188 usnic_uiom_put_pages(chunk_list, 0); usnic_uiom_get_pages()
242 usnic_uiom_put_pages(&uiomr->chunk_list, dirty & writable); __usnic_uiom_reg_release()
262 chunk = list_first_entry(&uiomr->chunk_list, struct usnic_uiom_chunk, usnic_uiom_map_sorted_intervals()
372 &uiomr->chunk_list); usnic_uiom_reg_get()
417 usnic_uiom_put_pages(&uiomr->chunk_list, 0); usnic_uiom_reg_get()
100 usnic_uiom_get_pages(unsigned long addr, size_t size, int writable, int dmasync, struct list_head *chunk_list) usnic_uiom_get_pages() argument
H A Dusnic_uiom.h55 struct list_head chunk_list; member in struct:usnic_uiom_reg
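Here chunk_list is a caller-owned list head: usnic_uiom_get_pages() fills a list passed in by usnic_uiom_reg_get() (which hands in &uiomr->chunk_list), and usnic_uiom_put_pages() drains it both on the error path and on release. A stripped-down sketch of that ownership pattern with hypothetical my_uiom_* names:

#include <linux/errno.h>
#include <linux/list.h>
#include <linux/slab.h>

struct my_uiom_chunk {
	struct list_head list;
	int nents;
};

/* Release every chunk on a caller-owned list (the put_pages role). */
static void my_uiom_put_chunks(struct list_head *chunk_list)
{
	struct my_uiom_chunk *chunk, *tmp;

	list_for_each_entry_safe(chunk, tmp, chunk_list, list) {
		list_del(&chunk->list);
		kfree(chunk);
	}
}

/* Build chunks into a caller-provided list head, unwinding on failure. */
static int my_uiom_get_chunks(int nchunks, struct list_head *chunk_list)
{
	struct my_uiom_chunk *chunk;
	int i;

	INIT_LIST_HEAD(chunk_list);

	for (i = 0; i < nchunks; i++) {
		chunk = kzalloc(sizeof(*chunk), GFP_KERNEL);
		if (!chunk) {
			my_uiom_put_chunks(chunk_list);
			return -ENOMEM;
		}
		list_add_tail(&chunk->list, chunk_list);
	}
	return 0;
}
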
/linux-4.1.27/drivers/gpu/drm/qxl/
qxl_image.c
    51  list_add_tail(&chunk->head, &image->chunk_list);  [qxl_allocate_chunk()]
    68  INIT_LIST_HEAD(&image->chunk_list);  [qxl_image_alloc_objects()]
    90  list_for_each_entry_safe(chunk, tmp, &dimage->chunk_list, head) {  [qxl_image_free_objects()]
   120  drv_chunk = list_first_entry(&dimage->chunk_list, struct qxl_drm_chunk, head);  [qxl_image_init_helper()]
qxl_drv.h
   209  struct list_head chunk_list;  [member in struct:qxl_drm_image]
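qxl keeps per-image chunks on image->chunk_list and starts the init helper from the first entry. A short sketch of that list_first_entry() step with hypothetical my_drm_* types; note that list_first_entry() assumes a non-empty list, which the allocation path guarantees by adding at least one chunk:

#include <linux/list.h>

struct my_drm_chunk {
	struct list_head head;		/* linked into my_drm_image.chunk_list */
	void *bo;
};

struct my_drm_image {
	struct list_head chunk_list;
};

/* Return the first chunk queued for this image. */
static struct my_drm_chunk *my_image_first_chunk(struct my_drm_image *img)
{
	return list_first_entry(&img->chunk_list, struct my_drm_chunk, head);
}
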
/linux-4.1.27/drivers/s390/block/
dasd_int.h
   570  dasd_init_chunklist(struct list_head *chunk_list, void *mem,  [dasd_init_chunklist() argument]
   575  INIT_LIST_HEAD(chunk_list);  [dasd_init_chunklist()]
   578  list_add(&chunk->list, chunk_list);  [dasd_init_chunklist()]
   582  dasd_alloc_chunk(struct list_head *chunk_list, unsigned long size)  [dasd_alloc_chunk() argument]
   587  list_for_each_entry(chunk, chunk_list, list) {  [list_for_each_entry()]
   604  dasd_free_chunk(struct list_head *chunk_list, void *mem)  [dasd_free_chunk() argument]
   611  /* Find out the left neighbour in chunk_list. */  [dasd_free_chunk()]
   612  left = chunk_list;  [list_for_each()]
   613  list_for_each(p, chunk_list) {  [list_for_each()]
   619  if (left->next != chunk_list) {
   627  if (left != chunk_list) {
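dasd_int.h implements a small first-fit allocator on top of chunk_list: the free-chunk headers live inside the managed buffer itself, dasd_alloc_chunk() splits the first big-enough chunk, and dasd_free_chunk() locates the left neighbour for merging. A sketch of the init and first-fit split only, loosely modelled on those hits (hypothetical my_chunk type, no merge-on-free):

#include <linux/list.h>

/* Free-space header that sits at the start of its own memory area. */
struct my_chunk {
	struct list_head list;
	unsigned long size;	/* usable bytes that follow this header */
};

/* Seed the free list with one chunk spanning the whole buffer. */
static void my_init_chunklist(struct list_head *chunk_list,
			      void *mem, unsigned long size)
{
	struct my_chunk *chunk = mem;

	INIT_LIST_HEAD(chunk_list);
	chunk->size = size - sizeof(*chunk);
	list_add(&chunk->list, chunk_list);
}

/* First-fit: carve the request off the tail of the first chunk that fits. */
static void *my_alloc_chunk(struct list_head *chunk_list, unsigned long size)
{
	struct my_chunk *chunk, *tmp;

	list_for_each_entry(chunk, chunk_list, list) {
		if (chunk->size < size)
			continue;
		if (chunk->size > size + sizeof(*chunk)) {
			char *endaddr = (char *)(chunk + 1) + chunk->size;

			/* Place a new header just below the carved-out area. */
			tmp = (struct my_chunk *)(endaddr - size) - 1;
			tmp->size = size;
			chunk->size -= size + sizeof(*tmp);
			chunk = tmp;
		} else {
			/* Close to an exact fit: hand out the whole chunk. */
			list_del(&chunk->list);
		}
		return (void *)(chunk + 1);
	}
	return NULL;
}
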
/linux-4.1.27/net/sctp/
output.c
   119  INIT_LIST_HEAD(&packet->chunk_list);  [sctp_packet_init()]
   141  list_for_each_entry_safe(chunk, tmp, &packet->chunk_list, list) {  [sctp_packet_free()]
   312  list_add_tail(&chunk->list, &packet->chunk_list);  [__sctp_packet_append_chunk()]
   396  if (list_empty(&packet->chunk_list))  [sctp_packet_transmit()]
   400  chunk = list_entry(packet->chunk_list.next, struct sctp_chunk, list);  [sctp_packet_transmit()]
   466  list_for_each_entry_safe(chunk, tmp, &packet->chunk_list, list) {  [sctp_packet_transmit()]
   619  list_for_each_entry_safe(chunk, tmp, &packet->chunk_list, list) {  [sctp_packet_transmit()]
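sctp_packet bundles outgoing chunks on packet->chunk_list: chunks are appended with list_add_tail() and the transmit path drains the list with list_for_each_entry_safe(). A minimal queue-then-drain sketch with hypothetical my_packet/my_chunk types, not the real SCTP bundling logic:

#include <linux/list.h>
#include <linux/slab.h>

struct my_tx_chunk {
	struct list_head list;
	size_t len;
};

struct my_packet {
	struct list_head chunk_list;	/* chunks queued for one transmit */
	size_t size;
};

static void my_packet_init(struct my_packet *pkt)
{
	INIT_LIST_HEAD(&pkt->chunk_list);
	pkt->size = 0;
}

/* Bundle one more chunk into the packet. */
static void my_packet_append_chunk(struct my_packet *pkt,
				   struct my_tx_chunk *chunk)
{
	list_add_tail(&chunk->list, &pkt->chunk_list);
	pkt->size += chunk->len;
}

/* "Transmit": consume every queued chunk, leaving the packet empty. */
static int my_packet_transmit(struct my_packet *pkt)
{
	struct my_tx_chunk *chunk, *tmp;

	if (list_empty(&pkt->chunk_list))
		return 0;	/* nothing to send */

	list_for_each_entry_safe(chunk, tmp, &pkt->chunk_list, list) {
		list_del(&chunk->list);
		/* copy the chunk into the wire buffer here, then release it */
		kfree(chunk);
	}
	pkt->size = 0;
	return 0;
}
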
/linux-4.1.27/drivers/crypto/
n2_core.c
   673  struct list_head chunk_list;  [member in struct:n2_request_context]
   888  INIT_LIST_HEAD(&rctx->chunk_list);  [n2_compute_chunks()]
   921  &rctx->chunk_list);  [n2_compute_chunks()]
   948  list_add_tail(&chunk->entry, &rctx->chunk_list);  [n2_compute_chunks()]
   963  list_for_each_entry_safe(c, tmp, &rctx->chunk_list, entry) {  [n2_chunk_complete()]
   990  list_for_each_entry_safe(c, tmp, &rctx->chunk_list, entry) {  [n2_do_ecb()]
  1047  list_for_each_entry_safe(c, tmp, &rctx->chunk_list,  [n2_do_chaining()]
  1060  list_for_each_entry_safe_reverse(c, tmp, &rctx->chunk_list,  [n2_do_chaining()]
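The n2 driver splits a crypto request into chunks on rctx->chunk_list and, in n2_do_chaining(), walks them in both directions; list_for_each_entry_safe_reverse() is the backwards counterpart of the usual _safe walk and likewise tolerates deletion as it goes. A brief sketch with hypothetical my_* types:

#include <linux/list.h>
#include <linux/slab.h>

struct my_crypto_chunk {
	struct list_head entry;		/* linked into my_request_ctx.chunk_list */
	unsigned int len;
};

struct my_request_ctx {
	struct list_head chunk_list;
};

/* Release the chunks in reverse submission order. */
static void my_release_chunks_reverse(struct my_request_ctx *rctx)
{
	struct my_crypto_chunk *c, *tmp;

	list_for_each_entry_safe_reverse(c, tmp, &rctx->chunk_list, entry) {
		list_del(&c->entry);
		kfree(c);
	}
}
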
/linux-4.1.27/arch/powerpc/include/asm/
ps3.h
    87  * @chunk_list: Opaque variable used by the ioc page manager.
   106  } chunk_list;  [member in struct:ps3_dma_region]
/linux-4.1.27/include/net/sctp/
structs.h
   694  struct list_head chunk_list;  [member in struct:sctp_packet]

Completed in 451 milliseconds