chunk_list         94 arch/powerpc/include/asm/ps3.h 	} chunk_list;
chunk_list        401 arch/powerpc/platforms/ps3/mm.c 	list_for_each_entry(c, &r->chunk_list.head, link) {
chunk_list        430 arch/powerpc/platforms/ps3/mm.c 	list_for_each_entry(c, &r->chunk_list.head, link) {
chunk_list        536 arch/powerpc/platforms/ps3/mm.c 	list_add(&c->link, &r->chunk_list.head);
chunk_list        570 arch/powerpc/platforms/ps3/mm.c 	if (list_empty(&r->chunk_list.head)) {
chunk_list        575 arch/powerpc/platforms/ps3/mm.c 		last  = list_entry(r->chunk_list.head.next,
chunk_list        606 arch/powerpc/platforms/ps3/mm.c 	list_add(&c->link, &r->chunk_list.head);
chunk_list        656 arch/powerpc/platforms/ps3/mm.c 	INIT_LIST_HEAD(&r->chunk_list.head);
chunk_list        657 arch/powerpc/platforms/ps3/mm.c 	spin_lock_init(&r->chunk_list.lock);
chunk_list        678 arch/powerpc/platforms/ps3/mm.c 	INIT_LIST_HEAD(&r->chunk_list.head);
chunk_list        679 arch/powerpc/platforms/ps3/mm.c 	spin_lock_init(&r->chunk_list.lock);
chunk_list        718 arch/powerpc/platforms/ps3/mm.c 	list_for_each_entry_safe(c, tmp, &r->chunk_list.head, link) {
chunk_list        741 arch/powerpc/platforms/ps3/mm.c 	list_for_each_entry_safe(c, n, &r->chunk_list.head, link) {
chunk_list        797 arch/powerpc/platforms/ps3/mm.c 	spin_lock_irqsave(&r->chunk_list.lock, flags);
chunk_list        804 arch/powerpc/platforms/ps3/mm.c 		spin_unlock_irqrestore(&r->chunk_list.lock, flags);
chunk_list        814 arch/powerpc/platforms/ps3/mm.c 		spin_unlock_irqrestore(&r->chunk_list.lock, flags);
chunk_list        820 arch/powerpc/platforms/ps3/mm.c 	spin_unlock_irqrestore(&r->chunk_list.lock, flags);
chunk_list        842 arch/powerpc/platforms/ps3/mm.c 	spin_lock_irqsave(&r->chunk_list.lock, flags);
chunk_list        850 arch/powerpc/platforms/ps3/mm.c 		spin_unlock_irqrestore(&r->chunk_list.lock, flags);
chunk_list        861 arch/powerpc/platforms/ps3/mm.c 		spin_unlock_irqrestore(&r->chunk_list.lock, flags);
chunk_list        869 arch/powerpc/platforms/ps3/mm.c 	spin_unlock_irqrestore(&r->chunk_list.lock, flags);
chunk_list        888 arch/powerpc/platforms/ps3/mm.c 	spin_lock_irqsave(&r->chunk_list.lock, flags);
chunk_list        914 arch/powerpc/platforms/ps3/mm.c 	spin_unlock_irqrestore(&r->chunk_list.lock, flags);
chunk_list        925 arch/powerpc/platforms/ps3/mm.c 	spin_lock_irqsave(&r->chunk_list.lock, flags);
chunk_list        952 arch/powerpc/platforms/ps3/mm.c 	spin_unlock_irqrestore(&r->chunk_list.lock, flags);
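
The ps3 hits above show chunk_list as a small struct pairing a list head with a spinlock (the `} chunk_list;` at ps3.h:94), and every lookup or insert in mm.c is bracketed by spin_lock_irqsave()/spin_unlock_irqrestore(). A minimal sketch of that locked-list pattern; the region and chunk fields other than chunk_list.head/.lock are illustrative assumptions:

	#include <linux/list.h>
	#include <linux/spinlock.h>

	struct region {
		struct {
			struct list_head head;
			spinlock_t lock;
		} chunk_list;
	};

	struct chunk {
		struct list_head link;
		unsigned long lpar_addr;	/* hypothetical payload */
	};

	static void region_init(struct region *r)
	{
		INIT_LIST_HEAD(&r->chunk_list.head);
		spin_lock_init(&r->chunk_list.lock);
	}

	static struct chunk *region_find(struct region *r, unsigned long addr)
	{
		struct chunk *c, *found = NULL;
		unsigned long flags;

		spin_lock_irqsave(&r->chunk_list.lock, flags);
		list_for_each_entry(c, &r->chunk_list.head, link) {
			if (c->lpar_addr == addr) {
				found = c;
				break;
			}
		}
		spin_unlock_irqrestore(&r->chunk_list.lock, flags);
		return found;
	}
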
chunk_list        687 drivers/crypto/n2_core.c 	struct list_head	chunk_list;
chunk_list        895 drivers/crypto/n2_core.c 	INIT_LIST_HEAD(&rctx->chunk_list);
chunk_list        928 drivers/crypto/n2_core.c 					      &rctx->chunk_list);
chunk_list        955 drivers/crypto/n2_core.c 		list_add_tail(&chunk->entry, &rctx->chunk_list);
chunk_list        970 drivers/crypto/n2_core.c 	list_for_each_entry_safe(c, tmp, &rctx->chunk_list, entry) {
chunk_list        997 drivers/crypto/n2_core.c 	list_for_each_entry_safe(c, tmp, &rctx->chunk_list, entry) {
chunk_list       1054 drivers/crypto/n2_core.c 		list_for_each_entry_safe(c, tmp, &rctx->chunk_list,
chunk_list       1067 drivers/crypto/n2_core.c 		list_for_each_entry_safe_reverse(c, tmp, &rctx->chunk_list,
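
In n2_core the request context owns a plain list_head: chunks are queued with list_add_tail() and released with list_for_each_entry_safe(), which caches the next pointer so the current node can be freed mid-walk (the _safe_reverse variant at line 1067 unwinds in the opposite order on error). A hedged sketch of the teardown, with struct n2_chunk as a hypothetical stand-in:

	#include <linux/list.h>
	#include <linux/slab.h>

	struct n2_chunk {			/* hypothetical stand-in */
		struct list_head entry;
	};

	static void free_chunks(struct list_head *chunk_list)
	{
		struct n2_chunk *c, *tmp;

		/* _safe caches tmp = next, so kfree(c) cannot corrupt the walk */
		list_for_each_entry_safe(c, tmp, chunk_list, entry) {
			list_del(&c->entry);
			kfree(c);
		}
	}
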
chunk_list        167 drivers/gpu/drm/qxl/qxl_drv.h 	struct list_head chunk_list;
chunk_list         51 drivers/gpu/drm/qxl/qxl_image.c 	list_add_tail(&chunk->head, &image->chunk_list);
chunk_list         68 drivers/gpu/drm/qxl/qxl_image.c 	INIT_LIST_HEAD(&image->chunk_list);
chunk_list         90 drivers/gpu/drm/qxl/qxl_image.c 	list_for_each_entry_safe(chunk, tmp, &dimage->chunk_list, head) {
chunk_list        120 drivers/gpu/drm/qxl/qxl_image.c 	drv_chunk = list_first_entry(&dimage->chunk_list, struct qxl_drm_chunk, head);
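
The qxl hits follow the same build/tear-down lifecycle and additionally pull the head element with list_first_entry(), which is only defined on a non-empty list. A small sketch of that accessor, assuming a hypothetical chunk type:

	#include <linux/list.h>

	struct drm_chunk {			/* hypothetical */
		struct list_head head;
	};

	/* list_first_entry() requires a non-empty list, so callers either
	 * guarantee that or check list_empty() first, as here.
	 */
	static struct drm_chunk *first_chunk(struct list_head *chunk_list)
	{
		if (list_empty(chunk_list))
			return NULL;
		return list_first_entry(chunk_list, struct drm_chunk, head);
	}
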
chunk_list        107 drivers/infiniband/hw/efa/efa_verbs.c 			struct pbl_chunk_list chunk_list;
chunk_list       1095 drivers/infiniband/hw/efa/efa_verbs.c 	struct pbl_chunk_list *chunk_list = &pbl->phys.indirect.chunk_list;
chunk_list       1109 drivers/infiniband/hw/efa/efa_verbs.c 	chunk_list->size = chunk_list_size;
chunk_list       1110 drivers/infiniband/hw/efa/efa_verbs.c 	chunk_list->chunks = kcalloc(chunk_list_size,
chunk_list       1111 drivers/infiniband/hw/efa/efa_verbs.c 				     sizeof(*chunk_list->chunks),
chunk_list       1113 drivers/infiniband/hw/efa/efa_verbs.c 	if (!chunk_list->chunks)
chunk_list       1122 drivers/infiniband/hw/efa/efa_verbs.c 		chunk_list->chunks[i].buf = kzalloc(EFA_CHUNK_SIZE, GFP_KERNEL);
chunk_list       1123 drivers/infiniband/hw/efa/efa_verbs.c 		if (!chunk_list->chunks[i].buf)
chunk_list       1126 drivers/infiniband/hw/efa/efa_verbs.c 		chunk_list->chunks[i].length = EFA_CHUNK_USED_SIZE;
chunk_list       1128 drivers/infiniband/hw/efa/efa_verbs.c 	chunk_list->chunks[chunk_list_size - 1].length =
chunk_list       1135 drivers/infiniband/hw/efa/efa_verbs.c 	cur_chunk_buf = chunk_list->chunks[0].buf;
chunk_list       1143 drivers/infiniband/hw/efa/efa_verbs.c 			cur_chunk_buf = chunk_list->chunks[chunk_idx].buf;
chunk_list       1151 drivers/infiniband/hw/efa/efa_verbs.c 					  chunk_list->chunks[i].buf,
chunk_list       1152 drivers/infiniband/hw/efa/efa_verbs.c 					  chunk_list->chunks[i].length,
chunk_list       1160 drivers/infiniband/hw/efa/efa_verbs.c 		chunk_list->chunks[i].dma_addr = dma_addr;
chunk_list       1167 drivers/infiniband/hw/efa/efa_verbs.c 		prev_chunk_buf = chunk_list->chunks[i - 1].buf;
chunk_list       1171 drivers/infiniband/hw/efa/efa_verbs.c 		ctrl_buf->length = chunk_list->chunks[i].length;
chunk_list       1182 drivers/infiniband/hw/efa/efa_verbs.c 		dma_unmap_single(&dev->pdev->dev, chunk_list->chunks[i].dma_addr,
chunk_list       1183 drivers/infiniband/hw/efa/efa_verbs.c 				 chunk_list->chunks[i].length, DMA_TO_DEVICE);
chunk_list       1187 drivers/infiniband/hw/efa/efa_verbs.c 		kfree(chunk_list->chunks[i].buf);
chunk_list       1189 drivers/infiniband/hw/efa/efa_verbs.c 	kfree(chunk_list->chunks);
chunk_list       1195 drivers/infiniband/hw/efa/efa_verbs.c 	struct pbl_chunk_list *chunk_list = &pbl->phys.indirect.chunk_list;
chunk_list       1198 drivers/infiniband/hw/efa/efa_verbs.c 	for (i = 0; i < chunk_list->size; i++) {
chunk_list       1199 drivers/infiniband/hw/efa/efa_verbs.c 		dma_unmap_single(&dev->pdev->dev, chunk_list->chunks[i].dma_addr,
chunk_list       1200 drivers/infiniband/hw/efa/efa_verbs.c 				 chunk_list->chunks[i].length, DMA_TO_DEVICE);
chunk_list       1201 drivers/infiniband/hw/efa/efa_verbs.c 		kfree(chunk_list->chunks[i].buf);
chunk_list       1204 drivers/infiniband/hw/efa/efa_verbs.c 	kfree(chunk_list->chunks);
chunk_list       1263 drivers/infiniband/hw/efa/efa_verbs.c 		  pbl->phys.indirect.chunk_list.size);
chunk_list       1381 drivers/infiniband/hw/efa/efa_verbs.c 			pbl->phys.indirect.chunk_list.chunks[0].length;
chunk_list       1383 drivers/infiniband/hw/efa/efa_verbs.c 		efa_com_set_dma_addr(pbl->phys.indirect.chunk_list.chunks[0].dma_addr,
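
Unlike the list-based users, efa's pbl_chunk_list is a kcalloc()'d array (a size plus a chunks pointer) whose buffers are individually kzalloc()'d and streaming-mapped with dma_map_single(); on failure the code unwinds by unmapping and freeing only what was already set up, mirroring the destroy path at lines 1195-1204. A condensed sketch of that allocate/map/unwind shape; the struct layout here is an assumption shaped after the hits:

	#include <linux/types.h>
	#include <linux/errno.h>
	#include <linux/dma-mapping.h>

	struct chunk {
		void *buf;
		u32 length;
		dma_addr_t dma_addr;
	};

	static int chunk_array_map(struct device *dev, struct chunk *chunks, int n)
	{
		int i, k;

		for (i = 0; i < n; i++) {
			chunks[i].dma_addr = dma_map_single(dev, chunks[i].buf,
							    chunks[i].length,
							    DMA_TO_DEVICE);
			if (dma_mapping_error(dev, chunks[i].dma_addr))
				goto unwind;
		}
		return 0;

	unwind:
		/* unmap only the chunks that were successfully mapped */
		for (k = 0; k < i; k++)
			dma_unmap_single(dev, chunks[k].dma_addr,
					 chunks[k].length, DMA_TO_DEVICE);
		return -ENOMEM;
	}
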
chunk_list        296 drivers/infiniband/hw/hns/hns_roce_hem.c 	INIT_LIST_HEAD(&hem->chunk_list);
chunk_list        311 drivers/infiniband/hw/hns/hns_roce_hem.c 			list_add_tail(&chunk->list, &hem->chunk_list);
chunk_list        350 drivers/infiniband/hw/hns/hns_roce_hem.c 	list_for_each_entry_safe(chunk, tmp, &hem->chunk_list, list) {
chunk_list        851 drivers/infiniband/hw/hns/hns_roce_hem.c 	list_for_each_entry(chunk, &hem->chunk_list, list) {
chunk_list         90 drivers/infiniband/hw/hns/hns_roce_hem.h 	struct list_head	 chunk_list;
chunk_list        154 drivers/infiniband/hw/hns/hns_roce_hem.h 	iter->chunk = list_empty(&hem->chunk_list) ? NULL :
chunk_list        155 drivers/infiniband/hw/hns/hns_roce_hem.h 				 list_entry(hem->chunk_list.next,
chunk_list        168 drivers/infiniband/hw/hns/hns_roce_hem.h 		if (iter->chunk->list.next == &iter->hem->chunk_list) {
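
hns_roce_hem.h layers a cursor on top of the list: initialization points iter->chunk at the first entry, or NULL when the list is empty. A sketch of that init step; the iterator struct is an assumption shaped after the hits:

	#include <linux/list.h>

	struct hem_chunk {			/* illustrative */
		struct list_head list;
	};

	struct hem_iter {
		struct list_head *head;
		struct hem_chunk *chunk;	/* NULL means iteration done */
	};

	static void hem_iter_init(struct hem_iter *iter,
				  struct list_head *chunk_list)
	{
		iter->head  = chunk_list;
		iter->chunk = list_empty(chunk_list) ? NULL :
			      list_entry(chunk_list->next,
					 struct hem_chunk, list);
	}
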
chunk_list         95 drivers/infiniband/hw/mthca/mthca_memfree.c 	list_for_each_entry_safe(chunk, tmp, &icm->chunk_list, list) {
chunk_list        153 drivers/infiniband/hw/mthca/mthca_memfree.c 	INIT_LIST_HEAD(&icm->chunk_list);
chunk_list        167 drivers/infiniband/hw/mthca/mthca_memfree.c 			list_add_tail(&chunk->list, &icm->chunk_list);
chunk_list        297 drivers/infiniband/hw/mthca/mthca_memfree.c 	list_for_each_entry(chunk, &icm->chunk_list, list) {
chunk_list         59 drivers/infiniband/hw/mthca/mthca_memfree.h 	struct list_head chunk_list;
chunk_list        103 drivers/infiniband/hw/mthca/mthca_memfree.h 	iter->chunk    = list_empty(&icm->chunk_list) ?
chunk_list        104 drivers/infiniband/hw/mthca/mthca_memfree.h 		NULL : list_entry(icm->chunk_list.next,
chunk_list        117 drivers/infiniband/hw/mthca/mthca_memfree.h 		if (iter->chunk->list.next == &iter->icm->chunk_list) {
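
mthca's iterator is the ancestor of the hns one above; its advance step detects the end of the walk by comparing the current node's next pointer against the list head itself. A sketch of that advance, with self-contained illustrative types:

	#include <linux/list.h>

	struct icm_chunk {			/* illustrative */
		struct list_head list;
	};

	struct icm_iter {
		struct list_head *head;
		struct icm_chunk *chunk;
	};

	/* advance: when the current node's ->next wraps back to the list
	 * head, the walk is complete and the cursor goes to NULL;
	 * the caller must not advance a NULL cursor
	 */
	static void icm_iter_next(struct icm_iter *iter)
	{
		if (iter->chunk->list.next == iter->head) {
			iter->chunk = NULL;
			return;
		}
		iter->chunk = list_entry(iter->chunk->list.next,
					 struct icm_chunk, list);
	}
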
chunk_list         66 drivers/infiniband/hw/usnic/usnic_uiom.c static void usnic_uiom_put_pages(struct list_head *chunk_list, int dirty)
chunk_list         74 drivers/infiniband/hw/usnic/usnic_uiom.c 	list_for_each_entry_safe(chunk, tmp, chunk_list, list) {
chunk_list         88 drivers/infiniband/hw/usnic/usnic_uiom.c 	struct list_head *chunk_list = &uiomr->chunk_list;
chunk_list        117 drivers/infiniband/hw/usnic/usnic_uiom.c 	INIT_LIST_HEAD(chunk_list);
chunk_list        177 drivers/infiniband/hw/usnic/usnic_uiom.c 			list_add_tail(&chunk->list, chunk_list);
chunk_list        185 drivers/infiniband/hw/usnic/usnic_uiom.c 		usnic_uiom_put_pages(chunk_list, 0);
chunk_list        240 drivers/infiniband/hw/usnic/usnic_uiom.c 	usnic_uiom_put_pages(&uiomr->chunk_list, dirty & writable);
chunk_list        260 drivers/infiniband/hw/usnic/usnic_uiom.c 	chunk = list_first_entry(&uiomr->chunk_list, struct usnic_uiom_chunk,
chunk_list        415 drivers/infiniband/hw/usnic/usnic_uiom.c 	usnic_uiom_put_pages(&uiomr->chunk_list, 0);
chunk_list         72 drivers/infiniband/hw/usnic/usnic_uiom.h 	struct list_head		chunk_list;
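
usnic passes the bare list_head into usnic_uiom_put_pages() together with a dirty flag, so one helper serves both the registration error path (dirty = 0, line 185) and the real unregister (dirty & writable, line 240). A sketch of that shape; the page-release details are elided assumptions:

	#include <linux/list.h>
	#include <linux/slab.h>

	struct uiom_chunk {			/* illustrative */
		struct list_head list;
	};

	static void put_pages(struct list_head *chunk_list, int dirty)
	{
		struct uiom_chunk *chunk, *tmp;

		list_for_each_entry_safe(chunk, tmp, chunk_list, list) {
			/* the real helper marks pages dirty when dirty != 0
			 * before releasing them
			 */
			list_del(&chunk->list);
			kfree(chunk);
		}
	}
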
chunk_list         86 drivers/net/ethernet/mellanox/mlx4/icm.c 	list_for_each_entry_safe(chunk, tmp, &icm->chunk_list, list) {
chunk_list        155 drivers/net/ethernet/mellanox/mlx4/icm.c 	INIT_LIST_HEAD(&icm->chunk_list);
chunk_list        176 drivers/net/ethernet/mellanox/mlx4/icm.c 			list_add_tail(&chunk->list, &icm->chunk_list);
chunk_list        335 drivers/net/ethernet/mellanox/mlx4/icm.c 	list_for_each_entry(chunk, &icm->chunk_list, list) {
chunk_list         68 drivers/net/ethernet/mellanox/mlx4/icm.h 	struct list_head	chunk_list;
chunk_list        100 drivers/net/ethernet/mellanox/mlx4/icm.h 	iter->chunk    = list_empty(&icm->chunk_list) ?
chunk_list        101 drivers/net/ethernet/mellanox/mlx4/icm.h 		NULL : list_entry(icm->chunk_list.next,
chunk_list        114 drivers/net/ethernet/mellanox/mlx4/icm.h 		if (iter->chunk->list.next == &iter->icm->chunk_list) {
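
mlx4's icm.h carries the same iterator as mthca almost verbatim (the driver descends from mthca), and icm.c also walks the list directly with list_for_each_entry() where no persistent cursor is needed. A direct-walk sketch, with the page-count payload as an assumption:

	#include <linux/list.h>

	struct mlx4_icm_chunk {			/* illustrative */
		struct list_head list;
		int npages;			/* assumed payload */
	};

	static int icm_total_pages(struct list_head *chunk_list)
	{
		struct mlx4_icm_chunk *chunk;
		int total = 0;

		list_for_each_entry(chunk, chunk_list, list)
			total += chunk->npages;
		return total;
	}
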
chunk_list        261 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_icm_pool.c 		INIT_LIST_HEAD(&chunk->chunk_list);
chunk_list        262 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_icm_pool.c 		list_add(&chunk->chunk_list, &bucket->free_list);
chunk_list        287 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_icm_pool.c 	list_del(&chunk->chunk_list);
chunk_list        322 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_icm_pool.c 	list_for_each_entry_safe(chunk, next, &bucket->free_list, chunk_list)
chunk_list        328 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_icm_pool.c 	list_for_each_entry_safe(chunk, next, &bucket->used_list, chunk_list)
chunk_list        484 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_icm_pool.c 					chunk_list);
chunk_list        486 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_icm_pool.c 			list_del_init(&chunk->chunk_list);
chunk_list        487 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_icm_pool.c 			list_add_tail(&chunk->chunk_list, &bucket->used_list);
chunk_list        509 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_icm_pool.c 	list_del_init(&chunk->chunk_list);
chunk_list        510 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_icm_pool.c 	list_add_tail(&chunk->chunk_list, &bucket->hot_list);
chunk_list        790 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h 	struct list_head chunk_list;
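
In mlx5's dr_icm_pool the field named chunk_list is the chunk's own link node, not a container: a chunk migrates between the bucket's free_list, used_list and hot_list via list_del_init() plus list_add_tail(). Using list_del_init() rather than list_del() leaves the node self-linked, so a later move is always safe. A sketch with the bucket fields taken from the hits:

	#include <linux/list.h>

	struct dr_chunk {
		struct list_head chunk_list;	/* the chunk's own link */
	};

	struct dr_bucket {
		struct list_head free_list;
		struct list_head used_list;
		struct list_head hot_list;
	};

	static void chunk_move(struct dr_chunk *chunk, struct list_head *dst)
	{
		/* del_init keeps the node self-linked between moves */
		list_del_init(&chunk->chunk_list);
		list_add_tail(&chunk->chunk_list, dst);
	}

On allocation the pool would call something like chunk_move(chunk, &bucket->used_list), and on free chunk_move(chunk, &bucket->hot_list), matching the transitions visible at lines 484-510.
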
chunk_list        648 drivers/s390/block/dasd_int.h dasd_init_chunklist(struct list_head *chunk_list, void *mem,
chunk_list        653 drivers/s390/block/dasd_int.h 	INIT_LIST_HEAD(chunk_list);
chunk_list        656 drivers/s390/block/dasd_int.h 	list_add(&chunk->list, chunk_list);
chunk_list        660 drivers/s390/block/dasd_int.h dasd_alloc_chunk(struct list_head *chunk_list, unsigned long size)
chunk_list        665 drivers/s390/block/dasd_int.h 	list_for_each_entry(chunk, chunk_list, list) {
chunk_list        682 drivers/s390/block/dasd_int.h dasd_free_chunk(struct list_head *chunk_list, void *mem)
chunk_list        690 drivers/s390/block/dasd_int.h 	left = chunk_list;
chunk_list        691 drivers/s390/block/dasd_int.h 	list_for_each(p, chunk_list) {
chunk_list        697 drivers/s390/block/dasd_int.h 	if (left->next != chunk_list) {
chunk_list        705 drivers/s390/block/dasd_int.h 	if (left != chunk_list) {
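
dasd builds a small first-fit memory allocator on top of the chunk list: dasd_alloc_chunk() scans for the first chunk large enough, and dasd_free_chunk() walks the address-ordered list to find the left neighbour and merge adjacent free chunks. A simplified first-fit scan under those assumptions (the real allocator also splits oversized chunks):

	#include <linux/list.h>

	struct dasd_chunk {			/* shaped after dasd_int.h */
		struct list_head list;
		unsigned long size;
	};

	/* first fit: return the first free chunk with room for size bytes */
	static struct dasd_chunk *find_chunk(struct list_head *chunk_list,
					     unsigned long size)
	{
		struct dasd_chunk *chunk;

		list_for_each_entry(chunk, chunk_list, list)
			if (chunk->size >= size)
				return chunk;
		return NULL;
	}
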
chunk_list        712 include/net/sctp/structs.h 	struct list_head chunk_list;
chunk_list        151 net/sctp/output.c 	INIT_LIST_HEAD(&packet->chunk_list);
chunk_list        165 net/sctp/output.c 	list_for_each_entry_safe(chunk, tmp, &packet->chunk_list, list) {
chunk_list        347 net/sctp/output.c 	list_add_tail(&chunk->list, &packet->chunk_list);
chunk_list        429 net/sctp/output.c 		list_for_each_entry_safe(chunk, tmp, &packet->chunk_list,
chunk_list        450 net/sctp/output.c 		list_for_each_entry_safe(chunk, tmp, &packet->chunk_list, list) {
chunk_list        493 net/sctp/output.c 			if (list_empty(&packet->chunk_list))
chunk_list        497 net/sctp/output.c 					 &packet->chunk_list);
chunk_list        504 net/sctp/output.c 	} while (!list_empty(&packet->chunk_list));
chunk_list        557 net/sctp/output.c 	if (list_empty(&packet->chunk_list))
chunk_list        559 net/sctp/output.c 	chunk = list_entry(packet->chunk_list.next, struct sctp_chunk, list);
chunk_list        634 net/sctp/output.c 	list_for_each_entry_safe(chunk, tmp, &packet->chunk_list, list) {
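
net/sctp/output.c treats the packet's chunk_list as a transmit queue: chunks are appended with list_add_tail() and the flush path drains the queue with a do/while loop guarded by list_empty(), peeling entries off the front via chunk_list.next. A drain-loop sketch; the transmit step is a hypothetical placeholder:

	#include <linux/list.h>
	#include <linux/slab.h>

	struct xmit_chunk {			/* illustrative */
		struct list_head list;
	};

	static void flush_packet(struct list_head *chunk_list)
	{
		struct xmit_chunk *chunk;

		if (list_empty(chunk_list))
			return;

		do {
			chunk = list_entry(chunk_list->next,
					   struct xmit_chunk, list);
			list_del(&chunk->list);
			/* ... bundle chunk into the outgoing skb here ... */
			kfree(chunk);
		} while (!list_empty(chunk_list));
	}
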