hole 95 arch/arm/mm/init.c static void __init arm_adjust_dma_zone(unsigned long *size, unsigned long *hole,
hole 103 arch/arm/mm/init.c hole[ZONE_NORMAL] = hole[0];
hole 104 arch/arm/mm/init.c hole[ZONE_DMA] = 0;
hole 51 arch/m68k/sun3/sun3dvma.c static struct hole initholes[64];
hole 89 arch/m68k/sun3/sun3dvma.c struct hole *hole;
hole 93 arch/m68k/sun3/sun3dvma.c hole = list_entry(cur, struct hole, list);
hole 95 arch/m68k/sun3/sun3dvma.c if((hole->start == 0) && (hole->end == 0) && (hole->size == 0))
hole 99 arch/m68k/sun3/sun3dvma.c hole->start, hole->end, hole->size);
hole 109 arch/m68k/sun3/sun3dvma.c struct hole *hole;
hole 110 arch/m68k/sun3/sun3dvma.c struct hole *prev = NULL;
hole 115 arch/m68k/sun3/sun3dvma.c hole = list_entry(cur, struct hole, list);
hole 118 arch/m68k/sun3/sun3dvma.c prev = hole;
hole 122 arch/m68k/sun3/sun3dvma.c if(hole->end == prev->start) {
hole 123 arch/m68k/sun3/sun3dvma.c hole->size += prev->size;
hole 124 arch/m68k/sun3/sun3dvma.c hole->end = prev->end;
hole 134 arch/m68k/sun3/sun3dvma.c static inline struct hole *rmcache(void)
hole 136 arch/m68k/sun3/sun3dvma.c struct hole *ret;
hole 145 arch/m68k/sun3/sun3dvma.c ret = list_entry(hole_cache.next, struct hole, list);
hole 156 arch/m68k/sun3/sun3dvma.c struct hole *hole;
hole 170 arch/m68k/sun3/sun3dvma.c hole = list_entry(cur, struct hole, list);
hole 173 arch/m68k/sun3/sun3dvma.c newlen = len + ((hole->end - len) & (align-1));
hole 177 arch/m68k/sun3/sun3dvma.c if(hole->size > newlen) {
hole 178 arch/m68k/sun3/sun3dvma.c hole->end -= newlen;
hole 179 arch/m68k/sun3/sun3dvma.c hole->size -= newlen;
hole 180 arch/m68k/sun3/sun3dvma.c dvma_entry_use(hole->end) = newlen;
hole 185 arch/m68k/sun3/sun3dvma.c return hole->end;
hole 186 arch/m68k/sun3/sun3dvma.c } else if(hole->size == newlen) {
hole 187 arch/m68k/sun3/sun3dvma.c list_move(&(hole->list), &hole_cache);
hole 188 arch/m68k/sun3/sun3dvma.c dvma_entry_use(hole->start) = newlen;
hole 193 arch/m68k/sun3/sun3dvma.c return hole->start;
hole 207 arch/m68k/sun3/sun3dvma.c struct hole *hole;
hole 223 arch/m68k/sun3/sun3dvma.c hole = list_entry(cur, struct hole, list);
hole 225 arch/m68k/sun3/sun3dvma.c if(hole->end == baddr) {
hole 226 arch/m68k/sun3/sun3dvma.c hole->end += len;
hole 227 arch/m68k/sun3/sun3dvma.c hole->size += len;
hole 229 arch/m68k/sun3/sun3dvma.c } else if(hole->start == (baddr + len)) {
hole 230 arch/m68k/sun3/sun3dvma.c hole->start = baddr;
hole 231 arch/m68k/sun3/sun3dvma.c hole->size += len;
hole 237 arch/m68k/sun3/sun3dvma.c hole = rmcache();
hole 239 arch/m68k/sun3/sun3dvma.c hole->start = baddr;
hole 240 arch/m68k/sun3/sun3dvma.c hole->end = baddr + len;
hole 241 arch/m68k/sun3/sun3dvma.c hole->size = len;
hole 244 arch/m68k/sun3/sun3dvma.c list_add(&(hole->list), cur);
hole 253 arch/m68k/sun3/sun3dvma.c struct hole *hole;
hole 263 arch/m68k/sun3/sun3dvma.c hole = rmcache();
hole 264 arch/m68k/sun3/sun3dvma.c hole->start = DVMA_START;
hole 265 arch/m68k/sun3/sun3dvma.c hole->end = DVMA_END;
hole 266 arch/m68k/sun3/sun3dvma.c hole->size = DVMA_SIZE;
hole 268 arch/m68k/sun3/sun3dvma.c list_add(&(hole->list), &hole_list);
hole 200 arch/x86/mm/numa_emulation.c static u64 uniform_size(u64 max_addr, u64 base, u64 hole, int nr_nodes)
hole 204 arch/x86/mm/numa_emulation.c unsigned long hole_pfns = PHYS_PFN(hole);
hole 356 drivers/gpu/drm/amd/amdgpu/amdgpu.h struct list_head *hole;
hole 61 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c sa_manager->hole = &sa_manager->olist;
hole 88 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c sa_manager->hole = &sa_manager->olist,
hole 105 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c if (sa_manager->hole == &sa_bo->olist) {
hole 106 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c sa_manager->hole = sa_bo->olist.prev;
hole 118 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c if (sa_manager->hole->next == &sa_manager->olist)
hole 121 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c sa_bo = list_entry(sa_manager->hole->next, struct amdgpu_sa_bo, olist);
hole 133 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c struct list_head *hole = sa_manager->hole;
hole 135 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c if (hole != &sa_manager->olist) {
hole 136 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c return list_entry(hole, struct amdgpu_sa_bo, olist)->eoffset;
hole 143 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c struct list_head *hole = sa_manager->hole;
hole 145 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c if (hole->next != &sa_manager->olist) {
hole 146 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c return list_entry(hole->next, struct amdgpu_sa_bo, olist)->soffset;
hole 167 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c list_add(&sa_bo->olist, sa_manager->hole);
hole 169 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c sa_manager->hole = &sa_bo->olist;
hole 214 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c if (sa_manager->hole->next == &sa_manager->olist) {
hole 216 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c sa_manager->hole = &sa_manager->olist;
hole 265 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c sa_manager->hole = best_bo->olist.prev;
hole 382 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c if (&i->olist == sa_manager->hole) {
hole 403 drivers/gpu/drm/drm_mm.c struct drm_mm_node *hole;
hole 412 drivers/gpu/drm/drm_mm.c hole = find_hole(mm, node->start);
hole 413 drivers/gpu/drm/drm_mm.c if (!hole)
hole 416 drivers/gpu/drm/drm_mm.c adj_start = hole_start = __drm_mm_hole_node_start(hole);
hole 417 drivers/gpu/drm/drm_mm.c adj_end = hole_end = hole_start + hole->hole_size;
hole 420 drivers/gpu/drm/drm_mm.c mm->color_adjust(hole, node->color, &adj_start, &adj_end);
hole 427 drivers/gpu/drm/drm_mm.c list_add(&node->node_list, &hole->node_list);
hole 428 drivers/gpu/drm/drm_mm.c drm_mm_interval_tree_add_node(hole, node);
hole 432 drivers/gpu/drm/drm_mm.c rm_hole(hole);
hole 434 drivers/gpu/drm/drm_mm.c add_hole(hole);
hole 471 drivers/gpu/drm/drm_mm.c struct drm_mm_node *hole;
hole 490 drivers/gpu/drm/drm_mm.c for (hole = first_hole(mm, range_start, range_end, size, mode);
hole 491 drivers/gpu/drm/drm_mm.c hole;
hole 492 drivers/gpu/drm/drm_mm.c hole = once ? NULL : next_hole(mm, hole, mode)) {
hole 493 drivers/gpu/drm/drm_mm.c u64 hole_start = __drm_mm_hole_node_start(hole);
hole 494 drivers/gpu/drm/drm_mm.c u64 hole_end = hole_start + hole->hole_size;
hole 507 drivers/gpu/drm/drm_mm.c mm->color_adjust(hole, color, &col_start, &col_end);
hole 546 drivers/gpu/drm/drm_mm.c list_add(&node->node_list, &hole->node_list);
hole 547 drivers/gpu/drm/drm_mm.c drm_mm_interval_tree_add_node(hole, node);
hole 550 drivers/gpu/drm/drm_mm.c rm_hole(hole);
hole 552 drivers/gpu/drm/drm_mm.c add_hole(hole);
hole 728 drivers/gpu/drm/drm_mm.c struct drm_mm_node *hole;
hole 744 drivers/gpu/drm/drm_mm.c hole = list_prev_entry(node, node_list);
hole 745 drivers/gpu/drm/drm_mm.c DRM_MM_BUG_ON(list_next_entry(hole, node_list) != node);
hole 748 drivers/gpu/drm/drm_mm.c hole_start = __drm_mm_hole_node_start(hole);
hole 749 drivers/gpu/drm/drm_mm.c hole_end = __drm_mm_hole_node_end(hole);
hole 754 drivers/gpu/drm/drm_mm.c mm->color_adjust(hole, scan->color, &col_start, &col_end);
hole 859 drivers/gpu/drm/drm_mm.c struct drm_mm_node *hole;
hole 872 drivers/gpu/drm/drm_mm.c list_for_each_entry(hole, &mm->hole_stack, hole_stack) {
hole 873 drivers/gpu/drm/drm_mm.c hole_start = __drm_mm_hole_node_start(hole);
hole 874 drivers/gpu/drm/drm_mm.c hole_end = hole_start + hole->hole_size;
hole 882 drivers/gpu/drm/drm_mm.c DRM_MM_BUG_ON(&hole->hole_stack == &mm->hole_stack);
hole 883 drivers/gpu/drm/drm_mm.c if (unlikely(&hole->hole_stack == &mm->hole_stack))
hole 889 drivers/gpu/drm/drm_mm.c mm->color_adjust(hole, scan->color, &hole_start, &hole_end);
hole 891 drivers/gpu/drm/drm_mm.c return hole;
hole 893 drivers/gpu/drm/drm_mm.c return list_next_entry(hole, node_list);
hole 424 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c struct drm_mm_node resv, *hole;
hole 434 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c drm_mm_for_each_hole(hole, mm, hole_start, hole_end) {
hole 393 drivers/gpu/drm/i915/selftests/i915_gem_evict.c struct drm_mm_node hole;
hole 415 drivers/gpu/drm/i915/selftests/i915_gem_evict.c memset(&hole, 0, sizeof(hole));
hole 416 drivers/gpu/drm/i915/selftests/i915_gem_evict.c err = i915_gem_gtt_insert(&i915->ggtt.vm, &hole,
hole 447 drivers/gpu/drm/i915/selftests/i915_gem_evict.c drm_mm_remove_node(&hole);
hole 523 drivers/gpu/drm/i915/selftests/i915_gem_evict.c if (drm_mm_node_allocated(&hole))
hole 524 drivers/gpu/drm/i915/selftests/i915_gem_evict.c drm_mm_remove_node(&hole);
hole 545 drivers/gpu/drm/radeon/radeon.h struct list_head *hole;
hole 61 drivers/gpu/drm/radeon/radeon_sa.c sa_manager->hole = &sa_manager->olist;
hole 83 drivers/gpu/drm/radeon/radeon_sa.c sa_manager->hole = &sa_manager->olist,
hole 145 drivers/gpu/drm/radeon/radeon_sa.c if (sa_manager->hole == &sa_bo->olist) {
hole 146 drivers/gpu/drm/radeon/radeon_sa.c sa_manager->hole = sa_bo->olist.prev;
hole 158 drivers/gpu/drm/radeon/radeon_sa.c if (sa_manager->hole->next == &sa_manager->olist)
hole 161 drivers/gpu/drm/radeon/radeon_sa.c sa_bo = list_entry(sa_manager->hole->next, struct radeon_sa_bo, olist);
hole 172 drivers/gpu/drm/radeon/radeon_sa.c struct list_head *hole = sa_manager->hole;
hole 174 drivers/gpu/drm/radeon/radeon_sa.c if (hole != &sa_manager->olist) {
hole 175 drivers/gpu/drm/radeon/radeon_sa.c return list_entry(hole, struct radeon_sa_bo, olist)->eoffset;
hole 182 drivers/gpu/drm/radeon/radeon_sa.c struct list_head *hole = sa_manager->hole;
hole 184 drivers/gpu/drm/radeon/radeon_sa.c if (hole->next != &sa_manager->olist) {
hole 185 drivers/gpu/drm/radeon/radeon_sa.c return list_entry(hole->next, struct radeon_sa_bo, olist)->soffset;
hole 206 drivers/gpu/drm/radeon/radeon_sa.c list_add(&sa_bo->olist, sa_manager->hole);
hole 208 drivers/gpu/drm/radeon/radeon_sa.c sa_manager->hole = &sa_bo->olist;
hole 255 drivers/gpu/drm/radeon/radeon_sa.c if (sa_manager->hole->next == &sa_manager->olist) {
hole 257 drivers/gpu/drm/radeon/radeon_sa.c sa_manager->hole = &sa_manager->olist;
hole 302 drivers/gpu/drm/radeon/radeon_sa.c sa_manager->hole = best_bo->olist.prev;
hole 409 drivers/gpu/drm/radeon/radeon_sa.c if (&i->olist == sa_manager->hole) {
hole 55 drivers/gpu/drm/selftests/test-drm_mm.c struct drm_mm_node *hole;
hole 60 drivers/gpu/drm/selftests/test-drm_mm.c drm_mm_for_each_hole(hole, mm, hole_start, hole_end)
hole 67 drivers/gpu/drm/selftests/test-drm_mm.c drm_mm_for_each_node(hole, mm) {
hole 68 drivers/gpu/drm/selftests/test-drm_mm.c if (drm_mm_hole_follows(hole)) {
hole 79 drivers/gpu/drm/selftests/test-drm_mm.c struct drm_mm_node *hole;
hole 88 drivers/gpu/drm/selftests/test-drm_mm.c drm_mm_for_each_hole(hole, mm, hole_start, hole_end) {
hole 1153 drivers/gpu/drm/selftests/test-drm_mm.c struct drm_mm_node *hole;
hole 1155 drivers/gpu/drm/selftests/test-drm_mm.c drm_mm_for_each_hole(hole, mm, hole_start, hole_end) {
hole 1156 drivers/gpu/drm/selftests/test-drm_mm.c struct drm_mm_node *next = list_next_entry(hole, node_list);
hole 1159 drivers/gpu/drm/selftests/test-drm_mm.c if (hole->allocated)
hole 1162 drivers/gpu/drm/selftests/test-drm_mm.c hole->start, hole->size, hole->color);
hole 1175 drivers/net/virtio_net.c unsigned int len, hole;
hole 1189 drivers/net/virtio_net.c hole = alloc_frag->size - alloc_frag->offset;
hole 1190 drivers/net/virtio_net.c if (hole < len + room) {
hole 1195 drivers/net/virtio_net.c len += hole;
hole 1196 drivers/net/virtio_net.c alloc_frag->offset += hole;
hole 860 drivers/net/wireless/broadcom/brcm80211/brcmsmac/ampdu.c u8 hole[AMPDU_MAX_MPDU];
hole 861 drivers/net/wireless/broadcom/brcm80211/brcmsmac/ampdu.c memset(hole, 0, sizeof(hole));
hole 189 drivers/pcmcia/rsrc_nonstatic.c u_char *b, hole, most;
hole 204 drivers/pcmcia/rsrc_nonstatic.c hole = inb(i);
hole 206 drivers/pcmcia/rsrc_nonstatic.c if (inb(i+j) != hole)
hole 209 drivers/pcmcia/rsrc_nonstatic.c if ((j == 8) && (++b[hole] > b[most]))
hole 210 drivers/pcmcia/rsrc_nonstatic.c most = hole;
hole 1586 fs/ntfs/aops.c goto hole;
hole 1607 fs/ntfs/aops.c goto hole;
hole 1623 fs/ntfs/aops.c hole:
hole 1174 fs/ocfs2/dir.c unsigned int hole;
hole 1177 fs/ocfs2/dir.c hole = le16_to_cpu(de->rec_len);
hole 1179 fs/ocfs2/dir.c hole = le16_to_cpu(de->rec_len) -
hole 1182 fs/ocfs2/dir.c return hole;
hole 609 fs/ubifs/file.c int i = 0, nn = *n, offs = bu->zbranch[0].offs, hole = 0, read = 0;
hole 623 fs/ubifs/file.c hole = 1;
hole 633 fs/ubifs/file.c hole = 1;
hole 670 fs/ubifs/file.c hole = 1;
hole 687 fs/ubifs/file.c if (hole) {
hole 59 fs/xfs/xfs_mount.c int hole, i;
hole 73 fs/xfs/xfs_mount.c for (i = 0, hole = -1; i < xfs_uuid_table_size; i++) {
hole 75 fs/xfs/xfs_mount.c hole = i;
hole 82 fs/xfs/xfs_mount.c if (hole < 0) {
hole 86 fs/xfs/xfs_mount.c hole = xfs_uuid_table_size++;
hole 88 fs/xfs/xfs_mount.c xfs_uuid_table[hole] = *uuid;
hole 801 kernel/bpf/core.c u32 size, hole, start, pages;
hole 822 kernel/bpf/core.c hole = min_t(unsigned int, size - (proglen + sizeof(*hdr)),
hole 824 kernel/bpf/core.c start = (get_random_int() % hole) & ~(alignment - 1);
hole 3049 net/ipv4/tcp_output.c struct sk_buff *skb, *rtx_head, *hole = NULL;
hole 3068 net/ipv4/tcp_output.c if (!hole)
hole 3083 net/ipv4/tcp_output.c if (!hole && !(sacked & (TCPCB_SACKED_RETRANS|TCPCB_SACKED_ACKED)))
hole 3084 net/ipv4/tcp_output.c hole = skb;
hole 275 tools/perf/util/block-range.c struct block_range *hole = malloc(sizeof(struct block_range));
hole 276 tools/perf/util/block-range.c if (!hole)
hole 279 tools/perf/util/block-range.c *hole = (struct block_range){
hole 286 tools/perf/util/block-range.c rb_link_left_of_node(&hole->node, &next->node);
hole 287 tools/perf/util/block-range.c rb_insert_color(&hole->node, &block_ranges.root);
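
Several of the hits above share one idea: free address space is tracked as a list of "holes". arch/m68k/sun3/sun3dvma.c carves an allocation from the top of a hole (hole->end -= newlen; return hole->end;) and, on free, either extends a touching hole (hole->end == baddr, or hole->start == baddr + len) or inserts a fresh one from the hole cache. The amdgpu and radeon sa managers track a single moving hole pointer into their allocation list. Below is a minimal userspace sketch of the hole-list pattern; it is not code from any of the files above, and hole_alloc(), hole_free() and the plain singly linked list are hypothetical simplifications (the kernel code uses struct list_head, alignment handling and a preallocated hole cache).

/*
 * Minimal sketch of hole-list free-space tracking: a sorted list of free
 * ranges, first-fit allocation, and merge-on-free. Illustrative only.
 */
#include <stdio.h>
#include <stdlib.h>

struct hole {
	unsigned long start;	/* first free byte */
	unsigned long end;	/* one past the last free byte */
	struct hole *next;	/* next hole, sorted by address */
};

static struct hole *holes;

/* First-fit: carve len bytes from the top of a hole, the same move as
 * sun3dvma's "hole->end -= newlen; return hole->end;" above. */
static unsigned long hole_alloc(unsigned long len)
{
	struct hole **pp, *h;
	unsigned long addr;

	for (pp = &holes; (h = *pp) != NULL; pp = &h->next) {
		if (h->end - h->start < len)
			continue;
		h->end -= len;
		addr = h->end;
		if (h->start == h->end) {	/* hole fully consumed: unlink it */
			*pp = h->next;
			free(h);
		}
		return addr;
	}
	return 0;	/* no hole large enough */
}

/* Free [addr, addr+len): extend an adjacent hole if one touches the range
 * (cf. sun3dvma's hole->end == baddr / hole->start == baddr + len cases),
 * otherwise insert a new hole; merge holes that now meet. */
static void hole_free(unsigned long addr, unsigned long len)
{
	struct hole **pp, *h, *n;

	for (pp = &holes; (h = *pp) != NULL && h->end < addr; pp = &h->next)
		;
	if (h && h->end == addr) {		/* grows the hole below the range */
		h->end += len;
		n = h->next;
		if (n && n->start == h->end) {	/* bridged two holes: merge them */
			h->end = n->end;
			h->next = n->next;
			free(n);
		}
		return;
	}
	if (h && h->start == addr + len) {	/* grows the hole above the range */
		h->start = addr;
		return;
	}
	n = malloc(sizeof(*n));			/* isolated range: new hole */
	if (!n)
		return;
	n->start = addr;
	n->end = addr + len;
	n->next = h;
	*pp = n;
}

int main(void)
{
	unsigned long a;

	hole_free(0x1000, 0x1000);	/* seed one hole, like the DVMA_START..DVMA_END hole seeded above */
	a = hole_alloc(0x100);
	printf("allocated at %#lx\n", a);
	hole_free(a, 0x100);		/* merges straight back into the seed hole */
	return 0;
}

drivers/gpu/drm/drm_mm.c keeps the same information in a different shape: a hole is implicit in the gap after each allocated node (hole->hole_size), indexed by the hole_stack list and an interval tree, so no separate hole structs are needed.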