
Searched refs:heap (Results 1 – 54 of 54) sorted by relevance

/linux-4.4.14/drivers/staging/android/ion/
ion_heap.c
28 void *ion_heap_map_kernel(struct ion_heap *heap, in ion_heap_map_kernel() argument
65 void ion_heap_unmap_kernel(struct ion_heap *heap, in ion_heap_unmap_kernel() argument
71 int ion_heap_map_user(struct ion_heap *heap, struct ion_buffer *buffer, in ion_heap_map_user() argument
163 void ion_heap_freelist_add(struct ion_heap *heap, struct ion_buffer *buffer) in ion_heap_freelist_add() argument
165 spin_lock(&heap->free_lock); in ion_heap_freelist_add()
166 list_add(&buffer->list, &heap->free_list); in ion_heap_freelist_add()
167 heap->free_list_size += buffer->size; in ion_heap_freelist_add()
168 spin_unlock(&heap->free_lock); in ion_heap_freelist_add()
169 wake_up(&heap->waitqueue); in ion_heap_freelist_add()
172 size_t ion_heap_freelist_size(struct ion_heap *heap) in ion_heap_freelist_size() argument
[all …]
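
The freelist hits above are ion's deferred-free pattern: under a spinlock, queue the buffer, bump a size counter, and wake a drain thread. A minimal userspace sketch of the same pattern, assuming pthreads and purely illustrative names (not the ion primitives):

#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>

/* Deferred free: freed buffers are queued under a lock and a size
 * counter instead of being released inline; a drain pass (in ion, a
 * worker woken via the waitqueue) releases them later. Illustrative
 * userspace names, not the kernel primitives. */
struct buffer {
	size_t size;
	struct buffer *next;
};

static struct buffer *free_list;
static size_t free_list_size;
static pthread_mutex_t free_lock = PTHREAD_MUTEX_INITIALIZER;
static pthread_cond_t waitqueue = PTHREAD_COND_INITIALIZER;

static void freelist_add(struct buffer *buf)
{
	pthread_mutex_lock(&free_lock);
	buf->next = free_list;			/* push onto the free list */
	free_list = buf;
	free_list_size += buf->size;		/* bytes awaiting release */
	pthread_mutex_unlock(&free_lock);
	pthread_cond_signal(&waitqueue);	/* wake the drain side */
}

static size_t freelist_drain(void)
{
	size_t drained = 0;

	pthread_mutex_lock(&free_lock);
	while (free_list) {
		struct buffer *buf = free_list;

		free_list = buf->next;
		free_list_size -= buf->size;
		drained += buf->size;
		free(buf);
	}
	pthread_mutex_unlock(&free_lock);
	return drained;
}

int main(void)
{
	struct buffer *buf = malloc(sizeof(*buf));

	buf->size = 4096;
	freelist_add(buf);
	printf("drained %zu bytes\n", freelist_drain());
	return 0;
}

Tracking free_list_size is what lets allocation paths drain the backlog on demand, as the ion_heap_freelist_drain() call in the ion.c hits below shows.
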
ion_system_heap.c
51 struct ion_heap heap; member
55 static struct page *alloc_buffer_page(struct ion_system_heap *heap, in alloc_buffer_page() argument
60 struct ion_page_pool *pool = heap->pools[order_to_index(order)]; in alloc_buffer_page()
80 static void free_buffer_page(struct ion_system_heap *heap, in free_buffer_page() argument
87 struct ion_page_pool *pool = heap->pools[order_to_index(order)]; in free_buffer_page()
96 static struct page *alloc_largest_available(struct ion_system_heap *heap, in alloc_largest_available() argument
110 page = alloc_buffer_page(heap, buffer, orders[i]); in alloc_largest_available()
120 static int ion_system_heap_allocate(struct ion_heap *heap, in ion_system_heap_allocate() argument
125 struct ion_system_heap *sys_heap = container_of(heap, in ion_system_heap_allocate()
127 heap); in ion_system_heap_allocate()
[all …]
ion_carveout_heap.c
29 struct ion_heap heap; member
34 ion_phys_addr_t ion_carveout_allocate(struct ion_heap *heap, in ion_carveout_allocate() argument
39 container_of(heap, struct ion_carveout_heap, heap); in ion_carveout_allocate()
48 void ion_carveout_free(struct ion_heap *heap, ion_phys_addr_t addr, in ion_carveout_free() argument
52 container_of(heap, struct ion_carveout_heap, heap); in ion_carveout_free()
59 static int ion_carveout_heap_phys(struct ion_heap *heap, in ion_carveout_heap_phys() argument
72 static int ion_carveout_heap_allocate(struct ion_heap *heap, in ion_carveout_heap_allocate() argument
91 paddr = ion_carveout_allocate(heap, size, align); in ion_carveout_heap_allocate()
111 struct ion_heap *heap = buffer->heap; in ion_carveout_heap_free() local
122 ion_carveout_free(heap, paddr, buffer->size); in ion_carveout_heap_free()
[all …]
ion_priv.h
68 struct ion_heap *heap; member
110 int (*allocate)(struct ion_heap *heap,
114 int (*phys)(struct ion_heap *heap, struct ion_buffer *buffer,
116 struct sg_table * (*map_dma)(struct ion_heap *heap,
118 void (*unmap_dma)(struct ion_heap *heap, struct ion_buffer *buffer);
119 void * (*map_kernel)(struct ion_heap *heap, struct ion_buffer *buffer);
120 void (*unmap_kernel)(struct ion_heap *heap, struct ion_buffer *buffer);
123 int (*shrink)(struct ion_heap *heap, gfp_t gfp_mask, int nr_to_scan);
182 int (*debug_show)(struct ion_heap *heap, struct seq_file *, void *);
224 void ion_device_add_heap(struct ion_device *dev, struct ion_heap *heap);
[all …]
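
The ion_priv.h hits above are the per-heap ops table: each heap type fills a struct of function pointers and the core dispatches through heap->ops (as in heap->ops->allocate(...) in the ion.c hits below). A self-contained sketch of that vtable pattern, with illustrative names rather than ion's interface:

#include <stdio.h>

/* Ops-table dispatch: generic code only knows struct heap and calls
 * through heap->ops; each backend supplies its own table. */
struct heap;

struct heap_ops {
	int (*allocate)(struct heap *heap, size_t len);
	void (*free)(struct heap *heap);
};

struct heap {
	const struct heap_ops *ops;
	const char *name;
};

static int noop_allocate(struct heap *heap, size_t len)
{
	printf("%s: allocate %zu bytes\n", heap->name, len);
	return 0;
}

static void noop_free(struct heap *heap)
{
	printf("%s: free\n", heap->name);
}

static const struct heap_ops noop_ops = {
	.allocate	= noop_allocate,
	.free		= noop_free,
};

int main(void)
{
	struct heap heap = { .ops = &noop_ops, .name = "noop" };

	heap.ops->allocate(&heap, 4096);	/* dispatch through the table */
	heap.ops->free(&heap);
	return 0;
}
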
ion_cma_heap.c
30 struct ion_heap heap; member
34 #define to_cma_heap(x) container_of(x, struct ion_cma_heap, heap)
44 static int ion_cma_allocate(struct ion_heap *heap, struct ion_buffer *buffer, in ion_cma_allocate() argument
48 struct ion_cma_heap *cma_heap = to_cma_heap(heap); in ion_cma_allocate()
95 struct ion_cma_heap *cma_heap = to_cma_heap(buffer->heap); in ion_cma_free()
109 static int ion_cma_phys(struct ion_heap *heap, struct ion_buffer *buffer, in ion_cma_phys() argument
112 struct ion_cma_heap *cma_heap = to_cma_heap(buffer->heap); in ion_cma_phys()
125 static struct sg_table *ion_cma_heap_map_dma(struct ion_heap *heap, in ion_cma_heap_map_dma() argument
133 static void ion_cma_heap_unmap_dma(struct ion_heap *heap, in ion_cma_heap_unmap_dma() argument
141 struct ion_cma_heap *cma_heap = to_cma_heap(buffer->heap); in ion_cma_mmap()
[all …]
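
to_cma_heap() above is the kernel's container_of() idiom: recover the enclosing structure from a pointer to an embedded member, which is how each ops callback gets from the generic struct ion_heap back to its specific heap. A runnable userspace rendition (struct names are illustrative):

#include <stddef.h>
#include <stdio.h>

/* Userspace rendition of the kernel's container_of(): subtract the
 * member's offset to recover the enclosing structure. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct heap {
	int id;
};

struct cma_heap {			/* illustrative, not ion's struct */
	struct heap heap;		/* embedded base */
	int extra;
};

#define to_cma_heap(x) container_of(x, struct cma_heap, heap)

int main(void)
{
	struct cma_heap cma = { .heap = { .id = 7 }, .extra = 42 };
	struct heap *heap = &cma.heap;	/* the pointer callbacks receive */

	printf("extra = %d\n", to_cma_heap(heap)->extra);	/* 42 */
	return 0;
}
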
ion_chunk_heap.c
28 struct ion_heap heap; member
36 static int ion_chunk_heap_allocate(struct ion_heap *heap, in ion_chunk_heap_allocate() argument
42 container_of(heap, struct ion_chunk_heap, heap); in ion_chunk_heap_allocate()
95 struct ion_heap *heap = buffer->heap; in ion_chunk_heap_free() local
97 container_of(heap, struct ion_chunk_heap, heap); in ion_chunk_heap_free()
120 static struct sg_table *ion_chunk_heap_map_dma(struct ion_heap *heap, in ion_chunk_heap_map_dma() argument
126 static void ion_chunk_heap_unmap_dma(struct ion_heap *heap, in ion_chunk_heap_unmap_dma() argument
173 chunk_heap->heap.ops = &chunk_heap_ops; in ion_chunk_heap_create()
174 chunk_heap->heap.type = ION_HEAP_TYPE_CHUNK; in ion_chunk_heap_create()
175 chunk_heap->heap.flags = ION_HEAP_FLAG_DEFER_FREE; in ion_chunk_heap_create()
[all …]
ion.c
176 static struct ion_buffer *ion_buffer_create(struct ion_heap *heap, in ion_buffer_create() argument
191 buffer->heap = heap; in ion_buffer_create()
195 ret = heap->ops->allocate(heap, buffer, len, align, flags); in ion_buffer_create()
198 if (!(heap->flags & ION_HEAP_FLAG_DEFER_FREE)) in ion_buffer_create()
201 ion_heap_freelist_drain(heap, 0); in ion_buffer_create()
202 ret = heap->ops->allocate(heap, buffer, len, align, in ion_buffer_create()
211 table = heap->ops->map_dma(heap, buffer); in ion_buffer_create()
264 heap->ops->unmap_dma(heap, buffer); in ion_buffer_create()
266 heap->ops->free(buffer); in ion_buffer_create()
275 buffer->heap->ops->unmap_kernel(buffer->heap, buffer); in ion_buffer_destroy()
[all …]
/linux-4.4.14/drivers/gpu/drm/radeon/
radeon_mem.c
84 static struct mem_block *alloc_block(struct mem_block *heap, int size, in alloc_block() argument
90 list_for_each(p, heap) { in alloc_block()
99 static struct mem_block *find_block(struct mem_block *heap, int start) in find_block() argument
103 list_for_each(p, heap) in find_block()
136 static int init_heap(struct mem_block **heap, int start, int size) in init_heap() argument
143 *heap = kzalloc(sizeof(**heap), GFP_KERNEL); in init_heap()
144 if (!*heap) { in init_heap()
152 blocks->next = blocks->prev = *heap; in init_heap()
154 (*heap)->file_priv = (struct drm_file *) - 1; in init_heap()
155 (*heap)->next = (*heap)->prev = blocks; in init_heap()
[all …]
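
init_heap()/alloc_block() above implement first-fit allocation over a list of memory blocks. A simplified, runnable sketch of the idea, singly linked and without coalescing on free; names are illustrative, not the radeon code:

#include <stdio.h>
#include <stdlib.h>

struct block {
	int start, size, used;
	struct block *next;
};

static struct block *heap_init(int start, int size)
{
	struct block *blocks = calloc(1, sizeof(*blocks));

	if (!blocks)
		return NULL;
	blocks->start = start;		/* one free block spans the heap */
	blocks->size = size;
	return blocks;
}

static struct block *heap_alloc(struct block *heap, int size)
{
	struct block *p;

	for (p = heap; p; p = p->next) {
		if (p->used || p->size < size)
			continue;	/* first fit: skip unusable blocks */
		if (p->size > size) {	/* split off the unused tail */
			struct block *rest = calloc(1, sizeof(*rest));

			if (!rest)
				return NULL;
			rest->start = p->start + size;
			rest->size = p->size - size;
			rest->next = p->next;
			p->size = size;
			p->next = rest;
		}
		p->used = 1;
		return p;
	}
	return NULL;			/* no free block large enough */
}

int main(void)
{
	struct block *heap = heap_init(0, 64);
	struct block *a = heap_alloc(heap, 16);
	struct block *b = heap_alloc(heap, 32);

	printf("a at %d, b at %d\n", a->start, b->start);	/* 0, 16 */
	return 0;
}
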
radeon_drv.h
393 extern void radeon_mem_takedown(struct mem_block **heap);
395 struct mem_block *heap);
/linux-4.4.14/fs/ubifs/
lprops.c
62 static void move_up_lpt_heap(struct ubifs_info *c, struct ubifs_lpt_heap *heap, in move_up_lpt_heap() argument
75 val2 = get_heap_comp_val(heap->arr[ppos], cat); in move_up_lpt_heap()
79 heap->arr[ppos]->hpos = hpos; in move_up_lpt_heap()
80 heap->arr[hpos] = heap->arr[ppos]; in move_up_lpt_heap()
81 heap->arr[ppos] = lprops; in move_up_lpt_heap()
99 static void adjust_lpt_heap(struct ubifs_info *c, struct ubifs_lpt_heap *heap, in adjust_lpt_heap() argument
109 val2 = get_heap_comp_val(heap->arr[ppos], cat); in adjust_lpt_heap()
113 heap->arr[ppos]->hpos = hpos; in adjust_lpt_heap()
114 heap->arr[hpos] = heap->arr[ppos]; in adjust_lpt_heap()
115 heap->arr[ppos] = lprops; in adjust_lpt_heap()
[all …]
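
move_up_lpt_heap() above is a sift-up: bubble an entry toward the root while it compares better than its parent, keeping hpos in sync as the hits show. A minimal 0-based integer version of the same shape (illustrative, not the ubifs code):

#include <stdio.h>

/* Sift-up: pull parents down until the saved value's final slot is
 * found, then store it once. */
static void sift_up(int *heap, int hpos)
{
	int v = heap[hpos];

	while (hpos > 0) {
		int ppos = (hpos - 1) / 2;	/* parent index */

		if (heap[ppos] <= v)
			break;			/* parent already smaller */
		heap[hpos] = heap[ppos];	/* pull the parent down */
		hpos = ppos;
	}
	heap[hpos] = v;
}

int main(void)
{
	int heap[] = { 2, 5, 6, 7, 0 };		/* last entry violates */
	int i;

	sift_up(heap, 4);
	for (i = 0; i < 5; i++)
		printf("%d ", heap[i]);		/* 0 2 6 7 5 */
	printf("\n");
	return 0;
}
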
find.c
57 struct ubifs_lpt_heap *heap; in valuable() local
63 heap = &c->lpt_heap[cat - 1]; in valuable()
64 if (heap->cnt < heap->max_cnt) in valuable()
141 struct ubifs_lpt_heap *heap; in scan_for_dirty() local
146 heap = &c->lpt_heap[LPROPS_FREE - 1]; in scan_for_dirty()
147 for (i = 0; i < heap->cnt; i++) { in scan_for_dirty()
148 lprops = heap->arr[i]; in scan_for_dirty()
238 struct ubifs_lpt_heap *heap, *idx_heap; in ubifs_find_dirty_leb() local
284 heap = &c->lpt_heap[LPROPS_DIRTY - 1]; in ubifs_find_dirty_leb()
304 if (heap->cnt) { in ubifs_find_dirty_leb()
[all …]
lpt_commit.c
807 struct ubifs_lpt_heap *heap; in populate_lsave() local
834 heap = &c->lpt_heap[LPROPS_DIRTY_IDX - 1]; in populate_lsave()
835 for (i = 0; i < heap->cnt; i++) { in populate_lsave()
836 c->lsave[cnt++] = heap->arr[i]->lnum; in populate_lsave()
840 heap = &c->lpt_heap[LPROPS_DIRTY - 1]; in populate_lsave()
841 for (i = 0; i < heap->cnt; i++) { in populate_lsave()
842 c->lsave[cnt++] = heap->arr[i]->lnum; in populate_lsave()
846 heap = &c->lpt_heap[LPROPS_FREE - 1]; in populate_lsave()
847 for (i = 0; i < heap->cnt; i++) { in populate_lsave()
848 c->lsave[cnt++] = heap->arr[i]->lnum; in populate_lsave()
[all …]
debug.h
268 void ubifs_dump_heap(struct ubifs_info *c, struct ubifs_lpt_heap *heap,
294 void dbg_check_heap(struct ubifs_info *c, struct ubifs_lpt_heap *heap, int cat,
lpt.c
2106 struct ubifs_lpt_heap *heap; in dbg_chk_pnode() local
2168 heap = &c->lpt_heap[cat - 1]; in dbg_chk_pnode()
2169 if (lprops->hpos < heap->cnt && in dbg_chk_pnode()
2170 heap->arr[lprops->hpos] == lprops) in dbg_chk_pnode()
debug.c
889 void ubifs_dump_heap(struct ubifs_info *c, struct ubifs_lpt_heap *heap, int cat) in ubifs_dump_heap() argument
894 current->pid, cat, heap->cnt); in ubifs_dump_heap()
895 for (i = 0; i < heap->cnt; i++) { in ubifs_dump_heap()
896 struct ubifs_lprops *lprops = heap->arr[i]; in ubifs_dump_heap()
/linux-4.4.14/arch/x86/kernel/
test_nx.c
114 char *heap; in test_NX() local
128 heap = kmalloc(64, GFP_KERNEL); in test_NX()
129 if (!heap) in test_NX()
131 heap[0] = 0xC3; /* opcode for "ret" */ in test_NX()
133 if (test_address(heap)) { in test_NX()
137 kfree(heap); in test_NX()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/core/
mm.c
99 b->heap = a->heap; in region_head()
111 nvkm_mm_head(struct nvkm_mm *mm, u8 heap, u8 type, u32 size_max, u32 size_min, in nvkm_mm_head() argument
122 if (unlikely(heap != NVKM_MM_HEAP_ANY)) { in nvkm_mm_head()
123 if (this->heap != heap) in nvkm_mm_head()
174 b->heap = a->heap; in region_tail()
185 nvkm_mm_tail(struct nvkm_mm *mm, u8 heap, u8 type, u32 size_max, u32 size_min, in nvkm_mm_tail() argument
197 if (unlikely(heap != NVKM_MM_HEAP_ANY)) { in nvkm_mm_tail()
198 if (this->heap != heap) in nvkm_mm_tail()
275 node->heap = ++mm->heap_nodes; in nvkm_mm_init()
gpuobj.c
157 ret = nvkm_mm_head(&parent->heap, 0, 1, size, size, in nvkm_gpuobj_ctor()
160 ret = nvkm_mm_tail(&parent->heap, 0, 1, size, size, in nvkm_gpuobj_ctor()
188 return nvkm_mm_init(&gpuobj->heap, 0, gpuobj->size, 1); in nvkm_gpuobj_ctor()
197 nvkm_mm_free(&gpuobj->parent->heap, &gpuobj->node); in nvkm_gpuobj_del()
198 nvkm_mm_fini(&gpuobj->heap); in nvkm_gpuobj_del()
/linux-4.4.14/drivers/md/bcache/
movinggc.c
196 return (b = heap_peek(&ca->heap)) ? GC_SECTORS_USED(b) : 0; in bucket_heap_top()
215 ca->heap.used = 0; in bch_moving_gc()
224 if (!heap_full(&ca->heap)) { in bch_moving_gc()
226 heap_add(&ca->heap, b, bucket_cmp); in bch_moving_gc()
227 } else if (bucket_cmp(b, heap_peek(&ca->heap))) { in bch_moving_gc()
231 ca->heap.data[0] = b; in bch_moving_gc()
232 heap_sift(&ca->heap, 0, bucket_cmp); in bch_moving_gc()
237 heap_pop(&ca->heap, b, bucket_cmp); in bch_moving_gc()
241 while (heap_pop(&ca->heap, b, bucket_cmp)) in bch_moving_gc()
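bch_moving_gc() above (and invalidate_buckets_lru() in alloc.c below) use a bounded heap for top-k selection: while there is room, add; once full, replace the root only when the new element beats it, then sift it back down. A runnable sketch of that pattern with illustrative names, not bcache's macros:

#include <stdio.h>

#define K 3

/* Sift-down on a 0-based min-heap of `used` elements. */
static void sift(int *heap, int used, int i)
{
	for (;;) {
		int c = 2 * i + 1;		/* left child */

		if (c >= used)
			break;
		if (c + 1 < used && heap[c + 1] < heap[c])
			c++;			/* take the smaller child */
		if (heap[i] <= heap[c])
			break;
		int t = heap[i]; heap[i] = heap[c]; heap[c] = t;
		i = c;
	}
}

int main(void)
{
	int data[] = { 5, 1, 9, 3, 7, 8, 2 };
	int heap[K], used = 0, i, j;

	for (i = 0; i < 7; i++) {
		if (used < K) {			/* room left: just add */
			heap[used++] = data[i];
			for (j = used / 2 - 1; j >= 0; j--)
				sift(heap, used, j);	/* re-heapify (simple, O(n)) */
		} else if (data[i] > heap[0]) {	/* beats current minimum */
			heap[0] = data[i];
			sift(heap, K, 0);
		}
	}
	for (i = 0; i < K; i++)
		printf("%d ", heap[i]);		/* 7 8 9: the 3 largest */
	printf("\n");
	return 0;
}

Keeping a min-heap of the best K candidates means the root is always the weakest survivor, so one comparison against heap_peek() decides whether a new element belongs in the set.
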
util.h
40 #define init_heap(heap, _size, gfp) \ argument
43 (heap)->used = 0; \
44 (heap)->size = (_size); \
45 _bytes = (heap)->size * sizeof(*(heap)->data); \
46 (heap)->data = NULL; \
48 (heap)->data = kmalloc(_bytes, (gfp)); \
49 if ((!(heap)->data) && ((gfp) & GFP_KERNEL)) \
50 (heap)->data = vmalloc(_bytes); \
51 (heap)->data; \
54 #define free_heap(heap) \ argument
[all …]
alloc.c
183 ca->heap.used = 0; in invalidate_buckets_lru()
189 if (!heap_full(&ca->heap)) in invalidate_buckets_lru()
190 heap_add(&ca->heap, b, bucket_max_cmp); in invalidate_buckets_lru()
191 else if (bucket_max_cmp(b, heap_peek(&ca->heap))) { in invalidate_buckets_lru()
192 ca->heap.data[0] = b; in invalidate_buckets_lru()
193 heap_sift(&ca->heap, 0, bucket_max_cmp); in invalidate_buckets_lru()
197 for (i = ca->heap.used / 2 - 1; i >= 0; --i) in invalidate_buckets_lru()
198 heap_sift(&ca->heap, i, bucket_min_cmp); in invalidate_buckets_lru()
201 if (!heap_pop(&ca->heap, b, bucket_min_cmp)) { in invalidate_buckets_lru()
bcache.h
421 DECLARE_HEAP(struct bucket *, heap);
super.c
1790 free_heap(&ca->heap); in bch_cache_release()
1825 !init_heap(&ca->heap, free << 3, GFP_KERNEL) || in cache_alloc()
/linux-4.4.14/lib/zlib_deflate/
deftree.c
352 top = s->heap[SMALLEST]; \
353 s->heap[SMALLEST] = s->heap[s->heap_len--]; \
377 int v = s->heap[k]; in pqdownheap()
382 smaller(tree, s->heap[j+1], s->heap[j], s->depth)) { in pqdownheap()
386 if (smaller(tree, v, s->heap[j], s->depth)) break; in pqdownheap()
389 s->heap[k] = s->heap[j]; k = j; in pqdownheap()
394 s->heap[k] = v; in pqdownheap()
430 tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */ in gen_bitlen()
433 n = s->heap[h]; in gen_bitlen()
474 m = s->heap[--h]; in gen_bitlen()
[all …]
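
pqdownheap() above is the classic sift-down: save the element at k, walk down toward the smaller child, and drop the saved element where the heap property holds again. A minimal 1-based integer version of the same shape (illustrative, not zlib's code):

#include <stdio.h>

/* Restore the heap property below index k on a 1-based array, the
 * same shape as pqdownheap(): children of k are 2k and 2k+1. */
static void sift_down(int *heap, int heap_len, int k)
{
	int v = heap[k];
	int j = 2 * k;				/* left child of k */

	while (j <= heap_len) {
		if (j < heap_len && heap[j + 1] < heap[j])
			j++;			/* pick the smaller child */
		if (v <= heap[j])
			break;			/* heap property holds */
		heap[k] = heap[j];		/* pull the child up */
		k = j;
		j = 2 * k;
	}
	heap[k] = v;
}

int main(void)
{
	int heap[] = { 0, 9, 1, 3, 4, 5 };	/* slot 0 unused (1-based) */
	int i;

	sift_down(heap, 5, 1);
	for (i = 1; i <= 5; i++)
		printf("%d ", heap[i]);		/* 1 4 3 9 5 */
	printf("\n");
	return 0;
}
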
defutil.h
179 int heap[2*L_CODES+1]; /* heap used to build the Huffman trees */ member
/linux-4.4.14/drivers/gpu/drm/nouveau/include/nvkm/core/
mm.h
11 u8 heap; member
35 int nvkm_mm_head(struct nvkm_mm *, u8 heap, u8 type, u32 size_max,
37 int nvkm_mm_tail(struct nvkm_mm *, u8 heap, u8 type, u32 size_max,
gpuobj.h
21 struct nvkm_mm heap; member
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/instmem/
nv04.c
32 struct nvkm_mm heap; member
98 nvkm_mm_free(&iobj->imem->heap, &iobj->node); in nv04_instobj_dtor()
131 ret = nvkm_mm_head(&imem->heap, 0, 1, size, size, in nv04_instobj_new()
163 ret = nvkm_mm_init(&imem->heap, 0, imem->base.reserved, 1); in nv04_instmem_oneinit()
201 nvkm_mm_fini(&imem->heap); in nv04_instmem_dtor()
nv40.c
33 struct nvkm_mm heap; member
97 nvkm_mm_free(&iobj->imem->heap, &iobj->node); in nv40_instobj_dtor()
130 ret = nvkm_mm_head(&imem->heap, 0, 1, size, size, in nv40_instobj_new()
174 ret = nvkm_mm_init(&imem->heap, 0, imem->base.reserved, 1); in nv40_instmem_oneinit()
216 nvkm_mm_fini(&imem->heap); in nv40_instmem_dtor()
/linux-4.4.14/arch/x86/boot/compressed/
misc.c
386 asmlinkage __visible void *decompress_kernel(void *rmode, memptr heap, in decompress_kernel() argument
416 free_mem_ptr = heap; /* Heap */ in decompress_kernel()
417 free_mem_end_ptr = heap + BOOT_HEAP_SIZE; in decompress_kernel()
439 if (heap > 0x3fffffffffffUL) in decompress_kernel()
442 if (heap > ((-__PAGE_OFFSET-(128<<20)-1) & 0x7fffffff)) in decompress_kernel()
/linux-4.4.14/arch/m32r/boot/compressed/
misc.c
77 unsigned int zimage_len, unsigned long heap) in decompress_kernel() argument
85 free_mem_ptr = heap; in decompress_kernel()
/linux-4.4.14/arch/mips/boot/compressed/
head.S
51 PTR_LA a0, (.heap) /* heap address */
71 .comm .heap,BOOT_HEAP_SIZE,4
/linux-4.4.14/tools/perf/util/
auxtrace.c
413 int auxtrace_heap__add(struct auxtrace_heap *heap, unsigned int queue_nr, in auxtrace_heap__add() argument
418 if (queue_nr >= heap->heap_sz) { in auxtrace_heap__add()
423 heap_array = realloc(heap->heap_array, in auxtrace_heap__add()
427 heap->heap_array = heap_array; in auxtrace_heap__add()
428 heap->heap_sz = heap_sz; in auxtrace_heap__add()
431 auxtrace_heapify(heap->heap_array, heap->heap_cnt++, queue_nr, ordinal); in auxtrace_heap__add()
436 void auxtrace_heap__free(struct auxtrace_heap *heap) in auxtrace_heap__free() argument
438 zfree(&heap->heap_array); in auxtrace_heap__free()
439 heap->heap_cnt = 0; in auxtrace_heap__free()
440 heap->heap_sz = 0; in auxtrace_heap__free()
[all …]
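
auxtrace_heap__add()/__pop() above drive a k-way merge, as the intel-bts and intel-pt hits below show: keep one (queue_nr, ordinal) entry per queue in a min-heap keyed on ordinal, service the queue with the smallest timestamp, then re-add it with its next one. A self-contained sketch of that consumption loop (illustrative, not perf's implementation):

#include <stdio.h>

struct entry {
	unsigned int queue_nr;
	unsigned long long ordinal;
};

static void swap_entries(struct entry *a, struct entry *b)
{
	struct entry t = *a; *a = *b; *b = t;
}

static void heap_add(struct entry *heap, int *cnt, struct entry e)
{
	int i = (*cnt)++;

	heap[i] = e;
	while (i && heap[(i - 1) / 2].ordinal > heap[i].ordinal) {
		swap_entries(&heap[(i - 1) / 2], &heap[i]);	/* sift up */
		i = (i - 1) / 2;
	}
}

static struct entry heap_pop(struct entry *heap, int *cnt)
{
	struct entry top = heap[0];
	int i = 0;

	heap[0] = heap[--(*cnt)];
	for (;;) {				/* sift the new root down */
		int c = 2 * i + 1;

		if (c >= *cnt)
			break;
		if (c + 1 < *cnt && heap[c + 1].ordinal < heap[c].ordinal)
			c++;
		if (heap[i].ordinal <= heap[c].ordinal)
			break;
		swap_entries(&heap[i], &heap[c]);
		i = c;
	}
	return top;
}

int main(void)
{
	unsigned long long q[2][3] = { { 1, 5, 9 }, { 2, 3, 8 } };
	int pos[2] = { 0, 0 }, cnt = 0;
	struct entry heap[2];
	unsigned int i;

	for (i = 0; i < 2; i++)			/* seed: first ts per queue */
		heap_add(heap, &cnt, (struct entry){ i, q[i][0] });
	while (cnt) {
		struct entry e = heap_pop(heap, &cnt);

		printf("queue %u ts %llu\n", e.queue_nr, e.ordinal);
		if (++pos[e.queue_nr] < 3)	/* re-add with next ts */
			heap_add(heap, &cnt, (struct entry){
				 e.queue_nr, q[e.queue_nr][pos[e.queue_nr]] });
	}
	return 0;
}

Because each queue's events are already in timestamp order, popping the smallest ordinal and re-adding the queue yields a globally ordered stream across all queues.
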
intel-bts.c
52 struct auxtrace_heap heap; member
201 ret = auxtrace_heap__add(&bts->heap, queue_nr, in intel_bts_setup_queue()
565 if (!bts->heap.heap_cnt) in intel_bts_process_queues()
568 if (bts->heap.heap_array[0].ordinal > timestamp) in intel_bts_process_queues()
571 queue_nr = bts->heap.heap_array[0].queue_nr; in intel_bts_process_queues()
575 auxtrace_heap__pop(&bts->heap); in intel_bts_process_queues()
579 auxtrace_heap__add(&bts->heap, queue_nr, ts); in intel_bts_process_queues()
584 ret = auxtrace_heap__add(&bts->heap, queue_nr, ts); in intel_bts_process_queues()
729 auxtrace_heap__free(&bts->heap); in intel_bts_free()
auxtrace.h
404 int auxtrace_heap__add(struct auxtrace_heap *heap, unsigned int queue_nr,
406 void auxtrace_heap__pop(struct auxtrace_heap *heap);
407 void auxtrace_heap__free(struct auxtrace_heap *heap);
intel-pt.c
53 struct auxtrace_heap heap; member
879 ret = auxtrace_heap__add(&pt->heap, queue_nr, ptq->timestamp); in intel_pt_setup_queue()
1410 if (!pt->heap.heap_cnt) in intel_pt_process_queues()
1413 if (pt->heap.heap_array[0].ordinal >= timestamp) in intel_pt_process_queues()
1416 queue_nr = pt->heap.heap_array[0].queue_nr; in intel_pt_process_queues()
1421 queue_nr, pt->heap.heap_array[0].ordinal, in intel_pt_process_queues()
1424 auxtrace_heap__pop(&pt->heap); in intel_pt_process_queues()
1426 if (pt->heap.heap_cnt) { in intel_pt_process_queues()
1427 ts = pt->heap.heap_array[0].ordinal + 1; in intel_pt_process_queues()
1439 auxtrace_heap__add(&pt->heap, queue_nr, ts); in intel_pt_process_queues()
[all …]
/linux-4.4.14/drivers/gpu/drm/nouveau/
nouveau_abi16.c
112 nvkm_mm_free(&chan->heap, &ntfy->node); in nouveau_abi16_ntfy_fini()
139 if (chan->heap.block_size) in nouveau_abi16_chan_fini()
140 nvkm_mm_fini(&chan->heap); in nouveau_abi16_chan_fini()
330 ret = nvkm_mm_init(&chan->heap, 0, PAGE_SIZE, 1); in nouveau_abi16_ioctl_channel_alloc()
528 ret = nvkm_mm_head(&chan->heap, 0, 1, info->size, info->size, 1, in nouveau_abi16_ioctl_notifierobj_alloc()
nouveau_abi16.h
27 struct nvkm_mm heap; member
/linux-4.4.14/drivers/gpu/drm/ttm/
ttm_bo.c
1014 const struct ttm_place *heap = &placement->placement[i]; in ttm_bo_mem_compat() local
1016 (mem->start < heap->fpfn || in ttm_bo_mem_compat()
1017 (heap->lpfn != 0 && (mem->start + mem->num_pages) > heap->lpfn))) in ttm_bo_mem_compat()
1020 *new_flags = heap->flags; in ttm_bo_mem_compat()
1027 const struct ttm_place *heap = &placement->busy_placement[i]; in ttm_bo_mem_compat() local
1029 (mem->start < heap->fpfn || in ttm_bo_mem_compat()
1030 (heap->lpfn != 0 && (mem->start + mem->num_pages) > heap->lpfn))) in ttm_bo_mem_compat()
1033 *new_flags = heap->flags; in ttm_bo_mem_compat()
/linux-4.4.14/Documentation/features/vm/ELF-ASLR/
arch-support.txt
4 # description: arch randomizes the stack, heap and binary images of ELF binaries
/linux-4.4.14/drivers/soc/qcom/
Kconfig
28 The driver provides an interface to items in a heap shared among all
/linux-4.4.14/Documentation/x86/
boot.txt
22 Protocol 2.01: (Kernel 1.3.76) Added a heap overrun warning.
74 | Stack/heap | For use by the kernel real-mode code.
94 setup, and stack/heap) was made relocatable to any address between
126 | Stack/heap | For use by the kernel real-mode code.
496 code) of the end of the setup stack/heap, minus 0x0200.
534 the setup heap and 0xA0000; it does not have to be located in the
772 heap and 0xA0000.
791 The real-mode code requires a stack/heap to be set up, as well as
814 thus permitted to give the stack/heap the full 64K segment and locate
829 0x8000-0xdfff Stack and heap
[all …]
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/fb/
ramnv50.c
531 struct nvkm_mm *heap = &ram->vram; in nv50_ram_get() local
569 ret = nvkm_mm_tail(heap, 0, type, max, min, align, &r); in nv50_ram_get()
571 ret = nvkm_mm_head(heap, 0, type, max, min, align, &r); in nv50_ram_get()
/linux-4.4.14/arch/x86/boot/
header.S
465 # stack behind its own code, so we can't blindly put it directly past the heap.
/linux-4.4.14/Documentation/arm/
tcm.txt
73 memory. Such a heap is great for things like saving
/linux-4.4.14/Documentation/fb/
sisfb.txt
21 important role in connection with DRM/DRI: Sisfb manages the memory heap
/linux-4.4.14/Documentation/vm/
pagemap.txt
162 library, or the stack or the heap, etc.
numa_memory_policy.txt
75 allocated for anonymous segments, such as the task stack and heap, and
unevictable-lru.txt
476 Furthermore, any mmap() call or brk() call that expands the heap by a
/linux-4.4.14/Documentation/filesystems/
f2fs.txt
113 no_heap Disable heap-style segment allocation which finds free
273 -a [0 or 1] : Split start location of each area for heap-based allocation.
proc.txt
328 start_brk address above which program heap can be expanded with brk()
345 0804a000-0806b000 rw-p 00000000 00:00 0 [heap]
379 [heap] = the heap of the program
396 0804a000-0806b000 rw-p 00000000 00:00 0 [heap]
/linux-4.4.14/init/
Kconfig
1163 data and heap segment sizes, and a few additional /proc filesystem
1703 bool "Disable heap randomization"
1706 Randomizing heap placement makes heap exploits harder, but it
1708 This option changes the bootup default to heap randomization
/linux-4.4.14/Documentation/sysctl/
kernel.txt
714 2 - Additionally enable heap randomization. This is the default if
725 with CONFIG_COMPAT_BRK enabled, which excludes the heap from process
/linux-4.4.14/Documentation/spi/
spi-summary
447 You'd normally allocate them from the heap or free page pool.