objects            89 arch/arm/include/asm/kvm_host.h 	void *objects[KVM_NR_MEM_OBJS];
objects            96 arch/arm64/include/asm/kvm_host.h 	void *objects[KVM_NR_MEM_OBJS];
objects           308 arch/mips/include/asm/kvm_host.h 	void *objects[KVM_NR_MEM_OBJS];
objects            40 arch/mips/kvm/mmu.c 		cache->objects[cache->nobjs++] = page;
objects            48 arch/mips/kvm/mmu.c 		free_page((unsigned long)mc->objects[--mc->nobjs]);
objects            56 arch/mips/kvm/mmu.c 	p = mc->objects[--mc->nobjs];
objects           240 arch/x86/include/asm/kvm_host.h 	void *objects[KVM_NR_MEM_OBJS];
objects          1067 arch/x86/kvm/mmu.c 	while (cache->nobjs < ARRAY_SIZE(cache->objects)) {
objects          1071 arch/x86/kvm/mmu.c 		cache->objects[cache->nobjs++] = obj;
objects          1085 arch/x86/kvm/mmu.c 		kmem_cache_free(cache, mc->objects[--mc->nobjs]);
objects          1095 arch/x86/kvm/mmu.c 	while (cache->nobjs < ARRAY_SIZE(cache->objects)) {
objects          1099 arch/x86/kvm/mmu.c 		cache->objects[cache->nobjs++] = page;
objects          1107 arch/x86/kvm/mmu.c 		free_page((unsigned long)mc->objects[--mc->nobjs]);
objects          1141 arch/x86/kvm/mmu.c 	p = mc->objects[--mc->nobjs];
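
The arm/arm64/mips/x86 KVM hits above (and the virt/kvm/arm/mmu.c hits at the end of this listing) all share the same fixed-size per-vCPU object cache: it is topped up with GFP_KERNEL allocations while sleeping is still allowed, then drained one object at a time from contexts that must not sleep. A condensed sketch of that pattern follows; the struct name, helper names and cache size are illustrative, not the kernel's exact definitions.

#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/bug.h>

#define NR_CACHE_OBJS	40	/* illustrative stand-in for KVM_NR_MEM_OBJS */

struct mem_obj_cache {
	int nobjs;
	void *objects[NR_CACHE_OBJS];
};

/* Refill the cache up to @min entries while sleeping is still allowed. */
static int cache_topup(struct mem_obj_cache *mc, int min)
{
	while (mc->nobjs < min) {
		void *page = (void *)__get_free_page(GFP_KERNEL);

		if (!page)
			return -ENOMEM;
		mc->objects[mc->nobjs++] = page;
	}
	return 0;
}

/* Pop one pre-allocated object in a context that must not sleep. */
static void *cache_alloc(struct mem_obj_cache *mc)
{
	BUG_ON(!mc->nobjs);
	return mc->objects[--mc->nobjs];
}

/* Give back everything still cached. */
static void cache_drain(struct mem_obj_cache *mc)
{
	while (mc->nobjs)
		free_page((unsigned long)mc->objects[--mc->nobjs]);
}
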
objects           200 drivers/acpi/acpica/acdebug.h void acpi_db_delete_objects(u32 count, union acpi_object *objects);
objects          1205 drivers/acpi/acpica/aclocal.h 	u32 objects;
objects            50 drivers/acpi/acpica/dbexec.c void acpi_db_delete_objects(u32 count, union acpi_object *objects)
objects            55 drivers/acpi/acpica/dbexec.c 		switch (objects[i].type) {
objects            58 drivers/acpi/acpica/dbexec.c 			ACPI_FREE(objects[i].buffer.pointer);
objects            65 drivers/acpi/acpica/dbexec.c 			acpi_db_delete_objects(objects[i].package.count,
objects            66 drivers/acpi/acpica/dbexec.c 					       objects[i].package.elements);
objects            70 drivers/acpi/acpica/dbexec.c 			ACPI_FREE(objects[i].package.elements);
objects           690 drivers/acpi/acpica/dbnames.c 		info->objects++;
objects           724 drivers/acpi/acpica/dbnames.c 		       info.nodes, info.objects);
objects           793 drivers/acpi/acpica/evregion.c 	union acpi_object objects[2];
objects           833 drivers/acpi/acpica/evregion.c 	args.pointer = objects;
objects           834 drivers/acpi/acpica/evregion.c 	objects[0].type = ACPI_TYPE_INTEGER;
objects           835 drivers/acpi/acpica/evregion.c 	objects[0].integer.value = ACPI_ADR_SPACE_EC;
objects           836 drivers/acpi/acpica/evregion.c 	objects[1].type = ACPI_TYPE_INTEGER;
objects           837 drivers/acpi/acpica/evregion.c 	objects[1].integer.value = ACPI_REG_CONNECT;
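
The evregion.c hits show the two-integer argument array ACPICA hands to an address-space handler's _REG method: the address-space ID, then the connect/disconnect flag. A sketch of the same packaging, but going through the public acpi_evaluate_object() interface rather than ACPICA's internal evaluate-info path used by the driver; the function name and handle parameter are assumptions.

#include <linux/kernel.h>
#include <linux/acpi.h>

static acpi_status example_call_reg(acpi_handle reg_method)
{
	union acpi_object objects[2];
	struct acpi_object_list args;

	objects[0].type = ACPI_TYPE_INTEGER;
	objects[0].integer.value = ACPI_ADR_SPACE_EC;	/* address space ID */
	objects[1].type = ACPI_TYPE_INTEGER;
	objects[1].integer.value = ACPI_REG_CONNECT;	/* 1 = connect, 0 = disconnect */

	args.count = ARRAY_SIZE(objects);
	args.pointer = objects;

	/* Evaluate the _REG control method identified by @reg_method. */
	return acpi_evaluate_object(reg_method, NULL, &args, NULL);
}
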
objects            84 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 					     &amn->objects.rb_root, it.rb) {
objects           217 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	it = interval_tree_iter_first(&amn->objects, start, end);
objects           263 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	it = interval_tree_iter_first(&amn->objects, start, end);
objects           343 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	amn->objects = RB_ROOT_CACHED;
objects           398 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	while ((it = interval_tree_iter_first(&amn->objects, addr, end))) {
objects           401 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 		interval_tree_remove(&node->it, &amn->objects);
objects           420 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	interval_tree_insert(&node->it, &amn->objects);
objects           460 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 		interval_tree_remove(&node->it, &amn->objects);
objects            66 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.h 	struct rb_root_cached	objects;
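
amdgpu_mn here, and the i915 userptr notifier and radeon_mn further down, all keep their objects field as an interval tree over userspace address ranges: an rb_root_cached plus an interval_tree_node embedded in each per-buffer node. A minimal sketch of that bookkeeping, with an illustrative node struct rather than any driver's real one:

#include <linux/kernel.h>
#include <linux/interval_tree.h>

struct tracked_range {
	struct interval_tree_node it;	/* it.start / it.last bound the range */
	/* per-buffer state would live here */
};

static struct rb_root_cached objects = RB_ROOT_CACHED;

static void track_range(struct tracked_range *node, unsigned long start,
			unsigned long last)
{
	node->it.start = start;
	node->it.last = last;
	interval_tree_insert(&node->it, &objects);
}

/* Visit every tracked range overlapping [start, last]. */
static void for_each_overlap(unsigned long start, unsigned long last)
{
	struct interval_tree_node *it;

	for (it = interval_tree_iter_first(&objects, start, last); it;
	     it = interval_tree_iter_next(it, start, last)) {
		struct tracked_range *node =
			container_of(it, struct tracked_range, it);

		/* invalidate/unmap whatever @node backs here */
		(void)node;
	}
}
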
objects           351 drivers/gpu/drm/drm_lease.c 			  struct drm_mode_object **objects,
objects           363 drivers/gpu/drm/drm_lease.c 		if (objects[o]->type == DRM_MODE_OBJECT_CRTC && has_crtc == -1) {
objects           366 drivers/gpu/drm/drm_lease.c 		if (objects[o]->type == DRM_MODE_OBJECT_CONNECTOR && has_connector == -1)
objects           370 drivers/gpu/drm/drm_lease.c 			if (objects[o]->type == DRM_MODE_OBJECT_PLANE && has_plane == -1)
objects           387 drivers/gpu/drm/drm_lease.c 	struct drm_mode_object **objects;
objects           392 drivers/gpu/drm/drm_lease.c 	objects = kcalloc(object_count, sizeof(struct drm_mode_object *),
objects           394 drivers/gpu/drm/drm_lease.c 	if (!objects)
objects           400 drivers/gpu/drm/drm_lease.c 		objects[o] = drm_mode_object_find(dev, lessor_priv,
objects           403 drivers/gpu/drm/drm_lease.c 		if (!objects[o]) {
objects           408 drivers/gpu/drm/drm_lease.c 		if (!drm_mode_object_lease_required(objects[o]->type)) {
objects           415 drivers/gpu/drm/drm_lease.c 	ret = validate_lease(dev, object_count, objects, universal_planes);
objects           424 drivers/gpu/drm/drm_lease.c 		struct drm_mode_object *obj = objects[o];
objects           425 drivers/gpu/drm/drm_lease.c 		u32 object_id = objects[o]->id;
objects           464 drivers/gpu/drm/drm_lease.c 		if (objects[o])
objects           465 drivers/gpu/drm/drm_lease.c 			drm_mode_object_put(objects[o]);
objects           467 drivers/gpu/drm/drm_lease.c 	kfree(objects);
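
The drm_lease.c hits around here implement a gather-validate-release cycle over an array of drm_mode_object pointers. The sketch below condenses that cycle; error handling is trimmed, and the function name, object-id source and use of DRM_MODE_OBJECT_ANY are assumptions (the real code filters by lease-required types and builds an idr before dropping the references).

#include <linux/errno.h>
#include <linux/slab.h>
#include <drm/drm_device.h>
#include <drm/drm_file.h>
#include <drm/drm_mode_object.h>

static int example_gather_objects(struct drm_device *dev,
				  struct drm_file *lessor_priv,
				  u32 *object_ids, int object_count)
{
	struct drm_mode_object **objects;
	int o, ret = 0;

	objects = kcalloc(object_count, sizeof(*objects), GFP_KERNEL);
	if (!objects)
		return -ENOMEM;

	/* Look up (and take a reference on) every requested object. */
	for (o = 0; o < object_count; o++) {
		objects[o] = drm_mode_object_find(dev, lessor_priv,
						  object_ids[o],
						  DRM_MODE_OBJECT_ANY);
		if (!objects[o]) {
			ret = -ENOENT;
			goto out;
		}
	}

	/* ... validate the set and build the lease here ... */

out:
	/* Drop every reference taken above, then the array itself. */
	for (o = 0; o < object_count; o++)
		if (objects[o])
			drm_mode_object_put(objects[o]);
	kfree(objects);
	return ret;
}
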
objects            36 drivers/gpu/drm/i915/gem/i915_gem_userptr.c 	struct rb_root_cached objects;
objects            49 drivers/gpu/drm/i915/gem/i915_gem_userptr.c 	interval_tree_insert(&mo->it, &mo->mn->objects);
objects            57 drivers/gpu/drm/i915/gem/i915_gem_userptr.c 	interval_tree_remove(&mo->it, &mo->mn->objects);
objects            99 drivers/gpu/drm/i915/gem/i915_gem_userptr.c 	if (RB_EMPTY_ROOT(&mn->objects.rb_root))
objects           106 drivers/gpu/drm/i915/gem/i915_gem_userptr.c 	it = interval_tree_iter_first(&mn->objects, range->start, end);
objects           167 drivers/gpu/drm/i915/gem/i915_gem_userptr.c 		it = interval_tree_iter_first(&mn->objects, range->start, end);
objects           194 drivers/gpu/drm/i915/gem/i915_gem_userptr.c 	mn->objects = RB_ROOT_CACHED;
objects           579 drivers/gpu/drm/i915/gem/selftests/huge_pages.c static void close_object_list(struct list_head *objects,
objects           584 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	list_for_each_entry_safe(obj, on, objects, st_link) {
objects           605 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	LIST_HEAD(objects);
objects           636 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 		list_add(&obj->st_link, &objects);
objects           711 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	close_object_list(&objects, ppgtt);
objects           728 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	} objects[] = {
objects           795 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	for (i = 0; i < ARRAY_SIZE(objects); ++i) {
objects           796 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 		unsigned int size = objects[i].size;
objects           797 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 		unsigned int expected_gtt = objects[i].gtt;
objects           798 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 		unsigned int offset = objects[i].offset;
objects           310 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 		   struct list_head *objects)
objects           339 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	list_add_tail(&obj->st_link, objects);
objects           373 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 		LIST_HEAD(objects);
objects           404 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 				obj = create_test_object(ctx, file, &objects);
objects           433 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 		list_for_each_entry(obj, &objects, st_link) {
objects           502 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 		LIST_HEAD(objects);
objects           522 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 				obj = create_test_object(parent, file, &objects);
objects           554 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 		list_for_each_entry(obj, &objects, st_link) {
objects          1051 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	LIST_HEAD(objects);
objects          1093 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 				obj = create_test_object(ctx, file, &objects);
objects          1124 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	list_for_each_entry(obj, &objects, st_link) {
objects           260 drivers/gpu/drm/i915/i915_buddy.c void i915_buddy_free_list(struct i915_buddy_mm *mm, struct list_head *objects)
objects           264 drivers/gpu/drm/i915/i915_buddy.c 	list_for_each_entry_safe(block, on, objects, link)
objects           266 drivers/gpu/drm/i915/i915_buddy.c 	INIT_LIST_HEAD(objects);
objects           126 drivers/gpu/drm/i915/i915_buddy.h void i915_buddy_free_list(struct i915_buddy_mm *mm, struct list_head *objects);
objects            38 drivers/gpu/drm/i915/selftests/i915_gem_evict.c 		      struct list_head *objects)
objects            43 drivers/gpu/drm/i915/selftests/i915_gem_evict.c 	list_add(&obj->st_link, objects);
objects            47 drivers/gpu/drm/i915/selftests/i915_gem_evict.c 			 struct list_head *objects)
objects            69 drivers/gpu/drm/i915/selftests/i915_gem_evict.c 		quirk_add(obj, objects);
objects            77 drivers/gpu/drm/i915/selftests/i915_gem_evict.c 	list_for_each_entry(obj, objects, st_link) {
objects           141 drivers/gpu/drm/i915/selftests/i915_gem_evict.c 	LIST_HEAD(objects);
objects           146 drivers/gpu/drm/i915/selftests/i915_gem_evict.c 	err = populate_ggtt(i915, &objects);
objects           175 drivers/gpu/drm/i915/selftests/i915_gem_evict.c 	cleanup_objects(i915, &objects);
objects           184 drivers/gpu/drm/i915/selftests/i915_gem_evict.c 	LIST_HEAD(objects);
objects           191 drivers/gpu/drm/i915/selftests/i915_gem_evict.c 	err = populate_ggtt(i915, &objects);
objects           201 drivers/gpu/drm/i915/selftests/i915_gem_evict.c 	quirk_add(obj, &objects);
objects           211 drivers/gpu/drm/i915/selftests/i915_gem_evict.c 	cleanup_objects(i915, &objects);
objects           223 drivers/gpu/drm/i915/selftests/i915_gem_evict.c 	LIST_HEAD(objects);
objects           228 drivers/gpu/drm/i915/selftests/i915_gem_evict.c 	err = populate_ggtt(i915, &objects);
objects           251 drivers/gpu/drm/i915/selftests/i915_gem_evict.c 	cleanup_objects(i915, &objects);
objects           274 drivers/gpu/drm/i915/selftests/i915_gem_evict.c 	LIST_HEAD(objects);
objects           290 drivers/gpu/drm/i915/selftests/i915_gem_evict.c 	quirk_add(obj, &objects);
objects           306 drivers/gpu/drm/i915/selftests/i915_gem_evict.c 	quirk_add(obj, &objects);
objects           342 drivers/gpu/drm/i915/selftests/i915_gem_evict.c 	cleanup_objects(i915, &objects);
objects           351 drivers/gpu/drm/i915/selftests/i915_gem_evict.c 	LIST_HEAD(objects);
objects           356 drivers/gpu/drm/i915/selftests/i915_gem_evict.c 	err = populate_ggtt(i915, &objects);
objects           378 drivers/gpu/drm/i915/selftests/i915_gem_evict.c 	cleanup_objects(i915, &objects);
objects           321 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c static void close_object_list(struct list_head *objects,
objects           327 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 	list_for_each_entry_safe(obj, on, objects, st_link) {
objects           354 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 	LIST_HEAD(objects);
objects           380 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 			list_add(&obj->st_link, &objects);
objects           390 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 				list_for_each_entry(obj, &objects, st_link) {
objects           427 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 				list_for_each_entry(obj, &objects, st_link) {
objects           463 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 				list_for_each_entry_reverse(obj, &objects, st_link) {
objects           500 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 				list_for_each_entry_reverse(obj, &objects, st_link) {
objects           543 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 		close_object_list(&objects, vm);
objects           550 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 	close_object_list(&objects, vm);
objects           832 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 	LIST_HEAD(objects);
objects           848 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 		list_add(&obj->st_link, &objects);
objects           886 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 	close_object_list(&objects, vm);
objects          1297 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 	LIST_HEAD(objects);
objects          1325 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 		list_add(&obj->st_link, &objects);
objects          1375 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 		list_add(&obj->st_link, &objects);
objects          1407 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 	list_for_each_entry_safe(obj, on, &objects, st_link) {
objects          1451 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 	list_for_each_entry_safe(obj, on, &objects, st_link) {
objects          1490 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 	LIST_HEAD(objects);
objects          1532 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 		list_add(&obj->st_link, &objects);
objects          1560 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 	list_for_each_entry(obj, &objects, st_link) {
objects          1579 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 	list_for_each_entry_safe(obj, on, &objects, st_link) {
objects          1637 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 		list_add(&obj->st_link, &objects);
objects          1660 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 	list_for_each_entry_safe(obj, on, &objects, st_link) {
objects            20 drivers/gpu/drm/i915/selftests/i915_live_selftests.h selftest(objects, i915_gem_object_live_selftests)
objects            20 drivers/gpu/drm/i915/selftests/i915_mock_selftests.h selftest(objects, i915_gem_object_mock_selftests)
objects           106 drivers/gpu/drm/i915/selftests/i915_vma.c 		       struct list_head *objects,
objects           113 drivers/gpu/drm/i915/selftests/i915_vma.c 	list_for_each_entry(obj, objects, st_link) {
objects           155 drivers/gpu/drm/i915/selftests/i915_vma.c 	LIST_HEAD(objects);
objects           169 drivers/gpu/drm/i915/selftests/i915_vma.c 			list_add(&obj->st_link, &objects);
objects           182 drivers/gpu/drm/i915/selftests/i915_vma.c 			err = create_vmas(i915, &objects, &contexts);
objects           202 drivers/gpu/drm/i915/selftests/i915_vma.c 	err = create_vmas(i915, &objects, &contexts);
objects           209 drivers/gpu/drm/i915/selftests/i915_vma.c 	list_for_each_entry_safe(obj, on, &objects, st_link)
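
The i915 selftests above (and, further down, the nouveau, qxl and radeon GEM object lists and the media request code) all track their objects on a plain list_head: allocate, list_add() onto a local or embedded list, then tear everything down with a single list_for_each_entry_safe() walk. A minimal self-contained sketch of that idiom, with an invented test_object struct:

#include <linux/list.h>
#include <linux/slab.h>

struct test_object {
	struct list_head st_link;
	/* test payload would live here */
};

static int track_and_cleanup(int count)
{
	LIST_HEAD(objects);
	struct test_object *obj, *on;
	int i;

	for (i = 0; i < count; i++) {
		obj = kzalloc(sizeof(*obj), GFP_KERNEL);
		if (!obj)
			break;
		/* Park every allocation on the local list as it is created. */
		list_add(&obj->st_link, &objects);
	}

	/* One loop tears everything down; _safe allows freeing while walking. */
	list_for_each_entry_safe(obj, on, &objects, st_link) {
		list_del(&obj->st_link);
		kfree(obj);
	}
	return 0;
}
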
objects           105 drivers/gpu/drm/nouveau/nouveau_drv.h 	struct list_head objects;
objects           276 drivers/gpu/drm/nouveau/nouveau_usif.c 	list_add(&object->head, &cli->objects);
objects           388 drivers/gpu/drm/nouveau/nouveau_usif.c 	list_for_each_entry_safe(object, otemp, &cli->objects, head) {
objects           396 drivers/gpu/drm/nouveau/nouveau_usif.c 	INIT_LIST_HEAD(&cli->objects);
objects            59 drivers/gpu/drm/qxl/qxl_debugfs.c 	list_for_each_entry(bo, &qdev->gem.objects, list) {
objects           101 drivers/gpu/drm/qxl/qxl_drv.h 	struct list_head	objects;
objects            69 drivers/gpu/drm/qxl/qxl_gem.c 	list_add_tail(&qbo->list, &qdev->gem.objects);
objects           116 drivers/gpu/drm/qxl/qxl_gem.c 	INIT_LIST_HEAD(&qdev->gem.objects);
objects           308 drivers/gpu/drm/qxl/qxl_object.c 	if (list_empty(&qdev->gem.objects))
objects           311 drivers/gpu/drm/qxl/qxl_object.c 	list_for_each_entry_safe(bo, n, &qdev->gem.objects, list) {
objects           572 drivers/gpu/drm/radeon/radeon.h 	struct list_head	objects;
objects            90 drivers/gpu/drm/radeon/radeon_gem.c 	list_add_tail(&robj->list, &rdev->gem.objects);
objects           135 drivers/gpu/drm/radeon/radeon_gem.c 	INIT_LIST_HEAD(&rdev->gem.objects);
objects           789 drivers/gpu/drm/radeon/radeon_gem.c 	list_for_each_entry(rbo, &rdev->gem.objects, list) {
objects            44 drivers/gpu/drm/radeon/radeon_mn.c 	struct rb_root_cached	objects;
objects            83 drivers/gpu/drm/radeon/radeon_mn.c 	it = interval_tree_iter_first(&rmn->objects, range->start, end);
objects           150 drivers/gpu/drm/radeon/radeon_mn.c 	rmn->objects = RB_ROOT_CACHED;
objects           193 drivers/gpu/drm/radeon/radeon_mn.c 	while ((it = interval_tree_iter_first(&rmn->objects, addr, end))) {
objects           196 drivers/gpu/drm/radeon/radeon_mn.c 		interval_tree_remove(&node->it, &rmn->objects);
objects           218 drivers/gpu/drm/radeon/radeon_mn.c 	interval_tree_insert(&node->it, &rmn->objects);
objects           249 drivers/gpu/drm/radeon/radeon_mn.c 		interval_tree_remove(&node->it, &rmn->objects);
objects           439 drivers/gpu/drm/radeon/radeon_object.c 	if (list_empty(&rdev->gem.objects)) {
objects           443 drivers/gpu/drm/radeon/radeon_object.c 	list_for_each_entry_safe(bo, n, &rdev->gem.objects, list) {
objects           153 drivers/gpu/drm/radeon/radeon_pm.c 	if (list_empty(&rdev->gem.objects))
objects           156 drivers/gpu/drm/radeon/radeon_pm.c 	list_for_each_entry_safe(bo, n, &rdev->gem.objects, list) {
objects            79 drivers/gpu/drm/radeon/radeon_prime.c 	list_add_tail(&bo->list, &rdev->gem.objects);
objects           148 drivers/input/rmi4/rmi_f12.c 	int objects = f12->data1->num_subpackets;
objects           151 drivers/input/rmi4/rmi_f12.c 		objects = size / F12_DATA1_BYTES_PER_OBJ;
objects           153 drivers/input/rmi4/rmi_f12.c 	for (i = 0; i < objects; i++) {
objects           194 drivers/input/rmi4/rmi_f12.c 	for (i = 0; i < objects; i++)
objects          1410 drivers/media/common/videobuf2/videobuf2-core.c 	list_for_each_entry(obj, &req->objects, list)
objects          1176 drivers/media/common/videobuf2/videobuf2-v4l2.c 	list_for_each_entry(obj, &req->objects, list) {
objects          1186 drivers/media/common/videobuf2/videobuf2-v4l2.c 		list_for_each_entry_continue_reverse(obj, &req->objects, list)
objects          1207 drivers/media/common/videobuf2/videobuf2-v4l2.c 	list_for_each_entry_safe(obj, obj_safe, &req->objects, list)
objects            48 drivers/media/mc/mc-request.c 	list_for_each_entry_safe(obj, obj_safe, &req->objects, list) {
objects           323 drivers/media/mc/mc-request.c 	INIT_LIST_HEAD(&req->objects);
objects           372 drivers/media/mc/mc-request.c 	list_for_each_entry(obj, &req->objects, list) {
objects           422 drivers/media/mc/mc-request.c 		list_add_tail(&obj->list, &req->objects);
objects           424 drivers/media/mc/mc-request.c 		list_add(&obj->list, &req->objects);
objects          2019 drivers/media/platform/vicodec/vicodec-core.c 	list_for_each_entry(obj, &req->objects, list) {
objects          1012 drivers/media/v4l2-core/v4l2-mem2mem.c 	list_for_each_entry_safe(obj, obj_safe, &req->objects, list) {
objects           163 drivers/staging/media/sunxi/cedrus/cedrus.c 	list_for_each_entry(obj, &req->objects, list) {
objects           125 include/linux/mm_types.h 					unsigned objects:15;
objects           178 include/linux/slub_def.h 		(page->objects - 1) * cache->size;
objects            69 include/media/media-request.h 	struct list_head objects;
objects          1025 include/net/netfilter/nf_tables.h 	struct list_head		objects;
objects          1300 lib/debugobjects.c 	HLIST_HEAD(objects);
objects          1307 lib/debugobjects.c 		hlist_add_head(&obj->node, &objects);
objects          1320 lib/debugobjects.c 	hlist_move_list(&objects, &obj_pool);
objects          1324 lib/debugobjects.c 		hlist_move_list(&db->list, &objects);
objects          1326 lib/debugobjects.c 		hlist_for_each_entry(obj, &objects, node) {
objects          1340 lib/debugobjects.c 	hlist_for_each_entry_safe(obj, tmp, &objects, node) {
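
lib/debugobjects.c batches entries on a local HLIST_HEAD(objects), detaches whole chains in one hlist_move_list() call, and walks them afterwards with the safe iterator so nodes can be unlinked in flight. A small illustration of that idiom; the payload struct and the lock are made up for the sketch.

#include <linux/list.h>
#include <linux/spinlock.h>

struct tracked {
	struct hlist_node node;
};

static HLIST_HEAD(shared_list);
static DEFINE_SPINLOCK(shared_lock);

static void drain_and_release(void (*release)(struct tracked *))
{
	HLIST_HEAD(objects);
	struct tracked *obj;
	struct hlist_node *tmp;

	/*
	 * hlist_move_list() hands the whole chain to the (empty) local head
	 * and leaves the source empty, so the lock is held only briefly.
	 */
	spin_lock(&shared_lock);
	hlist_move_list(&shared_list, &objects);
	spin_unlock(&shared_lock);

	/* Safe variant: entries may be unlinked while iterating. */
	hlist_for_each_entry_safe(obj, tmp, &objects, node) {
		hlist_del(&obj->node);
		release(obj);
	}
}
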
objects           306 lib/test_meminit.c 	void *objects[10];
objects           310 lib/test_meminit.c 		num = kmem_cache_alloc_bulk(c, GFP_KERNEL, ARRAY_SIZE(objects),
objects           311 lib/test_meminit.c 					    objects);
objects           313 lib/test_meminit.c 			bytes = count_nonzero_bytes(objects[i], size);
objects           316 lib/test_meminit.c 			fill_with_garbage(objects[i], size);
objects           320 lib/test_meminit.c 			kmem_cache_free_bulk(c, num, objects);
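
lib/test_meminit.c exercises the bulk slab allocator on a ten-entry objects array. A compact sketch of that alloc/free cycle, using an invented cache name and object size:

#include <linux/kernel.h>
#include <linux/errno.h>
#include <linux/slab.h>
#include <linux/string.h>

static int example_bulk_cycle(void)
{
	struct kmem_cache *c;
	void *objects[10];
	int num, i;

	c = kmem_cache_create("example_bulk_cache", 64, 0, 0, NULL);
	if (!c)
		return -ENOMEM;

	/* Returns the number of objects allocated (0 on failure). */
	num = kmem_cache_alloc_bulk(c, GFP_KERNEL, ARRAY_SIZE(objects), objects);
	for (i = 0; i < num; i++)
		memset(objects[i], 0, 64);	/* touch each object */
	if (num)
		kmem_cache_free_bulk(c, num, objects);

	kmem_cache_destroy(c);
	return 0;
}
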
objects           519 mm/slub.c      	if (object < base || object >= base + page->objects * s->size ||
objects           615 mm/slub.c      	       page, page->objects, page->inuse, page->freelist, page->flags);
objects           922 mm/slub.c      	if (page->objects > maxobj) {
objects           924 mm/slub.c      			page->objects, maxobj);
objects           927 mm/slub.c      	if (page->inuse > page->objects) {
objects           929 mm/slub.c      			page->inuse, page->objects);
objects           949 mm/slub.c      	while (fp && nr <= page->objects) {
objects           960 mm/slub.c      				page->inuse = page->objects;
objects           975 mm/slub.c      	if (page->objects != max_objects) {
objects           977 mm/slub.c      			 page->objects, max_objects);
objects           978 mm/slub.c      		page->objects = max_objects;
objects           981 mm/slub.c      	if (page->inuse != page->objects - nr) {
objects           983 mm/slub.c      			 page->inuse, page->objects - nr);
objects           984 mm/slub.c      		page->inuse = page->objects - nr;
objects          1043 mm/slub.c      static inline void inc_slabs_node(struct kmem_cache *s, int node, int objects)
objects          1055 mm/slub.c      		atomic_long_add(objects, &n->total_objects);
objects          1058 mm/slub.c      static inline void dec_slabs_node(struct kmem_cache *s, int node, int objects)
objects          1063 mm/slub.c      	atomic_long_sub(objects, &n->total_objects);
objects          1129 mm/slub.c      		page->inuse = page->objects;
objects          1378 mm/slub.c      							int objects) {}
objects          1380 mm/slub.c      							int objects) {}
objects          1578 mm/slub.c      	if (page->objects < 2 || !s->random_seq)
objects          1584 mm/slub.c      	page_limit = page->objects * s->size;
objects          1593 mm/slub.c      	for (idx = 1; idx < page->objects; idx++) {
objects          1654 mm/slub.c      	page->objects = oo_objects(oo);
objects          1673 mm/slub.c      		for (idx = 0, p = start; idx < page->objects - 1; idx++) {
objects          1682 mm/slub.c      	page->inuse = page->objects;
objects          1691 mm/slub.c      	inc_slabs_node(s, page_to_nid(page), page->objects);
objects          1720 mm/slub.c      						page->objects)
objects          1751 mm/slub.c      	dec_slabs_node(s, page_to_nid(page), page->objects);
objects          1791 mm/slub.c      		int mode, int *objects)
objects          1807 mm/slub.c      	*objects = new.objects - new.inuse;
objects          1809 mm/slub.c      		new.inuse = page->objects;
objects          1841 mm/slub.c      	int objects;
objects          1859 mm/slub.c      		t = acquire_slab(s, n, page, object == NULL, &objects);
objects          1863 mm/slub.c      		available += objects;
objects          2282 mm/slub.c      		pobjects += page->objects - page->inuse;
objects          2379 mm/slub.c      	return page->objects - page->inuse;
objects          2507 mm/slub.c      		new.inuse = page->objects;
objects          3413 mm/slub.c      	inc_slabs_node(kmem_cache_node, node, page->objects);
objects          3696 mm/slub.c      	unsigned long *map = bitmap_zalloc(page->objects, GFP_ATOMIC);
objects          3703 mm/slub.c      	for_each_object(p, s, addr, page->objects) {
objects          4017 mm/slub.c      			int free = page->objects - page->inuse;
objects          4025 mm/slub.c      			if (free == page->objects) {
objects          4400 mm/slub.c      	return page->objects;
objects          4416 mm/slub.c      	bitmap_zero(map, page->objects);
objects          4419 mm/slub.c      	for_each_object(p, s, addr, page->objects) {
objects          4425 mm/slub.c      	for_each_object(p, s, addr, page->objects)
objects          4622 mm/slub.c      	bitmap_zero(map, page->objects);
objects          4625 mm/slub.c      	for_each_object(p, s, addr, page->objects)
objects          4833 mm/slub.c      				x = page->objects;
objects          5018 mm/slub.c      	unsigned int objects;
objects          5021 mm/slub.c      	err = kstrtouint(buf, 10, &objects);
objects          5024 mm/slub.c      	if (objects && !kmem_cache_has_cpu_partial(s))
objects          5027 mm/slub.c      	slub_set_cpu_partial(s, objects);
objects          5063 mm/slub.c      SLAB_ATTR_RO(objects);
objects          5073 mm/slub.c      	int objects = 0;
objects          5085 mm/slub.c      			objects += page->pobjects;
objects          5089 mm/slub.c      	len = sprintf(buf, "%d(%d)", objects, pages);
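
In the mm/slub.c hits, page->objects is the 15-bit capacity field from include/linux/mm_types.h above and page->inuse counts the slots currently allocated, so the free count computed at several of these sites is simply their difference. A purely illustrative helper (not code from mm/slub.c):

#include <linux/mm_types.h>

static inline unsigned int slab_page_free_slots(struct page *page)
{
	/* total slots in the slab page minus those currently allocated */
	return page->objects - page->inuse;
}
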
objects           950 net/netfilter/nf_tables_api.c 	INIT_LIST_HEAD(&table->objects);
objects          1014 net/netfilter/nf_tables_api.c 	list_for_each_entry_safe(obj, ne, &ctx->table->objects, list) {
objects          5064 net/netfilter/nf_tables_api.c 	list_for_each_entry(obj, &table->objects, list) {
objects          5292 net/netfilter/nf_tables_api.c 	list_add_tail_rcu(&obj->list, &table->objects);
objects          5371 net/netfilter/nf_tables_api.c 		list_for_each_entry_rcu(obj, &table->objects, list) {
objects          7726 net/netfilter/nf_tables_api.c 		list_for_each_entry_safe(obj, ne, &table->objects, list) {
objects            36 tools/vm/slabinfo.c 	unsigned long partial, objects, slabs, objects_partial, objects_total;
objects           360 tools/vm/slabinfo.c 	return	slab_size(s) - s->objects * s->object_size;
objects           536 tools/vm/slabinfo.c 		s->name, s->aliases, s->order, s->objects);
objects           553 tools/vm/slabinfo.c 			onoff(s->red_zone), s->objects * s->object_size);
objects           556 tools/vm/slabinfo.c 			s->slabs * (page_size << s->order) - s->objects * s->object_size);
objects           559 tools/vm/slabinfo.c 			(s->slab_size - s->object_size) * s->objects);
objects           633 tools/vm/slabinfo.c 			s->name, s->objects,
objects           641 tools/vm/slabinfo.c 			s->name, s->objects, s->object_size, size_str, dist_str,
objects           644 tools/vm/slabinfo.c 			s->slabs ? (s->objects * s->object_size * 100) /
objects           704 tools/vm/slabinfo.c 	if (s->objects > 0)
objects           841 tools/vm/slabinfo.c 		if (!s->slabs || !s->objects)
objects           847 tools/vm/slabinfo.c 		used = s->objects * s->object_size;
objects           856 tools/vm/slabinfo.c 							/ s->objects;
objects           873 tools/vm/slabinfo.c 		if (s->objects < min_objects)
objects           874 tools/vm/slabinfo.c 			min_objects = s->objects;
objects           898 tools/vm/slabinfo.c 		if (s->objects > max_objects)
objects           899 tools/vm/slabinfo.c 			max_objects = s->objects;
objects           916 tools/vm/slabinfo.c 		total_objects += s->objects;
objects           922 tools/vm/slabinfo.c 		total_objwaste += s->objects * objwaste;
objects           923 tools/vm/slabinfo.c 		total_objsize += s->objects * s->slab_size;
objects          1221 tools/vm/slabinfo.c 			slab->objects = get_obj("objects");
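
tools/vm/slabinfo is a userspace tool; its get_obj("objects") call ultimately reads the per-cache objects attribute created by the SLAB_ATTR_RO(objects) line above. A standalone userspace sketch of that read, with simplified path handling and error policy:

#include <stdio.h>

static unsigned long read_slab_objects(const char *cache)
{
	char path[256];
	unsigned long objects = 0;
	FILE *f;

	snprintf(path, sizeof(path), "/sys/kernel/slab/%s/objects", cache);
	f = fopen(path, "r");
	if (!f)
		return 0;
	/* The attribute starts with the total count, e.g. "1200 N0=1200". */
	if (fscanf(f, "%lu", &objects) != 1)
		objects = 0;
	fclose(f);
	return objects;
}
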
objects           140 virt/kvm/arm/mmu.c 		cache->objects[cache->nobjs++] = page;
objects           148 virt/kvm/arm/mmu.c 		free_page((unsigned long)mc->objects[--mc->nobjs]);
objects           156 virt/kvm/arm/mmu.c 	p = mc->objects[--mc->nobjs];