Searched refs:object (Results 1 – 200 of 555) sorted by relevance

/linux-4.1.27/arch/parisc/math-emu/
float.h
61 #define Sall(object) (object) argument
62 #define Ssign(object) Bitfield_extract( 0, 1,object) argument
63 #define Ssignedsign(object) Bitfield_signed_extract( 0, 1,object) argument
64 #define Sexponent(object) Bitfield_extract( 1, 8,object) argument
65 #define Smantissa(object) Bitfield_mask( 9, 23,object) argument
66 #define Ssignaling(object) Bitfield_extract( 9, 1,object) argument
67 #define Ssignalingnan(object) Bitfield_extract( 1, 9,object) argument
68 #define Shigh2mantissa(object) Bitfield_extract( 9, 2,object) argument
69 #define Sexponentmantissa(object) Bitfield_mask( 1, 31,object) argument
70 #define Ssignexponent(object) Bitfield_extract( 0, 9,object) argument
[all …]
fpbits.h
53 #define Bitfield_extract(start, length, object) \ argument
54 ((object) >> (HOSTWDSZ - (start) - (length)) & \
57 #define Bitfield_signed_extract(start, length, object) \ argument
58 ((int)((object) << start) >> (HOSTWDSZ - (length)))
60 #define Bitfield_mask(start, len, object) \ argument
61 ((object) & (((unsigned)-1 >> (HOSTWDSZ-len)) << (HOSTWDSZ-start-len)))
63 #define Bitfield_deposit(value,start,len,object) object = \ argument
64 ((object) & ~(((unsigned)-1 >> (HOSTWDSZ-len)) << (HOSTWDSZ-start-len))) | \
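The fpbits.h macros above address bits big-endian style: bit 0 is the most significant bit of a HOSTWDSZ-bit word, and a field is named by its start bit and its length. A minimal user-space sketch of that extraction follows (assuming HOSTWDSZ is 32 and reusing the mask form visible in Bitfield_mask above; the sample bit pattern is illustrative, not from the kernel). It pulls out the sign, exponent and mantissa fields at the same positions the Ssign/Sexponent/Smantissa accessors from float.h use:

    #include <stdio.h>
    #include <stdint.h>

    #define HOSTWDSZ 32   /* assumed host word size, as in the kernel header */

    /* Same shape as the kernel macro: shift the field down to bit 0,
     * then mask off everything above 'length' bits. */
    #define Bitfield_extract(start, length, object) \
            (((object) >> (HOSTWDSZ - (start) - (length))) & \
             (((uint32_t)-1) >> (HOSTWDSZ - (length))))

    int main(void)
    {
            uint32_t bits = 0xC0490FDBu;   /* -pi as an IEEE-754 single */

            printf("sign     = %u\n",     Bitfield_extract(0, 1, bits));  /* 1   */
            printf("exponent = %u\n",     Bitfield_extract(1, 8, bits));  /* 128 */
            printf("mantissa = 0x%06X\n", Bitfield_extract(9, 23, bits)); /* 0x490FDB */
            return 0;
    }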
cnv_float.h
33 #define Dintp1(object) (object) argument
34 #define Dintp2(object) (object) argument
36 #define Duintp1(object) (object) argument
37 #define Duintp2(object) (object) argument
39 #define Qintp0(object) (object) argument
40 #define Qintp1(object) (object) argument
41 #define Qintp2(object) (object) argument
42 #define Qintp3(object) (object) argument
sgl_float.h
32 #define Sgl_sign(object) Ssign(object) argument
33 #define Sgl_exponent(object) Sexponent(object) argument
34 #define Sgl_signexponent(object) Ssignexponent(object) argument
35 #define Sgl_mantissa(object) Smantissa(object) argument
36 #define Sgl_exponentmantissa(object) Sexponentmantissa(object) argument
37 #define Sgl_all(object) Sall(object) argument
dbl_float.h
31 #define Dbl_sign(object) Dsign(object) argument
32 #define Dbl_exponent(object) Dexponent(object) argument
33 #define Dbl_signexponent(object) Dsignexponent(object) argument
34 #define Dbl_mantissap1(object) Dmantissap1(object) argument
35 #define Dbl_mantissap2(object) Dmantissap2(object) argument
36 #define Dbl_exponentmantissap1(object) Dexponentmantissap1(object) argument
37 #define Dbl_allp1(object) Dallp1(object) argument
38 #define Dbl_allp2(object) Dallp2(object) argument
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/core/
object.c
37 struct nvkm_object *object; in nvkm_object_create_() local
39 object = *pobject = kzalloc(size, GFP_KERNEL); in nvkm_object_create_()
40 if (!object) in nvkm_object_create_()
43 nvkm_object_ref(parent, &object->parent); in nvkm_object_create_()
44 nvkm_object_ref(engine, (struct nvkm_object **)&object->engine); in nvkm_object_create_()
45 object->oclass = oclass; in nvkm_object_create_()
46 object->oclass->handle |= pclass; in nvkm_object_create_()
47 atomic_set(&object->refcount, 1); in nvkm_object_create_()
48 atomic_set(&object->usecount, 0); in nvkm_object_create_()
51 object->_magic = NVKM_OBJECT_MAGIC; in nvkm_object_create_()
[all …]
ioctl.c
36 struct nvkm_object *object = handle->object; in nvkm_ioctl_nop() local
42 nv_ioctl(object, "nop size %d\n", size); in nvkm_ioctl_nop()
44 nv_ioctl(object, "nop\n"); in nvkm_ioctl_nop()
53 struct nvkm_object *object = handle->object; in nvkm_ioctl_sclass() local
59 if (!nv_iclass(object, NV_PARENT_CLASS)) { in nvkm_ioctl_sclass()
60 nv_debug(object, "cannot have children (sclass)\n"); in nvkm_ioctl_sclass()
64 nv_ioctl(object, "sclass size %d\n", size); in nvkm_ioctl_sclass()
66 nv_ioctl(object, "sclass vers %d count %d\n", in nvkm_ioctl_sclass()
69 ret = nvkm_parent_lclass(object, args->v0.oclass, in nvkm_ioctl_sclass()
89 struct nvkm_client *client = nvkm_client(handle->object); in nvkm_ioctl_new()
[all …]
subdev.c
31 struct nvkm_object *object = nv_object(obj); in nvkm_subdev() local
32 while (object && !nv_iclass(object, NV_SUBDEV_CLASS)) in nvkm_subdev()
33 object = object->parent; in nvkm_subdev()
34 if (object == NULL || nv_subidx(nv_subdev(object)) != idx) in nvkm_subdev()
35 object = nv_device(obj)->subdev[idx]; in nvkm_subdev()
36 return object ? nv_subdev(object) : NULL; in nvkm_subdev()
50 int ret = nvkm_object_init(&subdev->object); in nvkm_subdev_init()
54 nvkm_subdev_reset(&subdev->object); in nvkm_subdev_init()
59 _nvkm_subdev_init(struct nvkm_object *object) in _nvkm_subdev_init() argument
61 return nvkm_subdev_init(nv_subdev(object)); in _nvkm_subdev_init()
[all …]
printk.c
31 nv_printk_(struct nvkm_object *object, int level, const char *fmt, ...) in nv_printk_() argument
60 if (object && !nv_iclass(object, NV_CLIENT_CLASS)) { in nv_printk_()
65 if (object->engine == NULL) { in nv_printk_()
66 subdev = object; in nv_printk_()
70 subdev = &object->engine->subdev.object; in nv_printk_()
77 if (object != subdev) { in nv_printk_()
79 nv_hclass(object)); in nv_printk_()
90 if (object && nv_iclass(object, NV_CLIENT_CLASS)) { in nv_printk_()
91 if (level > nv_client(object)->debug) in nv_printk_()
95 name[level], nv_client(object)->name, fmt); in nv_printk_()
engctx.c
106 struct nvkm_engine *engine = engctx->gpuobj.object.engine; in nvkm_engctx_destroy()
121 nvkm_object_destroy(&engctx->gpuobj.object); in nvkm_engctx_destroy()
127 struct nvkm_object *object = nv_object(engctx); in nvkm_engctx_init() local
128 struct nvkm_subdev *subdev = nv_subdev(object->engine); in nvkm_engctx_init()
137 parent = nv_pclass(object->parent, NV_PARENT_CLASS); in nvkm_engctx_init()
141 ret = nv_parent(parent)->context_attach(parent, object); in nvkm_engctx_init()
158 struct nvkm_object *object = nv_object(engctx); in nvkm_engctx_fini() local
159 struct nvkm_subdev *subdev = nv_subdev(object->engine); in nvkm_engctx_fini()
164 parent = nv_pclass(object->parent, NV_PARENT_CLASS); in nvkm_engctx_fini()
168 ret = nv_parent(parent)->context_detach(parent, suspend, object); in nvkm_engctx_fini()
[all …]
namedb.c
47 if (nv_mclass(handle->object) == oclass) in nvkm_namedb_lookup_class()
60 if (nv_iclass(handle->object, NV_GPUOBJ_CLASS)) { in nvkm_namedb_lookup_vinst()
61 if (nv_gpuobj(handle->object)->addr == vinst) in nvkm_namedb_lookup_vinst()
75 if (nv_iclass(handle->object, NV_GPUOBJ_CLASS)) { in nvkm_namedb_lookup_cinst()
76 if (nv_gpuobj(handle->object)->node && in nvkm_namedb_lookup_cinst()
77 nv_gpuobj(handle->object)->node->offset == cinst) in nvkm_namedb_lookup_cinst()
87 struct nvkm_object *object, in nvkm_namedb_insert() argument
93 nvkm_object_ref(object, &handle->object); in nvkm_namedb_insert()
106 struct nvkm_object *object = handle->object; in nvkm_namedb_remove() local
110 nvkm_object_ref(NULL, &object); in nvkm_namedb_remove()
[all …]
gpuobj.c
47 nvkm_object_destroy(&gpuobj->object); in nvkm_gpuobj_destroy()
148 struct nvkm_gpuobj *object; in _nvkm_gpuobj_ctor() local
153 &object); in _nvkm_gpuobj_ctor()
154 *pobject = nv_object(object); in _nvkm_gpuobj_ctor()
162 _nvkm_gpuobj_dtor(struct nvkm_object *object) in _nvkm_gpuobj_dtor() argument
164 nvkm_gpuobj_destroy(nv_gpuobj(object)); in _nvkm_gpuobj_dtor()
168 _nvkm_gpuobj_init(struct nvkm_object *object) in _nvkm_gpuobj_init() argument
170 return nvkm_gpuobj_init(nv_gpuobj(object)); in _nvkm_gpuobj_init()
174 _nvkm_gpuobj_fini(struct nvkm_object *object, bool suspend) in _nvkm_gpuobj_fini() argument
176 return nvkm_gpuobj_fini(nv_gpuobj(object), suspend); in _nvkm_gpuobj_fini()
[all …]
handle.c
28 struct nvkm_client *c = nvkm_client((h)->object); \
40 ret = nvkm_object_inc(handle->object); in nvkm_handle_init()
59 nvkm_object_dec(handle->object, false); in nvkm_handle_init()
78 if (handle->object) { in nvkm_handle_fini()
79 ret = nvkm_object_dec(handle->object, suspend); in nvkm_handle_fini()
99 struct nvkm_object *object, struct nvkm_handle **phandle) in nvkm_handle_create() argument
118 ret = nvkm_namedb_insert(nv_namedb(namedb), _handle, object, handle); in nvkm_handle_create()
125 ret = nv_parent(parent)->object_attach(parent, object, _handle); in nvkm_handle_create()
134 if (object != namedb) { in nvkm_handle_create()
162 struct nvkm_object *parent = handle->parent->object; in nvkm_handle_destroy()
[all …]
parent.c
41 *pengine = &parent->engine->subdev.object; in nvkm_parent_sclass()
115 struct nvkm_parent *object; in nvkm_parent_create_() local
121 object = *pobject; in nvkm_parent_create_()
130 nclass->sclass = object->sclass; in nvkm_parent_create_()
131 object->sclass = nclass; in nvkm_parent_create_()
137 object->engine = engcls; in nvkm_parent_create_()
151 nvkm_object_destroy(&parent->object); in nvkm_parent_destroy()
156 _nvkm_parent_dtor(struct nvkm_object *object) in _nvkm_parent_dtor() argument
158 nvkm_parent_destroy(nv_parent(object)); in _nvkm_parent_dtor()
client.c
91 nvkm_client_notify_new(struct nvkm_object *object, in nvkm_client_notify_new() argument
94 struct nvkm_client *client = nvkm_client(object); in nvkm_client_notify_new()
128 ret = nvkm_notify_init(object, event, nvkm_client_notify, in nvkm_client_notify_new()
142 nvkm_client_mthd_devlist(struct nvkm_object *object, void *data, u32 size) in nvkm_client_mthd_devlist() argument
149 nv_ioctl(object, "client devlist size %d\n", size); in nvkm_client_mthd_devlist()
151 nv_ioctl(object, "client devlist vers %d count %d\n", in nvkm_client_mthd_devlist()
168 nvkm_client_mthd(struct nvkm_object *object, u32 mthd, void *data, u32 size) in nvkm_client_mthd() argument
172 return nvkm_client_mthd_devlist(object, data, size); in nvkm_client_mthd()
180 nvkm_client_dtor(struct nvkm_object *object) in nvkm_client_dtor() argument
182 struct nvkm_client *client = (void *)object; in nvkm_client_dtor()
/linux-4.1.27/fs/fscache/
object.c
149 static inline void fscache_done_parent_op(struct fscache_object *object) in fscache_done_parent_op() argument
151 struct fscache_object *parent = object->parent; in fscache_done_parent_op()
154 object->debug_id, parent->debug_id, parent->n_ops); in fscache_done_parent_op()
167 static void fscache_object_sm_dispatcher(struct fscache_object *object) in fscache_object_sm_dispatcher() argument
174 ASSERT(object != NULL); in fscache_object_sm_dispatcher()
177 object->debug_id, object->state->name, object->events); in fscache_object_sm_dispatcher()
179 event_mask = object->event_mask; in fscache_object_sm_dispatcher()
181 object->event_mask = 0; /* Mask normal event handling */ in fscache_object_sm_dispatcher()
182 state = object->state; in fscache_object_sm_dispatcher()
184 events = object->events; in fscache_object_sm_dispatcher()
[all …]
operation.c
34 op->object->debug_id, op->debug_id, atomic_read(&op->usage)); in fscache_enqueue_operation()
38 ASSERT(fscache_object_is_available(op->object)); in fscache_enqueue_operation()
64 static void fscache_run_op(struct fscache_object *object, in fscache_run_op() argument
70 object->n_in_progress++; in fscache_run_op()
83 int fscache_submit_exclusive_op(struct fscache_object *object, in fscache_submit_exclusive_op() argument
88 _enter("{OBJ%x OP%x},", object->debug_id, op->debug_id); in fscache_submit_exclusive_op()
93 spin_lock(&object->lock); in fscache_submit_exclusive_op()
94 ASSERTCMP(object->n_ops, >=, object->n_in_progress); in fscache_submit_exclusive_op()
95 ASSERTCMP(object->n_ops, >=, object->n_exclusive); in fscache_submit_exclusive_op()
99 if (fscache_object_is_active(object)) { in fscache_submit_exclusive_op()
[all …]
page.c
143 static void fscache_end_page_write(struct fscache_object *object, in fscache_end_page_write() argument
149 spin_lock(&object->lock); in fscache_end_page_write()
150 cookie = object->cookie; in fscache_end_page_write()
165 spin_unlock(&object->lock); in fscache_end_page_write()
175 struct fscache_object *object = op->object; in fscache_attr_changed_op() local
178 _enter("{OBJ%x OP%x}", object->debug_id, op->debug_id); in fscache_attr_changed_op()
182 if (fscache_object_is_active(object)) { in fscache_attr_changed_op()
184 ret = object->cache->ops->attr_changed(object); in fscache_attr_changed_op()
187 fscache_abort_object(object); in fscache_attr_changed_op()
200 struct fscache_object *object; in __fscache_attr_changed() local
[all …]
cookie.c
28 struct fscache_object *object);
193 struct fscache_object *object; in fscache_acquire_non_index_cookie() local
243 object = hlist_entry(cookie->backing_objects.first, in fscache_acquire_non_index_cookie()
246 fscache_set_store_limit(object, i_size); in fscache_acquire_non_index_cookie()
250 fscache_raise_event(object, FSCACHE_OBJECT_EV_NEW_CHILD); in fscache_acquire_non_index_cookie()
281 struct fscache_object *object; in fscache_alloc_object() local
287 hlist_for_each_entry(object, &cookie->backing_objects, in fscache_alloc_object()
289 if (object->cache == cache) in fscache_alloc_object()
297 object = cache->ops->alloc_object(cache, cookie); in fscache_alloc_object()
299 if (IS_ERR(object)) { in fscache_alloc_object()
[all …]
cache.c
98 struct fscache_object *object; in fscache_select_cache_for_object() local
114 object = hlist_entry(cookie->backing_objects.first, in fscache_select_cache_for_object()
117 cache = object->cache; in fscache_select_cache_for_object()
118 if (fscache_object_is_dying(object) || in fscache_select_cache_for_object()
332 struct fscache_object *object; in fscache_withdraw_all_objects() local
338 object = list_entry(cache->object_list.next, in fscache_withdraw_all_objects()
340 list_move_tail(&object->cache_link, dying_objects); in fscache_withdraw_all_objects()
342 _debug("withdraw %p", object->cookie); in fscache_withdraw_all_objects()
347 fscache_raise_event(object, FSCACHE_OBJECT_EV_KILL); in fscache_withdraw_all_objects()
internal.h
116 #define fscache_objlist_add(object) do {} while(0) argument
117 #define fscache_objlist_remove(object) do {} while(0) argument
299 static inline void fscache_raise_event(struct fscache_object *object, in fscache_raise_event() argument
305 object->debug_id, object->event_mask, (1 << event)); in fscache_raise_event()
307 if (!test_and_set_bit(event, &object->events) && in fscache_raise_event()
308 test_bit(event, &object->event_mask)) in fscache_raise_event()
309 fscache_enqueue_object(object); in fscache_raise_event()
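fscache_raise_event() above records an event with test_and_set_bit() and queues the object only when the bit was newly set and not filtered out by object->event_mask, so re-raising an already-pending event never queues the object twice. A small user-space sketch of that pattern follows; the struct, the helper and the event numbers are toy stand-ins, not the fscache API:

    #include <stdio.h>
    #include <stdbool.h>

    enum { EV_NEW_CHILD, EV_KILL };            /* illustrative event numbers */

    struct toy_object {
            unsigned long events;              /* pending events                */
            unsigned long event_mask;          /* events the object listens for */
            bool          queued;              /* stands in for the workqueue   */
    };

    /* Non-atomic stand-in for the kernel's test_and_set_bit(). */
    static bool test_and_set_bit(int nr, unsigned long *word)
    {
            bool was_set = (*word >> nr) & 1;
            *word |= 1UL << nr;
            return was_set;
    }

    static void toy_raise_event(struct toy_object *object, int event)
    {
            if (!test_and_set_bit(event, &object->events) &&
                (object->event_mask & (1UL << event)))
                    object->queued = true;     /* fscache_enqueue_object() in the real code */
    }

    int main(void)
    {
            struct toy_object object = { .event_mask = 1UL << EV_KILL };

            toy_raise_event(&object, EV_NEW_CHILD); /* masked: recorded, not queued */
            toy_raise_event(&object, EV_KILL);      /* unmasked: queued             */
            toy_raise_event(&object, EV_KILL);      /* already pending: no re-queue */
            printf("events=%#lx queued=%d\n", object.events, object.queued);
            return 0;
    }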
Makefile
11 object.o \
18 fscache-$(CONFIG_FSCACHE_OBJECT_LIST) += object-list.o
/linux-4.1.27/drivers/acpi/acpica/
utdelete.c
54 static void acpi_ut_delete_internal_obj(union acpi_operand_object *object);
57 acpi_ut_update_ref_count(union acpi_operand_object *object, u32 action);
72 static void acpi_ut_delete_internal_obj(union acpi_operand_object *object) in acpi_ut_delete_internal_obj() argument
81 ACPI_FUNCTION_TRACE_PTR(ut_delete_internal_obj, object); in acpi_ut_delete_internal_obj()
83 if (!object) { in acpi_ut_delete_internal_obj()
91 switch (object->common.type) { in acpi_ut_delete_internal_obj()
95 "**** String %p, ptr %p\n", object, in acpi_ut_delete_internal_obj()
96 object->string.pointer)); in acpi_ut_delete_internal_obj()
100 if (!(object->common.flags & AOPOBJ_STATIC_POINTER)) { in acpi_ut_delete_internal_obj()
104 obj_pointer = object->string.pointer; in acpi_ut_delete_internal_obj()
[all …]
nsobject.c
74 union acpi_operand_object *object, acpi_object_type type) in acpi_ns_attach_object() argument
93 if (!object && (ACPI_TYPE_ANY != type)) { in acpi_ns_attach_object()
113 if (node->object == object) { in acpi_ns_attach_object()
116 object, node)); in acpi_ns_attach_object()
123 if (!object) { in acpi_ns_attach_object()
132 else if ((ACPI_GET_DESCRIPTOR_TYPE(object) == ACPI_DESC_TYPE_NAMED) && in acpi_ns_attach_object()
133 ((struct acpi_namespace_node *)object)->object) { in acpi_ns_attach_object()
138 obj_desc = ((struct acpi_namespace_node *)object)->object; in acpi_ns_attach_object()
139 object_type = ((struct acpi_namespace_node *)object)->type; in acpi_ns_attach_object()
147 obj_desc = (union acpi_operand_object *)object; in acpi_ns_attach_object()
[all …]
dsmthdat.c
61 union acpi_operand_object *object,
146 if (walk_state->local_variables[index].object) { in acpi_ds_method_data_delete_all()
150 object)); in acpi_ds_method_data_delete_all()
162 if (walk_state->arguments[index].object) { in acpi_ds_method_data_delete_all()
165 walk_state->arguments[index].object)); in acpi_ds_method_data_delete_all()
316 union acpi_operand_object *object, in acpi_ds_method_data_set_value() argument
325 "NewObj %p Type %2.2X, Refs=%u [%s]\n", object, in acpi_ds_method_data_set_value()
326 type, object->common.reference_count, in acpi_ds_method_data_set_value()
327 acpi_ut_get_type_name(object->common.type))); in acpi_ds_method_data_set_value()
342 acpi_ut_add_reference(object); in acpi_ds_method_data_set_value()
[all …]
utdecode.c
255 char *acpi_ut_get_node_name(void *object) in acpi_ut_get_node_name() argument
257 struct acpi_namespace_node *node = (struct acpi_namespace_node *)object; in acpi_ut_get_node_name()
261 if (!object) { in acpi_ut_get_node_name()
267 if ((object == ACPI_ROOT_OBJECT) || (object == acpi_gbl_root_node)) { in acpi_ut_get_node_name()
321 char *acpi_ut_get_descriptor_name(void *object) in acpi_ut_get_descriptor_name() argument
324 if (!object) { in acpi_ut_get_descriptor_name()
328 if (ACPI_GET_DESCRIPTOR_TYPE(object) > ACPI_DESC_TYPE_MAX) { in acpi_ut_get_descriptor_name()
334 (object)])); in acpi_ut_get_descriptor_name()
362 const char *acpi_ut_get_reference_name(union acpi_operand_object *object) in acpi_ut_get_reference_name() argument
365 if (!object) { in acpi_ut_get_reference_name()
[all …]
utobject.c
93 union acpi_operand_object *object; in acpi_ut_create_internal_object_dbg() local
101 object = in acpi_ut_create_internal_object_dbg()
104 if (!object) { in acpi_ut_create_internal_object_dbg()
119 acpi_ut_delete_object_desc(object); in acpi_ut_create_internal_object_dbg()
128 object->common.next_object = second_object; in acpi_ut_create_internal_object_dbg()
139 object->common.type = (u8) type; in acpi_ut_create_internal_object_dbg()
143 object->common.reference_count = 1; in acpi_ut_create_internal_object_dbg()
147 return_PTR(object); in acpi_ut_create_internal_object_dbg()
335 u8 acpi_ut_valid_internal_object(void *object) in acpi_ut_valid_internal_object() argument
342 if (!object) { in acpi_ut_valid_internal_object()
[all …]
utcache.c
188 acpi_os_release_object(struct acpi_memory_list * cache, void *object) in acpi_os_release_object() argument
194 if (!cache || !object) { in acpi_os_release_object()
201 ACPI_FREE(object); in acpi_os_release_object()
215 ACPI_MEMSET(object, 0xCA, cache->object_size); in acpi_os_release_object()
216 ACPI_SET_DESCRIPTOR_TYPE(object, ACPI_DESC_TYPE_CACHED); in acpi_os_release_object()
220 ACPI_SET_DESCRIPTOR_PTR(object, cache->list_head); in acpi_os_release_object()
221 cache->list_head = object; in acpi_os_release_object()
246 void *object; in acpi_os_acquire_object() local
267 object = cache->list_head; in acpi_os_acquire_object()
268 cache->list_head = ACPI_GET_DESCRIPTOR_PTR(object); in acpi_os_acquire_object()
[all …]
dswstate.c
72 acpi_ds_result_pop(union acpi_operand_object **object, in acpi_ds_result_pop() argument
108 *object = state->results.obj_desc[index]; in acpi_ds_result_pop()
109 if (!*object) { in acpi_ds_result_pop()
125 "Obj=%p [%s] Index=%X State=%p Num=%X\n", *object, in acpi_ds_result_pop()
126 acpi_ut_get_object_type_name(*object), in acpi_ds_result_pop()
146 acpi_ds_result_push(union acpi_operand_object * object, in acpi_ds_result_push() argument
181 if (!object) { in acpi_ds_result_push()
184 object, walk_state, walk_state->result_count)); in acpi_ds_result_push()
191 state->results.obj_desc[index] = object; in acpi_ds_result_push()
195 object, in acpi_ds_result_push()
[all …]
exdebug.c
242 node)->object, in acpi_ex_do_debug_object()
247 } else if (source_desc->reference.object) { in acpi_ex_do_debug_object()
249 (source_desc->reference.object) == in acpi_ex_do_debug_object()
254 object)->object, in acpi_ex_do_debug_object()
258 object, level + 4, 0); in acpi_ex_do_debug_object()
acutils.h
183 char *acpi_ut_get_node_name(void *object);
185 char *acpi_ut_get_descriptor_name(void *object);
187 const char *acpi_ut_get_reference_name(union acpi_operand_object *object);
303 acpi_ut_update_object_reference(union acpi_operand_object *object, u16 action);
374 void acpi_ut_add_reference(union acpi_operand_object *object);
376 void acpi_ut_remove_reference(union acpi_operand_object *object);
378 void acpi_ut_delete_internal_package_object(union acpi_operand_object *object);
380 void acpi_ut_delete_internal_simple_object(union acpi_operand_object *object);
464 void acpi_ut_delete_object_desc(union acpi_operand_object *object);
466 u8 acpi_ut_valid_internal_object(void *object);
[all …]
utstate.c
193 *object, u16 action) in acpi_ut_create_update_state()
209 state->update.object = object; in acpi_ut_create_update_state()
exdump.c
232 {ACPI_EXD_POINTER, ACPI_EXD_OFFSET(reference.object), "Object Desc"},
306 {ACPI_EXD_LIST, ACPI_EXD_NSOFFSET(object), "Object List"},
667 acpi_os_printf("%p\n", obj_desc->reference.object); in acpi_ex_dump_operand()
678 acpi_os_printf("%p [%s]\n", obj_desc->reference.object, in acpi_ex_dump_operand()
684 object)->common. in acpi_ex_dump_operand()
1005 } else if (obj_desc->reference.object) { in acpi_ex_dump_reference_obj()
1009 obj_desc->reference.object); in acpi_ex_dump_reference_obj()
1015 obj_desc->reference.object, in acpi_ex_dump_reference_obj()
1021 object)-> in acpi_ex_dump_reference_obj()
1027 obj_desc->reference.object); in acpi_ex_dump_reference_obj()
[all …]
exstore.c
162 object, walk_state, in acpi_ex_store()
280 index_desc->reference.object)->common. in acpi_ex_store_object_to_index()
291 index_desc->reference.object)->common. in acpi_ex_store_object_to_index()
313 obj_desc = index_desc->reference.object; in acpi_ex_store_object_to_index()
utmisc.c
192 acpi_ut_create_update_state_and_push(union acpi_operand_object *object, in acpi_ut_create_update_state_and_push() argument
202 if (!object) { in acpi_ut_create_update_state_and_push()
206 state = acpi_ut_create_update_state(object, action); in acpi_ut_create_update_state_and_push()
dsobject.c
186 op->common.node->object); in acpi_ds_build_internal_object()
199 acpi_ut_add_reference(op->common.node->object); in acpi_ds_build_internal_object()
786 object)); in acpi_ds_init_object_from_op()
807 object)); in acpi_ds_init_object_from_op()
819 obj_desc->reference.object = in acpi_ds_init_object_from_op()
820 op->common.node->object; in acpi_ds_init_object_from_op()
/linux-4.1.27/mm/
kmemleak.c
295 struct kmemleak_object *object) in hex_dump_object() argument
297 const u8 *ptr = (const u8 *)object->pointer; in hex_dump_object()
303 min(object->size, (size_t)(HEX_MAX_LINES * HEX_ROW_SIZE)); in hex_dump_object()
327 static bool color_white(const struct kmemleak_object *object) in color_white() argument
329 return object->count != KMEMLEAK_BLACK && in color_white()
330 object->count < object->min_count; in color_white()
333 static bool color_gray(const struct kmemleak_object *object) in color_gray() argument
335 return object->min_count != KMEMLEAK_BLACK && in color_gray()
336 object->count >= object->min_count; in color_gray()
344 static bool unreferenced_object(struct kmemleak_object *object) in unreferenced_object() argument
[all …]
slub.c
229 struct page *page, const void *object) in check_valid_pointer() argument
233 if (!object) in check_valid_pointer()
237 if (object < base || object >= base + page->objects * s->size || in check_valid_pointer()
238 (object - base) % s->size) { in check_valid_pointer()
245 static inline void *get_freepointer(struct kmem_cache *s, void *object) in get_freepointer() argument
247 return *(void **)(object + s->offset); in get_freepointer()
250 static void prefetch_freepointer(const struct kmem_cache *s, void *object) in prefetch_freepointer() argument
252 prefetch(object + s->offset); in prefetch_freepointer()
255 static inline void *get_freepointer_safe(struct kmem_cache *s, void *object) in get_freepointer_safe() argument
260 probe_kernel_read(&p, (void **)(object + s->offset), sizeof(p)); in get_freepointer_safe()
[all …]
kmemcheck.c
60 void kmemcheck_slab_alloc(struct kmem_cache *s, gfp_t gfpflags, void *object, in kmemcheck_slab_alloc() argument
83 kmemcheck_mark_initialized(object, size); in kmemcheck_slab_alloc()
89 kmemcheck_mark_uninitialized(object, size); in kmemcheck_slab_alloc()
93 void kmemcheck_slab_free(struct kmem_cache *s, void *object, size_t size) in kmemcheck_slab_free() argument
97 kmemcheck_mark_freed(object, size); in kmemcheck_slab_free()
/linux-4.1.27/drivers/gpu/drm/nouveau/nvif/
object.c
31 nvif_object_ioctl(struct nvif_object *object, void *data, u32 size, void **hack) in nvif_object_ioctl() argument
33 struct nvif_client *client = nvif_client(object); in nvif_object_ioctl()
42 args->v0.path[args->v0.path_nr++] = object->handle; in nvif_object_ioctl()
43 if (object->parent == object) in nvif_object_ioctl()
45 object = object->parent; in nvif_object_ioctl()
54 nvif_object_sclass(struct nvif_object *object, u32 *oclass, int count) in nvif_object_sclass() argument
71 ret = nvif_object_ioctl(object, args, sizeof(*args) + size, NULL); in nvif_object_sclass()
79 nvif_object_rd(struct nvif_object *object, int size, u64 addr) in nvif_object_rd() argument
89 int ret = nvif_object_ioctl(object, &args, sizeof(args), NULL); in nvif_object_rd()
98 nvif_object_wr(struct nvif_object *object, int size, u64 addr, u32 data) in nvif_object_wr() argument
[all …]
notify.c
35 struct nvif_object *object = notify->object; in nvif_notify_put_() local
47 return nvif_object_ioctl(object, &args, sizeof(args), NULL); in nvif_notify_put_()
53 if (likely(notify->object) && in nvif_notify_put()
66 struct nvif_object *object = notify->object; in nvif_notify_get_() local
78 return nvif_object_ioctl(object, &args, sizeof(args), NULL); in nvif_notify_get_()
84 if (likely(notify->object) && in nvif_notify_get()
127 struct nvif_client *client = nvif_client(notify->object); in nvif_notify()
147 struct nvif_object *object = notify->object; in nvif_notify_fini() local
156 if (ret >= 0 && object) { in nvif_notify_fini()
157 ret = nvif_object_ioctl(object, &args, sizeof(args), NULL); in nvif_notify_fini()
[all …]
/linux-4.1.27/fs/cachefiles/
interface.c
31 struct cachefiles_object *object; in cachefiles_alloc_object() local
47 object = kmem_cache_alloc(cachefiles_object_jar, cachefiles_gfp); in cachefiles_alloc_object()
48 if (!object) in cachefiles_alloc_object()
51 ASSERTCMP(object->backer, ==, NULL); in cachefiles_alloc_object()
53 BUG_ON(test_bit(CACHEFILES_OBJECT_ACTIVE, &object->flags)); in cachefiles_alloc_object()
54 atomic_set(&object->usage, 1); in cachefiles_alloc_object()
56 fscache_object_init(&object->fscache, cookie, &cache->cache); in cachefiles_alloc_object()
58 object->type = cookie->def->type; in cachefiles_alloc_object()
77 key = cachefiles_cook_key(buffer, keylen + 2, object->type); in cachefiles_alloc_object()
95 object->lookup_data = lookup_data; in cachefiles_alloc_object()
[all …]
namei.c
31 void __cachefiles_printk_object(struct cachefiles_object *object, in __cachefiles_printk_object() argument
38 pr_err("%sobject: OBJ%x\n", prefix, object->fscache.debug_id); in __cachefiles_printk_object()
40 prefix, object->fscache.state->name, in __cachefiles_printk_object()
41 object->fscache.flags, work_busy(&object->fscache.work), in __cachefiles_printk_object()
42 object->fscache.events, object->fscache.event_mask); in __cachefiles_printk_object()
44 prefix, object->fscache.n_ops, object->fscache.n_in_progress, in __cachefiles_printk_object()
45 object->fscache.n_exclusive); in __cachefiles_printk_object()
47 prefix, object->fscache.parent); in __cachefiles_printk_object()
49 spin_lock(&object->fscache.lock); in __cachefiles_printk_object()
50 cookie = object->fscache.cookie; in __cachefiles_printk_object()
[all …]
xattr.c
29 int cachefiles_check_object_type(struct cachefiles_object *object) in cachefiles_check_object_type() argument
31 struct dentry *dentry = object->dentry; in cachefiles_check_object_type()
38 if (!object->fscache.cookie) in cachefiles_check_object_type()
41 snprintf(type, 3, "%02x", object->fscache.cookie->def->type); in cachefiles_check_object_type()
43 _enter("%p{%s}", object, type); in cachefiles_check_object_type()
103 int cachefiles_set_object_xattr(struct cachefiles_object *object, in cachefiles_set_object_xattr() argument
106 struct dentry *dentry = object->dentry; in cachefiles_set_object_xattr()
111 _enter("%p,#%d", object, auxdata->len); in cachefiles_set_object_xattr()
121 object, in cachefiles_set_object_xattr()
131 int cachefiles_update_object_xattr(struct cachefiles_object *object, in cachefiles_update_object_xattr() argument
[all …]
rdwr.c
29 struct cachefiles_object *object; in cachefiles_read_waiter() local
56 object = container_of(monitor->op->op.object, in cachefiles_read_waiter()
59 spin_lock(&object->work_lock); in cachefiles_read_waiter()
61 spin_unlock(&object->work_lock); in cachefiles_read_waiter()
74 static int cachefiles_read_reissue(struct cachefiles_object *object, in cachefiles_read_reissue() argument
77 struct address_space *bmapping = d_backing_inode(object->backer)->i_mapping; in cachefiles_read_reissue()
82 d_backing_inode(object->backer)->i_ino, in cachefiles_read_reissue()
138 spin_lock_irq(&object->work_lock); in cachefiles_read_reissue()
140 spin_unlock_irq(&object->work_lock); in cachefiles_read_reissue()
152 struct cachefiles_object *object; in cachefiles_read_copier() local
[all …]
internal.h
110 struct cachefiles_object *object; member
162 struct cachefiles_object *object);
164 struct cachefiles_object *object,
240 extern int cachefiles_check_object_type(struct cachefiles_object *object);
241 extern int cachefiles_set_object_xattr(struct cachefiles_object *object,
243 extern int cachefiles_update_object_xattr(struct cachefiles_object *object,
245 extern int cachefiles_check_auxdata(struct cachefiles_object *object);
246 extern int cachefiles_check_object_xattr(struct cachefiles_object *object,
263 #define cachefiles_io_error_obj(object, FMT, ...) \ argument
267 ___cache = container_of((object)->fscache.cache, \
main.c
45 struct cachefiles_object *object = _object; in cachefiles_object_init_once() local
47 memset(object, 0, sizeof(*object)); in cachefiles_object_init_once()
48 spin_lock_init(&object->work_lock); in cachefiles_object_init_once()
/linux-4.1.27/sound/pci/asihpi/
hpimsginit.c
37 static void hpi_init_message(struct hpi_message *phm, u16 object, in hpi_init_message() argument
42 if ((object > 0) && (object <= HPI_OBJ_MAXINDEX)) in hpi_init_message()
43 size = msg_size[object]; in hpi_init_message()
54 phm->object = object; in hpi_init_message()
64 void hpi_init_response(struct hpi_response *phr, u16 object, u16 function, in hpi_init_response() argument
69 if ((object > 0) && (object <= HPI_OBJ_MAXINDEX)) in hpi_init_response()
70 size = res_size[object]; in hpi_init_response()
77 phr->object = object; in hpi_init_response()
85 struct hpi_response *phr, u16 object, u16 function) in hpi_init_message_response() argument
87 hpi_init_message(phm, object, function); in hpi_init_message_response()
[all …]
hpimsginit.h
33 void hpi_init_response(struct hpi_response *phr, u16 object, u16 function,
37 struct hpi_response *phr, u16 object, u16 function);
40 u16 object, u16 function);
43 struct hpi_response_header *phr, u16 res_size, u16 object,
/linux-4.1.27/arch/powerpc/boot/
wrapper
52 object=arch/powerpc/boot
53 objbin=$object
104 object="$1"
127 if [ ! -r "$dts" -a -r "$object/dts/$dts" ]; then
128 dts="$object/dts/$dts"
148 platformo=$object/"$platform".o
149 lds=$object/zImage.lds
160 platformo="$object/of.o $object/epapr.o"
164 platformo="$object/pseries-head.o $object/of.o $object/epapr.o"
173 platformo="$object/of.o $object/epapr.o"
[all …]
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/engine/gr/
Dnv04.c447 nv04_gr_set_ctx1(struct nvkm_object *object, u32 mask, u32 value) in nv04_gr_set_ctx1() argument
449 struct nv04_gr_priv *priv = (void *)object->engine; in nv04_gr_set_ctx1()
453 tmp = nv_ro32(object, 0x00); in nv04_gr_set_ctx1()
456 nv_wo32(object, 0x00, tmp); in nv04_gr_set_ctx1()
463 nv04_gr_set_ctx_val(struct nvkm_object *object, u32 mask, u32 value) in nv04_gr_set_ctx_val() argument
468 ctx1 = nv_ro32(object, 0x00); in nv04_gr_set_ctx_val()
472 tmp = nv_ro32(object, 0x0c); in nv04_gr_set_ctx_val()
475 nv_wo32(object, 0x0c, tmp); in nv04_gr_set_ctx_val()
507 nv04_gr_set_ctx1(object, 0x01000000, valid << 24); in nv04_gr_set_ctx_val()
511 nv04_gr_mthd_set_operation(struct nvkm_object *object, u32 mthd, in nv04_gr_mthd_set_operation() argument
[all …]
nv20.c
103 nv20_gr_context_init(struct nvkm_object *object) in nv20_gr_context_init() argument
105 struct nv20_gr_priv *priv = (void *)object->engine; in nv20_gr_context_init()
106 struct nv20_gr_chan *chan = (void *)object; in nv20_gr_context_init()
118 nv20_gr_context_fini(struct nvkm_object *object, bool suspend) in nv20_gr_context_fini() argument
120 struct nv20_gr_priv *priv = (void *)object->engine; in nv20_gr_context_fini()
121 struct nv20_gr_chan *chan = (void *)object; in nv20_gr_context_fini()
210 if (handle && !nv_call(handle->object, mthd, data)) in nv20_gr_intr()
263 nv20_gr_dtor(struct nvkm_object *object) in nv20_gr_dtor() argument
265 struct nv20_gr_priv *priv = (void *)object; in nv20_gr_dtor()
271 nv20_gr_init(struct nvkm_object *object) in nv20_gr_init() argument
[all …]
nv10.c
478 nv17_gr_mthd_lma_window(struct nvkm_object *object, u32 mthd, in nv17_gr_mthd_lma_window() argument
481 struct nv10_gr_chan *chan = (void *)object->parent; in nv17_gr_mthd_lma_window()
555 nv17_gr_mthd_lma_enable(struct nvkm_object *object, u32 mthd, in nv17_gr_mthd_lma_enable() argument
558 struct nv10_gr_chan *chan = (void *)object->parent; in nv17_gr_mthd_lma_enable()
1074 nv10_gr_context_dtor(struct nvkm_object *object) in nv10_gr_context_dtor() argument
1076 struct nv10_gr_priv *priv = (void *)object->engine; in nv10_gr_context_dtor()
1077 struct nv10_gr_chan *chan = (void *)object; in nv10_gr_context_dtor()
1088 nv10_gr_context_fini(struct nvkm_object *object, bool suspend) in nv10_gr_context_fini() argument
1090 struct nv10_gr_priv *priv = (void *)object->engine; in nv10_gr_context_fini()
1091 struct nv10_gr_chan *chan = (void *)object; in nv10_gr_context_fini()
[all …]
nv40.c
156 nv40_gr_context_fini(struct nvkm_object *object, bool suspend) in nv40_gr_context_fini() argument
158 struct nv40_gr_priv *priv = (void *)object->engine; in nv40_gr_context_fini()
159 struct nv40_gr_chan *chan = (void *)object; in nv40_gr_context_fini()
309 if (handle && !nv_call(handle->object, mthd, data)) in nv40_gr_intr()
366 nv40_gr_init(struct nvkm_object *object) in nv40_gr_init() argument
368 struct nvkm_engine *engine = nv_engine(object); in nv40_gr_init()
369 struct nvkm_fb *pfb = nvkm_fb(object); in nv40_gr_init()
/linux-4.1.27/include/linux/
fscache-cache.h
91 struct fscache_object *object; /* object to be operated upon */ member
240 int (*lookup_object)(struct fscache_object *object);
243 void (*lookup_complete)(struct fscache_object *object);
246 struct fscache_object *(*grab_object)(struct fscache_object *object);
249 int (*pin_object)(struct fscache_object *object);
252 void (*unpin_object)(struct fscache_object *object);
259 void (*update_object)(struct fscache_object *object);
266 void (*drop_object)(struct fscache_object *object);
269 void (*put_object)(struct fscache_object *object);
276 int (*attr_changed)(struct fscache_object *object);
[all …]
kasan.h
42 void kasan_unpoison_object_data(struct kmem_cache *cache, void *object);
43 void kasan_poison_object_data(struct kmem_cache *cache, void *object);
48 void kasan_kmalloc(struct kmem_cache *s, const void *object, size_t size);
49 void kasan_krealloc(const void *object, size_t new_size);
51 void kasan_slab_alloc(struct kmem_cache *s, void *object);
52 void kasan_slab_free(struct kmem_cache *s, void *object);
69 void *object) {} in kasan_unpoison_object_data() argument
71 void *object) {} in kasan_poison_object_data() argument
76 static inline void kasan_kmalloc(struct kmem_cache *s, const void *object, in kasan_kmalloc() argument
78 static inline void kasan_krealloc(const void *object, size_t new_size) {} in kasan_krealloc() argument
[all …]
assoc_array.h
39 unsigned long (*get_object_key_chunk)(const void *object, int level);
42 bool (*compare_object)(const void *object, const void *index_key);
47 int (*diff_objects)(const void *object, const void *index_key);
50 void (*free_object)(void *object);
65 int (*iterator)(const void *object,
76 void *object);
78 void *object);
88 bool (*iterator)(void *object, void *iterator_data),
kmemcheck.h
13 void kmemcheck_slab_alloc(struct kmem_cache *s, gfp_t gfpflags, void *object,
15 void kmemcheck_slab_free(struct kmem_cache *s, void *object, size_t size);
103 kmemcheck_slab_alloc(struct kmem_cache *s, gfp_t gfpflags, void *object, in kmemcheck_slab_alloc() argument
108 static inline void kmemcheck_slab_free(struct kmem_cache *s, void *object, in kmemcheck_slab_free() argument
/linux-4.1.27/drivers/gpu/drm/
drm_global.c
38 void *object; member
51 item->object = NULL; in drm_global_init()
61 BUG_ON(item->object != NULL); in drm_global_release()
73 item->object = kzalloc(ref->size, GFP_KERNEL); in drm_global_item_ref()
74 if (unlikely(item->object == NULL)) { in drm_global_item_ref()
79 ref->object = item->object; in drm_global_item_ref()
86 ref->object = item->object; in drm_global_item_ref()
91 item->object = NULL; in drm_global_item_ref()
102 BUG_ON(ref->object != item->object); in drm_global_item_unref()
105 item->object = NULL; in drm_global_item_unref()
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/subdev/bios/
base.c
73 nvkm_bios_rd08(struct nvkm_object *object, u64 addr) in nvkm_bios_rd08() argument
75 struct nvkm_bios *bios = (void *)object; in nvkm_bios_rd08()
80 nvkm_bios_rd16(struct nvkm_object *object, u64 addr) in nvkm_bios_rd16() argument
82 struct nvkm_bios *bios = (void *)object; in nvkm_bios_rd16()
87 nvkm_bios_rd32(struct nvkm_object *object, u64 addr) in nvkm_bios_rd32() argument
89 struct nvkm_bios *bios = (void *)object; in nvkm_bios_rd32()
94 nvkm_bios_wr08(struct nvkm_object *object, u64 addr, u8 data) in nvkm_bios_wr08() argument
96 struct nvkm_bios *bios = (void *)object; in nvkm_bios_wr08()
101 nvkm_bios_wr16(struct nvkm_object *object, u64 addr, u16 data) in nvkm_bios_wr16() argument
103 struct nvkm_bios *bios = (void *)object; in nvkm_bios_wr16()
[all …]
shadow.c
55 shadow_rd08(struct nvkm_object *object, u64 addr) in shadow_rd08() argument
57 struct nvkm_bios *bios = (void *)object; in shadow_rd08()
64 shadow_rd16(struct nvkm_object *object, u64 addr) in shadow_rd16() argument
66 struct nvkm_bios *bios = (void *)object; in shadow_rd16()
73 shadow_rd32(struct nvkm_object *object, u64 addr) in shadow_rd32() argument
75 struct nvkm_bios *bios = (void *)object; in shadow_rd32()
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
base.c
37 nvkm_fifo_event_ctor(struct nvkm_object *object, void *data, u32 size, in nvkm_fifo_event_ctor() argument
134 _nvkm_fifo_channel_dtor(struct nvkm_object *object) in _nvkm_fifo_channel_dtor() argument
136 struct nvkm_fifo_chan *chan = (void *)object; in _nvkm_fifo_channel_dtor()
141 _nvkm_fifo_channel_map(struct nvkm_object *object, u64 *addr, u32 *size) in _nvkm_fifo_channel_map() argument
143 struct nvkm_fifo_chan *chan = (void *)object; in _nvkm_fifo_channel_map()
150 _nvkm_fifo_channel_rd32(struct nvkm_object *object, u64 addr) in _nvkm_fifo_channel_rd32() argument
152 struct nvkm_fifo_chan *chan = (void *)object; in _nvkm_fifo_channel_rd32()
162 _nvkm_fifo_channel_wr32(struct nvkm_object *object, u64 addr, u32 data) in _nvkm_fifo_channel_wr32() argument
164 struct nvkm_fifo_chan *chan = (void *)object; in _nvkm_fifo_channel_wr32()
174 nvkm_fifo_uevent_ctor(struct nvkm_object *object, void *data, u32 size, in nvkm_fifo_uevent_ctor() argument
[all …]
nv50.c
72 nv50_fifo_context_attach(struct nvkm_object *parent, struct nvkm_object *object) in nv50_fifo_context_attach() argument
76 struct nvkm_gpuobj *ectx = (void *)object; in nv50_fifo_context_attach()
81 switch (nv_engidx(object->engine)) { in nv50_fifo_context_attach()
103 struct nvkm_object *object) in nv50_fifo_context_detach() argument
112 switch (nv_engidx(object->engine)) { in nv50_fifo_context_detach()
159 struct nvkm_object *object, u32 handle) in nv50_fifo_object_attach() argument
164 if (nv_iclass(object, NV_GPUOBJ_CLASS)) in nv50_fifo_object_attach()
165 context = nv_gpuobj(object)->node->offset >> 4; in nv50_fifo_object_attach()
169 switch (nv_engidx(object->engine)) { in nv50_fifo_object_attach()
313 nv50_fifo_chan_dtor(struct nvkm_object *object) in nv50_fifo_chan_dtor() argument
[all …]
nv04.c
56 struct nvkm_object *object, u32 handle) in nv04_fifo_object_attach() argument
63 if (nv_iclass(object, NV_GPUOBJ_CLASS)) in nv04_fifo_object_attach()
64 context = nv_gpuobj(object)->addr >> 4; in nv04_fifo_object_attach()
68 switch (nv_engidx(object->engine)) { in nv04_fifo_object_attach()
103 struct nvkm_object *object) in nv04_fifo_context_attach() argument
105 nv_engctx(object)->addr = nvkm_fifo_chan(parent)->chid; in nv04_fifo_context_attach()
160 nv04_fifo_chan_dtor(struct nvkm_object *object) in nv04_fifo_chan_dtor() argument
162 struct nv04_fifo_priv *priv = (void *)object->engine; in nv04_fifo_chan_dtor()
163 struct nv04_fifo_chan *chan = (void *)object; in nv04_fifo_chan_dtor()
174 nv04_fifo_chan_init(struct nvkm_object *object) in nv04_fifo_chan_init() argument
[all …]
gk104.c
129 struct nvkm_object *object) in gk104_fifo_context_attach() argument
133 struct nvkm_engctx *ectx = (void *)object; in gk104_fifo_context_attach()
137 switch (nv_engidx(object->engine)) { in gk104_fifo_context_attach()
170 struct nvkm_object *object) in gk104_fifo_context_detach() argument
178 switch (nv_engidx(object->engine)) { in gk104_fifo_context_detach()
286 gk104_fifo_chan_init(struct nvkm_object *object) in gk104_fifo_chan_init() argument
288 struct nvkm_gpuobj *base = nv_gpuobj(object->parent); in gk104_fifo_chan_init()
289 struct gk104_fifo_priv *priv = (void *)object->engine; in gk104_fifo_chan_init()
290 struct gk104_fifo_chan *chan = (void *)object; in gk104_fifo_chan_init()
311 gk104_fifo_chan_fini(struct nvkm_object *object, bool suspend) in gk104_fifo_chan_fini() argument
[all …]
gf100.c
109 struct nvkm_object *object) in gf100_fifo_context_attach() argument
113 struct nvkm_engctx *ectx = (void *)object; in gf100_fifo_context_attach()
117 switch (nv_engidx(object->engine)) { in gf100_fifo_context_attach()
146 struct nvkm_object *object) in gf100_fifo_context_detach() argument
154 switch (nv_engidx(object->engine)) { in gf100_fifo_context_detach()
251 gf100_fifo_chan_init(struct nvkm_object *object) in gf100_fifo_chan_init() argument
253 struct nvkm_gpuobj *base = nv_gpuobj(object->parent); in gf100_fifo_chan_init()
254 struct gf100_fifo_priv *priv = (void *)object->engine; in gf100_fifo_chan_init()
255 struct gf100_fifo_chan *chan = (void *)object; in gf100_fifo_chan_init()
276 gf100_fifo_chan_fini(struct nvkm_object *object, bool suspend) in gf100_fifo_chan_fini() argument
[all …]
g84.c
42 g84_fifo_context_attach(struct nvkm_object *parent, struct nvkm_object *object) in g84_fifo_context_attach() argument
46 struct nvkm_gpuobj *ectx = (void *)object; in g84_fifo_context_attach()
51 switch (nv_engidx(object->engine)) { in g84_fifo_context_attach()
81 struct nvkm_object *object) in g84_fifo_context_detach() argument
90 switch (nv_engidx(object->engine)) { in g84_fifo_context_detach()
129 struct nvkm_object *object, u32 handle) in g84_fifo_object_attach() argument
134 if (nv_iclass(object, NV_GPUOBJ_CLASS)) in g84_fifo_object_attach()
135 context = nv_gpuobj(object)->node->offset >> 4; in g84_fifo_object_attach()
139 switch (nv_engidx(object->engine)) { in g84_fifo_object_attach()
310 g84_fifo_chan_init(struct nvkm_object *object) in g84_fifo_chan_init() argument
[all …]
nv40.c
69 struct nvkm_object *object, u32 handle) in nv40_fifo_object_attach() argument
76 if (nv_iclass(object, NV_GPUOBJ_CLASS)) in nv40_fifo_object_attach()
77 context = nv_gpuobj(object)->addr >> 4; in nv40_fifo_object_attach()
81 switch (nv_engidx(object->engine)) { in nv40_fifo_object_attach()
296 nv40_fifo_init(struct nvkm_object *object) in nv40_fifo_init() argument
298 struct nv04_fifo_priv *priv = (void *)object; in nv40_fifo_init()
299 struct nvkm_fb *pfb = nvkm_fb(object); in nv40_fifo_init()
/linux-4.1.27/drivers/gpu/drm/nouveau/include/nvif/
client.h
8 struct nvif_object *object; /*XXX: hack for nvif_object() */ member
14 nvif_client(struct nvif_object *object) in nvif_client() argument
16 while (object && object->parent != object) in nvif_client()
17 object = object->parent; in nvif_client()
18 return (void *)object; in nvif_client()
device.h
9 struct nvif_object *object; /*XXX: hack for nvif_object() */ member
14 nvif_device(struct nvif_object *object) in nvif_device() argument
16 while (object && object->oclass != 0x0080 /*XXX: NV_DEVICE_CLASS*/ ) in nvif_device()
17 object = object->parent; in nvif_device()
18 return (void *)object; in nvif_device()
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/subdev/instmem/
nv04.c
33 nv04_instobj_rd32(struct nvkm_object *object, u64 addr) in nv04_instobj_rd32() argument
35 struct nv04_instmem_priv *priv = (void *)nvkm_instmem(object); in nv04_instobj_rd32()
36 struct nv04_instobj_priv *node = (void *)object; in nv04_instobj_rd32()
41 nv04_instobj_wr32(struct nvkm_object *object, u64 addr, u32 data) in nv04_instobj_wr32() argument
43 struct nv04_instmem_priv *priv = (void *)nvkm_instmem(object); in nv04_instobj_wr32()
44 struct nv04_instobj_priv *node = (void *)object; in nv04_instobj_wr32()
49 nv04_instobj_dtor(struct nvkm_object *object) in nv04_instobj_dtor() argument
51 struct nv04_instmem_priv *priv = (void *)nvkm_instmem(object); in nv04_instobj_dtor()
52 struct nv04_instobj_priv *node = (void *)object; in nv04_instobj_dtor()
110 nv04_instmem_rd32(struct nvkm_object *object, u64 addr) in nv04_instmem_rd32() argument
[all …]
nv50.c
44 nv50_instobj_rd32(struct nvkm_object *object, u64 offset) in nv50_instobj_rd32() argument
46 struct nv50_instmem_priv *priv = (void *)nvkm_instmem(object); in nv50_instobj_rd32()
47 struct nv50_instobj_priv *node = (void *)object; in nv50_instobj_rd32()
64 nv50_instobj_wr32(struct nvkm_object *object, u64 offset, u32 data) in nv50_instobj_wr32() argument
66 struct nv50_instmem_priv *priv = (void *)nvkm_instmem(object); in nv50_instobj_wr32()
67 struct nv50_instobj_priv *node = (void *)object; in nv50_instobj_wr32()
82 nv50_instobj_dtor(struct nvkm_object *object) in nv50_instobj_dtor() argument
84 struct nv50_instobj_priv *node = (void *)object; in nv50_instobj_dtor()
85 struct nvkm_fb *pfb = nvkm_fb(object); in nv50_instobj_dtor()
135 nv50_instmem_fini(struct nvkm_object *object, bool suspend) in nv50_instmem_fini() argument
[all …]
base.c
33 _nvkm_instobj_dtor(struct nvkm_object *object) in _nvkm_instobj_dtor() argument
35 struct nvkm_instmem *imem = nvkm_instmem(object); in _nvkm_instobj_dtor()
36 struct nvkm_instobj *iobj = (void *)object; in _nvkm_instobj_dtor()
73 struct nvkm_instmem_impl *impl = (void *)imem->base.object.oclass; in nvkm_instmem_alloc()
75 return nvkm_object_ctor(parent, &parent->engine->subdev.object, in nvkm_instmem_alloc()
80 _nvkm_instmem_fini(struct nvkm_object *object, bool suspend) in _nvkm_instmem_fini() argument
82 struct nvkm_instmem *imem = (void *)object; in _nvkm_instmem_fini()
107 _nvkm_instmem_init(struct nvkm_object *object) in _nvkm_instmem_init() argument
109 struct nvkm_instmem *imem = (void *)object; in _nvkm_instmem_init()
gk20a.c
107 gk20a_instobj_rd32(struct nvkm_object *object, u64 offset) in gk20a_instobj_rd32() argument
109 struct gk20a_instmem_priv *priv = (void *)nvkm_instmem(object); in gk20a_instobj_rd32()
110 struct gk20a_instobj_priv *node = (void *)object; in gk20a_instobj_rd32()
127 gk20a_instobj_wr32(struct nvkm_object *object, u64 offset, u32 data) in gk20a_instobj_wr32() argument
129 struct gk20a_instmem_priv *priv = (void *)nvkm_instmem(object); in gk20a_instobj_wr32()
130 struct gk20a_instobj_priv *node = (void *)object; in gk20a_instobj_wr32()
189 gk20a_instobj_dtor(struct nvkm_object *object) in gk20a_instobj_dtor() argument
191 struct gk20a_instobj_priv *node = (void *)object; in gk20a_instobj_dtor()
382 gk20a_instmem_fini(struct nvkm_object *object, bool suspend) in gk20a_instmem_fini() argument
384 struct gk20a_instmem_priv *priv = (void *)object; in gk20a_instmem_fini()
nv40.c
34 nv40_instmem_rd32(struct nvkm_object *object, u64 addr) in nv40_instmem_rd32() argument
36 struct nv04_instmem_priv *priv = (void *)object; in nv40_instmem_rd32()
41 nv40_instmem_wr32(struct nvkm_object *object, u64 addr, u32 data) in nv40_instmem_wr32() argument
43 struct nv04_instmem_priv *priv = (void *)object; in nv40_instmem_wr32()
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/engine/device/
ctrl.c
34 nvkm_control_mthd_pstate_info(struct nvkm_object *object, void *data, u32 size) in nvkm_control_mthd_pstate_info() argument
39 struct nvkm_clk *clk = nvkm_clk(object); in nvkm_control_mthd_pstate_info()
42 nv_ioctl(object, "control pstate info size %d\n", size); in nvkm_control_mthd_pstate_info()
44 nv_ioctl(object, "control pstate info vers %d\n", in nvkm_control_mthd_pstate_info()
67 nvkm_control_mthd_pstate_attr(struct nvkm_object *object, void *data, u32 size) in nvkm_control_mthd_pstate_attr() argument
72 struct nvkm_clk *clk = nvkm_clk(object); in nvkm_control_mthd_pstate_attr()
80 nv_ioctl(object, "control pstate attr size %d\n", size); in nvkm_control_mthd_pstate_attr()
82 nv_ioctl(object, "control pstate attr vers %d state %d " in nvkm_control_mthd_pstate_attr()
140 nvkm_control_mthd_pstate_user(struct nvkm_object *object, void *data, u32 size) in nvkm_control_mthd_pstate_user() argument
145 struct nvkm_clk *clk = nvkm_clk(object); in nvkm_control_mthd_pstate_user()
[all …]
base.c
80 nvkm_devobj_info(struct nvkm_object *object, void *data, u32 size) in nvkm_devobj_info() argument
82 struct nvkm_device *device = nv_device(object); in nvkm_devobj_info()
90 nv_ioctl(object, "device info size %d\n", size); in nvkm_devobj_info()
92 nv_ioctl(object, "device info vers %d\n", args->v0.version); in nvkm_devobj_info()
153 nvkm_devobj_mthd(struct nvkm_object *object, u32 mthd, void *data, u32 size) in nvkm_devobj_mthd() argument
157 return nvkm_devobj_info(object, data, size); in nvkm_devobj_mthd()
165 nvkm_devobj_rd08(struct nvkm_object *object, u64 addr) in nvkm_devobj_rd08() argument
167 return nv_rd08(object->engine, addr); in nvkm_devobj_rd08()
171 nvkm_devobj_rd16(struct nvkm_object *object, u64 addr) in nvkm_devobj_rd16() argument
173 return nv_rd16(object->engine, addr); in nvkm_devobj_rd16()
[all …]
/linux-4.1.27/Documentation/filesystems/caching/
object.txt
25 FS-Cache maintains an in-kernel representation of each object that a netfs is
40 correspond, but the cookies tree is a superset of the union of the object trees
96 Within FS-Cache, each active object is managed by its own individual state
97 machine. The state for an object is kept in the fscache_object struct, in
98 object->state. A cookie may point to a set of objects that are in different
105 representations are hierarchical, and it is expected that an object must
106 be created or accessed with respect to its parent object.
121 which it is interested (object->event_mask) and relinquish the worker thread.
123 is not masked, the object will be queued for processing (by calling
147 Because only one worker thread may be operating on any particular object's
[all …]
backend-api.txt
47 (*) "fsdef" which should point to the object representation for the FS-Cache
49 here. FS-Cache keeps the caller's reference to the index object if
117 The fields that might be of use to the backend describe the object
119 The object definition contain functions supplied by the netfs for loading
124 (*) In-cache object representation:
140 the case of CacheFS, they're embedded in CacheFS's internal object
144 that refer to a particular object. In such a case it should be printed
147 Each object contains a pointer to the cookie that represents the object it
148 is backing. An object should retired when put_object() is called if it is
150 initialised by calling fscache_object_init(object).
[all …]
operations.txt
125 conjunction with any other operation on the object being operated upon.
148 int fscache_submit_op(struct fscache_object *object,
151 int fscache_submit_exclusive_op(struct fscache_object *object,
159 object and return 0. -ENOBUFS will be returned if the object specified is
162 The operation manager will defer operations on an object that is still
164 operation of conflicting exclusivity is in progress on the object.
173 FSCACHE_OP_WAITING as described above and check the state of the object if
174 necessary (the object might have died whilst the thread was waiting).
179 (4) The operation holds an effective lock upon the object, preventing other
fscache.txt
178 Any index object may reside in more than one cache, provided it only has index
179 children. Any index with non-index object children will be assumed to only
191 A description of the internal representations and object state machine can be
194 Documentation/filesystems/caching/object.txt
219 nal=N Number of object allocation failures
257 abt=N Number of alloc reqs aborted due to object death
267 abt=N Number of retr reqs aborted due to object death
286 rej=N Number of async ops rejected due to object lookup/create failure
320 OBJ INST Length of time to instantiate an object
322 OBJ RUNS Length of time a call to process an object event took
[all …]
netfs-api.txt
8 (1) Caches can store a number of different object types. There are two main
9 object types: indices and files. The first is a special type used by
13 (2) Every index, file or other object is represented by a cookie. This cookie
31 (8) Miscellaneous object registration
121 To define an object, a structure of the following type should be filled out:
160 (1) The type of the object [mandatory].
174 This defines an extraordinary object such as an XATTR.
176 (2) The name of the object type (NUL terminated unless all 16 chars are used)
182 during the instantiation of a non-index object. Only the immediate index
183 parent for the non-index object will be queried. Any indices above that
[all …]
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/engine/pm/
base.c
106 nvkm_perfctr_query(struct nvkm_object *object, void *data, u32 size) in nvkm_perfctr_query() argument
111 struct nvkm_device *device = nv_device(object); in nvkm_perfctr_query()
112 struct nvkm_pm *ppm = (void *)object->engine; in nvkm_perfctr_query()
120 nv_ioctl(object, "perfctr query size %d\n", size); in nvkm_perfctr_query()
122 nv_ioctl(object, "perfctr query vers %d iter %08x\n", in nvkm_perfctr_query()
165 nvkm_perfctr_sample(struct nvkm_object *object, void *data, u32 size) in nvkm_perfctr_sample() argument
170 struct nvkm_pm *ppm = (void *)object->engine; in nvkm_perfctr_sample()
175 nv_ioctl(object, "perfctr sample size %d\n", size); in nvkm_perfctr_sample()
177 nv_ioctl(object, "perfctr sample\n"); in nvkm_perfctr_sample()
218 nvkm_perfctr_read(struct nvkm_object *object, void *data, u32 size) in nvkm_perfctr_read() argument
[all …]
gt215.c
58 struct nvkm_object **object) in gt215_pm_ctor() argument
60 int ret = nv40_pm_ctor(parent, engine, oclass, data, size, object); in gt215_pm_ctor()
62 struct nv40_pm_priv *priv = (void *)*object; in gt215_pm_ctor()
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/subdev/bar/
base.c
67 nvkm_barobj_dtor(struct nvkm_object *object) in nvkm_barobj_dtor() argument
69 struct nvkm_bar *bar = nvkm_bar(object); in nvkm_barobj_dtor()
70 struct nvkm_barobj *barobj = (void *)object; in nvkm_barobj_dtor()
80 nvkm_barobj_rd32(struct nvkm_object *object, u64 addr) in nvkm_barobj_rd32() argument
82 struct nvkm_barobj *barobj = (void *)object; in nvkm_barobj_rd32()
87 nvkm_barobj_wr32(struct nvkm_object *object, u64 addr, u32 data) in nvkm_barobj_wr32() argument
89 struct nvkm_barobj *barobj = (void *)object; in nvkm_barobj_wr32()
110 int ret = nvkm_object_ctor(parent, &parent->engine->subdev.object, in nvkm_bar_alloc()
140 _nvkm_bar_dtor(struct nvkm_object *object) in _nvkm_bar_dtor() argument
142 struct nvkm_bar *bar = (void *)object; in _nvkm_bar_dtor()
nv50.c
212 nv50_bar_dtor(struct nvkm_object *object) in nv50_bar_dtor() argument
214 struct nv50_bar_priv *priv = (void *)object; in nv50_bar_dtor()
229 nv50_bar_init(struct nvkm_object *object) in nv50_bar_init() argument
231 struct nv50_bar_priv *priv = (void *)object; in nv50_bar_init()
256 nv50_bar_fini(struct nvkm_object *object, bool suspend) in nv50_bar_fini() argument
258 struct nv50_bar_priv *priv = (void *)object; in nv50_bar_fini()
Dgf100.c172 gf100_bar_dtor(struct nvkm_object *object) in gf100_bar_dtor() argument
174 struct gf100_bar_priv *priv = (void *)object; in gf100_bar_dtor()
191 gf100_bar_init(struct nvkm_object *object) in gf100_bar_init() argument
193 struct gf100_bar_priv *priv = (void *)object; in gf100_bar_init()
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/engine/sw/
Dnv50.c39 nv50_sw_mthd_dma_vblsem(struct nvkm_object *object, u32 mthd, in nv50_sw_mthd_dma_vblsem() argument
42 struct nv50_sw_chan *chan = (void *)nv_engctx(object->parent); in nv50_sw_mthd_dma_vblsem()
51 if (nv_iclass(handle->object, NV_GPUOBJ_CLASS)) { in nv50_sw_mthd_dma_vblsem()
52 struct nvkm_gpuobj *gpuobj = nv_gpuobj(handle->object); in nv50_sw_mthd_dma_vblsem()
61 nv50_sw_mthd_vblsem_offset(struct nvkm_object *object, u32 mthd, in nv50_sw_mthd_vblsem_offset() argument
64 struct nv50_sw_chan *chan = (void *)nv_engctx(object->parent); in nv50_sw_mthd_vblsem_offset()
70 nv50_sw_mthd_vblsem_value(struct nvkm_object *object, u32 mthd, in nv50_sw_mthd_vblsem_value() argument
73 struct nv50_sw_chan *chan = (void *)nv_engctx(object->parent); in nv50_sw_mthd_vblsem_value()
79 nv50_sw_mthd_vblsem_release(struct nvkm_object *object, u32 mthd, in nv50_sw_mthd_vblsem_release() argument
82 struct nv50_sw_chan *chan = (void *)nv_engctx(object->parent); in nv50_sw_mthd_vblsem_release()
[all …]
Dnv04.c40 nv04_sw_set_ref(struct nvkm_object *object, u32 mthd, void *data, u32 size) in nv04_sw_set_ref() argument
42 struct nvkm_object *channel = (void *)nv_engctx(object->parent); in nv04_sw_set_ref()
49 nv04_sw_flip(struct nvkm_object *object, u32 mthd, void *args, u32 size) in nv04_sw_flip() argument
51 struct nv04_sw_chan *chan = (void *)nv_engctx(object->parent); in nv04_sw_flip()
Dgf100.c33 gf100_sw_mthd_vblsem_offset(struct nvkm_object *object, u32 mthd, in gf100_sw_mthd_vblsem_offset() argument
36 struct nv50_sw_chan *chan = (void *)nv_engctx(object->parent); in gf100_sw_mthd_vblsem_offset()
49 gf100_sw_mthd_mp_control(struct nvkm_object *object, u32 mthd, in gf100_sw_mthd_mp_control() argument
52 struct nv50_sw_chan *chan = (void *)nv_engctx(object->parent); in gf100_sw_mthd_mp_control()
/linux-4.1.27/drivers/gpu/drm/nouveau/
Dnouveau_usif.c257 usif_object_dtor(struct usif_object *object) in usif_object_dtor() argument
259 list_del(&object->head); in usif_object_dtor()
260 kfree(object); in usif_object_dtor()
271 struct usif_object *object; in usif_object_new() local
274 if (!(object = kmalloc(sizeof(*object), GFP_KERNEL))) in usif_object_new()
276 list_add(&object->head, &cli->objects); in usif_object_new()
279 object->route = args->v0.route; in usif_object_new()
280 object->token = args->v0.token; in usif_object_new()
282 args->v0.token = (unsigned long)(void *)object; in usif_object_new()
284 args->v0.token = object->token; in usif_object_new()
[all …]
Dnouveau_chan.c45 struct nouveau_cli *cli = (void *)nvif_client(chan->object); in nouveau_channel_idle()
57 chan->object->handle, nvxx_client(&cli->base)->name); in nouveau_channel_idle()
73 nvif_object_ref(NULL, &chan->object); in nouveau_channel_del()
226 &args, size, &chan->object); in nouveau_channel_ind()
228 retn = chan->object->data; in nouveau_channel_ind()
229 if (chan->object->oclass >= KEPLER_CHANNEL_GPFIFO_A) in nouveau_channel_ind()
268 &args, sizeof(args), &chan->object); in nouveau_channel_dma()
270 retn = chan->object->data; in nouveau_channel_dma()
290 nvif_object_map(chan->object); in nouveau_channel_init()
306 ret = nvif_object_init(chan->object, NULL, vram, in nouveau_channel_init()
[all …]
Dnv04_fbcon.c171 ret = nvif_object_init(chan->object, NULL, 0x0062, in nv04_fbcon_accel_init()
177 ret = nvif_object_init(chan->object, NULL, 0x0019, 0x0019, NULL, 0, in nv04_fbcon_accel_init()
182 ret = nvif_object_init(chan->object, NULL, 0x0043, 0x0043, NULL, 0, in nv04_fbcon_accel_init()
187 ret = nvif_object_init(chan->object, NULL, 0x0044, 0x0044, NULL, 0, in nv04_fbcon_accel_init()
192 ret = nvif_object_init(chan->object, NULL, 0x004a, 0x004a, NULL, 0, in nv04_fbcon_accel_init()
197 ret = nvif_object_init(chan->object, NULL, 0x005f, in nv04_fbcon_accel_init()
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/subdev/devinit/
Dbase.c31 _nvkm_devinit_fini(struct nvkm_object *object, bool suspend) in _nvkm_devinit_fini() argument
33 struct nvkm_devinit *devinit = (void *)object; in _nvkm_devinit_fini()
46 _nvkm_devinit_init(struct nvkm_object *object) in _nvkm_devinit_init() argument
48 struct nvkm_devinit_impl *impl = (void *)object->oclass; in _nvkm_devinit_init()
49 struct nvkm_devinit *devinit = (void *)object; in _nvkm_devinit_init()
66 _nvkm_devinit_dtor(struct nvkm_object *object) in _nvkm_devinit_dtor() argument
68 struct nvkm_devinit *devinit = (void *)object; in _nvkm_devinit_dtor()
Dnv50.c93 nv50_devinit_init(struct nvkm_object *object) in nv50_devinit_init() argument
95 struct nvkm_bios *bios = nvkm_bios(object); in nv50_devinit_init()
96 struct nvkm_ibus *ibus = nvkm_ibus(object); in nv50_devinit_init()
97 struct nv50_devinit_priv *priv = (void *)object; in nv50_devinit_init()
Dnv04.c391 nv04_devinit_fini(struct nvkm_object *object, bool suspend) in nv04_devinit_fini() argument
393 struct nv04_devinit_priv *priv = (void *)object; in nv04_devinit_fini()
411 nv04_devinit_init(struct nvkm_object *object) in nv04_devinit_init() argument
413 struct nv04_devinit_priv *priv = (void *)object; in nv04_devinit_init()
431 nv04_devinit_dtor(struct nvkm_object *object) in nv04_devinit_dtor() argument
433 struct nv04_devinit_priv *priv = (void *)object; in nv04_devinit_dtor()
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/engine/disp/
Dbase.c37 nvkm_disp_vblank_ctor(struct nvkm_object *object, void *data, u32 size, in nvkm_disp_vblank_ctor() argument
67 nvkm_disp_hpd_ctor(struct nvkm_object *object, void *data, u32 size, in nvkm_disp_hpd_ctor() argument
101 nvkm_disp_ntfy(struct nvkm_object *object, u32 type, struct nvkm_event **event) in nvkm_disp_ntfy() argument
103 struct nvkm_disp *disp = (void *)object->engine; in nvkm_disp_ntfy()
118 _nvkm_disp_fini(struct nvkm_object *object, bool suspend) in _nvkm_disp_fini() argument
120 struct nvkm_disp *disp = (void *)object; in _nvkm_disp_fini()
141 _nvkm_disp_init(struct nvkm_object *object) in _nvkm_disp_init() argument
143 struct nvkm_disp *disp = (void *)object; in _nvkm_disp_init()
168 _nvkm_disp_dtor(struct nvkm_object *object) in _nvkm_disp_dtor() argument
170 struct nvkm_disp *disp = (void *)object; in _nvkm_disp_dtor()
[all …]
Dnv50.c115 nv50_disp_chan_uevent_ctor(struct nvkm_object *object, void *data, u32 size, in nv50_disp_chan_uevent_ctor() argument
118 struct nv50_disp_dmac *dmac = (void *)object; in nv50_disp_chan_uevent_ctor()
142 nv50_disp_chan_ntfy(struct nvkm_object *object, u32 type, in nv50_disp_chan_ntfy() argument
145 struct nv50_disp_priv *priv = (void *)object->engine; in nv50_disp_chan_ntfy()
157 nv50_disp_chan_map(struct nvkm_object *object, u64 *addr, u32 *size) in nv50_disp_chan_map() argument
159 struct nv50_disp_chan *chan = (void *)object; in nv50_disp_chan_map()
160 *addr = nv_device_resource_start(nv_device(object), 0) + in nv50_disp_chan_map()
167 nv50_disp_chan_rd32(struct nvkm_object *object, u64 addr) in nv50_disp_chan_rd32() argument
169 struct nv50_disp_priv *priv = (void *)object->engine; in nv50_disp_chan_rd32()
170 struct nv50_disp_chan *chan = (void *)object; in nv50_disp_chan_rd32()
[all …]
Dnv04.c37 nv04_disp_scanoutpos(struct nvkm_object *object, struct nv04_disp_priv *priv, in nv04_disp_scanoutpos() argument
47 nv_ioctl(object, "disp scanoutpos size %d\n", size); in nv04_disp_scanoutpos()
49 nv_ioctl(object, "disp scanoutpos vers %d\n", args->v0.version); in nv04_disp_scanoutpos()
78 nv04_disp_mthd(struct nvkm_object *object, u32 mthd, void *data, u32 size) in nv04_disp_mthd() argument
83 struct nv04_disp_priv *priv = (void *)object->engine; in nv04_disp_mthd()
86 nv_ioctl(object, "disp mthd size %d\n", size); in nv04_disp_mthd()
88 nv_ioctl(object, "disp mthd vers %d mthd %02x head %d\n", in nv04_disp_mthd()
100 return nv04_disp_scanoutpos(object, priv, data, size, head); in nv04_disp_mthd()
Dgf110.c75 struct nvkm_object *object, u32 name) in gf110_disp_dmac_object_attach() argument
79 u32 addr = nv_gpuobj(object)->node->offset; in gf110_disp_dmac_object_attach()
92 gf110_disp_dmac_init(struct nvkm_object *object) in gf110_disp_dmac_init() argument
94 struct nv50_disp_priv *priv = (void *)object->engine; in gf110_disp_dmac_init()
95 struct nv50_disp_dmac *dmac = (void *)object; in gf110_disp_dmac_init()
125 gf110_disp_dmac_fini(struct nvkm_object *object, bool suspend) in gf110_disp_dmac_fini() argument
127 struct nv50_disp_priv *priv = (void *)object->engine; in gf110_disp_dmac_fini()
128 struct nv50_disp_dmac *dmac = (void *)object; in gf110_disp_dmac_fini()
293 gf110_disp_core_init(struct nvkm_object *object) in gf110_disp_core_init() argument
295 struct nv50_disp_priv *priv = (void *)object->engine; in gf110_disp_core_init()
[all …]
Doutp.c33 _nvkm_output_fini(struct nvkm_object *object, bool suspend) in _nvkm_output_fini() argument
35 struct nvkm_output *outp = (void *)object; in _nvkm_output_fini()
41 _nvkm_output_init(struct nvkm_object *object) in _nvkm_output_init() argument
43 struct nvkm_output *outp = (void *)object; in _nvkm_output_init()
51 _nvkm_output_dtor(struct nvkm_object *object) in _nvkm_output_dtor() argument
53 struct nvkm_output *outp = (void *)object; in _nvkm_output_dtor()
Dconn.c55 _nvkm_connector_fini(struct nvkm_object *object, bool suspend) in _nvkm_connector_fini() argument
57 struct nvkm_connector *conn = (void *)object; in _nvkm_connector_fini()
63 _nvkm_connector_init(struct nvkm_object *object) in _nvkm_connector_init() argument
65 struct nvkm_connector *conn = (void *)object; in _nvkm_connector_init()
73 _nvkm_connector_dtor(struct nvkm_object *object) in _nvkm_connector_dtor() argument
75 struct nvkm_connector *conn = (void *)object; in _nvkm_connector_dtor()
Ddacnv50.c43 nv_ioctl(object, "disp dac pwr size %d\n", size); in nv50_dac_power()
45 nv_ioctl(object, "disp dac pwr vers %d state %d data %d " in nv50_dac_power()
72 nv_ioctl(object, "disp dac load size %d\n", size); in nv50_dac_sense()
74 nv_ioctl(object, "disp dac load vers %d data %08x\n", in nv50_dac_sense()
Doutpdp.c187 _nvkm_output_dp_fini(struct nvkm_object *object, bool suspend) in _nvkm_output_dp_fini() argument
189 struct nvkm_output_dp *outp = (void *)object; in _nvkm_output_dp_fini()
196 _nvkm_output_dp_init(struct nvkm_object *object) in _nvkm_output_dp_init() argument
198 struct nvkm_output_dp *outp = (void *)object; in _nvkm_output_dp_init()
204 _nvkm_output_dp_dtor(struct nvkm_object *object) in _nvkm_output_dp_dtor() argument
206 struct nvkm_output_dp *outp = (void *)object; in _nvkm_output_dp_dtor()
Dsornv50.c43 nv_ioctl(object, "disp sor pwr size %d\n", size); in nv50_sor_power()
45 nv_ioctl(object, "disp sor pwr vers %d state %d\n", in nv50_sor_power()
Dhdagt215.c42 nv_ioctl(object, "disp sor hda eld size %d\n", size); in gt215_hda_eld()
44 nv_ioctl(object, "disp sor hda eld vers %d\n", args->v0.version); in gt215_hda_eld()
Dhdagf110.c45 nv_ioctl(object, "disp sor hda eld size %d\n", size); in gf110_hda_eld()
47 nv_ioctl(object, "disp sor hda eld vers %d\n", args->v0.version); in gf110_hda_eld()
Dhdmigk104.c42 nv_ioctl(object, "disp sor hdmi ctrl size %d\n", size); in gk104_hdmi_ctrl()
44 nv_ioctl(object, "disp sor hdmi ctrl vers %d state %d " in gk104_hdmi_ctrl()
Dhdmigf110.c41 nv_ioctl(object, "disp sor hdmi ctrl size %d\n", size); in gf110_hdmi_ctrl()
43 nv_ioctl(object, "disp sor hdmi ctrl vers %d state %d " in gf110_hdmi_ctrl()
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/engine/
Dxtensa.c28 _nvkm_xtensa_rd32(struct nvkm_object *object, u64 addr) in _nvkm_xtensa_rd32() argument
30 struct nvkm_xtensa *xtensa = (void *)object; in _nvkm_xtensa_rd32()
35 _nvkm_xtensa_wr32(struct nvkm_object *object, u64 addr, u32 data) in _nvkm_xtensa_wr32() argument
37 struct nvkm_xtensa *xtensa = (void *)object; in _nvkm_xtensa_wr32()
95 _nvkm_xtensa_init(struct nvkm_object *object) in _nvkm_xtensa_init() argument
97 struct nvkm_device *device = nv_device(object); in _nvkm_xtensa_init()
98 struct nvkm_xtensa *xtensa = (void *)object; in _nvkm_xtensa_init()
124 ret = nvkm_gpuobj_new(object, NULL, 0x40000, 0x1000, 0, in _nvkm_xtensa_init()
161 _nvkm_xtensa_fini(struct nvkm_object *object, bool suspend) in _nvkm_xtensa_fini() argument
163 struct nvkm_xtensa *xtensa = (void *)object; in _nvkm_xtensa_fini()
Dfalcon.c47 _nvkm_falcon_rd32(struct nvkm_object *object, u64 addr) in _nvkm_falcon_rd32() argument
49 struct nvkm_falcon *falcon = (void *)object; in _nvkm_falcon_rd32()
54 _nvkm_falcon_wr32(struct nvkm_object *object, u64 addr, u32 data) in _nvkm_falcon_wr32() argument
56 struct nvkm_falcon *falcon = (void *)object; in _nvkm_falcon_wr32()
71 _nvkm_falcon_init(struct nvkm_object *object) in _nvkm_falcon_init() argument
73 struct nvkm_device *device = nv_device(object); in _nvkm_falcon_init()
74 struct nvkm_falcon *falcon = (void *)object; in _nvkm_falcon_init()
175 ret = nvkm_gpuobj_new(object->parent, NULL, falcon->code.size, in _nvkm_falcon_init()
241 _nvkm_falcon_fini(struct nvkm_object *object, bool suspend) in _nvkm_falcon_fini() argument
243 struct nvkm_falcon *falcon = (void *)object; in _nvkm_falcon_fini()
/linux-4.1.27/Documentation/ABI/testing/
Dsysfs-bus-acpi6 object associated with the device object. For example,
16 This attribute indicates the PNP IDs of the device object.
18 CCCCCCCC contains device object's PNPID (_HID or _CID).
25 device object. For example, PNP0103.
33 This attribute contains the output of the device object's
40 This attribute contains the output of the device object's
49 This attribute contains the output of the device object's
57 this device object. This file exists for every device
58 object that has _EJ0 method.
Dsysfs-kernel-livepatch17 object (vmlinux or a module) in which it patched functions.
28 What: /sys/kernel/livepatch/<patch>/<object>
33 The object directory contains subdirectories for each function
34 that is patched within the object.
36 What: /sys/kernel/livepatch/<patch>/<object>/<function>
Dsysfs-class-devfreq7 The name of devfreq object denoted as ... is same as the
15 governor used by the corresponding devfreq object.
22 frequency of the corresponding devfreq object. Same as
31 predicted target frequency of the corresponding devfreq object.
39 object. The values are represented in ms. If the value is
63 sets the requested frequency for the devfreq object if
71 the available frequencies of the corresponding devfreq object.
Dsysfs-class-extcon7 The name of extcon object denoted as ... is the name given
27 object. If the extcon object has an optional callback
36 attach/detach information of the corresponding extcon object.
37 If the extcon object has an optional callback "show_state"
/linux-4.1.27/mm/kasan/
Dkasan.c310 void kasan_unpoison_object_data(struct kmem_cache *cache, void *object) in kasan_unpoison_object_data() argument
312 kasan_unpoison_shadow(object, cache->object_size); in kasan_unpoison_object_data()
315 void kasan_poison_object_data(struct kmem_cache *cache, void *object) in kasan_poison_object_data() argument
317 kasan_poison_shadow(object, in kasan_poison_object_data()
322 void kasan_slab_alloc(struct kmem_cache *cache, void *object) in kasan_slab_alloc() argument
324 kasan_kmalloc(cache, object, cache->object_size); in kasan_slab_alloc()
327 void kasan_slab_free(struct kmem_cache *cache, void *object) in kasan_slab_free() argument
336 kasan_poison_shadow(object, rounded_up_size, KASAN_KMALLOC_FREE); in kasan_slab_free()
339 void kasan_kmalloc(struct kmem_cache *cache, const void *object, size_t size) in kasan_kmalloc() argument
344 if (unlikely(object == NULL)) in kasan_kmalloc()
[all …]
Dreport.c109 void *object; in print_address_description() local
113 object = virt_to_obj(cache, page_address(page), addr); in print_address_description()
117 if (unlikely(object > last_object)) in print_address_description()
118 object = last_object; /* we hit into padding */ in print_address_description()
120 object_err(cache, page, object, in print_address_description()
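Read together, these hooks bracket a slab object's lifetime. A condensed sketch of how an allocator might pair them, using only the kasan_* calls quoted above (the surrounding allocator context is simplified and hypothetical):

	/* slab page setup: let the constructor touch the object, then re-poison it */
	kasan_unpoison_object_data(cache, object);
	cache->ctor(object);
	kasan_poison_object_data(cache, object);

	/* allocation path: mark the object area accessible again */
	kasan_slab_alloc(cache, object);

	/* free path: poison the object so later accesses are reported as use-after-free */
	kasan_slab_free(cache, object);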
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/subdev/i2c/
Dpadgm204.c32 gm204_i2c_pad_fini(struct nvkm_object *object, bool suspend) in gm204_i2c_pad_fini() argument
34 struct nvkm_i2c *i2c = (void *)nvkm_i2c(object); in gm204_i2c_pad_fini()
35 struct gm204_i2c_pad *pad = (void *)object; in gm204_i2c_pad_fini()
41 gm204_i2c_pad_init(struct nvkm_object *object) in gm204_i2c_pad_init() argument
43 struct nvkm_i2c *i2c = (void *)nvkm_i2c(object); in gm204_i2c_pad_init()
44 struct gm204_i2c_pad *pad = (void *)object; in gm204_i2c_pad_init()
Dpadg94.c32 g94_i2c_pad_fini(struct nvkm_object *object, bool suspend) in g94_i2c_pad_fini() argument
34 struct nvkm_i2c *i2c = (void *)nvkm_i2c(object); in g94_i2c_pad_fini()
35 struct g94_i2c_pad *pad = (void *)object; in g94_i2c_pad_fini()
41 g94_i2c_pad_init(struct nvkm_object *object) in g94_i2c_pad_init() argument
43 struct nvkm_i2c *i2c = (void *)nvkm_i2c(object); in g94_i2c_pad_init()
44 struct g94_i2c_pad *pad = (void *)object; in g94_i2c_pad_init()
Dpad.c27 _nvkm_i2c_pad_fini(struct nvkm_object *object, bool suspend) in _nvkm_i2c_pad_fini() argument
29 struct nvkm_i2c_pad *pad = (void *)object; in _nvkm_i2c_pad_fini()
36 _nvkm_i2c_pad_init(struct nvkm_object *object) in _nvkm_i2c_pad_init() argument
38 struct nvkm_i2c_pad *pad = (void *)object; in _nvkm_i2c_pad_init()
Dbase.c92 _nvkm_i2c_port_fini(struct nvkm_object *object, bool suspend) in _nvkm_i2c_port_fini() argument
94 struct nvkm_i2c_port *port = (void *)object; in _nvkm_i2c_port_fini()
101 _nvkm_i2c_port_dtor(struct nvkm_object *object) in _nvkm_i2c_port_dtor() argument
103 struct nvkm_i2c_port *port = (void *)object; in _nvkm_i2c_port_dtor()
345 nvkm_i2c_intr_ctor(struct nvkm_object *object, void *data, u32 size, in nvkm_i2c_intr_ctor() argument
398 _nvkm_i2c_fini(struct nvkm_object *object, bool suspend) in _nvkm_i2c_fini() argument
400 struct nvkm_i2c_impl *impl = (void *)nv_oclass(object); in _nvkm_i2c_fini()
401 struct nvkm_i2c *i2c = (void *)object; in _nvkm_i2c_fini()
427 _nvkm_i2c_init(struct nvkm_object *object) in _nvkm_i2c_init() argument
429 struct nvkm_i2c *i2c = (void *)object; in _nvkm_i2c_init()
[all …]
Dnv50.c101 nv50_i2c_port_init(struct nvkm_object *object) in nv50_i2c_port_init() argument
103 struct nv50_i2c_priv *priv = (void *)nvkm_i2c(object); in nv50_i2c_port_init()
104 struct nv50_i2c_port *port = (void *)object; in nv50_i2c_port_init()
/linux-4.1.27/Documentation/acpi/
Dnamespace.txt11 receiving ACPI hotplug notification events. For each device object in this
81 The following naming conventions apply to object names in the ACPI
194 ACPI namespace representation of the given object and 'instance' is used
195 for distinguishing different object of the same 'bus_id' (it is
198 The value of 'bus_id' depends on the type of the object whose name it is
234 The object's source is an ACPI namespace node (as indicated by the
235 named object's type in the second column). In that case the object's
239 The struct acpi_device object is created for a fixed hardware
244 The struct acpi_device object is created for an ACPI namespace node
249 struct acpi_device object with LNXVIDEO 'bus_id' will be created for
[all …]
/linux-4.1.27/Documentation/filesystems/pohmelfs/
Dnetwork_protocol.txt14 __u64 start; /* Start of the object. */
33 NETFS_LOOKUP, /* Lookup single object */
40 NETFS_RENAME, /* Rename object */
53 @id - id of the object this command operates on. Each command can use it for own purpose.
55 @start - start of the object this command operates on. Each command can use it for own purpose.
64 @ext - length of the path to object.
76 @size - number of bytes to read plus length of the path to object.
77 @ext - object path length.
81 Used to create object.
82 It does not require that all directories on top of the object were
[all …]
/linux-4.1.27/drivers/input/touchscreen/
Datmel_mxt_ts.c665 struct mxt_object *object; in mxt_get_object() local
669 object = data->object_table + i; in mxt_get_object()
670 if (object->type == type) in mxt_get_object()
671 return object; in mxt_get_object()
714 struct mxt_object *object; in mxt_write_object() local
717 object = mxt_get_object(data, type); in mxt_write_object()
718 if (!object || offset >= mxt_obj_size(object)) in mxt_write_object()
721 reg = object->start_address; in mxt_write_object()
1241 struct mxt_object *object; in mxt_prepare_cfg_mem() local
1262 object = mxt_get_object(data, type); in mxt_prepare_cfg_mem()
[all …]
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/engine/mpeg/
Dnv31.c60 nv31_mpeg_mthd_dma(struct nvkm_object *object, u32 mthd, void *arg, u32 len) in nv31_mpeg_mthd_dma() argument
62 struct nvkm_instmem *imem = nvkm_instmem(object); in nv31_mpeg_mthd_dma()
63 struct nv31_mpeg_priv *priv = (void *)object->engine; in nv31_mpeg_mthd_dma()
155 nv31_mpeg_context_dtor(struct nvkm_object *object) in nv31_mpeg_context_dtor() argument
157 struct nv31_mpeg_priv *priv = (void *)object->engine; in nv31_mpeg_context_dtor()
158 struct nv31_mpeg_chan *chan = (void *)object; in nv31_mpeg_context_dtor()
219 if (handle && !nv_call(handle->object, mthd, data)) in nv31_mpeg_intr()
259 nv31_mpeg_init(struct nvkm_object *object) in nv31_mpeg_init() argument
261 struct nvkm_engine *engine = nv_engine(object); in nv31_mpeg_init()
262 struct nv31_mpeg_priv *priv = (void *)object; in nv31_mpeg_init()
[all …]
Dnv44.c62 nv44_mpeg_context_fini(struct nvkm_object *object, bool suspend) in nv44_mpeg_context_fini() argument
65 struct nv44_mpeg_priv *priv = (void *)object->engine; in nv44_mpeg_context_fini()
66 struct nv44_mpeg_chan *chan = (void *)object; in nv44_mpeg_context_fini()
121 if (handle && !nv_call(handle->object, mthd, data)) in nv44_mpeg_intr()
Dnv40.c33 nv40_mpeg_mthd_dma(struct nvkm_object *object, u32 mthd, void *arg, u32 len) in nv40_mpeg_mthd_dma() argument
35 struct nvkm_instmem *imem = nvkm_instmem(object); in nv40_mpeg_mthd_dma()
36 struct nv31_mpeg_priv *priv = (void *)object->engine; in nv40_mpeg_mthd_dma()
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/subdev/mc/
Dbase.c82 _nvkm_mc_fini(struct nvkm_object *object, bool suspend) in _nvkm_mc_fini() argument
84 struct nvkm_mc *pmc = (void *)object; in _nvkm_mc_fini()
90 _nvkm_mc_init(struct nvkm_object *object) in _nvkm_mc_init() argument
92 struct nvkm_mc *pmc = (void *)object; in _nvkm_mc_init()
101 _nvkm_mc_dtor(struct nvkm_object *object) in _nvkm_mc_dtor() argument
103 struct nvkm_device *device = nv_device(object); in _nvkm_mc_dtor()
104 struct nvkm_mc *pmc = (void *)object; in _nvkm_mc_dtor()
Dnv44.c27 nv44_mc_init(struct nvkm_object *object) in nv44_mc_init() argument
29 struct nv04_mc_priv *priv = (void *)object; in nv44_mc_init()
Dnv50.c54 nv50_mc_init(struct nvkm_object *object) in nv50_mc_init() argument
56 struct nv04_mc_priv *priv = (void *)object; in nv50_mc_init()
Dnv04.c42 nv04_mc_init(struct nvkm_object *object) in nv04_mc_init() argument
44 struct nv04_mc_priv *priv = (void *)object; in nv04_mc_init()
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/subdev/fuse/
Dbase.c27 _nvkm_fuse_init(struct nvkm_object *object) in _nvkm_fuse_init() argument
29 struct nvkm_fuse *fuse = (void *)object; in _nvkm_fuse_init()
34 _nvkm_fuse_dtor(struct nvkm_object *object) in _nvkm_fuse_dtor() argument
36 struct nvkm_fuse *fuse = (void *)object; in _nvkm_fuse_dtor()
Dpriv.h5 int _nvkm_fuse_init(struct nvkm_object *object);
6 void _nvkm_fuse_dtor(struct nvkm_object *object);
Dgm107.c31 gm107_fuse_rd32(struct nvkm_object *object, u64 addr) in gm107_fuse_rd32() argument
33 struct gf100_fuse_priv *priv = (void *)object; in gm107_fuse_rd32()
Dnv50.c33 nv50_fuse_rd32(struct nvkm_object *object, u64 addr) in nv50_fuse_rd32() argument
35 struct nv50_fuse_priv *priv = (void *)object; in nv50_fuse_rd32()
Dgf100.c33 gf100_fuse_rd32(struct nvkm_object *object, u64 addr) in gf100_fuse_rd32() argument
35 struct gf100_fuse_priv *priv = (void *)object; in gf100_fuse_rd32()
/linux-4.1.27/scripts/
Dnamespace.pl344 my ($object, $name) = @_;
345 my $nmdata = $nmdata{$object};
349 splice(@{$nmdata{$object}}, $i, 1);
352 if ($def{$name}[$j] eq $object) {
389 foreach my $object (keys(%nmdata)) {
390 my $nmdata = $nmdata{$object};
396 $nmdata->[$i] = "$type $name $object";
433 printf "reference to $name from $object\n";
/linux-4.1.27/tools/build/Documentation/
DBuild.txt10 Unlike the kernel we don't have a single build object 'obj-y' list that where
32 OBJECT - is the name of the build object
34 When successfully finished the $(DIR) directory contains the final object file
109 It's possible to alter the standard object C flags in the following way:
111 CFLAGS_perf.o += '...' - alters CFLAGS for perf.o object
112 CFLAGS_gtk += '...' - alters CFLAGS for gtk build object
120 For each built object file 'a.o' the '.a.cmd' is created and holds:
122 - Command line used to built that object
123 (for each object)
126 (for compiled object)
[all …]
/linux-4.1.27/security/keys/
Dkeyring.c48 void *object = assoc_array_ptr_to_leaf(x); in keyring_ptr_to_key() local
49 return (struct key *)((unsigned long)object & ~KEYRING_PTR_SUBTYPE); in keyring_ptr_to_key()
272 static unsigned long keyring_get_object_key_chunk(const void *object, int level) in keyring_get_object_key_chunk() argument
274 const struct key *key = keyring_ptr_to_key(object); in keyring_get_object_key_chunk()
278 static bool keyring_compare_object(const void *object, const void *data) in keyring_compare_object() argument
281 const struct key *key = keyring_ptr_to_key(object); in keyring_compare_object()
293 static int keyring_diff_objects(const void *object, const void *data) in keyring_diff_objects() argument
295 const struct key *key_a = keyring_ptr_to_key(object); in keyring_diff_objects()
360 static void keyring_free_object(void *object) in keyring_free_object() argument
362 key_put(keyring_ptr_to_key(object)); in keyring_free_object()
[all …]
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/subdev/fb/
Dgf100.c53 gf100_fb_init(struct nvkm_object *object) in gf100_fb_init() argument
55 struct gf100_fb_priv *priv = (void *)object; in gf100_fb_init()
70 gf100_fb_dtor(struct nvkm_object *object) in gf100_fb_dtor() argument
72 struct nvkm_device *device = nv_device(object); in gf100_fb_dtor()
73 struct gf100_fb_priv *priv = (void *)object; in gf100_fb_dtor()
Dbase.c53 _nvkm_fb_fini(struct nvkm_object *object, bool suspend) in _nvkm_fb_fini() argument
55 struct nvkm_fb *pfb = (void *)object; in _nvkm_fb_fini()
68 _nvkm_fb_init(struct nvkm_object *object) in _nvkm_fb_init() argument
70 struct nvkm_fb *pfb = (void *)object; in _nvkm_fb_init()
90 _nvkm_fb_dtor(struct nvkm_object *object) in _nvkm_fb_dtor() argument
92 struct nvkm_fb *pfb = (void *)object; in _nvkm_fb_dtor()
Dnv50.c271 nv50_fb_dtor(struct nvkm_object *object) in nv50_fb_dtor() argument
273 struct nvkm_device *device = nv_device(object); in nv50_fb_dtor()
274 struct nv50_fb_priv *priv = (void *)object; in nv50_fb_dtor()
286 nv50_fb_init(struct nvkm_object *object) in nv50_fb_init() argument
288 struct nv50_fb_impl *impl = (void *)object->oclass; in nv50_fb_init()
289 struct nv50_fb_priv *priv = (void *)object; in nv50_fb_init()
Drammcp77.c66 mcp77_ram_init(struct nvkm_object *object) in mcp77_ram_init() argument
68 struct nvkm_fb *pfb = nvkm_fb(object); in mcp77_ram_init()
69 struct mcp77_ram_priv *priv = (void *)object; in mcp77_ram_init()
Dgk20a.c29 gk20a_fb_init(struct nvkm_object *object) in gk20a_fb_init() argument
31 struct gk20a_fb_priv *priv = (void *)object; in gk20a_fb_init()
Dnv41.c39 nv41_fb_init(struct nvkm_object *object) in nv41_fb_init() argument
41 struct nv04_fb_priv *priv = (void *)object; in nv41_fb_init()
Dnv30.c92 nv30_fb_init(struct nvkm_object *object) in nv30_fb_init() argument
94 struct nvkm_device *device = nv_device(object); in nv30_fb_init()
95 struct nv04_fb_priv *priv = (void *)object; in nv30_fb_init()
Dnv40.c46 nv40_fb_init(struct nvkm_object *object) in nv40_fb_init() argument
48 struct nv04_fb_priv *priv = (void *)object; in nv40_fb_init()
Dnv44.c48 nv44_fb_init(struct nvkm_object *object) in nv44_fb_init() argument
50 struct nv04_fb_priv *priv = (void *)object; in nv44_fb_init()
/linux-4.1.27/net/core/
Dflow.c39 struct flow_cache_object *object; member
70 if (fle->object && !fle->object->ops->check(fle->object)) in flow_entry_valid()
78 if (fle->object) in flow_entry_kill()
79 fle->object->ops->delete(fle->object); in flow_entry_kill()
241 fle->object = NULL; in flow_cache_lookup()
246 flo = fle->object; in flow_cache_lookup()
252 } else if (fle->object) { in flow_cache_lookup()
253 flo = fle->object; in flow_cache_lookup()
255 fle->object = NULL; in flow_cache_lookup()
261 flo = fle->object; in flow_cache_lookup()
[all …]
/linux-4.1.27/drivers/gpu/drm/nouveau/include/nvkm/core/
Dparent.h12 struct nvkm_object object; member
22 struct nvkm_object *object, u32 name);
40 nvkm_object_init(&(p)->object)
42 nvkm_object_fini(&(p)->object, (s))
Dgpuobj.h13 struct nvkm_object object; member
36 #define nvkm_gpuobj_init(p) nvkm_object_init(&(p)->object)
37 #define nvkm_gpuobj_fini(p,s) nvkm_object_fini(&(p)->object, (s))
56 nvkm_object_ref(&obj->object, (struct nvkm_object **)ref); in nvkm_gpuobj_ref()
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/subdev/gpio/
Dbase.c123 nvkm_gpio_intr_ctor(struct nvkm_object *object, void *data, u32 size, in nvkm_gpio_intr_ctor() argument
162 _nvkm_gpio_fini(struct nvkm_object *object, bool suspend) in _nvkm_gpio_fini() argument
164 const struct nvkm_gpio_impl *impl = (void *)object->oclass; in _nvkm_gpio_fini()
165 struct nvkm_gpio *gpio = nvkm_gpio(object); in _nvkm_gpio_fini()
186 _nvkm_gpio_init(struct nvkm_object *object) in _nvkm_gpio_init() argument
188 struct nvkm_gpio *gpio = nvkm_gpio(object); in _nvkm_gpio_init()
202 _nvkm_gpio_dtor(struct nvkm_object *object) in _nvkm_gpio_dtor() argument
204 struct nvkm_gpio *gpio = (void *)object; in _nvkm_gpio_dtor()
/linux-4.1.27/Documentation/filesystems/
Dexofs.txt11 time attributes and more. Each object is addressed by a 64bit ID, and is
12 contained in a 64bit ID partition. Each object has associated attributes
13 attached to it, which are integral part of the object and provide metadata about
14 the object. The standard defines some common obligatory attributes, but user
21 To use this file system, you need to have an object store to run it on. You
123 * The file system control block (AKA on-disk superblock) resides in an object
126 in-memory superblock structure at mount time. This object is created before
131 * Each file resides in its own object and contains the data (and it will be
136 name, inode #> pairs for files that are found in that directory. The object
138 a bitmap (stored in a separate object). Now they are allocated using a
[all …]
Ddirectory-locking13 2) object creation. Locking rules: same as above.
15 3) object removal. Locking rules: caller locks parent, finds victim,
56 (1) if object removal or non-cross-directory rename holds lock on A and
59 the parent of object and it would have to lock the parent).
69 non-directory object, except renames, which take locks on source and
86 Any contended object is either held by cross-directory rename or
89 is blocked on belongs to child of that object due to (1).
93 would have a contended child and we had assumed that no object is its
97 Consider the object blocking the cross-directory rename. One
119 ability to check that directory is a descendent of another object. Current
Dxfs-self-describing-metadata.txt60 metadata object contains some form of unique identifier in a well known
62 hence parse and verify the metadata object. IF we can't independently identify
63 the type of metadata in the object, then the metadata doesn't describe itself
73 metadata object at runtime, during forensic analysis or repair.
105 object, we don't know what inode it belongs to and hence have to walk the entire
109 owner field in the metadata object, we can immediately do top down validation to
115 contents of the owner field are determined by the type of metadata object we are
127 For example, we can determine whether a metadata object is supposed to be free
130 compared to when the metadata object itself was last written. If the free space
131 block is more recent than the object and the object's owner, then there is a
[all …]
Doverlayfs.txt18 cases an object accessed in the union will be indistinguishable
19 from accessing the corresponding object from the original filesystem.
24 upper filesystem that is providing the object. Similarly st_ino will
26 over the lifetime of a non-directory object. Many applications and
34 object in the 'upper' filesystem is visible while the object in the
36 merged with the 'upper' object.
58 then the lower object is hidden - the name refers only to the upper
59 object.
150 necessary. It then creates the object with the same metadata (owner,
151 mode, mtime, symlink-target etc.) and then if the object is a file, the
[all …]
/linux-4.1.27/Documentation/
Dassoc_array.txt28 This associative array implementation is an object container with the following
37 permits an object to be located in multiple arrays simultaneously.
42 (4) Index keys must be unique. Inserting an object with the same key as one
43 already in the array will replace the old object.
70 pack leaf object pointers into spare space in the node rather than making an
71 extra branch until as such time an object needs to be added to a full node.
122 this was for insertion, the new object is _not_ released by this function,
149 (2) Get a chunk of an object's index key.
151 unsigned long (*get_object_key_chunk)(const void *object, int level);
153 As the previous function, but gets its data from an object in the array
[all …]
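The callbacks quoted above are supplied to the associative array through a struct assoc_array_ops; security/keys/keyring.c, quoted earlier in these results, is the in-tree user. A minimal sketch of wiring up such an ops table for a hypothetical object type (all my_* handlers are placeholders):

	static const struct assoc_array_ops my_object_ops = {
		.get_key_chunk        = my_get_key_chunk,         /* chunk of a caller-supplied index key */
		.get_object_key_chunk = my_get_object_key_chunk,  /* chunk of an object's own key */
		.compare_object       = my_compare_object,        /* does this object match the index key? */
		.diff_objects         = my_diff_objects,          /* first bit at which two keys differ */
		.free_object          = my_free_object,           /* release the reference held by the array */
	};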
/linux-4.1.27/security/smack/
Dsmack_access.c125 int smk_access(struct smack_known *subject, struct smack_known *object, in smk_access() argument
146 if (object == &smack_known_web || subject == &smack_known_web) in smk_access()
151 if (object == &smack_known_star) in smk_access()
157 if (subject->smk_known == object->smk_known) in smk_access()
165 if (object == &smack_known_floor) in smk_access()
178 may = smk_access_entry(subject->smk_known, object->smk_known, in smk_access()
200 if (object == smack_unconfined) in smk_access()
209 smack_log(subject->smk_known, object->smk_known, in smk_access()
331 audit_log_untrustedstring(ab, sad->object); in smack_log_callback()
374 sad->object = object_label; in smack_log()
/linux-4.1.27/drivers/input/serio/
Dserio.c160 void *object; member
191 static void serio_remove_duplicate_events(void *object, in serio_remove_duplicate_events() argument
200 if (object == e->object) { in serio_remove_duplicate_events()
228 serio_add_port(event->object); in serio_handle_event()
232 serio_reconnect_port(event->object); in serio_handle_event()
236 serio_disconnect_port(event->object); in serio_handle_event()
237 serio_find_driver(event->object); in serio_handle_event()
241 serio_reconnect_subtree(event->object); in serio_handle_event()
245 serio_attach_driver(event->object); in serio_handle_event()
249 serio_remove_duplicate_events(event->object, event->type); in serio_handle_event()
[all …]
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/subdev/therm/
Dg84.c193 g84_therm_init(struct nvkm_object *object) in g84_therm_init() argument
195 struct g84_therm_priv *priv = (void *)object; in g84_therm_init()
245 g84_therm_fini(struct nvkm_object *object, bool suspend) in g84_therm_fini() argument
248 nv_wr32(object, 0x20000, 0x00000000); in g84_therm_fini()
251 nv_wr32(object, 0x20100, 0xffffffff); in g84_therm_fini()
252 nv_wr32(object, 0x1100, 0x10000); /* PBUS */ in g84_therm_fini()
254 return _nvkm_therm_fini(object, suspend); in g84_therm_fini()
Dbase.c285 _nvkm_therm_init(struct nvkm_object *object) in _nvkm_therm_init() argument
287 struct nvkm_therm *therm = (void *)object; in _nvkm_therm_init()
308 _nvkm_therm_fini(struct nvkm_object *object, bool suspend) in _nvkm_therm_fini() argument
310 struct nvkm_therm *therm = (void *)object; in _nvkm_therm_fini()
362 _nvkm_therm_dtor(struct nvkm_object *object) in _nvkm_therm_dtor() argument
364 struct nvkm_therm_priv *priv = (void *)object; in _nvkm_therm_dtor()
Dnv50.c180 nv50_therm_init(struct nvkm_object *object) in nv50_therm_init() argument
182 struct nvkm_therm *therm = (void *)object; in nv50_therm_init()
186 return _nvkm_therm_init(object); in nv50_therm_init()
/linux-4.1.27/Documentation/RCU/
Drculist_nulls.txt22 * Because a writer could delete object, and a writer could
23 * reuse these object before the RCU grace period, we
24 * must check key after getting the reference on object
26 if (obj->key != key) { // not the object we expected
61 "If the object is moved from one list to another list in-between the
63 object has moved to the end of a new list, the traversal will not
64 complete properly on the list it should have, since the object will
99 But thanks to SLAB_DESTROY_BY_RCU, beware a deleted object can be reused
117 a race (some writer did a delete and/or a move of an object
121 the beginning. If the object was moved to the same chain,
[all …]
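The lines above describe the key re-check that SLAB_DESTROY_BY_RCU lookups need, since a freed object can be reused for a different key before the grace period ends. A condensed sketch of that lookup pattern (declarations omitted; obj, key, refcnt, hash_node, table/slot and put_object() are illustrative names, while the nulls helpers are the ones this document covers):

	rcu_read_lock();
begin:
	hlist_nulls_for_each_entry_rcu(obj, node, &table[slot], hash_node) {
		if (obj->key != key)
			continue;
		if (!atomic_inc_not_zero(&obj->refcnt))
			goto begin;              /* object is being freed, restart */
		if (obj->key != key) {           /* reused for another key after we took the ref */
			put_object(obj);
			goto begin;
		}
		goto found;
	}
	/* chain ended on a nulls marker: if it is not this slot's marker, the object
	 * may have been moved to another chain in the meantime, so restart */
	if (get_nulls_value(node) != slot)
		goto begin;
	obj = NULL;
found:
	rcu_read_unlock();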
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/subdev/timer/
Dnv04.c130 nv04_timer_fini(struct nvkm_object *object, bool suspend) in nv04_timer_fini() argument
132 struct nv04_timer_priv *priv = (void *)object; in nv04_timer_fini()
140 nv04_timer_init(struct nvkm_object *object) in nv04_timer_init() argument
142 struct nvkm_device *device = nv_device(object); in nv04_timer_init()
143 struct nv04_timer_priv *priv = (void *)object; in nv04_timer_init()
223 nv04_timer_dtor(struct nvkm_object *object) in nv04_timer_dtor() argument
225 struct nv04_timer_priv *priv = (void *)object; in nv04_timer_dtor()
Dgk20a.c27 gk20a_timer_init(struct nvkm_object *object) in gk20a_timer_init() argument
29 struct nv04_timer_priv *priv = (void *)object; in gk20a_timer_init()
/linux-4.1.27/fs/overlayfs/
DKconfig6 object in the 'upper' filesystem is visible while the object in the
8 merged with the 'upper' object.
/linux-4.1.27/drivers/acpi/
Dacpi_processor.c215 union acpi_object object = { 0 }; in acpi_processor_get_info() local
216 struct acpi_buffer buffer = { sizeof(union acpi_object), &object }; in acpi_processor_get_info()
248 pr->acpi_id = object.processor.proc_id; in acpi_processor_get_info()
307 if (!object.processor.pblk_address) in acpi_processor_get_info()
309 else if (object.processor.pblk_length != 6) in acpi_processor_get_info()
311 object.processor.pblk_length); in acpi_processor_get_info()
313 pr->throttling.address = object.processor.pblk_address; in acpi_processor_get_info()
317 pr->pblk = object.processor.pblk_address; in acpi_processor_get_info()
Dprocessor_pdc.c28 union acpi_object object = { 0 }; in processor_physically_present() local
29 struct acpi_buffer buffer = { sizeof(union acpi_object), &object }; in processor_physically_present()
40 acpi_id = object.processor.proc_id; in processor_physically_present()
/linux-4.1.27/Documentation/vm/
Dslub.txt41 P Poisoning (object and padding)
66 a result of storing the metadata (for example, caches with PAGE_SIZE object
98 SLUB can validate all object if the kernel was booted with slub_debug. In
134 large object sizes into one high order page. Setting command line
182 If SLUB encounters a corrupted object (full detection requires the kernel
196 INFO: Object <address> <object information>
205 2. The object contents if an object was involved.
210 Shows a few bytes before the object where the problem was detected.
212 object.
215 The bytes of the object. If the object is inactive then the bytes
[all …]
Dzsmalloc.txt9 any object of size PAGE_SIZE/2 or larger would occupy an entire page.
14 pages act as a single higher-order page i.e. an object can span 0-order
26 location of the allocated object. The reason for this indirection is that
29 is very small. So, before using the allocating memory, the object has to
53 size: object size zspage stores
/linux-4.1.27/drivers/input/gameport/
Dgameport.c265 void *object; member
304 if (event->object == e->object) { in gameport_remove_duplicate_events()
339 gameport_add_port(event->object); in gameport_handle_events()
343 gameport_attach_driver(event->object); in gameport_handle_events()
356 static int gameport_queue_event(void *object, struct module *owner, in gameport_queue_event() argument
373 if (event->object == object) { in gameport_queue_event()
396 event->object = object; in gameport_queue_event()
411 static void gameport_remove_pending_events(void *object) in gameport_remove_pending_events() argument
419 if (event->object == object) { in gameport_remove_pending_events()
446 gameport = event->object; in gameport_get_pending_child()
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/subdev/ibus/
Dgk20a.c62 gk20a_ibus_init(struct nvkm_object *object) in gk20a_ibus_init() argument
64 struct gk20a_ibus_priv *priv = (void *)object; in gk20a_ibus_init()
67 ret = _nvkm_ibus_init(object); in gk20a_ibus_init()
/linux-4.1.27/drivers/gpu/drm/vmwgfx/
Dvmwgfx_ttm_glue.c49 return ttm_mem_global_init(ref->object); in vmw_ttm_mem_global_init()
54 ttm_mem_global_release(ref->object); in vmw_ttm_mem_global_release()
75 dev_priv->mem_global_ref.object; in vmw_ttm_global_init()
/linux-4.1.27/include/acpi/
Dacpixf.h434 acpi_unload_parent_table(acpi_handle object))
484 acpi_get_name(acpi_handle object, u32 name_type,
491 acpi_attach_data(acpi_handle object,
495 acpi_detach_data(acpi_handle object,
498 acpi_get_data(acpi_handle object,
509 acpi_evaluate_object(acpi_handle object,
516 acpi_evaluate_object_typed(acpi_handle object,
525 acpi_get_object_info(acpi_handle object,
537 acpi_get_type(acpi_handle object,
541 acpi_get_parent(acpi_handle object,
[all …]
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/subdev/ltc/
Dgf100.c115 gf100_ltc_init(struct nvkm_object *object) in gf100_ltc_init() argument
117 struct nvkm_ltc_priv *priv = (void *)object; in gf100_ltc_init()
133 gf100_ltc_dtor(struct nvkm_object *object) in gf100_ltc_dtor() argument
135 struct nvkm_fb *pfb = nvkm_fb(object); in gf100_ltc_dtor()
136 struct nvkm_ltc_priv *priv = (void *)object; in gf100_ltc_dtor()
Dgk104.c27 gk104_ltc_init(struct nvkm_object *object) in gk104_ltc_init() argument
29 struct nvkm_ltc_priv *priv = (void *)object; in gk104_ltc_init()
Dbase.c80 _nvkm_ltc_init(struct nvkm_object *object) in _nvkm_ltc_init() argument
82 const struct nvkm_ltc_impl *impl = (void *)nv_oclass(object); in _nvkm_ltc_init()
83 struct nvkm_ltc_priv *priv = (void *)object; in _nvkm_ltc_init()
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/
Dgk20a.c163 gk20a_pmu_fini(struct nvkm_object *object, bool suspend) in gk20a_pmu_fini() argument
165 struct nvkm_pmu *pmu = (void *)object; in gk20a_pmu_fini()
174 gk20a_pmu_init(struct nvkm_object *object) in gk20a_pmu_init() argument
176 struct nvkm_pmu *pmu = (void *)object; in gk20a_pmu_init()
Dgt215.c28 gt215_pmu_init(struct nvkm_object *object) in gt215_pmu_init() argument
30 struct nvkm_pmu *pmu = (void *)object; in gt215_pmu_init()
Dbase.c174 _nvkm_pmu_fini(struct nvkm_object *object, bool suspend) in _nvkm_pmu_fini() argument
176 struct nvkm_pmu *pmu = (void *)object; in _nvkm_pmu_fini()
185 _nvkm_pmu_init(struct nvkm_object *object) in _nvkm_pmu_init() argument
187 const struct nvkm_pmu_impl *impl = (void *)object->oclass; in _nvkm_pmu_init()
188 struct nvkm_pmu *pmu = (void *)object; in _nvkm_pmu_init()
/linux-4.1.27/drivers/staging/lustre/lustre/include/
Dlustre_handles.h55 void (*hop_addref)(void *object);
56 void (*hop_free)(void *object, int size);
/linux-4.1.27/arch/arm/mm/
Dproc-arm720.S140 .type arm710_cr1_clear, #object
141 .type arm710_cr1_set, #object
168 .type arm720_crval, #object
192 .type __\name\()_proc_info,#object
Dproc-v7.S515 .type __v7_ca5mp_proc_info, #object
525 .type __v7_ca9mp_proc_info, #object
535 .type __v7_ca8_proc_info, #object
548 .type __v7_pj4b_proc_info, #object
559 .type __v7_cr7mp_proc_info, #object
569 .type __v7_ca7mp_proc_info, #object
579 .type __v7_ca12mp_proc_info, #object
589 .type __v7_ca15mp_proc_info, #object
599 .type __v7_b15mp_proc_info, #object
609 .type __v7_ca17mp_proc_info, #object
[all …]
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/subdev/volt/
Dbase.c138 _nvkm_volt_init(struct nvkm_object *object) in _nvkm_volt_init() argument
140 struct nvkm_volt *volt = (void *)object; in _nvkm_volt_init()
159 _nvkm_volt_dtor(struct nvkm_object *object) in _nvkm_volt_dtor() argument
161 struct nvkm_volt *volt = (void *)object; in _nvkm_volt_dtor()
/linux-4.1.27/Documentation/security/
DSmack.txt41 to an object with another
70 the label given to a new filesystem object will be the label
82 on a directory when an object is created in the directory and
84 to the directory includes the transmute ("t") mode the object
86 creating process. If the object being created is a directory
112 Smack label has a particular access to an object with a
119 Smack label has a particular access to an object with a
132 object label, the third the access to allow and the fourth the
134 "rwxat-". If a rule for a given subject and object exists it will be
166 object label, and the third the requested access. The access
[all …]
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/subdev/clk/
Dbase.c474 _nvkm_clk_fini(struct nvkm_object *object, bool suspend) in _nvkm_clk_fini() argument
476 struct nvkm_clk *clk = (void *)object; in _nvkm_clk_fini()
482 _nvkm_clk_init(struct nvkm_object *object) in _nvkm_clk_init() argument
484 struct nvkm_clk *clk = (void *)object; in _nvkm_clk_init()
517 _nvkm_clk_dtor(struct nvkm_object *object) in _nvkm_clk_dtor() argument
519 struct nvkm_clk *clk = (void *)object; in _nvkm_clk_dtor()
535 bool allow_reclock, int length, void **object) in nvkm_clk_create_() argument
543 "clock", length, object); in nvkm_clk_create_()
544 clk = *object; in nvkm_clk_create_()
/linux-4.1.27/arch/mips/
DKbuild14 # mips object files
15 # The object files are linked as core-y files would be linked
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/subdev/bus/
Dgf100.c56 gf100_bus_init(struct nvkm_object *object) in gf100_bus_init() argument
58 struct nv04_bus_priv *priv = (void *)object; in gf100_bus_init()
Dnv31.c67 nv31_bus_init(struct nvkm_object *object) in nv31_bus_init() argument
69 struct nv04_bus_priv *priv = (void *)object; in nv31_bus_init()
/linux-4.1.27/Documentation/driver-model/
Dbus.txt15 object of this type. They must initialize the name field, and may
32 initializes the rest of the fields in the bus object and inserts it
33 into a global list of bus types. Once the bus object is registered,
80 count on each object in the list is incremented before the callback is
81 called; it is decremented after the next object has been obtained. The
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/engine/msvld/
Dg98.c65 g98_msvld_init(struct nvkm_object *object) in g98_msvld_init() argument
67 struct g98_msvld_priv *priv = (void *)object; in g98_msvld_init()
Dgk104.c63 gk104_msvld_init(struct nvkm_object *object) in gk104_msvld_init() argument
65 struct gk104_msvld_priv *priv = (void *)object; in gk104_msvld_init()
Dgf100.c63 gf100_msvld_init(struct nvkm_object *object) in gf100_msvld_init() argument
65 struct gf100_msvld_priv *priv = (void *)object; in gf100_msvld_init()
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/engine/mspdec/
Dgk104.c63 gk104_mspdec_init(struct nvkm_object *object) in gk104_mspdec_init() argument
65 struct gk104_mspdec_priv *priv = (void *)object; in gk104_mspdec_init()
Dg98.c64 g98_mspdec_init(struct nvkm_object *object) in g98_mspdec_init() argument
66 struct g98_mspdec_priv *priv = (void *)object; in g98_mspdec_init()
Dgf100.c63 gf100_mspdec_init(struct nvkm_object *object) in gf100_mspdec_init() argument
65 struct gf100_mspdec_priv *priv = (void *)object; in gf100_mspdec_init()
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/engine/msppp/
Dg98.c64 g98_msppp_init(struct nvkm_object *object) in g98_msppp_init() argument
66 struct g98_msppp_priv *priv = (void *)object; in g98_msppp_init()
Dgf100.c63 gf100_msppp_init(struct nvkm_object *object) in gf100_msppp_init() argument
65 struct gf100_msppp_priv *priv = (void *)object; in gf100_msppp_init()
/linux-4.1.27/Documentation/filesystems/nfs/
DExporting34 tree. This means that if any filesystem object is in the dcache, then
35 all of the ancestors of that filesystem object are also in the dcache.
37 maintained easily (by each object maintaining a reference count on
42 for the object. This leads to two related but distinct features of
111 to find or create a dentry for the same object. The default
117 Given a filehandle fragment, this should find the implied object and
122 implied object and create a dentry for it (possibly with
136 object identified by the child dentry. If no get_name function is
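Those Exporting lines refer to the export_operations table a filesystem supplies; a minimal sketch of such a table (the myfs_* handlers are hypothetical, the field names are the standard ones):

	static const struct export_operations myfs_export_ops = {
		.fh_to_dentry = myfs_fh_to_dentry,  /* find/create the dentry implied by a filehandle */
		.fh_to_parent = myfs_fh_to_parent,  /* same, for the parent encoded in the handle */
		.get_name     = myfs_get_name,      /* name of a child object within its parent */
		.get_parent   = myfs_get_parent,    /* dentry for the parent of a disconnected directory */
	};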
