obj 77 arch/ia64/include/asm/asmmacro.h #define LOAD_PHYSICAL(pr, reg, obj) \ obj 78 arch/ia64/include/asm/asmmacro.h [1:](pr)movl reg = obj; \ obj 29 arch/ia64/kernel/acpi-ext.c static acpi_status hp_ccsr_locate(acpi_handle obj, u64 *base, u64 *length) obj 36 arch/ia64/kernel/acpi-ext.c status = acpi_get_vendor_resource(obj, METHOD_NAME__CRS, &hp_ccsr_uuid, obj 78 arch/ia64/kernel/acpi-ext.c static acpi_status hp_crs_locate(acpi_handle obj, u64 *base, u64 *length) obj 82 arch/ia64/kernel/acpi-ext.c acpi_walk_resources(obj, METHOD_NAME__CRS, find_csr_space, &space); obj 91 arch/ia64/kernel/acpi-ext.c acpi_status hp_acpi_csr_space(acpi_handle obj, u64 *csr_base, u64 *csr_length) obj 95 arch/ia64/kernel/acpi-ext.c status = hp_ccsr_locate(obj, csr_base, csr_length); obj 99 arch/ia64/kernel/acpi-ext.c return hp_crs_locate(obj, csr_base, csr_length); obj 831 arch/ia64/kernel/acpi.c union acpi_object *obj; obj 843 arch/ia64/kernel/acpi.c obj = buffer.pointer; obj 844 arch/ia64/kernel/acpi.c if (obj->type != ACPI_TYPE_BUFFER || obj 845 arch/ia64/kernel/acpi.c obj->buffer.length < sizeof(*iosapic)) { obj 850 arch/ia64/kernel/acpi.c iosapic = (struct acpi_madt_io_sapic *)obj->buffer.pointer; obj 256 arch/parisc/include/asm/ropes.h extern int iosapic_fixup_irq(void *obj, struct pci_dev *pcidev); obj 61 arch/parisc/kernel/drivers.c void * obj; obj 69 arch/parisc/kernel/drivers.c if (recurse_data->fn(dev, recurse_data->obj)) obj 88 arch/parisc/kernel/drivers.c .obj = data, obj 740 arch/parisc/kernel/drivers.c .obj = &d, obj 4060 arch/powerpc/xmon/xmon.c #define DUMP_FIELD(obj, format, field) \ obj 4061 arch/powerpc/xmon/xmon.c DUMP_VALUE(format, field, obj->field) obj 182 arch/x86/include/asm/thread_info.h const void *obj, unsigned long len) obj 204 arch/x86/include/asm/thread_info.h if (obj + len <= frame) obj 205 arch/x86/include/asm/thread_info.h return obj >= oldframe + 2 * sizeof(void *) ? obj 13 arch/x86/kernel/livepatch.c struct klp_object *obj) obj 24 arch/x86/kernel/livepatch.c objname = obj->name ? 
obj->name : "vmlinux"; obj 1063 arch/x86/kvm/mmu.c void *obj; obj 1068 arch/x86/kvm/mmu.c obj = kmem_cache_zalloc(base_cache, GFP_KERNEL_ACCOUNT); obj 1069 arch/x86/kvm/mmu.c if (!obj) obj 1071 arch/x86/kvm/mmu.c cache->objects[cache->nobjs++] = obj; obj 41 drivers/acpi/acpi_adxl.c union acpi_object *obj, *o; obj 43 drivers/acpi/acpi_adxl.c obj = acpi_evaluate_dsm_typed(handle, &adxl_guid, ADXL_REVISION, obj 45 drivers/acpi/acpi_adxl.c if (!obj) { obj 50 drivers/acpi/acpi_adxl.c if (obj->package.count != 2) { obj 51 drivers/acpi/acpi_adxl.c pr_info("Bad pkg count %d\n", obj->package.count); obj 55 drivers/acpi/acpi_adxl.c o = obj->package.elements; obj 65 drivers/acpi/acpi_adxl.c o = obj->package.elements + 1; obj 70 drivers/acpi/acpi_adxl.c return obj; obj 73 drivers/acpi/acpi_adxl.c ACPI_FREE(obj); obj 186 drivers/acpi/acpi_extlog.c union acpi_object *obj; obj 194 drivers/acpi/acpi_extlog.c obj = acpi_evaluate_dsm_typed(handle, &guid, EXTLOG_DSM_REV, obj 196 drivers/acpi/acpi_extlog.c if (!obj) { obj 199 drivers/acpi/acpi_extlog.c l1_dirbase = obj->integer.value; obj 200 drivers/acpi/acpi_extlog.c ACPI_FREE(obj); obj 323 drivers/acpi/acpi_video.c union acpi_object *obj; obj 331 drivers/acpi/acpi_video.c obj = (union acpi_object *)buffer.pointer; obj 332 drivers/acpi/acpi_video.c if (!obj || (obj->type != ACPI_TYPE_PACKAGE)) { obj 338 drivers/acpi/acpi_video.c *levels = obj; obj 653 drivers/acpi/acpi_video.c union acpi_object *obj; obj 673 drivers/acpi/acpi_video.c obj = buffer.pointer; obj 675 drivers/acpi/acpi_video.c if (obj && obj->type == ACPI_TYPE_BUFFER) obj 676 drivers/acpi/acpi_video.c *edid = obj; obj 680 drivers/acpi/acpi_video.c kfree(obj); obj 819 drivers/acpi/acpi_video.c union acpi_object *obj = NULL; obj 827 drivers/acpi/acpi_video.c &obj))) { obj 834 drivers/acpi/acpi_video.c if (obj->package.count < ACPI_VIDEO_FIRST_LEVEL) { obj 851 drivers/acpi/acpi_video.c br->levels = kmalloc_array(obj->package.count + ACPI_VIDEO_FIRST_LEVEL, obj 859 drivers/acpi/acpi_video.c for (i = 0; i < obj->package.count; i++) { obj 860 drivers/acpi/acpi_video.c o = (union acpi_object *)&obj->package.elements[i]; obj 918 drivers/acpi/acpi_video.c kfree(obj); obj 1317 drivers/acpi/acpi_video.c union acpi_object *obj; obj 1348 drivers/acpi/acpi_video.c obj = &dod->package.elements[i]; obj 1350 drivers/acpi/acpi_video.c if (obj->type != ACPI_TYPE_INTEGER) { obj 1356 drivers/acpi/acpi_video.c active_list[count].value.int_val = obj->integer.value; obj 1359 drivers/acpi/acpi_video.c (int)obj->integer.value)); obj 243 drivers/acpi/acpica/acutils.h acpi_ut_build_simple_object(union acpi_operand_object *obj, obj 248 drivers/acpi/acpica/acutils.h acpi_ut_build_package_object(union acpi_operand_object *obj, obj 252 drivers/acpi/acpica/acutils.h acpi_ut_copy_iobject_to_eobject(union acpi_operand_object *obj, obj 256 drivers/acpi/acpica/acutils.h acpi_ut_copy_eobject_to_iobject(union acpi_object *obj, obj 445 drivers/acpi/acpica/acutils.h acpi_ut_get_object_size(union acpi_operand_object *obj, acpi_size *obj_length); obj 20 drivers/acpi/acpica/utobject.c acpi_ut_get_simple_object_size(union acpi_operand_object *obj, obj 24 drivers/acpi/acpica/utobject.c acpi_ut_get_package_object_size(union acpi_operand_object *obj, obj 629 drivers/acpi/bus.c const union acpi_object *of_compatible, *obj; obj 641 drivers/acpi/bus.c obj = of_compatible->package.elements; obj 644 drivers/acpi/bus.c obj = of_compatible; obj 647 drivers/acpi/bus.c for (i = 0; i < nval; i++, obj++) { obj 651 drivers/acpi/bus.c if 
(!strcasecmp(obj->string.pointer, id->compatible)) { obj 665 drivers/acpi/bus.c const union acpi_object *obj; obj 673 drivers/acpi/bus.c obj = of_compatible->package.elements; obj 675 drivers/acpi/bus.c obj = of_compatible; obj 677 drivers/acpi/bus.c str = obj->string.pointer; obj 196 drivers/acpi/device_sysfs.c const union acpi_object *of_compatible, *obj; obj 219 drivers/acpi/device_sysfs.c obj = of_compatible->package.elements; obj 222 drivers/acpi/device_sysfs.c obj = of_compatible; obj 224 drivers/acpi/device_sysfs.c for (i = 0; i < nval; i++, obj++) { obj 226 drivers/acpi/device_sysfs.c obj->string.pointer); obj 98 drivers/acpi/fan.c union acpi_object *obj; obj 108 drivers/acpi/fan.c obj = buffer.pointer; obj 109 drivers/acpi/fan.c if (!obj || obj->type != ACPI_TYPE_PACKAGE || obj 110 drivers/acpi/fan.c obj->package.count != 3 || obj 111 drivers/acpi/fan.c obj->package.elements[1].type != ACPI_TYPE_INTEGER) { obj 117 drivers/acpi/fan.c control = obj->package.elements[1].integer.value; obj 141 drivers/acpi/fan.c kfree(obj); obj 236 drivers/acpi/fan.c union acpi_object *obj; obj 243 drivers/acpi/fan.c obj = buffer.pointer; obj 244 drivers/acpi/fan.c if (!obj || obj->type != ACPI_TYPE_PACKAGE) { obj 250 drivers/acpi/fan.c status = acpi_extract_package(obj, &format, &fif); obj 257 drivers/acpi/fan.c kfree(obj); obj 272 drivers/acpi/fan.c union acpi_object *obj; obj 280 drivers/acpi/fan.c obj = buffer.pointer; obj 281 drivers/acpi/fan.c if (!obj || obj->type != ACPI_TYPE_PACKAGE || obj->package.count < 2) { obj 287 drivers/acpi/fan.c fan->fps_count = obj->package.count - 1; /* minus revision field */ obj 299 drivers/acpi/fan.c status = acpi_extract_package(&obj->package.elements[i + 1], obj 312 drivers/acpi/fan.c kfree(obj); obj 234 drivers/acpi/nfit/core.c union acpi_object *obj = &pkg->package.elements[i]; obj 236 drivers/acpi/nfit/core.c if (obj->type == ACPI_TYPE_INTEGER) obj 238 drivers/acpi/nfit/core.c else if (obj->type == ACPI_TYPE_BUFFER) obj 239 drivers/acpi/nfit/core.c size += obj->buffer.length; obj 242 drivers/acpi/nfit/core.c obj->type); obj 256 drivers/acpi/nfit/core.c union acpi_object *obj = &pkg->package.elements[i]; obj 258 drivers/acpi/nfit/core.c if (obj->type == ACPI_TYPE_INTEGER) { obj 259 drivers/acpi/nfit/core.c memcpy(dst, &obj->integer.value, 4); obj 261 drivers/acpi/nfit/core.c } else if (obj->type == ACPI_TYPE_BUFFER) { obj 262 drivers/acpi/nfit/core.c memcpy(dst, obj->buffer.pointer, obj->buffer.length); obj 263 drivers/acpi/nfit/core.c dst += obj->buffer.length; obj 3653 drivers/acpi/nfit/core.c union acpi_object *obj = buf.pointer; obj 3655 drivers/acpi/nfit/core.c if (obj->type == ACPI_TYPE_BUFFER) obj 3656 drivers/acpi/nfit/core.c rc = acpi_nfit_init(acpi_desc, obj->buffer.pointer, obj 3657 drivers/acpi/nfit/core.c obj->buffer.length); obj 3660 drivers/acpi/nfit/core.c (int) obj->type); obj 3683 drivers/acpi/nfit/core.c union acpi_object *obj; obj 3713 drivers/acpi/nfit/core.c obj = buf.pointer; obj 3714 drivers/acpi/nfit/core.c if (obj->type == ACPI_TYPE_BUFFER) { obj 3715 drivers/acpi/nfit/core.c ret = acpi_nfit_init(acpi_desc, obj->buffer.pointer, obj 3716 drivers/acpi/nfit/core.c obj->buffer.length); obj 883 drivers/acpi/pci_root.c union acpi_object *obj; obj 925 drivers/acpi/pci_root.c obj = acpi_evaluate_dsm(ACPI_HANDLE(bus->bridge), &pci_acpi_dsm_guid, 1, obj 927 drivers/acpi/pci_root.c if (obj && obj->type == ACPI_TYPE_INTEGER && obj->integer.value == 0) obj 929 drivers/acpi/pci_root.c ACPI_FREE(obj); obj 168 drivers/acpi/processor_core.c union 
acpi_object *obj; obj 178 drivers/acpi/processor_core.c obj = buffer.pointer; obj 179 drivers/acpi/processor_core.c if (obj->type != ACPI_TYPE_BUFFER || obj 180 drivers/acpi/processor_core.c obj->buffer.length < sizeof(struct acpi_subtable_header)) { obj 184 drivers/acpi/processor_core.c header = (struct acpi_subtable_header *)obj->buffer.pointer; obj 314 drivers/acpi/processor_core.c union acpi_object *obj; obj 323 drivers/acpi/processor_core.c obj = buffer.pointer; obj 324 drivers/acpi/processor_core.c if (obj->type != ACPI_TYPE_BUFFER || obj 325 drivers/acpi/processor_core.c obj->buffer.length < sizeof(struct acpi_subtable_header)) obj 328 drivers/acpi/processor_core.c header = (struct acpi_subtable_header *)obj->buffer.pointer; obj 343 drivers/acpi/processor_idle.c union acpi_object *obj; obj 356 drivers/acpi/processor_idle.c obj = &(element->package.elements[0]); obj 358 drivers/acpi/processor_idle.c if (obj->type != ACPI_TYPE_BUFFER) obj 361 drivers/acpi/processor_idle.c reg = (struct acpi_power_register *)obj->buffer.pointer; obj 368 drivers/acpi/processor_idle.c obj = &(element->package.elements[1]); obj 369 drivers/acpi/processor_idle.c if (obj->type != ACPI_TYPE_INTEGER) obj 372 drivers/acpi/processor_idle.c cx.type = obj->integer.value; obj 424 drivers/acpi/processor_idle.c obj = &(element->package.elements[2]); obj 425 drivers/acpi/processor_idle.c if (obj->type != ACPI_TYPE_INTEGER) obj 428 drivers/acpi/processor_idle.c cx.latency = obj->integer.value; obj 430 drivers/acpi/processor_idle.c obj = &(element->package.elements[3]); obj 431 drivers/acpi/processor_idle.c if (obj->type != ACPI_TYPE_INTEGER) obj 952 drivers/acpi/processor_idle.c static int obj_get_integer(union acpi_object *obj, u32 *value) obj 954 drivers/acpi/processor_idle.c if (obj->type != ACPI_TYPE_INTEGER) obj 957 drivers/acpi/processor_idle.c *value = obj->integer.value; obj 1007 drivers/acpi/processor_idle.c union acpi_object *element, *pkg_elem, *obj; obj 1015 drivers/acpi/processor_idle.c obj = pkg_elem + 6; obj 1016 drivers/acpi/processor_idle.c if (obj->type == ACPI_TYPE_BUFFER) { obj 1019 drivers/acpi/processor_idle.c reg = (struct acpi_power_register *)obj->buffer.pointer; obj 1028 drivers/acpi/processor_idle.c } else if (obj->type == ACPI_TYPE_INTEGER) { obj 1030 drivers/acpi/processor_idle.c lpi_state->address = obj->integer.value; obj 1037 drivers/acpi/processor_idle.c obj = pkg_elem + 9; obj 1038 drivers/acpi/processor_idle.c if (obj->type == ACPI_TYPE_STRING) obj 1039 drivers/acpi/processor_idle.c strlcpy(lpi_state->desc, obj->string.pointer, obj 74 drivers/acpi/processor_pdc.c union acpi_object *obj; obj 82 drivers/acpi/processor_pdc.c obj = kmalloc(sizeof(union acpi_object), GFP_KERNEL); obj 83 drivers/acpi/processor_pdc.c if (!obj) { obj 90 drivers/acpi/processor_pdc.c kfree(obj); obj 97 drivers/acpi/processor_pdc.c obj->type = ACPI_TYPE_BUFFER; obj 98 drivers/acpi/processor_pdc.c obj->buffer.length = 12; obj 99 drivers/acpi/processor_pdc.c obj->buffer.pointer = (u8 *) buf; obj 101 drivers/acpi/processor_pdc.c obj_list->pointer = obj; obj 124 drivers/acpi/processor_pdc.c union acpi_object *obj; obj 127 drivers/acpi/processor_pdc.c obj = pdc_in->pointer; obj 128 drivers/acpi/processor_pdc.c buffer = (u32 *)(obj->buffer.pointer); obj 198 drivers/acpi/processor_perflib.c union acpi_object obj = { 0 }; obj 219 drivers/acpi/processor_perflib.c obj = pct->package.elements[0]; obj 221 drivers/acpi/processor_perflib.c if ((obj.type != ACPI_TYPE_BUFFER) obj 222 drivers/acpi/processor_perflib.c || 
(obj.buffer.length < sizeof(struct acpi_pct_register)) obj 223 drivers/acpi/processor_perflib.c || (obj.buffer.pointer == NULL)) { obj 228 drivers/acpi/processor_perflib.c memcpy(&pr->performance->control_register, obj.buffer.pointer, obj 235 drivers/acpi/processor_perflib.c obj = pct->package.elements[1]; obj 237 drivers/acpi/processor_perflib.c if ((obj.type != ACPI_TYPE_BUFFER) obj 238 drivers/acpi/processor_perflib.c || (obj.buffer.length < sizeof(struct acpi_pct_register)) obj 239 drivers/acpi/processor_perflib.c || (obj.buffer.pointer == NULL)) { obj 245 drivers/acpi/processor_perflib.c memcpy(&pr->performance->status_register, obj.buffer.pointer, obj 416 drivers/acpi/processor_throttling.c union acpi_object obj = { 0 }; obj 439 drivers/acpi/processor_throttling.c obj = ptc->package.elements[0]; obj 441 drivers/acpi/processor_throttling.c if ((obj.type != ACPI_TYPE_BUFFER) obj 442 drivers/acpi/processor_throttling.c || (obj.buffer.length < sizeof(struct acpi_ptc_register)) obj 443 drivers/acpi/processor_throttling.c || (obj.buffer.pointer == NULL)) { obj 449 drivers/acpi/processor_throttling.c memcpy(&pr->throttling.control_register, obj.buffer.pointer, obj 456 drivers/acpi/processor_throttling.c obj = ptc->package.elements[1]; obj 458 drivers/acpi/processor_throttling.c if ((obj.type != ACPI_TYPE_BUFFER) obj 459 drivers/acpi/processor_throttling.c || (obj.buffer.length < sizeof(struct acpi_ptc_register)) obj 460 drivers/acpi/processor_throttling.c || (obj.buffer.pointer == NULL)) { obj 466 drivers/acpi/processor_throttling.c memcpy(&pr->throttling.status_register, obj.buffer.pointer, obj 22 drivers/acpi/property.c const union acpi_object **obj); obj 483 drivers/acpi/property.c const union acpi_object **obj) obj 511 drivers/acpi/property.c if (obj) obj 512 drivers/acpi/property.c *obj = propvalue; obj 529 drivers/acpi/property.c acpi_object_type type, const union acpi_object **obj) obj 531 drivers/acpi/property.c return adev ? 
acpi_data_get_property(&adev->data, name, type, obj) : -EINVAL; obj 584 drivers/acpi/property.c const union acpi_object **obj) obj 599 drivers/acpi/property.c if (obj) obj 600 drivers/acpi/property.c *obj = prop; obj 676 drivers/acpi/property.c const union acpi_object *obj; obj 685 drivers/acpi/property.c ret = acpi_data_get_property(data, propname, ACPI_TYPE_ANY, &obj); obj 693 drivers/acpi/property.c if (obj->type == ACPI_TYPE_LOCAL_REFERENCE) { obj 697 drivers/acpi/property.c ret = acpi_bus_get_device(obj->reference.handle, &device); obj 715 drivers/acpi/property.c if (obj->type != ACPI_TYPE_PACKAGE) obj 717 drivers/acpi/property.c if (index >= obj->package.count) obj 720 drivers/acpi/property.c element = obj->package.elements; obj 721 drivers/acpi/property.c end = element + obj->package.count; obj 794 drivers/acpi/property.c const union acpi_object *obj; obj 801 drivers/acpi/property.c ret = acpi_data_get_property(data, propname, ACPI_TYPE_INTEGER, &obj); obj 807 drivers/acpi/property.c if (obj->integer.value > U8_MAX) obj 809 drivers/acpi/property.c *(u8 *)val = obj->integer.value; obj 812 drivers/acpi/property.c if (obj->integer.value > U16_MAX) obj 814 drivers/acpi/property.c *(u16 *)val = obj->integer.value; obj 817 drivers/acpi/property.c if (obj->integer.value > U32_MAX) obj 819 drivers/acpi/property.c *(u32 *)val = obj->integer.value; obj 822 drivers/acpi/property.c *(u64 *)val = obj->integer.value; obj 826 drivers/acpi/property.c ret = acpi_data_get_property(data, propname, ACPI_TYPE_STRING, &obj); obj 830 drivers/acpi/property.c *(char **)val = obj->string.pointer; obj 934 drivers/acpi/property.c const union acpi_object *obj; obj 944 drivers/acpi/property.c ret = acpi_data_get_property_array(data, propname, ACPI_TYPE_ANY, &obj); obj 949 drivers/acpi/property.c return obj->package.count; obj 951 drivers/acpi/property.c if (proptype != DEV_PROP_STRING && nval > obj->package.count) obj 956 drivers/acpi/property.c items = obj->package.elements; obj 974 drivers/acpi/property.c min_t(u32, nval, obj->package.count)); obj 750 drivers/acpi/scan.c union acpi_object *obj; obj 758 drivers/acpi/scan.c obj = buffer.pointer; obj 759 drivers/acpi/scan.c status = acpi_get_handle(ACPI_ROOT_OBJECT, obj->string.pointer, obj 804 drivers/acpi/sleep.c union acpi_object *obj; obj 816 drivers/acpi/sleep.c obj = &cnstr_pkg[0]; obj 817 drivers/acpi/sleep.c dev_info.uid = obj->integer.value; obj 818 drivers/acpi/sleep.c obj = &cnstr_pkg[1]; obj 819 drivers/acpi/sleep.c dev_info.min_dstate = obj->integer.value; obj 549 drivers/acpi/utils.c union acpi_object obj = { .type = ACPI_TYPE_INTEGER }; obj 550 drivers/acpi/utils.c struct acpi_object_list arg_list = { .count = 1, .pointer = &obj, }; obj 552 drivers/acpi/utils.c obj.integer.value = arg; obj 669 drivers/acpi/utils.c union acpi_object *obj; obj 674 drivers/acpi/utils.c obj = acpi_evaluate_dsm(handle, guid, rev, 0, NULL); obj 675 drivers/acpi/utils.c if (!obj) obj 679 drivers/acpi/utils.c if (obj->type == ACPI_TYPE_INTEGER) obj 680 drivers/acpi/utils.c mask = obj->integer.value; obj 681 drivers/acpi/utils.c else if (obj->type == ACPI_TYPE_BUFFER) obj 682 drivers/acpi/utils.c for (i = 0; i < obj->buffer.length && i < 8; i++) obj 683 drivers/acpi/utils.c mask |= (((u64)obj->buffer.pointer[i]) << (i * 8)); obj 684 drivers/acpi/utils.c ACPI_FREE(obj); obj 45 drivers/base/base.h #define to_subsys_private(obj) container_of(obj, struct subsys_private, subsys.kobj) obj 54 drivers/base/base.h #define to_driver(obj) container_of(obj, struct driver_private, kobj) 
obj 88 drivers/base/base.h #define to_device_private_parent(obj) \ obj 89 drivers/base/base.h container_of(obj, struct device_private, knode_parent) obj 90 drivers/base/base.h #define to_device_private_driver(obj) \ obj 91 drivers/base/base.h container_of(obj, struct device_private, knode_driver) obj 92 drivers/base/base.h #define to_device_private_bus(obj) \ obj 93 drivers/base/base.h container_of(obj, struct device_private, knode_bus) obj 94 drivers/base/base.h #define to_device_private_class(obj) \ obj 95 drivers/base/base.h container_of(obj, struct device_private, knode_class) obj 1736 drivers/base/core.c #define to_class_dir(obj) container_of(obj, struct class_dir, kobj) obj 660 drivers/base/node.c struct device *obj; obj 665 drivers/base/node.c obj = get_cpu_device(cpu); obj 666 drivers/base/node.c if (!obj) obj 670 drivers/base/node.c &obj->kobj, obj 671 drivers/base/node.c kobject_name(&obj->kobj)); obj 675 drivers/base/node.c return sysfs_create_link(&obj->kobj, obj 732 drivers/base/node.c struct device *obj; obj 737 drivers/base/node.c obj = get_cpu_device(cpu); obj 738 drivers/base/node.c if (!obj) obj 742 drivers/base/node.c kobject_name(&obj->kobj)); obj 743 drivers/base/node.c sysfs_remove_link(&obj->kobj, obj 90 drivers/block/drbd/drbd_int.h #define __drbd_printk_if_same_type(obj, type, func, level, fmt, args...) \ obj 91 drivers/block/drbd/drbd_int.h (__builtin_types_compatible_p(typeof(obj), type) || \ obj 92 drivers/block/drbd/drbd_int.h __builtin_types_compatible_p(typeof(obj), const type)), \ obj 93 drivers/block/drbd/drbd_int.h func(level, (const type)(obj), fmt, ## args) obj 95 drivers/block/drbd/drbd_int.h #define drbd_printk(level, obj, fmt, args...) \ obj 97 drivers/block/drbd/drbd_int.h __drbd_printk_if_same_type(obj, struct drbd_device *, \ obj 100 drivers/block/drbd/drbd_int.h __drbd_printk_if_same_type(obj, struct drbd_resource *, \ obj 103 drivers/block/drbd/drbd_int.h __drbd_printk_if_same_type(obj, struct drbd_connection *, \ obj 106 drivers/block/drbd/drbd_int.h __drbd_printk_if_same_type(obj, struct drbd_peer_device *, \ obj 110 drivers/block/drbd/drbd_int.h #define drbd_dbg(obj, fmt, args...) \ obj 111 drivers/block/drbd/drbd_int.h drbd_printk(KERN_DEBUG, obj, fmt, ## args) obj 112 drivers/block/drbd/drbd_int.h #define drbd_alert(obj, fmt, args...) \ obj 113 drivers/block/drbd/drbd_int.h drbd_printk(KERN_ALERT, obj, fmt, ## args) obj 114 drivers/block/drbd/drbd_int.h #define drbd_err(obj, fmt, args...) \ obj 115 drivers/block/drbd/drbd_int.h drbd_printk(KERN_ERR, obj, fmt, ## args) obj 116 drivers/block/drbd/drbd_int.h #define drbd_warn(obj, fmt, args...) \ obj 117 drivers/block/drbd/drbd_int.h drbd_printk(KERN_WARNING, obj, fmt, ## args) obj 118 drivers/block/drbd/drbd_int.h #define drbd_info(obj, fmt, args...) \ obj 119 drivers/block/drbd/drbd_int.h drbd_printk(KERN_INFO, obj, fmt, ## args) obj 120 drivers/block/drbd/drbd_int.h #define drbd_emerg(obj, fmt, args...) 
\ obj 121 drivers/block/drbd/drbd_int.h drbd_printk(KERN_EMERG, obj, fmt, ## args) obj 919 drivers/bluetooth/hci_bcm.c const union acpi_object *obj; obj 927 drivers/bluetooth/hci_bcm.c if (!acpi_dev_get_property(adev, "baud", ACPI_TYPE_BUFFER, &obj) && obj 928 drivers/bluetooth/hci_bcm.c obj->buffer.length == 8) obj 929 drivers/bluetooth/hci_bcm.c dev->init_speed = *(u64 *)obj->buffer.pointer; obj 475 drivers/char/agp/hp-agp.c zx1_gart_probe (acpi_handle obj, u32 depth, void *context, void **ret) obj 483 drivers/char/agp/hp-agp.c status = hp_acpi_csr_space(obj, &lba_hpa, &length); obj 488 drivers/char/agp/hp-agp.c handle = obj; obj 315 drivers/char/tpm/tpm_crb.c union acpi_object *obj; obj 318 drivers/char/tpm/tpm_crb.c obj = acpi_evaluate_dsm(chip->acpi_dev_handle, obj 323 drivers/char/tpm/tpm_crb.c if (!obj) obj 325 drivers/char/tpm/tpm_crb.c rc = obj->integer.value == 0 ? 0 : -ENXIO; obj 326 drivers/char/tpm/tpm_crb.c ACPI_FREE(obj); obj 62 drivers/char/tpm/tpm_ppi.c union acpi_object *obj; obj 70 drivers/char/tpm/tpm_ppi.c obj = tpm_eval_dsm(chip->acpi_dev_handle, TPM_PPI_FN_GETREQ, obj 72 drivers/char/tpm/tpm_ppi.c if (!obj) obj 81 drivers/char/tpm/tpm_ppi.c if (obj->package.count == 3 && obj 82 drivers/char/tpm/tpm_ppi.c obj->package.elements[0].type == ACPI_TYPE_INTEGER && obj 83 drivers/char/tpm/tpm_ppi.c obj->package.elements[1].type == ACPI_TYPE_INTEGER && obj 84 drivers/char/tpm/tpm_ppi.c obj->package.elements[2].type == ACPI_TYPE_INTEGER) { obj 85 drivers/char/tpm/tpm_ppi.c if (obj->package.elements[0].integer.value) obj 88 drivers/char/tpm/tpm_ppi.c req = obj->package.elements[1].integer.value; obj 92 drivers/char/tpm/tpm_ppi.c obj->package.elements[2].integer.value); obj 97 drivers/char/tpm/tpm_ppi.c } else if (obj->package.count == 2 && obj 98 drivers/char/tpm/tpm_ppi.c obj->package.elements[0].type == ACPI_TYPE_INTEGER && obj 99 drivers/char/tpm/tpm_ppi.c obj->package.elements[1].type == ACPI_TYPE_INTEGER) { obj 100 drivers/char/tpm/tpm_ppi.c if (obj->package.elements[0].integer.value) obj 104 drivers/char/tpm/tpm_ppi.c obj->package.elements[1].integer.value); obj 107 drivers/char/tpm/tpm_ppi.c ACPI_FREE(obj); obj 119 drivers/char/tpm/tpm_ppi.c union acpi_object *obj, tmp[2]; obj 160 drivers/char/tpm/tpm_ppi.c obj = tpm_eval_dsm(chip->acpi_dev_handle, func, ACPI_TYPE_INTEGER, obj 162 drivers/char/tpm/tpm_ppi.c if (!obj) { obj 165 drivers/char/tpm/tpm_ppi.c ret = obj->integer.value; obj 166 drivers/char/tpm/tpm_ppi.c ACPI_FREE(obj); obj 181 drivers/char/tpm/tpm_ppi.c union acpi_object *obj = NULL; obj 203 drivers/char/tpm/tpm_ppi.c obj = &tmp; obj 204 drivers/char/tpm/tpm_ppi.c obj = tpm_eval_dsm(chip->acpi_dev_handle, TPM_PPI_FN_GETACT, obj 205 drivers/char/tpm/tpm_ppi.c ACPI_TYPE_INTEGER, obj, TPM_PPI_REVISION_ID_1); obj 206 drivers/char/tpm/tpm_ppi.c if (!obj) { obj 209 drivers/char/tpm/tpm_ppi.c ret = obj->integer.value; obj 210 drivers/char/tpm/tpm_ppi.c ACPI_FREE(obj); obj 226 drivers/char/tpm/tpm_ppi.c union acpi_object *obj, *ret_obj; obj 230 drivers/char/tpm/tpm_ppi.c obj = tpm_eval_dsm(chip->acpi_dev_handle, TPM_PPI_FN_GETRSP, obj 232 drivers/char/tpm/tpm_ppi.c if (!obj) obj 242 drivers/char/tpm/tpm_ppi.c ret_obj = obj->package.elements; obj 243 drivers/char/tpm/tpm_ppi.c if (obj->package.count < 3 || obj 278 drivers/char/tpm/tpm_ppi.c ACPI_FREE(obj); obj 288 drivers/char/tpm/tpm_ppi.c union acpi_object *obj, tmp; obj 306 drivers/char/tpm/tpm_ppi.c obj = tpm_eval_dsm(dev_handle, TPM_PPI_FN_GETOPR, obj 309 drivers/char/tpm/tpm_ppi.c if (!obj) { obj 312 
drivers/char/tpm/tpm_ppi.c ret = obj->integer.value; obj 313 drivers/char/tpm/tpm_ppi.c ACPI_FREE(obj); obj 368 drivers/char/tpm/tpm_ppi.c union acpi_object *obj; obj 378 drivers/char/tpm/tpm_ppi.c obj = acpi_evaluate_dsm_typed(chip->acpi_dev_handle, &tpm_ppi_guid, obj 382 drivers/char/tpm/tpm_ppi.c if (obj) { obj 383 drivers/char/tpm/tpm_ppi.c strlcpy(chip->ppi_version, obj->string.pointer, obj 385 drivers/char/tpm/tpm_ppi.c ACPI_FREE(obj); obj 41 drivers/clk/bcm/clk-kona.h #define FLAG_SET(obj, type, flag) ((obj)->flags |= FLAG(type, flag)) obj 42 drivers/clk/bcm/clk-kona.h #define FLAG_CLEAR(obj, type, flag) ((obj)->flags &= ~(FLAG(type, flag))) obj 43 drivers/clk/bcm/clk-kona.h #define FLAG_FLIP(obj, type, flag) ((obj)->flags ^= FLAG(type, flag)) obj 44 drivers/clk/bcm/clk-kona.h #define FLAG_TEST(obj, type, flag) (!!((obj)->flags & FLAG(type, flag))) obj 57 drivers/clk/hisilicon/clk-hi6220-stub.c unsigned char obj; obj 86 drivers/clk/hisilicon/clk-hi6220-stub.c data.msg.obj = HI6220_MBOX_OBJ_AP; obj 121 drivers/crypto/caam/caamalg_qi2.c static inline void qi_cache_free(void *obj) obj 123 drivers/crypto/caam/caamalg_qi2.c kmem_cache_free(qi_cache, obj); obj 467 drivers/crypto/caam/qi.c void qi_cache_free(void *obj) obj 469 drivers/crypto/caam/qi.c kmem_cache_free(qi_cache, obj); obj 176 drivers/crypto/caam/qi.h void qi_cache_free(void *obj); obj 102 drivers/dma-buf/dma-resv.c void dma_resv_init(struct dma_resv *obj) obj 104 drivers/dma-buf/dma-resv.c ww_mutex_init(&obj->lock, &reservation_ww_class); obj 106 drivers/dma-buf/dma-resv.c __seqcount_init(&obj->seq, reservation_seqcount_string, obj 108 drivers/dma-buf/dma-resv.c RCU_INIT_POINTER(obj->fence, NULL); obj 109 drivers/dma-buf/dma-resv.c RCU_INIT_POINTER(obj->fence_excl, NULL); obj 117 drivers/dma-buf/dma-resv.c void dma_resv_fini(struct dma_resv *obj) obj 126 drivers/dma-buf/dma-resv.c excl = rcu_dereference_protected(obj->fence_excl, 1); obj 130 drivers/dma-buf/dma-resv.c fobj = rcu_dereference_protected(obj->fence, 1); obj 132 drivers/dma-buf/dma-resv.c ww_mutex_destroy(&obj->lock); obj 148 drivers/dma-buf/dma-resv.c int dma_resv_reserve_shared(struct dma_resv *obj, unsigned int num_fences) obj 153 drivers/dma-buf/dma-resv.c dma_resv_assert_held(obj); obj 155 drivers/dma-buf/dma-resv.c old = dma_resv_get_list(obj); obj 181 drivers/dma-buf/dma-resv.c dma_resv_held(obj)); obj 197 drivers/dma-buf/dma-resv.c rcu_assign_pointer(obj->fence, new); obj 207 drivers/dma-buf/dma-resv.c dma_resv_held(obj)); obj 224 drivers/dma-buf/dma-resv.c void dma_resv_add_shared_fence(struct dma_resv *obj, struct dma_fence *fence) obj 232 drivers/dma-buf/dma-resv.c dma_resv_assert_held(obj); obj 234 drivers/dma-buf/dma-resv.c fobj = dma_resv_get_list(obj); obj 238 drivers/dma-buf/dma-resv.c write_seqcount_begin(&obj->seq); obj 243 drivers/dma-buf/dma-resv.c dma_resv_held(obj)); obj 258 drivers/dma-buf/dma-resv.c write_seqcount_end(&obj->seq); obj 271 drivers/dma-buf/dma-resv.c void dma_resv_add_excl_fence(struct dma_resv *obj, struct dma_fence *fence) obj 273 drivers/dma-buf/dma-resv.c struct dma_fence *old_fence = dma_resv_get_excl(obj); obj 277 drivers/dma-buf/dma-resv.c dma_resv_assert_held(obj); obj 279 drivers/dma-buf/dma-resv.c old = dma_resv_get_list(obj); obj 287 drivers/dma-buf/dma-resv.c write_seqcount_begin(&obj->seq); obj 289 drivers/dma-buf/dma-resv.c RCU_INIT_POINTER(obj->fence_excl, fence); obj 292 drivers/dma-buf/dma-resv.c write_seqcount_end(&obj->seq); obj 298 drivers/dma-buf/dma-resv.c dma_resv_held(obj))); obj 399 
drivers/dma-buf/dma-resv.c int dma_resv_get_fences_rcu(struct dma_resv *obj, obj 417 drivers/dma-buf/dma-resv.c seq = read_seqcount_begin(&obj->seq); obj 419 drivers/dma-buf/dma-resv.c fence_excl = rcu_dereference(obj->fence_excl); obj 423 drivers/dma-buf/dma-resv.c fobj = rcu_dereference(obj->fence); obj 459 drivers/dma-buf/dma-resv.c if (i != shared_count || read_seqcount_retry(&obj->seq, seq)) { obj 499 drivers/dma-buf/dma-resv.c long dma_resv_wait_timeout_rcu(struct dma_resv *obj, obj 510 drivers/dma-buf/dma-resv.c seq = read_seqcount_begin(&obj->seq); obj 514 drivers/dma-buf/dma-resv.c fence = rcu_dereference(obj->fence_excl); obj 529 drivers/dma-buf/dma-resv.c struct dma_resv_list *fobj = rcu_dereference(obj->fence); obj 556 drivers/dma-buf/dma-resv.c if (read_seqcount_retry(&obj->seq, seq)) { obj 601 drivers/dma-buf/dma-resv.c bool dma_resv_test_signaled_rcu(struct dma_resv *obj, bool test_all) obj 610 drivers/dma-buf/dma-resv.c seq = read_seqcount_begin(&obj->seq); obj 615 drivers/dma-buf/dma-resv.c struct dma_resv_list *fobj = rcu_dereference(obj->fence); obj 630 drivers/dma-buf/dma-resv.c if (read_seqcount_retry(&obj->seq, seq)) obj 635 drivers/dma-buf/dma-resv.c struct dma_fence *fence_excl = rcu_dereference(obj->fence_excl); obj 642 drivers/dma-buf/dma-resv.c if (read_seqcount_retry(&obj->seq, seq)) obj 80 drivers/dma-buf/sw_sync.c struct sync_timeline *obj; obj 82 drivers/dma-buf/sw_sync.c obj = kzalloc(sizeof(*obj), GFP_KERNEL); obj 83 drivers/dma-buf/sw_sync.c if (!obj) obj 86 drivers/dma-buf/sw_sync.c kref_init(&obj->kref); obj 87 drivers/dma-buf/sw_sync.c obj->context = dma_fence_context_alloc(1); obj 88 drivers/dma-buf/sw_sync.c strlcpy(obj->name, name, sizeof(obj->name)); obj 90 drivers/dma-buf/sw_sync.c obj->pt_tree = RB_ROOT; obj 91 drivers/dma-buf/sw_sync.c INIT_LIST_HEAD(&obj->pt_list); obj 92 drivers/dma-buf/sw_sync.c spin_lock_init(&obj->lock); obj 94 drivers/dma-buf/sw_sync.c sync_timeline_debug_add(obj); obj 96 drivers/dma-buf/sw_sync.c return obj; obj 101 drivers/dma-buf/sw_sync.c struct sync_timeline *obj = obj 104 drivers/dma-buf/sw_sync.c sync_timeline_debug_remove(obj); obj 106 drivers/dma-buf/sw_sync.c kfree(obj); obj 109 drivers/dma-buf/sw_sync.c static void sync_timeline_get(struct sync_timeline *obj) obj 111 drivers/dma-buf/sw_sync.c kref_get(&obj->kref); obj 114 drivers/dma-buf/sw_sync.c static void sync_timeline_put(struct sync_timeline *obj) obj 116 drivers/dma-buf/sw_sync.c kref_put(&obj->kref, sync_timeline_free); obj 192 drivers/dma-buf/sw_sync.c static void sync_timeline_signal(struct sync_timeline *obj, unsigned int inc) obj 196 drivers/dma-buf/sw_sync.c trace_sync_timeline(obj); obj 198 drivers/dma-buf/sw_sync.c spin_lock_irq(&obj->lock); obj 200 drivers/dma-buf/sw_sync.c obj->value += inc; obj 202 drivers/dma-buf/sw_sync.c list_for_each_entry_safe(pt, next, &obj->pt_list, link) { obj 207 drivers/dma-buf/sw_sync.c rb_erase(&pt->node, &obj->pt_tree); obj 220 drivers/dma-buf/sw_sync.c spin_unlock_irq(&obj->lock); obj 233 drivers/dma-buf/sw_sync.c static struct sync_pt *sync_pt_create(struct sync_timeline *obj, obj 242 drivers/dma-buf/sw_sync.c sync_timeline_get(obj); obj 243 drivers/dma-buf/sw_sync.c dma_fence_init(&pt->base, &timeline_fence_ops, &obj->lock, obj 244 drivers/dma-buf/sw_sync.c obj->context, value); obj 247 drivers/dma-buf/sw_sync.c spin_lock_irq(&obj->lock); obj 249 drivers/dma-buf/sw_sync.c struct rb_node **p = &obj->pt_tree.rb_node; obj 265 drivers/dma-buf/sw_sync.c sync_timeline_put(obj); obj 274 drivers/dma-buf/sw_sync.c 
rb_insert_color(&pt->node, &obj->pt_tree); obj 278 drivers/dma-buf/sw_sync.c parent ? &rb_entry(parent, typeof(*pt), node)->link : &obj->pt_list); obj 281 drivers/dma-buf/sw_sync.c spin_unlock_irq(&obj->lock); obj 295 drivers/dma-buf/sw_sync.c struct sync_timeline *obj; obj 300 drivers/dma-buf/sw_sync.c obj = sync_timeline_create(task_comm); obj 301 drivers/dma-buf/sw_sync.c if (!obj) obj 304 drivers/dma-buf/sw_sync.c file->private_data = obj; obj 311 drivers/dma-buf/sw_sync.c struct sync_timeline *obj = file->private_data; obj 314 drivers/dma-buf/sw_sync.c spin_lock_irq(&obj->lock); obj 316 drivers/dma-buf/sw_sync.c list_for_each_entry_safe(pt, next, &obj->pt_list, link) { obj 321 drivers/dma-buf/sw_sync.c spin_unlock_irq(&obj->lock); obj 323 drivers/dma-buf/sw_sync.c sync_timeline_put(obj); obj 327 drivers/dma-buf/sw_sync.c static long sw_sync_ioctl_create_fence(struct sync_timeline *obj, obj 344 drivers/dma-buf/sw_sync.c pt = sync_pt_create(obj, data.value); obj 373 drivers/dma-buf/sw_sync.c static long sw_sync_ioctl_inc(struct sync_timeline *obj, unsigned long arg) obj 381 drivers/dma-buf/sw_sync.c sync_timeline_signal(obj, INT_MAX); obj 385 drivers/dma-buf/sw_sync.c sync_timeline_signal(obj, value); obj 393 drivers/dma-buf/sw_sync.c struct sync_timeline *obj = file->private_data; obj 397 drivers/dma-buf/sw_sync.c return sw_sync_ioctl_create_fence(obj, arg); obj 400 drivers/dma-buf/sw_sync.c return sw_sync_ioctl_inc(obj, arg); obj 18 drivers/dma-buf/sync_debug.c void sync_timeline_debug_add(struct sync_timeline *obj) obj 23 drivers/dma-buf/sync_debug.c list_add_tail(&obj->sync_timeline_list, &sync_timeline_list_head); obj 27 drivers/dma-buf/sync_debug.c void sync_timeline_debug_remove(struct sync_timeline *obj) obj 32 drivers/dma-buf/sync_debug.c list_del(&obj->sync_timeline_list); obj 107 drivers/dma-buf/sync_debug.c static void sync_print_obj(struct seq_file *s, struct sync_timeline *obj) obj 111 drivers/dma-buf/sync_debug.c seq_printf(s, "%s: %d\n", obj->name, obj->value); obj 113 drivers/dma-buf/sync_debug.c spin_lock_irq(&obj->lock); obj 114 drivers/dma-buf/sync_debug.c list_for_each(pos, &obj->pt_list) { obj 118 drivers/dma-buf/sync_debug.c spin_unlock_irq(&obj->lock); obj 149 drivers/dma-buf/sync_debug.c struct sync_timeline *obj = obj 153 drivers/dma-buf/sync_debug.c sync_print_obj(s, obj); obj 67 drivers/dma-buf/sync_debug.h void sync_timeline_debug_add(struct sync_timeline *obj); obj 68 drivers/dma-buf/sync_debug.h void sync_timeline_debug_remove(struct sync_timeline *obj); obj 102 drivers/firmware/edd.c #define to_edd_device(obj) container_of(obj,struct edd_device,kobj) obj 58 drivers/firmware/efi/efivars.c #define to_efivar_entry(obj) container_of(obj, struct efivar_entry, kobj) obj 1250 drivers/gpio/gpiolib-acpi.c static int acpi_gpio_package_count(const union acpi_object *obj) obj 1252 drivers/gpio/gpiolib-acpi.c const union acpi_object *element = obj->package.elements; obj 1253 drivers/gpio/gpiolib-acpi.c const union acpi_object *end = element + obj->package.count; obj 1296 drivers/gpio/gpiolib-acpi.c const union acpi_object *obj; obj 1313 drivers/gpio/gpiolib-acpi.c &obj); obj 1315 drivers/gpio/gpiolib-acpi.c if (obj->type == ACPI_TYPE_LOCAL_REFERENCE) obj 1317 drivers/gpio/gpiolib-acpi.c else if (obj->type == ACPI_TYPE_PACKAGE) obj 1318 drivers/gpio/gpiolib-acpi.c count = acpi_gpio_package_count(obj); obj 510 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c struct drm_gem_object *obj; obj 523 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c obj = dma_buf->priv; obj 524 
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c if (obj->dev->driver != adev->ddev->driver) obj 528 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c adev = obj->dev->dev_private; obj 529 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c bo = gem_to_amdgpu_bo(obj); obj 1598 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c struct drm_gem_object *obj; obj 1606 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c obj = dma_buf->priv; obj 1607 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c if (obj->dev->dev_private != adev) obj 1611 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c bo = gem_to_amdgpu_bo(obj); obj 243 drivers/gpu/drm/amd/amdgpu/amdgpu_bios.c union acpi_object atrm_arg_elements[2], *obj; obj 262 drivers/gpu/drm/amd/amdgpu/amdgpu_bios.c obj = (union acpi_object *)buffer.pointer; obj 263 drivers/gpu/drm/amd/amdgpu/amdgpu_bios.c memcpy(bios+offset, obj->buffer.pointer, obj->buffer.length); obj 264 drivers/gpu/drm/amd/amdgpu/amdgpu_bios.c len = obj->buffer.length; obj 3058 drivers/gpu/drm/amd/amdgpu/amdgpu_device.c if (fb == NULL || fb->obj[0] == NULL) { obj 3061 drivers/gpu/drm/amd/amdgpu/amdgpu_device.c robj = gem_to_amdgpu_bo(fb->obj[0]); obj 157 drivers/gpu/drm/amd/amdgpu/amdgpu_display.c struct drm_gem_object *obj; obj 177 drivers/gpu/drm/amd/amdgpu/amdgpu_display.c obj = crtc->primary->fb->obj[0]; obj 180 drivers/gpu/drm/amd/amdgpu/amdgpu_display.c work->old_abo = gem_to_amdgpu_bo(obj); obj 183 drivers/gpu/drm/amd/amdgpu/amdgpu_display.c obj = fb->obj[0]; obj 184 drivers/gpu/drm/amd/amdgpu/amdgpu_display.c new_abo = gem_to_amdgpu_bo(obj); obj 528 drivers/gpu/drm/amd/amdgpu/amdgpu_display.c struct drm_gem_object *obj) obj 531 drivers/gpu/drm/amd/amdgpu/amdgpu_display.c rfb->base.obj[0] = obj; obj 535 drivers/gpu/drm/amd/amdgpu/amdgpu_display.c rfb->base.obj[0] = NULL; obj 546 drivers/gpu/drm/amd/amdgpu/amdgpu_display.c struct drm_gem_object *obj; obj 550 drivers/gpu/drm/amd/amdgpu/amdgpu_display.c obj = drm_gem_object_lookup(file_priv, mode_cmd->handles[0]); obj 551 drivers/gpu/drm/amd/amdgpu/amdgpu_display.c if (obj == NULL) { obj 558 drivers/gpu/drm/amd/amdgpu/amdgpu_display.c if (obj->import_attach) { obj 565 drivers/gpu/drm/amd/amdgpu/amdgpu_display.c drm_gem_object_put_unlocked(obj); obj 569 drivers/gpu/drm/amd/amdgpu/amdgpu_display.c ret = amdgpu_display_framebuffer_init(dev, amdgpu_fb, mode_cmd, obj); obj 572 drivers/gpu/drm/amd/amdgpu/amdgpu_display.c drm_gem_object_put_unlocked(obj); obj 49 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c struct sg_table *amdgpu_gem_prime_get_sg_table(struct drm_gem_object *obj) obj 51 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj); obj 66 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c void *amdgpu_gem_prime_vmap(struct drm_gem_object *obj) obj 68 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj); obj 86 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c void amdgpu_gem_prime_vunmap(struct drm_gem_object *obj, void *vaddr) obj 88 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj); obj 104 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c int amdgpu_gem_prime_mmap(struct drm_gem_object *obj, obj 107 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj); obj 129 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c ret = drm_vma_node_allow(&obj->vma_node, vma->vm_file->private_data); obj 134 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c drm_vma_node_revoke(&obj->vma_node, vma->vm_file->private_data); obj 140 
drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c __dma_resv_make_exclusive(struct dma_resv *obj) obj 146 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c if (!dma_resv_get_list(obj)) /* no shared fences to convert */ obj 149 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c r = dma_resv_get_fences_rcu(obj, NULL, &count, &fences); obj 156 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c dma_resv_add_excl_fence(obj, fences[0]); obj 168 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c dma_resv_add_excl_fence(obj, &array->base); obj 196 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c struct drm_gem_object *obj = dma_buf->priv; obj 197 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj); obj 252 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c struct drm_gem_object *obj = dma_buf->priv; obj 253 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj); obj 417 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c struct drm_gem_object *obj; obj 420 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c obj = dma_buf->priv; obj 421 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c if (obj->dev == dev) { obj 426 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c drm_gem_object_get(obj); obj 427 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c return obj; obj 28 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.h struct sg_table *amdgpu_gem_prime_get_sg_table(struct drm_gem_object *obj); obj 37 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.h void *amdgpu_gem_prime_vmap(struct drm_gem_object *obj); obj 38 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.h void amdgpu_gem_prime_vunmap(struct drm_gem_object *obj, void *vaddr); obj 39 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.h int amdgpu_gem_prime_mmap(struct drm_gem_object *obj, obj 295 drivers/gpu/drm/amd/amdgpu/amdgpu_fb.c if (rfb->base.obj[0]) { obj 296 drivers/gpu/drm/amd/amdgpu/amdgpu_fb.c amdgpufb_destroy_pinned_object(rfb->base.obj[0]); obj 297 drivers/gpu/drm/amd/amdgpu/amdgpu_fb.c rfb->base.obj[0] = NULL; obj 380 drivers/gpu/drm/amd/amdgpu/amdgpu_fb.c robj = gem_to_amdgpu_bo(adev->mode_info.rfbdev->rfb.base.obj[0]); obj 389 drivers/gpu/drm/amd/amdgpu/amdgpu_fb.c if (robj == gem_to_amdgpu_bo(adev->mode_info.rfbdev->rfb.base.obj[0])) obj 54 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c struct drm_gem_object **obj) obj 61 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c *obj = NULL; obj 88 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c *obj = &bo->tbo.base; obj 121 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c int amdgpu_gem_object_open(struct drm_gem_object *obj, obj 124 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c struct amdgpu_bo *abo = gem_to_amdgpu_bo(obj); obj 154 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c void amdgpu_gem_object_close(struct drm_gem_object *obj, obj 157 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj); obj 36 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.h void amdgpu_gem_object_free(struct drm_gem_object *obj); obj 37 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.h int amdgpu_gem_object_open(struct drm_gem_object *obj, obj 39 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.h void amdgpu_gem_object_close(struct drm_gem_object *obj, obj 51 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.h struct drm_gem_object **obj); obj 602 drivers/gpu/drm/amd/amdgpu/amdgpu_mode.h struct drm_gem_object *obj); obj 71 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c struct ras_manager *obj = (struct ras_manager *)file_inode(f)->i_private; obj 73 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c .head = obj->head, obj 78 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c if (amdgpu_ras_error_query(obj->adev, &info)) obj 297 
drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c struct ras_manager *obj = container_of(attr, struct ras_manager, sysfs_attr); obj 299 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c .head = obj->head, obj 302 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c if (amdgpu_ras_error_query(obj->adev, &info)) obj 312 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c #define get_obj(obj) do { (obj)->use++; } while (0) obj 313 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c #define alive_obj(obj) ((obj)->use) obj 315 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c static inline void put_obj(struct ras_manager *obj) obj 317 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c if (obj && --obj->use == 0) obj 318 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c list_del(&obj->node); obj 319 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c if (obj && obj->use < 0) { obj 320 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c DRM_ERROR("RAS ERROR: Unbalance obj(%s) use\n", obj->head.name); obj 329 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c struct ras_manager *obj; obj 337 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c obj = &con->objs[head->block]; obj 339 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c if (alive_obj(obj)) obj 342 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c obj->head = *head; obj 343 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c obj->adev = adev; obj 344 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c list_add(&obj->node, &con->head); obj 345 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c get_obj(obj); obj 347 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c return obj; obj 355 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c struct ras_manager *obj; obj 365 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c obj = &con->objs[head->block]; obj 367 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c if (alive_obj(obj)) { obj 368 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c WARN_ON(head->block != obj->head.block); obj 369 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c return obj; obj 373 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c obj = &con->objs[i]; obj 374 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c if (alive_obj(obj)) { obj 375 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c WARN_ON(i != obj->head.block); obj 376 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c return obj; obj 410 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c struct ras_manager *obj = amdgpu_ras_find_obj(adev, head); obj 424 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c if (!obj) { obj 425 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c obj = amdgpu_ras_create_obj(adev, head); obj 426 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c if (!obj) obj 430 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c get_obj(obj); obj 434 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c if (obj && amdgpu_ras_is_feature_enabled(adev, head)) { obj 436 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c put_obj(obj); obj 536 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c struct ras_manager *obj, *tmp; obj 538 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c list_for_each_entry_safe(obj, tmp, &con->head, node) { obj 543 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c if (__amdgpu_ras_feature_enable(adev, &obj->head, 0)) obj 546 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c if (amdgpu_ras_feature_enable(adev, &obj->head, 0)) obj 591 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c struct ras_manager *obj = amdgpu_ras_find_obj(adev, &info->head); obj 594 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c if (!obj) obj 619 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c obj->err_data.ue_count += err_data.ue_count; obj 620 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c obj->err_data.ce_count += err_data.ce_count; obj 622 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c info->ue_count = obj->err_data.ue_count; obj 623 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c info->ce_count = 
obj->err_data.ce_count; obj 627 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c obj->err_data.ce_count, ras_block_str(info->head.block)); obj 630 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c obj->err_data.ue_count, ras_block_str(info->head.block)); obj 639 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c struct ras_manager *obj = amdgpu_ras_find_obj(adev, &info->head); obj 649 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c if (!obj) obj 689 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c struct ras_manager *obj; obj 695 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c list_for_each_entry(obj, &con->head, node) { obj 697 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c .head = obj->head, obj 860 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c struct ras_manager *obj = amdgpu_ras_find_obj(adev, &head->head); obj 862 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c if (!obj || obj->attr_inuse) obj 865 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c get_obj(obj); obj 867 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c memcpy(obj->fs_data.sysfs_name, obj 869 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c sizeof(obj->fs_data.sysfs_name)); obj 871 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c obj->sysfs_attr = (struct device_attribute){ obj 873 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c .name = obj->fs_data.sysfs_name, obj 878 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c sysfs_attr_init(&obj->sysfs_attr.attr); obj 881 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c &obj->sysfs_attr.attr, obj 883 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c put_obj(obj); obj 887 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c obj->attr_inuse = 1; obj 895 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c struct ras_manager *obj = amdgpu_ras_find_obj(adev, head); obj 897 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c if (!obj || !obj->attr_inuse) obj 901 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c &obj->sysfs_attr.attr, obj 903 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c obj->attr_inuse = 0; obj 904 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c put_obj(obj); obj 912 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c struct ras_manager *obj, *tmp; obj 914 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c list_for_each_entry_safe(obj, tmp, &con->head, node) { obj 915 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c amdgpu_ras_sysfs_remove(adev, &obj->head); obj 939 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c struct ras_manager *obj = amdgpu_ras_find_obj(adev, &head->head); obj 941 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c if (!obj || obj->ent) obj 944 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c get_obj(obj); obj 946 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c memcpy(obj->fs_data.debugfs_name, obj 948 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c sizeof(obj->fs_data.debugfs_name)); obj 950 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c obj->ent = debugfs_create_file(obj->fs_data.debugfs_name, obj 951 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c S_IWUGO | S_IRUGO, con->dir, obj, obj 958 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c struct ras_manager *obj = amdgpu_ras_find_obj(adev, head); obj 960 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c if (!obj || !obj->ent) obj 963 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c debugfs_remove(obj->ent); obj 964 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c obj->ent = NULL; obj 965 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c put_obj(obj); obj 971 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c struct ras_manager *obj, *tmp; obj 973 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c list_for_each_entry_safe(obj, tmp, &con->head, node) { obj 974 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c amdgpu_ras_debugfs_remove(adev, &obj->head); obj 1003 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c static void amdgpu_ras_interrupt_handler(struct ras_manager *obj) obj 
1005 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c struct ras_ih_data *data = &obj->ih_data; obj 1023 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c ret = data->cb(obj->adev, &err_data, &entry); obj 1033 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c obj->err_data.ue_count += err_data.ue_count; obj 1034 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c obj->err_data.ce_count += err_data.ce_count; obj 1044 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c struct ras_manager *obj = obj 1047 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c amdgpu_ras_interrupt_handler(obj); obj 1053 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c struct ras_manager *obj = amdgpu_ras_find_obj(adev, &info->head); obj 1054 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c struct ras_ih_data *data = &obj->ih_data; obj 1056 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c if (!obj) obj 1078 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c struct ras_manager *obj = amdgpu_ras_find_obj(adev, &info->head); obj 1081 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c if (!obj) obj 1084 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c data = &obj->ih_data; obj 1092 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c put_obj(obj); obj 1100 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c struct ras_manager *obj = amdgpu_ras_find_obj(adev, &info->head); obj 1103 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c if (!obj) { obj 1105 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c obj = amdgpu_ras_create_obj(adev, &info->head); obj 1106 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c if (!obj) obj 1109 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c get_obj(obj); obj 1111 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c data = &obj->ih_data; obj 1128 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c put_obj(obj); obj 1141 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c struct ras_manager *obj, *tmp; obj 1143 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c list_for_each_entry_safe(obj, tmp, &con->head, node) { obj 1145 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c .head = obj->head, obj 1488 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c struct ras_manager *obj, *tmp; obj 1505 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c list_for_each_entry_safe(obj, tmp, &con->head, node) { obj 1506 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c if (!amdgpu_ras_is_supported(adev, obj->head.block)) { obj 1507 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c amdgpu_ras_feature_enable(adev, &obj->head, 0); obj 1509 drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c WARN_ON(alive_obj(obj)); obj 310 drivers/gpu/drm/amd/amdgpu/amdgpu_virt.c int amdgpu_virt_fw_reserve_get_checksum(void *obj, obj 319 drivers/gpu/drm/amd/amdgpu/amdgpu_virt.c pos = (char *)obj; obj 305 drivers/gpu/drm/amd/amdgpu/amdgpu_virt.h int amdgpu_virt_fw_reserve_get_checksum(void *obj, unsigned long obj_size, obj 1837 drivers/gpu/drm/amd/amdgpu/dce_v10_0.c struct drm_gem_object *obj; obj 1862 drivers/gpu/drm/amd/amdgpu/dce_v10_0.c obj = target_fb->obj[0]; obj 1863 drivers/gpu/drm/amd/amdgpu/dce_v10_0.c abo = gem_to_amdgpu_bo(obj); obj 2062 drivers/gpu/drm/amd/amdgpu/dce_v10_0.c abo = gem_to_amdgpu_bo(fb->obj[0]); obj 2363 drivers/gpu/drm/amd/amdgpu/dce_v10_0.c struct drm_gem_object *obj; obj 2370 drivers/gpu/drm/amd/amdgpu/dce_v10_0.c obj = NULL; obj 2380 drivers/gpu/drm/amd/amdgpu/dce_v10_0.c obj = drm_gem_object_lookup(file_priv, handle); obj 2381 drivers/gpu/drm/amd/amdgpu/dce_v10_0.c if (!obj) { obj 2386 drivers/gpu/drm/amd/amdgpu/dce_v10_0.c aobj = gem_to_amdgpu_bo(obj); obj 2389 drivers/gpu/drm/amd/amdgpu/dce_v10_0.c drm_gem_object_put_unlocked(obj); obj 2397 drivers/gpu/drm/amd/amdgpu/dce_v10_0.c drm_gem_object_put_unlocked(obj); obj 2435 drivers/gpu/drm/amd/amdgpu/dce_v10_0.c amdgpu_crtc->cursor_bo = 
obj; obj 2547 drivers/gpu/drm/amd/amdgpu/dce_v10_0.c abo = gem_to_amdgpu_bo(crtc->primary->fb->obj[0]); obj 1879 drivers/gpu/drm/amd/amdgpu/dce_v11_0.c struct drm_gem_object *obj; obj 1904 drivers/gpu/drm/amd/amdgpu/dce_v11_0.c obj = target_fb->obj[0]; obj 1905 drivers/gpu/drm/amd/amdgpu/dce_v11_0.c abo = gem_to_amdgpu_bo(obj); obj 2104 drivers/gpu/drm/amd/amdgpu/dce_v11_0.c abo = gem_to_amdgpu_bo(fb->obj[0]); obj 2442 drivers/gpu/drm/amd/amdgpu/dce_v11_0.c struct drm_gem_object *obj; obj 2449 drivers/gpu/drm/amd/amdgpu/dce_v11_0.c obj = NULL; obj 2459 drivers/gpu/drm/amd/amdgpu/dce_v11_0.c obj = drm_gem_object_lookup(file_priv, handle); obj 2460 drivers/gpu/drm/amd/amdgpu/dce_v11_0.c if (!obj) { obj 2465 drivers/gpu/drm/amd/amdgpu/dce_v11_0.c aobj = gem_to_amdgpu_bo(obj); obj 2468 drivers/gpu/drm/amd/amdgpu/dce_v11_0.c drm_gem_object_put_unlocked(obj); obj 2476 drivers/gpu/drm/amd/amdgpu/dce_v11_0.c drm_gem_object_put_unlocked(obj); obj 2514 drivers/gpu/drm/amd/amdgpu/dce_v11_0.c amdgpu_crtc->cursor_bo = obj; obj 2626 drivers/gpu/drm/amd/amdgpu/dce_v11_0.c abo = gem_to_amdgpu_bo(crtc->primary->fb->obj[0]); obj 1795 drivers/gpu/drm/amd/amdgpu/dce_v6_0.c struct drm_gem_object *obj; obj 1819 drivers/gpu/drm/amd/amdgpu/dce_v6_0.c obj = target_fb->obj[0]; obj 1820 drivers/gpu/drm/amd/amdgpu/dce_v6_0.c abo = gem_to_amdgpu_bo(obj); obj 1997 drivers/gpu/drm/amd/amdgpu/dce_v6_0.c abo = gem_to_amdgpu_bo(fb->obj[0]); obj 2256 drivers/gpu/drm/amd/amdgpu/dce_v6_0.c struct drm_gem_object *obj; obj 2263 drivers/gpu/drm/amd/amdgpu/dce_v6_0.c obj = NULL; obj 2273 drivers/gpu/drm/amd/amdgpu/dce_v6_0.c obj = drm_gem_object_lookup(file_priv, handle); obj 2274 drivers/gpu/drm/amd/amdgpu/dce_v6_0.c if (!obj) { obj 2279 drivers/gpu/drm/amd/amdgpu/dce_v6_0.c aobj = gem_to_amdgpu_bo(obj); obj 2282 drivers/gpu/drm/amd/amdgpu/dce_v6_0.c drm_gem_object_put_unlocked(obj); obj 2290 drivers/gpu/drm/amd/amdgpu/dce_v6_0.c drm_gem_object_put_unlocked(obj); obj 2328 drivers/gpu/drm/amd/amdgpu/dce_v6_0.c amdgpu_crtc->cursor_bo = obj; obj 2435 drivers/gpu/drm/amd/amdgpu/dce_v6_0.c abo = gem_to_amdgpu_bo(crtc->primary->fb->obj[0]); obj 1766 drivers/gpu/drm/amd/amdgpu/dce_v8_0.c struct drm_gem_object *obj; obj 1791 drivers/gpu/drm/amd/amdgpu/dce_v8_0.c obj = target_fb->obj[0]; obj 1792 drivers/gpu/drm/amd/amdgpu/dce_v8_0.c abo = gem_to_amdgpu_bo(obj); obj 1971 drivers/gpu/drm/amd/amdgpu/dce_v8_0.c abo = gem_to_amdgpu_bo(fb->obj[0]); obj 2264 drivers/gpu/drm/amd/amdgpu/dce_v8_0.c struct drm_gem_object *obj; obj 2271 drivers/gpu/drm/amd/amdgpu/dce_v8_0.c obj = NULL; obj 2281 drivers/gpu/drm/amd/amdgpu/dce_v8_0.c obj = drm_gem_object_lookup(file_priv, handle); obj 2282 drivers/gpu/drm/amd/amdgpu/dce_v8_0.c if (!obj) { obj 2287 drivers/gpu/drm/amd/amdgpu/dce_v8_0.c aobj = gem_to_amdgpu_bo(obj); obj 2290 drivers/gpu/drm/amd/amdgpu/dce_v8_0.c drm_gem_object_put_unlocked(obj); obj 2298 drivers/gpu/drm/amd/amdgpu/dce_v8_0.c drm_gem_object_put_unlocked(obj); obj 2336 drivers/gpu/drm/amd/amdgpu/dce_v8_0.c amdgpu_crtc->cursor_bo = obj; obj 2448 drivers/gpu/drm/amd/amdgpu/dce_v8_0.c abo = gem_to_amdgpu_bo(crtc->primary->fb->obj[0]); obj 1935 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c struct drm_private_obj *obj; obj 1939 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c for_each_new_private_obj_in_state(state, obj, new_obj_state, i) { obj 1940 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c if (obj->funcs == dm->atomic_obj.funcs) obj 1953 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c struct drm_private_obj *obj; obj 1957 
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c for_each_old_private_obj_in_state(state, obj, old_obj_state, i) { obj 1958 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c if (obj->funcs == dm->atomic_obj.funcs) obj 1966 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c dm_atomic_duplicate_state(struct drm_private_obj *obj) obj 1974 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c __drm_atomic_helper_private_obj_duplicate_state(obj, &new_state->base); obj 1976 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c old_state = to_dm_atomic_state(obj->state); obj 1989 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c static void dm_atomic_destroy_state(struct drm_private_obj *obj, obj 2672 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c struct amdgpu_bo *rbo = gem_to_amdgpu_bo(amdgpu_fb->base.obj[0]); obj 4490 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c struct drm_gem_object *obj; obj 4511 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c obj = new_state->fb->obj[0]; obj 4512 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c rbo = gem_to_amdgpu_bo(obj); obj 4580 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c rbo = gem_to_amdgpu_bo(old_state->fb->obj[0]); obj 5759 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c abo = gem_to_amdgpu_bo(fb->obj[0]); obj 22 drivers/gpu/drm/arm/display/komeda/komeda_framebuffer.c drm_gem_object_put_unlocked(fb->obj[i]); obj 31 drivers/gpu/drm/arm/display/komeda/komeda_framebuffer.c return drm_gem_handle_create(file, fb->obj[0], handle); obj 45 drivers/gpu/drm/arm/display/komeda/komeda_framebuffer.c struct drm_gem_object *obj; obj 49 drivers/gpu/drm/arm/display/komeda/komeda_framebuffer.c obj = drm_gem_object_lookup(file, mode_cmd->handles[0]); obj 50 drivers/gpu/drm/arm/display/komeda/komeda_framebuffer.c if (!obj) { obj 96 drivers/gpu/drm/arm/display/komeda/komeda_framebuffer.c if (min_size > obj->size) { obj 98 drivers/gpu/drm/arm/display/komeda/komeda_framebuffer.c obj->size, min_size); obj 102 drivers/gpu/drm/arm/display/komeda/komeda_framebuffer.c fb->obj[0] = obj; obj 106 drivers/gpu/drm/arm/display/komeda/komeda_framebuffer.c drm_gem_object_put_unlocked(obj); obj 117 drivers/gpu/drm/arm/display/komeda/komeda_framebuffer.c struct drm_gem_object *obj; obj 125 drivers/gpu/drm/arm/display/komeda/komeda_framebuffer.c obj = drm_gem_object_lookup(file, mode_cmd->handles[i]); obj 126 drivers/gpu/drm/arm/display/komeda/komeda_framebuffer.c if (!obj) { obj 130 drivers/gpu/drm/arm/display/komeda/komeda_framebuffer.c fb->obj[i] = obj; obj 140 drivers/gpu/drm/arm/display/komeda/komeda_framebuffer.c - to_drm_gem_cma_obj(obj)->paddr; obj 141 drivers/gpu/drm/arm/display/komeda/komeda_framebuffer.c if (obj->size < min_size) { obj 143 drivers/gpu/drm/arm/display/komeda/komeda_framebuffer.c i, obj->size, min_size); obj 202 drivers/gpu/drm/arm/display/komeda/komeda_framebuffer.c drm_gem_object_put_unlocked(kfb->base.obj[i]); obj 242 drivers/gpu/drm/arm/display/komeda/komeda_framebuffer.c const struct drm_gem_cma_object *obj; obj 250 drivers/gpu/drm/arm/display/komeda/komeda_framebuffer.c obj = drm_fb_cma_get_gem_obj(fb, plane); obj 263 drivers/gpu/drm/arm/display/komeda/komeda_framebuffer.c return obj->paddr + offset; obj 80 drivers/gpu/drm/arm/display/komeda/komeda_pipeline.h struct drm_private_obj obj; obj 156 drivers/gpu/drm/arm/display/komeda/komeda_pipeline.h struct drm_private_state obj; obj 383 drivers/gpu/drm/arm/display/komeda/komeda_pipeline.h struct drm_private_obj obj; obj 434 drivers/gpu/drm/arm/display/komeda/komeda_pipeline.h struct drm_private_state 
obj; obj 463 drivers/gpu/drm/arm/display/komeda/komeda_pipeline.h #define priv_to_comp_st(o) container_of(o, struct komeda_component_state, obj) obj 464 drivers/gpu/drm/arm/display/komeda/komeda_pipeline.h #define priv_to_pipe_st(o) container_of(o, struct komeda_pipeline_state, obj) obj 29 drivers/gpu/drm/arm/display/komeda/komeda_pipeline_state.c priv_st = drm_atomic_get_private_obj_state(state, &pipe->obj); obj 42 drivers/gpu/drm/arm/display/komeda/komeda_pipeline_state.c priv_st = drm_atomic_get_old_private_obj_state(state, &pipe->obj); obj 54 drivers/gpu/drm/arm/display/komeda/komeda_pipeline_state.c priv_st = drm_atomic_get_new_private_obj_state(state, &pipe->obj); obj 104 drivers/gpu/drm/arm/display/komeda/komeda_pipeline_state.c WARN_ON(!drm_modeset_is_locked(&c->pipeline->obj.lock)); obj 106 drivers/gpu/drm/arm/display/komeda/komeda_pipeline_state.c priv_st = drm_atomic_get_private_obj_state(state, &c->obj); obj 119 drivers/gpu/drm/arm/display/komeda/komeda_pipeline_state.c priv_st = drm_atomic_get_old_private_obj_state(state, &c->obj); obj 1178 drivers/gpu/drm/arm/display/komeda/komeda_pipeline_state.c struct drm_atomic_state *drm_st = new->obj.state; obj 1179 drivers/gpu/drm/arm/display/komeda/komeda_pipeline_state.c struct komeda_pipeline_state *old = priv_to_pipe_st(pipe->obj.state); obj 1237 drivers/gpu/drm/arm/display/komeda/komeda_pipeline_state.c c_st = priv_to_comp_st(c->obj.state); obj 1256 drivers/gpu/drm/arm/display/komeda/komeda_pipeline_state.c struct komeda_pipeline_state *new = priv_to_pipe_st(pipe->obj.state); obj 1272 drivers/gpu/drm/arm/display/komeda/komeda_pipeline_state.c c->funcs->update(c, priv_to_comp_st(c->obj.state)); obj 20 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c komeda_layer_atomic_duplicate_state(struct drm_private_obj *obj) obj 24 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c st = kmemdup(obj->state, sizeof(*st), GFP_KERNEL); obj 29 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c __drm_atomic_helper_private_obj_duplicate_state(obj, &st->base.obj); obj 31 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c return &st->base.obj; obj 35 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c komeda_layer_atomic_destroy_state(struct drm_private_obj *obj, obj 58 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c drm_atomic_private_obj_init(&kms->base, &layer->base.obj, &st->base.obj, obj 64 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c komeda_scaler_atomic_duplicate_state(struct drm_private_obj *obj) obj 68 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c st = kmemdup(obj->state, sizeof(*st), GFP_KERNEL); obj 73 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c __drm_atomic_helper_private_obj_duplicate_state(obj, &st->base.obj); obj 75 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c return &st->base.obj; obj 79 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c komeda_scaler_atomic_destroy_state(struct drm_private_obj *obj, obj 101 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c &scaler->base.obj, &st->base.obj, obj 107 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c komeda_compiz_atomic_duplicate_state(struct drm_private_obj *obj) obj 111 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c st = kmemdup(obj->state, sizeof(*st), GFP_KERNEL); obj 116 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c __drm_atomic_helper_private_obj_duplicate_state(obj, &st->base.obj); obj 118 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c return &st->base.obj; obj 
122 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c komeda_compiz_atomic_destroy_state(struct drm_private_obj *obj, obj 143 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c drm_atomic_private_obj_init(&kms->base, &compiz->base.obj, &st->base.obj, obj 150 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c komeda_splitter_atomic_duplicate_state(struct drm_private_obj *obj) obj 154 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c st = kmemdup(obj->state, sizeof(*st), GFP_KERNEL); obj 159 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c __drm_atomic_helper_private_obj_duplicate_state(obj, &st->base.obj); obj 161 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c return &st->base.obj; obj 165 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c komeda_splitter_atomic_destroy_state(struct drm_private_obj *obj, obj 187 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c &splitter->base.obj, &st->base.obj, obj 194 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c komeda_merger_atomic_duplicate_state(struct drm_private_obj *obj) obj 198 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c st = kmemdup(obj->state, sizeof(*st), GFP_KERNEL); obj 203 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c __drm_atomic_helper_private_obj_duplicate_state(obj, &st->base.obj); obj 205 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c return &st->base.obj; obj 208 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c static void komeda_merger_atomic_destroy_state(struct drm_private_obj *obj, obj 230 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c &merger->base.obj, &st->base.obj, obj 237 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c komeda_improc_atomic_duplicate_state(struct drm_private_obj *obj) obj 241 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c st = kmemdup(obj->state, sizeof(*st), GFP_KERNEL); obj 246 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c __drm_atomic_helper_private_obj_duplicate_state(obj, &st->base.obj); obj 248 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c return &st->base.obj; obj 252 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c komeda_improc_atomic_destroy_state(struct drm_private_obj *obj, obj 273 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c drm_atomic_private_obj_init(&kms->base, &improc->base.obj, &st->base.obj, obj 280 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c komeda_timing_ctrlr_atomic_duplicate_state(struct drm_private_obj *obj) obj 284 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c st = kmemdup(obj->state, sizeof(*st), GFP_KERNEL); obj 289 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c __drm_atomic_helper_private_obj_duplicate_state(obj, &st->base.obj); obj 291 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c return &st->base.obj; obj 295 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c komeda_timing_ctrlr_atomic_destroy_state(struct drm_private_obj *obj, obj 316 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c drm_atomic_private_obj_init(&kms->base, &ctrlr->base.obj, &st->base.obj, obj 323 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c komeda_pipeline_atomic_duplicate_state(struct drm_private_obj *obj) obj 327 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c st = kmemdup(obj->state, sizeof(*st), GFP_KERNEL); obj 333 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c __drm_atomic_helper_private_obj_duplicate_state(obj, &st->obj); obj 335 
drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c return &st->obj; obj 339 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c komeda_pipeline_atomic_destroy_state(struct drm_private_obj *obj, obj 360 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c drm_atomic_private_obj_init(&kms->base, &pipe->obj, &st->obj, obj 428 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c struct drm_private_obj *obj, *next; obj 430 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c list_for_each_entry_safe(obj, next, &config->privobj_list, head) obj 431 drivers/gpu/drm/arm/display/komeda/komeda_private_obj.c drm_atomic_private_obj_fini(obj); obj 164 drivers/gpu/drm/arm/malidp_mw.c struct drm_gem_cma_object *obj = drm_fb_cma_get_gem_obj(fb, i); obj 174 drivers/gpu/drm/arm/malidp_mw.c mw_state->addrs[i] = obj->paddr + fb->offsets[i]; obj 338 drivers/gpu/drm/arm/malidp_planes.c struct drm_gem_object *obj; obj 343 drivers/gpu/drm/arm/malidp_planes.c obj = drm_gem_fb_get_obj(ms->base.fb, i); obj 344 drivers/gpu/drm/arm/malidp_planes.c cma_obj = to_drm_gem_cma_obj(obj); obj 349 drivers/gpu/drm/arm/malidp_planes.c sgt = obj->dev->driver->gem_prime_get_sg_table(obj); obj 736 drivers/gpu/drm/arm/malidp_planes.c struct drm_gem_cma_object *obj; obj 738 drivers/gpu/drm/arm/malidp_planes.c obj = drm_fb_cma_get_gem_obj(fb, plane_index); obj 740 drivers/gpu/drm/arm/malidp_planes.c if (WARN_ON(!obj)) obj 742 drivers/gpu/drm/arm/malidp_planes.c paddr = obj->paddr; obj 667 drivers/gpu/drm/armada/armada_crtc.c struct armada_gem_object *obj = dcrtc->cursor_obj; obj 670 drivers/gpu/drm/armada/armada_crtc.c pix = obj->addr; obj 695 drivers/gpu/drm/armada/armada_crtc.c struct armada_gem_object *obj = NULL; obj 707 drivers/gpu/drm/armada/armada_crtc.c obj = armada_gem_object_lookup(file, handle); obj 708 drivers/gpu/drm/armada/armada_crtc.c if (!obj) obj 712 drivers/gpu/drm/armada/armada_crtc.c if (!obj->addr) { obj 713 drivers/gpu/drm/armada/armada_crtc.c drm_gem_object_put_unlocked(&obj->obj); obj 717 drivers/gpu/drm/armada/armada_crtc.c if (obj->obj.size < w * h * 4) { obj 719 drivers/gpu/drm/armada/armada_crtc.c drm_gem_object_put_unlocked(&obj->obj); obj 727 drivers/gpu/drm/armada/armada_crtc.c drm_gem_object_put_unlocked(&dcrtc->cursor_obj->obj); obj 729 drivers/gpu/drm/armada/armada_crtc.c dcrtc->cursor_obj = obj; obj 733 drivers/gpu/drm/armada/armada_crtc.c if (obj) { obj 734 drivers/gpu/drm/armada/armada_crtc.c obj->update_data = dcrtc; obj 735 drivers/gpu/drm/armada/armada_crtc.c obj->update = cursor_update; obj 763 drivers/gpu/drm/armada/armada_crtc.c drm_gem_object_put_unlocked(&dcrtc->cursor_obj->obj); obj 22 drivers/gpu/drm/armada/armada_fb.c const struct drm_mode_fb_cmd2 *mode, struct armada_gem_object *obj) obj 66 drivers/gpu/drm/armada/armada_fb.c dfb->fb.obj[0] = &obj->obj; obj 82 drivers/gpu/drm/armada/armada_fb.c drm_gem_object_get(&obj->obj); obj 91 drivers/gpu/drm/armada/armada_fb.c struct armada_gem_object *obj; obj 108 drivers/gpu/drm/armada/armada_fb.c obj = armada_gem_object_lookup(dfile, mode->handles[0]); obj 109 drivers/gpu/drm/armada/armada_fb.c if (!obj) { obj 114 drivers/gpu/drm/armada/armada_fb.c if (obj->obj.import_attach && !obj->sgt) { obj 115 drivers/gpu/drm/armada/armada_fb.c ret = armada_gem_map_import(obj); obj 121 drivers/gpu/drm/armada/armada_fb.c if (!obj->mapped) { obj 126 drivers/gpu/drm/armada/armada_fb.c dfb = armada_framebuffer_create(dev, mode, obj); obj 132 drivers/gpu/drm/armada/armada_fb.c drm_gem_object_put_unlocked(&obj->obj); obj 137 
drivers/gpu/drm/armada/armada_fb.c drm_gem_object_put_unlocked(&obj->obj); obj 15 drivers/gpu/drm/armada/armada_fb.h #define drm_fb_obj(fb) drm_to_armada_gem((fb)->obj[0]) obj 33 drivers/gpu/drm/armada/armada_fbdev.c struct armada_gem_object *obj; obj 46 drivers/gpu/drm/armada/armada_fbdev.c obj = armada_gem_alloc_private_object(dev, size); obj 47 drivers/gpu/drm/armada/armada_fbdev.c if (!obj) { obj 52 drivers/gpu/drm/armada/armada_fbdev.c ret = armada_gem_linear_back(dev, obj); obj 54 drivers/gpu/drm/armada/armada_fbdev.c drm_gem_object_put_unlocked(&obj->obj); obj 58 drivers/gpu/drm/armada/armada_fbdev.c ptr = armada_gem_map_object(dev, obj); obj 60 drivers/gpu/drm/armada/armada_fbdev.c drm_gem_object_put_unlocked(&obj->obj); obj 64 drivers/gpu/drm/armada/armada_fbdev.c dfb = armada_framebuffer_create(dev, &mode, obj); obj 70 drivers/gpu/drm/armada/armada_fbdev.c drm_gem_object_put_unlocked(&obj->obj); obj 82 drivers/gpu/drm/armada/armada_fbdev.c info->fix.smem_start = obj->phys_addr; obj 83 drivers/gpu/drm/armada/armada_fbdev.c info->fix.smem_len = obj->obj.size; obj 84 drivers/gpu/drm/armada/armada_fbdev.c info->screen_size = obj->obj.size; obj 92 drivers/gpu/drm/armada/armada_fbdev.c (unsigned long long)obj->phys_addr); obj 21 drivers/gpu/drm/armada/armada_gem.c struct armada_gem_object *obj = drm_to_armada_gem(gobj); obj 22 drivers/gpu/drm/armada/armada_gem.c unsigned long pfn = obj->phys_addr >> PAGE_SHIFT; obj 39 drivers/gpu/drm/armada/armada_gem.c void armada_gem_free_object(struct drm_gem_object *obj) obj 41 drivers/gpu/drm/armada/armada_gem.c struct armada_gem_object *dobj = drm_to_armada_gem(obj); obj 42 drivers/gpu/drm/armada/armada_gem.c struct armada_private *priv = obj->dev->dev_private; obj 46 drivers/gpu/drm/armada/armada_gem.c drm_gem_free_mmap_offset(&dobj->obj); obj 52 drivers/gpu/drm/armada/armada_gem.c unsigned int order = get_order(dobj->obj.size); obj 64 drivers/gpu/drm/armada/armada_gem.c if (dobj->obj.import_attach) { obj 67 drivers/gpu/drm/armada/armada_gem.c dma_buf_unmap_attachment(dobj->obj.import_attach, obj 69 drivers/gpu/drm/armada/armada_gem.c drm_prime_gem_destroy(&dobj->obj, NULL); obj 72 drivers/gpu/drm/armada/armada_gem.c drm_gem_object_release(&dobj->obj); obj 78 drivers/gpu/drm/armada/armada_gem.c armada_gem_linear_back(struct drm_device *dev, struct armada_gem_object *obj) obj 81 drivers/gpu/drm/armada/armada_gem.c size_t size = obj->obj.size; obj 83 drivers/gpu/drm/armada/armada_gem.c if (obj->page || obj->linear) obj 99 drivers/gpu/drm/armada/armada_gem.c obj->addr = page_address(p); obj 100 drivers/gpu/drm/armada/armada_gem.c obj->phys_addr = page_to_phys(p); obj 101 drivers/gpu/drm/armada/armada_gem.c obj->page = p; obj 103 drivers/gpu/drm/armada/armada_gem.c memset(obj->addr, 0, PAGE_ALIGN(size)); obj 131 drivers/gpu/drm/armada/armada_gem.c if (!obj->page) { obj 150 drivers/gpu/drm/armada/armada_gem.c obj->linear = node; obj 153 drivers/gpu/drm/armada/armada_gem.c ptr = ioremap_wc(obj->linear->start, size); obj 156 drivers/gpu/drm/armada/armada_gem.c drm_mm_remove_node(obj->linear); obj 158 drivers/gpu/drm/armada/armada_gem.c kfree(obj->linear); obj 159 drivers/gpu/drm/armada/armada_gem.c obj->linear = NULL; obj 166 drivers/gpu/drm/armada/armada_gem.c obj->phys_addr = obj->linear->start; obj 167 drivers/gpu/drm/armada/armada_gem.c obj->dev_addr = obj->linear->start; obj 168 drivers/gpu/drm/armada/armada_gem.c obj->mapped = true; obj 171 drivers/gpu/drm/armada/armada_gem.c DRM_DEBUG_DRIVER("obj %p phys %#llx dev %#llx\n", obj, obj 172 
drivers/gpu/drm/armada/armada_gem.c (unsigned long long)obj->phys_addr, obj 173 drivers/gpu/drm/armada/armada_gem.c (unsigned long long)obj->dev_addr); obj 183 drivers/gpu/drm/armada/armada_gem.c dobj->addr = ioremap_wc(dobj->phys_addr, dobj->obj.size); obj 190 drivers/gpu/drm/armada/armada_gem.c struct armada_gem_object *obj; obj 194 drivers/gpu/drm/armada/armada_gem.c obj = kzalloc(sizeof(*obj), GFP_KERNEL); obj 195 drivers/gpu/drm/armada/armada_gem.c if (!obj) obj 198 drivers/gpu/drm/armada/armada_gem.c drm_gem_private_object_init(dev, &obj->obj, size); obj 200 drivers/gpu/drm/armada/armada_gem.c DRM_DEBUG_DRIVER("alloc private obj %p size %zu\n", obj, size); obj 202 drivers/gpu/drm/armada/armada_gem.c return obj; obj 208 drivers/gpu/drm/armada/armada_gem.c struct armada_gem_object *obj; obj 213 drivers/gpu/drm/armada/armada_gem.c obj = kzalloc(sizeof(*obj), GFP_KERNEL); obj 214 drivers/gpu/drm/armada/armada_gem.c if (!obj) obj 217 drivers/gpu/drm/armada/armada_gem.c if (drm_gem_object_init(dev, &obj->obj, size)) { obj 218 drivers/gpu/drm/armada/armada_gem.c kfree(obj); obj 222 drivers/gpu/drm/armada/armada_gem.c mapping = obj->obj.filp->f_mapping; obj 225 drivers/gpu/drm/armada/armada_gem.c DRM_DEBUG_DRIVER("alloc obj %p size %zu\n", obj, size); obj 227 drivers/gpu/drm/armada/armada_gem.c return obj; obj 250 drivers/gpu/drm/armada/armada_gem.c ret = drm_gem_handle_create(file, &dobj->obj, &handle); obj 259 drivers/gpu/drm/armada/armada_gem.c drm_gem_object_put_unlocked(&dobj->obj); obj 282 drivers/gpu/drm/armada/armada_gem.c ret = drm_gem_handle_create(file, &dobj->obj, &handle); obj 291 drivers/gpu/drm/armada/armada_gem.c drm_gem_object_put_unlocked(&dobj->obj); obj 307 drivers/gpu/drm/armada/armada_gem.c if (!dobj->obj.filp) { obj 308 drivers/gpu/drm/armada/armada_gem.c drm_gem_object_put_unlocked(&dobj->obj); obj 312 drivers/gpu/drm/armada/armada_gem.c addr = vm_mmap(dobj->obj.filp, 0, args->size, PROT_READ | PROT_WRITE, obj 314 drivers/gpu/drm/armada/armada_gem.c drm_gem_object_put_unlocked(&dobj->obj); obj 354 drivers/gpu/drm/armada/armada_gem.c if (args->offset > dobj->obj.size || obj 355 drivers/gpu/drm/armada/armada_gem.c args->size > dobj->obj.size - args->offset) { obj 356 drivers/gpu/drm/armada/armada_gem.c DRM_ERROR("invalid size: object size %u\n", dobj->obj.size); obj 369 drivers/gpu/drm/armada/armada_gem.c drm_gem_object_put_unlocked(&dobj->obj); obj 378 drivers/gpu/drm/armada/armada_gem.c struct drm_gem_object *obj = attach->dmabuf->priv; obj 379 drivers/gpu/drm/armada/armada_gem.c struct armada_gem_object *dobj = drm_to_armada_gem(obj); obj 388 drivers/gpu/drm/armada/armada_gem.c if (dobj->obj.filp) { obj 392 drivers/gpu/drm/armada/armada_gem.c count = dobj->obj.size / PAGE_SIZE; obj 396 drivers/gpu/drm/armada/armada_gem.c mapping = dobj->obj.filp->f_mapping; obj 419 drivers/gpu/drm/armada/armada_gem.c sg_set_page(sgt->sgl, dobj->page, dobj->obj.size, 0); obj 428 drivers/gpu/drm/armada/armada_gem.c sg_dma_len(sgt->sgl) = dobj->obj.size; obj 447 drivers/gpu/drm/armada/armada_gem.c struct drm_gem_object *obj = attach->dmabuf->priv; obj 448 drivers/gpu/drm/armada/armada_gem.c struct armada_gem_object *dobj = drm_to_armada_gem(obj); obj 454 drivers/gpu/drm/armada/armada_gem.c if (dobj->obj.filp) { obj 490 drivers/gpu/drm/armada/armada_gem.c armada_gem_prime_export(struct drm_gem_object *obj, int flags) obj 495 drivers/gpu/drm/armada/armada_gem.c exp_info.size = obj->size; obj 497 drivers/gpu/drm/armada/armada_gem.c exp_info.priv = obj; obj 499 
drivers/gpu/drm/armada/armada_gem.c return drm_gem_dmabuf_export(obj->dev, &exp_info); obj 509 drivers/gpu/drm/armada/armada_gem.c struct drm_gem_object *obj = buf->priv; obj 510 drivers/gpu/drm/armada/armada_gem.c if (obj->dev == dev) { obj 515 drivers/gpu/drm/armada/armada_gem.c drm_gem_object_get(obj); obj 516 drivers/gpu/drm/armada/armada_gem.c return obj; obj 530 drivers/gpu/drm/armada/armada_gem.c dobj->obj.import_attach = attach; obj 538 drivers/gpu/drm/armada/armada_gem.c return &dobj->obj; obj 545 drivers/gpu/drm/armada/armada_gem.c dobj->sgt = dma_buf_map_attachment(dobj->obj.import_attach, obj 557 drivers/gpu/drm/armada/armada_gem.c if (sg_dma_len(dobj->sgt->sgl) < dobj->obj.size) { obj 12 drivers/gpu/drm/armada/armada_gem.h struct drm_gem_object obj; obj 26 drivers/gpu/drm/armada/armada_gem.h #define drm_to_armada_gem(o) container_of(o, struct armada_gem_object, obj) obj 35 drivers/gpu/drm/armada/armada_gem.h struct dma_buf *armada_gem_prime_export(struct drm_gem_object *obj, int flags); obj 43 drivers/gpu/drm/armada/armada_gem.h struct drm_gem_object *obj = drm_gem_object_lookup(dfile, handle); obj 45 drivers/gpu/drm/armada/armada_gem.h return obj ? drm_to_armada_gem(obj) : NULL; obj 289 drivers/gpu/drm/ast/ast_drv.h struct drm_gem_object **obj); obj 542 drivers/gpu/drm/ast/ast_main.c struct drm_gem_object **obj) obj 547 drivers/gpu/drm/ast/ast_main.c *obj = NULL; obj 560 drivers/gpu/drm/ast/ast_main.c *obj = &gbo->bo.base; obj 537 drivers/gpu/drm/ast/ast_mode.c gbo = drm_gem_vram_of_gem(fb->obj[0]); obj 541 drivers/gpu/drm/ast/ast_mode.c gbo = drm_gem_vram_of_gem(crtc->primary->fb->obj[0]); obj 610 drivers/gpu/drm/ast/ast_mode.c drm_gem_vram_of_gem(fb->obj[0]); obj 900 drivers/gpu/drm/ast/ast_mode.c struct drm_gem_object *obj; obj 907 drivers/gpu/drm/ast/ast_mode.c ret = ast_gem_create(dev, size, true, &obj); obj 910 drivers/gpu/drm/ast/ast_mode.c gbo = drm_gem_vram_of_gem(obj); obj 928 drivers/gpu/drm/ast/ast_mode.c ast->cursor_cache = obj; obj 1160 drivers/gpu/drm/ast/ast_mode.c struct drm_gem_object *obj; obj 1176 drivers/gpu/drm/ast/ast_mode.c obj = drm_gem_object_lookup(file_priv, handle); obj 1177 drivers/gpu/drm/ast/ast_mode.c if (!obj) { obj 1181 drivers/gpu/drm/ast/ast_mode.c gbo = drm_gem_vram_of_gem(obj); obj 1238 drivers/gpu/drm/ast/ast_mode.c drm_gem_object_put_unlocked(obj); obj 1247 drivers/gpu/drm/ast/ast_mode.c drm_gem_object_put_unlocked(obj); obj 37 drivers/gpu/drm/bochs/bochs_kms.c gbo = drm_gem_vram_of_gem(state->fb->obj[0]); obj 79 drivers/gpu/drm/bochs/bochs_kms.c gbo = drm_gem_vram_of_gem(new_state->fb->obj[0]); obj 90 drivers/gpu/drm/bochs/bochs_kms.c gbo = drm_gem_vram_of_gem(old_state->fb->obj[0]); obj 305 drivers/gpu/drm/cirrus/cirrus.c vmap = drm_gem_shmem_vmap(fb->obj[0]); obj 326 drivers/gpu/drm/cirrus/cirrus.c drm_gem_shmem_vunmap(fb->obj[0], vmap); obj 175 drivers/gpu/drm/cirrus/cirrus_drv.h void cirrus_gem_free_object(struct drm_gem_object *obj); obj 182 drivers/gpu/drm/cirrus/cirrus_drv.h struct drm_gem_object **obj); obj 190 drivers/gpu/drm/cirrus/cirrus_drv.h struct drm_gem_object *obj); obj 205 drivers/gpu/drm/drm_atomic.c struct drm_private_obj *obj = state->private_objs[i].ptr; obj 207 drivers/gpu/drm/drm_atomic.c obj->funcs->atomic_destroy_state(obj, obj 723 drivers/gpu/drm/drm_atomic.c struct drm_private_obj *obj, obj 727 drivers/gpu/drm/drm_atomic.c memset(obj, 0, sizeof(*obj)); obj 729 drivers/gpu/drm/drm_atomic.c drm_modeset_lock_init(&obj->lock); obj 731 drivers/gpu/drm/drm_atomic.c obj->state = state; obj 732 
drivers/gpu/drm/drm_atomic.c obj->funcs = funcs; obj 733 drivers/gpu/drm/drm_atomic.c list_add_tail(&obj->head, &dev->mode_config.privobj_list); obj 744 drivers/gpu/drm/drm_atomic.c drm_atomic_private_obj_fini(struct drm_private_obj *obj) obj 746 drivers/gpu/drm/drm_atomic.c list_del(&obj->head); obj 747 drivers/gpu/drm/drm_atomic.c obj->funcs->atomic_destroy_state(obj, obj->state); obj 748 drivers/gpu/drm/drm_atomic.c drm_modeset_lock_fini(&obj->lock); obj 767 drivers/gpu/drm/drm_atomic.c struct drm_private_obj *obj) obj 775 drivers/gpu/drm/drm_atomic.c if (obj == state->private_objs[i].ptr) obj 778 drivers/gpu/drm/drm_atomic.c ret = drm_modeset_lock(&obj->lock, state->acquire_ctx); obj 792 drivers/gpu/drm/drm_atomic.c obj_state = obj->funcs->atomic_duplicate_state(obj); obj 797 drivers/gpu/drm/drm_atomic.c state->private_objs[index].old_state = obj->state; obj 799 drivers/gpu/drm/drm_atomic.c state->private_objs[index].ptr = obj; obj 805 drivers/gpu/drm/drm_atomic.c obj, obj_state, state); obj 821 drivers/gpu/drm/drm_atomic.c struct drm_private_obj *obj) obj 826 drivers/gpu/drm/drm_atomic.c if (obj == state->private_objs[i].ptr) obj 843 drivers/gpu/drm/drm_atomic.c struct drm_private_obj *obj) obj 848 drivers/gpu/drm/drm_atomic.c if (obj == state->private_objs[i].ptr) obj 2708 drivers/gpu/drm/drm_atomic_helper.c struct drm_private_obj *obj; obj 2794 drivers/gpu/drm/drm_atomic_helper.c for_each_oldnew_private_obj_in_state(state, obj, old_obj_state, new_obj_state, i) { obj 2795 drivers/gpu/drm/drm_atomic_helper.c WARN_ON(obj->state != old_obj_state); obj 2801 drivers/gpu/drm/drm_atomic_helper.c obj->state = new_obj_state; obj 496 drivers/gpu/drm/drm_atomic_state_helper.c void __drm_atomic_helper_private_obj_duplicate_state(struct drm_private_obj *obj, obj 499 drivers/gpu/drm/drm_atomic_state_helper.c memcpy(state, obj->state, sizeof(*state)); obj 855 drivers/gpu/drm/drm_atomic_uapi.c int drm_atomic_get_property(struct drm_mode_object *obj, obj 861 drivers/gpu/drm/drm_atomic_uapi.c switch (obj->type) { obj 863 drivers/gpu/drm/drm_atomic_uapi.c struct drm_connector *connector = obj_to_connector(obj); obj 870 drivers/gpu/drm/drm_atomic_uapi.c struct drm_crtc *crtc = obj_to_crtc(obj); obj 877 drivers/gpu/drm/drm_atomic_uapi.c struct drm_plane *plane = obj_to_plane(obj); obj 964 drivers/gpu/drm/drm_atomic_uapi.c struct drm_mode_object *obj, obj 974 drivers/gpu/drm/drm_atomic_uapi.c switch (obj->type) { obj 976 drivers/gpu/drm/drm_atomic_uapi.c struct drm_connector *connector = obj_to_connector(obj); obj 991 drivers/gpu/drm/drm_atomic_uapi.c struct drm_crtc *crtc = obj_to_crtc(obj); obj 1005 drivers/gpu/drm/drm_atomic_uapi.c struct drm_plane *plane = obj_to_plane(obj); obj 1332 drivers/gpu/drm/drm_atomic_uapi.c struct drm_mode_object *obj; obj 1339 drivers/gpu/drm/drm_atomic_uapi.c obj = drm_mode_object_find(dev, file_priv, obj_id, DRM_MODE_OBJECT_ANY); obj 1340 drivers/gpu/drm/drm_atomic_uapi.c if (!obj) { obj 1345 drivers/gpu/drm/drm_atomic_uapi.c if (!obj->properties) { obj 1346 drivers/gpu/drm/drm_atomic_uapi.c drm_mode_object_put(obj); obj 1352 drivers/gpu/drm/drm_atomic_uapi.c drm_mode_object_put(obj); obj 1365 drivers/gpu/drm/drm_atomic_uapi.c drm_mode_object_put(obj); obj 1370 drivers/gpu/drm/drm_atomic_uapi.c prop = drm_mode_obj_find_prop_id(obj, prop_id); obj 1372 drivers/gpu/drm/drm_atomic_uapi.c drm_mode_object_put(obj); obj 1380 drivers/gpu/drm/drm_atomic_uapi.c drm_mode_object_put(obj); obj 1386 drivers/gpu/drm/drm_atomic_uapi.c obj, prop, prop_value); obj 1388 
drivers/gpu/drm/drm_atomic_uapi.c drm_mode_object_put(obj); obj 1395 drivers/gpu/drm/drm_atomic_uapi.c drm_mode_object_put(obj); obj 255 drivers/gpu/drm/drm_client.c struct drm_gem_object *obj; obj 274 drivers/gpu/drm/drm_client.c obj = drm_gem_object_lookup(client->file, dumb_args.handle); obj 275 drivers/gpu/drm/drm_client.c if (!obj) { obj 280 drivers/gpu/drm/drm_client.c buffer->gem = obj; obj 2039 drivers/gpu/drm/drm_connector.c int drm_connector_set_obj_prop(struct drm_mode_object *obj, obj 2044 drivers/gpu/drm/drm_connector.c struct drm_connector *connector = obj_to_connector(obj); obj 737 drivers/gpu/drm/drm_crtc.c int drm_mode_crtc_set_obj_prop(struct drm_mode_object *obj, obj 742 drivers/gpu/drm/drm_crtc.c struct drm_crtc *crtc = obj_to_crtc(obj); obj 747 drivers/gpu/drm/drm_crtc.c drm_object_property_set_value(obj, property, value); obj 62 drivers/gpu/drm/drm_crtc_internal.h int drm_mode_crtc_set_obj_prop(struct drm_mode_object *obj, obj 139 drivers/gpu/drm/drm_crtc_internal.h int __drm_mode_object_add(struct drm_device *dev, struct drm_mode_object *obj, obj 142 drivers/gpu/drm/drm_crtc_internal.h int drm_mode_object_add(struct drm_device *dev, struct drm_mode_object *obj, obj 145 drivers/gpu/drm/drm_crtc_internal.h struct drm_mode_object *obj); obj 151 drivers/gpu/drm/drm_crtc_internal.h int drm_mode_object_get_properties(struct drm_mode_object *obj, bool atomic, obj 155 drivers/gpu/drm/drm_crtc_internal.h struct drm_property *drm_mode_obj_find_prop_id(struct drm_mode_object *obj, obj 178 drivers/gpu/drm/drm_crtc_internal.h int drm_connector_set_obj_prop(struct drm_mode_object *obj, obj 241 drivers/gpu/drm/drm_crtc_internal.h struct drm_mode_object *obj, obj 244 drivers/gpu/drm/drm_crtc_internal.h int drm_atomic_get_property(struct drm_mode_object *obj, obj 114 drivers/gpu/drm/drm_debugfs.c struct drm_gem_object *obj = ptr; obj 118 drivers/gpu/drm/drm_debugfs.c obj->name, obj->size, obj 119 drivers/gpu/drm/drm_debugfs.c obj->handle_count, obj 120 drivers/gpu/drm/drm_debugfs.c kref_read(&obj->refcount)); obj 3774 drivers/gpu/drm/drm_dp_mst_topology.c drm_dp_mst_duplicate_state(struct drm_private_obj *obj) obj 3777 drivers/gpu/drm/drm_dp_mst_topology.c to_dp_mst_topology_state(obj->state); obj 3784 drivers/gpu/drm/drm_dp_mst_topology.c __drm_atomic_helper_private_obj_duplicate_state(obj, &state->base); obj 3813 drivers/gpu/drm/drm_dp_mst_topology.c static void drm_dp_mst_destroy_state(struct drm_private_obj *obj, obj 66 drivers/gpu/drm/drm_fb_cma_helper.c struct drm_gem_cma_object *obj; obj 77 drivers/gpu/drm/drm_fb_cma_helper.c obj = drm_fb_cma_get_gem_obj(fb, plane); obj 78 drivers/gpu/drm/drm_fb_cma_helper.c if (!obj) obj 81 drivers/gpu/drm/drm_fb_cma_helper.c paddr = obj->paddr + fb->offsets[plane]; obj 766 drivers/gpu/drm/drm_framebuffer.c struct drm_mode_object *obj; obj 769 drivers/gpu/drm/drm_framebuffer.c obj = __drm_mode_object_find(dev, file_priv, id, DRM_MODE_OBJECT_FB); obj 770 drivers/gpu/drm/drm_framebuffer.c if (obj) obj 771 drivers/gpu/drm/drm_framebuffer.c fb = obj_to_fb(obj); obj 1060 drivers/gpu/drm/drm_framebuffer.c fb->obj[i] ? 
"" : "(null)"); obj 1061 drivers/gpu/drm/drm_framebuffer.c if (fb->obj[i]) obj 1062 drivers/gpu/drm/drm_framebuffer.c drm_gem_print_info(p, indent + 2, fb->obj[i]); obj 125 drivers/gpu/drm/drm_gem.c struct drm_gem_object *obj, size_t size) obj 129 drivers/gpu/drm/drm_gem.c drm_gem_private_object_init(dev, obj, size); obj 135 drivers/gpu/drm/drm_gem.c obj->filp = filp; obj 152 drivers/gpu/drm/drm_gem.c struct drm_gem_object *obj, size_t size) obj 156 drivers/gpu/drm/drm_gem.c obj->dev = dev; obj 157 drivers/gpu/drm/drm_gem.c obj->filp = NULL; obj 159 drivers/gpu/drm/drm_gem.c kref_init(&obj->refcount); obj 160 drivers/gpu/drm/drm_gem.c obj->handle_count = 0; obj 161 drivers/gpu/drm/drm_gem.c obj->size = size; obj 162 drivers/gpu/drm/drm_gem.c dma_resv_init(&obj->_resv); obj 163 drivers/gpu/drm/drm_gem.c if (!obj->resv) obj 164 drivers/gpu/drm/drm_gem.c obj->resv = &obj->_resv; obj 166 drivers/gpu/drm/drm_gem.c drm_vma_node_reset(&obj->vma_node); obj 171 drivers/gpu/drm/drm_gem.c drm_gem_remove_prime_handles(struct drm_gem_object *obj, struct drm_file *filp) obj 178 drivers/gpu/drm/drm_gem.c if (obj->dma_buf) { obj 180 drivers/gpu/drm/drm_gem.c obj->dma_buf); obj 195 drivers/gpu/drm/drm_gem.c static void drm_gem_object_handle_free(struct drm_gem_object *obj) obj 197 drivers/gpu/drm/drm_gem.c struct drm_device *dev = obj->dev; obj 200 drivers/gpu/drm/drm_gem.c if (obj->name) { obj 201 drivers/gpu/drm/drm_gem.c idr_remove(&dev->object_name_idr, obj->name); obj 202 drivers/gpu/drm/drm_gem.c obj->name = 0; obj 206 drivers/gpu/drm/drm_gem.c static void drm_gem_object_exported_dma_buf_free(struct drm_gem_object *obj) obj 209 drivers/gpu/drm/drm_gem.c if (obj->dma_buf) { obj 210 drivers/gpu/drm/drm_gem.c dma_buf_put(obj->dma_buf); obj 211 drivers/gpu/drm/drm_gem.c obj->dma_buf = NULL; obj 216 drivers/gpu/drm/drm_gem.c drm_gem_object_handle_put_unlocked(struct drm_gem_object *obj) obj 218 drivers/gpu/drm/drm_gem.c struct drm_device *dev = obj->dev; obj 221 drivers/gpu/drm/drm_gem.c if (WARN_ON(obj->handle_count == 0)) obj 231 drivers/gpu/drm/drm_gem.c if (--obj->handle_count == 0) { obj 232 drivers/gpu/drm/drm_gem.c drm_gem_object_handle_free(obj); obj 233 drivers/gpu/drm/drm_gem.c drm_gem_object_exported_dma_buf_free(obj); obj 239 drivers/gpu/drm/drm_gem.c drm_gem_object_put_unlocked(obj); obj 250 drivers/gpu/drm/drm_gem.c struct drm_gem_object *obj = ptr; obj 251 drivers/gpu/drm/drm_gem.c struct drm_device *dev = obj->dev; obj 253 drivers/gpu/drm/drm_gem.c if (obj->funcs && obj->funcs->close) obj 254 drivers/gpu/drm/drm_gem.c obj->funcs->close(obj, file_priv); obj 256 drivers/gpu/drm/drm_gem.c dev->driver->gem_close_object(obj, file_priv); obj 258 drivers/gpu/drm/drm_gem.c drm_gem_remove_prime_handles(obj, file_priv); obj 259 drivers/gpu/drm/drm_gem.c drm_vma_node_revoke(&obj->vma_node, file_priv); obj 261 drivers/gpu/drm/drm_gem.c drm_gem_object_handle_put_unlocked(obj); obj 278 drivers/gpu/drm/drm_gem.c struct drm_gem_object *obj; obj 283 drivers/gpu/drm/drm_gem.c obj = idr_replace(&filp->object_idr, NULL, handle); obj 285 drivers/gpu/drm/drm_gem.c if (IS_ERR_OR_NULL(obj)) obj 289 drivers/gpu/drm/drm_gem.c drm_gem_object_release_handle(handle, obj, filp); obj 316 drivers/gpu/drm/drm_gem.c struct drm_gem_object *obj; obj 319 drivers/gpu/drm/drm_gem.c obj = drm_gem_object_lookup(file, handle); obj 320 drivers/gpu/drm/drm_gem.c if (!obj) obj 324 drivers/gpu/drm/drm_gem.c if (obj->import_attach) { obj 329 drivers/gpu/drm/drm_gem.c ret = drm_gem_create_mmap_offset(obj); obj 333 
drivers/gpu/drm/drm_gem.c *offset = drm_vma_node_offset_addr(&obj->vma_node); obj 335 drivers/gpu/drm/drm_gem.c drm_gem_object_put_unlocked(obj); obj 374 drivers/gpu/drm/drm_gem.c struct drm_gem_object *obj, obj 377 drivers/gpu/drm/drm_gem.c struct drm_device *dev = obj->dev; obj 382 drivers/gpu/drm/drm_gem.c if (obj->handle_count++ == 0) obj 383 drivers/gpu/drm/drm_gem.c drm_gem_object_get(obj); obj 392 drivers/gpu/drm/drm_gem.c ret = idr_alloc(&file_priv->object_idr, obj, 1, 0, GFP_NOWAIT); obj 403 drivers/gpu/drm/drm_gem.c ret = drm_vma_node_allow(&obj->vma_node, file_priv); obj 407 drivers/gpu/drm/drm_gem.c if (obj->funcs && obj->funcs->open) { obj 408 drivers/gpu/drm/drm_gem.c ret = obj->funcs->open(obj, file_priv); obj 412 drivers/gpu/drm/drm_gem.c ret = dev->driver->gem_open_object(obj, file_priv); obj 421 drivers/gpu/drm/drm_gem.c drm_vma_node_revoke(&obj->vma_node, file_priv); obj 427 drivers/gpu/drm/drm_gem.c drm_gem_object_handle_put_unlocked(obj); obj 445 drivers/gpu/drm/drm_gem.c struct drm_gem_object *obj, obj 448 drivers/gpu/drm/drm_gem.c mutex_lock(&obj->dev->object_name_lock); obj 450 drivers/gpu/drm/drm_gem.c return drm_gem_handle_create_tail(file_priv, obj, handlep); obj 466 drivers/gpu/drm/drm_gem.c drm_gem_free_mmap_offset(struct drm_gem_object *obj) obj 468 drivers/gpu/drm/drm_gem.c struct drm_device *dev = obj->dev; obj 470 drivers/gpu/drm/drm_gem.c drm_vma_offset_remove(dev->vma_offset_manager, &obj->vma_node); obj 492 drivers/gpu/drm/drm_gem.c drm_gem_create_mmap_offset_size(struct drm_gem_object *obj, size_t size) obj 494 drivers/gpu/drm/drm_gem.c struct drm_device *dev = obj->dev; obj 496 drivers/gpu/drm/drm_gem.c return drm_vma_offset_add(dev->vma_offset_manager, &obj->vma_node, obj 515 drivers/gpu/drm/drm_gem.c int drm_gem_create_mmap_offset(struct drm_gem_object *obj) obj 517 drivers/gpu/drm/drm_gem.c return drm_gem_create_mmap_offset_size(obj, obj->size); obj 553 drivers/gpu/drm/drm_gem.c struct page **drm_gem_get_pages(struct drm_gem_object *obj) obj 561 drivers/gpu/drm/drm_gem.c mapping = obj->filp->f_mapping; obj 567 drivers/gpu/drm/drm_gem.c WARN_ON((obj->size & (PAGE_SIZE - 1)) != 0); obj 569 drivers/gpu/drm/drm_gem.c npages = obj->size >> PAGE_SHIFT; obj 616 drivers/gpu/drm/drm_gem.c void drm_gem_put_pages(struct drm_gem_object *obj, struct page **pages, obj 623 drivers/gpu/drm/drm_gem.c mapping = file_inode(obj->filp)->i_mapping; obj 630 drivers/gpu/drm/drm_gem.c WARN_ON((obj->size & (PAGE_SIZE - 1)) != 0); obj 632 drivers/gpu/drm/drm_gem.c npages = obj->size >> PAGE_SHIFT; obj 660 drivers/gpu/drm/drm_gem.c struct drm_gem_object *obj; obj 666 drivers/gpu/drm/drm_gem.c obj = idr_find(&filp->object_idr, handle[i]); obj 667 drivers/gpu/drm/drm_gem.c if (!obj) { obj 671 drivers/gpu/drm/drm_gem.c drm_gem_object_get(obj); obj 672 drivers/gpu/drm/drm_gem.c objs[i] = obj; obj 750 drivers/gpu/drm/drm_gem.c struct drm_gem_object *obj = NULL; obj 752 drivers/gpu/drm/drm_gem.c objects_lookup(filp, &handle, 1, &obj); obj 753 drivers/gpu/drm/drm_gem.c return obj; obj 774 drivers/gpu/drm/drm_gem.c struct drm_gem_object *obj; obj 776 drivers/gpu/drm/drm_gem.c obj = drm_gem_object_lookup(filep, handle); obj 777 drivers/gpu/drm/drm_gem.c if (!obj) { obj 782 drivers/gpu/drm/drm_gem.c ret = dma_resv_wait_timeout_rcu(obj->resv, wait_all, obj 789 drivers/gpu/drm/drm_gem.c drm_gem_object_put_unlocked(obj); obj 834 drivers/gpu/drm/drm_gem.c struct drm_gem_object *obj; obj 840 drivers/gpu/drm/drm_gem.c obj = drm_gem_object_lookup(file_priv, args->handle); obj 841 
drivers/gpu/drm/drm_gem.c if (obj == NULL) obj 846 drivers/gpu/drm/drm_gem.c if (obj->handle_count == 0) { obj 851 drivers/gpu/drm/drm_gem.c if (!obj->name) { obj 852 drivers/gpu/drm/drm_gem.c ret = idr_alloc(&dev->object_name_idr, obj, 1, 0, GFP_KERNEL); obj 856 drivers/gpu/drm/drm_gem.c obj->name = ret; obj 859 drivers/gpu/drm/drm_gem.c args->name = (uint64_t) obj->name; obj 864 drivers/gpu/drm/drm_gem.c drm_gem_object_put_unlocked(obj); obj 884 drivers/gpu/drm/drm_gem.c struct drm_gem_object *obj; obj 892 drivers/gpu/drm/drm_gem.c obj = idr_find(&dev->object_name_idr, (int) args->name); obj 893 drivers/gpu/drm/drm_gem.c if (obj) { obj 894 drivers/gpu/drm/drm_gem.c drm_gem_object_get(obj); obj 901 drivers/gpu/drm/drm_gem.c ret = drm_gem_handle_create_tail(file_priv, obj, &handle); obj 902 drivers/gpu/drm/drm_gem.c drm_gem_object_put_unlocked(obj); obj 907 drivers/gpu/drm/drm_gem.c args->size = obj->size; obj 952 drivers/gpu/drm/drm_gem.c drm_gem_object_release(struct drm_gem_object *obj) obj 954 drivers/gpu/drm/drm_gem.c WARN_ON(obj->dma_buf); obj 956 drivers/gpu/drm/drm_gem.c if (obj->filp) obj 957 drivers/gpu/drm/drm_gem.c fput(obj->filp); obj 959 drivers/gpu/drm/drm_gem.c dma_resv_fini(&obj->_resv); obj 960 drivers/gpu/drm/drm_gem.c drm_gem_free_mmap_offset(obj); obj 976 drivers/gpu/drm/drm_gem.c struct drm_gem_object *obj = obj 978 drivers/gpu/drm/drm_gem.c struct drm_device *dev = obj->dev; obj 980 drivers/gpu/drm/drm_gem.c if (obj->funcs) { obj 981 drivers/gpu/drm/drm_gem.c obj->funcs->free(obj); obj 983 drivers/gpu/drm/drm_gem.c dev->driver->gem_free_object_unlocked(obj); obj 987 drivers/gpu/drm/drm_gem.c dev->driver->gem_free_object(obj); obj 1002 drivers/gpu/drm/drm_gem.c drm_gem_object_put_unlocked(struct drm_gem_object *obj) obj 1006 drivers/gpu/drm/drm_gem.c if (!obj) obj 1009 drivers/gpu/drm/drm_gem.c dev = obj->dev; obj 1013 drivers/gpu/drm/drm_gem.c if (kref_put_mutex(&obj->refcount, drm_gem_object_free, obj 1017 drivers/gpu/drm/drm_gem.c kref_put(&obj->refcount, drm_gem_object_free); obj 1034 drivers/gpu/drm/drm_gem.c drm_gem_object_put(struct drm_gem_object *obj) obj 1036 drivers/gpu/drm/drm_gem.c if (obj) { obj 1037 drivers/gpu/drm/drm_gem.c WARN_ON(!mutex_is_locked(&obj->dev->struct_mutex)); obj 1039 drivers/gpu/drm/drm_gem.c kref_put(&obj->refcount, drm_gem_object_free); obj 1053 drivers/gpu/drm/drm_gem.c struct drm_gem_object *obj = vma->vm_private_data; obj 1055 drivers/gpu/drm/drm_gem.c drm_gem_object_get(obj); obj 1068 drivers/gpu/drm/drm_gem.c struct drm_gem_object *obj = vma->vm_private_data; obj 1070 drivers/gpu/drm/drm_gem.c drm_gem_object_put_unlocked(obj); obj 1098 drivers/gpu/drm/drm_gem.c int drm_gem_mmap_obj(struct drm_gem_object *obj, unsigned long obj_size, obj 1101 drivers/gpu/drm/drm_gem.c struct drm_device *dev = obj->dev; obj 1107 drivers/gpu/drm/drm_gem.c if (obj->funcs && obj->funcs->vm_ops) obj 1108 drivers/gpu/drm/drm_gem.c vma->vm_ops = obj->funcs->vm_ops; obj 1115 drivers/gpu/drm/drm_gem.c vma->vm_private_data = obj; obj 1125 drivers/gpu/drm/drm_gem.c drm_gem_object_get(obj); obj 1150 drivers/gpu/drm/drm_gem.c struct drm_gem_object *obj = NULL; obj 1162 drivers/gpu/drm/drm_gem.c obj = container_of(node, struct drm_gem_object, vma_node); obj 1173 drivers/gpu/drm/drm_gem.c if (!kref_get_unless_zero(&obj->refcount)) obj 1174 drivers/gpu/drm/drm_gem.c obj = NULL; obj 1178 drivers/gpu/drm/drm_gem.c if (!obj) obj 1182 drivers/gpu/drm/drm_gem.c drm_gem_object_put_unlocked(obj); obj 1188 drivers/gpu/drm/drm_gem.c drm_gem_object_put_unlocked(obj); obj 
1195 drivers/gpu/drm/drm_gem.c ret = drm_gem_mmap_obj(obj, drm_vma_node_size(node) << PAGE_SHIFT, obj 1198 drivers/gpu/drm/drm_gem.c drm_gem_object_put_unlocked(obj); obj 1205 drivers/gpu/drm/drm_gem.c const struct drm_gem_object *obj) obj 1207 drivers/gpu/drm/drm_gem.c drm_printf_indent(p, indent, "name=%d\n", obj->name); obj 1209 drivers/gpu/drm/drm_gem.c kref_read(&obj->refcount)); obj 1211 drivers/gpu/drm/drm_gem.c drm_vma_node_start(&obj->vma_node)); obj 1212 drivers/gpu/drm/drm_gem.c drm_printf_indent(p, indent, "size=%zu\n", obj->size); obj 1214 drivers/gpu/drm/drm_gem.c obj->import_attach ? "yes" : "no"); obj 1216 drivers/gpu/drm/drm_gem.c if (obj->funcs && obj->funcs->print_info) obj 1217 drivers/gpu/drm/drm_gem.c obj->funcs->print_info(p, indent, obj); obj 1218 drivers/gpu/drm/drm_gem.c else if (obj->dev->driver->gem_print_info) obj 1219 drivers/gpu/drm/drm_gem.c obj->dev->driver->gem_print_info(p, indent, obj); obj 1222 drivers/gpu/drm/drm_gem.c int drm_gem_pin(struct drm_gem_object *obj) obj 1224 drivers/gpu/drm/drm_gem.c if (obj->funcs && obj->funcs->pin) obj 1225 drivers/gpu/drm/drm_gem.c return obj->funcs->pin(obj); obj 1226 drivers/gpu/drm/drm_gem.c else if (obj->dev->driver->gem_prime_pin) obj 1227 drivers/gpu/drm/drm_gem.c return obj->dev->driver->gem_prime_pin(obj); obj 1232 drivers/gpu/drm/drm_gem.c void drm_gem_unpin(struct drm_gem_object *obj) obj 1234 drivers/gpu/drm/drm_gem.c if (obj->funcs && obj->funcs->unpin) obj 1235 drivers/gpu/drm/drm_gem.c obj->funcs->unpin(obj); obj 1236 drivers/gpu/drm/drm_gem.c else if (obj->dev->driver->gem_prime_unpin) obj 1237 drivers/gpu/drm/drm_gem.c obj->dev->driver->gem_prime_unpin(obj); obj 1240 drivers/gpu/drm/drm_gem.c void *drm_gem_vmap(struct drm_gem_object *obj) obj 1244 drivers/gpu/drm/drm_gem.c if (obj->funcs && obj->funcs->vmap) obj 1245 drivers/gpu/drm/drm_gem.c vaddr = obj->funcs->vmap(obj); obj 1246 drivers/gpu/drm/drm_gem.c else if (obj->dev->driver->gem_prime_vmap) obj 1247 drivers/gpu/drm/drm_gem.c vaddr = obj->dev->driver->gem_prime_vmap(obj); obj 1257 drivers/gpu/drm/drm_gem.c void drm_gem_vunmap(struct drm_gem_object *obj, void *vaddr) obj 1262 drivers/gpu/drm/drm_gem.c if (obj->funcs && obj->funcs->vunmap) obj 1263 drivers/gpu/drm/drm_gem.c obj->funcs->vunmap(obj, vaddr); obj 1264 drivers/gpu/drm/drm_gem.c else if (obj->dev->driver->gem_prime_vunmap) obj 1265 drivers/gpu/drm/drm_gem.c obj->dev->driver->gem_prime_vunmap(obj, vaddr); obj 1292 drivers/gpu/drm/drm_gem.c struct drm_gem_object *obj = objs[contended]; obj 1294 drivers/gpu/drm/drm_gem.c ret = dma_resv_lock_slow_interruptible(obj->resv, obj 1407 drivers/gpu/drm/drm_gem.c struct drm_gem_object *obj, obj 1416 drivers/gpu/drm/drm_gem.c dma_resv_get_excl_rcu(obj->resv); obj 1421 drivers/gpu/drm/drm_gem.c ret = dma_resv_get_fences_rcu(obj->resv, NULL, obj 349 drivers/gpu/drm/drm_gem_cma_helper.c struct drm_gem_object *obj = NULL; obj 362 drivers/gpu/drm/drm_gem_cma_helper.c obj = container_of(node, struct drm_gem_object, vma_node); obj 373 drivers/gpu/drm/drm_gem_cma_helper.c if (!kref_get_unless_zero(&obj->refcount)) obj 374 drivers/gpu/drm/drm_gem_cma_helper.c obj = NULL; obj 379 drivers/gpu/drm/drm_gem_cma_helper.c if (!obj) obj 383 drivers/gpu/drm/drm_gem_cma_helper.c drm_gem_object_put_unlocked(obj); obj 387 drivers/gpu/drm/drm_gem_cma_helper.c cma_obj = to_drm_gem_cma_obj(obj); obj 389 drivers/gpu/drm/drm_gem_cma_helper.c drm_gem_object_put_unlocked(obj); obj 406 drivers/gpu/drm/drm_gem_cma_helper.c const struct drm_gem_object *obj) obj 408 
drivers/gpu/drm/drm_gem_cma_helper.c const struct drm_gem_cma_object *cma_obj = to_drm_gem_cma_obj(obj); obj 427 drivers/gpu/drm/drm_gem_cma_helper.c struct sg_table *drm_gem_cma_prime_get_sg_table(struct drm_gem_object *obj) obj 429 drivers/gpu/drm/drm_gem_cma_helper.c struct drm_gem_cma_object *cma_obj = to_drm_gem_cma_obj(obj); obj 437 drivers/gpu/drm/drm_gem_cma_helper.c ret = dma_get_sgtable(obj->dev->dev, sgt, cma_obj->vaddr, obj 438 drivers/gpu/drm/drm_gem_cma_helper.c cma_obj->paddr, obj->size); obj 521 drivers/gpu/drm/drm_gem_cma_helper.c int drm_gem_cma_prime_mmap(struct drm_gem_object *obj, obj 527 drivers/gpu/drm/drm_gem_cma_helper.c ret = drm_gem_mmap_obj(obj, obj->size, vma); obj 531 drivers/gpu/drm/drm_gem_cma_helper.c cma_obj = to_drm_gem_cma_obj(obj); obj 550 drivers/gpu/drm/drm_gem_cma_helper.c void *drm_gem_cma_prime_vmap(struct drm_gem_object *obj) obj 552 drivers/gpu/drm/drm_gem_cma_helper.c struct drm_gem_cma_object *cma_obj = to_drm_gem_cma_obj(obj); obj 569 drivers/gpu/drm/drm_gem_cma_helper.c void drm_gem_cma_prime_vunmap(struct drm_gem_object *obj, void *vaddr) obj 636 drivers/gpu/drm/drm_gem_cma_helper.c struct drm_gem_object *obj; obj 645 drivers/gpu/drm/drm_gem_cma_helper.c obj = drm_gem_cma_prime_import_sg_table(dev, attach, sgt); obj 646 drivers/gpu/drm/drm_gem_cma_helper.c if (IS_ERR(obj)) { obj 648 drivers/gpu/drm/drm_gem_cma_helper.c return obj; obj 651 drivers/gpu/drm/drm_gem_cma_helper.c cma_obj = to_drm_gem_cma_obj(obj); obj 654 drivers/gpu/drm/drm_gem_cma_helper.c return obj; obj 53 drivers/gpu/drm/drm_gem_framebuffer_helper.c return fb->obj[plane]; obj 60 drivers/gpu/drm/drm_gem_framebuffer_helper.c struct drm_gem_object **obj, unsigned int num_planes, obj 73 drivers/gpu/drm/drm_gem_framebuffer_helper.c fb->obj[i] = obj[i]; obj 99 drivers/gpu/drm/drm_gem_framebuffer_helper.c drm_gem_object_put_unlocked(fb->obj[i]); obj 122 drivers/gpu/drm/drm_gem_framebuffer_helper.c return drm_gem_handle_create(file, fb->obj[0], handle); obj 290 drivers/gpu/drm/drm_gem_framebuffer_helper.c struct drm_gem_object *obj; obj 296 drivers/gpu/drm/drm_gem_framebuffer_helper.c obj = drm_gem_fb_get_obj(state->fb, 0); obj 297 drivers/gpu/drm/drm_gem_framebuffer_helper.c fence = dma_resv_get_excl_rcu(obj->resv); obj 52 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_object *obj; obj 58 drivers/gpu/drm/drm_gem_shmem_helper.c obj = dev->driver->gem_create_object(dev, size); obj 60 drivers/gpu/drm/drm_gem_shmem_helper.c obj = kzalloc(sizeof(*shmem), GFP_KERNEL); obj 61 drivers/gpu/drm/drm_gem_shmem_helper.c if (!obj) obj 64 drivers/gpu/drm/drm_gem_shmem_helper.c if (!obj->funcs) obj 65 drivers/gpu/drm/drm_gem_shmem_helper.c obj->funcs = &drm_gem_shmem_funcs; obj 67 drivers/gpu/drm/drm_gem_shmem_helper.c ret = drm_gem_object_init(dev, obj, size); obj 71 drivers/gpu/drm/drm_gem_shmem_helper.c ret = drm_gem_create_mmap_offset(obj); obj 75 drivers/gpu/drm/drm_gem_shmem_helper.c shmem = to_drm_gem_shmem_obj(obj); obj 87 drivers/gpu/drm/drm_gem_shmem_helper.c mapping_set_gfp_mask(obj->filp->f_mapping, GFP_HIGHUSER | obj 93 drivers/gpu/drm/drm_gem_shmem_helper.c drm_gem_object_release(obj); obj 95 drivers/gpu/drm/drm_gem_shmem_helper.c kfree(obj); obj 108 drivers/gpu/drm/drm_gem_shmem_helper.c void drm_gem_shmem_free_object(struct drm_gem_object *obj) obj 110 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); obj 114 drivers/gpu/drm/drm_gem_shmem_helper.c if (obj->import_attach) { obj 116 drivers/gpu/drm/drm_gem_shmem_helper.c 
drm_prime_gem_destroy(obj, shmem->sgt); obj 120 drivers/gpu/drm/drm_gem_shmem_helper.c dma_unmap_sg(obj->dev->dev, shmem->sgt->sgl, obj 131 drivers/gpu/drm/drm_gem_shmem_helper.c drm_gem_object_release(obj); obj 140 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_object *obj = &shmem->base; obj 146 drivers/gpu/drm/drm_gem_shmem_helper.c pages = drm_gem_get_pages(obj); obj 184 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_object *obj = &shmem->base; obj 192 drivers/gpu/drm/drm_gem_shmem_helper.c drm_gem_put_pages(obj, shmem->pages, obj 222 drivers/gpu/drm/drm_gem_shmem_helper.c int drm_gem_shmem_pin(struct drm_gem_object *obj) obj 224 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); obj 237 drivers/gpu/drm/drm_gem_shmem_helper.c void drm_gem_shmem_unpin(struct drm_gem_object *obj) obj 239 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); obj 247 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_object *obj = &shmem->base; obj 257 drivers/gpu/drm/drm_gem_shmem_helper.c if (obj->import_attach) obj 258 drivers/gpu/drm/drm_gem_shmem_helper.c shmem->vaddr = dma_buf_vmap(obj->import_attach->dmabuf); obj 260 drivers/gpu/drm/drm_gem_shmem_helper.c shmem->vaddr = vmap(shmem->pages, obj->size >> PAGE_SHIFT, obj 289 drivers/gpu/drm/drm_gem_shmem_helper.c void *drm_gem_shmem_vmap(struct drm_gem_object *obj) obj 291 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); obj 307 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_object *obj = &shmem->base; obj 315 drivers/gpu/drm/drm_gem_shmem_helper.c if (obj->import_attach) obj 316 drivers/gpu/drm/drm_gem_shmem_helper.c dma_buf_vunmap(obj->import_attach->dmabuf, shmem->vaddr); obj 330 drivers/gpu/drm/drm_gem_shmem_helper.c void drm_gem_shmem_vunmap(struct drm_gem_object *obj, void *vaddr) obj 332 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); obj 369 drivers/gpu/drm/drm_gem_shmem_helper.c int drm_gem_shmem_madvise(struct drm_gem_object *obj, int madv) obj 371 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); obj 386 drivers/gpu/drm/drm_gem_shmem_helper.c void drm_gem_shmem_purge_locked(struct drm_gem_object *obj) obj 388 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_device *dev = obj->dev; obj 389 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); obj 393 drivers/gpu/drm/drm_gem_shmem_helper.c dma_unmap_sg(obj->dev->dev, shmem->sgt->sgl, obj 403 drivers/gpu/drm/drm_gem_shmem_helper.c drm_vma_node_unmap(&obj->vma_node, dev->anon_inode->i_mapping); obj 404 drivers/gpu/drm/drm_gem_shmem_helper.c drm_gem_free_mmap_offset(obj); obj 411 drivers/gpu/drm/drm_gem_shmem_helper.c shmem_truncate_range(file_inode(obj->filp), 0, (loff_t)-1); obj 413 drivers/gpu/drm/drm_gem_shmem_helper.c invalidate_mapping_pages(file_inode(obj->filp)->i_mapping, obj 418 drivers/gpu/drm/drm_gem_shmem_helper.c bool drm_gem_shmem_purge(struct drm_gem_object *obj) obj 420 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); obj 424 drivers/gpu/drm/drm_gem_shmem_helper.c drm_gem_shmem_purge_locked(obj); obj 474 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_object *obj = vma->vm_private_data; obj 475 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem = 
to_drm_gem_shmem_obj(obj); obj 476 drivers/gpu/drm/drm_gem_shmem_helper.c loff_t num_pages = obj->size >> PAGE_SHIFT; obj 489 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_object *obj = vma->vm_private_data; obj 490 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); obj 501 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_object *obj = vma->vm_private_data; obj 502 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); obj 566 drivers/gpu/drm/drm_gem_shmem_helper.c const struct drm_gem_object *obj) obj 568 drivers/gpu/drm/drm_gem_shmem_helper.c const struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); obj 587 drivers/gpu/drm/drm_gem_shmem_helper.c struct sg_table *drm_gem_shmem_get_sg_table(struct drm_gem_object *obj) obj 589 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); obj 591 drivers/gpu/drm/drm_gem_shmem_helper.c return drm_prime_pages_to_sg(shmem->pages, obj->size >> PAGE_SHIFT); obj 607 drivers/gpu/drm/drm_gem_shmem_helper.c struct sg_table *drm_gem_shmem_get_pages_sgt(struct drm_gem_object *obj) obj 610 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); obj 616 drivers/gpu/drm/drm_gem_shmem_helper.c WARN_ON(obj->import_attach); obj 628 drivers/gpu/drm/drm_gem_shmem_helper.c dma_map_sg(obj->dev->dev, sgt->sgl, sgt->nents, DMA_BIDIRECTIONAL); obj 123 drivers/gpu/drm/drm_internal.h struct drm_gem_object *obj, obj 134 drivers/gpu/drm/drm_internal.h const struct drm_gem_object *obj); obj 136 drivers/gpu/drm/drm_internal.h int drm_gem_pin(struct drm_gem_object *obj); obj 137 drivers/gpu/drm/drm_internal.h void drm_gem_unpin(struct drm_gem_object *obj); obj 138 drivers/gpu/drm/drm_internal.h void *drm_gem_vmap(struct drm_gem_object *obj); obj 139 drivers/gpu/drm/drm_internal.h void drm_gem_vunmap(struct drm_gem_object *obj, void *vaddr); obj 424 drivers/gpu/drm/drm_lease.c struct drm_mode_object *obj = objects[o]; obj 442 drivers/gpu/drm/drm_lease.c if (obj->type == DRM_MODE_OBJECT_CRTC && !universal_planes) { obj 443 drivers/gpu/drm/drm_lease.c struct drm_crtc *crtc = obj_to_crtc(obj); obj 39 drivers/gpu/drm/drm_mode_object.c int __drm_mode_object_add(struct drm_device *dev, struct drm_mode_object *obj, obj 48 drivers/gpu/drm/drm_mode_object.c ret = idr_alloc(&dev->mode_config.object_idr, register_obj ? 
obj : NULL, obj 55 drivers/gpu/drm/drm_mode_object.c obj->id = ret; obj 56 drivers/gpu/drm/drm_mode_object.c obj->type = obj_type; obj 58 drivers/gpu/drm/drm_mode_object.c obj->free_cb = obj_free_cb; obj 59 drivers/gpu/drm/drm_mode_object.c kref_init(&obj->refcount); obj 80 drivers/gpu/drm/drm_mode_object.c struct drm_mode_object *obj, uint32_t obj_type) obj 82 drivers/gpu/drm/drm_mode_object.c return __drm_mode_object_add(dev, obj, obj_type, true, NULL); obj 86 drivers/gpu/drm/drm_mode_object.c struct drm_mode_object *obj) obj 89 drivers/gpu/drm/drm_mode_object.c idr_replace(&dev->mode_config.object_idr, obj, obj->id); obj 140 drivers/gpu/drm/drm_mode_object.c struct drm_mode_object *obj = NULL; obj 143 drivers/gpu/drm/drm_mode_object.c obj = idr_find(&dev->mode_config.object_idr, id); obj 144 drivers/gpu/drm/drm_mode_object.c if (obj && type != DRM_MODE_OBJECT_ANY && obj->type != type) obj 145 drivers/gpu/drm/drm_mode_object.c obj = NULL; obj 146 drivers/gpu/drm/drm_mode_object.c if (obj && obj->id != id) obj 147 drivers/gpu/drm/drm_mode_object.c obj = NULL; obj 149 drivers/gpu/drm/drm_mode_object.c if (obj && drm_mode_object_lease_required(obj->type) && obj 150 drivers/gpu/drm/drm_mode_object.c !_drm_lease_held(file_priv, obj->id)) obj 151 drivers/gpu/drm/drm_mode_object.c obj = NULL; obj 153 drivers/gpu/drm/drm_mode_object.c if (obj && obj->free_cb) { obj 154 drivers/gpu/drm/drm_mode_object.c if (!kref_get_unless_zero(&obj->refcount)) obj 155 drivers/gpu/drm/drm_mode_object.c obj = NULL; obj 159 drivers/gpu/drm/drm_mode_object.c return obj; obj 177 drivers/gpu/drm/drm_mode_object.c struct drm_mode_object *obj = NULL; obj 179 drivers/gpu/drm/drm_mode_object.c obj = __drm_mode_object_find(dev, file_priv, id, type); obj 180 drivers/gpu/drm/drm_mode_object.c return obj; obj 192 drivers/gpu/drm/drm_mode_object.c void drm_mode_object_put(struct drm_mode_object *obj) obj 194 drivers/gpu/drm/drm_mode_object.c if (obj->free_cb) { obj 195 drivers/gpu/drm/drm_mode_object.c DRM_DEBUG("OBJ ID: %d (%d)\n", obj->id, kref_read(&obj->refcount)); obj 196 drivers/gpu/drm/drm_mode_object.c kref_put(&obj->refcount, obj->free_cb); obj 209 drivers/gpu/drm/drm_mode_object.c void drm_mode_object_get(struct drm_mode_object *obj) obj 211 drivers/gpu/drm/drm_mode_object.c if (obj->free_cb) { obj 212 drivers/gpu/drm/drm_mode_object.c DRM_DEBUG("OBJ ID: %d (%d)\n", obj->id, kref_read(&obj->refcount)); obj 213 drivers/gpu/drm/drm_mode_object.c kref_get(&obj->refcount); obj 228 drivers/gpu/drm/drm_mode_object.c void drm_object_attach_property(struct drm_mode_object *obj, obj 232 drivers/gpu/drm/drm_mode_object.c int count = obj->properties->count; obj 238 drivers/gpu/drm/drm_mode_object.c obj->type); obj 242 drivers/gpu/drm/drm_mode_object.c obj->properties->properties[count] = property; obj 243 drivers/gpu/drm/drm_mode_object.c obj->properties->values[count] = init_val; obj 244 drivers/gpu/drm/drm_mode_object.c obj->properties->count++; obj 267 drivers/gpu/drm/drm_mode_object.c int drm_object_property_set_value(struct drm_mode_object *obj, obj 275 drivers/gpu/drm/drm_mode_object.c for (i = 0; i < obj->properties->count; i++) { obj 276 drivers/gpu/drm/drm_mode_object.c if (obj->properties->properties[i] == property) { obj 277 drivers/gpu/drm/drm_mode_object.c obj->properties->values[i] = val; obj 286 drivers/gpu/drm/drm_mode_object.c static int __drm_object_property_get_value(struct drm_mode_object *obj, obj 298 drivers/gpu/drm/drm_mode_object.c return drm_atomic_get_property(obj, property, val); obj 300 
drivers/gpu/drm/drm_mode_object.c for (i = 0; i < obj->properties->count; i++) { obj 301 drivers/gpu/drm/drm_mode_object.c if (obj->properties->properties[i] == property) { obj 302 drivers/gpu/drm/drm_mode_object.c *val = obj->properties->values[i]; obj 328 drivers/gpu/drm/drm_mode_object.c int drm_object_property_get_value(struct drm_mode_object *obj, obj 333 drivers/gpu/drm/drm_mode_object.c return __drm_object_property_get_value(obj, property, val); obj 338 drivers/gpu/drm/drm_mode_object.c int drm_mode_object_get_properties(struct drm_mode_object *obj, bool atomic, obj 345 drivers/gpu/drm/drm_mode_object.c for (i = 0, count = 0; i < obj->properties->count; i++) { obj 346 drivers/gpu/drm/drm_mode_object.c struct drm_property *prop = obj->properties->properties[i]; obj 353 drivers/gpu/drm/drm_mode_object.c ret = __drm_object_property_get_value(obj, prop, &val); obj 390 drivers/gpu/drm/drm_mode_object.c struct drm_mode_object *obj; obj 398 drivers/gpu/drm/drm_mode_object.c obj = drm_mode_object_find(dev, file_priv, arg->obj_id, arg->obj_type); obj 399 drivers/gpu/drm/drm_mode_object.c if (!obj) { obj 403 drivers/gpu/drm/drm_mode_object.c if (!obj->properties) { obj 408 drivers/gpu/drm/drm_mode_object.c ret = drm_mode_object_get_properties(obj, file_priv->atomic, obj 414 drivers/gpu/drm/drm_mode_object.c drm_mode_object_put(obj); obj 420 drivers/gpu/drm/drm_mode_object.c struct drm_property *drm_mode_obj_find_prop_id(struct drm_mode_object *obj, obj 425 drivers/gpu/drm/drm_mode_object.c for (i = 0; i < obj->properties->count; i++) obj 426 drivers/gpu/drm/drm_mode_object.c if (obj->properties->properties[i]->base.id == prop_id) obj 427 drivers/gpu/drm/drm_mode_object.c return obj->properties->properties[i]; obj 432 drivers/gpu/drm/drm_mode_object.c static int set_property_legacy(struct drm_mode_object *obj, obj 444 drivers/gpu/drm/drm_mode_object.c switch (obj->type) { obj 446 drivers/gpu/drm/drm_mode_object.c ret = drm_connector_set_obj_prop(obj, prop, prop_value); obj 449 drivers/gpu/drm/drm_mode_object.c ret = drm_mode_crtc_set_obj_prop(obj, prop, prop_value); obj 452 drivers/gpu/drm/drm_mode_object.c ret = drm_mode_plane_set_obj_prop(obj_to_plane(obj), obj 462 drivers/gpu/drm/drm_mode_object.c static int set_property_atomic(struct drm_mode_object *obj, obj 481 drivers/gpu/drm/drm_mode_object.c if (obj->type != DRM_MODE_OBJECT_CONNECTOR) { obj 487 drivers/gpu/drm/drm_mode_object.c obj_to_connector(obj), obj 490 drivers/gpu/drm/drm_mode_object.c ret = drm_atomic_set_property(state, file_priv, obj, prop, prop_value); obj 463 drivers/gpu/drm/drm_plane.c struct drm_mode_object *obj = &plane->base; obj 468 drivers/gpu/drm/drm_plane.c drm_object_property_set_value(obj, property, value); obj 267 drivers/gpu/drm/drm_prime.c struct drm_gem_object *obj = dma_buf->priv; obj 268 drivers/gpu/drm/drm_prime.c struct drm_device *dev = obj->dev; obj 271 drivers/gpu/drm/drm_prime.c drm_gem_object_put_unlocked(obj); obj 296 drivers/gpu/drm/drm_prime.c struct drm_gem_object *obj; obj 313 drivers/gpu/drm/drm_prime.c obj = dev->driver->gem_prime_import(dev, dma_buf); obj 315 drivers/gpu/drm/drm_prime.c obj = drm_gem_prime_import(dev, dma_buf); obj 316 drivers/gpu/drm/drm_prime.c if (IS_ERR(obj)) { obj 317 drivers/gpu/drm/drm_prime.c ret = PTR_ERR(obj); obj 321 drivers/gpu/drm/drm_prime.c if (obj->dma_buf) { obj 322 drivers/gpu/drm/drm_prime.c WARN_ON(obj->dma_buf != dma_buf); obj 324 drivers/gpu/drm/drm_prime.c obj->dma_buf = dma_buf; obj 329 drivers/gpu/drm/drm_prime.c ret = 
obj 330 drivers/gpu/drm/drm_prime.c drm_gem_object_put_unlocked(obj);
obj 374 drivers/gpu/drm/drm_prime.c struct drm_gem_object *obj,
obj 380 drivers/gpu/drm/drm_prime.c if (obj->handle_count == 0) {
obj 385 drivers/gpu/drm/drm_prime.c if (obj->funcs && obj->funcs->export)
obj 386 drivers/gpu/drm/drm_prime.c dmabuf = obj->funcs->export(obj, flags);
obj 388 drivers/gpu/drm/drm_prime.c dmabuf = dev->driver->gem_prime_export(obj, flags);
obj 390 drivers/gpu/drm/drm_prime.c dmabuf = drm_gem_prime_export(obj, flags);
obj 403 drivers/gpu/drm/drm_prime.c obj->dma_buf = dmabuf;
obj 404 drivers/gpu/drm/drm_prime.c get_dma_buf(obj->dma_buf);
obj 427 drivers/gpu/drm/drm_prime.c struct drm_gem_object *obj;
obj 432 drivers/gpu/drm/drm_prime.c obj = drm_gem_object_lookup(file_priv, handle);
obj 433 drivers/gpu/drm/drm_prime.c if (!obj) {
obj 446 drivers/gpu/drm/drm_prime.c if (obj->import_attach) {
obj 447 drivers/gpu/drm/drm_prime.c dmabuf = obj->import_attach->dmabuf;
obj 452 drivers/gpu/drm/drm_prime.c if (obj->dma_buf) {
obj 453 drivers/gpu/drm/drm_prime.c get_dma_buf(obj->dma_buf);
obj 454 drivers/gpu/drm/drm_prime.c dmabuf = obj->dma_buf;
obj 458 drivers/gpu/drm/drm_prime.c dmabuf = export_and_register_object(dev, obj, flags);
obj 501 drivers/gpu/drm/drm_prime.c drm_gem_object_put_unlocked(obj);
obj 577 drivers/gpu/drm/drm_prime.c struct drm_gem_object *obj = dma_buf->priv;
obj 579 drivers/gpu/drm/drm_prime.c return drm_gem_pin(obj);
obj 595 drivers/gpu/drm/drm_prime.c struct drm_gem_object *obj = dma_buf->priv;
obj 597 drivers/gpu/drm/drm_prime.c drm_gem_unpin(obj);
obj 616 drivers/gpu/drm/drm_prime.c struct drm_gem_object *obj = attach->dmabuf->priv;
obj 622 drivers/gpu/drm/drm_prime.c if (obj->funcs)
obj 623 drivers/gpu/drm/drm_prime.c sgt = obj->funcs->get_sg_table(obj);
obj 625 drivers/gpu/drm/drm_prime.c sgt = obj->dev->driver->gem_prime_get_sg_table(obj);
obj 671 drivers/gpu/drm/drm_prime.c struct drm_gem_object *obj = dma_buf->priv;
obj 674 drivers/gpu/drm/drm_prime.c vaddr = drm_gem_vmap(obj);
obj 692 drivers/gpu/drm/drm_prime.c struct drm_gem_object *obj = dma_buf->priv;
obj 694 drivers/gpu/drm/drm_prime.c drm_gem_vunmap(obj, vaddr);
obj 710 drivers/gpu/drm/drm_prime.c int drm_gem_prime_mmap(struct drm_gem_object *obj, struct vm_area_struct *vma)
obj 724 drivers/gpu/drm/drm_prime.c priv->minor = obj->dev->primary;
obj 727 drivers/gpu/drm/drm_prime.c ret = drm_vma_node_allow(&obj->vma_node, priv);
obj 731 drivers/gpu/drm/drm_prime.c vma->vm_pgoff += drm_vma_node_start(&obj->vma_node);
obj 733 drivers/gpu/drm/drm_prime.c ret = obj->dev->driver->fops->mmap(fil, vma);
obj 735 drivers/gpu/drm/drm_prime.c drm_vma_node_revoke(&obj->vma_node, priv);
obj 760 drivers/gpu/drm/drm_prime.c struct drm_gem_object *obj = dma_buf->priv;
obj 761 drivers/gpu/drm/drm_prime.c struct drm_device *dev = obj->dev;
obj 766 drivers/gpu/drm/drm_prime.c return dev->driver->gem_prime_mmap(obj, vma);
obj 825 drivers/gpu/drm/drm_prime.c struct dma_buf *drm_gem_prime_export(struct drm_gem_object *obj,
obj 828 drivers/gpu/drm/drm_prime.c struct drm_device *dev = obj->dev;
obj 833 drivers/gpu/drm/drm_prime.c .size = obj->size,
obj 835 drivers/gpu/drm/drm_prime.c .priv = obj,
obj 836 drivers/gpu/drm/drm_prime.c .resv = obj->resv,
obj 863 drivers/gpu/drm/drm_prime.c struct drm_gem_object *obj;
obj 867 drivers/gpu/drm/drm_prime.c obj = dma_buf->priv;
obj 868 drivers/gpu/drm/drm_prime.c if (obj->dev == dev) {
obj 873 drivers/gpu/drm/drm_prime.c drm_gem_object_get(obj);
obj 874 drivers/gpu/drm/drm_prime.c return obj;
obj 893 drivers/gpu/drm/drm_prime.c obj = dev->driver->gem_prime_import_sg_table(dev, attach, sgt);
obj 894 drivers/gpu/drm/drm_prime.c if (IS_ERR(obj)) {
obj 895 drivers/gpu/drm/drm_prime.c ret = PTR_ERR(obj);
obj 899 drivers/gpu/drm/drm_prime.c obj->import_attach = attach;
obj 900 drivers/gpu/drm/drm_prime.c obj->resv = dma_buf->resv;
obj 902 drivers/gpu/drm/drm_prime.c return obj;
obj 988 drivers/gpu/drm/drm_prime.c void drm_prime_gem_destroy(struct drm_gem_object *obj, struct sg_table *sg)
obj 992 drivers/gpu/drm/drm_prime.c attach = obj->import_attach;
obj 656 drivers/gpu/drm/drm_property.c struct drm_mode_object *obj;
obj 659 drivers/gpu/drm/drm_property.c obj = __drm_mode_object_find(dev, NULL, id, DRM_MODE_OBJECT_BLOB);
obj 660 drivers/gpu/drm/drm_property.c if (obj)
obj 661 drivers/gpu/drm/drm_property.c blob = obj_to_blob(obj);
obj 294 drivers/gpu/drm/etnaviv/etnaviv_drv.c struct drm_gem_object *obj;
obj 300 drivers/gpu/drm/etnaviv/etnaviv_drv.c obj = drm_gem_object_lookup(file, args->handle);
obj 301 drivers/gpu/drm/etnaviv/etnaviv_drv.c if (!obj)
obj 304 drivers/gpu/drm/etnaviv/etnaviv_drv.c ret = etnaviv_gem_cpu_prep(obj, args->op, &TS(args->timeout));
obj 306 drivers/gpu/drm/etnaviv/etnaviv_drv.c drm_gem_object_put_unlocked(obj);
obj 315 drivers/gpu/drm/etnaviv/etnaviv_drv.c struct drm_gem_object *obj;
obj 321 drivers/gpu/drm/etnaviv/etnaviv_drv.c obj = drm_gem_object_lookup(file, args->handle);
obj 322 drivers/gpu/drm/etnaviv/etnaviv_drv.c if (!obj)
obj 325 drivers/gpu/drm/etnaviv/etnaviv_drv.c ret = etnaviv_gem_cpu_fini(obj);
obj 327 drivers/gpu/drm/etnaviv/etnaviv_drv.c drm_gem_object_put_unlocked(obj);
obj 336 drivers/gpu/drm/etnaviv/etnaviv_drv.c struct drm_gem_object *obj;
obj 342 drivers/gpu/drm/etnaviv/etnaviv_drv.c obj = drm_gem_object_lookup(file, args->handle);
obj 343 drivers/gpu/drm/etnaviv/etnaviv_drv.c if (!obj)
obj 346 drivers/gpu/drm/etnaviv/etnaviv_drv.c ret = etnaviv_gem_mmap_offset(obj, &args->offset);
obj 347 drivers/gpu/drm/etnaviv/etnaviv_drv.c drm_gem_object_put_unlocked(obj);
obj 407 drivers/gpu/drm/etnaviv/etnaviv_drv.c struct drm_gem_object *obj;
obj 421 drivers/gpu/drm/etnaviv/etnaviv_drv.c obj = drm_gem_object_lookup(file, args->handle);
obj 422 drivers/gpu/drm/etnaviv/etnaviv_drv.c if (!obj)
obj 428 drivers/gpu/drm/etnaviv/etnaviv_drv.c ret = etnaviv_gem_wait_bo(gpu, obj, timeout);
obj 430 drivers/gpu/drm/etnaviv/etnaviv_drv.c drm_gem_object_put_unlocked(obj);
obj 52 drivers/gpu/drm/etnaviv/etnaviv_drv.h int etnaviv_gem_mmap_offset(struct drm_gem_object *obj, u64 *offset);
obj 53 drivers/gpu/drm/etnaviv/etnaviv_drv.h struct sg_table *etnaviv_gem_prime_get_sg_table(struct drm_gem_object *obj);
obj 54 drivers/gpu/drm/etnaviv/etnaviv_drv.h void *etnaviv_gem_prime_vmap(struct drm_gem_object *obj);
obj 55 drivers/gpu/drm/etnaviv/etnaviv_drv.h void etnaviv_gem_prime_vunmap(struct drm_gem_object *obj, void *vaddr);
obj 56 drivers/gpu/drm/etnaviv/etnaviv_drv.h int etnaviv_gem_prime_mmap(struct drm_gem_object *obj,
obj 60 drivers/gpu/drm/etnaviv/etnaviv_drv.h int etnaviv_gem_prime_pin(struct drm_gem_object *obj);
obj 61 drivers/gpu/drm/etnaviv/etnaviv_drv.h void etnaviv_gem_prime_unpin(struct drm_gem_object *obj);
obj 62 drivers/gpu/drm/etnaviv/etnaviv_drv.h void *etnaviv_gem_vmap(struct drm_gem_object *obj);
obj 63 drivers/gpu/drm/etnaviv/etnaviv_drv.h int etnaviv_gem_cpu_prep(struct drm_gem_object *obj, u32 op,
obj 65 drivers/gpu/drm/etnaviv/etnaviv_drv.h int etnaviv_gem_cpu_fini(struct drm_gem_object *obj);
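The drm_prime.c hits above are the PRIME export path (handle-to-fd, lines 374-458) and import path (fd-to-handle, lines 296-330 and 863-902). A driver of this era normally reaches them by wiring the generic helpers into its drm_driver; a minimal sketch under that assumption (field set abbreviated, example_driver is hypothetical):

	#include <drm/drm_drv.h>
	#include <drm/drm_prime.h>

	static struct drm_driver example_driver = {
		.driver_features	= DRIVER_GEM,
		/* Export: ends up in export_and_register_object() above. */
		.prime_handle_to_fd	= drm_gem_prime_handle_to_fd,
		.gem_prime_export	= drm_gem_prime_export,
		/*
		 * Import: drm_gem_prime_import() short-circuits same-device
		 * re-imports (lines 863-874) and otherwise builds an object
		 * from the attachment's sg table (line 893).
		 */
		.prime_fd_to_handle	= drm_gem_prime_fd_to_handle,
		.gem_prime_import	= drm_gem_prime_import,
		.gem_prime_mmap		= drm_gem_prime_mmap,
	};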
obj 66 drivers/gpu/drm/etnaviv/etnaviv_drv.h void etnaviv_gem_free_object(struct drm_gem_object *obj);
obj 117 drivers/gpu/drm/etnaviv/etnaviv_dump.c struct etnaviv_gem_object *obj;
obj 141 drivers/gpu/drm/etnaviv/etnaviv_dump.c obj = submit->bos[i].obj;
obj 142 drivers/gpu/drm/etnaviv/etnaviv_dump.c file_size += obj->base.size;
obj 143 drivers/gpu/drm/etnaviv/etnaviv_dump.c n_bomap_pages += obj->base.size >> PAGE_SHIFT;
obj 201 drivers/gpu/drm/etnaviv/etnaviv_dump.c obj = submit->bos[i].obj;
obj 204 drivers/gpu/drm/etnaviv/etnaviv_dump.c mutex_lock(&obj->lock);
obj 205 drivers/gpu/drm/etnaviv/etnaviv_dump.c pages = etnaviv_gem_get_pages(obj);
obj 206 drivers/gpu/drm/etnaviv/etnaviv_dump.c mutex_unlock(&obj->lock);
obj 212 drivers/gpu/drm/etnaviv/etnaviv_dump.c for (j = 0; j < obj->base.size >> PAGE_SHIFT; j++)
obj 218 drivers/gpu/drm/etnaviv/etnaviv_dump.c vaddr = etnaviv_gem_vmap(&obj->base);
obj 220 drivers/gpu/drm/etnaviv/etnaviv_dump.c memcpy(iter.data, vaddr, obj->base.size);
obj 223 drivers/gpu/drm/etnaviv/etnaviv_dump.c obj->base.size);
obj 160 drivers/gpu/drm/etnaviv/etnaviv_gem.c struct etnaviv_gem_object *obj;
obj 169 drivers/gpu/drm/etnaviv/etnaviv_gem.c obj = to_etnaviv_bo(vma->vm_private_data);
obj 170 drivers/gpu/drm/etnaviv/etnaviv_gem.c return obj->ops->mmap(obj, vma);
obj 176 drivers/gpu/drm/etnaviv/etnaviv_gem.c struct drm_gem_object *obj = vma->vm_private_data;
obj 177 drivers/gpu/drm/etnaviv/etnaviv_gem.c struct etnaviv_gem_object *etnaviv_obj = to_etnaviv_bo(obj);
obj 210 drivers/gpu/drm/etnaviv/etnaviv_gem.c int etnaviv_gem_mmap_offset(struct drm_gem_object *obj, u64 *offset)
obj 215 drivers/gpu/drm/etnaviv/etnaviv_gem.c ret = drm_gem_create_mmap_offset(obj);
obj 217 drivers/gpu/drm/etnaviv/etnaviv_gem.c dev_err(obj->dev->dev, "could not allocate mmap offset\n");
obj 219 drivers/gpu/drm/etnaviv/etnaviv_gem.c *offset = drm_vma_node_offset_addr(&obj->vma_node);
obj 225 drivers/gpu/drm/etnaviv/etnaviv_gem.c etnaviv_gem_get_vram_mapping(struct etnaviv_gem_object *obj,
obj 230 drivers/gpu/drm/etnaviv/etnaviv_gem.c list_for_each_entry(mapping, &obj->vram_list, obj_node) {
obj 251 drivers/gpu/drm/etnaviv/etnaviv_gem.c struct drm_gem_object *obj, struct etnaviv_iommu_context *mmu_context,
obj 254 drivers/gpu/drm/etnaviv/etnaviv_gem.c struct etnaviv_gem_object *etnaviv_obj = to_etnaviv_bo(obj);
obj 328 drivers/gpu/drm/etnaviv/etnaviv_gem.c drm_gem_object_get(obj);
obj 332 drivers/gpu/drm/etnaviv/etnaviv_gem.c void *etnaviv_gem_vmap(struct drm_gem_object *obj)
obj 334 drivers/gpu/drm/etnaviv/etnaviv_gem.c struct etnaviv_gem_object *etnaviv_obj = to_etnaviv_bo(obj);
obj 351 drivers/gpu/drm/etnaviv/etnaviv_gem.c static void *etnaviv_gem_vmap_impl(struct etnaviv_gem_object *obj)
obj 355 drivers/gpu/drm/etnaviv/etnaviv_gem.c lockdep_assert_held(&obj->lock);
obj 357 drivers/gpu/drm/etnaviv/etnaviv_gem.c pages = etnaviv_gem_get_pages(obj);
obj 361 drivers/gpu/drm/etnaviv/etnaviv_gem.c return vmap(pages, obj->base.size >> PAGE_SHIFT,
obj 375 drivers/gpu/drm/etnaviv/etnaviv_gem.c int etnaviv_gem_cpu_prep(struct drm_gem_object *obj, u32 op,
obj 378 drivers/gpu/drm/etnaviv/etnaviv_gem.c struct etnaviv_gem_object *etnaviv_obj = to_etnaviv_bo(obj);
obj 379 drivers/gpu/drm/etnaviv/etnaviv_gem.c struct drm_device *dev = obj->dev;
obj 394 drivers/gpu/drm/etnaviv/etnaviv_gem.c if (!dma_resv_test_signaled_rcu(obj->resv,
obj 400 drivers/gpu/drm/etnaviv/etnaviv_gem.c ret = dma_resv_wait_timeout_rcu(obj->resv,
obj 416 drivers/gpu/drm/etnaviv/etnaviv_gem.c int etnaviv_gem_cpu_fini(struct drm_gem_object *obj)
obj 418 drivers/gpu/drm/etnaviv/etnaviv_gem.c struct drm_device *dev = obj->dev;
obj 419 drivers/gpu/drm/etnaviv/etnaviv_gem.c struct etnaviv_gem_object *etnaviv_obj = to_etnaviv_bo(obj);
obj 433 drivers/gpu/drm/etnaviv/etnaviv_gem.c int etnaviv_gem_wait_bo(struct etnaviv_gpu *gpu, struct drm_gem_object *obj,
obj 436 drivers/gpu/drm/etnaviv/etnaviv_gem.c struct etnaviv_gem_object *etnaviv_obj = to_etnaviv_bo(obj);
obj 453 drivers/gpu/drm/etnaviv/etnaviv_gem.c static void etnaviv_gem_describe(struct drm_gem_object *obj, struct seq_file *m)
obj 455 drivers/gpu/drm/etnaviv/etnaviv_gem.c struct etnaviv_gem_object *etnaviv_obj = to_etnaviv_bo(obj);
obj 456 drivers/gpu/drm/etnaviv/etnaviv_gem.c struct dma_resv *robj = obj->resv;
obj 459 drivers/gpu/drm/etnaviv/etnaviv_gem.c unsigned long off = drm_vma_node_start(&obj->vma_node);
obj 463 drivers/gpu/drm/etnaviv/etnaviv_gem.c obj->name, kref_read(&obj->refcount),
obj 464 drivers/gpu/drm/etnaviv/etnaviv_gem.c off, etnaviv_obj->vaddr, obj->size);
obj 492 drivers/gpu/drm/etnaviv/etnaviv_gem.c struct drm_gem_object *obj = &etnaviv_obj->base;
obj 495 drivers/gpu/drm/etnaviv/etnaviv_gem.c etnaviv_gem_describe(obj, m);
obj 497 drivers/gpu/drm/etnaviv/etnaviv_gem.c size += obj->size;
obj 518 drivers/gpu/drm/etnaviv/etnaviv_gem.c void etnaviv_gem_free_object(struct drm_gem_object *obj)
obj 520 drivers/gpu/drm/etnaviv/etnaviv_gem.c struct etnaviv_gem_object *etnaviv_obj = to_etnaviv_bo(obj);
obj 521 drivers/gpu/drm/etnaviv/etnaviv_gem.c struct etnaviv_drm_private *priv = obj->dev->dev_private;
obj 546 drivers/gpu/drm/etnaviv/etnaviv_gem.c drm_gem_free_mmap_offset(obj);
obj 548 drivers/gpu/drm/etnaviv/etnaviv_gem.c drm_gem_object_release(obj);
obj 553 drivers/gpu/drm/etnaviv/etnaviv_gem.c void etnaviv_gem_obj_add(struct drm_device *dev, struct drm_gem_object *obj)
obj 556 drivers/gpu/drm/etnaviv/etnaviv_gem.c struct etnaviv_gem_object *etnaviv_obj = to_etnaviv_bo(obj);
obj 564 drivers/gpu/drm/etnaviv/etnaviv_gem.c const struct etnaviv_gem_ops *ops, struct drm_gem_object **obj)
obj 596 drivers/gpu/drm/etnaviv/etnaviv_gem.c *obj = &etnaviv_obj->base;
obj 605 drivers/gpu/drm/etnaviv/etnaviv_gem.c struct drm_gem_object *obj = NULL;
obj 611 drivers/gpu/drm/etnaviv/etnaviv_gem.c &etnaviv_gem_shmem_ops, &obj);
obj 615 drivers/gpu/drm/etnaviv/etnaviv_gem.c lockdep_set_class(&to_etnaviv_bo(obj)->lock, &etnaviv_shm_lock_class);
obj 617 drivers/gpu/drm/etnaviv/etnaviv_gem.c ret = drm_gem_object_init(dev, obj, size);
obj 627 drivers/gpu/drm/etnaviv/etnaviv_gem.c mapping_set_gfp_mask(obj->filp->f_mapping, GFP_HIGHUSER |
obj 630 drivers/gpu/drm/etnaviv/etnaviv_gem.c etnaviv_gem_obj_add(dev, obj);
obj 632 drivers/gpu/drm/etnaviv/etnaviv_gem.c ret = drm_gem_handle_create(file, obj, handle);
obj 636 drivers/gpu/drm/etnaviv/etnaviv_gem.c drm_gem_object_put_unlocked(obj);
obj 644 drivers/gpu/drm/etnaviv/etnaviv_gem.c struct drm_gem_object *obj;
obj 647 drivers/gpu/drm/etnaviv/etnaviv_gem.c ret = etnaviv_gem_new_impl(dev, size, flags, ops, &obj);
obj 651 drivers/gpu/drm/etnaviv/etnaviv_gem.c drm_gem_private_object_init(dev, obj, size);
obj 653 drivers/gpu/drm/etnaviv/etnaviv_gem.c *res = to_etnaviv_bo(obj);
obj 59 drivers/gpu/drm/etnaviv/etnaviv_gem.h struct etnaviv_gem_object *to_etnaviv_bo(struct drm_gem_object *obj)
obj 61 drivers/gpu/drm/etnaviv/etnaviv_gem.h return container_of(obj, struct etnaviv_gem_object, base);
obj 81 drivers/gpu/drm/etnaviv/etnaviv_gem.h struct etnaviv_gem_object *obj;
obj 114 drivers/gpu/drm/etnaviv/etnaviv_gem.h int etnaviv_gem_wait_bo(struct etnaviv_gpu *gpu, struct drm_gem_object *obj,
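etnaviv_gem.c above implements the per-BO operations, and the etnaviv_drv.c hits earlier show the ioctl wrapper every one of them shares: resolve the handle, run the operation, drop the lookup reference. A condensed sketch of that pattern; the args struct and example_gem_op() are hypothetical stand-ins:

	#include <drm/drm_device.h>
	#include <drm/drm_file.h>
	#include <drm/drm_gem.h>

	struct drm_example_args {
		u32 handle;	/* hypothetical ioctl argument */
	};

	static int example_gem_op(struct drm_gem_object *obj)
	{
		return 0;	/* stand-in for etnaviv_gem_cpu_prep() etc. */
	}

	static int example_gem_op_ioctl(struct drm_device *dev, void *data,
					struct drm_file *file)
	{
		struct drm_example_args *args = data;
		struct drm_gem_object *obj;
		int ret;

		obj = drm_gem_object_lookup(file, args->handle);
		if (!obj)
			return -ENOENT;

		ret = example_gem_op(obj);

		drm_gem_object_put_unlocked(obj);	/* drop the lookup reference */
		return ret;
	}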
obj 118 drivers/gpu/drm/etnaviv/etnaviv_gem.h void etnaviv_gem_obj_add(struct drm_device *dev, struct drm_gem_object *obj);
obj 119 drivers/gpu/drm/etnaviv/etnaviv_gem.h struct page **etnaviv_gem_get_pages(struct etnaviv_gem_object *obj);
obj 120 drivers/gpu/drm/etnaviv/etnaviv_gem.h void etnaviv_gem_put_pages(struct etnaviv_gem_object *obj);
obj 123 drivers/gpu/drm/etnaviv/etnaviv_gem.h struct drm_gem_object *obj, struct etnaviv_iommu_context *mmu_context,
obj 14 drivers/gpu/drm/etnaviv/etnaviv_gem_prime.c struct sg_table *etnaviv_gem_prime_get_sg_table(struct drm_gem_object *obj)
obj 16 drivers/gpu/drm/etnaviv/etnaviv_gem_prime.c struct etnaviv_gem_object *etnaviv_obj = to_etnaviv_bo(obj);
obj 17 drivers/gpu/drm/etnaviv/etnaviv_gem_prime.c int npages = obj->size >> PAGE_SHIFT;
obj 25 drivers/gpu/drm/etnaviv/etnaviv_gem_prime.c void *etnaviv_gem_prime_vmap(struct drm_gem_object *obj)
obj 27 drivers/gpu/drm/etnaviv/etnaviv_gem_prime.c return etnaviv_gem_vmap(obj);
obj 30 drivers/gpu/drm/etnaviv/etnaviv_gem_prime.c void etnaviv_gem_prime_vunmap(struct drm_gem_object *obj, void *vaddr)
obj 35 drivers/gpu/drm/etnaviv/etnaviv_gem_prime.c int etnaviv_gem_prime_mmap(struct drm_gem_object *obj,
obj 38 drivers/gpu/drm/etnaviv/etnaviv_gem_prime.c struct etnaviv_gem_object *etnaviv_obj = to_etnaviv_bo(obj);
obj 41 drivers/gpu/drm/etnaviv/etnaviv_gem_prime.c ret = drm_gem_mmap_obj(obj, obj->size, vma);
obj 48 drivers/gpu/drm/etnaviv/etnaviv_gem_prime.c int etnaviv_gem_prime_pin(struct drm_gem_object *obj)
obj 50 drivers/gpu/drm/etnaviv/etnaviv_gem_prime.c if (!obj->import_attach) {
obj 51 drivers/gpu/drm/etnaviv/etnaviv_gem_prime.c struct etnaviv_gem_object *etnaviv_obj = to_etnaviv_bo(obj);
obj 60 drivers/gpu/drm/etnaviv/etnaviv_gem_prime.c void etnaviv_gem_prime_unpin(struct drm_gem_object *obj)
obj 62 drivers/gpu/drm/etnaviv/etnaviv_gem_prime.c if (!obj->import_attach) {
obj 63 drivers/gpu/drm/etnaviv/etnaviv_gem_prime.c struct etnaviv_gem_object *etnaviv_obj = to_etnaviv_bo(obj);
obj 66 drivers/gpu/drm/etnaviv/etnaviv_gem_prime.c etnaviv_gem_put_pages(to_etnaviv_bo(obj));
obj 66 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c struct drm_gem_object *obj;
obj 87 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c obj = idr_find(&file->object_idr, bo->handle);
obj 88 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c if (!obj) {
obj 99 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c drm_gem_object_get(obj);
obj 101 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c submit->bos[i].obj = to_etnaviv_bo(obj);
obj 114 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c struct drm_gem_object *obj = &submit->bos[i].obj->base;
obj 116 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c ww_mutex_unlock(&obj->resv->lock);
obj 128 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c struct drm_gem_object *obj = &submit->bos[i].obj->base;
obj 136 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c ret = ww_mutex_lock_interruptible(&obj->resv->lock,
obj 159 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c struct drm_gem_object *obj;
obj 161 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c obj = &submit->bos[contended].obj->base;
obj 164 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c ret = ww_mutex_lock_slow_interruptible(&obj->resv->lock,
obj 182 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c struct dma_resv *robj = bo->obj->base.resv;
obj 213 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c struct drm_gem_object *obj = &submit->bos[i].obj->base;
obj 216 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c dma_resv_add_excl_fence(obj->resv,
obj 219 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c dma_resv_add_shared_fence(obj->resv,
obj 231 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c struct etnaviv_gem_object *etnaviv_obj = submit->bos[i].obj;
obj 313 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c if (r->reloc_offset > bo->obj->base.size - sizeof(*ptr)) {
obj 346 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c if (r->read_offset >= bo->obj->base.size - sizeof(u32)) {
obj 366 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c submit->pmrs[i].bo_vma = etnaviv_gem_vmap(&bo->obj->base);
obj 391 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c struct etnaviv_gem_object *etnaviv_obj = submit->bos[i].obj;
obj 76 drivers/gpu/drm/exynos/exynos_drm_fb.c fb->obj[i] = &exynos_gem[i]->base;
obj 149 drivers/gpu/drm/exynos/exynos_drm_fb.c exynos_gem = to_exynos_gem(fb->obj[index]);
obj 193 drivers/gpu/drm/exynos/exynos_drm_g2d.c void *obj[MAX_REG_TYPE_NR];
obj 377 drivers/gpu/drm/exynos/exynos_drm_g2d.c void *obj,
obj 380 drivers/gpu/drm/exynos/exynos_drm_g2d.c struct g2d_cmdlist_userptr *g2d_userptr = obj;
obj 383 drivers/gpu/drm/exynos/exynos_drm_g2d.c if (!obj)
obj 423 drivers/gpu/drm/exynos/exynos_drm_g2d.c void **obj)
obj 446 drivers/gpu/drm/exynos/exynos_drm_g2d.c *obj = g2d_userptr;
obj 531 drivers/gpu/drm/exynos/exynos_drm_g2d.c *obj = g2d_userptr;
obj 734 drivers/gpu/drm/exynos/exynos_drm_g2d.c buf_info->obj[reg_type] = exynos_gem;
obj 755 drivers/gpu/drm/exynos/exynos_drm_g2d.c &buf_info->obj[reg_type]);
obj 783 drivers/gpu/drm/exynos/exynos_drm_g2d.c void *obj;
obj 788 drivers/gpu/drm/exynos/exynos_drm_g2d.c obj = buf_info->obj[reg_type];
obj 791 drivers/gpu/drm/exynos/exynos_drm_g2d.c exynos_drm_gem_put(obj);
obj 793 drivers/gpu/drm/exynos/exynos_drm_g2d.c g2d_userptr_put_dma_addr(g2d, obj, false);
obj 796 drivers/gpu/drm/exynos/exynos_drm_g2d.c buf_info->obj[reg_type] = NULL;
obj 125 drivers/gpu/drm/exynos/exynos_drm_gem.c static int exynos_drm_gem_handle_create(struct drm_gem_object *obj,
obj 135 drivers/gpu/drm/exynos/exynos_drm_gem.c ret = drm_gem_handle_create(file_priv, obj, handle);
obj 139 drivers/gpu/drm/exynos/exynos_drm_gem.c DRM_DEV_DEBUG_KMS(to_dma_dev(obj->dev), "gem handle = 0x%x\n", *handle);
obj 142 drivers/gpu/drm/exynos/exynos_drm_gem.c drm_gem_object_put_unlocked(obj);
obj 149 drivers/gpu/drm/exynos/exynos_drm_gem.c struct drm_gem_object *obj = &exynos_gem->base;
obj 151 drivers/gpu/drm/exynos/exynos_drm_gem.c DRM_DEV_DEBUG_KMS(to_dma_dev(obj->dev), "handle count = %d\n",
obj 152 drivers/gpu/drm/exynos/exynos_drm_gem.c obj->handle_count);
obj 160 drivers/gpu/drm/exynos/exynos_drm_gem.c if (obj->import_attach)
obj 161 drivers/gpu/drm/exynos/exynos_drm_gem.c drm_prime_gem_destroy(obj, exynos_gem->sgt);
obj 166 drivers/gpu/drm/exynos/exynos_drm_gem.c drm_gem_object_release(obj);
obj 175 drivers/gpu/drm/exynos/exynos_drm_gem.c struct drm_gem_object *obj;
obj 183 drivers/gpu/drm/exynos/exynos_drm_gem.c obj = &exynos_gem->base;
obj 185 drivers/gpu/drm/exynos/exynos_drm_gem.c ret = drm_gem_object_init(dev, obj, size);
obj 192 drivers/gpu/drm/exynos/exynos_drm_gem.c ret = drm_gem_create_mmap_offset(obj);
obj 194 drivers/gpu/drm/exynos/exynos_drm_gem.c drm_gem_object_release(obj);
obj 199 drivers/gpu/drm/exynos/exynos_drm_gem.c DRM_DEV_DEBUG_KMS(dev->dev, "created file object = %pK\n", obj->filp);
obj 283 drivers/gpu/drm/exynos/exynos_drm_gem.c struct drm_gem_object *obj;
obj 285 drivers/gpu/drm/exynos/exynos_drm_gem.c obj = drm_gem_object_lookup(filp, gem_handle);
obj 286 drivers/gpu/drm/exynos/exynos_drm_gem.c if (!obj)
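The etnaviv_gem_submit.c hits at lines 213-219 show the post-submit fencing rule: a BO the job writes gets the job's fence installed as the exclusive fence, a read-only BO gets it as a shared fence. A sketch of that loop, assuming a hypothetical submit layout with per-BO write flags; shared slots must already have been reserved (e.g. via dma_resv_reserve_shared() during lookup/locking):

	#include <linux/dma-resv.h>
	#include <drm/drm_gem.h>

	struct example_submit {			/* hypothetical job description */
		unsigned int nr_bos;
		struct drm_gem_object **bos;
		bool *bo_write;
	};

	static void example_attach_fences(struct example_submit *submit,
					  struct dma_fence *fence)
	{
		unsigned int i;

		for (i = 0; i < submit->nr_bos; i++) {
			struct drm_gem_object *obj = submit->bos[i];

			if (submit->bo_write[i])
				dma_resv_add_excl_fence(obj->resv, fence);
			else
				dma_resv_add_shared_fence(obj->resv, fence);
		}
	}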
obj 288 drivers/gpu/drm/exynos/exynos_drm_gem.c return to_exynos_gem(obj);
obj 323 drivers/gpu/drm/exynos/exynos_drm_gem.c struct drm_gem_object *obj;
obj 325 drivers/gpu/drm/exynos/exynos_drm_gem.c obj = drm_gem_object_lookup(file_priv, args->handle);
obj 326 drivers/gpu/drm/exynos/exynos_drm_gem.c if (!obj) {
obj 331 drivers/gpu/drm/exynos/exynos_drm_gem.c exynos_gem = to_exynos_gem(obj);
obj 336 drivers/gpu/drm/exynos/exynos_drm_gem.c drm_gem_object_put_unlocked(obj);
obj 341 drivers/gpu/drm/exynos/exynos_drm_gem.c void exynos_drm_gem_free_object(struct drm_gem_object *obj)
obj 343 drivers/gpu/drm/exynos/exynos_drm_gem.c exynos_drm_gem_destroy(to_exynos_gem(obj));
obj 387 drivers/gpu/drm/exynos/exynos_drm_gem.c struct drm_gem_object *obj = vma->vm_private_data;
obj 388 drivers/gpu/drm/exynos/exynos_drm_gem.c struct exynos_drm_gem *exynos_gem = to_exynos_gem(obj);
obj 404 drivers/gpu/drm/exynos/exynos_drm_gem.c static int exynos_drm_gem_mmap_obj(struct drm_gem_object *obj,
obj 407 drivers/gpu/drm/exynos/exynos_drm_gem.c struct exynos_drm_gem *exynos_gem = to_exynos_gem(obj);
obj 410 drivers/gpu/drm/exynos/exynos_drm_gem.c DRM_DEV_DEBUG_KMS(to_dma_dev(obj->dev), "flags = 0x%x\n",
obj 437 drivers/gpu/drm/exynos/exynos_drm_gem.c struct drm_gem_object *obj;
obj 447 drivers/gpu/drm/exynos/exynos_drm_gem.c obj = vma->vm_private_data;
obj 449 drivers/gpu/drm/exynos/exynos_drm_gem.c if (obj->import_attach)
obj 450 drivers/gpu/drm/exynos/exynos_drm_gem.c return dma_buf_mmap(obj->dma_buf, vma, 0);
obj 452 drivers/gpu/drm/exynos/exynos_drm_gem.c return exynos_drm_gem_mmap_obj(obj, vma);
obj 462 drivers/gpu/drm/exynos/exynos_drm_gem.c struct sg_table *exynos_drm_gem_prime_get_sg_table(struct drm_gem_object *obj)
obj 464 drivers/gpu/drm/exynos/exynos_drm_gem.c struct exynos_drm_gem *exynos_gem = to_exynos_gem(obj);
obj 526 drivers/gpu/drm/exynos/exynos_drm_gem.c void *exynos_drm_gem_prime_vmap(struct drm_gem_object *obj)
obj 531 drivers/gpu/drm/exynos/exynos_drm_gem.c void exynos_drm_gem_prime_vunmap(struct drm_gem_object *obj, void *vaddr)
obj 536 drivers/gpu/drm/exynos/exynos_drm_gem.c int exynos_drm_gem_prime_mmap(struct drm_gem_object *obj,
obj 541 drivers/gpu/drm/exynos/exynos_drm_gem.c ret = drm_gem_mmap_obj(obj, obj->size, vma);
obj 545 drivers/gpu/drm/exynos/exynos_drm_gem.c return exynos_drm_gem_mmap_obj(obj, vma);
obj 97 drivers/gpu/drm/exynos/exynos_drm_gem.h void exynos_drm_gem_free_object(struct drm_gem_object *obj);
obj 113 drivers/gpu/drm/exynos/exynos_drm_gem.h struct sg_table *exynos_drm_gem_prime_get_sg_table(struct drm_gem_object *obj);
obj 118 drivers/gpu/drm/exynos/exynos_drm_gem.h void *exynos_drm_gem_prime_vmap(struct drm_gem_object *obj);
obj 119 drivers/gpu/drm/exynos/exynos_drm_gem.h void exynos_drm_gem_prime_vunmap(struct drm_gem_object *obj, void *vaddr);
obj 120 drivers/gpu/drm/exynos/exynos_drm_gem.h int exynos_drm_gem_prime_mmap(struct drm_gem_object *obj,
obj 242 drivers/gpu/drm/gma500/accel_2d.c offset = to_gtt_range(fb->obj[0])->offset;
obj 83 drivers/gpu/drm/gma500/framebuffer.c struct gtt_range *gtt = to_gtt_range(psbfb->base.obj[0]);
obj 105 drivers/gpu/drm/gma500/framebuffer.c struct gtt_range *gtt = to_gtt_range(psbfb->base.obj[0]);
obj 231 drivers/gpu/drm/gma500/framebuffer.c fb->base.obj[0] = &gt->gem;
obj 443 drivers/gpu/drm/gma500/framebuffer.c struct drm_gem_object *obj;
obj 449 drivers/gpu/drm/gma500/framebuffer.c obj = drm_gem_object_lookup(filp, cmd->handles[0]);
obj 450 drivers/gpu/drm/gma500/framebuffer.c if (obj == NULL)
obj 454 drivers/gpu/drm/gma500/framebuffer.c r = container_of(obj, struct gtt_range, gem);
obj 501 drivers/gpu/drm/gma500/framebuffer.c if (psbfb->base.obj[0])
obj 502 drivers/gpu/drm/gma500/framebuffer.c drm_gem_object_put_unlocked(psbfb->base.obj[0]);
obj 21 drivers/gpu/drm/gma500/gem.c void psb_gem_free_object(struct drm_gem_object *obj)
obj 23 drivers/gpu/drm/gma500/gem.c struct gtt_range *gtt = container_of(obj, struct gtt_range, gem);
obj 26 drivers/gpu/drm/gma500/gem.c drm_gem_free_mmap_offset(obj);
obj 27 drivers/gpu/drm/gma500/gem.c drm_gem_object_release(obj);
obj 30 drivers/gpu/drm/gma500/gem.c psb_gtt_free_range(obj->dev, gtt);
obj 129 drivers/gpu/drm/gma500/gem.c struct drm_gem_object *obj;
obj 138 drivers/gpu/drm/gma500/gem.c obj = vma->vm_private_data; /* GEM object */
obj 139 drivers/gpu/drm/gma500/gem.c dev = obj->dev;
obj 142 drivers/gpu/drm/gma500/gem.c r = container_of(obj, struct gtt_range, gem); /* Get the gtt range */
obj 73 drivers/gpu/drm/gma500/gma_display.c gtt = to_gtt_range(fb->obj[0]);
obj 128 drivers/gpu/drm/gma500/gma_display.c psb_gtt_unpin(to_gtt_range(old_fb->obj[0]));
obj 334 drivers/gpu/drm/gma500/gma_display.c struct drm_gem_object *obj;
obj 364 drivers/gpu/drm/gma500/gma_display.c obj = drm_gem_object_lookup(file_priv, handle);
obj 365 drivers/gpu/drm/gma500/gma_display.c if (!obj) {
obj 370 drivers/gpu/drm/gma500/gma_display.c if (obj->size < width * height * 4) {
obj 376 drivers/gpu/drm/gma500/gma_display.c gt = container_of(obj, struct gtt_range, gem);
obj 431 drivers/gpu/drm/gma500/gma_display.c gma_crtc->cursor_obj = obj;
obj 436 drivers/gpu/drm/gma500/gma_display.c drm_gem_object_put_unlocked(obj);
obj 490 drivers/gpu/drm/gma500/gma_display.c gt = to_gtt_range(crtc->primary->fb->obj[0]);
obj 189 drivers/gpu/drm/gma500/mdfld_intel_display.c start = to_gtt_range(fb->obj[0])->offset;
obj 611 drivers/gpu/drm/gma500/oaktrail_crtc.c start = to_gtt_range(fb->obj[0])->offset;
obj 736 drivers/gpu/drm/gma500/psb_drv.h extern void psb_gem_free_object(struct drm_gem_object *obj);
obj 110 drivers/gpu/drm/hisilicon/hibmc/hibmc_drm_de.c gbo = drm_gem_vram_of_gem(hibmc_fb->obj);
obj 25 drivers/gpu/drm/hisilicon/hibmc/hibmc_drm_drv.h struct drm_gem_object *obj;
obj 63 drivers/gpu/drm/hisilicon/hibmc/hibmc_drm_drv.h struct drm_gem_object **obj);
obj 67 drivers/gpu/drm/hisilicon/hibmc/hibmc_drm_drv.h struct drm_gem_object *obj);
obj 51 drivers/gpu/drm/hisilicon/hibmc/hibmc_ttm.c struct drm_gem_object **obj)
obj 56 drivers/gpu/drm/hisilicon/hibmc/hibmc_ttm.c *obj = NULL;
obj 69 drivers/gpu/drm/hisilicon/hibmc/hibmc_ttm.c *obj = &gbo->bo.base;
obj 105 drivers/gpu/drm/hisilicon/hibmc/hibmc_ttm.c drm_gem_object_put_unlocked(hibmc_fb->obj);
obj 117 drivers/gpu/drm/hisilicon/hibmc/hibmc_ttm.c struct drm_gem_object *obj)
obj 129 drivers/gpu/drm/hisilicon/hibmc/hibmc_ttm.c hibmc_fb->obj = obj;
obj 145 drivers/gpu/drm/hisilicon/hibmc/hibmc_ttm.c struct drm_gem_object *obj;
obj 155 drivers/gpu/drm/hisilicon/hibmc/hibmc_ttm.c obj = drm_gem_object_lookup(filp, mode_cmd->handles[0]);
obj 156 drivers/gpu/drm/hisilicon/hibmc/hibmc_ttm.c if (!obj)
obj 159 drivers/gpu/drm/hisilicon/hibmc/hibmc_ttm.c hibmc_fb = hibmc_framebuffer_init(dev, mode_cmd, obj);
obj 161 drivers/gpu/drm/hisilicon/hibmc/hibmc_ttm.c drm_gem_object_put_unlocked(obj);
obj 551 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c struct drm_gem_cma_object *obj = drm_fb_cma_get_gem_obj(fb, 0);
obj 555 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c u32 addr = (u32)obj->paddr + y * stride;
obj 558 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c ch + 1, y, in_h, stride, (u32)obj->paddr);
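The gma500 and hibmc entries above both follow the stock .fb_create contract: look up the GEM object named by handles[0], build the framebuffer around it, and drop the lookup reference on the failure path (the framebuffer keeps its own reference on success, as hibmc_ttm.c lines 155-161 show). A sketch of that contract, with example_framebuffer_init() standing in for the driver-specific constructor:

	#include <drm/drm_file.h>
	#include <drm/drm_framebuffer.h>
	#include <drm/drm_gem.h>

	/* Hypothetical driver constructor that wraps obj in a framebuffer. */
	extern struct drm_framebuffer *
	example_framebuffer_init(struct drm_device *dev,
				 const struct drm_mode_fb_cmd2 *mode_cmd,
				 struct drm_gem_object *obj);

	static struct drm_framebuffer *
	example_fb_create(struct drm_device *dev, struct drm_file *filp,
			  const struct drm_mode_fb_cmd2 *mode_cmd)
	{
		struct drm_gem_object *obj;
		struct drm_framebuffer *fb;

		obj = drm_gem_object_lookup(filp, mode_cmd->handles[0]);
		if (!obj)
			return ERR_PTR(-ENOENT);

		fb = example_framebuffer_init(dev, mode_cmd, obj);
		if (IS_ERR(fb))
			drm_gem_object_put_unlocked(obj);	/* undo lookup */

		return fb;
	}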
obj 90 drivers/gpu/drm/i915/display/intel_acpi.c union acpi_object *obj = &pkg->package.elements[i];
obj 91 drivers/gpu/drm/i915/display/intel_acpi.c union acpi_object *connector_id = &obj->package.elements[0];
obj 92 drivers/gpu/drm/i915/display/intel_acpi.c union acpi_object *info = &obj->package.elements[1];
obj 404 drivers/gpu/drm/i915/display/intel_bw.c static struct drm_private_state *intel_bw_duplicate_state(struct drm_private_obj *obj)
obj 408 drivers/gpu/drm/i915/display/intel_bw.c state = kmemdup(obj->state, sizeof(*state), GFP_KERNEL);
obj 412 drivers/gpu/drm/i915/display/intel_bw.c __drm_atomic_helper_private_obj_duplicate_state(obj, &state->base);
obj 417 drivers/gpu/drm/i915/display/intel_bw.c static void intel_bw_destroy_state(struct drm_private_obj *obj,
obj 123 drivers/gpu/drm/i915/display/intel_display.c struct drm_i915_gem_object *obj,
obj 2076 drivers/gpu/drm/i915/display/intel_display.c struct drm_i915_gem_object *obj = intel_fb_obj(fb);
obj 2102 drivers/gpu/drm/i915/display/intel_display.c i915_gem_object_lock(obj);
obj 2118 drivers/gpu/drm/i915/display/intel_display.c vma = i915_gem_object_pin_to_display_plane(obj,
obj 2157 drivers/gpu/drm/i915/display/intel_display.c i915_gem_object_unlock(obj);
obj 2166 drivers/gpu/drm/i915/display/intel_display.c i915_gem_object_lock(vma->obj);
obj 2170 drivers/gpu/drm/i915/display/intel_display.c i915_gem_object_unlock(vma->obj);
obj 2633 drivers/gpu/drm/i915/display/intel_display.c struct drm_i915_gem_object *obj = intel_fb_obj(fb);
obj 2697 drivers/gpu/drm/i915/display/intel_display.c if (i == 0 && i915_gem_object_is_tiled(obj) &&
obj 2783 drivers/gpu/drm/i915/display/intel_display.c if (mul_u32_u32(max_size, tile_size) > obj->base.size) {
obj 2785 drivers/gpu/drm/i915/display/intel_display.c mul_u32_u32(max_size, tile_size), obj->base.size);
obj 3044 drivers/gpu/drm/i915/display/intel_display.c struct drm_i915_gem_object *obj;
obj 3070 drivers/gpu/drm/i915/display/intel_display.c obj = i915_gem_object_create_stolen_for_preallocated(dev_priv,
obj 3075 drivers/gpu/drm/i915/display/intel_display.c if (!obj)
obj 3083 drivers/gpu/drm/i915/display/intel_display.c obj->tiling_and_stride = fb->pitches[0] | plane_config->tiling;
obj 3097 drivers/gpu/drm/i915/display/intel_display.c if (intel_framebuffer_init(to_intel_framebuffer(fb), obj, &mode_cmd)) {
obj 3103 drivers/gpu/drm/i915/display/intel_display.c DRM_DEBUG_KMS("initial plane fb obj %p\n", obj);
obj 3106 drivers/gpu/drm/i915/display/intel_display.c i915_gem_object_put(obj);
obj 10509 drivers/gpu/drm/i915/display/intel_display.c const struct drm_i915_gem_object *obj = intel_fb_obj(fb);
obj 10513 drivers/gpu/drm/i915/display/intel_display.c base = sg_dma_address(obj->mm.pages->sgl);
obj 11040 drivers/gpu/drm/i915/display/intel_display.c intel_framebuffer_create(struct drm_i915_gem_object *obj,
obj 11050 drivers/gpu/drm/i915/display/intel_display.c ret = intel_framebuffer_init(intel_fb, obj, mode_cmd);
obj 14311 drivers/gpu/drm/i915/display/intel_display.c struct drm_i915_gem_object *obj = intel_fb_obj(fb);
obj 14315 drivers/gpu/drm/i915/display/intel_display.c err = i915_gem_object_attach_phys(obj, align);
obj 14341 drivers/gpu/drm/i915/display/intel_display.c static void fb_obj_bump_render_priority(struct drm_i915_gem_object *obj)
obj 14347 drivers/gpu/drm/i915/display/intel_display.c i915_gem_object_wait_priority(obj, 0, &attr);
obj 14372 drivers/gpu/drm/i915/display/intel_display.c struct drm_i915_gem_object *obj = intel_fb_obj(fb);
obj 14411 drivers/gpu/drm/i915/display/intel_display.c if (!obj)
obj 14414 drivers/gpu/drm/i915/display/intel_display.c ret = i915_gem_object_pin_pages(obj);
obj 14420 drivers/gpu/drm/i915/display/intel_display.c i915_gem_object_unpin_pages(obj);
obj 14427 drivers/gpu/drm/i915/display/intel_display.c i915_gem_object_unpin_pages(obj);
obj 14431 drivers/gpu/drm/i915/display/intel_display.c fb_obj_bump_render_priority(obj);
obj 14432 drivers/gpu/drm/i915/display/intel_display.c intel_frontbuffer_flush(obj->frontbuffer, ORIGIN_DIRTYFB);
obj 14438 drivers/gpu/drm/i915/display/intel_display.c obj->base.resv, NULL,
obj 14444 drivers/gpu/drm/i915/display/intel_display.c fence = dma_resv_get_excl_rcu(obj->base.resv);
obj 15562 drivers/gpu/drm/i915/display/intel_display.c struct drm_i915_gem_object *obj = intel_fb_obj(fb);
obj 15564 drivers/gpu/drm/i915/display/intel_display.c if (obj->userptr.mm) {
obj 15569 drivers/gpu/drm/i915/display/intel_display.c return drm_gem_handle_create(file, &obj->base, handle);
obj 15578 drivers/gpu/drm/i915/display/intel_display.c struct drm_i915_gem_object *obj = intel_fb_obj(fb);
obj 15580 drivers/gpu/drm/i915/display/intel_display.c i915_gem_object_flush_if_display(obj);
obj 15593 drivers/gpu/drm/i915/display/intel_display.c struct drm_i915_gem_object *obj,
obj 15596 drivers/gpu/drm/i915/display/intel_display.c struct drm_i915_private *dev_priv = to_i915(obj->base.dev);
obj 15603 drivers/gpu/drm/i915/display/intel_display.c intel_fb->frontbuffer = intel_frontbuffer_get(obj);
obj 15607 drivers/gpu/drm/i915/display/intel_display.c i915_gem_object_lock(obj);
obj 15608 drivers/gpu/drm/i915/display/intel_display.c tiling = i915_gem_object_get_tiling(obj);
obj 15609 drivers/gpu/drm/i915/display/intel_display.c stride = i915_gem_object_get_stride(obj);
obj 15610 drivers/gpu/drm/i915/display/intel_display.c i915_gem_object_unlock(obj);
obj 15708 drivers/gpu/drm/i915/display/intel_display.c fb->obj[i] = &obj->base;
obj 15734 drivers/gpu/drm/i915/display/intel_display.c struct drm_i915_gem_object *obj;
obj 15737 drivers/gpu/drm/i915/display/intel_display.c obj = i915_gem_object_lookup(filp, mode_cmd.handles[0]);
obj 15738 drivers/gpu/drm/i915/display/intel_display.c if (!obj)
obj 15741 drivers/gpu/drm/i915/display/intel_display.c fb = intel_framebuffer_create(obj, &mode_cmd);
obj 15742 drivers/gpu/drm/i915/display/intel_display.c i915_gem_object_put(obj);
obj 480 drivers/gpu/drm/i915/display/intel_display.h intel_framebuffer_create(struct drm_i915_gem_object *obj,
obj 1094 drivers/gpu/drm/i915/display/intel_display_types.h #define intel_fb_obj(x) ((x) ? to_intel_bo((x)->obj[0]) : NULL)
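intel_display.c lines 15734-15742 above sketch the i915 variant of the same flow. Note the unconditional i915_gem_object_put(): intel_framebuffer_init() pins its own reference through intel_frontbuffer_get() (line 15603), so the lookup reference is always dropped. Roughly, using only the i915-internal helpers visible in this listing (example_user_fb_create() is a hypothetical wrapper):

	/* i915-internal sketch; signatures as they appear in this listing. */
	static struct drm_framebuffer *
	example_user_fb_create(struct drm_file *filp,
			       struct drm_mode_fb_cmd2 *mode_cmd)
	{
		struct drm_i915_gem_object *obj;
		struct drm_framebuffer *fb;

		obj = i915_gem_object_lookup(filp, mode_cmd->handles[0]);
		if (!obj)
			return ERR_PTR(-ENOENT);

		fb = intel_framebuffer_create(obj, mode_cmd);
		i915_gem_object_put(obj);	/* fb holds its own reference */
		return fb;
	}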
obj 293 drivers/gpu/drm/i915/display/intel_fbc.c if (i915_gem_object_get_tiling(params->vma->obj) !=
obj 123 drivers/gpu/drm/i915/display/intel_fbdev.c struct drm_i915_gem_object *obj;
obj 144 drivers/gpu/drm/i915/display/intel_fbdev.c obj = NULL;
obj 146 drivers/gpu/drm/i915/display/intel_fbdev.c obj = i915_gem_object_create_stolen(dev_priv, size);
obj 147 drivers/gpu/drm/i915/display/intel_fbdev.c if (obj == NULL)
obj 148 drivers/gpu/drm/i915/display/intel_fbdev.c obj = i915_gem_object_create_shmem(dev_priv, size);
obj 149 drivers/gpu/drm/i915/display/intel_fbdev.c if (IS_ERR(obj)) {
obj 151 drivers/gpu/drm/i915/display/intel_fbdev.c return PTR_ERR(obj);
obj 154 drivers/gpu/drm/i915/display/intel_fbdev.c fb = intel_framebuffer_create(obj, &mode_cmd);
obj 155 drivers/gpu/drm/i915/display/intel_fbdev.c i915_gem_object_put(obj);
obj 258 drivers/gpu/drm/i915/display/intel_fbdev.c if (vma->obj->stolen && !prealloc)
obj 326 drivers/gpu/drm/i915/display/intel_fbdev.c struct drm_i915_gem_object *obj =
obj 330 drivers/gpu/drm/i915/display/intel_fbdev.c if (!crtc->state->active || !obj) {
obj 336 drivers/gpu/drm/i915/display/intel_fbdev.c if (obj->base.size > max_size) {
obj 340 drivers/gpu/drm/i915/display/intel_fbdev.c max_size = obj->base.size;
obj 167 drivers/gpu/drm/i915/display/intel_frontbuffer.c struct drm_i915_private *i915 = to_i915(front->obj->base.dev);
obj 186 drivers/gpu/drm/i915/display/intel_frontbuffer.c struct drm_i915_private *i915 = to_i915(front->obj->base.dev);
obj 219 drivers/gpu/drm/i915/display/intel_frontbuffer.c __releases(&to_i915(front->obj->base.dev)->fb_tracking.lock)
obj 224 drivers/gpu/drm/i915/display/intel_frontbuffer.c front->obj->frontbuffer = NULL;
obj 225 drivers/gpu/drm/i915/display/intel_frontbuffer.c spin_unlock(&to_i915(front->obj->base.dev)->fb_tracking.lock);
obj 227 drivers/gpu/drm/i915/display/intel_frontbuffer.c i915_gem_object_put(front->obj);
obj 232 drivers/gpu/drm/i915/display/intel_frontbuffer.c intel_frontbuffer_get(struct drm_i915_gem_object *obj)
obj 234 drivers/gpu/drm/i915/display/intel_frontbuffer.c struct drm_i915_private *i915 = to_i915(obj->base.dev);
obj 238 drivers/gpu/drm/i915/display/intel_frontbuffer.c front = obj->frontbuffer;
obj 249 drivers/gpu/drm/i915/display/intel_frontbuffer.c front->obj = obj;
obj 256 drivers/gpu/drm/i915/display/intel_frontbuffer.c if (obj->frontbuffer) {
obj 258 drivers/gpu/drm/i915/display/intel_frontbuffer.c front = obj->frontbuffer;
obj 261 drivers/gpu/drm/i915/display/intel_frontbuffer.c i915_gem_object_get(obj);
obj 262 drivers/gpu/drm/i915/display/intel_frontbuffer.c obj->frontbuffer = front;
obj 273 drivers/gpu/drm/i915/display/intel_frontbuffer.c &to_i915(front->obj->base.dev)->fb_tracking.lock);
obj 47 drivers/gpu/drm/i915/display/intel_frontbuffer.h struct drm_i915_gem_object *obj;
obj 58 drivers/gpu/drm/i915/display/intel_frontbuffer.h intel_frontbuffer_get(struct drm_i915_gem_object *obj);
obj 284 drivers/gpu/drm/i915/display/intel_overlay.c intel_frontbuffer_track(overlay->vma ? overlay->vma->obj->frontbuffer : NULL,
obj 285 drivers/gpu/drm/i915/display/intel_overlay.c vma ? vma->obj->frontbuffer : NULL,
obj 1302 drivers/gpu/drm/i915/display/intel_overlay.c struct drm_i915_gem_object *obj;
obj 1308 drivers/gpu/drm/i915/display/intel_overlay.c obj = i915_gem_object_create_stolen(i915, PAGE_SIZE);
obj 1309 drivers/gpu/drm/i915/display/intel_overlay.c if (obj == NULL)
obj 1310 drivers/gpu/drm/i915/display/intel_overlay.c obj = i915_gem_object_create_internal(i915, PAGE_SIZE);
obj 1311 drivers/gpu/drm/i915/display/intel_overlay.c if (IS_ERR(obj)) {
obj 1312 drivers/gpu/drm/i915/display/intel_overlay.c err = PTR_ERR(obj);
obj 1316 drivers/gpu/drm/i915/display/intel_overlay.c vma = i915_gem_object_ggtt_pin(obj, NULL, 0, 0, PIN_MAPPABLE);
obj 1323 drivers/gpu/drm/i915/display/intel_overlay.c overlay->flip_addr = sg_dma_address(obj->mm.pages->sgl);
obj 1334 drivers/gpu/drm/i915/display/intel_overlay.c overlay->reg_bo = obj;
obj 1339 drivers/gpu/drm/i915/display/intel_overlay.c i915_gem_object_put(obj);
obj 84 drivers/gpu/drm/i915/gem/i915_gem_busy.c struct drm_i915_gem_object *obj;
obj 91 drivers/gpu/drm/i915/gem/i915_gem_busy.c obj = i915_gem_object_lookup_rcu(file, args->handle);
obj 92 drivers/gpu/drm/i915/gem/i915_gem_busy.c if (!obj)
obj 113 drivers/gpu/drm/i915/gem/i915_gem_busy.c seq = raw_read_seqcount(&obj->base.resv->seq);
obj 117 drivers/gpu/drm/i915/gem/i915_gem_busy.c busy_check_writer(rcu_dereference(obj->base.resv->fence_excl));
obj 120 drivers/gpu/drm/i915/gem/i915_gem_busy.c list = rcu_dereference(obj->base.resv->fence);
obj 132 drivers/gpu/drm/i915/gem/i915_gem_busy.c if (args->busy && read_seqcount_retry(&obj->base.resv->seq, seq))
obj 16 drivers/gpu/drm/i915/gem/i915_gem_clflush.c struct drm_i915_gem_object *obj;
obj 19 drivers/gpu/drm/i915/gem/i915_gem_clflush.c static void __do_clflush(struct drm_i915_gem_object *obj)
obj 21 drivers/gpu/drm/i915/gem/i915_gem_clflush.c GEM_BUG_ON(!i915_gem_object_has_pages(obj));
obj 22 drivers/gpu/drm/i915/gem/i915_gem_clflush.c drm_clflush_sg(obj->mm.pages);
obj 23 drivers/gpu/drm/i915/gem/i915_gem_clflush.c intel_frontbuffer_flush(obj->frontbuffer, ORIGIN_CPU);
obj 29 drivers/gpu/drm/i915/gem/i915_gem_clflush.c struct drm_i915_gem_object *obj = fetch_and_zero(&clflush->obj);
obj 32 drivers/gpu/drm/i915/gem/i915_gem_clflush.c err = i915_gem_object_pin_pages(obj);
obj 36 drivers/gpu/drm/i915/gem/i915_gem_clflush.c __do_clflush(obj);
obj 37 drivers/gpu/drm/i915/gem/i915_gem_clflush.c i915_gem_object_unpin_pages(obj);
obj 40 drivers/gpu/drm/i915/gem/i915_gem_clflush.c i915_gem_object_put(obj);
obj 48 drivers/gpu/drm/i915/gem/i915_gem_clflush.c if (clflush->obj)
obj 49 drivers/gpu/drm/i915/gem/i915_gem_clflush.c i915_gem_object_put(clflush->obj);
obj 58 drivers/gpu/drm/i915/gem/i915_gem_clflush.c static struct clflush *clflush_work_create(struct drm_i915_gem_object *obj)
obj 62 drivers/gpu/drm/i915/gem/i915_gem_clflush.c GEM_BUG_ON(!obj->cache_dirty);
obj 69 drivers/gpu/drm/i915/gem/i915_gem_clflush.c clflush->obj = i915_gem_object_get(obj); /* obj <-> clflush cycle */
obj 74 drivers/gpu/drm/i915/gem/i915_gem_clflush.c bool i915_gem_clflush_object(struct drm_i915_gem_object *obj,
obj 79 drivers/gpu/drm/i915/gem/i915_gem_clflush.c assert_object_held(obj);
obj 88 drivers/gpu/drm/i915/gem/i915_gem_clflush.c if (!i915_gem_object_has_struct_page(obj)) {
obj 89 drivers/gpu/drm/i915/gem/i915_gem_clflush.c obj->cache_dirty = false;
obj 102 drivers/gpu/drm/i915/gem/i915_gem_clflush.c obj->cache_coherent & I915_BO_CACHE_COHERENT_FOR_READ)
obj 105 drivers/gpu/drm/i915/gem/i915_gem_clflush.c trace_i915_gem_object_clflush(obj);
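The i915_gem_clflush.c hits above, taken together with the __i915_gem_object_flush_for_display() entries that follow (i915_gem_domain.c lines 16-35), reduce to a small lock/flush/unlock idiom for making CPU writes visible to scanout. A condensed restatement under the object lock, using only calls shown in this listing:

	/* Condensed from i915_gem_domain.c lines 28-35 and 16-25 below. */
	static void example_flush_for_scanout(struct drm_i915_gem_object *obj)
	{
		i915_gem_object_lock(obj);
		if (obj->cache_dirty)
			i915_gem_clflush_object(obj, I915_CLFLUSH_FORCE);
		obj->write_domain = 0;	/* no CPU writes left outstanding */
		i915_gem_object_unlock(obj);
	}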
obj 109 drivers/gpu/drm/i915/gem/i915_gem_clflush.c clflush = clflush_work_create(obj);
obj 112 drivers/gpu/drm/i915/gem/i915_gem_clflush.c obj->base.resv, NULL, true,
obj 115 drivers/gpu/drm/i915/gem/i915_gem_clflush.c dma_resv_add_excl_fence(obj->base.resv, &clflush->base.dma);
obj 117 drivers/gpu/drm/i915/gem/i915_gem_clflush.c } else if (obj->mm.pages) {
obj 118 drivers/gpu/drm/i915/gem/i915_gem_clflush.c __do_clflush(obj);
obj 120 drivers/gpu/drm/i915/gem/i915_gem_clflush.c GEM_BUG_ON(obj->write_domain != I915_GEM_DOMAIN_CPU);
obj 123 drivers/gpu/drm/i915/gem/i915_gem_clflush.c obj->cache_dirty = false;
obj 15 drivers/gpu/drm/i915/gem/i915_gem_clflush.h bool i915_gem_clflush_object(struct drm_i915_gem_object *obj,
obj 15 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c struct drm_i915_gem_object *obj;
obj 56 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c struct drm_i915_gem_object *obj,
obj 68 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c vma = i915_vma_instance(obj, vm, NULL);
obj 159 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c struct drm_i915_gem_object *obj = w->sleeve->vma->obj;
obj 168 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c if (obj->cache_dirty) {
obj 169 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c if (i915_gem_object_has_struct_page(obj))
obj 171 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c obj->cache_dirty = false;
obj 173 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c obj->read_domains = I915_GEM_GPU_DOMAINS;
obj 174 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c obj->write_domain = 0;
obj 264 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c int i915_gem_schedule_fill_pages_blt(struct drm_i915_gem_object *obj,
obj 274 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c sleeve = create_sleeve(ce->vm, obj, pages, page_sizes);
obj 295 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c i915_gem_object_lock(obj);
obj 297 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c obj->base.resv, NULL,
obj 303 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c dma_resv_add_excl_fence(obj->base.resv, &work->dma);
obj 306 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c i915_gem_object_unlock(obj);
obj 15 drivers/gpu/drm/i915/gem/i915_gem_client_blt.h int i915_gem_schedule_fill_pages_blt(struct drm_i915_gem_object *obj,
obj 107 drivers/gpu/drm/i915/gem/i915_gem_context.c struct drm_i915_gem_object *obj = vma->obj;
obj 110 drivers/gpu/drm/i915/gem/i915_gem_context.c if (!kref_get_unless_zero(&obj->base.refcount))
obj 114 drivers/gpu/drm/i915/gem/i915_gem_context.c i915_gem_object_lock(obj);
obj 115 drivers/gpu/drm/i915/gem/i915_gem_context.c list_for_each_entry(lut, &obj->lut_list, obj_link) {
obj 125 drivers/gpu/drm/i915/gem/i915_gem_context.c i915_gem_object_unlock(obj);
obj 128 drivers/gpu/drm/i915/gem/i915_gem_context.c if (&lut->obj_link != &obj->lut_list) {
obj 134 drivers/gpu/drm/i915/gem/i915_gem_context.c i915_gem_object_put(obj);
obj 137 drivers/gpu/drm/i915/gem/i915_gem_context.c i915_gem_object_put(obj);
obj 23 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c struct drm_i915_gem_object *obj = dma_buf_to_obj(attachment->dmabuf);
obj 28 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c ret = i915_gem_object_pin_pages(obj);
obj 39 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c ret = sg_alloc_table(st, obj->mm.pages->nents, GFP_KERNEL);
obj 43 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c src = obj->mm.pages->sgl;
obj 45 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c for (i = 0; i < obj->mm.pages->nents; i++) {
obj 63 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c i915_gem_object_unpin_pages(obj);
obj 72 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c struct drm_i915_gem_object *obj = dma_buf_to_obj(attachment->dmabuf);
obj 78 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c i915_gem_object_unpin_pages(obj);
obj 83 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf);
obj 85 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c return i915_gem_object_pin_map(obj, I915_MAP_WB);
obj 90 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf);
obj 92 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c i915_gem_object_flush_map(obj);
obj 93 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c i915_gem_object_unpin_map(obj);
obj 98 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf);
obj 101 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c if (page_num >= obj->base.size >> PAGE_SHIFT)
obj 104 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c if (!i915_gem_object_has_struct_page(obj))
obj 107 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c if (i915_gem_object_pin_pages(obj))
obj 111 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c page = i915_gem_object_get_page(obj, page_num);
obj 118 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c i915_gem_object_unpin_pages(obj);
obj 124 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf);
obj 127 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c i915_gem_object_unpin_pages(obj);
obj 132 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf);
obj 135 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c if (obj->base.size < vma->vm_end - vma->vm_start)
obj 138 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c if (!obj->base.filp)
obj 141 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c ret = call_mmap(obj->base.filp, vma);
obj 146 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c vma->vm_file = get_file(obj->base.filp);
obj 153 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf);
obj 157 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c err = i915_gem_object_pin_pages(obj);
obj 161 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c err = i915_gem_object_lock_interruptible(obj);
obj 165 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c err = i915_gem_object_set_to_cpu_domain(obj, write);
obj 166 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c i915_gem_object_unlock(obj);
obj 169 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c i915_gem_object_unpin_pages(obj);
obj 175 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf);
obj 178 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c err = i915_gem_object_pin_pages(obj);
obj 182 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c err = i915_gem_object_lock_interruptible(obj);
obj 186 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c err = i915_gem_object_set_to_gtt_domain(obj, false);
obj 187 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c i915_gem_object_unlock(obj);
obj 190 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c i915_gem_object_unpin_pages(obj);
obj 209 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c struct drm_i915_gem_object *obj = to_intel_bo(gem_obj);
obj 216 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c exp_info.resv = obj->base.resv;
obj 218 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c if (obj->ops->dmabuf_export) {
obj 219 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c int ret = obj->ops->dmabuf_export(obj);
obj 227 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c static int i915_gem_object_get_pages_dmabuf(struct drm_i915_gem_object *obj)
obj 232 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c pages = dma_buf_map_attachment(obj->base.import_attach,
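The begin/end_cpu_access hits above (i915_gem_dmabuf.c lines 153-190) bracket CPU access to an exported buffer: pin the backing pages, take the object lock, move the object into (or back out of) the CPU domain, then unwind. A sketch of the begin side, with the dma-buf direction argument collapsed to a plain write flag for brevity:

	static int example_begin_cpu_access(struct dma_buf *dma_buf, bool write)
	{
		struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf);
		int err;

		err = i915_gem_object_pin_pages(obj);
		if (err)
			return err;

		err = i915_gem_object_lock_interruptible(obj);
		if (err)
			goto out;

		err = i915_gem_object_set_to_cpu_domain(obj, write);
		i915_gem_object_unlock(obj);

	out:
		i915_gem_object_unpin_pages(obj);
		return err;
	}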
obj 239 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c __i915_gem_object_set_pages(obj, pages, sg_page_sizes);
obj 244 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c static void i915_gem_object_put_pages_dmabuf(struct drm_i915_gem_object *obj,
obj 247 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c dma_buf_unmap_attachment(obj->base.import_attach, pages,
obj 260 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c struct drm_i915_gem_object *obj;
obj 265 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c obj = dma_buf_to_obj(dma_buf);
obj 267 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c if (obj->base.dev == dev) {
obj 272 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c return &i915_gem_object_get(obj)->base;
obj 283 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c obj = i915_gem_object_alloc();
obj 284 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c if (obj == NULL) {
obj 289 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c drm_gem_private_object_init(dev, &obj->base, dma_buf->size);
obj 290 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c i915_gem_object_init(obj, &i915_gem_object_dmabuf_ops);
obj 291 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c obj->base.import_attach = attach;
obj 292 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c obj->base.resv = dma_buf->resv;
obj 301 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c obj->read_domains = I915_GEM_DOMAIN_GTT;
obj 302 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c obj->write_domain = 0;
obj 304 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c return &obj->base;
obj 16 drivers/gpu/drm/i915/gem/i915_gem_domain.c static void __i915_gem_object_flush_for_display(struct drm_i915_gem_object *obj)
obj 22 drivers/gpu/drm/i915/gem/i915_gem_domain.c i915_gem_object_flush_write_domain(obj, ~I915_GEM_DOMAIN_CPU);
obj 23 drivers/gpu/drm/i915/gem/i915_gem_domain.c if (obj->cache_dirty)
obj 24 drivers/gpu/drm/i915/gem/i915_gem_domain.c i915_gem_clflush_object(obj, I915_CLFLUSH_FORCE);
obj 25 drivers/gpu/drm/i915/gem/i915_gem_domain.c obj->write_domain = 0;
obj 28 drivers/gpu/drm/i915/gem/i915_gem_domain.c void i915_gem_object_flush_if_display(struct drm_i915_gem_object *obj)
obj 30 drivers/gpu/drm/i915/gem/i915_gem_domain.c if (!READ_ONCE(obj->pin_global))
obj 33 drivers/gpu/drm/i915/gem/i915_gem_domain.c i915_gem_object_lock(obj);
obj 34 drivers/gpu/drm/i915/gem/i915_gem_domain.c __i915_gem_object_flush_for_display(obj);
obj 35 drivers/gpu/drm/i915/gem/i915_gem_domain.c i915_gem_object_unlock(obj);
obj 47 drivers/gpu/drm/i915/gem/i915_gem_domain.c i915_gem_object_set_to_wc_domain(struct drm_i915_gem_object *obj, bool write)
obj 51 drivers/gpu/drm/i915/gem/i915_gem_domain.c assert_object_held(obj);
obj 53 drivers/gpu/drm/i915/gem/i915_gem_domain.c ret = i915_gem_object_wait(obj,
obj 60 drivers/gpu/drm/i915/gem/i915_gem_domain.c if (obj->write_domain == I915_GEM_DOMAIN_WC)
obj 71 drivers/gpu/drm/i915/gem/i915_gem_domain.c ret = i915_gem_object_pin_pages(obj);
obj 75 drivers/gpu/drm/i915/gem/i915_gem_domain.c i915_gem_object_flush_write_domain(obj, ~I915_GEM_DOMAIN_WC);
obj 81 drivers/gpu/drm/i915/gem/i915_gem_domain.c if ((obj->read_domains & I915_GEM_DOMAIN_WC) == 0)
obj 87 drivers/gpu/drm/i915/gem/i915_gem_domain.c GEM_BUG_ON((obj->write_domain & ~I915_GEM_DOMAIN_WC) != 0);
obj 88 drivers/gpu/drm/i915/gem/i915_gem_domain.c obj->read_domains |= I915_GEM_DOMAIN_WC;
obj 90 drivers/gpu/drm/i915/gem/i915_gem_domain.c obj->read_domains = I915_GEM_DOMAIN_WC;
obj 91 drivers/gpu/drm/i915/gem/i915_gem_domain.c obj->write_domain = I915_GEM_DOMAIN_WC;
obj 92 drivers/gpu/drm/i915/gem/i915_gem_domain.c obj->mm.dirty = true;
obj 95 drivers/gpu/drm/i915/gem/i915_gem_domain.c i915_gem_object_unpin_pages(obj);
obj 108 drivers/gpu/drm/i915/gem/i915_gem_domain.c i915_gem_object_set_to_gtt_domain(struct drm_i915_gem_object *obj, bool write)
obj 112 drivers/gpu/drm/i915/gem/i915_gem_domain.c assert_object_held(obj);
obj 114 drivers/gpu/drm/i915/gem/i915_gem_domain.c ret = i915_gem_object_wait(obj,
obj 121 drivers/gpu/drm/i915/gem/i915_gem_domain.c if (obj->write_domain == I915_GEM_DOMAIN_GTT)
obj 132 drivers/gpu/drm/i915/gem/i915_gem_domain.c ret = i915_gem_object_pin_pages(obj);
obj 136 drivers/gpu/drm/i915/gem/i915_gem_domain.c i915_gem_object_flush_write_domain(obj, ~I915_GEM_DOMAIN_GTT);
obj 142 drivers/gpu/drm/i915/gem/i915_gem_domain.c if ((obj->read_domains & I915_GEM_DOMAIN_GTT) == 0)
obj 148 drivers/gpu/drm/i915/gem/i915_gem_domain.c GEM_BUG_ON((obj->write_domain & ~I915_GEM_DOMAIN_GTT) != 0);
obj 149 drivers/gpu/drm/i915/gem/i915_gem_domain.c obj->read_domains |= I915_GEM_DOMAIN_GTT;
obj 151 drivers/gpu/drm/i915/gem/i915_gem_domain.c obj->read_domains = I915_GEM_DOMAIN_GTT;
obj 152 drivers/gpu/drm/i915/gem/i915_gem_domain.c obj->write_domain = I915_GEM_DOMAIN_GTT;
obj 153 drivers/gpu/drm/i915/gem/i915_gem_domain.c obj->mm.dirty = true;
obj 156 drivers/gpu/drm/i915/gem/i915_gem_domain.c i915_gem_object_unpin_pages(obj);
obj 175 drivers/gpu/drm/i915/gem/i915_gem_domain.c int i915_gem_object_set_cache_level(struct drm_i915_gem_object *obj,
obj 181 drivers/gpu/drm/i915/gem/i915_gem_domain.c assert_object_held(obj);
obj 183 drivers/gpu/drm/i915/gem/i915_gem_domain.c if (obj->cache_level == cache_level)
obj 192 drivers/gpu/drm/i915/gem/i915_gem_domain.c list_for_each_entry(vma, &obj->vma.list, obj_link) {
obj 223 drivers/gpu/drm/i915/gem/i915_gem_domain.c if (atomic_read(&obj->bind_count)) {
obj 224 drivers/gpu/drm/i915/gem/i915_gem_domain.c struct drm_i915_private *i915 = to_i915(obj->base.dev);
obj 230 drivers/gpu/drm/i915/gem/i915_gem_domain.c ret = i915_gem_object_wait(obj,
obj 256 drivers/gpu/drm/i915/gem/i915_gem_domain.c if (obj->userfault_count)
obj 257 drivers/gpu/drm/i915/gem/i915_gem_domain.c __i915_gem_object_release_mmap(obj);
obj 267 drivers/gpu/drm/i915/gem/i915_gem_domain.c for_each_ggtt_vma(vma, obj) {
obj 287 drivers/gpu/drm/i915/gem/i915_gem_domain.c list_for_each_entry(vma, &obj->vma.list, obj_link) {
obj 297 drivers/gpu/drm/i915/gem/i915_gem_domain.c list_for_each_entry(vma, &obj->vma.list, obj_link)
obj 299 drivers/gpu/drm/i915/gem/i915_gem_domain.c i915_gem_object_set_cache_coherency(obj, cache_level);
obj 300 drivers/gpu/drm/i915/gem/i915_gem_domain.c obj->cache_dirty = true; /* Always invalidate stale cachelines */
obj 309 drivers/gpu/drm/i915/gem/i915_gem_domain.c struct drm_i915_gem_object *obj;
obj 313 drivers/gpu/drm/i915/gem/i915_gem_domain.c obj = i915_gem_object_lookup_rcu(file, args->handle);
obj 314 drivers/gpu/drm/i915/gem/i915_gem_domain.c if (!obj) {
obj 319 drivers/gpu/drm/i915/gem/i915_gem_domain.c switch (obj->cache_level) {
obj 343 drivers/gpu/drm/i915/gem/i915_gem_domain.c struct drm_i915_gem_object *obj;
obj 370 drivers/gpu/drm/i915/gem/i915_gem_domain.c obj = i915_gem_object_lookup(file, args->handle);
obj 371 drivers/gpu/drm/i915/gem/i915_gem_domain.c if (!obj)
obj 378 drivers/gpu/drm/i915/gem/i915_gem_domain.c if (i915_gem_object_is_proxy(obj)) {
obj 383 drivers/gpu/drm/i915/gem/i915_gem_domain.c if (obj->cache_level == level)
obj 386 drivers/gpu/drm/i915/gem/i915_gem_domain.c ret = i915_gem_object_wait(obj,
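The i915_gem_domain.c hits from line 343 onward are the set-caching ioctl; the shape is the usual lookup, interruptible lock, i915_gem_object_set_cache_level(), unlock, put. Boiled down, with the wait and proxy checks from lines 378-386 elided:

	static int example_set_cache_level(struct drm_file *file, u32 handle,
					   enum i915_cache_level level)
	{
		struct drm_i915_gem_object *obj;
		int ret;

		obj = i915_gem_object_lookup(file, handle);
		if (!obj)
			return -ENOENT;

		ret = i915_gem_object_lock_interruptible(obj);
		if (!ret) {
			ret = i915_gem_object_set_cache_level(obj, level);
			i915_gem_object_unlock(obj);
		}

		i915_gem_object_put(obj);	/* drop the lookup reference */
		return ret;
	}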
obj 396 drivers/gpu/drm/i915/gem/i915_gem_domain.c ret = i915_gem_object_lock_interruptible(obj);
obj 398 drivers/gpu/drm/i915/gem/i915_gem_domain.c ret = i915_gem_object_set_cache_level(obj, level);
obj 399 drivers/gpu/drm/i915/gem/i915_gem_domain.c i915_gem_object_unlock(obj);
obj 404 drivers/gpu/drm/i915/gem/i915_gem_domain.c i915_gem_object_put(obj);
obj 415 drivers/gpu/drm/i915/gem/i915_gem_domain.c i915_gem_object_pin_to_display_plane(struct drm_i915_gem_object *obj,
obj 423 drivers/gpu/drm/i915/gem/i915_gem_domain.c assert_object_held(obj);
obj 428 drivers/gpu/drm/i915/gem/i915_gem_domain.c obj->pin_global++;
obj 439 drivers/gpu/drm/i915/gem/i915_gem_domain.c ret = i915_gem_object_set_cache_level(obj,
obj 440 drivers/gpu/drm/i915/gem/i915_gem_domain.c HAS_WT(to_i915(obj->base.dev)) ?
obj 457 drivers/gpu/drm/i915/gem/i915_gem_domain.c vma = i915_gem_object_ggtt_pin(obj, view, 0, alignment,
obj 462 drivers/gpu/drm/i915/gem/i915_gem_domain.c vma = i915_gem_object_ggtt_pin(obj, view, 0, alignment, flags);
obj 468 drivers/gpu/drm/i915/gem/i915_gem_domain.c __i915_gem_object_flush_for_display(obj);
obj 473 drivers/gpu/drm/i915/gem/i915_gem_domain.c obj->read_domains |= I915_GEM_DOMAIN_GTT;
obj 478 drivers/gpu/drm/i915/gem/i915_gem_domain.c obj->pin_global--;
obj 482 drivers/gpu/drm/i915/gem/i915_gem_domain.c static void i915_gem_object_bump_inactive_ggtt(struct drm_i915_gem_object *obj)
obj 484 drivers/gpu/drm/i915/gem/i915_gem_domain.c struct drm_i915_private *i915 = to_i915(obj->base.dev);
obj 487 drivers/gpu/drm/i915/gem/i915_gem_domain.c GEM_BUG_ON(!i915_gem_object_has_pinned_pages(obj));
obj 490 drivers/gpu/drm/i915/gem/i915_gem_domain.c for_each_ggtt_vma(vma, obj) {
obj 498 drivers/gpu/drm/i915/gem/i915_gem_domain.c if (i915_gem_object_is_shrinkable(obj)) {
obj 503 drivers/gpu/drm/i915/gem/i915_gem_domain.c if (obj->mm.madv == I915_MADV_WILLNEED)
obj 504 drivers/gpu/drm/i915/gem/i915_gem_domain.c list_move_tail(&obj->mm.link, &i915->mm.shrink_list);
obj 513 drivers/gpu/drm/i915/gem/i915_gem_domain.c struct drm_i915_gem_object *obj = vma->obj;
obj 515 drivers/gpu/drm/i915/gem/i915_gem_domain.c assert_object_held(obj);
obj 517 drivers/gpu/drm/i915/gem/i915_gem_domain.c if (WARN_ON(obj->pin_global == 0))
obj 520 drivers/gpu/drm/i915/gem/i915_gem_domain.c if (--obj->pin_global == 0)
obj 524 drivers/gpu/drm/i915/gem/i915_gem_domain.c i915_gem_object_bump_inactive_ggtt(obj);
obj 538 drivers/gpu/drm/i915/gem/i915_gem_domain.c i915_gem_object_set_to_cpu_domain(struct drm_i915_gem_object *obj, bool write)
obj 542 drivers/gpu/drm/i915/gem/i915_gem_domain.c assert_object_held(obj);
obj 544 drivers/gpu/drm/i915/gem/i915_gem_domain.c ret = i915_gem_object_wait(obj,
obj 551 drivers/gpu/drm/i915/gem/i915_gem_domain.c i915_gem_object_flush_write_domain(obj, ~I915_GEM_DOMAIN_CPU);
obj 554 drivers/gpu/drm/i915/gem/i915_gem_domain.c if ((obj->read_domains & I915_GEM_DOMAIN_CPU) == 0) {
obj 555 drivers/gpu/drm/i915/gem/i915_gem_domain.c i915_gem_clflush_object(obj, I915_CLFLUSH_SYNC);
obj 556 drivers/gpu/drm/i915/gem/i915_gem_domain.c obj->read_domains |= I915_GEM_DOMAIN_CPU;
obj 562 drivers/gpu/drm/i915/gem/i915_gem_domain.c GEM_BUG_ON(obj->write_domain & ~I915_GEM_DOMAIN_CPU);
obj 568 drivers/gpu/drm/i915/gem/i915_gem_domain.c __start_cpu_write(obj);
obj 585 drivers/gpu/drm/i915/gem/i915_gem_domain.c struct drm_i915_gem_object *obj;
obj 604 drivers/gpu/drm/i915/gem/i915_gem_domain.c obj = i915_gem_object_lookup(file, args->handle);
obj 605 drivers/gpu/drm/i915/gem/i915_gem_domain.c if (!obj)
obj 618 drivers/gpu/drm/i915/gem/i915_gem_domain.c if (READ_ONCE(obj->write_domain) == read_domains) {
obj 628 drivers/gpu/drm/i915/gem/i915_gem_domain.c err = i915_gem_object_wait(obj,
obj 642 drivers/gpu/drm/i915/gem/i915_gem_domain.c if (i915_gem_object_is_proxy(obj)) {
obj 656 drivers/gpu/drm/i915/gem/i915_gem_domain.c err = i915_gem_object_pin_pages(obj);
obj 660 drivers/gpu/drm/i915/gem/i915_gem_domain.c err = i915_gem_object_lock_interruptible(obj);
obj 665 drivers/gpu/drm/i915/gem/i915_gem_domain.c err = i915_gem_object_set_to_wc_domain(obj, write_domain);
obj 667 drivers/gpu/drm/i915/gem/i915_gem_domain.c err = i915_gem_object_set_to_gtt_domain(obj, write_domain);
obj 669 drivers/gpu/drm/i915/gem/i915_gem_domain.c err = i915_gem_object_set_to_cpu_domain(obj, write_domain);
obj 672 drivers/gpu/drm/i915/gem/i915_gem_domain.c i915_gem_object_bump_inactive_ggtt(obj);
obj 674 drivers/gpu/drm/i915/gem/i915_gem_domain.c i915_gem_object_unlock(obj);
obj 677 drivers/gpu/drm/i915/gem/i915_gem_domain.c intel_frontbuffer_invalidate(obj->frontbuffer, ORIGIN_CPU);
obj 680 drivers/gpu/drm/i915/gem/i915_gem_domain.c i915_gem_object_unpin_pages(obj);
obj 682 drivers/gpu/drm/i915/gem/i915_gem_domain.c i915_gem_object_put(obj);
obj 691 drivers/gpu/drm/i915/gem/i915_gem_domain.c int i915_gem_object_prepare_read(struct drm_i915_gem_object *obj,
obj 697 drivers/gpu/drm/i915/gem/i915_gem_domain.c if (!i915_gem_object_has_struct_page(obj))
obj 700 drivers/gpu/drm/i915/gem/i915_gem_domain.c ret = i915_gem_object_lock_interruptible(obj);
obj 704 drivers/gpu/drm/i915/gem/i915_gem_domain.c ret = i915_gem_object_wait(obj,
obj 710 drivers/gpu/drm/i915/gem/i915_gem_domain.c ret = i915_gem_object_pin_pages(obj);
obj 714 drivers/gpu/drm/i915/gem/i915_gem_domain.c if (obj->cache_coherent & I915_BO_CACHE_COHERENT_FOR_READ ||
obj 716 drivers/gpu/drm/i915/gem/i915_gem_domain.c ret = i915_gem_object_set_to_cpu_domain(obj, false);
obj 723 drivers/gpu/drm/i915/gem/i915_gem_domain.c i915_gem_object_flush_write_domain(obj, ~I915_GEM_DOMAIN_CPU);
obj 730 drivers/gpu/drm/i915/gem/i915_gem_domain.c if (!obj->cache_dirty &&
obj 731 drivers/gpu/drm/i915/gem/i915_gem_domain.c !(obj->read_domains & I915_GEM_DOMAIN_CPU))
obj 739 drivers/gpu/drm/i915/gem/i915_gem_domain.c i915_gem_object_unpin_pages(obj);
obj 741 drivers/gpu/drm/i915/gem/i915_gem_domain.c i915_gem_object_unlock(obj);
obj 745 drivers/gpu/drm/i915/gem/i915_gem_domain.c int i915_gem_object_prepare_write(struct drm_i915_gem_object *obj,
obj 751 drivers/gpu/drm/i915/gem/i915_gem_domain.c if (!i915_gem_object_has_struct_page(obj))
obj 754 drivers/gpu/drm/i915/gem/i915_gem_domain.c ret = i915_gem_object_lock_interruptible(obj);
obj 758 drivers/gpu/drm/i915/gem/i915_gem_domain.c ret = i915_gem_object_wait(obj,
obj 765 drivers/gpu/drm/i915/gem/i915_gem_domain.c ret = i915_gem_object_pin_pages(obj);
obj 769 drivers/gpu/drm/i915/gem/i915_gem_domain.c if (obj->cache_coherent & I915_BO_CACHE_COHERENT_FOR_WRITE ||
obj 771 drivers/gpu/drm/i915/gem/i915_gem_domain.c ret = i915_gem_object_set_to_cpu_domain(obj, true);
obj 778 drivers/gpu/drm/i915/gem/i915_gem_domain.c i915_gem_object_flush_write_domain(obj, ~I915_GEM_DOMAIN_CPU);
obj 785 drivers/gpu/drm/i915/gem/i915_gem_domain.c if (!obj->cache_dirty) {
obj 792 drivers/gpu/drm/i915/gem/i915_gem_domain.c if (!(obj->read_domains & I915_GEM_DOMAIN_CPU))
obj 797 drivers/gpu/drm/i915/gem/i915_gem_domain.c intel_frontbuffer_invalidate(obj->frontbuffer, ORIGIN_CPU);
obj 798 drivers/gpu/drm/i915/gem/i915_gem_domain.c obj->mm.dirty = true;
drivers/gpu/drm/i915/gem/i915_gem_domain.c obj->mm.dirty = true; obj 803 drivers/gpu/drm/i915/gem/i915_gem_domain.c i915_gem_object_unpin_pages(obj); obj 805 drivers/gpu/drm/i915/gem/i915_gem_domain.c i915_gem_object_unlock(obj); obj 480 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c i915_gem_object_is_tiled(vma->obj)) obj 564 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c const struct drm_i915_gem_object *obj) obj 566 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c if (!i915_gem_object_has_struct_page(obj)) obj 576 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c obj->cache_dirty || obj 577 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c obj->cache_level != I915_CACHE_NONE); obj 744 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c struct drm_i915_gem_object *obj; obj 771 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c obj = i915_gem_object_lookup(eb->file, handle); obj 772 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c if (unlikely(!obj)) { obj 777 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c vma = i915_vma_instance(obj, eb->context->vm, NULL); obj 801 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c i915_gem_object_lock(obj); obj 802 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c list_add(&lut->obj_link, &obj->lut_list); obj 803 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c i915_gem_object_unlock(obj); obj 822 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c i915_gem_object_put(obj); obj 934 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c struct drm_i915_gem_object *obj = cache->rq->batch->obj; obj 936 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c GEM_BUG_ON(cache->rq_size >= obj->base.size / sizeof(u32)); obj 939 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c __i915_gem_object_flush_map(obj, 0, sizeof(u32) * (cache->rq_size + 1)); obj 940 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c i915_gem_object_unpin_map(obj); obj 985 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c static void *reloc_kmap(struct drm_i915_gem_object *obj, obj 997 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c err = i915_gem_object_prepare_write(obj, &flushes); obj 1005 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c cache->node.mm = (void *)obj; obj 1010 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c vaddr = kmap_atomic(i915_gem_object_get_dirty_page(obj, page)); obj 1017 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c static void *reloc_iomap(struct drm_i915_gem_object *obj, obj 1032 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c if (i915_gem_object_is_tiled(obj)) obj 1035 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c if (use_cpu_reloc(cache, obj)) obj 1038 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c i915_gem_object_lock(obj); obj 1039 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c err = i915_gem_object_set_to_gtt_domain(obj, true); obj 1040 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c i915_gem_object_unlock(obj); obj 1044 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c vma = i915_gem_object_ggtt_pin(obj, NULL, 0, 0, obj 1066 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c i915_gem_object_get_dma_address(obj, page), obj 1080 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c static void *reloc_vaddr(struct drm_i915_gem_object *obj, obj 1091 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c vaddr = reloc_iomap(obj, cache, page); obj 1093 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c vaddr = reloc_kmap(obj, cache, page); obj 1124 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c struct drm_i915_gem_object *obj = vma->obj; obj 1129 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c if (obj->cache_dirty & ~obj->cache_coherent) obj 1130 
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c i915_gem_clflush_object(obj, 0); obj 1131 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c obj->write_domain = 0; obj 1133 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c err = i915_request_await_object(rq, vma->obj, true); obj 1157 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c cmd = i915_gem_object_pin_map(pool->obj, obj 1166 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c batch = i915_vma_instance(pool->obj, vma->vm, NULL); obj 1197 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c err = i915_request_await_object(rq, batch->obj, false); obj 1221 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c i915_gem_object_unpin_map(pool->obj); obj 1329 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c vaddr = reloc_vaddr(vma->obj, &eb->reloc_cache, offset >> PAGE_SHIFT); obj 1395 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c err = i915_vma_bind(target, target->obj->cache_level, obj 1850 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c struct drm_i915_gem_object *obj = vma->obj; obj 1877 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c if (unlikely(obj->cache_dirty & ~obj->cache_coherent)) { obj 1878 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c if (i915_gem_clflush_object(obj, 0)) obj 1884 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c (eb->request, obj, flags & EXEC_OBJECT_WRITE); obj 1964 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c shadow_batch_pin(struct i915_execbuffer *eb, struct drm_i915_gem_object *obj) obj 1981 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c i915_gem_object_set_readonly(obj); obj 1987 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c return i915_gem_object_pin(obj, vm, NULL, 0, 0, flags); obj 2002 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c vma = shadow_batch_pin(eb, pool->obj); obj 2013 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c eb->batch->obj, obj 2017 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c pool->obj, obj 2630 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c vma = i915_gem_object_ggtt_pin(eb.batch->obj, NULL, 0, 0, 0); obj 60 drivers/gpu/drm/i915/gem/i915_gem_fence.c i915_gem_object_lock_fence(struct drm_i915_gem_object *obj) obj 64 drivers/gpu/drm/i915/gem/i915_gem_fence.c assert_object_held(obj); obj 75 drivers/gpu/drm/i915/gem/i915_gem_fence.c obj->base.resv, NULL, obj 80 drivers/gpu/drm/i915/gem/i915_gem_fence.c dma_resv_add_excl_fence(obj->base.resv, &stub->dma); obj 89 drivers/gpu/drm/i915/gem/i915_gem_fence.c void i915_gem_object_unlock_fence(struct drm_i915_gem_object *obj, obj 35 drivers/gpu/drm/i915/gem/i915_gem_internal.c static int i915_gem_object_get_pages_internal(struct drm_i915_gem_object *obj) obj 37 drivers/gpu/drm/i915/gem/i915_gem_internal.c struct drm_i915_private *i915 = to_i915(obj->base.dev); obj 71 drivers/gpu/drm/i915/gem/i915_gem_internal.c npages = obj->base.size / PAGE_SIZE; obj 110 drivers/gpu/drm/i915/gem/i915_gem_internal.c if (i915_gem_gtt_prepare_pages(obj, st)) { obj 125 drivers/gpu/drm/i915/gem/i915_gem_internal.c obj->mm.madv = I915_MADV_DONTNEED; obj 127 drivers/gpu/drm/i915/gem/i915_gem_internal.c __i915_gem_object_set_pages(obj, st, sg_page_sizes); obj 139 drivers/gpu/drm/i915/gem/i915_gem_internal.c static void i915_gem_object_put_pages_internal(struct drm_i915_gem_object *obj, obj 142 drivers/gpu/drm/i915/gem/i915_gem_internal.c i915_gem_gtt_finish_pages(obj, pages); obj 145 drivers/gpu/drm/i915/gem/i915_gem_internal.c obj->mm.dirty = false; obj 146 drivers/gpu/drm/i915/gem/i915_gem_internal.c obj->mm.madv = I915_MADV_WILLNEED; obj 175 drivers/gpu/drm/i915/gem/i915_gem_internal.c struct 
drm_i915_gem_object *obj; obj 181 drivers/gpu/drm/i915/gem/i915_gem_internal.c if (overflows_type(size, obj->base.size)) obj 184 drivers/gpu/drm/i915/gem/i915_gem_internal.c obj = i915_gem_object_alloc(); obj 185 drivers/gpu/drm/i915/gem/i915_gem_internal.c if (!obj) obj 188 drivers/gpu/drm/i915/gem/i915_gem_internal.c drm_gem_private_object_init(&i915->drm, &obj->base, size); obj 189 drivers/gpu/drm/i915/gem/i915_gem_internal.c i915_gem_object_init(obj, &i915_gem_object_internal_ops); obj 191 drivers/gpu/drm/i915/gem/i915_gem_internal.c obj->read_domains = I915_GEM_DOMAIN_CPU; obj 192 drivers/gpu/drm/i915/gem/i915_gem_internal.c obj->write_domain = I915_GEM_DOMAIN_CPU; obj 195 drivers/gpu/drm/i915/gem/i915_gem_internal.c i915_gem_object_set_cache_coherency(obj, cache_level); obj 197 drivers/gpu/drm/i915/gem/i915_gem_internal.c return obj; obj 55 drivers/gpu/drm/i915/gem/i915_gem_mman.c struct drm_i915_gem_object *obj; obj 64 drivers/gpu/drm/i915/gem/i915_gem_mman.c obj = i915_gem_object_lookup(file, args->handle); obj 65 drivers/gpu/drm/i915/gem/i915_gem_mman.c if (!obj) obj 71 drivers/gpu/drm/i915/gem/i915_gem_mman.c if (!obj->base.filp) { obj 76 drivers/gpu/drm/i915/gem/i915_gem_mman.c if (range_overflows(args->offset, args->size, (u64)obj->base.size)) { obj 81 drivers/gpu/drm/i915/gem/i915_gem_mman.c addr = vm_mmap(obj->base.filp, 0, args->size, obj 96 drivers/gpu/drm/i915/gem/i915_gem_mman.c if (vma && __vma_matches(vma, obj->base.filp, addr, args->size)) obj 105 drivers/gpu/drm/i915/gem/i915_gem_mman.c i915_gem_object_put(obj); obj 111 drivers/gpu/drm/i915/gem/i915_gem_mman.c i915_gem_object_put(obj); obj 115 drivers/gpu/drm/i915/gem/i915_gem_mman.c static unsigned int tile_row_pages(const struct drm_i915_gem_object *obj) obj 117 drivers/gpu/drm/i915/gem/i915_gem_mman.c return i915_gem_object_get_tile_row_size(obj) >> PAGE_SHIFT; obj 177 drivers/gpu/drm/i915/gem/i915_gem_mman.c compute_partial_view(const struct drm_i915_gem_object *obj, obj 183 drivers/gpu/drm/i915/gem/i915_gem_mman.c if (i915_gem_object_is_tiled(obj)) obj 184 drivers/gpu/drm/i915/gem/i915_gem_mman.c chunk = roundup(chunk, tile_row_pages(obj)); obj 190 drivers/gpu/drm/i915/gem/i915_gem_mman.c (obj->base.size >> PAGE_SHIFT) - view.partial.offset); obj 193 drivers/gpu/drm/i915/gem/i915_gem_mman.c if (chunk >= obj->base.size >> PAGE_SHIFT) obj 221 drivers/gpu/drm/i915/gem/i915_gem_mman.c struct drm_i915_gem_object *obj = to_intel_bo(area->vm_private_data); obj 222 drivers/gpu/drm/i915/gem/i915_gem_mman.c struct drm_device *dev = obj->base.dev; obj 234 drivers/gpu/drm/i915/gem/i915_gem_mman.c if (i915_gem_object_is_readonly(obj) && write) obj 240 drivers/gpu/drm/i915/gem/i915_gem_mman.c trace_i915_gem_object_fault(obj, page_offset, true, write); obj 242 drivers/gpu/drm/i915/gem/i915_gem_mman.c ret = i915_gem_object_pin_pages(obj); obj 257 drivers/gpu/drm/i915/gem/i915_gem_mman.c if (obj->cache_level != I915_CACHE_NONE && !HAS_LLC(i915)) { obj 263 drivers/gpu/drm/i915/gem/i915_gem_mman.c vma = i915_gem_object_ggtt_pin(obj, NULL, 0, 0, obj 270 drivers/gpu/drm/i915/gem/i915_gem_mman.c compute_partial_view(obj, page_offset, MIN_CHUNK_PAGES); obj 282 drivers/gpu/drm/i915/gem/i915_gem_mman.c vma = i915_gem_object_ggtt_pin(obj, &view, 0, 0, flags); obj 286 drivers/gpu/drm/i915/gem/i915_gem_mman.c vma = i915_gem_object_ggtt_pin(obj, &view, 0, 0, flags); obj 311 drivers/gpu/drm/i915/gem/i915_gem_mman.c if (!i915_vma_set_userfault(vma) && !obj->userfault_count++) obj 312 drivers/gpu/drm/i915/gem/i915_gem_mman.c 
drivers/gpu/drm/i915/gem/i915_gem_mman.c list_add(&obj->userfault_link, &i915->ggtt.userfault_list); obj 320 drivers/gpu/drm/i915/gem/i915_gem_mman.c GEM_BUG_ON(!i915_gem_object_has_pinned_pages(obj)); obj 322 drivers/gpu/drm/i915/gem/i915_gem_mman.c obj->mm.dirty = true; obj 335 drivers/gpu/drm/i915/gem/i915_gem_mman.c i915_gem_object_unpin_pages(obj); obj 375 drivers/gpu/drm/i915/gem/i915_gem_mman.c void __i915_gem_object_release_mmap(struct drm_i915_gem_object *obj) obj 379 drivers/gpu/drm/i915/gem/i915_gem_mman.c GEM_BUG_ON(!obj->userfault_count); obj 381 drivers/gpu/drm/i915/gem/i915_gem_mman.c obj->userfault_count = 0; obj 382 drivers/gpu/drm/i915/gem/i915_gem_mman.c list_del(&obj->userfault_link); obj 383 drivers/gpu/drm/i915/gem/i915_gem_mman.c drm_vma_node_unmap(&obj->base.vma_node, obj 384 drivers/gpu/drm/i915/gem/i915_gem_mman.c obj->base.dev->anon_inode->i_mapping); obj 386 drivers/gpu/drm/i915/gem/i915_gem_mman.c for_each_ggtt_vma(vma, obj) obj 404 drivers/gpu/drm/i915/gem/i915_gem_mman.c void i915_gem_object_release_mmap(struct drm_i915_gem_object *obj) obj 406 drivers/gpu/drm/i915/gem/i915_gem_mman.c struct drm_i915_private *i915 = to_i915(obj->base.dev); obj 420 drivers/gpu/drm/i915/gem/i915_gem_mman.c if (!obj->userfault_count) obj 423 drivers/gpu/drm/i915/gem/i915_gem_mman.c __i915_gem_object_release_mmap(obj); obj 439 drivers/gpu/drm/i915/gem/i915_gem_mman.c static int create_mmap_offset(struct drm_i915_gem_object *obj) obj 441 drivers/gpu/drm/i915/gem/i915_gem_mman.c struct drm_i915_private *i915 = to_i915(obj->base.dev); obj 444 drivers/gpu/drm/i915/gem/i915_gem_mman.c err = drm_gem_create_mmap_offset(&obj->base); obj 457 drivers/gpu/drm/i915/gem/i915_gem_mman.c err = drm_gem_create_mmap_offset(&obj->base); obj 472 drivers/gpu/drm/i915/gem/i915_gem_mman.c struct drm_i915_gem_object *obj; obj 475 drivers/gpu/drm/i915/gem/i915_gem_mman.c obj = i915_gem_object_lookup(file, handle); obj 476 drivers/gpu/drm/i915/gem/i915_gem_mman.c if (!obj) obj 479 drivers/gpu/drm/i915/gem/i915_gem_mman.c if (i915_gem_object_never_bind_ggtt(obj)) { obj 484 drivers/gpu/drm/i915/gem/i915_gem_mman.c ret = create_mmap_offset(obj); obj 486 drivers/gpu/drm/i915/gem/i915_gem_mman.c *offset = drm_vma_node_offset_addr(&obj->base.vma_node); obj 489 drivers/gpu/drm/i915/gem/i915_gem_mman.c i915_gem_object_put(obj); obj 44 drivers/gpu/drm/i915/gem/i915_gem_object.c void i915_gem_object_free(struct drm_i915_gem_object *obj) obj 46 drivers/gpu/drm/i915/gem/i915_gem_object.c return kmem_cache_free(global.slab_objects, obj); obj 49 drivers/gpu/drm/i915/gem/i915_gem_object.c void i915_gem_object_init(struct drm_i915_gem_object *obj, obj 52 drivers/gpu/drm/i915/gem/i915_gem_object.c mutex_init(&obj->mm.lock); obj 54 drivers/gpu/drm/i915/gem/i915_gem_object.c spin_lock_init(&obj->vma.lock); obj 55 drivers/gpu/drm/i915/gem/i915_gem_object.c INIT_LIST_HEAD(&obj->vma.list); obj 57 drivers/gpu/drm/i915/gem/i915_gem_object.c INIT_LIST_HEAD(&obj->mm.link); obj 59 drivers/gpu/drm/i915/gem/i915_gem_object.c INIT_LIST_HEAD(&obj->lut_list); obj 61 drivers/gpu/drm/i915/gem/i915_gem_object.c init_rcu_head(&obj->rcu); obj 63 drivers/gpu/drm/i915/gem/i915_gem_object.c obj->ops = ops; obj 65 drivers/gpu/drm/i915/gem/i915_gem_object.c obj->mm.madv = I915_MADV_WILLNEED; obj 66 drivers/gpu/drm/i915/gem/i915_gem_object.c INIT_RADIX_TREE(&obj->mm.get_page.radix, GFP_KERNEL | __GFP_NOWARN); obj 67 drivers/gpu/drm/i915/gem/i915_gem_object.c mutex_init(&obj->mm.get_page.lock); obj 75 drivers/gpu/drm/i915/gem/i915_gem_object.c void
i915_gem_object_set_cache_coherency(struct drm_i915_gem_object *obj, obj 78 drivers/gpu/drm/i915/gem/i915_gem_object.c obj->cache_level = cache_level; obj 81 drivers/gpu/drm/i915/gem/i915_gem_object.c obj->cache_coherent = (I915_BO_CACHE_COHERENT_FOR_READ | obj 83 drivers/gpu/drm/i915/gem/i915_gem_object.c else if (HAS_LLC(to_i915(obj->base.dev))) obj 84 drivers/gpu/drm/i915/gem/i915_gem_object.c obj->cache_coherent = I915_BO_CACHE_COHERENT_FOR_READ; obj 86 drivers/gpu/drm/i915/gem/i915_gem_object.c obj->cache_coherent = 0; obj 88 drivers/gpu/drm/i915/gem/i915_gem_object.c obj->cache_dirty = obj 89 drivers/gpu/drm/i915/gem/i915_gem_object.c !(obj->cache_coherent & I915_BO_CACHE_COHERENT_FOR_WRITE); obj 94 drivers/gpu/drm/i915/gem/i915_gem_object.c struct drm_i915_gem_object *obj = to_intel_bo(gem); obj 99 drivers/gpu/drm/i915/gem/i915_gem_object.c i915_gem_object_lock(obj); obj 100 drivers/gpu/drm/i915/gem/i915_gem_object.c list_for_each_entry_safe(lut, ln, &obj->lut_list, obj_link) { obj 109 drivers/gpu/drm/i915/gem/i915_gem_object.c i915_gem_object_unlock(obj); obj 123 drivers/gpu/drm/i915/gem/i915_gem_object.c GEM_BUG_ON(vma->obj != obj); obj 133 drivers/gpu/drm/i915/gem/i915_gem_object.c i915_gem_object_put(obj); obj 139 drivers/gpu/drm/i915/gem/i915_gem_object.c struct drm_i915_gem_object *obj = obj 140 drivers/gpu/drm/i915/gem/i915_gem_object.c container_of(head, typeof(*obj), rcu); obj 141 drivers/gpu/drm/i915/gem/i915_gem_object.c struct drm_i915_private *i915 = to_i915(obj->base.dev); obj 143 drivers/gpu/drm/i915/gem/i915_gem_object.c dma_resv_fini(&obj->base._resv); obj 144 drivers/gpu/drm/i915/gem/i915_gem_object.c i915_gem_object_free(obj); obj 153 drivers/gpu/drm/i915/gem/i915_gem_object.c struct drm_i915_gem_object *obj, *on; obj 157 drivers/gpu/drm/i915/gem/i915_gem_object.c llist_for_each_entry_safe(obj, on, freed, freed) { obj 160 drivers/gpu/drm/i915/gem/i915_gem_object.c trace_i915_gem_object_destroy(obj); obj 164 drivers/gpu/drm/i915/gem/i915_gem_object.c list_for_each_entry_safe(vma, vn, &obj->vma.list, obj_link) { obj 169 drivers/gpu/drm/i915/gem/i915_gem_object.c GEM_BUG_ON(!list_empty(&obj->vma.list)); obj 170 drivers/gpu/drm/i915/gem/i915_gem_object.c GEM_BUG_ON(!RB_EMPTY_ROOT(&obj->vma.tree)); obj 174 drivers/gpu/drm/i915/gem/i915_gem_object.c GEM_BUG_ON(atomic_read(&obj->bind_count)); obj 175 drivers/gpu/drm/i915/gem/i915_gem_object.c GEM_BUG_ON(obj->userfault_count); obj 176 drivers/gpu/drm/i915/gem/i915_gem_object.c GEM_BUG_ON(!list_empty(&obj->lut_list)); obj 178 drivers/gpu/drm/i915/gem/i915_gem_object.c atomic_set(&obj->mm.pages_pin_count, 0); obj 179 drivers/gpu/drm/i915/gem/i915_gem_object.c __i915_gem_object_put_pages(obj, I915_MM_NORMAL); obj 180 drivers/gpu/drm/i915/gem/i915_gem_object.c GEM_BUG_ON(i915_gem_object_has_pages(obj)); obj 181 drivers/gpu/drm/i915/gem/i915_gem_object.c bitmap_free(obj->bit_17); obj 183 drivers/gpu/drm/i915/gem/i915_gem_object.c if (obj->base.import_attach) obj 184 drivers/gpu/drm/i915/gem/i915_gem_object.c drm_prime_gem_destroy(&obj->base, NULL); obj 186 drivers/gpu/drm/i915/gem/i915_gem_object.c drm_gem_free_mmap_offset(&obj->base); obj 188 drivers/gpu/drm/i915/gem/i915_gem_object.c if (obj->ops->release) obj 189 drivers/gpu/drm/i915/gem/i915_gem_object.c obj->ops->release(obj); obj 192 drivers/gpu/drm/i915/gem/i915_gem_object.c call_rcu(&obj->rcu, __i915_gem_free_object_rcu); obj 215 drivers/gpu/drm/i915/gem/i915_gem_object.c struct drm_i915_gem_object *obj = to_intel_bo(gem_obj); obj 216 
drivers/gpu/drm/i915/gem/i915_gem_object.c struct drm_i915_private *i915 = to_i915(obj->base.dev); obj 218 drivers/gpu/drm/i915/gem/i915_gem_object.c GEM_BUG_ON(i915_gem_object_is_framebuffer(obj)); obj 235 drivers/gpu/drm/i915/gem/i915_gem_object.c i915_gem_object_make_unshrinkable(obj); obj 247 drivers/gpu/drm/i915/gem/i915_gem_object.c if (llist_add(&obj->freed, &i915->mm.free_list)) obj 251 drivers/gpu/drm/i915/gem/i915_gem_object.c static bool gpu_write_needs_clflush(struct drm_i915_gem_object *obj) obj 253 drivers/gpu/drm/i915/gem/i915_gem_object.c return !(obj->cache_level == I915_CACHE_NONE || obj 254 drivers/gpu/drm/i915/gem/i915_gem_object.c obj->cache_level == I915_CACHE_WT); obj 258 drivers/gpu/drm/i915/gem/i915_gem_object.c i915_gem_object_flush_write_domain(struct drm_i915_gem_object *obj, obj 263 drivers/gpu/drm/i915/gem/i915_gem_object.c assert_object_held(obj); obj 265 drivers/gpu/drm/i915/gem/i915_gem_object.c if (!(obj->write_domain & flush_domains)) obj 268 drivers/gpu/drm/i915/gem/i915_gem_object.c switch (obj->write_domain) { obj 270 drivers/gpu/drm/i915/gem/i915_gem_object.c for_each_ggtt_vma(vma, obj) obj 273 drivers/gpu/drm/i915/gem/i915_gem_object.c intel_frontbuffer_flush(obj->frontbuffer, ORIGIN_CPU); obj 275 drivers/gpu/drm/i915/gem/i915_gem_object.c for_each_ggtt_vma(vma, obj) { obj 289 drivers/gpu/drm/i915/gem/i915_gem_object.c i915_gem_clflush_object(obj, I915_CLFLUSH_SYNC); obj 293 drivers/gpu/drm/i915/gem/i915_gem_object.c if (gpu_write_needs_clflush(obj)) obj 294 drivers/gpu/drm/i915/gem/i915_gem_object.c obj->cache_dirty = true; obj 298 drivers/gpu/drm/i915/gem/i915_gem_object.c obj->write_domain = 0; obj 23 drivers/gpu/drm/i915/gem/i915_gem_object.h void i915_gem_object_free(struct drm_i915_gem_object *obj); obj 25 drivers/gpu/drm/i915/gem/i915_gem_object.h void i915_gem_object_init(struct drm_i915_gem_object *obj, obj 34 drivers/gpu/drm/i915/gem/i915_gem_object.h void __i915_gem_object_release_shmem(struct drm_i915_gem_object *obj, obj 38 drivers/gpu/drm/i915/gem/i915_gem_object.h int i915_gem_object_attach_phys(struct drm_i915_gem_object *obj, int align); obj 41 drivers/gpu/drm/i915/gem/i915_gem_object.h void i915_gem_free_object(struct drm_gem_object *obj); obj 46 drivers/gpu/drm/i915/gem/i915_gem_object.h __i915_gem_object_unset_pages(struct drm_i915_gem_object *obj); obj 47 drivers/gpu/drm/i915/gem/i915_gem_object.h void i915_gem_object_truncate(struct drm_i915_gem_object *obj); obj 72 drivers/gpu/drm/i915/gem/i915_gem_object.h struct drm_i915_gem_object *obj; obj 75 drivers/gpu/drm/i915/gem/i915_gem_object.h obj = i915_gem_object_lookup_rcu(file, handle); obj 76 drivers/gpu/drm/i915/gem/i915_gem_object.h if (obj && !kref_get_unless_zero(&obj->base.refcount)) obj 77 drivers/gpu/drm/i915/gem/i915_gem_object.h obj = NULL; obj 80 drivers/gpu/drm/i915/gem/i915_gem_object.h return obj; obj 89 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_get(struct drm_i915_gem_object *obj) obj 91 drivers/gpu/drm/i915/gem/i915_gem_object.h drm_gem_object_get(&obj->base); obj 92 drivers/gpu/drm/i915/gem/i915_gem_object.h return obj; obj 97 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_put(struct drm_i915_gem_object *obj) obj 99 drivers/gpu/drm/i915/gem/i915_gem_object.h __drm_gem_object_put(&obj->base); obj 102 drivers/gpu/drm/i915/gem/i915_gem_object.h #define assert_object_held(obj) dma_resv_assert_held((obj)->base.resv) obj 104 drivers/gpu/drm/i915/gem/i915_gem_object.h static inline void i915_gem_object_lock(struct drm_i915_gem_object *obj) 
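
Taken together, the i915_gem_object.h helpers indexed around this point trace the canonical access pattern for a GEM object: i915_gem_object_lookup() takes a reference on the handle, i915_gem_object_pin_pages() keeps the backing store resident, the dma_resv lock (i915_gem_object_lock_interruptible/unlock) guards the domain change, and everything unwinds in reverse order. A minimal sketch of that pattern, mirroring the set-domain ioctl flow indexed earlier in i915_gem_domain.c; the function name example_set_cpu_domain is hypothetical:

#include "gem/i915_gem_object.h"

static int example_set_cpu_domain(struct drm_file *file, u32 handle)
{
	struct drm_i915_gem_object *obj;
	int err;

	obj = i915_gem_object_lookup(file, handle);	/* takes a reference */
	if (!obj)
		return -ENOENT;

	err = i915_gem_object_pin_pages(obj);		/* keep backing store resident */
	if (err)
		goto out_put;

	err = i915_gem_object_lock_interruptible(obj);	/* dma_resv_lock under the hood */
	if (err)
		goto out_unpin;

	err = i915_gem_object_set_to_cpu_domain(obj, true /* write */);
	i915_gem_object_unlock(obj);

out_unpin:
	i915_gem_object_unpin_pages(obj);		/* balances pin_pages() */
out_put:
	i915_gem_object_put(obj);			/* drops the lookup reference */
	return err;
}
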
obj 106 drivers/gpu/drm/i915/gem/i915_gem_object.h dma_resv_lock(obj->base.resv, NULL); obj 110 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_lock_interruptible(struct drm_i915_gem_object *obj) obj 112 drivers/gpu/drm/i915/gem/i915_gem_object.h return dma_resv_lock_interruptible(obj->base.resv, NULL); obj 115 drivers/gpu/drm/i915/gem/i915_gem_object.h static inline void i915_gem_object_unlock(struct drm_i915_gem_object *obj) obj 117 drivers/gpu/drm/i915/gem/i915_gem_object.h dma_resv_unlock(obj->base.resv); obj 121 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_lock_fence(struct drm_i915_gem_object *obj); obj 122 drivers/gpu/drm/i915/gem/i915_gem_object.h void i915_gem_object_unlock_fence(struct drm_i915_gem_object *obj, obj 126 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_set_readonly(struct drm_i915_gem_object *obj) obj 128 drivers/gpu/drm/i915/gem/i915_gem_object.h obj->base.vma_node.readonly = true; obj 132 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_is_readonly(const struct drm_i915_gem_object *obj) obj 134 drivers/gpu/drm/i915/gem/i915_gem_object.h return obj->base.vma_node.readonly; obj 138 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_has_struct_page(const struct drm_i915_gem_object *obj) obj 140 drivers/gpu/drm/i915/gem/i915_gem_object.h return obj->ops->flags & I915_GEM_OBJECT_HAS_STRUCT_PAGE; obj 144 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_is_shrinkable(const struct drm_i915_gem_object *obj) obj 146 drivers/gpu/drm/i915/gem/i915_gem_object.h return obj->ops->flags & I915_GEM_OBJECT_IS_SHRINKABLE; obj 150 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_is_proxy(const struct drm_i915_gem_object *obj) obj 152 drivers/gpu/drm/i915/gem/i915_gem_object.h return obj->ops->flags & I915_GEM_OBJECT_IS_PROXY; obj 156 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_never_bind_ggtt(const struct drm_i915_gem_object *obj) obj 158 drivers/gpu/drm/i915/gem/i915_gem_object.h return obj->ops->flags & I915_GEM_OBJECT_NO_GGTT; obj 162 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_needs_async_cancel(const struct drm_i915_gem_object *obj) obj 164 drivers/gpu/drm/i915/gem/i915_gem_object.h return obj->ops->flags & I915_GEM_OBJECT_ASYNC_CANCEL; obj 168 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_is_framebuffer(const struct drm_i915_gem_object *obj) obj 170 drivers/gpu/drm/i915/gem/i915_gem_object.h return READ_ONCE(obj->frontbuffer); obj 174 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_get_tiling(const struct drm_i915_gem_object *obj) obj 176 drivers/gpu/drm/i915/gem/i915_gem_object.h return obj->tiling_and_stride & TILING_MASK; obj 180 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_is_tiled(const struct drm_i915_gem_object *obj) obj 182 drivers/gpu/drm/i915/gem/i915_gem_object.h return i915_gem_object_get_tiling(obj) != I915_TILING_NONE; obj 186 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_get_stride(const struct drm_i915_gem_object *obj) obj 188 drivers/gpu/drm/i915/gem/i915_gem_object.h return obj->tiling_and_stride & STRIDE_MASK; obj 199 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_get_tile_height(const struct drm_i915_gem_object *obj) obj 201 drivers/gpu/drm/i915/gem/i915_gem_object.h return i915_gem_tile_height(i915_gem_object_get_tiling(obj)); obj 205 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_get_tile_row_size(const struct drm_i915_gem_object *obj) obj 207 
drivers/gpu/drm/i915/gem/i915_gem_object.h return (i915_gem_object_get_stride(obj) * obj 208 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_get_tile_height(obj)); obj 211 drivers/gpu/drm/i915/gem/i915_gem_object.h int i915_gem_object_set_tiling(struct drm_i915_gem_object *obj, obj 215 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_get_sg(struct drm_i915_gem_object *obj, obj 219 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_get_page(struct drm_i915_gem_object *obj, obj 223 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_get_dirty_page(struct drm_i915_gem_object *obj, obj 227 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_get_dma_address_len(struct drm_i915_gem_object *obj, obj 232 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_get_dma_address(struct drm_i915_gem_object *obj, obj 235 drivers/gpu/drm/i915/gem/i915_gem_object.h void __i915_gem_object_set_pages(struct drm_i915_gem_object *obj, obj 239 drivers/gpu/drm/i915/gem/i915_gem_object.h int ____i915_gem_object_get_pages(struct drm_i915_gem_object *obj); obj 240 drivers/gpu/drm/i915/gem/i915_gem_object.h int __i915_gem_object_get_pages(struct drm_i915_gem_object *obj); obj 243 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_pin_pages(struct drm_i915_gem_object *obj) obj 245 drivers/gpu/drm/i915/gem/i915_gem_object.h might_lock(&obj->mm.lock); obj 247 drivers/gpu/drm/i915/gem/i915_gem_object.h if (atomic_inc_not_zero(&obj->mm.pages_pin_count)) obj 250 drivers/gpu/drm/i915/gem/i915_gem_object.h return __i915_gem_object_get_pages(obj); obj 254 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_has_pages(struct drm_i915_gem_object *obj) obj 256 drivers/gpu/drm/i915/gem/i915_gem_object.h return !IS_ERR_OR_NULL(READ_ONCE(obj->mm.pages)); obj 260 drivers/gpu/drm/i915/gem/i915_gem_object.h __i915_gem_object_pin_pages(struct drm_i915_gem_object *obj) obj 262 drivers/gpu/drm/i915/gem/i915_gem_object.h GEM_BUG_ON(!i915_gem_object_has_pages(obj)); obj 264 drivers/gpu/drm/i915/gem/i915_gem_object.h atomic_inc(&obj->mm.pages_pin_count); obj 268 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_has_pinned_pages(struct drm_i915_gem_object *obj) obj 270 drivers/gpu/drm/i915/gem/i915_gem_object.h return atomic_read(&obj->mm.pages_pin_count); obj 274 drivers/gpu/drm/i915/gem/i915_gem_object.h __i915_gem_object_unpin_pages(struct drm_i915_gem_object *obj) obj 276 drivers/gpu/drm/i915/gem/i915_gem_object.h GEM_BUG_ON(!i915_gem_object_has_pages(obj)); obj 277 drivers/gpu/drm/i915/gem/i915_gem_object.h GEM_BUG_ON(!i915_gem_object_has_pinned_pages(obj)); obj 279 drivers/gpu/drm/i915/gem/i915_gem_object.h atomic_dec(&obj->mm.pages_pin_count); obj 283 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_unpin_pages(struct drm_i915_gem_object *obj) obj 285 drivers/gpu/drm/i915/gem/i915_gem_object.h __i915_gem_object_unpin_pages(obj); obj 293 drivers/gpu/drm/i915/gem/i915_gem_object.h int __i915_gem_object_put_pages(struct drm_i915_gem_object *obj, obj 295 drivers/gpu/drm/i915/gem/i915_gem_object.h void i915_gem_object_truncate(struct drm_i915_gem_object *obj); obj 296 drivers/gpu/drm/i915/gem/i915_gem_object.h void i915_gem_object_writeback(struct drm_i915_gem_object *obj); obj 322 drivers/gpu/drm/i915/gem/i915_gem_object.h void *__must_check i915_gem_object_pin_map(struct drm_i915_gem_object *obj, obj 325 drivers/gpu/drm/i915/gem/i915_gem_object.h void __i915_gem_object_flush_map(struct drm_i915_gem_object *obj, obj 328 
drivers/gpu/drm/i915/gem/i915_gem_object.h static inline void i915_gem_object_flush_map(struct drm_i915_gem_object *obj) obj 330 drivers/gpu/drm/i915/gem/i915_gem_object.h __i915_gem_object_flush_map(obj, 0, obj->base.size); obj 342 drivers/gpu/drm/i915/gem/i915_gem_object.h static inline void i915_gem_object_unpin_map(struct drm_i915_gem_object *obj) obj 344 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_unpin_pages(obj); obj 347 drivers/gpu/drm/i915/gem/i915_gem_object.h void __i915_gem_object_release_mmap(struct drm_i915_gem_object *obj); obj 348 drivers/gpu/drm/i915/gem/i915_gem_object.h void i915_gem_object_release_mmap(struct drm_i915_gem_object *obj); obj 351 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_flush_write_domain(struct drm_i915_gem_object *obj, obj 354 drivers/gpu/drm/i915/gem/i915_gem_object.h int i915_gem_object_prepare_read(struct drm_i915_gem_object *obj, obj 356 drivers/gpu/drm/i915/gem/i915_gem_object.h int i915_gem_object_prepare_write(struct drm_i915_gem_object *obj, obj 363 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_finish_access(struct drm_i915_gem_object *obj) obj 365 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_unpin_pages(obj); obj 366 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_unlock(obj); obj 370 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_last_write_engine(struct drm_i915_gem_object *obj) obj 376 drivers/gpu/drm/i915/gem/i915_gem_object.h fence = dma_resv_get_excl_rcu(obj->base.resv); obj 386 drivers/gpu/drm/i915/gem/i915_gem_object.h void i915_gem_object_set_cache_coherency(struct drm_i915_gem_object *obj, obj 388 drivers/gpu/drm/i915/gem/i915_gem_object.h void i915_gem_object_flush_if_display(struct drm_i915_gem_object *obj); obj 391 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_set_to_wc_domain(struct drm_i915_gem_object *obj, bool write); obj 393 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_set_to_gtt_domain(struct drm_i915_gem_object *obj, bool write); obj 395 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_set_to_cpu_domain(struct drm_i915_gem_object *obj, bool write); obj 397 drivers/gpu/drm/i915/gem/i915_gem_object.h i915_gem_object_pin_to_display_plane(struct drm_i915_gem_object *obj, obj 403 drivers/gpu/drm/i915/gem/i915_gem_object.h void i915_gem_object_make_unshrinkable(struct drm_i915_gem_object *obj); obj 404 drivers/gpu/drm/i915/gem/i915_gem_object.h void i915_gem_object_make_shrinkable(struct drm_i915_gem_object *obj); obj 405 drivers/gpu/drm/i915/gem/i915_gem_object.h void i915_gem_object_make_purgeable(struct drm_i915_gem_object *obj); obj 407 drivers/gpu/drm/i915/gem/i915_gem_object.h static inline bool cpu_write_needs_clflush(struct drm_i915_gem_object *obj) obj 409 drivers/gpu/drm/i915/gem/i915_gem_object.h if (obj->cache_dirty) obj 412 drivers/gpu/drm/i915/gem/i915_gem_object.h if (!(obj->cache_coherent & I915_BO_CACHE_COHERENT_FOR_WRITE)) obj 415 drivers/gpu/drm/i915/gem/i915_gem_object.h return obj->pin_global; /* currently in use by HW, keep flushed */ obj 418 drivers/gpu/drm/i915/gem/i915_gem_object.h static inline void __start_cpu_write(struct drm_i915_gem_object *obj) obj 420 drivers/gpu/drm/i915/gem/i915_gem_object.h obj->read_domains = I915_GEM_DOMAIN_CPU; obj 421 drivers/gpu/drm/i915/gem/i915_gem_object.h obj->write_domain = I915_GEM_DOMAIN_CPU; obj 422 drivers/gpu/drm/i915/gem/i915_gem_object.h if (cpu_write_needs_clflush(obj)) obj 423 drivers/gpu/drm/i915/gem/i915_gem_object.h obj->cache_dirty = true; 
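
The pin_map family declared in the entries above follows the same discipline: i915_gem_object_pin_map() pins the pages and returns a kernel mapping, i915_gem_object_flush_map() flushes the whole mapping when the object is not coherent for CPU writes, and i915_gem_object_unpin_map() drops the pin taken by pin_map. A hedged sketch assuming those semantics (example_fill_with is a made-up helper; I915_MAP_WC is the mapping type used by the blt code indexed below):

static int example_fill_with(struct drm_i915_gem_object *obj, u32 value)
{
	u32 *vaddr;
	u64 n;

	/* Pins the backing pages and returns a write-combined kernel mapping. */
	vaddr = i915_gem_object_pin_map(obj, I915_MAP_WC);
	if (IS_ERR(vaddr))
		return PTR_ERR(vaddr);

	for (n = 0; n < obj->base.size / sizeof(*vaddr); n++)
		vaddr[n] = value;

	/* Covers the whole object; skipped when coherent for CPU writes. */
	i915_gem_object_flush_map(obj);
	i915_gem_object_unpin_map(obj);	/* balances pin_map() */
	return 0;
}
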
obj 426 drivers/gpu/drm/i915/gem/i915_gem_object.h int i915_gem_object_wait(struct drm_i915_gem_object *obj, obj 429 drivers/gpu/drm/i915/gem/i915_gem_object.h int i915_gem_object_wait_priority(struct drm_i915_gem_object *obj, obj 41 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c cmd = i915_gem_object_pin_map(pool->obj, I915_MAP_WC); obj 82 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c i915_gem_object_unpin_map(pool->obj); obj 84 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c batch = i915_vma_instance(pool->obj, ce->vm, NULL); obj 109 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c err = i915_request_await_object(rq, vma->obj, false); obj 126 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c int i915_gem_object_fill_blt(struct drm_i915_gem_object *obj, obj 135 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c vma = i915_vma_instance(obj, ce->vm, NULL); obj 143 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c if (obj->cache_dirty & ~obj->cache_coherent) { obj 144 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c i915_gem_object_lock(obj); obj 145 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c i915_gem_clflush_object(obj, 0); obj 146 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c i915_gem_object_unlock(obj); obj 165 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c err = i915_request_await_object(rq, obj, true); obj 176 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c err = i915_request_await_object(rq, vma->obj, true); obj 225 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c cmd = i915_gem_object_pin_map(pool->obj, I915_MAP_WC); obj 281 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c i915_gem_object_unpin_map(pool->obj); obj 283 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c batch = i915_vma_instance(pool->obj, ce->vm, NULL); obj 305 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c struct drm_i915_gem_object *obj = vma->obj; obj 307 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c if (obj->cache_dirty & ~obj->cache_coherent) obj 308 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c i915_gem_clflush_object(obj, 0); obj 310 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c return i915_request_await_object(rq, obj, write); obj 29 drivers/gpu/drm/i915/gem/i915_gem_object_blt.h int i915_gem_object_fill_blt(struct drm_i915_gem_object *obj, obj 51 drivers/gpu/drm/i915/gem/i915_gem_object_types.h int (*get_pages)(struct drm_i915_gem_object *obj); obj 52 drivers/gpu/drm/i915/gem/i915_gem_object_types.h void (*put_pages)(struct drm_i915_gem_object *obj, obj 54 drivers/gpu/drm/i915/gem/i915_gem_object_types.h void (*truncate)(struct drm_i915_gem_object *obj); obj 55 drivers/gpu/drm/i915/gem/i915_gem_object_types.h void (*writeback)(struct drm_i915_gem_object *obj); obj 57 drivers/gpu/drm/i915/gem/i915_gem_object_types.h int (*pwrite)(struct drm_i915_gem_object *obj, obj 60 drivers/gpu/drm/i915/gem/i915_gem_object_types.h int (*dmabuf_export)(struct drm_i915_gem_object *obj); obj 61 drivers/gpu/drm/i915/gem/i915_gem_object_types.h void (*release)(struct drm_i915_gem_object *obj); obj 11 drivers/gpu/drm/i915/gem/i915_gem_pages.c void __i915_gem_object_set_pages(struct drm_i915_gem_object *obj, obj 15 drivers/gpu/drm/i915/gem/i915_gem_pages.c struct drm_i915_private *i915 = to_i915(obj->base.dev); obj 19 drivers/gpu/drm/i915/gem/i915_gem_pages.c lockdep_assert_held(&obj->mm.lock); obj 22 drivers/gpu/drm/i915/gem/i915_gem_pages.c if (obj->cache_dirty) { obj 23 drivers/gpu/drm/i915/gem/i915_gem_pages.c obj->write_domain = 0; obj 24 drivers/gpu/drm/i915/gem/i915_gem_pages.c if (i915_gem_object_has_struct_page(obj)) obj 26 
drivers/gpu/drm/i915/gem/i915_gem_pages.c obj->cache_dirty = false; obj 29 drivers/gpu/drm/i915/gem/i915_gem_pages.c obj->mm.get_page.sg_pos = pages->sgl; obj 30 drivers/gpu/drm/i915/gem/i915_gem_pages.c obj->mm.get_page.sg_idx = 0; obj 32 drivers/gpu/drm/i915/gem/i915_gem_pages.c obj->mm.pages = pages; obj 34 drivers/gpu/drm/i915/gem/i915_gem_pages.c if (i915_gem_object_is_tiled(obj) && obj 36 drivers/gpu/drm/i915/gem/i915_gem_pages.c GEM_BUG_ON(obj->mm.quirked); obj 37 drivers/gpu/drm/i915/gem/i915_gem_pages.c __i915_gem_object_pin_pages(obj); obj 38 drivers/gpu/drm/i915/gem/i915_gem_pages.c obj->mm.quirked = true; obj 42 drivers/gpu/drm/i915/gem/i915_gem_pages.c obj->mm.page_sizes.phys = sg_page_sizes; obj 52 drivers/gpu/drm/i915/gem/i915_gem_pages.c obj->mm.page_sizes.sg = 0; obj 54 drivers/gpu/drm/i915/gem/i915_gem_pages.c if (obj->mm.page_sizes.phys & ~0u << i) obj 55 drivers/gpu/drm/i915/gem/i915_gem_pages.c obj->mm.page_sizes.sg |= BIT(i); obj 57 drivers/gpu/drm/i915/gem/i915_gem_pages.c GEM_BUG_ON(!HAS_PAGE_SIZES(i915, obj->mm.page_sizes.sg)); obj 59 drivers/gpu/drm/i915/gem/i915_gem_pages.c if (i915_gem_object_is_shrinkable(obj)) { obj 66 drivers/gpu/drm/i915/gem/i915_gem_pages.c i915->mm.shrink_memory += obj->base.size; obj 68 drivers/gpu/drm/i915/gem/i915_gem_pages.c if (obj->mm.madv != I915_MADV_WILLNEED) obj 72 drivers/gpu/drm/i915/gem/i915_gem_pages.c list_add_tail(&obj->mm.link, list); obj 78 drivers/gpu/drm/i915/gem/i915_gem_pages.c int ____i915_gem_object_get_pages(struct drm_i915_gem_object *obj) obj 82 drivers/gpu/drm/i915/gem/i915_gem_pages.c if (unlikely(obj->mm.madv != I915_MADV_WILLNEED)) { obj 87 drivers/gpu/drm/i915/gem/i915_gem_pages.c err = obj->ops->get_pages(obj); obj 88 drivers/gpu/drm/i915/gem/i915_gem_pages.c GEM_BUG_ON(!err && !i915_gem_object_has_pages(obj)); obj 100 drivers/gpu/drm/i915/gem/i915_gem_pages.c int __i915_gem_object_get_pages(struct drm_i915_gem_object *obj) obj 104 drivers/gpu/drm/i915/gem/i915_gem_pages.c err = mutex_lock_interruptible(&obj->mm.lock); obj 108 drivers/gpu/drm/i915/gem/i915_gem_pages.c if (unlikely(!i915_gem_object_has_pages(obj))) { obj 109 drivers/gpu/drm/i915/gem/i915_gem_pages.c GEM_BUG_ON(i915_gem_object_has_pinned_pages(obj)); obj 111 drivers/gpu/drm/i915/gem/i915_gem_pages.c err = ____i915_gem_object_get_pages(obj); obj 117 drivers/gpu/drm/i915/gem/i915_gem_pages.c atomic_inc(&obj->mm.pages_pin_count); obj 120 drivers/gpu/drm/i915/gem/i915_gem_pages.c mutex_unlock(&obj->mm.lock); obj 125 drivers/gpu/drm/i915/gem/i915_gem_pages.c void i915_gem_object_truncate(struct drm_i915_gem_object *obj) obj 127 drivers/gpu/drm/i915/gem/i915_gem_pages.c drm_gem_free_mmap_offset(&obj->base); obj 128 drivers/gpu/drm/i915/gem/i915_gem_pages.c if (obj->ops->truncate) obj 129 drivers/gpu/drm/i915/gem/i915_gem_pages.c obj->ops->truncate(obj); obj 133 drivers/gpu/drm/i915/gem/i915_gem_pages.c void i915_gem_object_writeback(struct drm_i915_gem_object *obj) obj 135 drivers/gpu/drm/i915/gem/i915_gem_pages.c lockdep_assert_held(&obj->mm.lock); obj 136 drivers/gpu/drm/i915/gem/i915_gem_pages.c GEM_BUG_ON(i915_gem_object_has_pages(obj)); obj 138 drivers/gpu/drm/i915/gem/i915_gem_pages.c if (obj->ops->writeback) obj 139 drivers/gpu/drm/i915/gem/i915_gem_pages.c obj->ops->writeback(obj); obj 142 drivers/gpu/drm/i915/gem/i915_gem_pages.c static void __i915_gem_object_reset_page_iter(struct drm_i915_gem_object *obj) obj 148 drivers/gpu/drm/i915/gem/i915_gem_pages.c radix_tree_for_each_slot(slot, &obj->mm.get_page.radix, &iter, 0) obj 149 
drivers/gpu/drm/i915/gem/i915_gem_pages.c radix_tree_delete(&obj->mm.get_page.radix, iter.index); obj 154 drivers/gpu/drm/i915/gem/i915_gem_pages.c __i915_gem_object_unset_pages(struct drm_i915_gem_object *obj) obj 158 drivers/gpu/drm/i915/gem/i915_gem_pages.c pages = fetch_and_zero(&obj->mm.pages); obj 162 drivers/gpu/drm/i915/gem/i915_gem_pages.c i915_gem_object_make_unshrinkable(obj); obj 164 drivers/gpu/drm/i915/gem/i915_gem_pages.c if (obj->mm.mapping) { obj 167 drivers/gpu/drm/i915/gem/i915_gem_pages.c ptr = page_mask_bits(obj->mm.mapping); obj 173 drivers/gpu/drm/i915/gem/i915_gem_pages.c obj->mm.mapping = NULL; obj 176 drivers/gpu/drm/i915/gem/i915_gem_pages.c __i915_gem_object_reset_page_iter(obj); obj 177 drivers/gpu/drm/i915/gem/i915_gem_pages.c obj->mm.page_sizes.phys = obj->mm.page_sizes.sg = 0; obj 182 drivers/gpu/drm/i915/gem/i915_gem_pages.c int __i915_gem_object_put_pages(struct drm_i915_gem_object *obj, obj 188 drivers/gpu/drm/i915/gem/i915_gem_pages.c if (i915_gem_object_has_pinned_pages(obj)) obj 191 drivers/gpu/drm/i915/gem/i915_gem_pages.c GEM_BUG_ON(atomic_read(&obj->bind_count)); obj 194 drivers/gpu/drm/i915/gem/i915_gem_pages.c mutex_lock_nested(&obj->mm.lock, subclass); obj 195 drivers/gpu/drm/i915/gem/i915_gem_pages.c if (unlikely(atomic_read(&obj->mm.pages_pin_count))) { obj 205 drivers/gpu/drm/i915/gem/i915_gem_pages.c pages = __i915_gem_object_unset_pages(obj); obj 213 drivers/gpu/drm/i915/gem/i915_gem_pages.c if (!pages && !i915_gem_object_needs_async_cancel(obj)) obj 217 drivers/gpu/drm/i915/gem/i915_gem_pages.c obj->ops->put_pages(obj, pages); obj 221 drivers/gpu/drm/i915/gem/i915_gem_pages.c mutex_unlock(&obj->mm.lock); obj 227 drivers/gpu/drm/i915/gem/i915_gem_pages.c static void *i915_gem_object_map(const struct drm_i915_gem_object *obj, obj 230 drivers/gpu/drm/i915/gem/i915_gem_pages.c unsigned long n_pages = obj->base.size >> PAGE_SHIFT; obj 231 drivers/gpu/drm/i915/gem/i915_gem_pages.c struct sg_table *sgt = obj->mm.pages; obj 277 drivers/gpu/drm/i915/gem/i915_gem_pages.c void *i915_gem_object_pin_map(struct drm_i915_gem_object *obj, obj 285 drivers/gpu/drm/i915/gem/i915_gem_pages.c if (unlikely(!i915_gem_object_has_struct_page(obj))) obj 288 drivers/gpu/drm/i915/gem/i915_gem_pages.c err = mutex_lock_interruptible(&obj->mm.lock); obj 295 drivers/gpu/drm/i915/gem/i915_gem_pages.c if (!atomic_inc_not_zero(&obj->mm.pages_pin_count)) { obj 296 drivers/gpu/drm/i915/gem/i915_gem_pages.c if (unlikely(!i915_gem_object_has_pages(obj))) { obj 297 drivers/gpu/drm/i915/gem/i915_gem_pages.c GEM_BUG_ON(i915_gem_object_has_pinned_pages(obj)); obj 299 drivers/gpu/drm/i915/gem/i915_gem_pages.c err = ____i915_gem_object_get_pages(obj); obj 305 drivers/gpu/drm/i915/gem/i915_gem_pages.c atomic_inc(&obj->mm.pages_pin_count); obj 308 drivers/gpu/drm/i915/gem/i915_gem_pages.c GEM_BUG_ON(!i915_gem_object_has_pages(obj)); obj 310 drivers/gpu/drm/i915/gem/i915_gem_pages.c ptr = page_unpack_bits(obj->mm.mapping, &has_type); obj 322 drivers/gpu/drm/i915/gem/i915_gem_pages.c ptr = obj->mm.mapping = NULL; obj 326 drivers/gpu/drm/i915/gem/i915_gem_pages.c ptr = i915_gem_object_map(obj, type); obj 332 drivers/gpu/drm/i915/gem/i915_gem_pages.c obj->mm.mapping = page_pack_bits(ptr, type); obj 336 drivers/gpu/drm/i915/gem/i915_gem_pages.c mutex_unlock(&obj->mm.lock); obj 340 drivers/gpu/drm/i915/gem/i915_gem_pages.c atomic_dec(&obj->mm.pages_pin_count); obj 346 drivers/gpu/drm/i915/gem/i915_gem_pages.c void __i915_gem_object_flush_map(struct drm_i915_gem_object *obj, obj 353 
drivers/gpu/drm/i915/gem/i915_gem_pages.c GEM_BUG_ON(!i915_gem_object_has_pinned_pages(obj)); obj 354 drivers/gpu/drm/i915/gem/i915_gem_pages.c GEM_BUG_ON(range_overflows_t(typeof(obj->base.size), obj 355 drivers/gpu/drm/i915/gem/i915_gem_pages.c offset, size, obj->base.size)); obj 357 drivers/gpu/drm/i915/gem/i915_gem_pages.c obj->mm.dirty = true; obj 359 drivers/gpu/drm/i915/gem/i915_gem_pages.c if (obj->cache_coherent & I915_BO_CACHE_COHERENT_FOR_WRITE) obj 362 drivers/gpu/drm/i915/gem/i915_gem_pages.c ptr = page_unpack_bits(obj->mm.mapping, &has_type); obj 367 drivers/gpu/drm/i915/gem/i915_gem_pages.c if (size == obj->base.size) { obj 368 drivers/gpu/drm/i915/gem/i915_gem_pages.c obj->write_domain &= ~I915_GEM_DOMAIN_CPU; obj 369 drivers/gpu/drm/i915/gem/i915_gem_pages.c obj->cache_dirty = false; obj 374 drivers/gpu/drm/i915/gem/i915_gem_pages.c i915_gem_object_get_sg(struct drm_i915_gem_object *obj, obj 378 drivers/gpu/drm/i915/gem/i915_gem_pages.c struct i915_gem_object_page_iter *iter = &obj->mm.get_page; obj 383 drivers/gpu/drm/i915/gem/i915_gem_pages.c GEM_BUG_ON(n >= obj->base.size >> PAGE_SHIFT); obj 384 drivers/gpu/drm/i915/gem/i915_gem_pages.c GEM_BUG_ON(!i915_gem_object_has_pinned_pages(obj)); obj 487 drivers/gpu/drm/i915/gem/i915_gem_pages.c i915_gem_object_get_page(struct drm_i915_gem_object *obj, unsigned int n) obj 492 drivers/gpu/drm/i915/gem/i915_gem_pages.c GEM_BUG_ON(!i915_gem_object_has_struct_page(obj)); obj 494 drivers/gpu/drm/i915/gem/i915_gem_pages.c sg = i915_gem_object_get_sg(obj, n, &offset); obj 500 drivers/gpu/drm/i915/gem/i915_gem_pages.c i915_gem_object_get_dirty_page(struct drm_i915_gem_object *obj, obj 505 drivers/gpu/drm/i915/gem/i915_gem_pages.c page = i915_gem_object_get_page(obj, n); obj 506 drivers/gpu/drm/i915/gem/i915_gem_pages.c if (!obj->mm.dirty) obj 513 drivers/gpu/drm/i915/gem/i915_gem_pages.c i915_gem_object_get_dma_address_len(struct drm_i915_gem_object *obj, obj 520 drivers/gpu/drm/i915/gem/i915_gem_pages.c sg = i915_gem_object_get_sg(obj, n, &offset); obj 529 drivers/gpu/drm/i915/gem/i915_gem_pages.c i915_gem_object_get_dma_address(struct drm_i915_gem_object *obj, obj 532 drivers/gpu/drm/i915/gem/i915_gem_pages.c return i915_gem_object_get_dma_address_len(obj, n, NULL); obj 21 drivers/gpu/drm/i915/gem/i915_gem_phys.c static int i915_gem_object_get_pages_phys(struct drm_i915_gem_object *obj) obj 23 drivers/gpu/drm/i915/gem/i915_gem_phys.c struct address_space *mapping = obj->base.filp->f_mapping; obj 31 drivers/gpu/drm/i915/gem/i915_gem_phys.c if (WARN_ON(i915_gem_object_needs_bit17_swizzle(obj))) obj 39 drivers/gpu/drm/i915/gem/i915_gem_phys.c vaddr = dma_alloc_coherent(&obj->base.dev->pdev->dev, obj 40 drivers/gpu/drm/i915/gem/i915_gem_phys.c roundup_pow_of_two(obj->base.size), obj 54 drivers/gpu/drm/i915/gem/i915_gem_phys.c sg->length = obj->base.size; obj 58 drivers/gpu/drm/i915/gem/i915_gem_phys.c sg_dma_len(sg) = obj->base.size; obj 61 drivers/gpu/drm/i915/gem/i915_gem_phys.c for (i = 0; i < obj->base.size / PAGE_SIZE; i++) { obj 78 drivers/gpu/drm/i915/gem/i915_gem_phys.c intel_gt_chipset_flush(&to_i915(obj->base.dev)->gt); obj 80 drivers/gpu/drm/i915/gem/i915_gem_phys.c __i915_gem_object_set_pages(obj, st, sg->length); obj 87 drivers/gpu/drm/i915/gem/i915_gem_phys.c dma_free_coherent(&obj->base.dev->pdev->dev, obj 88 drivers/gpu/drm/i915/gem/i915_gem_phys.c roundup_pow_of_two(obj->base.size), obj 94 drivers/gpu/drm/i915/gem/i915_gem_phys.c i915_gem_object_put_pages_phys(struct drm_i915_gem_object *obj, obj 100 
drivers/gpu/drm/i915/gem/i915_gem_phys.c __i915_gem_object_release_shmem(obj, pages, false); obj 102 drivers/gpu/drm/i915/gem/i915_gem_phys.c if (obj->mm.dirty) { obj 103 drivers/gpu/drm/i915/gem/i915_gem_phys.c struct address_space *mapping = obj->base.filp->f_mapping; obj 107 drivers/gpu/drm/i915/gem/i915_gem_phys.c for (i = 0; i < obj->base.size / PAGE_SIZE; i++) { obj 121 drivers/gpu/drm/i915/gem/i915_gem_phys.c if (obj->mm.madv == I915_MADV_WILLNEED) obj 127 drivers/gpu/drm/i915/gem/i915_gem_phys.c obj->mm.dirty = false; obj 133 drivers/gpu/drm/i915/gem/i915_gem_phys.c dma_free_coherent(&obj->base.dev->pdev->dev, obj 134 drivers/gpu/drm/i915/gem/i915_gem_phys.c roundup_pow_of_two(obj->base.size), obj 138 drivers/gpu/drm/i915/gem/i915_gem_phys.c static void phys_release(struct drm_i915_gem_object *obj) obj 140 drivers/gpu/drm/i915/gem/i915_gem_phys.c fput(obj->base.filp); obj 150 drivers/gpu/drm/i915/gem/i915_gem_phys.c int i915_gem_object_attach_phys(struct drm_i915_gem_object *obj, int align) obj 155 drivers/gpu/drm/i915/gem/i915_gem_phys.c if (align > obj->base.size) obj 158 drivers/gpu/drm/i915/gem/i915_gem_phys.c if (obj->ops == &i915_gem_phys_ops) obj 161 drivers/gpu/drm/i915/gem/i915_gem_phys.c if (obj->ops != &i915_gem_shmem_ops) obj 164 drivers/gpu/drm/i915/gem/i915_gem_phys.c err = i915_gem_object_unbind(obj, I915_GEM_OBJECT_UNBIND_ACTIVE); obj 168 drivers/gpu/drm/i915/gem/i915_gem_phys.c mutex_lock(&obj->mm.lock); obj 170 drivers/gpu/drm/i915/gem/i915_gem_phys.c if (obj->mm.madv != I915_MADV_WILLNEED) { obj 175 drivers/gpu/drm/i915/gem/i915_gem_phys.c if (obj->mm.quirked) { obj 180 drivers/gpu/drm/i915/gem/i915_gem_phys.c if (obj->mm.mapping) { obj 185 drivers/gpu/drm/i915/gem/i915_gem_phys.c pages = __i915_gem_object_unset_pages(obj); obj 187 drivers/gpu/drm/i915/gem/i915_gem_phys.c obj->ops = &i915_gem_phys_ops; obj 189 drivers/gpu/drm/i915/gem/i915_gem_phys.c err = ____i915_gem_object_get_pages(obj); obj 194 drivers/gpu/drm/i915/gem/i915_gem_phys.c __i915_gem_object_pin_pages(obj); obj 197 drivers/gpu/drm/i915/gem/i915_gem_phys.c i915_gem_shmem_ops.put_pages(obj, pages); obj 198 drivers/gpu/drm/i915/gem/i915_gem_phys.c mutex_unlock(&obj->mm.lock); obj 202 drivers/gpu/drm/i915/gem/i915_gem_phys.c obj->ops = &i915_gem_shmem_ops; obj 206 drivers/gpu/drm/i915/gem/i915_gem_phys.c __i915_gem_object_set_pages(obj, pages, sg_page_sizes); obj 209 drivers/gpu/drm/i915/gem/i915_gem_phys.c mutex_unlock(&obj->mm.lock); obj 181 drivers/gpu/drm/i915/gem/i915_gem_pm.c struct drm_i915_gem_object *obj; obj 213 drivers/gpu/drm/i915/gem/i915_gem_pm.c while ((obj = first_mm_object(*phase))) { obj 214 drivers/gpu/drm/i915/gem/i915_gem_pm.c list_move_tail(&obj->mm.link, &keep); obj 217 drivers/gpu/drm/i915/gem/i915_gem_pm.c if (!kref_get_unless_zero(&obj->base.refcount)) obj 222 drivers/gpu/drm/i915/gem/i915_gem_pm.c i915_gem_object_lock(obj); obj 223 drivers/gpu/drm/i915/gem/i915_gem_pm.c WARN_ON(i915_gem_object_set_to_gtt_domain(obj, false)); obj 224 drivers/gpu/drm/i915/gem/i915_gem_pm.c i915_gem_object_unlock(obj); obj 225 drivers/gpu/drm/i915/gem/i915_gem_pm.c i915_gem_object_put(obj); obj 26 drivers/gpu/drm/i915/gem/i915_gem_shmem.c static int shmem_get_pages(struct drm_i915_gem_object *obj) obj 28 drivers/gpu/drm/i915/gem/i915_gem_shmem.c struct drm_i915_private *i915 = to_i915(obj->base.dev); obj 29 drivers/gpu/drm/i915/gem/i915_gem_shmem.c const unsigned long page_count = obj->base.size / PAGE_SIZE; obj 48 drivers/gpu/drm/i915/gem/i915_gem_shmem.c GEM_BUG_ON(obj->read_domains & 
I915_GEM_GPU_DOMAINS); obj 49 drivers/gpu/drm/i915/gem/i915_gem_shmem.c GEM_BUG_ON(obj->write_domain & I915_GEM_GPU_DOMAINS); obj 74 drivers/gpu/drm/i915/gem/i915_gem_shmem.c mapping = obj->base.filp->f_mapping; obj 158 drivers/gpu/drm/i915/gem/i915_gem_shmem.c ret = i915_gem_gtt_prepare_pages(obj, st); obj 180 drivers/gpu/drm/i915/gem/i915_gem_shmem.c if (i915_gem_object_needs_bit17_swizzle(obj)) obj 181 drivers/gpu/drm/i915/gem/i915_gem_shmem.c i915_gem_object_do_bit_17_swizzle(obj, st); obj 183 drivers/gpu/drm/i915/gem/i915_gem_shmem.c __i915_gem_object_set_pages(obj, st, sg_page_sizes); obj 217 drivers/gpu/drm/i915/gem/i915_gem_shmem.c shmem_truncate(struct drm_i915_gem_object *obj) obj 225 drivers/gpu/drm/i915/gem/i915_gem_shmem.c shmem_truncate_range(file_inode(obj->base.filp), 0, (loff_t)-1); obj 226 drivers/gpu/drm/i915/gem/i915_gem_shmem.c obj->mm.madv = __I915_MADV_PURGED; obj 227 drivers/gpu/drm/i915/gem/i915_gem_shmem.c obj->mm.pages = ERR_PTR(-EFAULT); obj 231 drivers/gpu/drm/i915/gem/i915_gem_shmem.c shmem_writeback(struct drm_i915_gem_object *obj) obj 249 drivers/gpu/drm/i915/gem/i915_gem_shmem.c mapping = obj->base.filp->f_mapping; obj 252 drivers/gpu/drm/i915/gem/i915_gem_shmem.c for (i = 0; i < obj->base.size >> PAGE_SHIFT; i++) { obj 276 drivers/gpu/drm/i915/gem/i915_gem_shmem.c __i915_gem_object_release_shmem(struct drm_i915_gem_object *obj, obj 280 drivers/gpu/drm/i915/gem/i915_gem_shmem.c GEM_BUG_ON(obj->mm.madv == __I915_MADV_PURGED); obj 282 drivers/gpu/drm/i915/gem/i915_gem_shmem.c if (obj->mm.madv == I915_MADV_DONTNEED) obj 283 drivers/gpu/drm/i915/gem/i915_gem_shmem.c obj->mm.dirty = false; obj 286 drivers/gpu/drm/i915/gem/i915_gem_shmem.c (obj->read_domains & I915_GEM_DOMAIN_CPU) == 0 && obj 287 drivers/gpu/drm/i915/gem/i915_gem_shmem.c !(obj->cache_coherent & I915_BO_CACHE_COHERENT_FOR_READ)) obj 290 drivers/gpu/drm/i915/gem/i915_gem_shmem.c __start_cpu_write(obj); obj 294 drivers/gpu/drm/i915/gem/i915_gem_shmem.c shmem_put_pages(struct drm_i915_gem_object *obj, struct sg_table *pages) obj 300 drivers/gpu/drm/i915/gem/i915_gem_shmem.c __i915_gem_object_release_shmem(obj, pages, true); obj 302 drivers/gpu/drm/i915/gem/i915_gem_shmem.c i915_gem_gtt_finish_pages(obj, pages); obj 304 drivers/gpu/drm/i915/gem/i915_gem_shmem.c if (i915_gem_object_needs_bit17_swizzle(obj)) obj 305 drivers/gpu/drm/i915/gem/i915_gem_shmem.c i915_gem_object_save_bit_17_swizzle(obj, pages); obj 307 drivers/gpu/drm/i915/gem/i915_gem_shmem.c mapping_clear_unevictable(file_inode(obj->base.filp)->i_mapping); obj 311 drivers/gpu/drm/i915/gem/i915_gem_shmem.c if (obj->mm.dirty) obj 314 drivers/gpu/drm/i915/gem/i915_gem_shmem.c if (obj->mm.madv == I915_MADV_WILLNEED) obj 322 drivers/gpu/drm/i915/gem/i915_gem_shmem.c obj->mm.dirty = false; obj 329 drivers/gpu/drm/i915/gem/i915_gem_shmem.c shmem_pwrite(struct drm_i915_gem_object *obj, obj 332 drivers/gpu/drm/i915/gem/i915_gem_shmem.c struct address_space *mapping = obj->base.filp->f_mapping; obj 349 drivers/gpu/drm/i915/gem/i915_gem_shmem.c if (i915_gem_object_has_pages(obj)) obj 352 drivers/gpu/drm/i915/gem/i915_gem_shmem.c if (obj->mm.madv != I915_MADV_WILLNEED) obj 387 drivers/gpu/drm/i915/gem/i915_gem_shmem.c err = pagecache_write_begin(obj->base.filp, mapping, obj 399 drivers/gpu/drm/i915/gem/i915_gem_shmem.c err = pagecache_write_end(obj->base.filp, mapping, obj 418 drivers/gpu/drm/i915/gem/i915_gem_shmem.c static void shmem_release(struct drm_i915_gem_object *obj) obj 420 drivers/gpu/drm/i915/gem/i915_gem_shmem.c fput(obj->base.filp); obj 
438 drivers/gpu/drm/i915/gem/i915_gem_shmem.c struct drm_gem_object *obj, obj 444 drivers/gpu/drm/i915/gem/i915_gem_shmem.c drm_gem_private_object_init(&i915->drm, obj, size); obj 454 drivers/gpu/drm/i915/gem/i915_gem_shmem.c obj->filp = filp; obj 461 drivers/gpu/drm/i915/gem/i915_gem_shmem.c struct drm_i915_gem_object *obj; obj 475 drivers/gpu/drm/i915/gem/i915_gem_shmem.c if (overflows_type(size, obj->base.size)) obj 478 drivers/gpu/drm/i915/gem/i915_gem_shmem.c obj = i915_gem_object_alloc(); obj 479 drivers/gpu/drm/i915/gem/i915_gem_shmem.c if (!obj) obj 482 drivers/gpu/drm/i915/gem/i915_gem_shmem.c ret = create_shmem(i915, &obj->base, size); obj 493 drivers/gpu/drm/i915/gem/i915_gem_shmem.c mapping = obj->base.filp->f_mapping; obj 497 drivers/gpu/drm/i915/gem/i915_gem_shmem.c i915_gem_object_init(obj, &i915_gem_shmem_ops); obj 499 drivers/gpu/drm/i915/gem/i915_gem_shmem.c obj->write_domain = I915_GEM_DOMAIN_CPU; obj 500 drivers/gpu/drm/i915/gem/i915_gem_shmem.c obj->read_domains = I915_GEM_DOMAIN_CPU; obj 519 drivers/gpu/drm/i915/gem/i915_gem_shmem.c i915_gem_object_set_cache_coherency(obj, cache_level); obj 521 drivers/gpu/drm/i915/gem/i915_gem_shmem.c trace_i915_gem_object_create(obj); obj 523 drivers/gpu/drm/i915/gem/i915_gem_shmem.c return obj; obj 526 drivers/gpu/drm/i915/gem/i915_gem_shmem.c i915_gem_object_free(obj); obj 535 drivers/gpu/drm/i915/gem/i915_gem_shmem.c struct drm_i915_gem_object *obj; obj 540 drivers/gpu/drm/i915/gem/i915_gem_shmem.c obj = i915_gem_object_create_shmem(dev_priv, round_up(size, PAGE_SIZE)); obj 541 drivers/gpu/drm/i915/gem/i915_gem_shmem.c if (IS_ERR(obj)) obj 542 drivers/gpu/drm/i915/gem/i915_gem_shmem.c return obj; obj 544 drivers/gpu/drm/i915/gem/i915_gem_shmem.c GEM_BUG_ON(obj->write_domain != I915_GEM_DOMAIN_CPU); obj 546 drivers/gpu/drm/i915/gem/i915_gem_shmem.c file = obj->base.filp; obj 574 drivers/gpu/drm/i915/gem/i915_gem_shmem.c return obj; obj 577 drivers/gpu/drm/i915/gem/i915_gem_shmem.c i915_gem_object_put(obj); obj 58 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c static bool can_release_pages(struct drm_i915_gem_object *obj) obj 61 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c if (!i915_gem_object_is_shrinkable(obj)) obj 72 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c if (atomic_read(&obj->mm.pages_pin_count) > atomic_read(&obj->bind_count)) obj 81 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c if (READ_ONCE(obj->pin_global)) obj 88 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c return swap_available() || obj->mm.madv == I915_MADV_DONTNEED; obj 91 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c static bool unsafe_drop_pages(struct drm_i915_gem_object *obj, obj 100 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c if (i915_gem_object_unbind(obj, flags) == 0) obj 101 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c __i915_gem_object_put_pages(obj, I915_MM_SHRINKER); obj 103 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c return !i915_gem_object_has_pages(obj); obj 106 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c static void try_to_writeback(struct drm_i915_gem_object *obj, obj 109 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c switch (obj->mm.madv) { obj 111 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c i915_gem_object_truncate(obj); obj 117 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c i915_gem_object_writeback(obj); obj 213 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c struct drm_i915_gem_object *obj; obj 230 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c (obj = list_first_entry_or_null(phase->list, obj 231 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c 
typeof(*obj), obj 233 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c list_move_tail(&obj->mm.link, &still_in_list); obj 236 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c !is_vmalloc_addr(obj->mm.mapping)) obj 240 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c i915_gem_object_is_framebuffer(obj)) obj 244 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c atomic_read(&obj->bind_count)) obj 247 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c if (!can_release_pages(obj)) obj 250 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c if (!kref_get_unless_zero(&obj->base.refcount)) obj 255 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c if (unsafe_drop_pages(obj, shrink)) { obj 257 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c mutex_lock_nested(&obj->mm.lock, obj 259 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c if (!i915_gem_object_has_pages(obj)) { obj 260 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c try_to_writeback(obj, shrink); obj 261 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c count += obj->base.size >> PAGE_SHIFT; obj 263 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c mutex_unlock(&obj->mm.lock); obj 266 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c scanned += obj->base.size >> PAGE_SHIFT; obj 267 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c i915_gem_object_put(obj); obj 386 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c struct drm_i915_gem_object *obj; obj 404 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c list_for_each_entry(obj, &i915->mm.shrink_list, mm.link) { obj 405 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c if (!can_release_pages(obj)) obj 406 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c unevictable += obj->base.size >> PAGE_SHIFT; obj 408 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c available += obj->base.size >> PAGE_SHIFT; obj 524 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c void i915_gem_object_make_unshrinkable(struct drm_i915_gem_object *obj) obj 532 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c if (!list_empty(&obj->mm.link)) { /* pinned by caller */ obj 533 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c struct drm_i915_private *i915 = obj_to_i915(obj); obj 537 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c GEM_BUG_ON(list_empty(&obj->mm.link)); obj 539 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c list_del_init(&obj->mm.link); obj 541 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c i915->mm.shrink_memory -= obj->base.size; obj 547 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c static void __i915_gem_object_make_shrinkable(struct drm_i915_gem_object *obj, obj 550 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c GEM_BUG_ON(!i915_gem_object_has_pages(obj)); obj 551 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c GEM_BUG_ON(!list_empty(&obj->mm.link)); obj 553 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c if (i915_gem_object_is_shrinkable(obj)) { obj 554 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c struct drm_i915_private *i915 = obj_to_i915(obj); obj 558 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c GEM_BUG_ON(!kref_read(&obj->base.refcount)); obj 560 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c list_add_tail(&obj->mm.link, head); obj 562 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c i915->mm.shrink_memory += obj->base.size; obj 568 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c void i915_gem_object_make_shrinkable(struct drm_i915_gem_object *obj) obj 570 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c __i915_gem_object_make_shrinkable(obj, obj 571 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c &obj_to_i915(obj)->mm.shrink_list); obj 574 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c void i915_gem_object_make_purgeable(struct 
drm_i915_gem_object *obj) obj 576 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c __i915_gem_object_make_shrinkable(obj, obj 577 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c &obj_to_i915(obj)->mm.purge_list); obj 507 drivers/gpu/drm/i915/gem/i915_gem_stolen.c static int i915_gem_object_get_pages_stolen(struct drm_i915_gem_object *obj) obj 510 drivers/gpu/drm/i915/gem/i915_gem_stolen.c i915_pages_create_for_stolen(obj->base.dev, obj 511 drivers/gpu/drm/i915/gem/i915_gem_stolen.c obj->stolen->start, obj 512 drivers/gpu/drm/i915/gem/i915_gem_stolen.c obj->stolen->size); obj 516 drivers/gpu/drm/i915/gem/i915_gem_stolen.c __i915_gem_object_set_pages(obj, pages, obj->stolen->size); obj 521 drivers/gpu/drm/i915/gem/i915_gem_stolen.c static void i915_gem_object_put_pages_stolen(struct drm_i915_gem_object *obj, obj 530 drivers/gpu/drm/i915/gem/i915_gem_stolen.c i915_gem_object_release_stolen(struct drm_i915_gem_object *obj) obj 532 drivers/gpu/drm/i915/gem/i915_gem_stolen.c struct drm_i915_private *dev_priv = to_i915(obj->base.dev); obj 533 drivers/gpu/drm/i915/gem/i915_gem_stolen.c struct drm_mm_node *stolen = fetch_and_zero(&obj->stolen); obj 551 drivers/gpu/drm/i915/gem/i915_gem_stolen.c struct drm_i915_gem_object *obj; obj 554 drivers/gpu/drm/i915/gem/i915_gem_stolen.c obj = i915_gem_object_alloc(); obj 555 drivers/gpu/drm/i915/gem/i915_gem_stolen.c if (obj == NULL) obj 558 drivers/gpu/drm/i915/gem/i915_gem_stolen.c drm_gem_private_object_init(&dev_priv->drm, &obj->base, stolen->size); obj 559 drivers/gpu/drm/i915/gem/i915_gem_stolen.c i915_gem_object_init(obj, &i915_gem_object_stolen_ops); obj 561 drivers/gpu/drm/i915/gem/i915_gem_stolen.c obj->stolen = stolen; obj 562 drivers/gpu/drm/i915/gem/i915_gem_stolen.c obj->read_domains = I915_GEM_DOMAIN_CPU | I915_GEM_DOMAIN_GTT; obj 564 drivers/gpu/drm/i915/gem/i915_gem_stolen.c i915_gem_object_set_cache_coherency(obj, cache_level); obj 566 drivers/gpu/drm/i915/gem/i915_gem_stolen.c if (i915_gem_object_pin_pages(obj)) obj 569 drivers/gpu/drm/i915/gem/i915_gem_stolen.c return obj; obj 572 drivers/gpu/drm/i915/gem/i915_gem_stolen.c i915_gem_object_free(obj); obj 580 drivers/gpu/drm/i915/gem/i915_gem_stolen.c struct drm_i915_gem_object *obj; obj 600 drivers/gpu/drm/i915/gem/i915_gem_stolen.c obj = _i915_gem_object_create_stolen(dev_priv, stolen); obj 601 drivers/gpu/drm/i915/gem/i915_gem_stolen.c if (obj) obj 602 drivers/gpu/drm/i915/gem/i915_gem_stolen.c return obj; obj 616 drivers/gpu/drm/i915/gem/i915_gem_stolen.c struct drm_i915_gem_object *obj; obj 650 drivers/gpu/drm/i915/gem/i915_gem_stolen.c obj = _i915_gem_object_create_stolen(dev_priv, stolen); obj 651 drivers/gpu/drm/i915/gem/i915_gem_stolen.c if (obj == NULL) { obj 660 drivers/gpu/drm/i915/gem/i915_gem_stolen.c return obj; obj 662 drivers/gpu/drm/i915/gem/i915_gem_stolen.c ret = i915_gem_object_pin_pages(obj); obj 666 drivers/gpu/drm/i915/gem/i915_gem_stolen.c vma = i915_vma_instance(obj, &ggtt->vm, NULL); obj 678 drivers/gpu/drm/i915/gem/i915_gem_stolen.c size, gtt_offset, obj->cache_level, obj 687 drivers/gpu/drm/i915/gem/i915_gem_stolen.c vma->pages = obj->mm.pages; obj 695 drivers/gpu/drm/i915/gem/i915_gem_stolen.c GEM_BUG_ON(i915_gem_object_is_shrinkable(obj)); obj 696 drivers/gpu/drm/i915/gem/i915_gem_stolen.c atomic_inc(&obj->bind_count); obj 698 drivers/gpu/drm/i915/gem/i915_gem_stolen.c return obj; obj 701 drivers/gpu/drm/i915/gem/i915_gem_stolen.c i915_gem_object_unpin_pages(obj); obj 703 drivers/gpu/drm/i915/gem/i915_gem_stolen.c i915_gem_object_put(obj); obj 117 
drivers/gpu/drm/i915/gem/i915_gem_tiling.c i915_tiling_ok(struct drm_i915_gem_object *obj, obj 120 drivers/gpu/drm/i915/gem/i915_gem_tiling.c struct drm_i915_private *i915 = to_i915(obj->base.dev); obj 181 drivers/gpu/drm/i915/gem/i915_gem_tiling.c i915_gem_object_fence_prepare(struct drm_i915_gem_object *obj, obj 190 drivers/gpu/drm/i915/gem/i915_gem_tiling.c for_each_ggtt_vma(vma, obj) { obj 203 drivers/gpu/drm/i915/gem/i915_gem_tiling.c i915_gem_object_set_tiling(struct drm_i915_gem_object *obj, obj 206 drivers/gpu/drm/i915/gem/i915_gem_tiling.c struct drm_i915_private *i915 = to_i915(obj->base.dev); obj 213 drivers/gpu/drm/i915/gem/i915_gem_tiling.c GEM_BUG_ON(!i915_tiling_ok(obj, tiling, stride)); obj 217 drivers/gpu/drm/i915/gem/i915_gem_tiling.c if ((tiling | stride) == obj->tiling_and_stride) obj 220 drivers/gpu/drm/i915/gem/i915_gem_tiling.c if (i915_gem_object_is_framebuffer(obj)) obj 236 drivers/gpu/drm/i915/gem/i915_gem_tiling.c err = i915_gem_object_fence_prepare(obj, tiling, stride); obj 240 drivers/gpu/drm/i915/gem/i915_gem_tiling.c i915_gem_object_lock(obj); obj 241 drivers/gpu/drm/i915/gem/i915_gem_tiling.c if (i915_gem_object_is_framebuffer(obj)) { obj 242 drivers/gpu/drm/i915/gem/i915_gem_tiling.c i915_gem_object_unlock(obj); obj 250 drivers/gpu/drm/i915/gem/i915_gem_tiling.c mutex_lock(&obj->mm.lock); obj 251 drivers/gpu/drm/i915/gem/i915_gem_tiling.c if (i915_gem_object_has_pages(obj) && obj 252 drivers/gpu/drm/i915/gem/i915_gem_tiling.c obj->mm.madv == I915_MADV_WILLNEED && obj 255 drivers/gpu/drm/i915/gem/i915_gem_tiling.c GEM_BUG_ON(!obj->mm.quirked); obj 256 drivers/gpu/drm/i915/gem/i915_gem_tiling.c __i915_gem_object_unpin_pages(obj); obj 257 drivers/gpu/drm/i915/gem/i915_gem_tiling.c obj->mm.quirked = false; obj 259 drivers/gpu/drm/i915/gem/i915_gem_tiling.c if (!i915_gem_object_is_tiled(obj)) { obj 260 drivers/gpu/drm/i915/gem/i915_gem_tiling.c GEM_BUG_ON(obj->mm.quirked); obj 261 drivers/gpu/drm/i915/gem/i915_gem_tiling.c __i915_gem_object_pin_pages(obj); obj 262 drivers/gpu/drm/i915/gem/i915_gem_tiling.c obj->mm.quirked = true; obj 265 drivers/gpu/drm/i915/gem/i915_gem_tiling.c mutex_unlock(&obj->mm.lock); obj 267 drivers/gpu/drm/i915/gem/i915_gem_tiling.c for_each_ggtt_vma(vma, obj) { obj 278 drivers/gpu/drm/i915/gem/i915_gem_tiling.c obj->tiling_and_stride = tiling | stride; obj 279 drivers/gpu/drm/i915/gem/i915_gem_tiling.c i915_gem_object_unlock(obj); obj 282 drivers/gpu/drm/i915/gem/i915_gem_tiling.c i915_gem_object_release_mmap(obj); obj 285 drivers/gpu/drm/i915/gem/i915_gem_tiling.c if (i915_gem_object_needs_bit17_swizzle(obj)) { obj 286 drivers/gpu/drm/i915/gem/i915_gem_tiling.c if (!obj->bit_17) { obj 287 drivers/gpu/drm/i915/gem/i915_gem_tiling.c obj->bit_17 = bitmap_zalloc(obj->base.size >> PAGE_SHIFT, obj 291 drivers/gpu/drm/i915/gem/i915_gem_tiling.c bitmap_free(obj->bit_17); obj 292 drivers/gpu/drm/i915/gem/i915_gem_tiling.c obj->bit_17 = NULL; obj 317 drivers/gpu/drm/i915/gem/i915_gem_tiling.c struct drm_i915_gem_object *obj; obj 320 drivers/gpu/drm/i915/gem/i915_gem_tiling.c obj = i915_gem_object_lookup(file, args->handle); obj 321 drivers/gpu/drm/i915/gem/i915_gem_tiling.c if (!obj) obj 328 drivers/gpu/drm/i915/gem/i915_gem_tiling.c if (i915_gem_object_is_proxy(obj)) { obj 333 drivers/gpu/drm/i915/gem/i915_gem_tiling.c if (!i915_tiling_ok(obj, args->tiling_mode, args->stride)) { obj 371 drivers/gpu/drm/i915/gem/i915_gem_tiling.c err = i915_gem_object_set_tiling(obj, args->tiling_mode, args->stride); obj 375 
drivers/gpu/drm/i915/gem/i915_gem_tiling.c args->stride = i915_gem_object_get_stride(obj); obj 376 drivers/gpu/drm/i915/gem/i915_gem_tiling.c args->tiling_mode = i915_gem_object_get_tiling(obj); obj 379 drivers/gpu/drm/i915/gem/i915_gem_tiling.c i915_gem_object_put(obj); obj 402 drivers/gpu/drm/i915/gem/i915_gem_tiling.c struct drm_i915_gem_object *obj; obj 406 drivers/gpu/drm/i915/gem/i915_gem_tiling.c obj = i915_gem_object_lookup_rcu(file, args->handle); obj 407 drivers/gpu/drm/i915/gem/i915_gem_tiling.c if (obj) { obj 409 drivers/gpu/drm/i915/gem/i915_gem_tiling.c READ_ONCE(obj->tiling_and_stride) & TILING_MASK; obj 42 drivers/gpu/drm/i915/gem/i915_gem_userptr.c struct drm_i915_gem_object *obj; obj 62 drivers/gpu/drm/i915/gem/i915_gem_userptr.c __i915_gem_userptr_set_active(struct drm_i915_gem_object *obj, bool value) obj 64 drivers/gpu/drm/i915/gem/i915_gem_userptr.c struct i915_mmu_object *mo = obj->userptr.mmu_object; obj 108 drivers/gpu/drm/i915/gem/i915_gem_userptr.c struct drm_i915_gem_object *obj; obj 125 drivers/gpu/drm/i915/gem/i915_gem_userptr.c obj = container_of(it, struct i915_mmu_object, it)->obj; obj 126 drivers/gpu/drm/i915/gem/i915_gem_userptr.c if (!kref_get_unless_zero(&obj->base.refcount)) { obj 139 drivers/gpu/drm/i915/gem/i915_gem_userptr.c i915_gem_object_put(obj); obj 152 drivers/gpu/drm/i915/gem/i915_gem_userptr.c ret = i915_gem_object_unbind(obj, obj 155 drivers/gpu/drm/i915/gem/i915_gem_userptr.c ret = __i915_gem_object_put_pages(obj, I915_MM_SHRINKER); obj 156 drivers/gpu/drm/i915/gem/i915_gem_userptr.c i915_gem_object_put(obj); obj 201 drivers/gpu/drm/i915/gem/i915_gem_userptr.c i915_gem_userptr_release__mmu_notifier(struct drm_i915_gem_object *obj) obj 205 drivers/gpu/drm/i915/gem/i915_gem_userptr.c mo = fetch_and_zero(&obj->userptr.mmu_object); obj 255 drivers/gpu/drm/i915/gem/i915_gem_userptr.c i915_gem_userptr_init__mmu_notifier(struct drm_i915_gem_object *obj, obj 264 drivers/gpu/drm/i915/gem/i915_gem_userptr.c if (WARN_ON(obj->userptr.mm == NULL)) obj 267 drivers/gpu/drm/i915/gem/i915_gem_userptr.c mn = i915_mmu_notifier_find(obj->userptr.mm); obj 276 drivers/gpu/drm/i915/gem/i915_gem_userptr.c mo->obj = obj; obj 277 drivers/gpu/drm/i915/gem/i915_gem_userptr.c mo->it.start = obj->userptr.ptr; obj 278 drivers/gpu/drm/i915/gem/i915_gem_userptr.c mo->it.last = obj->userptr.ptr + obj->base.size - 1; obj 281 drivers/gpu/drm/i915/gem/i915_gem_userptr.c obj->userptr.mmu_object = mo; obj 299 drivers/gpu/drm/i915/gem/i915_gem_userptr.c __i915_gem_userptr_set_active(struct drm_i915_gem_object *obj, bool value) obj 304 drivers/gpu/drm/i915/gem/i915_gem_userptr.c i915_gem_userptr_release__mmu_notifier(struct drm_i915_gem_object *obj) obj 309 drivers/gpu/drm/i915/gem/i915_gem_userptr.c i915_gem_userptr_init__mmu_notifier(struct drm_i915_gem_object *obj, obj 343 drivers/gpu/drm/i915/gem/i915_gem_userptr.c i915_gem_userptr_init__mm_struct(struct drm_i915_gem_object *obj) obj 345 drivers/gpu/drm/i915/gem/i915_gem_userptr.c struct drm_i915_private *dev_priv = to_i915(obj->base.dev); obj 369 drivers/gpu/drm/i915/gem/i915_gem_userptr.c mm->i915 = to_i915(obj->base.dev); obj 382 drivers/gpu/drm/i915/gem/i915_gem_userptr.c obj->userptr.mm = mm; obj 411 drivers/gpu/drm/i915/gem/i915_gem_userptr.c i915_gem_userptr_release__mm_struct(struct drm_i915_gem_object *obj) obj 413 drivers/gpu/drm/i915/gem/i915_gem_userptr.c if (obj->userptr.mm == NULL) obj 416 drivers/gpu/drm/i915/gem/i915_gem_userptr.c kref_put_mutex(&obj->userptr.mm->kref, obj 418 
drivers/gpu/drm/i915/gem/i915_gem_userptr.c &to_i915(obj->base.dev)->mm_lock); obj 419 drivers/gpu/drm/i915/gem/i915_gem_userptr.c obj->userptr.mm = NULL; obj 424 drivers/gpu/drm/i915/gem/i915_gem_userptr.c struct drm_i915_gem_object *obj; obj 429 drivers/gpu/drm/i915/gem/i915_gem_userptr.c __i915_gem_userptr_alloc_pages(struct drm_i915_gem_object *obj, obj 451 drivers/gpu/drm/i915/gem/i915_gem_userptr.c ret = i915_gem_gtt_prepare_pages(obj, st); obj 466 drivers/gpu/drm/i915/gem/i915_gem_userptr.c __i915_gem_object_set_pages(obj, st, sg_page_sizes); obj 475 drivers/gpu/drm/i915/gem/i915_gem_userptr.c struct drm_i915_gem_object *obj = work->obj; obj 476 drivers/gpu/drm/i915/gem/i915_gem_userptr.c const unsigned long npages = obj->base.size >> PAGE_SHIFT; obj 486 drivers/gpu/drm/i915/gem/i915_gem_userptr.c struct mm_struct *mm = obj->userptr.mm->mm; obj 489 drivers/gpu/drm/i915/gem/i915_gem_userptr.c if (!i915_gem_object_is_readonly(obj)) obj 498 drivers/gpu/drm/i915/gem/i915_gem_userptr.c obj->userptr.ptr + pinned * PAGE_SIZE, obj 512 drivers/gpu/drm/i915/gem/i915_gem_userptr.c mutex_lock(&obj->mm.lock); obj 513 drivers/gpu/drm/i915/gem/i915_gem_userptr.c if (obj->userptr.work == &work->work) { obj 517 drivers/gpu/drm/i915/gem/i915_gem_userptr.c pages = __i915_gem_userptr_alloc_pages(obj, pvec, obj 525 drivers/gpu/drm/i915/gem/i915_gem_userptr.c obj->userptr.work = ERR_CAST(pages); obj 527 drivers/gpu/drm/i915/gem/i915_gem_userptr.c __i915_gem_userptr_set_active(obj, false); obj 529 drivers/gpu/drm/i915/gem/i915_gem_userptr.c mutex_unlock(&obj->mm.lock); obj 534 drivers/gpu/drm/i915/gem/i915_gem_userptr.c i915_gem_object_put(obj); obj 540 drivers/gpu/drm/i915/gem/i915_gem_userptr.c __i915_gem_userptr_get_pages_schedule(struct drm_i915_gem_object *obj) obj 567 drivers/gpu/drm/i915/gem/i915_gem_userptr.c obj->userptr.work = &work->work; obj 569 drivers/gpu/drm/i915/gem/i915_gem_userptr.c work->obj = i915_gem_object_get(obj); obj 575 drivers/gpu/drm/i915/gem/i915_gem_userptr.c queue_work(to_i915(obj->base.dev)->mm.userptr_wq, &work->work); obj 580 drivers/gpu/drm/i915/gem/i915_gem_userptr.c static int i915_gem_userptr_get_pages(struct drm_i915_gem_object *obj) obj 582 drivers/gpu/drm/i915/gem/i915_gem_userptr.c const unsigned long num_pages = obj->base.size >> PAGE_SHIFT; obj 583 drivers/gpu/drm/i915/gem/i915_gem_userptr.c struct mm_struct *mm = obj->userptr.mm->mm; obj 606 drivers/gpu/drm/i915/gem/i915_gem_userptr.c if (obj->userptr.work) { obj 608 drivers/gpu/drm/i915/gem/i915_gem_userptr.c if (IS_ERR(obj->userptr.work)) obj 609 drivers/gpu/drm/i915/gem/i915_gem_userptr.c return PTR_ERR(obj->userptr.work); obj 631 drivers/gpu/drm/i915/gem/i915_gem_userptr.c pinned = __get_user_pages_fast(obj->userptr.ptr, obj 633 drivers/gpu/drm/i915/gem/i915_gem_userptr.c !i915_gem_object_is_readonly(obj), obj 642 drivers/gpu/drm/i915/gem/i915_gem_userptr.c pages = __i915_gem_userptr_get_pages_schedule(obj); obj 645 drivers/gpu/drm/i915/gem/i915_gem_userptr.c pages = __i915_gem_userptr_alloc_pages(obj, pvec, num_pages); obj 649 drivers/gpu/drm/i915/gem/i915_gem_userptr.c __i915_gem_userptr_set_active(obj, true); obj 659 drivers/gpu/drm/i915/gem/i915_gem_userptr.c i915_gem_userptr_put_pages(struct drm_i915_gem_object *obj, obj 666 drivers/gpu/drm/i915/gem/i915_gem_userptr.c obj->userptr.work = NULL; obj 667 drivers/gpu/drm/i915/gem/i915_gem_userptr.c __i915_gem_userptr_set_active(obj, false); obj 671 drivers/gpu/drm/i915/gem/i915_gem_userptr.c __i915_gem_object_release_shmem(obj, pages, true); obj 672 
drivers/gpu/drm/i915/gem/i915_gem_userptr.c i915_gem_gtt_finish_pages(obj, pages); obj 679 drivers/gpu/drm/i915/gem/i915_gem_userptr.c if (i915_gem_object_is_readonly(obj)) obj 680 drivers/gpu/drm/i915/gem/i915_gem_userptr.c obj->mm.dirty = false; obj 683 drivers/gpu/drm/i915/gem/i915_gem_userptr.c if (obj->mm.dirty && trylock_page(page)) { obj 709 drivers/gpu/drm/i915/gem/i915_gem_userptr.c obj->mm.dirty = false; obj 716 drivers/gpu/drm/i915/gem/i915_gem_userptr.c i915_gem_userptr_release(struct drm_i915_gem_object *obj) obj 718 drivers/gpu/drm/i915/gem/i915_gem_userptr.c i915_gem_userptr_release__mmu_notifier(obj); obj 719 drivers/gpu/drm/i915/gem/i915_gem_userptr.c i915_gem_userptr_release__mm_struct(obj); obj 723 drivers/gpu/drm/i915/gem/i915_gem_userptr.c i915_gem_userptr_dmabuf_export(struct drm_i915_gem_object *obj) obj 725 drivers/gpu/drm/i915/gem/i915_gem_userptr.c if (obj->userptr.mmu_object) obj 728 drivers/gpu/drm/i915/gem/i915_gem_userptr.c return i915_gem_userptr_init__mmu_notifier(obj, 0); obj 784 drivers/gpu/drm/i915/gem/i915_gem_userptr.c struct drm_i915_gem_object *obj; obj 820 drivers/gpu/drm/i915/gem/i915_gem_userptr.c obj = i915_gem_object_alloc(); obj 821 drivers/gpu/drm/i915/gem/i915_gem_userptr.c if (obj == NULL) obj 824 drivers/gpu/drm/i915/gem/i915_gem_userptr.c drm_gem_private_object_init(dev, &obj->base, args->user_size); obj 825 drivers/gpu/drm/i915/gem/i915_gem_userptr.c i915_gem_object_init(obj, &i915_gem_userptr_ops); obj 826 drivers/gpu/drm/i915/gem/i915_gem_userptr.c obj->read_domains = I915_GEM_DOMAIN_CPU; obj 827 drivers/gpu/drm/i915/gem/i915_gem_userptr.c obj->write_domain = I915_GEM_DOMAIN_CPU; obj 828 drivers/gpu/drm/i915/gem/i915_gem_userptr.c i915_gem_object_set_cache_coherency(obj, I915_CACHE_LLC); obj 830 drivers/gpu/drm/i915/gem/i915_gem_userptr.c obj->userptr.ptr = args->user_ptr; obj 832 drivers/gpu/drm/i915/gem/i915_gem_userptr.c i915_gem_object_set_readonly(obj); obj 838 drivers/gpu/drm/i915/gem/i915_gem_userptr.c ret = i915_gem_userptr_init__mm_struct(obj); obj 840 drivers/gpu/drm/i915/gem/i915_gem_userptr.c ret = i915_gem_userptr_init__mmu_notifier(obj, args->flags); obj 842 drivers/gpu/drm/i915/gem/i915_gem_userptr.c ret = drm_gem_handle_create(file, &obj->base, &handle); obj 845 drivers/gpu/drm/i915/gem/i915_gem_userptr.c i915_gem_object_put(obj); obj 132 drivers/gpu/drm/i915/gem/i915_gem_wait.c i915_gem_object_wait_priority(struct drm_i915_gem_object *obj, obj 143 drivers/gpu/drm/i915/gem/i915_gem_wait.c ret = dma_resv_get_fences_rcu(obj->base.resv, obj 155 drivers/gpu/drm/i915/gem/i915_gem_wait.c excl = dma_resv_get_excl_rcu(obj->base.resv); obj 172 drivers/gpu/drm/i915/gem/i915_gem_wait.c i915_gem_object_wait(struct drm_i915_gem_object *obj, obj 179 drivers/gpu/drm/i915/gem/i915_gem_wait.c timeout = i915_gem_object_wait_reservation(obj->base.resv, obj 233 drivers/gpu/drm/i915/gem/i915_gem_wait.c struct drm_i915_gem_object *obj; obj 240 drivers/gpu/drm/i915/gem/i915_gem_wait.c obj = i915_gem_object_lookup(file, args->bo_handle); obj 241 drivers/gpu/drm/i915/gem/i915_gem_wait.c if (!obj) obj 246 drivers/gpu/drm/i915/gem/i915_gem_wait.c ret = i915_gem_object_wait(obj, obj 272 drivers/gpu/drm/i915/gem/i915_gem_wait.c i915_gem_object_put(obj); obj 11 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c static void huge_free_pages(struct drm_i915_gem_object *obj, obj 14 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c unsigned long nreal = obj->scratch / PAGE_SIZE; obj 24 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c static int 
huge_get_pages(struct drm_i915_gem_object *obj) obj 27 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c const unsigned long nreal = obj->scratch / PAGE_SIZE; obj 28 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c const unsigned long npages = obj->base.size / PAGE_SIZE; obj 64 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c if (i915_gem_gtt_prepare_pages(obj, pages)) obj 67 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c __i915_gem_object_set_pages(obj, pages, PAGE_SIZE); obj 72 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c huge_free_pages(obj, pages); obj 78 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c static void huge_put_pages(struct drm_i915_gem_object *obj, obj 81 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c i915_gem_gtt_finish_pages(obj, pages); obj 82 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c huge_free_pages(obj, pages); obj 84 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c obj->mm.dirty = false; obj 99 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c struct drm_i915_gem_object *obj; obj 106 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c if (overflows_type(dma_size, obj->base.size)) obj 109 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c obj = i915_gem_object_alloc(); obj 110 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c if (!obj) obj 113 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c drm_gem_private_object_init(&i915->drm, &obj->base, dma_size); obj 114 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c i915_gem_object_init(obj, &huge_ops); obj 116 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c obj->read_domains = I915_GEM_DOMAIN_CPU; obj 117 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c obj->write_domain = I915_GEM_DOMAIN_CPU; obj 119 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c i915_gem_object_set_cache_coherency(obj, cache_level); obj 120 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c obj->scratch = phys_size; obj 122 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c return obj; obj 16 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.h huge_gem_object_phys_size(struct drm_i915_gem_object *obj) obj 18 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.h return obj->scratch; obj 22 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.h huge_gem_object_dma_size(struct drm_i915_gem_object *obj) obj 24 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.h return obj->base.size; obj 56 drivers/gpu/drm/i915/gem/selftests/huge_pages.c static int get_huge_pages(struct drm_i915_gem_object *obj) obj 59 drivers/gpu/drm/i915/gem/selftests/huge_pages.c unsigned int page_mask = obj->mm.page_mask; obj 69 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (sg_alloc_table(st, obj->base.size >> PAGE_SHIFT, GFP)) { obj 74 drivers/gpu/drm/i915/gem/selftests/huge_pages.c rem = obj->base.size; obj 113 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (i915_gem_gtt_prepare_pages(obj, st)) obj 116 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj->mm.madv = I915_MADV_DONTNEED; obj 118 drivers/gpu/drm/i915/gem/selftests/huge_pages.c GEM_BUG_ON(sg_page_sizes != obj->mm.page_mask); obj 119 drivers/gpu/drm/i915/gem/selftests/huge_pages.c __i915_gem_object_set_pages(obj, st, sg_page_sizes); obj 131 drivers/gpu/drm/i915/gem/selftests/huge_pages.c static void put_huge_pages(struct drm_i915_gem_object *obj, obj 134 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_gtt_finish_pages(obj, pages); obj 137 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj->mm.dirty = false; obj 138 
drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj->mm.madv = I915_MADV_WILLNEED; obj 153 drivers/gpu/drm/i915/gem/selftests/huge_pages.c struct drm_i915_gem_object *obj; obj 161 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (overflows_type(size, obj->base.size)) obj 164 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj = i915_gem_object_alloc(); obj 165 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (!obj) obj 168 drivers/gpu/drm/i915/gem/selftests/huge_pages.c drm_gem_private_object_init(&i915->drm, &obj->base, size); obj 169 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_init(obj, &huge_page_ops); obj 171 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj->write_domain = I915_GEM_DOMAIN_CPU; obj 172 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj->read_domains = I915_GEM_DOMAIN_CPU; obj 173 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj->cache_level = I915_CACHE_NONE; obj 175 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj->mm.page_mask = page_mask; obj 177 drivers/gpu/drm/i915/gem/selftests/huge_pages.c return obj; obj 180 drivers/gpu/drm/i915/gem/selftests/huge_pages.c static int fake_get_huge_pages(struct drm_i915_gem_object *obj) obj 182 drivers/gpu/drm/i915/gem/selftests/huge_pages.c struct drm_i915_private *i915 = to_i915(obj->base.dev); obj 193 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (sg_alloc_table(st, obj->base.size >> PAGE_SHIFT, GFP)) { obj 199 drivers/gpu/drm/i915/gem/selftests/huge_pages.c rem = obj->base.size; obj 230 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj->mm.madv = I915_MADV_DONTNEED; obj 232 drivers/gpu/drm/i915/gem/selftests/huge_pages.c __i915_gem_object_set_pages(obj, st, sg_page_sizes); obj 237 drivers/gpu/drm/i915/gem/selftests/huge_pages.c static int fake_get_huge_pages_single(struct drm_i915_gem_object *obj) obj 239 drivers/gpu/drm/i915/gem/selftests/huge_pages.c struct drm_i915_private *i915 = to_i915(obj->base.dev); obj 256 drivers/gpu/drm/i915/gem/selftests/huge_pages.c page_size = get_largest_page_size(i915, obj->base.size); obj 260 drivers/gpu/drm/i915/gem/selftests/huge_pages.c sg->length = obj->base.size; obj 261 drivers/gpu/drm/i915/gem/selftests/huge_pages.c sg_dma_len(sg) = obj->base.size; obj 264 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj->mm.madv = I915_MADV_DONTNEED; obj 266 drivers/gpu/drm/i915/gem/selftests/huge_pages.c __i915_gem_object_set_pages(obj, st, sg->length); obj 272 drivers/gpu/drm/i915/gem/selftests/huge_pages.c static void fake_free_huge_pages(struct drm_i915_gem_object *obj, obj 279 drivers/gpu/drm/i915/gem/selftests/huge_pages.c static void fake_put_huge_pages(struct drm_i915_gem_object *obj, obj 282 drivers/gpu/drm/i915/gem/selftests/huge_pages.c fake_free_huge_pages(obj, pages); obj 283 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj->mm.dirty = false; obj 284 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj->mm.madv = I915_MADV_WILLNEED; obj 302 drivers/gpu/drm/i915/gem/selftests/huge_pages.c struct drm_i915_gem_object *obj; obj 310 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (overflows_type(size, obj->base.size)) obj 313 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj = i915_gem_object_alloc(); obj 314 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (!obj) obj 317 drivers/gpu/drm/i915/gem/selftests/huge_pages.c drm_gem_private_object_init(&i915->drm, &obj->base, size); obj 320 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_init(obj, &fake_ops_single); obj 322 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 
i915_gem_object_init(obj, &fake_ops); obj 324 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj->write_domain = I915_GEM_DOMAIN_CPU; obj 325 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj->read_domains = I915_GEM_DOMAIN_CPU; obj 326 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj->cache_level = I915_CACHE_NONE; obj 328 drivers/gpu/drm/i915/gem/selftests/huge_pages.c return obj; obj 335 drivers/gpu/drm/i915/gem/selftests/huge_pages.c struct drm_i915_gem_object *obj = vma->obj; obj 350 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (vma->page_sizes.phys != obj->mm.page_sizes.phys) { obj 352 drivers/gpu/drm/i915/gem/selftests/huge_pages.c vma->page_sizes.phys, obj->mm.page_sizes.phys); obj 356 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (vma->page_sizes.sg != obj->mm.page_sizes.sg) { obj 358 drivers/gpu/drm/i915/gem/selftests/huge_pages.c vma->page_sizes.sg, obj->mm.page_sizes.sg); obj 362 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (obj->mm.page_sizes.gtt) { obj 364 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj->mm.page_sizes.gtt); obj 376 drivers/gpu/drm/i915/gem/selftests/huge_pages.c struct drm_i915_gem_object *obj; obj 397 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj = fake_huge_pages_object(i915, combination, !!single); obj 398 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (IS_ERR(obj)) { obj 399 drivers/gpu/drm/i915/gem/selftests/huge_pages.c err = PTR_ERR(obj); obj 403 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (obj->base.size != combination) { obj 405 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj->base.size, combination); obj 410 drivers/gpu/drm/i915/gem/selftests/huge_pages.c vma = i915_vma_instance(obj, &ppgtt->vm, NULL); obj 431 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_put(obj); obj 443 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_put(obj); obj 455 drivers/gpu/drm/i915/gem/selftests/huge_pages.c struct drm_i915_gem_object *obj; obj 477 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj = fake_huge_pages_object(i915, size, true); obj 478 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (IS_ERR(obj)) obj 479 drivers/gpu/drm/i915/gem/selftests/huge_pages.c return PTR_ERR(obj); obj 481 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (obj->base.size != size) { obj 483 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj->base.size, size); obj 488 drivers/gpu/drm/i915/gem/selftests/huge_pages.c err = i915_gem_object_pin_pages(obj); obj 493 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj->mm.page_sizes.sg = page_size; obj 495 drivers/gpu/drm/i915/gem/selftests/huge_pages.c vma = i915_vma_instance(obj, &ppgtt->vm, NULL); obj 564 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_unpin_pages(obj); obj 565 drivers/gpu/drm/i915/gem/selftests/huge_pages.c __i915_gem_object_put_pages(obj, I915_MM_NORMAL); obj 566 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_put(obj); obj 572 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_unpin_pages(obj); obj 574 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_put(obj); obj 582 drivers/gpu/drm/i915/gem/selftests/huge_pages.c struct drm_i915_gem_object *obj, *on; obj 584 drivers/gpu/drm/i915/gem/selftests/huge_pages.c list_for_each_entry_safe(obj, on, objects, st_link) { obj 587 drivers/gpu/drm/i915/gem/selftests/huge_pages.c vma = i915_vma_instance(obj, &ppgtt->vm, NULL); obj 591 drivers/gpu/drm/i915/gem/selftests/huge_pages.c list_del(&obj->st_link); obj 592 
drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_unpin_pages(obj); obj 593 drivers/gpu/drm/i915/gem/selftests/huge_pages.c __i915_gem_object_put_pages(obj, I915_MM_NORMAL); obj 594 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_put(obj); obj 610 drivers/gpu/drm/i915/gem/selftests/huge_pages.c struct drm_i915_gem_object *obj; obj 616 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj = fake_huge_pages_object(i915, size, single); obj 617 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (IS_ERR(obj)) { obj 618 drivers/gpu/drm/i915/gem/selftests/huge_pages.c err = PTR_ERR(obj); obj 622 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (obj->base.size != size) { obj 624 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj->base.size, size); obj 625 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_put(obj); obj 630 drivers/gpu/drm/i915/gem/selftests/huge_pages.c err = i915_gem_object_pin_pages(obj); obj 632 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_put(obj); obj 636 drivers/gpu/drm/i915/gem/selftests/huge_pages.c list_add(&obj->st_link, &objects); obj 638 drivers/gpu/drm/i915/gem/selftests/huge_pages.c vma = i915_vma_instance(obj, &ppgtt->vm, NULL); obj 698 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj->base.size, yesno(!!single)); obj 705 drivers/gpu/drm/i915/gem/selftests/huge_pages.c __func__, obj->base.size)) obj 723 drivers/gpu/drm/i915/gem/selftests/huge_pages.c struct drm_i915_gem_object *obj; obj 802 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj = fake_huge_pages_object(i915, size, !!single); obj 803 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (IS_ERR(obj)) obj 804 drivers/gpu/drm/i915/gem/selftests/huge_pages.c return PTR_ERR(obj); obj 806 drivers/gpu/drm/i915/gem/selftests/huge_pages.c err = i915_gem_object_pin_pages(obj); obj 814 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj->mm.page_sizes.sg &= ~I915_GTT_PAGE_SIZE_2M; obj 816 drivers/gpu/drm/i915/gem/selftests/huge_pages.c vma = i915_vma_instance(obj, &ppgtt->vm, NULL); obj 862 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_unpin_pages(obj); obj 863 drivers/gpu/drm/i915/gem/selftests/huge_pages.c __i915_gem_object_put_pages(obj, I915_MM_NORMAL); obj 864 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_put(obj); obj 875 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_unpin_pages(obj); obj 877 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_put(obj); obj 890 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_lock(vma->obj); obj 891 drivers/gpu/drm/i915/gem/selftests/huge_pages.c err = i915_gem_object_set_to_gtt_domain(vma->obj, true); obj 892 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_unlock(vma->obj); obj 900 drivers/gpu/drm/i915/gem/selftests/huge_pages.c static int cpu_check(struct drm_i915_gem_object *obj, u32 dword, u32 val) obj 906 drivers/gpu/drm/i915/gem/selftests/huge_pages.c err = i915_gem_object_prepare_read(obj, &needs_flush); obj 910 drivers/gpu/drm/i915/gem/selftests/huge_pages.c for (n = 0; n < obj->base.size >> PAGE_SHIFT; ++n) { obj 911 drivers/gpu/drm/i915/gem/selftests/huge_pages.c u32 *ptr = kmap_atomic(i915_gem_object_get_page(obj, n)); obj 927 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_finish_access(obj); obj 934 drivers/gpu/drm/i915/gem/selftests/huge_pages.c struct drm_i915_gem_object *obj, obj 943 drivers/gpu/drm/i915/gem/selftests/huge_pages.c vma = i915_vma_instance(obj, vm, NULL); obj 973 
drivers/gpu/drm/i915/gem/selftests/huge_pages.c err = cpu_check(obj, dword, val); obj 988 drivers/gpu/drm/i915/gem/selftests/huge_pages.c struct drm_i915_gem_object *obj) obj 990 drivers/gpu/drm/i915/gem/selftests/huge_pages.c struct drm_i915_private *i915 = to_i915(obj->base.dev); obj 1005 drivers/gpu/drm/i915/gem/selftests/huge_pages.c GEM_BUG_ON(!i915_gem_object_has_pinned_pages(obj)); obj 1007 drivers/gpu/drm/i915/gem/selftests/huge_pages.c size = obj->base.size; obj 1008 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (obj->mm.page_sizes.sg & I915_GTT_PAGE_SIZE_64K) obj 1011 drivers/gpu/drm/i915/gem/selftests/huge_pages.c max_page_size = rounddown_pow_of_two(obj->mm.page_sizes.sg); obj 1056 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (obj->mm.page_sizes.sg & I915_GTT_PAGE_SIZE_64K) obj 1060 drivers/gpu/drm/i915/gem/selftests/huge_pages.c err = __igt_write_huge(ctx, engine, obj, size, offset_low, obj 1065 drivers/gpu/drm/i915/gem/selftests/huge_pages.c err = __igt_write_huge(ctx, engine, obj, size, offset_high, obj 1088 drivers/gpu/drm/i915/gem/selftests/huge_pages.c struct drm_i915_gem_object *obj; obj 1133 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj = huge_pages_object(i915, size, page_sizes); obj 1134 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (IS_ERR(obj)) { obj 1135 drivers/gpu/drm/i915/gem/selftests/huge_pages.c err = PTR_ERR(obj); obj 1139 drivers/gpu/drm/i915/gem/selftests/huge_pages.c err = i915_gem_object_pin_pages(obj); obj 1141 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_put(obj); obj 1157 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj->mm.page_sizes.sg = page_sizes; obj 1159 drivers/gpu/drm/i915/gem/selftests/huge_pages.c err = igt_write_huge(ctx, obj); obj 1166 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_unpin_pages(obj); obj 1167 drivers/gpu/drm/i915/gem/selftests/huge_pages.c __i915_gem_object_put_pages(obj, I915_MM_NORMAL); obj 1168 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_put(obj); obj 1175 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_unpin_pages(obj); obj 1176 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_put(obj); obj 1187 drivers/gpu/drm/i915/gem/selftests/huge_pages.c struct drm_i915_gem_object *obj; obj 1207 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj = i915_gem_object_create_internal(i915, size); obj 1208 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (IS_ERR(obj)) obj 1209 drivers/gpu/drm/i915/gem/selftests/huge_pages.c return PTR_ERR(obj); obj 1211 drivers/gpu/drm/i915/gem/selftests/huge_pages.c err = i915_gem_object_pin_pages(obj); obj 1215 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (obj->mm.page_sizes.phys < I915_GTT_PAGE_SIZE_64K) { obj 1221 drivers/gpu/drm/i915/gem/selftests/huge_pages.c err = igt_write_huge(ctx, obj); obj 1228 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_unpin_pages(obj); obj 1229 drivers/gpu/drm/i915/gem/selftests/huge_pages.c __i915_gem_object_put_pages(obj, I915_MM_NORMAL); obj 1230 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_put(obj); obj 1236 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_unpin_pages(obj); obj 1238 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_put(obj); obj 1252 drivers/gpu/drm/i915/gem/selftests/huge_pages.c struct drm_i915_gem_object *obj; obj 1276 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj = i915_gem_object_create_shmem(i915, size); obj 1277 
drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (IS_ERR(obj)) obj 1278 drivers/gpu/drm/i915/gem/selftests/huge_pages.c return PTR_ERR(obj); obj 1280 drivers/gpu/drm/i915/gem/selftests/huge_pages.c err = i915_gem_object_pin_pages(obj); obj 1284 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (obj->mm.page_sizes.phys < I915_GTT_PAGE_SIZE_2M) { obj 1290 drivers/gpu/drm/i915/gem/selftests/huge_pages.c err = igt_write_huge(ctx, obj); obj 1297 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_unpin_pages(obj); obj 1298 drivers/gpu/drm/i915/gem/selftests/huge_pages.c __i915_gem_object_put_pages(obj, I915_MM_NORMAL); obj 1299 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_put(obj); obj 1305 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_unpin_pages(obj); obj 1307 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_put(obj); obj 1318 drivers/gpu/drm/i915/gem/selftests/huge_pages.c struct drm_i915_gem_object *obj; obj 1346 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj = i915_gem_object_create_internal(dev_priv, page_size); obj 1347 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (IS_ERR(obj)) obj 1348 drivers/gpu/drm/i915/gem/selftests/huge_pages.c return PTR_ERR(obj); obj 1350 drivers/gpu/drm/i915/gem/selftests/huge_pages.c vma = i915_vma_instance(obj, vm, NULL); obj 1371 drivers/gpu/drm/i915/gem/selftests/huge_pages.c dma_addr_t addr = i915_gem_object_get_dma_address(obj, 0); obj 1397 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_put(obj); obj 1400 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj = i915_gem_object_create_internal(dev_priv, PAGE_SIZE); obj 1401 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (IS_ERR(obj)) obj 1402 drivers/gpu/drm/i915/gem/selftests/huge_pages.c return PTR_ERR(obj); obj 1404 drivers/gpu/drm/i915/gem/selftests/huge_pages.c vma = i915_vma_instance(obj, vm, NULL); obj 1431 drivers/gpu/drm/i915/gem/selftests/huge_pages.c err = cpu_check(obj, n, 0xdeadbeaf); obj 1441 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_put(obj); obj 1452 drivers/gpu/drm/i915/gem/selftests/huge_pages.c struct drm_i915_gem_object *obj; obj 1465 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj = i915_gem_object_create_shmem(i915, PAGE_SIZE); obj 1466 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (IS_ERR(obj)) { obj 1467 drivers/gpu/drm/i915/gem/selftests/huge_pages.c err = PTR_ERR(obj); obj 1471 drivers/gpu/drm/i915/gem/selftests/huge_pages.c vaddr = i915_gem_object_pin_map(obj, I915_MAP_WB); obj 1478 drivers/gpu/drm/i915/gem/selftests/huge_pages.c __i915_gem_object_flush_map(obj, 0, 64); obj 1479 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_unpin_map(obj); obj 1481 drivers/gpu/drm/i915/gem/selftests/huge_pages.c vma = i915_vma_instance(obj, vm, NULL); obj 1497 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_put(obj); obj 1509 drivers/gpu/drm/i915/gem/selftests/huge_pages.c struct drm_i915_gem_object *obj; obj 1527 drivers/gpu/drm/i915/gem/selftests/huge_pages.c obj = i915_gem_object_create_shmem(i915, SZ_2M); obj 1528 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (IS_ERR(obj)) obj 1529 drivers/gpu/drm/i915/gem/selftests/huge_pages.c return PTR_ERR(obj); obj 1531 drivers/gpu/drm/i915/gem/selftests/huge_pages.c vma = i915_vma_instance(obj, vm, NULL); obj 1541 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (obj->mm.page_sizes.phys < I915_GTT_PAGE_SIZE_2M) { obj 1567 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if 
(i915_gem_object_has_pages(obj)) { obj 1573 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (obj->mm.page_sizes.sg || obj->mm.page_sizes.phys) { obj 1584 drivers/gpu/drm/i915/gem/selftests/huge_pages.c err = cpu_check(obj, n, 0xdeadbeaf); obj 1595 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_gem_object_put(obj); obj 19 drivers/gpu/drm/i915/gem/selftests/i915_gem_client_blt.c struct drm_i915_gem_object *obj; obj 40 drivers/gpu/drm/i915/gem/selftests/i915_gem_client_blt.c obj = huge_gem_object(i915, phys_sz, sz); obj 41 drivers/gpu/drm/i915/gem/selftests/i915_gem_client_blt.c if (IS_ERR(obj)) { obj 42 drivers/gpu/drm/i915/gem/selftests/i915_gem_client_blt.c err = PTR_ERR(obj); obj 46 drivers/gpu/drm/i915/gem/selftests/i915_gem_client_blt.c vaddr = i915_gem_object_pin_map(obj, I915_MAP_WB); obj 63 drivers/gpu/drm/i915/gem/selftests/i915_gem_client_blt.c huge_gem_object_phys_size(obj) / sizeof(u32)); obj 65 drivers/gpu/drm/i915/gem/selftests/i915_gem_client_blt.c if (!(obj->cache_coherent & I915_BO_CACHE_COHERENT_FOR_WRITE)) obj 66 drivers/gpu/drm/i915/gem/selftests/i915_gem_client_blt.c obj->cache_dirty = true; obj 68 drivers/gpu/drm/i915/gem/selftests/i915_gem_client_blt.c err = i915_gem_schedule_fill_pages_blt(obj, ce, obj->mm.pages, obj 69 drivers/gpu/drm/i915/gem/selftests/i915_gem_client_blt.c &obj->mm.page_sizes, obj 74 drivers/gpu/drm/i915/gem/selftests/i915_gem_client_blt.c i915_gem_object_lock(obj); obj 75 drivers/gpu/drm/i915/gem/selftests/i915_gem_client_blt.c err = i915_gem_object_set_to_cpu_domain(obj, false); obj 76 drivers/gpu/drm/i915/gem/selftests/i915_gem_client_blt.c i915_gem_object_unlock(obj); obj 80 drivers/gpu/drm/i915/gem/selftests/i915_gem_client_blt.c for (i = 0; i < huge_gem_object_phys_size(obj) / sizeof(u32); ++i) { obj 89 drivers/gpu/drm/i915/gem/selftests/i915_gem_client_blt.c i915_gem_object_unpin_map(obj); obj 90 drivers/gpu/drm/i915/gem/selftests/i915_gem_client_blt.c i915_gem_object_put(obj); obj 96 drivers/gpu/drm/i915/gem/selftests/i915_gem_client_blt.c i915_gem_object_unpin_map(obj); obj 98 drivers/gpu/drm/i915/gem/selftests/i915_gem_client_blt.c i915_gem_object_put(obj); obj 14 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c static int cpu_set(struct drm_i915_gem_object *obj, obj 24 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c err = i915_gem_object_prepare_write(obj, &needs_clflush); obj 28 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c page = i915_gem_object_get_page(obj, offset >> PAGE_SHIFT); obj 41 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c i915_gem_object_finish_access(obj); obj 46 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c static int cpu_get(struct drm_i915_gem_object *obj, obj 56 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c err = i915_gem_object_prepare_read(obj, &needs_clflush); obj 60 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c page = i915_gem_object_get_page(obj, offset >> PAGE_SHIFT); obj 70 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c i915_gem_object_finish_access(obj); obj 75 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c static int gtt_set(struct drm_i915_gem_object *obj, obj 83 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c i915_gem_object_lock(obj); obj 84 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c err = i915_gem_object_set_to_gtt_domain(obj, true); obj 85 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c i915_gem_object_unlock(obj); obj 89 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c vma = 
i915_gem_object_ggtt_pin(obj, NULL, 0, 0, PIN_MAPPABLE); obj 104 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c static int gtt_get(struct drm_i915_gem_object *obj, obj 112 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c i915_gem_object_lock(obj); obj 113 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c err = i915_gem_object_set_to_gtt_domain(obj, false); obj 114 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c i915_gem_object_unlock(obj); obj 118 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c vma = i915_gem_object_ggtt_pin(obj, NULL, 0, 0, PIN_MAPPABLE); obj 133 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c static int wc_set(struct drm_i915_gem_object *obj, obj 140 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c i915_gem_object_lock(obj); obj 141 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c err = i915_gem_object_set_to_wc_domain(obj, true); obj 142 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c i915_gem_object_unlock(obj); obj 146 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c map = i915_gem_object_pin_map(obj, I915_MAP_WC); obj 151 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c i915_gem_object_unpin_map(obj); obj 156 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c static int wc_get(struct drm_i915_gem_object *obj, obj 163 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c i915_gem_object_lock(obj); obj 164 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c err = i915_gem_object_set_to_wc_domain(obj, false); obj 165 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c i915_gem_object_unlock(obj); obj 169 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c map = i915_gem_object_pin_map(obj, I915_MAP_WC); obj 174 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c i915_gem_object_unpin_map(obj); obj 179 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c static int gpu_set(struct drm_i915_gem_object *obj, obj 183 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c struct drm_i915_private *i915 = to_i915(obj->base.dev); obj 189 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c i915_gem_object_lock(obj); obj 190 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c err = i915_gem_object_set_to_gtt_domain(obj, true); obj 191 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c i915_gem_object_unlock(obj); obj 195 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c vma = i915_gem_object_ggtt_pin(obj, NULL, 0, 0, 0); obj 231 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c err = i915_request_await_object(rq, vma->obj, true); obj 282 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c struct drm_i915_gem_object *obj; obj 326 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c obj = i915_gem_object_create_internal(i915, PAGE_SIZE); obj 327 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c if (IS_ERR(obj)) { obj 328 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c err = PTR_ERR(obj); obj 337 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c err = over->set(obj, offsets[n], ~values[n]); obj 346 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c err = write->set(obj, offsets[n], values[n]); obj 357 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c err = read->get(obj, offsets[n], &found); obj 375 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c i915_gem_object_put(obj); obj 387 drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c i915_gem_object_put(obj); obj 159 
drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c static unsigned long real_page_count(struct drm_i915_gem_object *obj) obj 161 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c return huge_gem_object_phys_size(obj) >> PAGE_SHIFT; obj 164 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c static unsigned long fake_page_count(struct drm_i915_gem_object *obj) obj 166 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c return huge_gem_object_dma_size(obj) >> PAGE_SHIFT; obj 169 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c static int gpu_fill(struct drm_i915_gem_object *obj, obj 178 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c GEM_BUG_ON(obj->base.size > vm->total); obj 181 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c vma = i915_vma_instance(obj, vm, NULL); obj 185 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c i915_gem_object_lock(obj); obj 186 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c err = i915_gem_object_set_to_gtt_domain(obj, true); obj 187 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c i915_gem_object_unlock(obj); obj 206 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c (dw * real_page_count(obj)) << PAGE_SHIFT | obj 208 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c real_page_count(obj), obj 215 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c static int cpu_fill(struct drm_i915_gem_object *obj, u32 value) obj 217 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c const bool has_llc = HAS_LLC(to_i915(obj->base.dev)); obj 221 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c err = i915_gem_object_prepare_write(obj, &need_flush); obj 225 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c for (n = 0; n < real_page_count(obj); n++) { obj 228 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c map = kmap_atomic(i915_gem_object_get_page(obj, n)); obj 236 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c i915_gem_object_finish_access(obj); obj 237 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c obj->read_domains = I915_GEM_DOMAIN_GTT | I915_GEM_DOMAIN_CPU; obj 238 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c obj->write_domain = 0; obj 242 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c static noinline int cpu_check(struct drm_i915_gem_object *obj, obj 248 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c err = i915_gem_object_prepare_read(obj, &needs_flush); obj 252 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c for (n = 0; n < real_page_count(obj); n++) { obj 255 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c map = kmap_atomic(i915_gem_object_get_page(obj, n)); obj 263 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c n, real_page_count(obj), m, max, obj 286 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c i915_gem_object_finish_access(obj); obj 291 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c struct drm_i915_gem_object *obj) obj 295 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c GEM_BUG_ON(obj->base.handle_count); obj 298 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c err = idr_alloc(&file->object_idr, &obj->base, 1, 0, GFP_KERNEL); obj 302 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c i915_gem_object_get(obj); obj 303 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c obj->base.handle_count++; obj 312 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c struct drm_i915_gem_object *obj; obj 323 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c obj = huge_gem_object(ctx->i915, DW_PER_PAGE * PAGE_SIZE, size); 
obj 324 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c if (IS_ERR(obj)) obj 325 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c return obj; obj 327 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c err = file_add_object(file, obj); obj 328 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c i915_gem_object_put(obj); obj 332 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c err = cpu_fill(obj, STACK_MAGIC); obj 339 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c list_add_tail(&obj->st_link, objects); obj 340 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c return obj; obj 343 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c static unsigned long max_dwords(struct drm_i915_gem_object *obj) obj 345 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c unsigned long npages = fake_page_count(obj); obj 368 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c struct drm_i915_gem_object *obj = NULL; obj 403 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c if (!obj) { obj 404 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c obj = create_test_object(ctx, file, &objects); obj 405 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c if (IS_ERR(obj)) { obj 406 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c err = PTR_ERR(obj); obj 411 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c err = gpu_fill(obj, ctx, engine, dw); obj 414 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c ndwords, dw, max_dwords(obj), obj 420 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c if (++dw == max_dwords(obj)) { obj 421 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c obj = NULL; obj 433 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c list_for_each_entry(obj, &objects, st_link) { obj 435 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c min_t(unsigned int, ndwords - dw, max_dwords(obj)); obj 437 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c err = cpu_check(obj, ncontexts++, rem); obj 500 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c struct drm_i915_gem_object *obj = NULL; obj 521 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c if (!obj) { obj 522 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c obj = create_test_object(parent, file, &objects); obj 523 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c if (IS_ERR(obj)) { obj 524 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c err = PTR_ERR(obj); obj 530 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c err = gpu_fill(obj, ctx, engine, dw); obj 533 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c ndwords, dw, max_dwords(obj), obj 540 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c if (++dw == max_dwords(obj)) { obj 541 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c obj = NULL; obj 554 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c list_for_each_entry(obj, &objects, st_link) { obj 556 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c min_t(unsigned int, ndwords - dw, max_dwords(obj)); obj 558 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c err = cpu_check(obj, ncontexts++, rem); obj 581 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c struct drm_i915_gem_object *obj; obj 588 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c obj = i915_gem_object_create_internal(vma->vm->i915, PAGE_SIZE); obj 589 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c if (IS_ERR(obj)) obj 590 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c return ERR_CAST(obj); obj 592 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 
cmd = i915_gem_object_pin_map(obj, I915_MAP_WB); obj 604 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c __i915_gem_object_flush_map(obj, 0, 64); obj 605 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c i915_gem_object_unpin_map(obj); obj 607 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c vma = i915_vma_instance(obj, vma->vm, NULL); obj 620 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c i915_gem_object_put(obj); obj 625 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c emit_rpcs_query(struct drm_i915_gem_object *obj, obj 636 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c vma = i915_vma_instance(obj, ce->vm, NULL); obj 640 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c i915_gem_object_lock(obj); obj 641 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c err = i915_gem_object_set_to_gtt_domain(obj, false); obj 642 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c i915_gem_object_unlock(obj); obj 669 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c err = i915_request_await_object(rq, batch->obj, false); obj 677 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c err = i915_request_await_object(rq, vma->obj, true); obj 761 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c struct drm_i915_gem_object *obj, obj 771 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c ret = emit_rpcs_query(obj, ce, &rq); obj 783 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c buf = i915_gem_object_pin_map(obj, I915_MAP_WB); obj 801 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c i915_gem_object_unpin_map(obj); obj 835 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c struct drm_i915_gem_object *obj, obj 849 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c ret = __read_slice_count(ce, obj, obj 855 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c ret = __read_slice_count(ce->engine->kernel_context, obj, NULL, &rpcs); obj 868 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c ret = __read_slice_count(ce, obj, NULL, &rpcs); obj 880 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c struct drm_i915_gem_object *obj, obj 894 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c ret = __sseu_finish(name, flags, ce, obj, obj 912 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c struct drm_i915_gem_object *obj; obj 957 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c obj = i915_gem_object_create_internal(i915, PAGE_SIZE); obj 958 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c if (IS_ERR(obj)) { obj 959 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c ret = PTR_ERR(obj); obj 974 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c ret = __sseu_test(name, flags, ce, obj, engine->sseu); obj 979 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c ret = __sseu_test(name, flags, ce, obj, pg_sseu); obj 984 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c ret = __sseu_test(name, flags, ce, obj, engine->sseu); obj 989 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c ret = __sseu_test(name, flags, ce, obj, pg_sseu); obj 1001 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c i915_gem_object_put(obj); obj 1043 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c struct drm_i915_gem_object *obj = NULL; obj 1092 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c if (!obj) { obj 1093 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c obj = create_test_object(ctx, file, &objects); obj 1094 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c if (IS_ERR(obj)) { obj 1095 
obj 1100 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c i915_gem_object_set_readonly(obj);
obj 1103 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c err = gpu_fill(obj, ctx, engine, dw);
obj 1106 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c ndwords, dw, max_dwords(obj),
obj 1112 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c if (++dw == max_dwords(obj)) {
obj 1113 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c obj = NULL;
obj 1124 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c list_for_each_entry(obj, &objects, st_link) {
obj 1126 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c min_t(unsigned int, ndwords - dw, max_dwords(obj));
obj 1130 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c if (i915_gem_object_is_readonly(obj))
obj 1133 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c err = cpu_check(obj, idx++, num_writes);
obj 1169 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c struct drm_i915_gem_object *obj;
obj 1177 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c obj = i915_gem_object_create_internal(i915, PAGE_SIZE);
obj 1178 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c if (IS_ERR(obj))
obj 1179 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c return PTR_ERR(obj);
obj 1181 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c cmd = i915_gem_object_pin_map(obj, I915_MAP_WB);
obj 1197 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c __i915_gem_object_flush_map(obj, 0, 64);
obj 1198 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c i915_gem_object_unpin_map(obj);
obj 1200 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c vma = i915_vma_instance(obj, ctx->vm, NULL);
obj 1225 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c err = i915_request_await_object(rq, vma->obj, false);
obj 1247 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c i915_gem_object_put(obj);
obj 1256 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c struct drm_i915_gem_object *obj;
obj 1266 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c obj = i915_gem_object_create_internal(i915, PAGE_SIZE);
obj 1267 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c if (IS_ERR(obj))
obj 1268 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c return PTR_ERR(obj);
obj 1270 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c cmd = i915_gem_object_pin_map(obj, I915_MAP_WB);
obj 1296 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c i915_gem_object_flush_map(obj);
obj 1297 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c i915_gem_object_unpin_map(obj);
obj 1299 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c vma = i915_vma_instance(obj, ctx->vm, NULL);
obj 1324 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c err = i915_request_await_object(rq, vma->obj, true);
obj 1336 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c i915_gem_object_lock(obj);
obj 1337 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c err = i915_gem_object_set_to_cpu_domain(obj, false);
obj 1338 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c i915_gem_object_unlock(obj);
obj 1342 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c cmd = i915_gem_object_pin_map(obj, I915_MAP_WB);
obj 1349 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c i915_gem_object_unpin_map(obj);
obj 1350 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c i915_gem_object_put(obj);
obj 1361 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c i915_gem_object_put(obj);
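Note: the i915_gem_context.c selftest hits above lean entirely on the kernel's error-pointer convention: constructors return either a valid object or an errno encoded with ERR_PTR(), which callers unwrap via IS_ERR()/PTR_ERR() (or re-wrap with ERR_CAST() when the pointer type changes). A minimal free-standing sketch of that convention follows; foo_create() is a hypothetical stand-in for helpers such as i915_gem_object_create_internal().

        #include <linux/err.h>
        #include <linux/slab.h>

        struct foo { int size; };

        /* On failure, the errno travels inside the returned pointer. */
        static struct foo *foo_create(int size)
        {
                struct foo *obj;

                if (size <= 0)
                        return ERR_PTR(-EINVAL);

                obj = kzalloc(sizeof(*obj), GFP_KERNEL);
                if (!obj)
                        return ERR_PTR(-ENOMEM);

                obj->size = size;
                return obj;
        }

        static int foo_use(void)
        {
                struct foo *obj = foo_create(4096);

                if (IS_ERR(obj))
                        return PTR_ERR(obj); /* unwrap the encoded errno */

                kfree(obj);
                return 0;
        }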
obj 16 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c struct drm_i915_gem_object *obj;
obj 19 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c obj = i915_gem_object_create_shmem(i915, PAGE_SIZE);
obj 20 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c if (IS_ERR(obj))
obj 21 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c return PTR_ERR(obj);
obj 23 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c dmabuf = i915_gem_prime_export(&obj->base, 0);
obj 24 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c i915_gem_object_put(obj);
obj 38 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c struct drm_i915_gem_object *obj;
obj 43 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c obj = i915_gem_object_create_shmem(i915, PAGE_SIZE);
obj 44 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c if (IS_ERR(obj))
obj 45 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c return PTR_ERR(obj);
obj 47 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c dmabuf = i915_gem_prime_export(&obj->base, 0);
obj 63 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c if (import != &obj->base) {
obj 75 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c i915_gem_object_put(obj);
obj 82 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c struct drm_i915_gem_object *obj;
obj 92 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c obj = to_intel_bo(i915_gem_prime_import(&i915->drm, dmabuf));
obj 93 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c if (IS_ERR(obj)) {
obj 95 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c (int)PTR_ERR(obj));
obj 96 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c err = PTR_ERR(obj);
obj 100 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c if (obj->base.dev != &i915->drm) {
obj 106 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c if (obj->base.size != PAGE_SIZE) {
obj 108 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c (long long)obj->base.size, PAGE_SIZE);
obj 121 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c obj_map = i915_gem_object_pin_map(obj, I915_MAP_WB);
obj 133 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c i915_gem_object_unpin_map(obj);
obj 143 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c i915_gem_object_unpin_map(obj);
obj 148 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c i915_gem_object_unpin_map(obj);
obj 155 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c i915_gem_object_put(obj);
obj 164 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c struct drm_i915_gem_object *obj;
obj 183 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c obj = to_intel_bo(i915_gem_prime_import(&i915->drm, dmabuf));
obj 184 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c if (IS_ERR(obj)) {
obj 186 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c (int)PTR_ERR(obj));
obj 187 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c err = PTR_ERR(obj);
obj 193 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c err = i915_gem_object_pin_pages(obj);
obj 200 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c i915_gem_object_unpin_pages(obj);
obj 202 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c i915_gem_object_put(obj);
obj 213 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c struct drm_i915_gem_object *obj;
obj 218 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c obj = i915_gem_object_create_shmem(i915, PAGE_SIZE);
obj 219 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c if (IS_ERR(obj))
obj 220 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c return PTR_ERR(obj);
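Note: the i915_gem_dmabuf.c hits around this point all follow the same CPU-access discipline: i915_gem_object_pin_map() pins the backing pages and returns a kernel mapping, writes are pushed out with i915_gem_object_flush_map(), and the mapping is released with i915_gem_object_unpin_map(). A trimmed sketch of that sequence, assuming the hypothetical helper name cpu_write_dword():

        static int cpu_write_dword(struct drm_i915_gem_object *obj, u32 value)
        {
                u32 *map;

                /* pin pages and get a write-back CPU mapping */
                map = i915_gem_object_pin_map(obj, I915_MAP_WB);
                if (IS_ERR(map))
                        return PTR_ERR(map);

                map[0] = value;
                i915_gem_object_flush_map(obj); /* make the write visible beyond CPU caches */
                i915_gem_object_unpin_map(obj);
                return 0;
        }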
obj 222 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c dmabuf = i915_gem_prime_export(&obj->base, 0);
obj 229 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c i915_gem_object_put(obj);
obj 253 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c i915_gem_object_put(obj);
obj 260 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c struct drm_i915_gem_object *obj;
obj 265 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c obj = i915_gem_object_create_shmem(i915, 2 * PAGE_SIZE);
obj 266 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c if (IS_ERR(obj))
obj 267 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c return PTR_ERR(obj);
obj 269 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c dmabuf = i915_gem_prime_export(&obj->base, 0);
obj 270 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c i915_gem_object_put(obj);
obj 294 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c ptr = i915_gem_object_pin_map(obj, I915_MAP_WB);
obj 301 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c i915_gem_object_flush_map(obj);
obj 302 drivers/gpu/drm/i915/gem/selftests/i915_gem_dmabuf.c i915_gem_object_unpin_map(obj);
obj 77 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c static int check_partial_mapping(struct drm_i915_gem_object *obj,
obj 81 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c const unsigned int nreal = obj->scratch / PAGE_SIZE;
obj 82 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c const unsigned long npages = obj->base.size / PAGE_SIZE;
obj 92 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c err = i915_gem_object_set_tiling(obj, tile->tiling, tile->stride);
obj 99 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c GEM_BUG_ON(i915_gem_object_get_tiling(obj) != tile->tiling);
obj 100 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c GEM_BUG_ON(i915_gem_object_get_stride(obj) != tile->stride);
obj 102 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c i915_gem_object_lock(obj);
obj 103 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c err = i915_gem_object_set_to_gtt_domain(obj, true);
obj 104 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c i915_gem_object_unlock(obj);
obj 112 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c compute_partial_view(obj, page, MIN_CHUNK_PAGES);
obj 122 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c vma = i915_gem_object_ggtt_pin(obj, &view, 0, 0, PIN_MAPPABLE);
obj 144 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c if (offset >= obj->base.size)
obj 147 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c intel_gt_flush_ggtt_writes(&to_i915(obj->base.dev)->gt);
obj 149 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c p = i915_gem_object_get_page(obj, offset >> PAGE_SHIFT);
obj 158 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c tile->tiling ? tile_row_pages(obj) : 0,
obj 182 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c struct drm_i915_gem_object *obj;
obj 195 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c obj = huge_gem_object(i915,
obj 198 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c if (IS_ERR(obj))
obj 199 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c return PTR_ERR(obj);
obj 201 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c err = i915_gem_object_pin_pages(obj);
obj 204 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c nreal, obj->base.size / PAGE_SIZE, err);
obj 222 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c err = check_partial_mapping(obj, &tile, end);
obj 280 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c err = check_partial_mapping(obj, &tile, end);
obj 288 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c err = check_partial_mapping(obj, &tile, end);
obj 297 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c err = check_partial_mapping(obj, &tile, end);
obj 308 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c err = check_partial_mapping(obj, &tile, end);
obj 322 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c i915_gem_object_unpin_pages(obj);
obj 324 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c i915_gem_object_put(obj);
obj 328 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c static int make_obj_busy(struct drm_i915_gem_object *obj)
obj 330 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c struct drm_i915_private *i915 = to_i915(obj->base.dev);
obj 336 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c vma = i915_vma_instance(obj, &i915->ggtt.vm, NULL);
obj 354 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c err = i915_request_await_object(rq, vma->obj, true);
obj 364 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c i915_gem_object_put(obj); /* leave it only alive via its active ref */
obj 373 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c struct drm_i915_gem_object *obj;
obj 376 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c obj = i915_gem_object_create_internal(i915, size);
obj 377 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c if (IS_ERR(obj))
obj 380 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c err = create_mmap_offset(obj);
obj 381 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c i915_gem_object_put(obj);
obj 423 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c struct drm_i915_gem_object *obj;
obj 462 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c obj = i915_gem_object_create_internal(i915, PAGE_SIZE);
obj 463 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c if (IS_ERR(obj)) {
obj 464 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c err = PTR_ERR(obj);
obj 468 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c err = create_mmap_offset(obj);
obj 480 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c i915_gem_object_put(obj);
obj 487 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c obj = i915_gem_object_create_internal(i915, PAGE_SIZE);
obj 488 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c if (IS_ERR(obj)) {
obj 489 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c err = PTR_ERR(obj);
obj 494 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c err = make_obj_busy(obj);
obj 510 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c i915_gem_object_put(obj);
obj 16 drivers/gpu/drm/i915/gem/selftests/i915_gem_object.c struct drm_i915_gem_object *obj;
obj 21 drivers/gpu/drm/i915/gem/selftests/i915_gem_object.c obj = i915_gem_object_create_shmem(i915, PAGE_SIZE);
obj 22 drivers/gpu/drm/i915/gem/selftests/i915_gem_object.c if (IS_ERR(obj)) {
obj 23 drivers/gpu/drm/i915/gem/selftests/i915_gem_object.c err = PTR_ERR(obj);
obj 29 drivers/gpu/drm/i915/gem/selftests/i915_gem_object.c i915_gem_object_put(obj);
obj 38 drivers/gpu/drm/i915/gem/selftests/i915_gem_object.c struct drm_i915_gem_object *obj;
obj 44 drivers/gpu/drm/i915/gem/selftests/i915_gem_object.c obj = huge_gem_object(i915,
obj 47 drivers/gpu/drm/i915/gem/selftests/i915_gem_object.c if (IS_ERR(obj))
obj 48 drivers/gpu/drm/i915/gem/selftests/i915_gem_object.c return PTR_ERR(obj);
obj 50 drivers/gpu/drm/i915/gem/selftests/i915_gem_object.c err = i915_gem_object_pin_pages(obj);
obj 53 drivers/gpu/drm/i915/gem/selftests/i915_gem_object.c nreal, obj->base.size / PAGE_SIZE, err);
obj 57 drivers/gpu/drm/i915/gem/selftests/i915_gem_object.c for (n = 0; n < obj->base.size / PAGE_SIZE; n++) {
obj 58 drivers/gpu/drm/i915/gem/selftests/i915_gem_object.c if (i915_gem_object_get_page(obj, n) !=
obj 59 drivers/gpu/drm/i915/gem/selftests/i915_gem_object.c i915_gem_object_get_page(obj, n % nreal)) {
obj 68 drivers/gpu/drm/i915/gem/selftests/i915_gem_object.c i915_gem_object_unpin_pages(obj);
obj 70 drivers/gpu/drm/i915/gem/selftests/i915_gem_object.c i915_gem_object_put(obj);
obj 19 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c struct drm_i915_gem_object *obj;
obj 46 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c obj = huge_gem_object(i915, phys_sz, sz);
obj 47 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c if (IS_ERR(obj)) {
obj 48 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c err = PTR_ERR(obj);
obj 52 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c vaddr = i915_gem_object_pin_map(obj, I915_MAP_WB);
obj 63 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c huge_gem_object_phys_size(obj) / sizeof(u32));
obj 65 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c if (!(obj->cache_coherent & I915_BO_CACHE_COHERENT_FOR_WRITE))
obj 66 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c obj->cache_dirty = true;
obj 69 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c err = i915_gem_object_fill_blt(obj, ce, val);
obj 74 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c i915_gem_object_lock(obj);
obj 75 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c err = i915_gem_object_set_to_cpu_domain(obj, false);
obj 76 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c i915_gem_object_unlock(obj);
obj 80 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c for (i = 0; i < huge_gem_object_phys_size(obj) / sizeof(u32); ++i) {
obj 89 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c i915_gem_object_unpin_map(obj);
obj 90 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c i915_gem_object_put(obj);
obj 96 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c i915_gem_object_unpin_map(obj);
obj 98 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c i915_gem_object_put(obj);
obj 14 drivers/gpu/drm/i915/gem/selftests/i915_gem_phys.c struct drm_i915_gem_object *obj;
obj 21 drivers/gpu/drm/i915/gem/selftests/i915_gem_phys.c obj = i915_gem_object_create_shmem(i915, PAGE_SIZE);
obj 22 drivers/gpu/drm/i915/gem/selftests/i915_gem_phys.c if (IS_ERR(obj)) {
obj 23 drivers/gpu/drm/i915/gem/selftests/i915_gem_phys.c err = PTR_ERR(obj);
obj 29 drivers/gpu/drm/i915/gem/selftests/i915_gem_phys.c err = i915_gem_object_attach_phys(obj, PAGE_SIZE);
obj 36 drivers/gpu/drm/i915/gem/selftests/i915_gem_phys.c if (obj->ops != &i915_gem_phys_ops) {
obj 42 drivers/gpu/drm/i915/gem/selftests/i915_gem_phys.c if (!atomic_read(&obj->mm.pages_pin_count)) {
obj 49 drivers/gpu/drm/i915/gem/selftests/i915_gem_phys.c i915_gem_object_lock(obj);
obj 50 drivers/gpu/drm/i915/gem/selftests/i915_gem_phys.c err = i915_gem_object_set_to_gtt_domain(obj, true);
obj 51 drivers/gpu/drm/i915/gem/selftests/i915_gem_phys.c i915_gem_object_unlock(obj);
obj 59 drivers/gpu/drm/i915/gem/selftests/i915_gem_phys.c i915_gem_object_put(obj);
obj 44 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c struct drm_i915_gem_object *obj;
obj 52 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c obj = i915_gem_object_create_internal(vma->vm->i915, size);
obj 53 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c if (IS_ERR(obj))
obj 54 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c return ERR_CAST(obj);
obj 56 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c cmd = i915_gem_object_pin_map(obj, I915_MAP_WC);
obj 85 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c i915_gem_object_unpin_map(obj);
obj 87 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c vma = i915_vma_instance(obj, vma->vm, NULL);
obj 100 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c i915_gem_object_put(obj);
obj 142 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c err = i915_request_await_object(rq, batch->obj, false);
obj 150 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c err = i915_request_await_object(rq, vma->obj, true);
obj 130 drivers/gpu/drm/i915/gt/intel_context.c vma->obj->mm.dirty = true;
obj 506 drivers/gpu/drm/i915/gt/intel_engine_cs.c i915_gem_object_unpin_map(vma->obj);
obj 507 drivers/gpu/drm/i915/gt/intel_engine_cs.c i915_gem_object_put(vma->obj);
obj 537 drivers/gpu/drm/i915/gt/intel_engine_cs.c struct drm_i915_gem_object *obj;
obj 549 drivers/gpu/drm/i915/gt/intel_engine_cs.c obj = i915_gem_object_create_internal(engine->i915, PAGE_SIZE);
obj 550 drivers/gpu/drm/i915/gt/intel_engine_cs.c if (IS_ERR(obj)) {
obj 552 drivers/gpu/drm/i915/gt/intel_engine_cs.c return PTR_ERR(obj);
obj 555 drivers/gpu/drm/i915/gt/intel_engine_cs.c i915_gem_object_set_cache_coherency(obj, I915_CACHE_LLC);
obj 557 drivers/gpu/drm/i915/gt/intel_engine_cs.c vma = i915_vma_instance(obj, &engine->gt->ggtt->vm, NULL);
obj 563 drivers/gpu/drm/i915/gt/intel_engine_cs.c vaddr = i915_gem_object_pin_map(obj, I915_MAP_WB);
obj 581 drivers/gpu/drm/i915/gt/intel_engine_cs.c i915_gem_object_unpin_map(obj);
obj 583 drivers/gpu/drm/i915/gt/intel_engine_cs.c i915_gem_object_put(obj);
obj 37 drivers/gpu/drm/i915/gt/intel_engine_pool.c i915_gem_object_put(node->obj);
obj 46 drivers/gpu/drm/i915/gt/intel_engine_pool.c struct dma_resv *resv = node->obj->base.resv;
obj 54 drivers/gpu/drm/i915/gt/intel_engine_pool.c err = i915_gem_object_pin_pages(node->obj);
obj 59 drivers/gpu/drm/i915/gt/intel_engine_pool.c i915_gem_object_make_unshrinkable(node->obj);
obj 69 drivers/gpu/drm/i915/gt/intel_engine_pool.c struct list_head *list = bucket_for_size(pool, node->obj->base.size);
obj 74 drivers/gpu/drm/i915/gt/intel_engine_pool.c i915_gem_object_unpin_pages(node->obj);
obj 77 drivers/gpu/drm/i915/gt/intel_engine_pool.c i915_gem_object_make_purgeable(node->obj);
obj 89 drivers/gpu/drm/i915/gt/intel_engine_pool.c struct drm_i915_gem_object *obj;
obj 99 drivers/gpu/drm/i915/gt/intel_engine_pool.c obj = i915_gem_object_create_internal(engine->i915, sz);
obj 100 drivers/gpu/drm/i915/gt/intel_engine_pool.c if (IS_ERR(obj)) {
obj 103 drivers/gpu/drm/i915/gt/intel_engine_pool.c return ERR_CAST(obj);
obj 106 drivers/gpu/drm/i915/gt/intel_engine_pool.c i915_gem_object_set_readonly(obj);
obj 108 drivers/gpu/drm/i915/gt/intel_engine_pool.c node->obj = obj;
obj 127 drivers/gpu/drm/i915/gt/intel_engine_pool.c if (node->obj->base.size < size)
obj 24 drivers/gpu/drm/i915/gt/intel_engine_pool_types.h struct drm_i915_gem_object *obj;
obj 228 drivers/gpu/drm/i915/gt/intel_gt.c struct drm_i915_gem_object *obj;
obj 232 drivers/gpu/drm/i915/gt/intel_gt.c obj = i915_gem_object_create_stolen(i915, size);
obj 233 drivers/gpu/drm/i915/gt/intel_gt.c if (!obj)
obj 234 drivers/gpu/drm/i915/gt/intel_gt.c obj = i915_gem_object_create_internal(i915, size);
obj 235 drivers/gpu/drm/i915/gt/intel_gt.c if (IS_ERR(obj)) {
obj 237 drivers/gpu/drm/i915/gt/intel_gt.c return PTR_ERR(obj);
obj 240 drivers/gpu/drm/i915/gt/intel_gt.c vma = i915_vma_instance(obj, &gt->ggtt->vm, NULL);
obj 255 drivers/gpu/drm/i915/gt/intel_gt.c i915_gem_object_put(obj);
obj 1758 drivers/gpu/drm/i915/gt/intel_lrc.c i915_gem_object_unpin_map(ce->state->obj);
obj 1799 drivers/gpu/drm/i915/gt/intel_lrc.c vaddr = i915_gem_object_pin_map(ce->state->obj,
obj 1818 drivers/gpu/drm/i915/gt/intel_lrc.c i915_gem_object_unpin_map(ce->state->obj);
obj 2216 drivers/gpu/drm/i915/gt/intel_lrc.c struct drm_i915_gem_object *obj;
obj 2220 drivers/gpu/drm/i915/gt/intel_lrc.c obj = i915_gem_object_create_shmem(engine->i915, CTX_WA_BB_OBJ_SIZE);
obj 2221 drivers/gpu/drm/i915/gt/intel_lrc.c if (IS_ERR(obj))
obj 2222 drivers/gpu/drm/i915/gt/intel_lrc.c return PTR_ERR(obj);
obj 2224 drivers/gpu/drm/i915/gt/intel_lrc.c vma = i915_vma_instance(obj, &engine->gt->ggtt->vm, NULL);
obj 2238 drivers/gpu/drm/i915/gt/intel_lrc.c i915_gem_object_put(obj);
obj 2290 drivers/gpu/drm/i915/gt/intel_lrc.c page = i915_gem_object_get_dirty_page(wa_ctx->vma->obj, 0);
obj 33 drivers/gpu/drm/i915/gt/intel_renderstate.c struct drm_i915_gem_object *obj;
obj 86 drivers/gpu/drm/i915/gt/intel_renderstate.c ret = i915_gem_object_prepare_write(so->obj, &needs_clflush);
obj 90 drivers/gpu/drm/i915/gt/intel_renderstate.c d = kmap_atomic(i915_gem_object_get_dirty_page(so->obj, 0));
obj 168 drivers/gpu/drm/i915/gt/intel_renderstate.c i915_gem_object_finish_access(so->obj);
obj 192 drivers/gpu/drm/i915/gt/intel_renderstate.c so.obj = i915_gem_object_create_internal(engine->i915, PAGE_SIZE);
obj 193 drivers/gpu/drm/i915/gt/intel_renderstate.c if (IS_ERR(so.obj))
obj 194 drivers/gpu/drm/i915/gt/intel_renderstate.c return PTR_ERR(so.obj);
obj 196 drivers/gpu/drm/i915/gt/intel_renderstate.c so.vma = i915_vma_instance(so.obj, &engine->gt->ggtt->vm, NULL);
obj 225 drivers/gpu/drm/i915/gt/intel_renderstate.c err = i915_request_await_object(rq, so.vma->obj, false);
obj 234 drivers/gpu/drm/i915/gt/intel_renderstate.c i915_gem_object_put(so.obj);
obj 640 drivers/gpu/drm/i915/gt/intel_reset.c node = &vma->obj->base.vma_node;
obj 519 drivers/gpu/drm/i915/gt/intel_ringbuffer.c struct drm_i915_gem_object *obj = engine->status_page.vma->obj;
obj 521 drivers/gpu/drm/i915/gt/intel_ringbuffer.c GEM_BUG_ON(!i915_gem_object_has_pinned_pages(obj));
obj 522 drivers/gpu/drm/i915/gt/intel_ringbuffer.c return sg_page(obj->mm.pages->sgl);
obj 1202 drivers/gpu/drm/i915/gt/intel_ringbuffer.c if (vma->obj->stolen)
obj 1214 drivers/gpu/drm/i915/gt/intel_ringbuffer.c addr = i915_gem_object_pin_map(vma->obj,
obj 1258 drivers/gpu/drm/i915/gt/intel_ringbuffer.c i915_gem_object_unpin_map(vma->obj);
obj 1271 drivers/gpu/drm/i915/gt/intel_ringbuffer.c struct drm_i915_gem_object *obj;
obj 1274 drivers/gpu/drm/i915/gt/intel_ringbuffer.c obj = i915_gem_object_create_stolen(i915, size);
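Note: the intel_gt.c hits above and the intel_ringbuffer.c hits immediately above and below show a two-tier allocation: stolen memory is tried first and, in this vintage of the driver, i915_gem_object_create_stolen() returns NULL rather than an ERR_PTR on failure, so the fallback checks both conventions. A sketch reassembled from those entries, with the hypothetical wrapper name alloc_scratch():

        static struct drm_i915_gem_object *
        alloc_scratch(struct drm_i915_private *i915, resource_size_t size)
        {
                struct drm_i915_gem_object *obj;

                obj = i915_gem_object_create_stolen(i915, size); /* NULL on failure */
                if (!obj)
                        obj = i915_gem_object_create_internal(i915, size); /* ERR_PTR on failure */

                return obj; /* caller checks IS_ERR() */
        }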
obj 1275 drivers/gpu/drm/i915/gt/intel_ringbuffer.c if (!obj)
obj 1276 drivers/gpu/drm/i915/gt/intel_ringbuffer.c obj = i915_gem_object_create_internal(i915, size);
obj 1277 drivers/gpu/drm/i915/gt/intel_ringbuffer.c if (IS_ERR(obj))
obj 1278 drivers/gpu/drm/i915/gt/intel_ringbuffer.c return ERR_CAST(obj);
obj 1285 drivers/gpu/drm/i915/gt/intel_ringbuffer.c i915_gem_object_set_readonly(obj);
obj 1287 drivers/gpu/drm/i915/gt/intel_ringbuffer.c vma = i915_vma_instance(obj, vm, NULL);
obj 1294 drivers/gpu/drm/i915/gt/intel_ringbuffer.c i915_gem_object_put(obj);
obj 1349 drivers/gpu/drm/i915/gt/intel_ringbuffer.c i915_gem_object_put(ce->state->obj);
obj 1406 drivers/gpu/drm/i915/gt/intel_ringbuffer.c struct drm_i915_gem_object *obj;
obj 1410 drivers/gpu/drm/i915/gt/intel_ringbuffer.c obj = i915_gem_object_create_shmem(i915, engine->context_size);
obj 1411 drivers/gpu/drm/i915/gt/intel_ringbuffer.c if (IS_ERR(obj))
obj 1412 drivers/gpu/drm/i915/gt/intel_ringbuffer.c return ERR_CAST(obj);
obj 1430 drivers/gpu/drm/i915/gt/intel_ringbuffer.c i915_gem_object_set_cache_coherency(obj, I915_CACHE_L3_LLC);
obj 1435 drivers/gpu/drm/i915/gt/intel_ringbuffer.c vaddr = i915_gem_object_pin_map(obj, I915_MAP_WB);
obj 1451 drivers/gpu/drm/i915/gt/intel_ringbuffer.c i915_gem_object_flush_map(obj);
obj 1452 drivers/gpu/drm/i915/gt/intel_ringbuffer.c i915_gem_object_unpin_map(obj);
obj 1455 drivers/gpu/drm/i915/gt/intel_ringbuffer.c vma = i915_vma_instance(obj, &engine->gt->ggtt->vm, NULL);
obj 1464 drivers/gpu/drm/i915/gt/intel_ringbuffer.c i915_gem_object_unpin_map(obj);
obj 1466 drivers/gpu/drm/i915/gt/intel_ringbuffer.c i915_gem_object_put(obj);
obj 37 drivers/gpu/drm/i915/gt/intel_timeline.c struct drm_i915_gem_object *obj;
obj 40 drivers/gpu/drm/i915/gt/intel_timeline.c obj = i915_gem_object_create_internal(i915, PAGE_SIZE);
obj 41 drivers/gpu/drm/i915/gt/intel_timeline.c if (IS_ERR(obj))
obj 42 drivers/gpu/drm/i915/gt/intel_timeline.c return ERR_CAST(obj);
obj 44 drivers/gpu/drm/i915/gt/intel_timeline.c i915_gem_object_set_cache_coherency(obj, I915_CACHE_LLC);
obj 46 drivers/gpu/drm/i915/gt/intel_timeline.c vma = i915_vma_instance(obj, &gt->ggtt->vm, NULL);
obj 48 drivers/gpu/drm/i915/gt/intel_timeline.c i915_gem_object_put(obj);
obj 131 drivers/gpu/drm/i915/gt/intel_timeline.c i915_gem_object_unpin_map(cl->hwsp->vma->obj);
obj 170 drivers/gpu/drm/i915/gt/intel_timeline.c vaddr = i915_gem_object_pin_map(hwsp->vma->obj, I915_MAP_WB);
obj 242 drivers/gpu/drm/i915/gt/intel_timeline.c vaddr = i915_gem_object_pin_map(hwsp->obj, I915_MAP_WB);
obj 289 drivers/gpu/drm/i915/gt/intel_timeline.c i915_gem_object_unpin_map(timeline->hwsp_ggtt->obj);
obj 1418 drivers/gpu/drm/i915/gt/intel_workarounds.c struct drm_i915_gem_object *obj;
obj 1424 drivers/gpu/drm/i915/gt/intel_workarounds.c obj = i915_gem_object_create_internal(vm->i915, size);
obj 1425 drivers/gpu/drm/i915/gt/intel_workarounds.c if (IS_ERR(obj))
obj 1426 drivers/gpu/drm/i915/gt/intel_workarounds.c return ERR_CAST(obj);
obj 1428 drivers/gpu/drm/i915/gt/intel_workarounds.c i915_gem_object_set_cache_coherency(obj, I915_CACHE_LLC);
obj 1430 drivers/gpu/drm/i915/gt/intel_workarounds.c vma = i915_vma_instance(obj, vm, NULL);
obj 1444 drivers/gpu/drm/i915/gt/intel_workarounds.c i915_gem_object_put(obj);
obj 1534 drivers/gpu/drm/i915/gt/intel_workarounds.c results = i915_gem_object_pin_map(vma->obj, I915_MAP_WB);
obj 1549 drivers/gpu/drm/i915/gt/intel_workarounds.c i915_gem_object_unpin_map(vma->obj);
obj 84 drivers/gpu/drm/i915/gt/selftest_context.c vaddr = i915_gem_object_pin_map(ce->state->obj,
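Note: the intel_timeline.c and intel_workarounds.c hits above trace a recurring setup sequence: create an internal object, mark it cache-coherent for the LLC, look up a VMA in the target address space, and drop the object reference if the VMA lookup fails. A sketch reassembled from those entries (the i915 and gt locals are assumed to be in scope):

        struct drm_i915_gem_object *obj;
        struct i915_vma *vma;

        obj = i915_gem_object_create_internal(i915, PAGE_SIZE);
        if (IS_ERR(obj))
                return ERR_CAST(obj); /* re-wrap as the caller's pointer type */

        i915_gem_object_set_cache_coherency(obj, I915_CACHE_LLC);

        vma = i915_vma_instance(obj, &gt->ggtt->vm, NULL);
        if (IS_ERR(vma))
                i915_gem_object_put(obj); /* lookup failed: drop our reference */

        return vma;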
obj 137 drivers/gpu/drm/i915/gt/selftest_context.c i915_gem_object_unpin_map(ce->state->obj);
obj 47 drivers/gpu/drm/i915/gt/selftest_hangcheck.c struct drm_i915_gem_object *obj;
obj 73 drivers/gpu/drm/i915/gt/selftest_hangcheck.c h->obj = i915_gem_object_create_internal(gt->i915, PAGE_SIZE);
obj 74 drivers/gpu/drm/i915/gt/selftest_hangcheck.c if (IS_ERR(h->obj)) {
obj 75 drivers/gpu/drm/i915/gt/selftest_hangcheck.c err = PTR_ERR(h->obj);
obj 87 drivers/gpu/drm/i915/gt/selftest_hangcheck.c vaddr = i915_gem_object_pin_map(h->obj,
obj 100 drivers/gpu/drm/i915/gt/selftest_hangcheck.c i915_gem_object_put(h->obj);
obj 121 drivers/gpu/drm/i915/gt/selftest_hangcheck.c err = i915_request_await_object(rq, vma->obj,
obj 135 drivers/gpu/drm/i915/gt/selftest_hangcheck.c struct drm_i915_gem_object *obj;
obj 143 drivers/gpu/drm/i915/gt/selftest_hangcheck.c obj = i915_gem_object_create_internal(gt->i915, PAGE_SIZE);
obj 144 drivers/gpu/drm/i915/gt/selftest_hangcheck.c if (IS_ERR(obj))
obj 145 drivers/gpu/drm/i915/gt/selftest_hangcheck.c return ERR_CAST(obj);
obj 147 drivers/gpu/drm/i915/gt/selftest_hangcheck.c vaddr = i915_gem_object_pin_map(obj, i915_coherent_map_type(gt->i915));
obj 149 drivers/gpu/drm/i915/gt/selftest_hangcheck.c i915_gem_object_put(obj);
obj 153 drivers/gpu/drm/i915/gt/selftest_hangcheck.c i915_gem_object_unpin_map(h->obj);
obj 154 drivers/gpu/drm/i915/gt/selftest_hangcheck.c i915_gem_object_put(h->obj);
obj 156 drivers/gpu/drm/i915/gt/selftest_hangcheck.c h->obj = obj;
obj 159 drivers/gpu/drm/i915/gt/selftest_hangcheck.c vma = i915_vma_instance(h->obj, vm, NULL);
obj 280 drivers/gpu/drm/i915/gt/selftest_hangcheck.c i915_gem_object_unpin_map(h->obj);
obj 281 drivers/gpu/drm/i915/gt/selftest_hangcheck.c i915_gem_object_put(h->obj);
obj 1154 drivers/gpu/drm/i915/gt/selftest_hangcheck.c err = i915_gem_object_set_tiling(arg->vma->obj, I915_TILING_Y, 512);
obj 1187 drivers/gpu/drm/i915/gt/selftest_hangcheck.c struct drm_i915_gem_object *obj;
obj 1204 drivers/gpu/drm/i915/gt/selftest_hangcheck.c obj = i915_gem_object_create_internal(gt->i915, SZ_1M);
obj 1205 drivers/gpu/drm/i915/gt/selftest_hangcheck.c if (IS_ERR(obj)) {
obj 1206 drivers/gpu/drm/i915/gt/selftest_hangcheck.c err = PTR_ERR(obj);
obj 1211 drivers/gpu/drm/i915/gt/selftest_hangcheck.c err = i915_gem_object_set_tiling(obj, I915_TILING_X, 512);
obj 1218 drivers/gpu/drm/i915/gt/selftest_hangcheck.c arg.vma = i915_vma_instance(obj, vm, NULL);
obj 1250 drivers/gpu/drm/i915/gt/selftest_hangcheck.c err = i915_request_await_object(rq, arg.vma->obj,
obj 1319 drivers/gpu/drm/i915/gt/selftest_hangcheck.c i915_gem_object_put(obj);
obj 227 drivers/gpu/drm/i915/gt/selftest_lrc.c struct drm_i915_gem_object *obj;
obj 246 drivers/gpu/drm/i915/gt/selftest_lrc.c obj = i915_gem_object_create_internal(i915, PAGE_SIZE);
obj 247 drivers/gpu/drm/i915/gt/selftest_lrc.c if (IS_ERR(obj)) {
obj 248 drivers/gpu/drm/i915/gt/selftest_lrc.c err = PTR_ERR(obj);
obj 252 drivers/gpu/drm/i915/gt/selftest_lrc.c vma = i915_vma_instance(obj, &i915->ggtt.vm, NULL);
obj 258 drivers/gpu/drm/i915/gt/selftest_lrc.c vaddr = i915_gem_object_pin_map(obj, I915_MAP_WC);
obj 292 drivers/gpu/drm/i915/gt/selftest_lrc.c i915_gem_object_unpin_map(obj);
obj 294 drivers/gpu/drm/i915/gt/selftest_lrc.c i915_gem_object_put(obj);
obj 307 drivers/gpu/drm/i915/gt/selftest_lrc.c struct drm_i915_gem_object *obj;
obj 334 drivers/gpu/drm/i915/gt/selftest_lrc.c obj = i915_gem_object_create_internal(i915, PAGE_SIZE);
obj 335 drivers/gpu/drm/i915/gt/selftest_lrc.c if (IS_ERR(obj)) {
obj 336 drivers/gpu/drm/i915/gt/selftest_lrc.c err = PTR_ERR(obj);
obj 340 drivers/gpu/drm/i915/gt/selftest_lrc.c map = i915_gem_object_pin_map(obj, I915_MAP_WC);
obj 346 drivers/gpu/drm/i915/gt/selftest_lrc.c vma = i915_vma_instance(obj, &i915->ggtt.vm, NULL);
obj 471 drivers/gpu/drm/i915/gt/selftest_lrc.c i915_gem_object_unpin_map(obj);
obj 473 drivers/gpu/drm/i915/gt/selftest_lrc.c i915_gem_object_put(obj);
obj 1462 drivers/gpu/drm/i915/gt/selftest_lrc.c err = i915_request_await_object(rq, vma->obj, false);
obj 21 drivers/gpu/drm/i915/gt/selftest_timeline.c struct drm_i915_gem_object *obj = tl->hwsp_ggtt->obj;
obj 23 drivers/gpu/drm/i915/gt/selftest_timeline.c GEM_BUG_ON(!i915_gem_object_has_pinned_pages(obj));
obj 24 drivers/gpu/drm/i915/gt/selftest_timeline.c return sg_page(obj->mm.pages->sgl);
obj 116 drivers/gpu/drm/i915/gt/selftest_workarounds.c err = i915_request_await_object(rq, vma->obj, true);
obj 357 drivers/gpu/drm/i915/gt/selftest_workarounds.c struct drm_i915_gem_object *obj;
obj 361 drivers/gpu/drm/i915/gt/selftest_workarounds.c obj = i915_gem_object_create_internal(ctx->i915, 16 * PAGE_SIZE);
obj 362 drivers/gpu/drm/i915/gt/selftest_workarounds.c if (IS_ERR(obj))
obj 363 drivers/gpu/drm/i915/gt/selftest_workarounds.c return ERR_CAST(obj);
obj 365 drivers/gpu/drm/i915/gt/selftest_workarounds.c vma = i915_vma_instance(obj, ctx->vm, NULL);
obj 378 drivers/gpu/drm/i915/gt/selftest_workarounds.c i915_gem_object_put(obj);
obj 503 drivers/gpu/drm/i915/gt/selftest_workarounds.c cs = i915_gem_object_pin_map(batch->obj, I915_MAP_WC);
obj 552 drivers/gpu/drm/i915/gt/selftest_workarounds.c i915_gem_object_flush_map(batch->obj);
obj 553 drivers/gpu/drm/i915/gt/selftest_workarounds.c i915_gem_object_unpin_map(batch->obj);
obj 587 drivers/gpu/drm/i915/gt/selftest_workarounds.c results = i915_gem_object_pin_map(scratch->obj, I915_MAP_WB);
obj 666 drivers/gpu/drm/i915/gt/selftest_workarounds.c i915_gem_object_unpin_map(scratch->obj);
obj 820 drivers/gpu/drm/i915/gt/selftest_workarounds.c cs = i915_gem_object_pin_map(batch->obj, I915_MAP_WC);
obj 838 drivers/gpu/drm/i915/gt/selftest_workarounds.c i915_gem_object_flush_map(batch->obj);
obj 862 drivers/gpu/drm/i915/gt/selftest_workarounds.c i915_gem_object_unpin_map(batch->obj);
obj 946 drivers/gpu/drm/i915/gt/selftest_workarounds.c a = i915_gem_object_pin_map(A->obj, I915_MAP_WB);
obj 950 drivers/gpu/drm/i915/gt/selftest_workarounds.c b = i915_gem_object_pin_map(B->obj, I915_MAP_WB);
obj 968 drivers/gpu/drm/i915/gt/selftest_workarounds.c i915_gem_object_unpin_map(B->obj);
obj 970 drivers/gpu/drm/i915/gt/selftest_workarounds.c i915_gem_object_unpin_map(A->obj);
obj 94 drivers/gpu/drm/i915/gt/uc/intel_guc.c vaddr = i915_gem_object_pin_map(vma->obj, I915_MAP_WB);
obj 594 drivers/gpu/drm/i915/gt/uc/intel_guc.c struct drm_i915_gem_object *obj;
obj 599 drivers/gpu/drm/i915/gt/uc/intel_guc.c obj = i915_gem_object_create_shmem(gt->i915, size);
obj 600 drivers/gpu/drm/i915/gt/uc/intel_guc.c if (IS_ERR(obj))
obj 601 drivers/gpu/drm/i915/gt/uc/intel_guc.c return ERR_CAST(obj);
obj 603 drivers/gpu/drm/i915/gt/uc/intel_guc.c vma = i915_vma_instance(obj, &gt->ggtt->vm, NULL);
obj 617 drivers/gpu/drm/i915/gt/uc/intel_guc.c i915_gem_object_put(obj);
obj 125 drivers/gpu/drm/i915/gt/uc/intel_guc_ads.c i915_gem_object_flush_map(guc->ads_vma->obj);
obj 148 drivers/gpu/drm/i915/gt/uc/intel_guc_ads.c blob = i915_gem_object_pin_map(vma->obj, I915_MAP_WB);
obj 166 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c blob = i915_gem_object_pin_map(vma->obj, I915_MAP_WB);
obj 157 drivers/gpu/drm/i915/gt/uc/intel_guc_log.c relay_reserve(log->relay.channel, log->vma->obj->base.size);
obj 343 drivers/gpu/drm/i915/gt/uc/intel_guc_log.c vaddr = i915_gem_object_pin_map(log->vma->obj, I915_MAP_WC);
obj 356 drivers/gpu/drm/i915/gt/uc/intel_guc_log.c i915_gem_object_unpin_map(log->vma->obj);
obj 322 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c vaddr = i915_gem_object_pin_map(vma->obj, I915_MAP_WB);
obj 831 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c vaddr = i915_gem_object_pin_map(vma->obj, I915_MAP_WB);
obj 862 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c i915_gem_object_unpin_map(client->vma->obj);
obj 57 drivers/gpu/drm/i915/gt/uc/intel_huc.c vaddr = i915_gem_object_pin_map(vma->obj, I915_MAP_WB);
obj 66 drivers/gpu/drm/i915/gt/uc/intel_huc.c i915_gem_object_unpin_map(vma->obj);
obj 115 drivers/gpu/drm/i915/gt/uc/intel_uc.c uc->load_err_log = i915_gem_object_get(guc->log.vma->obj);
obj 263 drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c struct drm_i915_gem_object *obj;
obj 372 drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c obj = i915_gem_object_create_shmem_from_data(i915, fw->data, fw->size);
obj 373 drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c if (IS_ERR(obj)) {
obj 374 drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c err = PTR_ERR(obj);
obj 378 drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c uc_fw->obj = obj;
obj 413 drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c struct drm_i915_gem_object *obj = uc_fw->obj;
obj 417 drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c .node.size = obj->base.size,
obj 418 drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c .pages = obj->mm.pages,
obj 422 drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c GEM_BUG_ON(!i915_gem_object_has_pinned_pages(obj));
obj 434 drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c struct drm_i915_gem_object *obj = uc_fw->obj;
obj 438 drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c ggtt->vm.clear_range(&ggtt->vm, start, obj->base.size);
obj 544 drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c err = i915_gem_object_pin_pages(uc_fw->obj);
obj 559 drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c i915_gem_object_unpin_pages(uc_fw->obj);
obj 573 drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c i915_gem_object_put(fetch_and_zero(&uc_fw->obj));
obj 589 drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c struct sg_table *pages = uc_fw->obj->mm.pages;
obj 73 drivers/gpu/drm/i915/gt/uc/intel_uc_fw.h struct drm_i915_gem_object *obj;
obj 1858 drivers/gpu/drm/i915/gvt/cmd_parser.c bb->obj = i915_gem_object_create_shmem(s->vgpu->gvt->dev_priv,
obj 1861 drivers/gpu/drm/i915/gvt/cmd_parser.c if (IS_ERR(bb->obj)) {
obj 1862 drivers/gpu/drm/i915/gvt/cmd_parser.c ret = PTR_ERR(bb->obj);
obj 1866 drivers/gpu/drm/i915/gvt/cmd_parser.c ret = i915_gem_object_prepare_write(bb->obj, &bb->clflush);
obj 1870 drivers/gpu/drm/i915/gvt/cmd_parser.c bb->va = i915_gem_object_pin_map(bb->obj, I915_MAP_WB);
obj 1877 drivers/gpu/drm/i915/gvt/cmd_parser.c drm_clflush_virt_range(bb->va, bb->obj->base.size);
obj 1917 drivers/gpu/drm/i915/gvt/cmd_parser.c i915_gem_object_unpin_map(bb->obj);
obj 1919 drivers/gpu/drm/i915/gvt/cmd_parser.c i915_gem_object_finish_access(bb->obj);
obj 1921 drivers/gpu/drm/i915/gvt/cmd_parser.c i915_gem_object_put(bb->obj);
obj 2937 drivers/gpu/drm/i915/gvt/cmd_parser.c struct drm_i915_gem_object *obj;
obj 2941 drivers/gpu/drm/i915/gvt/cmd_parser.c obj = i915_gem_object_create_shmem(workload->vgpu->gvt->dev_priv,
obj 2944 drivers/gpu/drm/i915/gvt/cmd_parser.c if (IS_ERR(obj))
obj 2945 drivers/gpu/drm/i915/gvt/cmd_parser.c return PTR_ERR(obj);
obj 2948 drivers/gpu/drm/i915/gvt/cmd_parser.c map = i915_gem_object_pin_map(obj, I915_MAP_WB);
obj 2955 drivers/gpu/drm/i915/gvt/cmd_parser.c i915_gem_object_lock(obj);
obj 2956 drivers/gpu/drm/i915/gvt/cmd_parser.c ret = i915_gem_object_set_to_cpu_domain(obj, false);
obj 2957 drivers/gpu/drm/i915/gvt/cmd_parser.c i915_gem_object_unlock(obj);
obj 2972 drivers/gpu/drm/i915/gvt/cmd_parser.c wa_ctx->indirect_ctx.obj = obj;
obj 2977 drivers/gpu/drm/i915/gvt/cmd_parser.c i915_gem_object_unpin_map(obj);
obj 2979 drivers/gpu/drm/i915/gvt/cmd_parser.c i915_gem_object_put(obj);
obj 40 drivers/gpu/drm/i915/gvt/dmabuf.c struct drm_i915_gem_object *obj)
obj 42 drivers/gpu/drm/i915/gvt/dmabuf.c struct drm_i915_private *dev_priv = to_i915(obj->base.dev);
obj 50 drivers/gpu/drm/i915/gvt/dmabuf.c fb_info = (struct intel_vgpu_fb_info *)obj->gvt_info;
obj 58 drivers/gpu/drm/i915/gvt/dmabuf.c page_num = obj->base.size >> PAGE_SHIFT;
obj 74 drivers/gpu/drm/i915/gvt/dmabuf.c __i915_gem_object_set_pages(obj, st, PAGE_SIZE);
obj 79 drivers/gpu/drm/i915/gvt/dmabuf.c static void vgpu_gem_put_pages(struct drm_i915_gem_object *obj,
obj 88 drivers/gpu/drm/i915/gvt/dmabuf.c struct intel_vgpu_dmabuf_obj *obj =
obj 90 drivers/gpu/drm/i915/gvt/dmabuf.c struct intel_vgpu *vgpu = obj->vgpu;
obj 98 drivers/gpu/drm/i915/gvt/dmabuf.c if (dmabuf_obj == obj) {
obj 110 drivers/gpu/drm/i915/gvt/dmabuf.c kfree(obj->info);
obj 111 drivers/gpu/drm/i915/gvt/dmabuf.c kfree(obj);
obj 116 drivers/gpu/drm/i915/gvt/dmabuf.c static inline void dmabuf_obj_get(struct intel_vgpu_dmabuf_obj *obj)
obj 118 drivers/gpu/drm/i915/gvt/dmabuf.c kref_get(&obj->kref);
obj 121 drivers/gpu/drm/i915/gvt/dmabuf.c static inline void dmabuf_obj_put(struct intel_vgpu_dmabuf_obj *obj)
obj 123 drivers/gpu/drm/i915/gvt/dmabuf.c kref_put(&obj->kref, dmabuf_gem_object_free);
obj 130 drivers/gpu/drm/i915/gvt/dmabuf.c struct intel_vgpu_dmabuf_obj *obj = fb_info->obj;
obj 131 drivers/gpu/drm/i915/gvt/dmabuf.c struct intel_vgpu *vgpu = obj->vgpu;
obj 136 drivers/gpu/drm/i915/gvt/dmabuf.c dmabuf_obj_put(obj);
obj 141 drivers/gpu/drm/i915/gvt/dmabuf.c dmabuf_obj_put(obj);
obj 156 drivers/gpu/drm/i915/gvt/dmabuf.c struct drm_i915_gem_object *obj;
obj 158 drivers/gpu/drm/i915/gvt/dmabuf.c obj = i915_gem_object_alloc();
obj 159 drivers/gpu/drm/i915/gvt/dmabuf.c if (obj == NULL)
obj 162 drivers/gpu/drm/i915/gvt/dmabuf.c drm_gem_private_object_init(dev, &obj->base,
obj 164 drivers/gpu/drm/i915/gvt/dmabuf.c i915_gem_object_init(obj, &intel_vgpu_gem_ops);
obj 166 drivers/gpu/drm/i915/gvt/dmabuf.c obj->read_domains = I915_GEM_DOMAIN_GTT;
obj 167 drivers/gpu/drm/i915/gvt/dmabuf.c obj->write_domain = 0;
obj 189 drivers/gpu/drm/i915/gvt/dmabuf.c obj->tiling_and_stride = tiling_mode | stride;
obj 191 drivers/gpu/drm/i915/gvt/dmabuf.c obj->tiling_and_stride = info->drm_format_mod ?
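Note: the gvt/dmabuf.c hits above (lines 88–141) show refcounted lifetime management via kref: dmabuf_obj_get()/dmabuf_obj_put() wrap kref_get()/kref_put(), and the final put runs the release callback that frees the object. A generic sketch of the same shape, with hypothetical names:

        #include <linux/kernel.h>
        #include <linux/kref.h>
        #include <linux/slab.h>

        struct vgpu_obj {
                struct kref kref;
                /* payload fields would follow */
        };

        /* called exactly once, when the last reference is dropped */
        static void vgpu_obj_free(struct kref *kref)
        {
                kfree(container_of(kref, struct vgpu_obj, kref));
        }

        static void vgpu_obj_get(struct vgpu_obj *obj)
        {
                kref_get(&obj->kref);
        }

        static void vgpu_obj_put(struct vgpu_obj *obj)
        {
                kref_put(&obj->kref, vgpu_obj_free);
        }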
obj 195 drivers/gpu/drm/i915/gvt/dmabuf.c return obj;
obj 423 drivers/gpu/drm/i915/gvt/dmabuf.c ((struct intel_vgpu_fb_info *)dmabuf_obj->info)->obj = dmabuf_obj;
obj 471 drivers/gpu/drm/i915/gvt/dmabuf.c struct drm_i915_gem_object *obj;
obj 485 drivers/gpu/drm/i915/gvt/dmabuf.c obj = vgpu_create_gem(dev, dmabuf_obj->info);
obj 486 drivers/gpu/drm/i915/gvt/dmabuf.c if (obj == NULL) {
obj 492 drivers/gpu/drm/i915/gvt/dmabuf.c obj->gvt_info = dmabuf_obj->info;
obj 494 drivers/gpu/drm/i915/gvt/dmabuf.c dmabuf = i915_gem_prime_export(&obj->base, DRM_CLOEXEC | DRM_RDWR);
obj 523 drivers/gpu/drm/i915/gvt/dmabuf.c kref_read(&obj->base.refcount));
obj 525 drivers/gpu/drm/i915/gvt/dmabuf.c i915_gem_object_put(obj);
obj 532 drivers/gpu/drm/i915/gvt/dmabuf.c i915_gem_object_put(obj);
obj 48 drivers/gpu/drm/i915/gvt/dmabuf.h struct intel_vgpu_dmabuf_obj *obj;
obj 61 drivers/gpu/drm/i915/gvt/scheduler.c workload->req->hw_context->state->obj;
obj 132 drivers/gpu/drm/i915/gvt/scheduler.c workload->req->hw_context->state->obj;
obj 354 drivers/gpu/drm/i915/gvt/scheduler.c if (!wa_ctx->indirect_ctx.obj)
obj 357 drivers/gpu/drm/i915/gvt/scheduler.c i915_gem_object_unpin_map(wa_ctx->indirect_ctx.obj);
obj 358 drivers/gpu/drm/i915/gvt/scheduler.c i915_gem_object_put(wa_ctx->indirect_ctx.obj);
obj 360 drivers/gpu/drm/i915/gvt/scheduler.c wa_ctx->indirect_ctx.obj = NULL;
obj 483 drivers/gpu/drm/i915/gvt/scheduler.c bb->obj->base.size);
obj 486 drivers/gpu/drm/i915/gvt/scheduler.c i915_gem_object_finish_access(bb->obj);
obj 490 drivers/gpu/drm/i915/gvt/scheduler.c bb->vma = i915_gem_object_ggtt_pin(bb->obj,
obj 505 drivers/gpu/drm/i915/gvt/scheduler.c bb->obj->base.size);
obj 509 drivers/gpu/drm/i915/gvt/scheduler.c ret = i915_gem_object_set_to_gtt_domain(bb->obj,
obj 520 drivers/gpu/drm/i915/gvt/scheduler.c i915_gem_object_finish_access(bb->obj);
obj 556 drivers/gpu/drm/i915/gvt/scheduler.c vma = i915_gem_object_ggtt_pin(wa_ctx->indirect_ctx.obj, NULL,
obj 600 drivers/gpu/drm/i915/gvt/scheduler.c if (bb->obj) {
obj 602 drivers/gpu/drm/i915/gvt/scheduler.c i915_gem_object_finish_access(bb->obj);
obj 605 drivers/gpu/drm/i915/gvt/scheduler.c i915_gem_object_unpin_map(bb->obj);
obj 611 drivers/gpu/drm/i915/gvt/scheduler.c i915_gem_object_put(bb->obj);
obj 803 drivers/gpu/drm/i915/gvt/scheduler.c struct drm_i915_gem_object *ctx_obj = rq->hw_context->state->obj;
obj 60 drivers/gpu/drm/i915/gvt/scheduler.h struct drm_i915_gem_object *obj;
obj 122 drivers/gpu/drm/i915/gvt/scheduler.h struct drm_i915_gem_object *obj;
obj 80 drivers/gpu/drm/i915/i915_debugfs.c static char get_pin_flag(struct drm_i915_gem_object *obj)
obj 82 drivers/gpu/drm/i915/i915_debugfs.c return obj->pin_global ? 'p' : ' ';
obj 85 drivers/gpu/drm/i915/i915_debugfs.c static char get_tiling_flag(struct drm_i915_gem_object *obj)
obj 87 drivers/gpu/drm/i915/i915_debugfs.c switch (i915_gem_object_get_tiling(obj)) {
obj 95 drivers/gpu/drm/i915/i915_debugfs.c static char get_global_flag(struct drm_i915_gem_object *obj)
obj 97 drivers/gpu/drm/i915/i915_debugfs.c return READ_ONCE(obj->userfault_count) ? 'g' : ' ';
obj 100 drivers/gpu/drm/i915/i915_debugfs.c static char get_pin_mapped_flag(struct drm_i915_gem_object *obj)
obj 102 drivers/gpu/drm/i915/i915_debugfs.c return obj->mm.mapping ? 'M' : ' ';
obj 136 drivers/gpu/drm/i915/i915_debugfs.c describe_obj(struct seq_file *m, struct drm_i915_gem_object *obj)
obj 138 drivers/gpu/drm/i915/i915_debugfs.c struct drm_i915_private *dev_priv = to_i915(obj->base.dev);
obj 144 drivers/gpu/drm/i915/i915_debugfs.c &obj->base,
obj 145 drivers/gpu/drm/i915/i915_debugfs.c get_pin_flag(obj),
obj 146 drivers/gpu/drm/i915/i915_debugfs.c get_tiling_flag(obj),
obj 147 drivers/gpu/drm/i915/i915_debugfs.c get_global_flag(obj),
obj 148 drivers/gpu/drm/i915/i915_debugfs.c get_pin_mapped_flag(obj),
obj 149 drivers/gpu/drm/i915/i915_debugfs.c obj->base.size / 1024,
obj 150 drivers/gpu/drm/i915/i915_debugfs.c obj->read_domains,
obj 151 drivers/gpu/drm/i915/i915_debugfs.c obj->write_domain,
obj 152 drivers/gpu/drm/i915/i915_debugfs.c i915_cache_level_str(dev_priv, obj->cache_level),
obj 153 drivers/gpu/drm/i915/i915_debugfs.c obj->mm.dirty ? " dirty" : "",
obj 154 drivers/gpu/drm/i915/i915_debugfs.c obj->mm.madv == I915_MADV_DONTNEED ? " purgeable" : "");
obj 155 drivers/gpu/drm/i915/i915_debugfs.c if (obj->base.name)
obj 156 drivers/gpu/drm/i915/i915_debugfs.c seq_printf(m, " (name: %d)", obj->base.name);
obj 158 drivers/gpu/drm/i915/i915_debugfs.c spin_lock(&obj->vma.lock);
obj 159 drivers/gpu/drm/i915/i915_debugfs.c list_for_each_entry(vma, &obj->vma.list, obj_link) {
obj 163 drivers/gpu/drm/i915/i915_debugfs.c spin_unlock(&obj->vma.lock);
obj 217 drivers/gpu/drm/i915/i915_debugfs.c spin_lock(&obj->vma.lock);
obj 219 drivers/gpu/drm/i915/i915_debugfs.c spin_unlock(&obj->vma.lock);
obj 222 drivers/gpu/drm/i915/i915_debugfs.c if (obj->stolen)
obj 223 drivers/gpu/drm/i915/i915_debugfs.c seq_printf(m, " (stolen: %08llx)", obj->stolen->start);
obj 224 drivers/gpu/drm/i915/i915_debugfs.c if (obj->pin_global)
obj 227 drivers/gpu/drm/i915/i915_debugfs.c engine = i915_gem_object_last_write_engine(obj);
obj 242 drivers/gpu/drm/i915/i915_debugfs.c struct drm_i915_gem_object *obj = ptr;
obj 247 drivers/gpu/drm/i915/i915_debugfs.c stats->total += obj->base.size;
obj 248 drivers/gpu/drm/i915/i915_debugfs.c if (!atomic_read(&obj->bind_count))
obj 249 drivers/gpu/drm/i915/i915_debugfs.c stats->unbound += obj->base.size;
obj 251 drivers/gpu/drm/i915/i915_debugfs.c spin_lock(&obj->vma.lock);
obj 253 drivers/gpu/drm/i915/i915_debugfs.c for_each_ggtt_vma(vma, obj) {
obj 266 drivers/gpu/drm/i915/i915_debugfs.c struct rb_node *p = obj->vma.tree.rb_node;
obj 291 drivers/gpu/drm/i915/i915_debugfs.c spin_unlock(&obj->vma.lock);
obj 324 drivers/gpu/drm/i915/i915_debugfs.c ce->state->obj, &kstats);
obj 325 drivers/gpu/drm/i915/i915_debugfs.c per_file_stats(0, ce->ring->vma->obj, &kstats);
obj 663 drivers/gpu/drm/i915/i915_debugfs.c describe_obj(m, vma->obj);
obj 1608 drivers/gpu/drm/i915/i915_debugfs.c describe_obj(m, ce->state->obj);
obj 1970 drivers/gpu/drm/i915/i915_debugfs.c struct drm_i915_gem_object *obj = NULL;
obj 1978 drivers/gpu/drm/i915/i915_debugfs.c obj = dev_priv->gt.uc.load_err_log;
obj 1980 drivers/gpu/drm/i915/i915_debugfs.c obj = dev_priv->gt.uc.guc.log.vma->obj;
obj 1982 drivers/gpu/drm/i915/i915_debugfs.c if (!obj)
obj 1985 drivers/gpu/drm/i915/i915_debugfs.c log = i915_gem_object_pin_map(obj, I915_MAP_WC);
obj 1992 drivers/gpu/drm/i915/i915_debugfs.c for (i = 0; i < obj->base.size / sizeof(u32); i += 4)
obj 1999 drivers/gpu/drm/i915/i915_debugfs.c i915_gem_object_unpin_map(obj);
obj 2288 drivers/gpu/drm/i915/i915_drv.h i915_gem_object_ggtt_pin(struct drm_i915_gem_object *obj,
obj 2294 drivers/gpu/drm/i915/i915_drv.h int i915_gem_object_unbind(struct drm_i915_gem_object *obj,
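Note: the four single-character helpers in the i915_debugfs.c hits above (get_pin_flag() through get_pin_mapped_flag()) exist to be packed into one status line by describe_obj(); the argument list at lines 144–149 implies a call of roughly the following shape. This is a reconstruction for illustration, not a quote; the format string is abridged.

        seq_printf(m, "%pK: %c%c%c%c %zdKiB",
                   &obj->base,
                   get_pin_flag(obj),        /* 'p' if pinned for global/display use */
                   get_tiling_flag(obj),     /* 'X' or 'Y' tiling */
                   get_global_flag(obj),     /* 'g' if a GGTT userspace fault is outstanding */
                   get_pin_mapped_flag(obj), /* 'M' if a kernel mapping exists */
                   obj->base.size / 1024);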
obj 2299 drivers/gpu/drm/i915/i915_drv.h i915_gem_object_pin(struct drm_i915_gem_object *obj,
obj 2351 drivers/gpu/drm/i915/i915_drv.h int i915_gem_object_set_cache_level(struct drm_i915_gem_object *obj,
obj 2396 drivers/gpu/drm/i915/i915_drv.h static inline bool i915_gem_object_needs_bit17_swizzle(struct drm_i915_gem_object *obj)
obj 2398 drivers/gpu/drm/i915/i915_drv.h struct drm_i915_private *dev_priv = to_i915(obj->base.dev);
obj 2401 drivers/gpu/drm/i915/i915_drv.h i915_gem_object_is_tiled(obj);
obj 105 drivers/gpu/drm/i915/i915_gem.c int i915_gem_object_unbind(struct drm_i915_gem_object *obj,
obj 112 drivers/gpu/drm/i915/i915_gem.c lockdep_assert_held(&obj->base.dev->struct_mutex);
obj 114 drivers/gpu/drm/i915/i915_gem.c spin_lock(&obj->vma.lock);
obj 115 drivers/gpu/drm/i915/i915_gem.c while (!ret && (vma = list_first_entry_or_null(&obj->vma.list,
obj 119 drivers/gpu/drm/i915/i915_gem.c spin_unlock(&obj->vma.lock);
obj 126 drivers/gpu/drm/i915/i915_gem.c spin_lock(&obj->vma.lock);
obj 128 drivers/gpu/drm/i915/i915_gem.c list_splice(&still_in_list, &obj->vma.list);
obj 129 drivers/gpu/drm/i915/i915_gem.c spin_unlock(&obj->vma.lock);
obj 135 drivers/gpu/drm/i915/i915_gem.c i915_gem_phys_pwrite(struct drm_i915_gem_object *obj,
obj 139 drivers/gpu/drm/i915/i915_gem.c void *vaddr = sg_page(obj->mm.pages->sgl) + args->offset;
obj 146 drivers/gpu/drm/i915/i915_gem.c intel_frontbuffer_invalidate(obj->frontbuffer, ORIGIN_CPU);
obj 152 drivers/gpu/drm/i915/i915_gem.c intel_gt_chipset_flush(&to_i915(obj->base.dev)->gt);
obj 154 drivers/gpu/drm/i915/i915_gem.c intel_frontbuffer_flush(obj->frontbuffer, ORIGIN_CPU);
obj 164 drivers/gpu/drm/i915/i915_gem.c struct drm_i915_gem_object *obj;
obj 174 drivers/gpu/drm/i915/i915_gem.c obj = i915_gem_object_create_shmem(dev_priv, size);
obj 175 drivers/gpu/drm/i915/i915_gem.c if (IS_ERR(obj))
obj 176 drivers/gpu/drm/i915/i915_gem.c return PTR_ERR(obj);
obj 178 drivers/gpu/drm/i915/i915_gem.c ret = drm_gem_handle_create(file, &obj->base, &handle);
obj 180 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_put(obj);
obj 263 drivers/gpu/drm/i915/i915_gem.c i915_gem_shmem_pread(struct drm_i915_gem_object *obj,
obj 273 drivers/gpu/drm/i915/i915_gem.c ret = i915_gem_object_prepare_read(obj, &needs_clflush);
obj 277 drivers/gpu/drm/i915/i915_gem.c fence = i915_gem_object_lock_fence(obj);
obj 278 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_finish_access(obj);
obj 286 drivers/gpu/drm/i915/i915_gem.c struct page *page = i915_gem_object_get_page(obj, idx);
obj 299 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_unlock_fence(obj, fence);
obj 328 drivers/gpu/drm/i915/i915_gem.c i915_gem_gtt_pread(struct drm_i915_gem_object *obj,
obj 331 drivers/gpu/drm/i915/i915_gem.c struct drm_i915_private *i915 = to_i915(obj->base.dev);
obj 347 drivers/gpu/drm/i915/i915_gem.c if (!i915_gem_object_is_tiled(obj))
obj 348 drivers/gpu/drm/i915/i915_gem.c vma = i915_gem_object_ggtt_pin(obj, NULL, 0, 0,
obj 364 drivers/gpu/drm/i915/i915_gem.c ret = i915_gem_object_lock_interruptible(obj);
obj 368 drivers/gpu/drm/i915/i915_gem.c ret = i915_gem_object_set_to_gtt_domain(obj, false);
obj 370 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_unlock(obj);
obj 374 drivers/gpu/drm/i915/i915_gem.c fence = i915_gem_object_lock_fence(obj);
obj 375 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_unlock(obj);
obj 398 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_get_dma_address(obj, offset >> PAGE_SHIFT),
obj 415 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_unlock_fence(obj, fence);
obj 444 drivers/gpu/drm/i915/i915_gem.c struct drm_i915_gem_object *obj;
obj 454 drivers/gpu/drm/i915/i915_gem.c obj = i915_gem_object_lookup(file, args->handle);
obj 455 drivers/gpu/drm/i915/i915_gem.c if (!obj)
obj 459 drivers/gpu/drm/i915/i915_gem.c if (range_overflows_t(u64, args->offset, args->size, obj->base.size)) {
obj 464 drivers/gpu/drm/i915/i915_gem.c trace_i915_gem_object_pread(obj, args->offset, args->size);
obj 466 drivers/gpu/drm/i915/i915_gem.c ret = i915_gem_object_wait(obj,
obj 472 drivers/gpu/drm/i915/i915_gem.c ret = i915_gem_object_pin_pages(obj);
obj 476 drivers/gpu/drm/i915/i915_gem.c ret = i915_gem_shmem_pread(obj, args);
obj 478 drivers/gpu/drm/i915/i915_gem.c ret = i915_gem_gtt_pread(obj, args);
obj 480 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_unpin_pages(obj);
obj 482 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_put(obj);
obj 520 drivers/gpu/drm/i915/i915_gem.c i915_gem_gtt_pwrite_fast(struct drm_i915_gem_object *obj,
obj 523 drivers/gpu/drm/i915/i915_gem.c struct drm_i915_private *i915 = to_i915(obj->base.dev);
obj 538 drivers/gpu/drm/i915/i915_gem.c if (i915_gem_object_has_struct_page(obj)) {
obj 557 drivers/gpu/drm/i915/i915_gem.c if (!i915_gem_object_is_tiled(obj))
obj 558 drivers/gpu/drm/i915/i915_gem.c vma = i915_gem_object_ggtt_pin(obj, NULL, 0, 0,
obj 574 drivers/gpu/drm/i915/i915_gem.c ret = i915_gem_object_lock_interruptible(obj);
obj 578 drivers/gpu/drm/i915/i915_gem.c ret = i915_gem_object_set_to_gtt_domain(obj, true);
obj 580 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_unlock(obj);
obj 584 drivers/gpu/drm/i915/i915_gem.c fence = i915_gem_object_lock_fence(obj);
obj 585 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_unlock(obj);
obj 591 drivers/gpu/drm/i915/i915_gem.c intel_frontbuffer_invalidate(obj->frontbuffer, ORIGIN_CPU);
obj 611 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_get_dma_address(obj, offset >> PAGE_SHIFT),
obj 633 drivers/gpu/drm/i915/i915_gem.c intel_frontbuffer_flush(obj->frontbuffer, ORIGIN_CPU);
obj 635 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_unlock_fence(obj, fence);
obj 680 drivers/gpu/drm/i915/i915_gem.c i915_gem_shmem_pwrite(struct drm_i915_gem_object *obj,
obj 691 drivers/gpu/drm/i915/i915_gem.c ret = i915_gem_object_prepare_write(obj, &needs_clflush);
obj 695 drivers/gpu/drm/i915/i915_gem.c fence = i915_gem_object_lock_fence(obj);
obj 696 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_finish_access(obj);
obj 712 drivers/gpu/drm/i915/i915_gem.c struct page *page = i915_gem_object_get_page(obj, idx);
obj 726 drivers/gpu/drm/i915/i915_gem.c intel_frontbuffer_flush(obj->frontbuffer, ORIGIN_CPU);
obj 727 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_unlock_fence(obj, fence);
obj 745 drivers/gpu/drm/i915/i915_gem.c struct drm_i915_gem_object *obj;
obj 754 drivers/gpu/drm/i915/i915_gem.c obj = i915_gem_object_lookup(file, args->handle);
obj 755 drivers/gpu/drm/i915/i915_gem.c if (!obj)
obj 759 drivers/gpu/drm/i915/i915_gem.c if (range_overflows_t(u64, args->offset, args->size, obj->base.size)) {
obj 765 drivers/gpu/drm/i915/i915_gem.c if (i915_gem_object_is_readonly(obj)) {
obj 770 drivers/gpu/drm/i915/i915_gem.c trace_i915_gem_object_pwrite(obj, args->offset, args->size);
obj 773 drivers/gpu/drm/i915/i915_gem.c if (obj->ops->pwrite)
obj 774 drivers/gpu/drm/i915/i915_gem.c ret = obj->ops->pwrite(obj, args);
obj 778 drivers/gpu/drm/i915/i915_gem.c ret = i915_gem_object_wait(obj,
obj 785 drivers/gpu/drm/i915/i915_gem.c ret = i915_gem_object_pin_pages(obj);
obj 796 drivers/gpu/drm/i915/i915_gem.c if (!i915_gem_object_has_struct_page(obj) ||
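Note: the i915_gem.c hits above walk the pread/pwrite ioctl spine: look the handle up, validate the range, wait for the object, pin its backing pages, pick a copy path (shmem vs. GTT vs. phys), then unpin and drop the reference. Reduced to the object-handling skeleton, pread flavour, with error handling compressed:

        obj = i915_gem_object_lookup(file, args->handle);
        if (!obj)
                return -ENOENT;

        ret = i915_gem_object_pin_pages(obj);
        if (ret == 0) {
                ret = i915_gem_shmem_pread(obj, args); /* or i915_gem_gtt_pread() */
                i915_gem_object_unpin_pages(obj);
        }

        i915_gem_object_put(obj); /* balance the lookup's reference */
        return ret;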
obj 797 drivers/gpu/drm/i915/i915_gem.c cpu_write_needs_clflush(obj))
obj 802 drivers/gpu/drm/i915/i915_gem.c ret = i915_gem_gtt_pwrite_fast(obj, args);
obj 805 drivers/gpu/drm/i915/i915_gem.c if (i915_gem_object_has_struct_page(obj))
obj 806 drivers/gpu/drm/i915/i915_gem.c ret = i915_gem_shmem_pwrite(obj, args);
obj 808 drivers/gpu/drm/i915/i915_gem.c ret = i915_gem_phys_pwrite(obj, args, file);
obj 811 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_unpin_pages(obj);
obj 813 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_put(obj);
obj 828 drivers/gpu/drm/i915/i915_gem.c struct drm_i915_gem_object *obj;
obj 830 drivers/gpu/drm/i915/i915_gem.c obj = i915_gem_object_lookup(file, args->handle);
obj 831 drivers/gpu/drm/i915/i915_gem.c if (!obj)
obj 840 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_flush_if_display(obj);
obj 841 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_put(obj);
obj 848 drivers/gpu/drm/i915/i915_gem.c struct drm_i915_gem_object *obj, *on;
obj 858 drivers/gpu/drm/i915/i915_gem.c list_for_each_entry_safe(obj, on,
obj 860 drivers/gpu/drm/i915/i915_gem.c __i915_gem_object_release_mmap(obj);
obj 959 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_ggtt_pin(struct drm_i915_gem_object *obj,
obj 965 drivers/gpu/drm/i915/i915_gem.c struct drm_i915_private *dev_priv = to_i915(obj->base.dev);
obj 968 drivers/gpu/drm/i915/i915_gem.c return i915_gem_object_pin(obj, vm, view, size, alignment,
obj 973 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_pin(struct drm_i915_gem_object *obj,
obj 980 drivers/gpu/drm/i915/i915_gem.c struct drm_i915_private *dev_priv = to_i915(obj->base.dev);
obj 984 drivers/gpu/drm/i915/i915_gem.c lockdep_assert_held(&obj->base.dev->struct_mutex);
obj 986 drivers/gpu/drm/i915/i915_gem.c if (i915_gem_object_never_bind_ggtt(obj))
obj 998 drivers/gpu/drm/i915/i915_gem.c if (obj->base.size > dev_priv->ggtt.mappable_end)
obj 1017 drivers/gpu/drm/i915/i915_gem.c obj->base.size > dev_priv->ggtt.mappable_end / 2)
obj 1021 drivers/gpu/drm/i915/i915_gem.c vma = i915_vma_instance(obj, vm, view);
obj 1047 drivers/gpu/drm/i915/i915_gem.c if (vma->fence && !i915_gem_object_is_tiled(obj)) {
obj 1068 drivers/gpu/drm/i915/i915_gem.c struct drm_i915_gem_object *obj;
obj 1079 drivers/gpu/drm/i915/i915_gem.c obj = i915_gem_object_lookup(file_priv, args->handle);
obj 1080 drivers/gpu/drm/i915/i915_gem.c if (!obj)
obj 1083 drivers/gpu/drm/i915/i915_gem.c err = mutex_lock_interruptible(&obj->mm.lock);
obj 1087 drivers/gpu/drm/i915/i915_gem.c if (i915_gem_object_has_pages(obj) &&
obj 1088 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_is_tiled(obj) &&
obj 1090 drivers/gpu/drm/i915/i915_gem.c if (obj->mm.madv == I915_MADV_WILLNEED) {
obj 1091 drivers/gpu/drm/i915/i915_gem.c GEM_BUG_ON(!obj->mm.quirked);
obj 1092 drivers/gpu/drm/i915/i915_gem.c __i915_gem_object_unpin_pages(obj);
obj 1093 drivers/gpu/drm/i915/i915_gem.c obj->mm.quirked = false;
obj 1096 drivers/gpu/drm/i915/i915_gem.c GEM_BUG_ON(obj->mm.quirked);
obj 1097 drivers/gpu/drm/i915/i915_gem.c __i915_gem_object_pin_pages(obj);
obj 1098 drivers/gpu/drm/i915/i915_gem.c obj->mm.quirked = true;
obj 1102 drivers/gpu/drm/i915/i915_gem.c if (obj->mm.madv != __I915_MADV_PURGED)
obj 1103 drivers/gpu/drm/i915/i915_gem.c obj->mm.madv = args->madv;
obj 1105 drivers/gpu/drm/i915/i915_gem.c if (i915_gem_object_has_pages(obj)) {
obj 1108 drivers/gpu/drm/i915/i915_gem.c if (i915_gem_object_is_shrinkable(obj)) {
obj 1113 drivers/gpu/drm/i915/i915_gem.c if (obj->mm.madv != I915_MADV_WILLNEED)
(obj->mm.madv != I915_MADV_WILLNEED) obj 1117 drivers/gpu/drm/i915/i915_gem.c list_move_tail(&obj->mm.link, list); obj 1124 drivers/gpu/drm/i915/i915_gem.c if (obj->mm.madv == I915_MADV_DONTNEED && obj 1125 drivers/gpu/drm/i915/i915_gem.c !i915_gem_object_has_pages(obj)) obj 1126 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_truncate(obj); obj 1128 drivers/gpu/drm/i915/i915_gem.c args->retained = obj->mm.madv != __I915_MADV_PURGED; obj 1129 drivers/gpu/drm/i915/i915_gem.c mutex_unlock(&obj->mm.lock); obj 1132 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_put(obj); obj 1352 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_lock(state->obj); obj 1353 drivers/gpu/drm/i915/i915_gem.c err = i915_gem_object_set_to_cpu_domain(state->obj, false); obj 1354 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_unlock(state->obj); obj 1358 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_set_cache_coherency(state->obj, I915_CACHE_LLC); obj 1361 drivers/gpu/drm/i915/i915_gem.c vaddr = i915_gem_object_pin_map(state->obj, I915_MAP_FORCE_WB); obj 1367 drivers/gpu/drm/i915/i915_gem.c rq->engine->default_state = i915_gem_object_get(state->obj); obj 1368 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_unpin_map(state->obj); obj 1701 drivers/gpu/drm/i915/i915_gem.c struct drm_i915_gem_object *obj; obj 1724 drivers/gpu/drm/i915/i915_gem.c list_for_each_entry(obj, &i915->mm.shrink_list, mm.link) { obj 1725 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_lock(obj); obj 1726 drivers/gpu/drm/i915/i915_gem.c WARN_ON(i915_gem_object_set_to_cpu_domain(obj, true)); obj 1727 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_unlock(obj); obj 82 drivers/gpu/drm/i915/i915_gem_fence_reg.c unsigned int stride = i915_gem_object_get_stride(vma->obj); obj 92 drivers/gpu/drm/i915/i915_gem_fence_reg.c if (i915_gem_object_get_tiling(vma->obj) == I915_TILING_Y) obj 126 drivers/gpu/drm/i915/i915_gem_fence_reg.c unsigned int tiling = i915_gem_object_get_tiling(vma->obj); obj 128 drivers/gpu/drm/i915/i915_gem_fence_reg.c unsigned int stride = i915_gem_object_get_stride(vma->obj); obj 166 drivers/gpu/drm/i915/i915_gem_fence_reg.c unsigned int stride = i915_gem_object_get_stride(vma->obj); obj 175 drivers/gpu/drm/i915/i915_gem_fence_reg.c if (i915_gem_object_get_tiling(vma->obj) == I915_TILING_Y) obj 226 drivers/gpu/drm/i915/i915_gem_fence_reg.c if (WARN(!i915_gem_object_get_stride(vma->obj) || obj 227 drivers/gpu/drm/i915/i915_gem_fence_reg.c !i915_gem_object_get_tiling(vma->obj), obj 229 drivers/gpu/drm/i915/i915_gem_fence_reg.c i915_gem_object_get_stride(vma->obj), obj 230 drivers/gpu/drm/i915/i915_gem_fence_reg.c i915_gem_object_get_tiling(vma->obj))) obj 338 drivers/gpu/drm/i915/i915_gem_fence_reg.c struct i915_vma *set = i915_gem_object_is_tiled(vma->obj) ? 
vma : NULL; obj 493 drivers/gpu/drm/i915/i915_gem_fence_reg.c if (vma && !i915_gem_object_is_tiled(vma->obj)) obj 760 drivers/gpu/drm/i915/i915_gem_fence_reg.c i915_gem_object_do_bit_17_swizzle(struct drm_i915_gem_object *obj, obj 767 drivers/gpu/drm/i915/i915_gem_fence_reg.c if (obj->bit_17 == NULL) obj 773 drivers/gpu/drm/i915/i915_gem_fence_reg.c if ((new_bit_17 & 0x1) != (test_bit(i, obj->bit_17) != 0)) { obj 791 drivers/gpu/drm/i915/i915_gem_fence_reg.c i915_gem_object_save_bit_17_swizzle(struct drm_i915_gem_object *obj, obj 794 drivers/gpu/drm/i915/i915_gem_fence_reg.c const unsigned int page_count = obj->base.size >> PAGE_SHIFT; obj 799 drivers/gpu/drm/i915/i915_gem_fence_reg.c if (obj->bit_17 == NULL) { obj 800 drivers/gpu/drm/i915/i915_gem_fence_reg.c obj->bit_17 = bitmap_zalloc(page_count, GFP_KERNEL); obj 801 drivers/gpu/drm/i915/i915_gem_fence_reg.c if (obj->bit_17 == NULL) { obj 812 drivers/gpu/drm/i915/i915_gem_fence_reg.c __set_bit(i, obj->bit_17); obj 814 drivers/gpu/drm/i915/i915_gem_fence_reg.c __clear_bit(i, obj->bit_17); obj 63 drivers/gpu/drm/i915/i915_gem_fence_reg.h void i915_gem_object_do_bit_17_swizzle(struct drm_i915_gem_object *obj, obj 65 drivers/gpu/drm/i915/i915_gem_fence_reg.h void i915_gem_object_save_bit_17_swizzle(struct drm_i915_gem_object *obj, obj 161 drivers/gpu/drm/i915/i915_gem_gtt.c if (i915_gem_object_is_readonly(vma->obj)) obj 178 drivers/gpu/drm/i915/i915_gem_gtt.c vma->pages = vma->obj->mm.pages; obj 180 drivers/gpu/drm/i915/i915_gem_gtt.c vma->page_sizes = vma->obj->mm.page_sizes; obj 189 drivers/gpu/drm/i915/i915_gem_gtt.c if (vma->pages != vma->obj->mm.pages) { obj 2143 drivers/gpu/drm/i915/i915_gem_gtt.c int i915_gem_gtt_prepare_pages(struct drm_i915_gem_object *obj, obj 2147 drivers/gpu/drm/i915/i915_gem_gtt.c if (dma_map_sg_attrs(&obj->base.dev->pdev->dev, obj 2160 drivers/gpu/drm/i915/i915_gem_gtt.c GEM_BUG_ON(obj->mm.pages == pages); obj 2161 drivers/gpu/drm/i915/i915_gem_gtt.c } while (i915_gem_shrink(to_i915(obj->base.dev), obj 2162 drivers/gpu/drm/i915/i915_gem_gtt.c obj->base.size >> PAGE_SHIFT, NULL, obj 2434 drivers/gpu/drm/i915/i915_gem_gtt.c struct drm_i915_gem_object *obj = vma->obj; obj 2440 drivers/gpu/drm/i915/i915_gem_gtt.c if (i915_gem_object_is_readonly(obj)) obj 2477 drivers/gpu/drm/i915/i915_gem_gtt.c if (i915_gem_object_is_readonly(vma->obj)) obj 2527 drivers/gpu/drm/i915/i915_gem_gtt.c void i915_gem_gtt_finish_pages(struct drm_i915_gem_object *obj, obj 2530 drivers/gpu/drm/i915/i915_gem_gtt.c struct drm_i915_private *dev_priv = to_i915(obj->base.dev); obj 2555 drivers/gpu/drm/i915/i915_gem_gtt.c vma->page_sizes = vma->obj->mm.page_sizes; obj 3317 drivers/gpu/drm/i915/i915_gem_gtt.c struct drm_i915_gem_object *obj = vma->obj; obj 3328 drivers/gpu/drm/i915/i915_gem_gtt.c obj ? 
obj->cache_level : 0, obj 3330 drivers/gpu/drm/i915/i915_gem_gtt.c if (obj) { /* only used during resume => exclusive access */ obj 3331 drivers/gpu/drm/i915/i915_gem_gtt.c flush |= fetch_and_zero(&obj->write_domain); obj 3332 drivers/gpu/drm/i915/i915_gem_gtt.c obj->read_domains |= I915_GEM_DOMAIN_GTT; obj 3357 drivers/gpu/drm/i915/i915_gem_gtt.c rotate_pages(struct drm_i915_gem_object *obj, unsigned int offset, obj 3375 drivers/gpu/drm/i915/i915_gem_gtt.c i915_gem_object_get_dma_address(obj, src_idx); obj 3387 drivers/gpu/drm/i915/i915_gem_gtt.c struct drm_i915_gem_object *obj) obj 3408 drivers/gpu/drm/i915/i915_gem_gtt.c sg = rotate_pages(obj, rot_info->plane[i].offset, obj 3420 drivers/gpu/drm/i915/i915_gem_gtt.c obj->base.size, rot_info->plane[0].width, rot_info->plane[0].height, size); obj 3426 drivers/gpu/drm/i915/i915_gem_gtt.c remap_pages(struct drm_i915_gem_object *obj, unsigned int offset, obj 3445 drivers/gpu/drm/i915/i915_gem_gtt.c addr = i915_gem_object_get_dma_address_len(obj, offset, &length); obj 3468 drivers/gpu/drm/i915/i915_gem_gtt.c struct drm_i915_gem_object *obj) obj 3489 drivers/gpu/drm/i915/i915_gem_gtt.c sg = remap_pages(obj, rem_info->plane[i].offset, obj 3503 drivers/gpu/drm/i915/i915_gem_gtt.c obj->base.size, rem_info->plane[0].width, rem_info->plane[0].height, size); obj 3510 drivers/gpu/drm/i915/i915_gem_gtt.c struct drm_i915_gem_object *obj) obj 3526 drivers/gpu/drm/i915/i915_gem_gtt.c iter = i915_gem_object_get_sg(obj, view->partial.offset, &offset); obj 3571 drivers/gpu/drm/i915/i915_gem_gtt.c GEM_BUG_ON(!i915_gem_object_has_pinned_pages(vma->obj)); obj 3578 drivers/gpu/drm/i915/i915_gem_gtt.c vma->pages = vma->obj->mm.pages; obj 3583 drivers/gpu/drm/i915/i915_gem_gtt.c intel_rotate_pages(&vma->ggtt_view.rotated, vma->obj); obj 3588 drivers/gpu/drm/i915/i915_gem_gtt.c intel_remap_pages(&vma->ggtt_view.remapped, vma->obj); obj 3592 drivers/gpu/drm/i915/i915_gem_gtt.c vma->pages = intel_partial_pages(&vma->ggtt_view, vma->obj); obj 587 drivers/gpu/drm/i915/i915_gem_gtt.h int __must_check i915_gem_gtt_prepare_pages(struct drm_i915_gem_object *obj, obj 589 drivers/gpu/drm/i915/i915_gem_gtt.h void i915_gem_gtt_finish_pages(struct drm_i915_gem_object *obj, obj 562 drivers/gpu/drm/i915/i915_gpu_error.c const struct drm_i915_error_object *obj) obj 567 drivers/gpu/drm/i915/i915_gpu_error.c if (!obj) obj 573 drivers/gpu/drm/i915/i915_gpu_error.c upper_32_bits(obj->gtt_offset), obj 574 drivers/gpu/drm/i915/i915_gpu_error.c lower_32_bits(obj->gtt_offset)); obj 578 drivers/gpu/drm/i915/i915_gpu_error.c for (page = 0; page < obj->page_count; page++) { obj 582 drivers/gpu/drm/i915/i915_gpu_error.c if (page == obj->page_count - 1) obj 583 drivers/gpu/drm/i915/i915_gpu_error.c len -= obj->unused; obj 587 drivers/gpu/drm/i915/i915_gpu_error.c err_puts(m, ascii85_encode(obj->pages[page][i], out)); obj 741 drivers/gpu/drm/i915/i915_gpu_error.c const struct drm_i915_error_object *obj; obj 743 drivers/gpu/drm/i915/i915_gpu_error.c obj = ee->batchbuffer; obj 744 drivers/gpu/drm/i915/i915_gpu_error.c if (obj) { obj 751 drivers/gpu/drm/i915/i915_gpu_error.c upper_32_bits(obj->gtt_offset), obj 752 drivers/gpu/drm/i915/i915_gpu_error.c lower_32_bits(obj->gtt_offset)); obj 753 drivers/gpu/drm/i915/i915_gpu_error.c print_error_obj(m, ee->engine, NULL, obj); obj 892 drivers/gpu/drm/i915/i915_gpu_error.c static void i915_error_object_free(struct drm_i915_error_object *obj) obj 896 drivers/gpu/drm/i915/i915_gpu_error.c if (obj == NULL) obj 899 drivers/gpu/drm/i915/i915_gpu_error.c for 
(page = 0; page < obj->page_count; page++) obj 900 drivers/gpu/drm/i915/i915_gpu_error.c free_page((unsigned long)obj->pages[page]); obj 902 drivers/gpu/drm/i915/i915_gpu_error.c kfree(obj); obj 974 drivers/gpu/drm/i915/i915_gpu_error.c num_pages = min_t(u64, vma->size, vma->obj->base.size) >> PAGE_SHIFT; obj 1345 drivers/gpu/drm/i915/i915_gpu_error.c struct drm_i915_gem_object *obj, obj 1348 drivers/gpu/drm/i915/i915_gpu_error.c if (obj && i915_gem_object_has_pages(obj)) { obj 1350 drivers/gpu/drm/i915/i915_gpu_error.c .node = { .start = U64_MAX, .size = obj->base.size }, obj 1351 drivers/gpu/drm/i915/i915_gpu_error.c .size = obj->base.size, obj 1352 drivers/gpu/drm/i915/i915_gpu_error.c .pages = obj->mm.pages, obj 1353 drivers/gpu/drm/i915/i915_gpu_error.c .obj = obj, obj 1060 drivers/gpu/drm/i915/i915_request.c struct drm_i915_gem_object *obj, obj 1070 drivers/gpu/drm/i915/i915_request.c ret = dma_resv_get_fences_rcu(obj->base.resv, obj 1087 drivers/gpu/drm/i915/i915_request.c excl = dma_resv_get_excl_rcu(obj->base.resv); obj 286 drivers/gpu/drm/i915/i915_request.h struct drm_i915_gem_object *obj, obj 418 drivers/gpu/drm/i915/i915_trace.h TP_PROTO(struct drm_i915_gem_object *obj), obj 419 drivers/gpu/drm/i915/i915_trace.h TP_ARGS(obj), obj 422 drivers/gpu/drm/i915/i915_trace.h __field(struct drm_i915_gem_object *, obj) obj 427 drivers/gpu/drm/i915/i915_trace.h __entry->obj = obj; obj 428 drivers/gpu/drm/i915/i915_trace.h __entry->size = obj->base.size; obj 431 drivers/gpu/drm/i915/i915_trace.h TP_printk("obj=%p, size=0x%llx", __entry->obj, __entry->size) obj 459 drivers/gpu/drm/i915/i915_trace.h __field(struct drm_i915_gem_object *, obj) obj 467 drivers/gpu/drm/i915/i915_trace.h __entry->obj = vma->obj; obj 475 drivers/gpu/drm/i915/i915_trace.h __entry->obj, __entry->offset, __entry->size, obj 485 drivers/gpu/drm/i915/i915_trace.h __field(struct drm_i915_gem_object *, obj) obj 492 drivers/gpu/drm/i915/i915_trace.h __entry->obj = vma->obj; obj 499 drivers/gpu/drm/i915/i915_trace.h __entry->obj, __entry->offset, __entry->size, __entry->vm) obj 503 drivers/gpu/drm/i915/i915_trace.h TP_PROTO(struct drm_i915_gem_object *obj, u64 offset, u64 len), obj 504 drivers/gpu/drm/i915/i915_trace.h TP_ARGS(obj, offset, len), obj 507 drivers/gpu/drm/i915/i915_trace.h __field(struct drm_i915_gem_object *, obj) obj 513 drivers/gpu/drm/i915/i915_trace.h __entry->obj = obj; obj 519 drivers/gpu/drm/i915/i915_trace.h __entry->obj, __entry->offset, __entry->len) obj 523 drivers/gpu/drm/i915/i915_trace.h TP_PROTO(struct drm_i915_gem_object *obj, u64 offset, u64 len), obj 524 drivers/gpu/drm/i915/i915_trace.h TP_ARGS(obj, offset, len), obj 527 drivers/gpu/drm/i915/i915_trace.h __field(struct drm_i915_gem_object *, obj) obj 533 drivers/gpu/drm/i915/i915_trace.h __entry->obj = obj; obj 539 drivers/gpu/drm/i915/i915_trace.h __entry->obj, __entry->offset, __entry->len) obj 543 drivers/gpu/drm/i915/i915_trace.h TP_PROTO(struct drm_i915_gem_object *obj, u64 index, bool gtt, bool write), obj 544 drivers/gpu/drm/i915/i915_trace.h TP_ARGS(obj, index, gtt, write), obj 547 drivers/gpu/drm/i915/i915_trace.h __field(struct drm_i915_gem_object *, obj) obj 554 drivers/gpu/drm/i915/i915_trace.h __entry->obj = obj; obj 561 drivers/gpu/drm/i915/i915_trace.h __entry->obj, obj 568 drivers/gpu/drm/i915/i915_trace.h TP_PROTO(struct drm_i915_gem_object *obj), obj 569 drivers/gpu/drm/i915/i915_trace.h TP_ARGS(obj), obj 572 drivers/gpu/drm/i915/i915_trace.h __field(struct drm_i915_gem_object *, obj) obj 576 
drivers/gpu/drm/i915/i915_trace.h __entry->obj = obj; obj 579 drivers/gpu/drm/i915/i915_trace.h TP_printk("obj=%p", __entry->obj) obj 583 drivers/gpu/drm/i915/i915_trace.h TP_PROTO(struct drm_i915_gem_object *obj), obj 584 drivers/gpu/drm/i915/i915_trace.h TP_ARGS(obj) obj 588 drivers/gpu/drm/i915/i915_trace.h TP_PROTO(struct drm_i915_gem_object *obj), obj 589 drivers/gpu/drm/i915/i915_trace.h TP_ARGS(obj) obj 99 drivers/gpu/drm/i915/i915_vma.c vma_create(struct drm_i915_gem_object *obj, obj 115 drivers/gpu/drm/i915/i915_vma.c vma->obj = obj; obj 116 drivers/gpu/drm/i915/i915_vma.c vma->resv = obj->base.resv; obj 117 drivers/gpu/drm/i915/i915_vma.c vma->size = obj->base.size; obj 138 drivers/gpu/drm/i915/i915_vma.c obj->base.size >> PAGE_SHIFT)); obj 141 drivers/gpu/drm/i915/i915_vma.c GEM_BUG_ON(vma->size > obj->base.size); obj 161 drivers/gpu/drm/i915/i915_vma.c i915_gem_object_get_tiling(obj), obj 162 drivers/gpu/drm/i915/i915_vma.c i915_gem_object_get_stride(obj)); obj 170 drivers/gpu/drm/i915/i915_vma.c i915_gem_object_get_tiling(obj), obj 171 drivers/gpu/drm/i915/i915_vma.c i915_gem_object_get_stride(obj)); obj 177 drivers/gpu/drm/i915/i915_vma.c spin_lock(&obj->vma.lock); obj 180 drivers/gpu/drm/i915/i915_vma.c p = &obj->vma.tree.rb_node; obj 195 drivers/gpu/drm/i915/i915_vma.c spin_unlock(&obj->vma.lock); obj 206 drivers/gpu/drm/i915/i915_vma.c rb_insert_color(&vma->obj_node, &obj->vma.tree); obj 215 drivers/gpu/drm/i915/i915_vma.c list_add(&vma->obj_link, &obj->vma.list); obj 217 drivers/gpu/drm/i915/i915_vma.c list_add_tail(&vma->obj_link, &obj->vma.list); obj 219 drivers/gpu/drm/i915/i915_vma.c spin_unlock(&obj->vma.lock); obj 233 drivers/gpu/drm/i915/i915_vma.c vma_lookup(struct drm_i915_gem_object *obj, obj 239 drivers/gpu/drm/i915/i915_vma.c rb = obj->vma.tree.rb_node; obj 273 drivers/gpu/drm/i915/i915_vma.c i915_vma_instance(struct drm_i915_gem_object *obj, obj 282 drivers/gpu/drm/i915/i915_vma.c spin_lock(&obj->vma.lock); obj 283 drivers/gpu/drm/i915/i915_vma.c vma = vma_lookup(obj, vm, view); obj 284 drivers/gpu/drm/i915/i915_vma.c spin_unlock(&obj->vma.lock); obj 288 drivers/gpu/drm/i915/i915_vma.c vma = vma_create(obj, vm, view); obj 417 drivers/gpu/drm/i915/i915_vma.c struct drm_i915_gem_object *obj; obj 423 drivers/gpu/drm/i915/i915_vma.c obj = vma->obj; obj 424 drivers/gpu/drm/i915/i915_vma.c GEM_BUG_ON(!obj); obj 430 drivers/gpu/drm/i915/i915_vma.c i915_gem_object_unpin_map(obj); obj 432 drivers/gpu/drm/i915/i915_vma.c i915_gem_object_put(obj); obj 515 drivers/gpu/drm/i915/i915_vma.c static void assert_bind_count(const struct drm_i915_gem_object *obj) obj 524 drivers/gpu/drm/i915/i915_vma.c GEM_BUG_ON(atomic_read(&obj->mm.pages_pin_count) < atomic_read(&obj->bind_count)); obj 586 drivers/gpu/drm/i915/i915_vma.c if (vma->obj) { obj 587 drivers/gpu/drm/i915/i915_vma.c ret = i915_gem_object_pin_pages(vma->obj); obj 591 drivers/gpu/drm/i915/i915_vma.c cache_level = vma->obj->cache_level; obj 665 drivers/gpu/drm/i915/i915_vma.c if (vma->obj) { obj 666 drivers/gpu/drm/i915/i915_vma.c atomic_inc(&vma->obj->bind_count); obj 667 drivers/gpu/drm/i915/i915_vma.c assert_bind_count(vma->obj); obj 675 drivers/gpu/drm/i915/i915_vma.c if (vma->obj) obj 676 drivers/gpu/drm/i915/i915_vma.c i915_gem_object_unpin_pages(vma->obj); obj 697 drivers/gpu/drm/i915/i915_vma.c if (vma->obj) { obj 698 drivers/gpu/drm/i915/i915_vma.c struct drm_i915_gem_object *obj = vma->obj; obj 700 drivers/gpu/drm/i915/i915_vma.c atomic_dec(&obj->bind_count); obj 707 drivers/gpu/drm/i915/i915_vma.c 
i915_gem_object_unpin_pages(obj); obj 708 drivers/gpu/drm/i915/i915_vma.c assert_bind_count(obj); obj 734 drivers/gpu/drm/i915/i915_vma.c ret = i915_vma_bind(vma, vma->obj ? vma->obj->cache_level : 0, flags); obj 807 drivers/gpu/drm/i915/i915_vma.c if (vma->obj) { obj 808 drivers/gpu/drm/i915/i915_vma.c struct drm_i915_gem_object *obj = vma->obj; obj 810 drivers/gpu/drm/i915/i915_vma.c spin_lock(&obj->vma.lock); obj 812 drivers/gpu/drm/i915/i915_vma.c rb_erase(&vma->obj_node, &vma->obj->vma.tree); obj 813 drivers/gpu/drm/i915/i915_vma.c spin_unlock(&obj->vma.lock); obj 864 drivers/gpu/drm/i915/i915_vma.c struct drm_vma_offset_node *node = &vma->obj->base.vma_node; obj 873 drivers/gpu/drm/i915/i915_vma.c GEM_BUG_ON(!vma->obj->userfault_count); obj 882 drivers/gpu/drm/i915/i915_vma.c if (!--vma->obj->userfault_count) obj 883 drivers/gpu/drm/i915/i915_vma.c list_del(&vma->obj->userfault_link); obj 890 drivers/gpu/drm/i915/i915_vma.c struct drm_i915_gem_object *obj = vma->obj; obj 894 drivers/gpu/drm/i915/i915_vma.c assert_object_held(obj); obj 910 drivers/gpu/drm/i915/i915_vma.c if (intel_frontbuffer_invalidate(obj->frontbuffer, ORIGIN_CS)) obj 911 drivers/gpu/drm/i915/i915_vma.c i915_active_ref(&obj->frontbuffer->write, obj 916 drivers/gpu/drm/i915/i915_vma.c obj->write_domain = I915_GEM_DOMAIN_RENDER; obj 917 drivers/gpu/drm/i915/i915_vma.c obj->read_domains = 0; obj 924 drivers/gpu/drm/i915/i915_vma.c obj->write_domain = 0; obj 926 drivers/gpu/drm/i915/i915_vma.c obj->read_domains |= I915_GEM_GPU_DOMAINS; obj 927 drivers/gpu/drm/i915/i915_vma.c obj->mm.dirty = true; obj 1015 drivers/gpu/drm/i915/i915_vma.c i915_gem_object_make_unshrinkable(vma->obj); obj 1021 drivers/gpu/drm/i915/i915_vma.c i915_gem_object_make_shrinkable(vma->obj); obj 1026 drivers/gpu/drm/i915/i915_vma.c i915_gem_object_make_purgeable(vma->obj); obj 54 drivers/gpu/drm/i915/i915_vma.h struct drm_i915_gem_object *obj; obj 149 drivers/gpu/drm/i915/i915_vma.h i915_vma_instance(struct drm_i915_gem_object *obj, obj 230 drivers/gpu/drm/i915/i915_vma.h i915_gem_object_get(vma->obj); obj 236 drivers/gpu/drm/i915/i915_vma.h if (likely(kref_get_unless_zero(&vma->obj->base.refcount))) obj 244 drivers/gpu/drm/i915/i915_vma.h i915_gem_object_put(vma->obj); obj 2356 drivers/gpu/drm/i915/intel_pm.c struct drm_i915_gem_object *obj; obj 2358 drivers/gpu/drm/i915/intel_pm.c obj = intel_fb_obj(enabled->base.primary->state->fb); obj 2361 drivers/gpu/drm/i915/intel_pm.c if (!i915_gem_object_is_tiled(obj)) obj 37 drivers/gpu/drm/i915/selftests/i915_gem_evict.c static void quirk_add(struct drm_i915_gem_object *obj, obj 41 drivers/gpu/drm/i915/selftests/i915_gem_evict.c GEM_BUG_ON(obj->mm.quirked); obj 42 drivers/gpu/drm/i915/selftests/i915_gem_evict.c obj->mm.quirked = true; obj 43 drivers/gpu/drm/i915/selftests/i915_gem_evict.c list_add(&obj->st_link, objects); obj 50 drivers/gpu/drm/i915/selftests/i915_gem_evict.c struct drm_i915_gem_object *obj; obj 56 drivers/gpu/drm/i915/selftests/i915_gem_evict.c obj = i915_gem_object_create_internal(i915, I915_GTT_PAGE_SIZE); obj 57 drivers/gpu/drm/i915/selftests/i915_gem_evict.c if (IS_ERR(obj)) obj 58 drivers/gpu/drm/i915/selftests/i915_gem_evict.c return PTR_ERR(obj); obj 60 drivers/gpu/drm/i915/selftests/i915_gem_evict.c vma = i915_gem_object_ggtt_pin(obj, NULL, 0, 0, 0); obj 62 drivers/gpu/drm/i915/selftests/i915_gem_evict.c i915_gem_object_put(obj); obj 69 drivers/gpu/drm/i915/selftests/i915_gem_evict.c quirk_add(obj, objects); obj 77 drivers/gpu/drm/i915/selftests/i915_gem_evict.c 
list_for_each_entry(obj, objects, st_link) { obj 78 drivers/gpu/drm/i915/selftests/i915_gem_evict.c GEM_BUG_ON(!obj->mm.quirked); obj 80 drivers/gpu/drm/i915/selftests/i915_gem_evict.c if (atomic_read(&obj->bind_count)) obj 114 drivers/gpu/drm/i915/selftests/i915_gem_evict.c if (vma->obj->mm.quirked) obj 122 drivers/gpu/drm/i915/selftests/i915_gem_evict.c struct drm_i915_gem_object *obj, *on; obj 124 drivers/gpu/drm/i915/selftests/i915_gem_evict.c list_for_each_entry_safe(obj, on, list, st_link) { obj 125 drivers/gpu/drm/i915/selftests/i915_gem_evict.c GEM_BUG_ON(!obj->mm.quirked); obj 126 drivers/gpu/drm/i915/selftests/i915_gem_evict.c obj->mm.quirked = false; obj 127 drivers/gpu/drm/i915/selftests/i915_gem_evict.c i915_gem_object_put(obj); obj 182 drivers/gpu/drm/i915/selftests/i915_gem_evict.c struct drm_i915_gem_object *obj; obj 195 drivers/gpu/drm/i915/selftests/i915_gem_evict.c obj = i915_gem_object_create_internal(i915, I915_GTT_PAGE_SIZE); obj 196 drivers/gpu/drm/i915/selftests/i915_gem_evict.c if (IS_ERR(obj)) { obj 197 drivers/gpu/drm/i915/selftests/i915_gem_evict.c err = PTR_ERR(obj); obj 201 drivers/gpu/drm/i915/selftests/i915_gem_evict.c quirk_add(obj, &objects); obj 203 drivers/gpu/drm/i915/selftests/i915_gem_evict.c vma = i915_gem_object_ggtt_pin(obj, NULL, 0, 0, 0); obj 272 drivers/gpu/drm/i915/selftests/i915_gem_evict.c struct drm_i915_gem_object *obj; obj 284 drivers/gpu/drm/i915/selftests/i915_gem_evict.c obj = i915_gem_object_create_internal(i915, I915_GTT_PAGE_SIZE); obj 285 drivers/gpu/drm/i915/selftests/i915_gem_evict.c if (IS_ERR(obj)) { obj 286 drivers/gpu/drm/i915/selftests/i915_gem_evict.c err = PTR_ERR(obj); obj 289 drivers/gpu/drm/i915/selftests/i915_gem_evict.c i915_gem_object_set_cache_coherency(obj, I915_CACHE_LLC); obj 290 drivers/gpu/drm/i915/selftests/i915_gem_evict.c quirk_add(obj, &objects); obj 292 drivers/gpu/drm/i915/selftests/i915_gem_evict.c vma = i915_gem_object_ggtt_pin(obj, NULL, 0, 0, obj 300 drivers/gpu/drm/i915/selftests/i915_gem_evict.c obj = i915_gem_object_create_internal(i915, I915_GTT_PAGE_SIZE); obj 301 drivers/gpu/drm/i915/selftests/i915_gem_evict.c if (IS_ERR(obj)) { obj 302 drivers/gpu/drm/i915/selftests/i915_gem_evict.c err = PTR_ERR(obj); obj 305 drivers/gpu/drm/i915/selftests/i915_gem_evict.c i915_gem_object_set_cache_coherency(obj, I915_CACHE_LLC); obj 306 drivers/gpu/drm/i915/selftests/i915_gem_evict.c quirk_add(obj, &objects); obj 309 drivers/gpu/drm/i915/selftests/i915_gem_evict.c vma = i915_gem_object_ggtt_pin(obj, NULL, 0, 0, obj 50 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c static void fake_free_pages(struct drm_i915_gem_object *obj, obj 57 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c static int fake_get_pages(struct drm_i915_gem_object *obj) obj 64 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c typeof(obj->base.size) rem; obj 70 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c rem = round_up(obj->base.size, BIT(31)) >> 31; obj 77 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c rem = obj->base.size; obj 91 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj->mm.madv = I915_MADV_DONTNEED; obj 93 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c __i915_gem_object_set_pages(obj, pages, sg_page_sizes); obj 99 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c static void fake_put_pages(struct drm_i915_gem_object *obj, obj 102 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c fake_free_pages(obj, pages); obj 103 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj->mm.dirty = false; obj 104 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj->mm.madv 
= I915_MADV_WILLNEED; obj 116 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c struct drm_i915_gem_object *obj; obj 121 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c if (overflows_type(size, obj->base.size)) obj 124 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj = i915_gem_object_alloc(); obj 125 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c if (!obj) obj 128 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c drm_gem_private_object_init(&i915->drm, &obj->base, size); obj 129 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c i915_gem_object_init(obj, &fake_ops); obj 131 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj->write_domain = I915_GEM_DOMAIN_CPU; obj 132 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj->read_domains = I915_GEM_DOMAIN_CPU; obj 133 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj->cache_level = I915_CACHE_NONE; obj 136 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c if (i915_gem_object_pin_pages(obj)) obj 139 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c i915_gem_object_unpin_pages(obj); obj 140 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c return obj; obj 143 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c i915_gem_object_put(obj); obj 229 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c struct drm_i915_gem_object *obj; obj 261 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj = fake_dma_object(i915, BIT_ULL(size)); obj 262 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c if (IS_ERR(obj)) { obj 267 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c GEM_BUG_ON(obj->base.size != BIT_ULL(size)); obj 269 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c if (i915_gem_object_pin_pages(obj)) { obj 270 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c i915_gem_object_put(obj); obj 292 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c mock_vma.pages = obj->mm.pages; obj 310 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c i915_gem_object_unpin_pages(obj); obj 311 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c i915_gem_object_put(obj); obj 324 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c struct drm_i915_gem_object *obj, *on; obj 327 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c list_for_each_entry_safe(obj, on, objects, st_link) { obj 330 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, vm, NULL); obj 337 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c list_del(&obj->st_link); obj 338 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c i915_gem_object_put(obj); obj 348 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c struct drm_i915_gem_object *obj; obj 376 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj = fake_dma_object(i915, full_size); obj 377 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c if (IS_ERR(obj)) obj 380 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c list_add(&obj->st_link, &objects); obj 390 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c list_for_each_entry(obj, &objects, st_link) { obj 391 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, vm, NULL); obj 396 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c if (offset < hole_start + obj->base.size) obj 398 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c offset -= obj->base.size; obj 420 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c if (offset + obj->base.size > hole_end) obj 422 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c offset += obj->base.size; obj 427 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c list_for_each_entry(obj, &objects, st_link) { obj 428 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, vm, NULL); obj 433 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c if (offset < 
hole_start + obj->base.size) obj 435 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c offset -= obj->base.size; obj 456 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c if (offset + obj->base.size > hole_end) obj 458 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c offset += obj->base.size; obj 463 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c list_for_each_entry_reverse(obj, &objects, st_link) { obj 464 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, vm, NULL); obj 469 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c if (offset < hole_start + obj->base.size) obj 471 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c offset -= obj->base.size; obj 493 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c if (offset + obj->base.size > hole_end) obj 495 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c offset += obj->base.size; obj 500 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c list_for_each_entry_reverse(obj, &objects, st_link) { obj 501 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, vm, NULL); obj 506 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c if (offset < hole_start + obj->base.size) obj 508 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c offset -= obj->base.size; obj 529 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c if (offset + obj->base.size > hole_end) obj 531 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c offset += obj->base.size; obj 572 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c struct drm_i915_gem_object *obj; obj 577 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj = fake_dma_object(i915, size << PAGE_SHIFT); obj 578 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c if (IS_ERR(obj)) obj 581 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, vm, NULL); obj 588 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c addr + obj->base.size < hole_end; obj 589 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c addr += obj->base.size) { obj 628 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c i915_gem_object_put(obj); obj 643 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c struct drm_i915_gem_object *obj; obj 653 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj = i915_gem_object_create_internal(i915, 2 * I915_GTT_PAGE_SIZE); obj 654 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c if (IS_ERR(obj)) obj 655 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c return PTR_ERR(obj); obj 657 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, vm, NULL); obj 710 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c i915_gem_object_put(obj); obj 729 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c struct drm_i915_gem_object *obj; obj 760 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj = fake_dma_object(i915, BIT_ULL(size)); obj 761 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c if (IS_ERR(obj)) { obj 766 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, vm, NULL); obj 813 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c i915_gem_object_put(obj); obj 829 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c struct drm_i915_gem_object *obj; obj 842 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj = fake_dma_object(i915, size); obj 843 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c if (IS_ERR(obj)) { obj 844 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c err = PTR_ERR(obj); obj 848 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c list_add(&obj->st_link, &objects); obj 850 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, vm, NULL); obj 1145 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c struct drm_i915_gem_object *obj; obj 1153 
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj = i915_gem_object_create_internal(i915, PAGE_SIZE); obj 1154 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c if (IS_ERR(obj)) { obj 1155 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c err = PTR_ERR(obj); obj 1159 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c err = i915_gem_object_pin_pages(obj); obj 1178 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c i915_gem_object_get_dma_address(obj, 0), obj 1222 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c i915_gem_object_unpin_pages(obj); obj 1224 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c i915_gem_object_put(obj); obj 1232 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c struct drm_i915_gem_object *obj = vma->obj; obj 1234 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c atomic_inc(&obj->bind_count); /* track for eviction later */ obj 1235 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c __i915_gem_object_pin_pages(obj); obj 1237 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma->pages = obj->mm.pages; obj 1296 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c struct drm_i915_gem_object *obj, *on; obj 1312 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj = i915_gem_object_create_internal(ggtt->vm.i915, obj 1314 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c if (IS_ERR(obj)) { obj 1315 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c err = PTR_ERR(obj); obj 1319 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c err = i915_gem_object_pin_pages(obj); obj 1321 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c i915_gem_object_put(obj); obj 1325 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c list_add(&obj->st_link, &objects); obj 1327 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, &ggtt->vm, NULL); obj 1334 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj->base.size, obj 1336 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj->cache_level, obj 1362 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj = i915_gem_object_create_internal(ggtt->vm.i915, obj 1364 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c if (IS_ERR(obj)) { obj 1365 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c err = PTR_ERR(obj); obj 1369 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c err = i915_gem_object_pin_pages(obj); obj 1371 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c i915_gem_object_put(obj); obj 1375 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c list_add(&obj->st_link, &objects); obj 1377 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, &ggtt->vm, NULL); obj 1384 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj->base.size, obj 1386 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj->cache_level, obj 1407 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c list_for_each_entry_safe(obj, on, &objects, st_link) { obj 1411 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, &ggtt->vm, NULL); obj 1428 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj->base.size, obj 1430 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj->cache_level, obj 1451 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c list_for_each_entry_safe(obj, on, &objects, st_link) { obj 1452 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c i915_gem_object_unpin_pages(obj); obj 1453 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c i915_gem_object_put(obj); obj 1461 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c struct drm_i915_gem_object *obj, *on; obj 1519 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj = i915_gem_object_create_internal(ggtt->vm.i915, obj 1521 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c if (IS_ERR(obj)) { obj 1522 
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c err = PTR_ERR(obj); obj 1526 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c err = i915_gem_object_pin_pages(obj); obj 1528 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c i915_gem_object_put(obj); obj 1532 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c list_add(&obj->st_link, &objects); obj 1534 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, &ggtt->vm, NULL); obj 1541 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj->base.size, 0, obj->cache_level, obj 1546 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c i915_gem_object_put(obj); obj 1560 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c list_for_each_entry(obj, &objects, st_link) { obj 1563 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, &ggtt->vm, NULL); obj 1579 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c list_for_each_entry_safe(obj, on, &objects, st_link) { obj 1583 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, &ggtt->vm, NULL); obj 1599 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj->base.size, 0, obj->cache_level, obj 1624 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj = i915_gem_object_create_internal(ggtt->vm.i915, obj 1626 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c if (IS_ERR(obj)) { obj 1627 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c err = PTR_ERR(obj); obj 1631 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c err = i915_gem_object_pin_pages(obj); obj 1633 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c i915_gem_object_put(obj); obj 1637 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c list_add(&obj->st_link, &objects); obj 1639 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, &ggtt->vm, NULL); obj 1646 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj->base.size, 0, obj->cache_level, obj 1660 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c list_for_each_entry_safe(obj, on, &objects, st_link) { obj 1661 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c i915_gem_object_unpin_pages(obj); obj 1662 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c i915_gem_object_put(obj); obj 618 drivers/gpu/drm/i915/selftests/i915_request.c struct drm_i915_gem_object *obj; obj 623 drivers/gpu/drm/i915/selftests/i915_request.c obj = i915_gem_object_create_internal(i915, PAGE_SIZE); obj 624 drivers/gpu/drm/i915/selftests/i915_request.c if (IS_ERR(obj)) obj 625 drivers/gpu/drm/i915/selftests/i915_request.c return ERR_CAST(obj); obj 627 drivers/gpu/drm/i915/selftests/i915_request.c cmd = i915_gem_object_pin_map(obj, I915_MAP_WB); obj 635 drivers/gpu/drm/i915/selftests/i915_request.c __i915_gem_object_flush_map(obj, 0, 64); obj 636 drivers/gpu/drm/i915/selftests/i915_request.c i915_gem_object_unpin_map(obj); obj 640 drivers/gpu/drm/i915/selftests/i915_request.c vma = i915_vma_instance(obj, &i915->ggtt.vm, NULL); obj 653 drivers/gpu/drm/i915/selftests/i915_request.c i915_gem_object_put(obj); obj 765 drivers/gpu/drm/i915/selftests/i915_request.c struct drm_i915_gem_object *obj; obj 771 drivers/gpu/drm/i915/selftests/i915_request.c obj = i915_gem_object_create_internal(i915, PAGE_SIZE); obj 772 drivers/gpu/drm/i915/selftests/i915_request.c if (IS_ERR(obj)) obj 773 drivers/gpu/drm/i915/selftests/i915_request.c return ERR_CAST(obj); obj 775 drivers/gpu/drm/i915/selftests/i915_request.c vma = i915_vma_instance(obj, vm, NULL); obj 785 drivers/gpu/drm/i915/selftests/i915_request.c cmd = i915_gem_object_pin_map(obj, I915_MAP_WC); obj 804 drivers/gpu/drm/i915/selftests/i915_request.c __i915_gem_object_flush_map(obj, 0, 
64); obj 805 drivers/gpu/drm/i915/selftests/i915_request.c i915_gem_object_unpin_map(obj); obj 812 drivers/gpu/drm/i915/selftests/i915_request.c i915_gem_object_put(obj); obj 820 drivers/gpu/drm/i915/selftests/i915_request.c cmd = i915_gem_object_pin_map(batch->obj, I915_MAP_WC); obj 827 drivers/gpu/drm/i915/selftests/i915_request.c i915_gem_object_unpin_map(batch->obj); obj 879 drivers/gpu/drm/i915/selftests/i915_request.c err = i915_request_await_object(request[id], batch->obj, 0); obj 997 drivers/gpu/drm/i915/selftests/i915_request.c err = i915_request_await_object(request[id], batch->obj, false); obj 1047 drivers/gpu/drm/i915/selftests/i915_request.c cmd = i915_gem_object_pin_map(request[id]->batch->obj, obj 1053 drivers/gpu/drm/i915/selftests/i915_request.c i915_gem_object_unpin_map(request[id]->batch->obj); obj 36 drivers/gpu/drm/i915/selftests/i915_vma.c struct drm_i915_gem_object *obj, obj 46 drivers/gpu/drm/i915/selftests/i915_vma.c if (vma->size != obj->base.size) { obj 48 drivers/gpu/drm/i915/selftests/i915_vma.c vma->size, obj->base.size); obj 62 drivers/gpu/drm/i915/selftests/i915_vma.c checked_vma_instance(struct drm_i915_gem_object *obj, obj 69 drivers/gpu/drm/i915/selftests/i915_vma.c vma = i915_vma_instance(obj, vm, view); obj 109 drivers/gpu/drm/i915/selftests/i915_vma.c struct drm_i915_gem_object *obj; obj 113 drivers/gpu/drm/i915/selftests/i915_vma.c list_for_each_entry(obj, objects, st_link) { obj 120 drivers/gpu/drm/i915/selftests/i915_vma.c vma = checked_vma_instance(obj, vm, NULL); obj 124 drivers/gpu/drm/i915/selftests/i915_vma.c if (!assert_vma(vma, obj, ctx)) { obj 149 drivers/gpu/drm/i915/selftests/i915_vma.c struct drm_i915_gem_object *obj, *on; obj 165 drivers/gpu/drm/i915/selftests/i915_vma.c obj = i915_gem_object_create_internal(i915, PAGE_SIZE); obj 166 drivers/gpu/drm/i915/selftests/i915_vma.c if (IS_ERR(obj)) obj 169 drivers/gpu/drm/i915/selftests/i915_vma.c list_add(&obj->st_link, &objects); obj 209 drivers/gpu/drm/i915/selftests/i915_vma.c list_for_each_entry_safe(obj, on, &objects, st_link) obj 210 drivers/gpu/drm/i915/selftests/i915_vma.c i915_gem_object_put(obj); obj 307 drivers/gpu/drm/i915/selftests/i915_vma.c struct drm_i915_gem_object *obj; obj 317 drivers/gpu/drm/i915/selftests/i915_vma.c obj = i915_gem_object_create_internal(ggtt->vm.i915, PAGE_SIZE); obj 318 drivers/gpu/drm/i915/selftests/i915_vma.c if (IS_ERR(obj)) obj 319 drivers/gpu/drm/i915/selftests/i915_vma.c return PTR_ERR(obj); obj 321 drivers/gpu/drm/i915/selftests/i915_vma.c vma = checked_vma_instance(obj, &ggtt->vm, NULL); obj 352 drivers/gpu/drm/i915/selftests/i915_vma.c i915_gem_object_put(obj); obj 366 drivers/gpu/drm/i915/selftests/i915_vma.c assert_rotated(struct drm_i915_gem_object *obj, obj 384 drivers/gpu/drm/i915/selftests/i915_vma.c src = i915_gem_object_get_dma_address(obj, src_idx); obj 416 drivers/gpu/drm/i915/selftests/i915_vma.c assert_remapped(struct drm_i915_gem_object *obj, obj 440 drivers/gpu/drm/i915/selftests/i915_vma.c src = i915_gem_object_get_dma_address(obj, src_idx); obj 477 drivers/gpu/drm/i915/selftests/i915_vma.c struct drm_i915_gem_object *obj; obj 508 drivers/gpu/drm/i915/selftests/i915_vma.c obj = i915_gem_object_create_internal(vm->i915, max_pages * PAGE_SIZE); obj 509 drivers/gpu/drm/i915/selftests/i915_vma.c if (IS_ERR(obj)) obj 532 drivers/gpu/drm/i915/selftests/i915_vma.c vma = checked_vma_instance(obj, vm, &view); obj 574 drivers/gpu/drm/i915/selftests/i915_vma.c if (vma->pages == obj->mm.pages) { obj 583 
drivers/gpu/drm/i915/selftests/i915_vma.c sg = assert_rotated(obj, &view.rotated, n, sg); obj 585 drivers/gpu/drm/i915/selftests/i915_vma.c sg = assert_remapped(obj, &view.remapped, n, sg); obj 613 drivers/gpu/drm/i915/selftests/i915_vma.c i915_gem_object_put(obj); obj 618 drivers/gpu/drm/i915/selftests/i915_vma.c static bool assert_partial(struct drm_i915_gem_object *obj, obj 634 drivers/gpu/drm/i915/selftests/i915_vma.c src = i915_gem_object_get_dma_address(obj, offset); obj 674 drivers/gpu/drm/i915/selftests/i915_vma.c if (vma->pages == vma->obj->mm.pages) { obj 686 drivers/gpu/drm/i915/selftests/i915_vma.c if (vma->pages != vma->obj->mm.pages) { obj 700 drivers/gpu/drm/i915/selftests/i915_vma.c struct drm_i915_gem_object *obj; obj 716 drivers/gpu/drm/i915/selftests/i915_vma.c obj = i915_gem_object_create_internal(vm->i915, npages * PAGE_SIZE); obj 717 drivers/gpu/drm/i915/selftests/i915_vma.c if (IS_ERR(obj)) obj 735 drivers/gpu/drm/i915/selftests/i915_vma.c vma = checked_vma_instance(obj, vm, &view); obj 752 drivers/gpu/drm/i915/selftests/i915_vma.c if (!assert_partial(obj, vma, offset, sz)) { obj 767 drivers/gpu/drm/i915/selftests/i915_vma.c list_for_each_entry(vma, &obj->vma.list, obj_link) obj 777 drivers/gpu/drm/i915/selftests/i915_vma.c vma = checked_vma_instance(obj, vm, NULL); obj 787 drivers/gpu/drm/i915/selftests/i915_vma.c if (!assert_pin(vma, NULL, obj->base.size, p->name)) { obj 796 drivers/gpu/drm/i915/selftests/i915_vma.c list_for_each_entry(vma, &obj->vma.list, obj_link) obj 806 drivers/gpu/drm/i915/selftests/i915_vma.c i915_gem_object_put(obj); obj 874 drivers/gpu/drm/i915/selftests/i915_vma.c struct drm_i915_gem_object *obj; obj 878 drivers/gpu/drm/i915/selftests/i915_vma.c obj = i915_gem_object_create_internal(i915, 10 * 10 * PAGE_SIZE); obj 879 drivers/gpu/drm/i915/selftests/i915_vma.c if (IS_ERR(obj)) obj 880 drivers/gpu/drm/i915/selftests/i915_vma.c return PTR_ERR(obj); obj 897 drivers/gpu/drm/i915/selftests/i915_vma.c i915_gem_object_lock(obj); obj 898 drivers/gpu/drm/i915/selftests/i915_vma.c err = i915_gem_object_set_to_gtt_domain(obj, true); obj 899 drivers/gpu/drm/i915/selftests/i915_vma.c i915_gem_object_unlock(obj); obj 903 drivers/gpu/drm/i915/selftests/i915_vma.c vma = i915_gem_object_ggtt_pin(obj, &view, 0, 0, PIN_MAPPABLE); obj 934 drivers/gpu/drm/i915/selftests/i915_vma.c vma = i915_gem_object_ggtt_pin(obj, NULL, 0, 0, PIN_MAPPABLE); obj 980 drivers/gpu/drm/i915/selftests/i915_vma.c i915_gem_object_put(obj); obj 29 drivers/gpu/drm/i915/selftests/igt_spinner.c spin->obj = i915_gem_object_create_internal(gt->i915, PAGE_SIZE); obj 30 drivers/gpu/drm/i915/selftests/igt_spinner.c if (IS_ERR(spin->obj)) { obj 31 drivers/gpu/drm/i915/selftests/igt_spinner.c err = PTR_ERR(spin->obj); obj 44 drivers/gpu/drm/i915/selftests/igt_spinner.c vaddr = i915_gem_object_pin_map(spin->obj, mode); obj 56 drivers/gpu/drm/i915/selftests/igt_spinner.c i915_gem_object_put(spin->obj); obj 81 drivers/gpu/drm/i915/selftests/igt_spinner.c err = i915_request_await_object(rq, vma->obj, obj 103 drivers/gpu/drm/i915/selftests/igt_spinner.c vma = i915_vma_instance(spin->obj, ce->vm, NULL); obj 188 drivers/gpu/drm/i915/selftests/igt_spinner.c i915_gem_object_unpin_map(spin->obj); obj 189 drivers/gpu/drm/i915/selftests/igt_spinner.c i915_gem_object_put(spin->obj); obj 22 drivers/gpu/drm/i915/selftests/igt_spinner.h struct drm_i915_gem_object *obj; obj 39 drivers/gpu/drm/lima/lima_gem.c void lima_gem_free_object(struct drm_gem_object *obj) obj 41 drivers/gpu/drm/lima/lima_gem.c struct 
lima_bo *bo = to_lima_bo(obj); obj 44 drivers/gpu/drm/lima/lima_gem.c dev_err(obj->dev->dev, "lima gem free bo still has va\n"); obj 49 drivers/gpu/drm/lima/lima_gem.c int lima_gem_object_open(struct drm_gem_object *obj, struct drm_file *file) obj 51 drivers/gpu/drm/lima/lima_gem.c struct lima_bo *bo = to_lima_bo(obj); obj 58 drivers/gpu/drm/lima/lima_gem.c void lima_gem_object_close(struct drm_gem_object *obj, struct drm_file *file) obj 60 drivers/gpu/drm/lima/lima_gem.c struct lima_bo *bo = to_lima_bo(obj); obj 69 drivers/gpu/drm/lima/lima_gem.c struct drm_gem_object *obj; obj 75 drivers/gpu/drm/lima/lima_gem.c obj = drm_gem_object_lookup(file, handle); obj 76 drivers/gpu/drm/lima/lima_gem.c if (!obj) obj 79 drivers/gpu/drm/lima/lima_gem.c bo = to_lima_bo(obj); obj 83 drivers/gpu/drm/lima/lima_gem.c err = drm_gem_create_mmap_offset(obj); obj 85 drivers/gpu/drm/lima/lima_gem.c *offset = drm_vma_node_offset_addr(&obj->vma_node); obj 87 drivers/gpu/drm/lima/lima_gem.c drm_gem_object_put_unlocked(obj); obj 94 drivers/gpu/drm/lima/lima_gem.c struct drm_gem_object *obj = vma->vm_private_data; obj 95 drivers/gpu/drm/lima/lima_gem.c struct lima_bo *bo = to_lima_bo(obj); obj 248 drivers/gpu/drm/lima/lima_gem.c struct drm_gem_object *obj; obj 251 drivers/gpu/drm/lima/lima_gem.c obj = drm_gem_object_lookup(file, submit->bos[i].handle); obj 252 drivers/gpu/drm/lima/lima_gem.c if (!obj) { obj 257 drivers/gpu/drm/lima/lima_gem.c bo = to_lima_bo(obj); obj 264 drivers/gpu/drm/lima/lima_gem.c drm_gem_object_put_unlocked(obj); obj 15 drivers/gpu/drm/lima/lima_gem.h void lima_gem_free_object(struct drm_gem_object *obj); obj 16 drivers/gpu/drm/lima/lima_gem.h int lima_gem_object_open(struct drm_gem_object *obj, struct drm_file *file); obj 17 drivers/gpu/drm/lima/lima_gem.h void lima_gem_object_close(struct drm_gem_object *obj, struct drm_file *file); obj 28 drivers/gpu/drm/lima/lima_gem_prime.c struct sg_table *lima_gem_prime_get_sg_table(struct drm_gem_object *obj) obj 30 drivers/gpu/drm/lima/lima_gem_prime.c struct lima_bo *bo = to_lima_bo(obj); obj 31 drivers/gpu/drm/lima/lima_gem_prime.c int npages = obj->size >> PAGE_SHIFT; obj 36 drivers/gpu/drm/lima/lima_gem_prime.c int lima_gem_prime_mmap(struct drm_gem_object *obj, struct vm_area_struct *vma) obj 40 drivers/gpu/drm/lima/lima_gem_prime.c ret = drm_gem_mmap_obj(obj, obj->size, vma); obj 10 drivers/gpu/drm/lima/lima_gem_prime.h struct sg_table *lima_gem_prime_get_sg_table(struct drm_gem_object *obj); obj 11 drivers/gpu/drm/lima/lima_gem_prime.h int lima_gem_prime_mmap(struct drm_gem_object *obj, struct vm_area_struct *vma); obj 24 drivers/gpu/drm/lima/lima_object.h to_lima_bo(struct drm_gem_object *obj) obj 26 drivers/gpu/drm/lima/lima_object.h return container_of(obj, struct lima_bo, gem); obj 26 drivers/gpu/drm/mediatek/mtk_drm_fb.c struct drm_gem_object *obj) obj 41 drivers/gpu/drm/mediatek/mtk_drm_fb.c fb->obj[0] = obj; obj 43 drivers/gpu/drm/mediatek/mtk_drm_gem.c struct drm_gem_object *obj; obj 50 drivers/gpu/drm/mediatek/mtk_drm_gem.c obj = &mtk_gem->base; obj 57 drivers/gpu/drm/mediatek/mtk_drm_gem.c mtk_gem->cookie = dma_alloc_attrs(priv->dma_dev, obj->size, obj 61 drivers/gpu/drm/mediatek/mtk_drm_gem.c DRM_ERROR("failed to allocate %zx byte dma buffer", obj->size); obj 76 drivers/gpu/drm/mediatek/mtk_drm_gem.c drm_gem_object_release(obj); obj 81 drivers/gpu/drm/mediatek/mtk_drm_gem.c void mtk_drm_gem_free_object(struct drm_gem_object *obj) obj 83 drivers/gpu/drm/mediatek/mtk_drm_gem.c struct mtk_drm_gem_obj *mtk_gem = to_mtk_gem_obj(obj); 
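The lima_gem.c hits above (obj 69 through obj 87) all trace one recurring DRM pattern: resolve a userspace handle to a GEM object, publish a fake mmap offset for it in the vma manager, then drop the lookup reference. Below is a minimal sketch of that pattern, assuming the v5.x-era GEM helpers named in these hits; my_gem_mmap_offset is an illustrative name, not part of the index.

#include <linux/types.h>
#include <drm/drm_file.h>
#include <drm/drm_gem.h>
#include <drm/drm_vma_manager.h>

/*
 * Handle -> mmap offset, as in the lima_gem.c hits above: look up the
 * GEM object (which takes a reference), make sure it has an offset in
 * the DRM vma manager, report that offset, drop the reference.
 */
static int my_gem_mmap_offset(struct drm_file *file, u32 handle, u64 *offset)
{
	struct drm_gem_object *obj;
	int err;

	obj = drm_gem_object_lookup(file, handle);
	if (!obj)
		return -ENOENT;

	err = drm_gem_create_mmap_offset(obj);	/* no-op if already created */
	if (!err)
		*offset = drm_vma_node_offset_addr(&obj->vma_node);

	drm_gem_object_put_unlocked(obj);	/* drop the lookup reference */
	return err;
}

Userspace then hands the returned offset to mmap(2) on the DRM fd; the driver's fault path (cf. the vma->vm_private_data hit at lima_gem.c obj 94) maps in the buffer's pages on demand.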
obj 84 drivers/gpu/drm/mediatek/mtk_drm_gem.c struct mtk_drm_private *priv = obj->dev->dev_private; obj 87 drivers/gpu/drm/mediatek/mtk_drm_gem.c drm_prime_gem_destroy(obj, mtk_gem->sg); obj 89 drivers/gpu/drm/mediatek/mtk_drm_gem.c dma_free_attrs(priv->dma_dev, obj->size, mtk_gem->cookie, obj 93 drivers/gpu/drm/mediatek/mtk_drm_gem.c drm_gem_object_release(obj); obj 129 drivers/gpu/drm/mediatek/mtk_drm_gem.c static int mtk_drm_gem_object_mmap(struct drm_gem_object *obj, obj 134 drivers/gpu/drm/mediatek/mtk_drm_gem.c struct mtk_drm_gem_obj *mtk_gem = to_mtk_gem_obj(obj); obj 135 drivers/gpu/drm/mediatek/mtk_drm_gem.c struct mtk_drm_private *priv = obj->dev->dev_private; obj 144 drivers/gpu/drm/mediatek/mtk_drm_gem.c mtk_gem->dma_addr, obj->size, mtk_gem->dma_attrs); obj 151 drivers/gpu/drm/mediatek/mtk_drm_gem.c int mtk_drm_gem_mmap_buf(struct drm_gem_object *obj, struct vm_area_struct *vma) obj 155 drivers/gpu/drm/mediatek/mtk_drm_gem.c ret = drm_gem_mmap_obj(obj, obj->size, vma); obj 159 drivers/gpu/drm/mediatek/mtk_drm_gem.c return mtk_drm_gem_object_mmap(obj, vma); obj 164 drivers/gpu/drm/mediatek/mtk_drm_gem.c struct drm_gem_object *obj; obj 171 drivers/gpu/drm/mediatek/mtk_drm_gem.c obj = vma->vm_private_data; obj 179 drivers/gpu/drm/mediatek/mtk_drm_gem.c return mtk_drm_gem_object_mmap(obj, vma); obj 188 drivers/gpu/drm/mediatek/mtk_drm_gem.c struct sg_table *mtk_gem_prime_get_sg_table(struct drm_gem_object *obj) obj 190 drivers/gpu/drm/mediatek/mtk_drm_gem.c struct mtk_drm_gem_obj *mtk_gem = to_mtk_gem_obj(obj); obj 191 drivers/gpu/drm/mediatek/mtk_drm_gem.c struct mtk_drm_private *priv = obj->dev->dev_private; obj 200 drivers/gpu/drm/mediatek/mtk_drm_gem.c mtk_gem->dma_addr, obj->size, obj 245 drivers/gpu/drm/mediatek/mtk_drm_gem.c void *mtk_drm_gem_prime_vmap(struct drm_gem_object *obj) obj 247 drivers/gpu/drm/mediatek/mtk_drm_gem.c struct mtk_drm_gem_obj *mtk_gem = to_mtk_gem_obj(obj); obj 256 drivers/gpu/drm/mediatek/mtk_drm_gem.c sgt = mtk_gem_prime_get_sg_table(obj); obj 260 drivers/gpu/drm/mediatek/mtk_drm_gem.c npages = obj->size >> PAGE_SHIFT; obj 279 drivers/gpu/drm/mediatek/mtk_drm_gem.c void mtk_drm_gem_prime_vunmap(struct drm_gem_object *obj, void *vaddr) obj 281 drivers/gpu/drm/mediatek/mtk_drm_gem.c struct mtk_drm_gem_obj *mtk_gem = to_mtk_gem_obj(obj); obj 43 drivers/gpu/drm/mediatek/mtk_drm_gem.h int mtk_drm_gem_mmap_buf(struct drm_gem_object *obj, obj 45 drivers/gpu/drm/mediatek/mtk_drm_gem.h struct sg_table *mtk_gem_prime_get_sg_table(struct drm_gem_object *obj); obj 48 drivers/gpu/drm/mediatek/mtk_drm_gem.h void *mtk_drm_gem_prime_vmap(struct drm_gem_object *obj); obj 49 drivers/gpu/drm/mediatek/mtk_drm_gem.h void mtk_drm_gem_prime_vunmap(struct drm_gem_object *obj, void *vaddr); obj 118 drivers/gpu/drm/mediatek/mtk_drm_plane.c gem = fb->obj[0]; obj 40 drivers/gpu/drm/mgag200/mgag200_cursor.c struct drm_gem_object *obj; obj 84 drivers/gpu/drm/mgag200/mgag200_cursor.c obj = drm_gem_object_lookup(file_priv, handle); obj 85 drivers/gpu/drm/mgag200/mgag200_cursor.c if (!obj) obj 87 drivers/gpu/drm/mgag200/mgag200_cursor.c gbo = drm_gem_vram_of_gem(obj); obj 218 drivers/gpu/drm/mgag200/mgag200_cursor.c drm_gem_object_put_unlocked(obj); obj 231 drivers/gpu/drm/mgag200/mgag200_cursor.c drm_gem_object_put_unlocked(obj); obj 868 drivers/gpu/drm/mgag200/mgag200_mode.c gbo = drm_gem_vram_of_gem(fb->obj[0]); obj 872 drivers/gpu/drm/mgag200/mgag200_mode.c gbo = drm_gem_vram_of_gem(crtc->primary->fb->obj[0]); obj 1408 drivers/gpu/drm/mgag200/mgag200_mode.c 
drm_gem_vram_of_gem(fb->obj[0]); obj 51 drivers/gpu/drm/msm/adreno/a5xx_gpu.c struct msm_gem_object *obj; obj 65 drivers/gpu/drm/msm/adreno/a5xx_gpu.c obj = submit->bos[submit->cmd[i].idx].obj; obj 68 drivers/gpu/drm/msm/adreno/a5xx_gpu.c ptr = msm_gem_get_vaddr(&obj->base); obj 88 drivers/gpu/drm/msm/adreno/a5xx_gpu.c msm_gem_put_vaddr(&obj->base); obj 85 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c struct a6xx_state_memobj *obj = obj 86 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c kzalloc((nr * objsize) + sizeof(*obj), GFP_KERNEL); obj 88 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c if (!obj) obj 91 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c list_add_tail(&obj->node, &a6xx_state->objs); obj 92 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c return &obj->data; obj 230 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c struct a6xx_gpu_state_obj *obj) obj 235 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c obj->data = state_kcalloc(a6xx_state, VBIF_DEBUGBUS_BLOCK_SIZE, obj 237 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c if (!obj->data) obj 240 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c obj->handle = NULL; obj 255 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c ptr = obj->data; obj 285 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c struct a6xx_gpu_state_obj *obj) obj 290 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c obj->data = state_kcalloc(a6xx_state, block->count, sizeof(u64)); obj 291 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c if (!obj->data) obj 294 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c obj->handle = block; obj 296 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c for (ptr = obj->data, i = 0; i < block->count; i++) obj 303 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c struct a6xx_gpu_state_obj *obj) obj 308 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c obj->data = state_kcalloc(a6xx_state, block->count, sizeof(u64)); obj 309 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c if (!obj->data) obj 312 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c obj->handle = block; obj 314 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c for (ptr = obj->data, i = 0; i < block->count; i++) obj 430 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c struct a6xx_gpu_state_obj *obj, obj 468 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c obj->handle = dbgahb; obj 469 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c obj->data = state_kmemdup(a6xx_state, dumper->ptr + A6XX_CD_DATA_OFFSET, obj 498 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c struct a6xx_gpu_state_obj *obj, obj 539 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c obj->handle = cluster; obj 540 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c obj->data = state_kmemdup(a6xx_state, dumper->ptr + A6XX_CD_DATA_OFFSET, obj 567 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c struct a6xx_gpu_state_obj *obj, obj 590 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c obj->handle = block; obj 591 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c obj->data = state_kmemdup(a6xx_state, dumper->ptr + A6XX_CD_DATA_OFFSET, obj 618 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c struct a6xx_gpu_state_obj *obj, obj 647 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c obj->handle = regs; obj 648 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c obj->data = state_kmemdup(a6xx_state, dumper->ptr + A6XX_CD_DATA_OFFSET, obj 656 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c struct a6xx_gpu_state_obj *obj, obj 685 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c obj->handle = regs; obj 686 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c obj->data = state_kmemdup(a6xx_state, dumper->ptr + A6XX_CD_DATA_OFFSET, obj 694 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c struct 
a6xx_gpu_state_obj *obj) obj 701 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c obj->handle = (const void *) regs; obj 702 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c obj->data = state_kcalloc(a6xx_state, regcount, sizeof(u32)); obj 703 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c if (!obj->data) obj 711 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c obj->data[index++] = gpu_read(gpu, obj 720 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c struct a6xx_gpu_state_obj *obj) obj 730 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c obj->handle = (const void *) regs; obj 731 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c obj->data = state_kcalloc(a6xx_state, regcount, sizeof(u32)); obj 732 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c if (!obj->data) obj 740 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c obj->data[index++] = gmu_read(gmu, obj 812 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c struct a6xx_gpu_state_obj *obj) obj 816 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c obj->handle = (const void *) indexed; obj 817 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c obj->data = state_kcalloc(a6xx_state, indexed->count, sizeof(u32)); obj 818 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c if (!obj->data) obj 826 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c obj->data[i] = gpu_read(gpu, indexed->data); obj 907 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c struct a6xx_state_memobj *obj, *tmp; obj 913 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c list_for_each_entry_safe(obj, tmp, &a6xx_state->objs, node) obj 914 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c kfree(obj); obj 983 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c static void a6xx_show_shader(struct a6xx_gpu_state_obj *obj, obj 986 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c const struct a6xx_shader_block *block = obj->handle; obj 989 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c if (!obj->handle) obj 998 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c if (!obj->data) obj 1002 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c obj->data + (block->size * i)); obj 1032 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c static void a6xx_show_dbgahb_cluster(struct a6xx_gpu_state_obj *obj, obj 1035 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c const struct a6xx_dbgahb_cluster *dbgahb = obj->handle; obj 1040 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c obj->data, p); obj 1044 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c static void a6xx_show_cluster(struct a6xx_gpu_state_obj *obj, obj 1047 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c const struct a6xx_cluster *cluster = obj->handle; obj 1052 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c obj->data, p); obj 1056 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c static void a6xx_show_indexed_regs(struct a6xx_gpu_state_obj *obj, obj 1059 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c const struct a6xx_indexed_registers *indexed = obj->handle; obj 1067 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c print_ascii85(p, indexed->count << 2, obj->data); obj 1092 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c struct a6xx_gpu_state_obj *obj = &a6xx_state->debugbus[i]; obj 1094 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c a6xx_show_debugbus_block(obj->handle, obj->data, p); obj 1098 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c struct a6xx_gpu_state_obj *obj = a6xx_state->vbif_debugbus; obj 1104 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c print_ascii85(p, VBIF_DEBUGBUS_BLOCK_SIZE << 2, obj->data); obj 1108 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c struct a6xx_gpu_state_obj *obj = &a6xx_state->cx_debugbus[i]; obj 1110 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c 
a6xx_show_debugbus_block(obj->handle, obj->data, p); obj 1128 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c struct a6xx_gpu_state_obj *obj = &a6xx_state->registers[i]; obj 1129 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c const struct a6xx_registers *regs = obj->handle; obj 1131 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c if (!obj->handle) obj 1134 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c a6xx_show_registers(regs->registers, obj->data, regs->count, p); obj 1139 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c struct a6xx_gpu_state_obj *obj = &a6xx_state->gmu_registers[i]; obj 1140 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c const struct a6xx_registers *regs = obj->handle; obj 1142 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c if (!obj->handle) obj 1145 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c a6xx_show_registers(regs->registers, obj->data, regs->count, p); obj 70 drivers/gpu/drm/msm/disp/mdp4/mdp4_plane.c struct drm_mode_object *obj) obj 105 drivers/gpu/drm/msm/disp/mdp5/mdp5_kms.c mdp5_global_duplicate_state(struct drm_private_obj *obj) obj 109 drivers/gpu/drm/msm/disp/mdp5/mdp5_kms.c state = kmemdup(obj->state, sizeof(*state), GFP_KERNEL); obj 113 drivers/gpu/drm/msm/disp/mdp5/mdp5_kms.c __drm_atomic_helper_private_obj_duplicate_state(obj, &state->base); obj 118 drivers/gpu/drm/msm/disp/mdp5/mdp5_kms.c static void mdp5_global_destroy_state(struct drm_private_obj *obj, obj 59 drivers/gpu/drm/msm/disp/mdp5/mdp5_plane.c struct drm_mode_object *obj) obj 739 drivers/gpu/drm/msm/msm_drv.c struct drm_gem_object *obj; obj 748 drivers/gpu/drm/msm/msm_drv.c obj = drm_gem_object_lookup(file, args->handle); obj 749 drivers/gpu/drm/msm/msm_drv.c if (!obj) obj 752 drivers/gpu/drm/msm/msm_drv.c ret = msm_gem_cpu_prep(obj, args->op, &timeout); obj 754 drivers/gpu/drm/msm/msm_drv.c drm_gem_object_put_unlocked(obj); obj 763 drivers/gpu/drm/msm/msm_drv.c struct drm_gem_object *obj; obj 766 drivers/gpu/drm/msm/msm_drv.c obj = drm_gem_object_lookup(file, args->handle); obj 767 drivers/gpu/drm/msm/msm_drv.c if (!obj) obj 770 drivers/gpu/drm/msm/msm_drv.c ret = msm_gem_cpu_fini(obj); obj 772 drivers/gpu/drm/msm/msm_drv.c drm_gem_object_put_unlocked(obj); obj 778 drivers/gpu/drm/msm/msm_drv.c struct drm_gem_object *obj, uint64_t *iova) obj 789 drivers/gpu/drm/msm/msm_drv.c return msm_gem_get_iova(obj, priv->gpu->aspace, iova); obj 796 drivers/gpu/drm/msm/msm_drv.c struct drm_gem_object *obj; obj 817 drivers/gpu/drm/msm/msm_drv.c obj = drm_gem_object_lookup(file, args->handle); obj 818 drivers/gpu/drm/msm/msm_drv.c if (!obj) obj 821 drivers/gpu/drm/msm/msm_drv.c msm_obj = to_msm_bo(obj); obj 825 drivers/gpu/drm/msm/msm_drv.c args->value = msm_gem_mmap_offset(obj); obj 828 drivers/gpu/drm/msm/msm_drv.c ret = msm_ioctl_gem_info_iova(dev, obj, &args->value); obj 864 drivers/gpu/drm/msm/msm_drv.c drm_gem_object_put_unlocked(obj); obj 902 drivers/gpu/drm/msm/msm_drv.c struct drm_gem_object *obj; obj 917 drivers/gpu/drm/msm/msm_drv.c obj = drm_gem_object_lookup(file, args->handle); obj 918 drivers/gpu/drm/msm/msm_drv.c if (!obj) { obj 923 drivers/gpu/drm/msm/msm_drv.c ret = msm_gem_madvise(obj, args->madv); obj 929 drivers/gpu/drm/msm/msm_drv.c drm_gem_object_put(obj); obj 269 drivers/gpu/drm/msm/msm_drv.h int msm_gem_mmap_obj(struct drm_gem_object *obj, obj 273 drivers/gpu/drm/msm/msm_drv.h uint64_t msm_gem_mmap_offset(struct drm_gem_object *obj); obj 274 drivers/gpu/drm/msm/msm_drv.h int msm_gem_get_iova(struct drm_gem_object *obj, obj 276 drivers/gpu/drm/msm/msm_drv.h int msm_gem_get_and_pin_iova(struct drm_gem_object 
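
The a6xx_gpu_state.c entries above all follow one capture idiom: allocate obj->data through state_kcalloc() so the buffer lands on a6xx_state->objs (and is freed in one pass by the list walk at obj 907-914), stash the static block descriptor in obj->handle, then loop reading registers into the buffer. A minimal sketch of that shape, assuming a hypothetical struct a6xx_block with count/registers fields standing in for the real descriptors (a6xx_debugbus_block, a6xx_shader_block, and friends):

static int a6xx_capture_block(struct msm_gpu *gpu,
                              struct a6xx_gpu_state *a6xx_state,
                              const struct a6xx_block *block, /* hypothetical */
                              struct a6xx_gpu_state_obj *obj)
{
        u32 *ptr;
        int i;

        /* state_kcalloc() tracks the allocation on a6xx_state->objs,
         * so the whole snapshot can be freed together later. */
        obj->data = state_kcalloc(a6xx_state, block->count, sizeof(u32));
        if (!obj->data)
                return -ENOMEM;

        /* The handle points at static descriptor data; never freed. */
        obj->handle = block;

        for (ptr = obj->data, i = 0; i < block->count; i++)
                ptr[i] = gpu_read(gpu, block->registers[i]);

        return 0;
}
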
*obj, obj 278 drivers/gpu/drm/msm/msm_drv.h uint64_t msm_gem_iova(struct drm_gem_object *obj, obj 280 drivers/gpu/drm/msm/msm_drv.h void msm_gem_unpin_iova(struct drm_gem_object *obj, obj 282 drivers/gpu/drm/msm/msm_drv.h struct page **msm_gem_get_pages(struct drm_gem_object *obj); obj 283 drivers/gpu/drm/msm/msm_drv.h void msm_gem_put_pages(struct drm_gem_object *obj); obj 288 drivers/gpu/drm/msm/msm_drv.h struct sg_table *msm_gem_prime_get_sg_table(struct drm_gem_object *obj); obj 289 drivers/gpu/drm/msm/msm_drv.h void *msm_gem_prime_vmap(struct drm_gem_object *obj); obj 290 drivers/gpu/drm/msm/msm_drv.h void msm_gem_prime_vunmap(struct drm_gem_object *obj, void *vaddr); obj 291 drivers/gpu/drm/msm/msm_drv.h int msm_gem_prime_mmap(struct drm_gem_object *obj, struct vm_area_struct *vma); obj 294 drivers/gpu/drm/msm/msm_drv.h int msm_gem_prime_pin(struct drm_gem_object *obj); obj 295 drivers/gpu/drm/msm/msm_drv.h void msm_gem_prime_unpin(struct drm_gem_object *obj); obj 296 drivers/gpu/drm/msm/msm_drv.h void *msm_gem_get_vaddr(struct drm_gem_object *obj); obj 297 drivers/gpu/drm/msm/msm_drv.h void *msm_gem_get_vaddr_active(struct drm_gem_object *obj); obj 298 drivers/gpu/drm/msm/msm_drv.h void msm_gem_put_vaddr(struct drm_gem_object *obj); obj 299 drivers/gpu/drm/msm/msm_drv.h int msm_gem_madvise(struct drm_gem_object *obj, unsigned madv); obj 300 drivers/gpu/drm/msm/msm_drv.h int msm_gem_sync_object(struct drm_gem_object *obj, obj 302 drivers/gpu/drm/msm/msm_drv.h void msm_gem_move_to_active(struct drm_gem_object *obj, obj 304 drivers/gpu/drm/msm/msm_drv.h void msm_gem_move_to_inactive(struct drm_gem_object *obj); obj 305 drivers/gpu/drm/msm/msm_drv.h int msm_gem_cpu_prep(struct drm_gem_object *obj, uint32_t op, ktime_t *timeout); obj 306 drivers/gpu/drm/msm/msm_drv.h int msm_gem_cpu_fini(struct drm_gem_object *obj); obj 307 drivers/gpu/drm/msm/msm_drv.h void msm_gem_free_object(struct drm_gem_object *obj); obj 384 drivers/gpu/drm/msm/msm_drv.h void msm_gem_describe(struct drm_gem_object *obj, struct seq_file *m); obj 45 drivers/gpu/drm/msm/msm_fb.c msm_gem_describe(fb->obj[i], m); obj 62 drivers/gpu/drm/msm/msm_fb.c ret = msm_gem_get_and_pin_iova(fb->obj[i], aspace, &iova); obj 77 drivers/gpu/drm/msm/msm_fb.c msm_gem_unpin_iova(fb->obj[i], aspace); obj 83 drivers/gpu/drm/msm/msm_fb.c if (!fb->obj[plane]) obj 85 drivers/gpu/drm/msm/msm_fb.c return msm_gem_iova(fb->obj[plane], aspace) + fb->offsets[plane]; obj 166 drivers/gpu/drm/msm/msm_fb.c if (n > ARRAY_SIZE(fb->obj)) { obj 185 drivers/gpu/drm/msm/msm_fb.c msm_fb->base.obj[i] = bos[i]; obj 14 drivers/gpu/drm/msm/msm_fbdev.c extern int msm_gem_mmap_obj(struct drm_gem_object *obj, obj 20 drivers/gpu/drm/msm/msm_gem.c static void msm_gem_vunmap_locked(struct drm_gem_object *obj); obj 23 drivers/gpu/drm/msm/msm_gem.c static dma_addr_t physaddr(struct drm_gem_object *obj) obj 25 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj); obj 26 drivers/gpu/drm/msm/msm_gem.c struct msm_drm_private *priv = obj->dev->dev_private; obj 31 drivers/gpu/drm/msm/msm_gem.c static bool use_pages(struct drm_gem_object *obj) obj 33 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj); obj 78 drivers/gpu/drm/msm/msm_gem.c static struct page **get_pages_vram(struct drm_gem_object *obj, int npages) obj 80 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj); obj 81 drivers/gpu/drm/msm/msm_gem.c struct msm_drm_private *priv = obj->dev->dev_private; obj 98 drivers/gpu/drm/msm/msm_gem.c paddr 
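
Taken together, the msm_drv.h declarations above outline the buffer-object lifecycle: pin an iova for the GPU, map a kernel vaddr for the CPU, and release both when done. A hedged sketch of a typical consumer (msm_fill_bo() is a hypothetical helper, not a driver function):

static int msm_fill_bo(struct drm_gem_object *obj,
                       struct msm_gem_address_space *aspace,
                       uint64_t *iova)
{
        void *vaddr;
        int ret;

        /* Reserve an address in this address space and pin the pages. */
        ret = msm_gem_get_and_pin_iova(obj, aspace, iova);
        if (ret)
                return ret;

        /* Map for the CPU; pairs with msm_gem_put_vaddr() below. */
        vaddr = msm_gem_get_vaddr(obj);
        if (IS_ERR(vaddr)) {
                msm_gem_unpin_iova(obj, aspace);
                return PTR_ERR(vaddr);
        }

        memset(vaddr, 0, obj->size);
        msm_gem_put_vaddr(obj);

        return 0;
}
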
= physaddr(obj);
obj 107 drivers/gpu/drm/msm/msm_gem.c static struct page **get_pages(struct drm_gem_object *obj)
obj 109 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj);
obj 112 drivers/gpu/drm/msm/msm_gem.c struct drm_device *dev = obj->dev;
obj 114 drivers/gpu/drm/msm/msm_gem.c int npages = obj->size >> PAGE_SHIFT;
obj 116 drivers/gpu/drm/msm/msm_gem.c if (use_pages(obj))
obj 117 drivers/gpu/drm/msm/msm_gem.c p = drm_gem_get_pages(obj);
obj 119 drivers/gpu/drm/msm/msm_gem.c p = get_pages_vram(obj, npages);
obj 148 drivers/gpu/drm/msm/msm_gem.c static void put_pages_vram(struct drm_gem_object *obj)
obj 150 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj);
obj 151 drivers/gpu/drm/msm/msm_gem.c struct msm_drm_private *priv = obj->dev->dev_private;
obj 160 drivers/gpu/drm/msm/msm_gem.c static void put_pages(struct drm_gem_object *obj)
obj 162 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj);
obj 177 drivers/gpu/drm/msm/msm_gem.c if (use_pages(obj))
obj 178 drivers/gpu/drm/msm/msm_gem.c drm_gem_put_pages(obj, msm_obj->pages, true, false);
obj 180 drivers/gpu/drm/msm/msm_gem.c put_pages_vram(obj);
obj 186 drivers/gpu/drm/msm/msm_gem.c struct page **msm_gem_get_pages(struct drm_gem_object *obj)
obj 188 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj);
obj 198 drivers/gpu/drm/msm/msm_gem.c p = get_pages(obj);
obj 203 drivers/gpu/drm/msm/msm_gem.c void msm_gem_put_pages(struct drm_gem_object *obj)
obj 208 drivers/gpu/drm/msm/msm_gem.c int msm_gem_mmap_obj(struct drm_gem_object *obj,
obj 211 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj);
obj 227 drivers/gpu/drm/msm/msm_gem.c get_file(obj->filp);
obj 229 drivers/gpu/drm/msm/msm_gem.c vma->vm_file = obj->filp;
obj 253 drivers/gpu/drm/msm/msm_gem.c struct drm_gem_object *obj = vma->vm_private_data;
obj 254 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj);
obj 277 drivers/gpu/drm/msm/msm_gem.c pages = get_pages(obj);
obj 299 drivers/gpu/drm/msm/msm_gem.c static uint64_t mmap_offset(struct drm_gem_object *obj)
obj 301 drivers/gpu/drm/msm/msm_gem.c struct drm_device *dev = obj->dev;
obj 302 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj);
obj 308 drivers/gpu/drm/msm/msm_gem.c ret = drm_gem_create_mmap_offset(obj);
obj 315 drivers/gpu/drm/msm/msm_gem.c return drm_vma_node_offset_addr(&obj->vma_node);
obj 318 drivers/gpu/drm/msm/msm_gem.c uint64_t msm_gem_mmap_offset(struct drm_gem_object *obj)
obj 321 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj);
obj 324 drivers/gpu/drm/msm/msm_gem.c offset = mmap_offset(obj);
obj 329 drivers/gpu/drm/msm/msm_gem.c static struct msm_gem_vma *add_vma(struct drm_gem_object *obj,
obj 332 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj);
obj 348 drivers/gpu/drm/msm/msm_gem.c static struct msm_gem_vma *lookup_vma(struct drm_gem_object *obj,
obj 351 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj);
obj 375 drivers/gpu/drm/msm/msm_gem.c put_iova(struct drm_gem_object *obj)
obj 377 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj);
obj 391 drivers/gpu/drm/msm/msm_gem.c static int msm_gem_get_iova_locked(struct drm_gem_object *obj,
obj 394 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj);
obj 400 drivers/gpu/drm/msm/msm_gem.c vma = lookup_vma(obj, aspace);
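
The mmap_offset()/msm_gem_mmap_offset() entries just above show the standard DRM idiom: lazily create the object's offset node, then report its address for userspace to pass to mmap() on the DRM fd. A condensed sketch under that reading (locking elided; example_mmap_offset() is a hypothetical name):

static uint64_t example_mmap_offset(struct drm_gem_object *obj)
{
        int ret;

        /* Idempotent: allocates obj->vma_node in the device's
         * offset manager the first time it is called. */
        ret = drm_gem_create_mmap_offset(obj);
        if (ret) {
                DRM_DEV_ERROR(obj->dev->dev, "could not allocate mmap offset\n");
                return 0;
        }

        /* Userspace hands this value back as the offset argument
         * to mmap() on the DRM device fd. */
        return drm_vma_node_offset_addr(&obj->vma_node);
}

The dumb-buffer path at obj 534-546 is the usual caller: look up the handle, fetch the offset, put the reference.
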
obj 403 drivers/gpu/drm/msm/msm_gem.c vma = add_vma(obj, aspace);
obj 407 drivers/gpu/drm/msm/msm_gem.c ret = msm_gem_init_vma(aspace, vma, obj->size >> PAGE_SHIFT);
obj 418 drivers/gpu/drm/msm/msm_gem.c static int msm_gem_pin_iova(struct drm_gem_object *obj,
obj 421 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj);
obj 434 drivers/gpu/drm/msm/msm_gem.c vma = lookup_vma(obj, aspace);
obj 438 drivers/gpu/drm/msm/msm_gem.c pages = get_pages(obj);
obj 443 drivers/gpu/drm/msm/msm_gem.c msm_obj->sgt, obj->size >> PAGE_SHIFT);
obj 447 drivers/gpu/drm/msm/msm_gem.c int msm_gem_get_and_pin_iova(struct drm_gem_object *obj,
obj 450 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj);
obj 456 drivers/gpu/drm/msm/msm_gem.c ret = msm_gem_get_iova_locked(obj, aspace, &local);
obj 459 drivers/gpu/drm/msm/msm_gem.c ret = msm_gem_pin_iova(obj, aspace);
obj 472 drivers/gpu/drm/msm/msm_gem.c int msm_gem_get_iova(struct drm_gem_object *obj,
obj 475 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj);
obj 479 drivers/gpu/drm/msm/msm_gem.c ret = msm_gem_get_iova_locked(obj, aspace, iova);
obj 488 drivers/gpu/drm/msm/msm_gem.c uint64_t msm_gem_iova(struct drm_gem_object *obj,
obj 491 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj);
obj 495 drivers/gpu/drm/msm/msm_gem.c vma = lookup_vma(obj, aspace);
obj 507 drivers/gpu/drm/msm/msm_gem.c void msm_gem_unpin_iova(struct drm_gem_object *obj,
obj 510 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj);
obj 514 drivers/gpu/drm/msm/msm_gem.c vma = lookup_vma(obj, aspace);
obj 534 drivers/gpu/drm/msm/msm_gem.c struct drm_gem_object *obj;
obj 538 drivers/gpu/drm/msm/msm_gem.c obj = drm_gem_object_lookup(file, handle);
obj 539 drivers/gpu/drm/msm/msm_gem.c if (obj == NULL) {
obj 544 drivers/gpu/drm/msm/msm_gem.c *offset = msm_gem_mmap_offset(obj);
obj 546 drivers/gpu/drm/msm/msm_gem.c drm_gem_object_put_unlocked(obj);
obj 552 drivers/gpu/drm/msm/msm_gem.c static void *get_vaddr(struct drm_gem_object *obj, unsigned madv)
obj 554 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj);
obj 560 drivers/gpu/drm/msm/msm_gem.c DRM_DEV_ERROR(obj->dev->dev, "Invalid madv state: %u vs %u\n",
obj 575 drivers/gpu/drm/msm/msm_gem.c struct page **pages = get_pages(obj);
obj 580 drivers/gpu/drm/msm/msm_gem.c msm_obj->vaddr = vmap(pages, obj->size >> PAGE_SHIFT,
obj 597 drivers/gpu/drm/msm/msm_gem.c void *msm_gem_get_vaddr(struct drm_gem_object *obj)
obj 599 drivers/gpu/drm/msm/msm_gem.c return get_vaddr(obj, MSM_MADV_WILLNEED);
obj 608 drivers/gpu/drm/msm/msm_gem.c void *msm_gem_get_vaddr_active(struct drm_gem_object *obj)
obj 610 drivers/gpu/drm/msm/msm_gem.c return get_vaddr(obj, __MSM_MADV_PURGED);
obj 613 drivers/gpu/drm/msm/msm_gem.c void msm_gem_put_vaddr(struct drm_gem_object *obj)
obj 615 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj);
obj 626 drivers/gpu/drm/msm/msm_gem.c int msm_gem_madvise(struct drm_gem_object *obj, unsigned madv)
obj 628 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj);
obj 632 drivers/gpu/drm/msm/msm_gem.c WARN_ON(!mutex_is_locked(&obj->dev->struct_mutex));
obj 644 drivers/gpu/drm/msm/msm_gem.c void msm_gem_purge(struct drm_gem_object *obj, enum msm_gem_lock subclass)
obj 646 drivers/gpu/drm/msm/msm_gem.c struct drm_device *dev = obj->dev;
obj 647 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj);
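
The get_vaddr() entries above (obj 552-580) show a refcounted, lazily built kernel mapping: the first caller vmap()s the page array write-combined, and later callers share it. A simplified sketch, assuming the driver's vmap_count bookkeeping and omitting the madvise sanity check:

static void *example_get_vaddr(struct drm_gem_object *obj)
{
        struct msm_gem_object *msm_obj = to_msm_bo(obj);

        if (!msm_obj->vaddr) {
                struct page **pages = get_pages(obj);

                if (IS_ERR(pages))
                        return ERR_CAST(pages);

                /* One shared write-combined mapping of the whole BO. */
                msm_obj->vaddr = vmap(pages, obj->size >> PAGE_SHIFT,
                                      VM_MAP, pgprot_writecombine(PAGE_KERNEL));
                if (!msm_obj->vaddr)
                        return ERR_PTR(-ENOMEM);
        }

        msm_obj->vmap_count++;  /* dropped again by msm_gem_put_vaddr() */
        return msm_obj->vaddr;
}
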
obj 651 drivers/gpu/drm/msm/msm_gem.c WARN_ON(obj->import_attach);
obj 655 drivers/gpu/drm/msm/msm_gem.c put_iova(obj);
obj 657 drivers/gpu/drm/msm/msm_gem.c msm_gem_vunmap_locked(obj);
obj 659 drivers/gpu/drm/msm/msm_gem.c put_pages(obj);
obj 663 drivers/gpu/drm/msm/msm_gem.c drm_vma_node_unmap(&obj->vma_node, dev->anon_inode->i_mapping);
obj 664 drivers/gpu/drm/msm/msm_gem.c drm_gem_free_mmap_offset(obj);
obj 671 drivers/gpu/drm/msm/msm_gem.c shmem_truncate_range(file_inode(obj->filp), 0, (loff_t)-1);
obj 673 drivers/gpu/drm/msm/msm_gem.c invalidate_mapping_pages(file_inode(obj->filp)->i_mapping,
obj 679 drivers/gpu/drm/msm/msm_gem.c static void msm_gem_vunmap_locked(struct drm_gem_object *obj)
obj 681 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj);
obj 692 drivers/gpu/drm/msm/msm_gem.c void msm_gem_vunmap(struct drm_gem_object *obj, enum msm_gem_lock subclass)
obj 694 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj);
obj 697 drivers/gpu/drm/msm/msm_gem.c msm_gem_vunmap_locked(obj);
obj 702 drivers/gpu/drm/msm/msm_gem.c int msm_gem_sync_object(struct drm_gem_object *obj,
obj 709 drivers/gpu/drm/msm/msm_gem.c fobj = dma_resv_get_list(obj->resv);
obj 711 drivers/gpu/drm/msm/msm_gem.c fence = dma_resv_get_excl(obj->resv);
obj 725 drivers/gpu/drm/msm/msm_gem.c dma_resv_held(obj->resv));
obj 736 drivers/gpu/drm/msm/msm_gem.c void msm_gem_move_to_active(struct drm_gem_object *obj,
obj 739 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj);
obj 743 drivers/gpu/drm/msm/msm_gem.c dma_resv_add_excl_fence(obj->resv, fence);
obj 745 drivers/gpu/drm/msm/msm_gem.c dma_resv_add_shared_fence(obj->resv, fence);
obj 750 drivers/gpu/drm/msm/msm_gem.c void msm_gem_move_to_inactive(struct drm_gem_object *obj)
obj 752 drivers/gpu/drm/msm/msm_gem.c struct drm_device *dev = obj->dev;
obj 754 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj);
obj 763 drivers/gpu/drm/msm/msm_gem.c int msm_gem_cpu_prep(struct drm_gem_object *obj, uint32_t op, ktime_t *timeout)
obj 770 drivers/gpu/drm/msm/msm_gem.c ret = dma_resv_wait_timeout_rcu(obj->resv, write,
obj 782 drivers/gpu/drm/msm/msm_gem.c int msm_gem_cpu_fini(struct drm_gem_object *obj)
obj 799 drivers/gpu/drm/msm/msm_gem.c void msm_gem_describe(struct drm_gem_object *obj, struct seq_file *m)
obj 801 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj);
obj 802 drivers/gpu/drm/msm/msm_gem.c struct dma_resv *robj = obj->resv;
obj 806 drivers/gpu/drm/msm/msm_gem.c uint64_t off = drm_vma_node_start(&obj->vma_node);
obj 826 drivers/gpu/drm/msm/msm_gem.c obj->name, kref_read(&obj->refcount),
obj 829 drivers/gpu/drm/msm/msm_gem.c seq_printf(m, " %08zu %9s %-32s\n", obj->size, madv, msm_obj->name);
obj 871 drivers/gpu/drm/msm/msm_gem.c struct drm_gem_object *obj = &msm_obj->base;
obj 873 drivers/gpu/drm/msm/msm_gem.c msm_gem_describe(obj, m);
obj 875 drivers/gpu/drm/msm/msm_gem.c size += obj->size;
obj 883 drivers/gpu/drm/msm/msm_gem.c void msm_gem_free_object(struct drm_gem_object *obj)
obj 885 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj);
obj 886 drivers/gpu/drm/msm/msm_gem.c struct drm_device *dev = obj->dev;
obj 895 drivers/gpu/drm/msm/msm_gem.c struct drm_gem_object *obj = &msm_obj->base;
obj 896 drivers/gpu/drm/msm/msm_gem.c struct drm_device *dev = obj->dev;
obj 907 drivers/gpu/drm/msm/msm_gem.c put_iova(obj);
obj 909 drivers/gpu/drm/msm/msm_gem.c if (obj->import_attach) {
obj 911
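
The msm_gem_purge() entries above (obj 644-673) spell out the teardown order for a purged BO. Condensed into one hedged sketch (struct_mutex handling and madvise bookkeeping omitted; example_purge() is a hypothetical name):

static void example_purge(struct drm_gem_object *obj)
{
        struct drm_device *dev = obj->dev;

        put_iova(obj);                  /* drop GPU address-space mappings */
        msm_gem_vunmap_locked(obj);     /* drop any kernel vmap */
        put_pages(obj);                 /* release the page array */

        /* Shoot down CPU mmaps and the mmap offset node... */
        drm_vma_node_unmap(&obj->vma_node, dev->anon_inode->i_mapping);
        drm_gem_free_mmap_offset(obj);

        /* ...then release the shmem backing store itself. */
        shmem_truncate_range(file_inode(obj->filp), 0, (loff_t)-1);
        invalidate_mapping_pages(file_inode(obj->filp)->i_mapping,
                                 0, (loff_t)-1);
}
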
drivers/gpu/drm/msm/msm_gem.c dma_buf_vunmap(obj->import_attach->dmabuf, msm_obj->vaddr); obj 919 drivers/gpu/drm/msm/msm_gem.c drm_prime_gem_destroy(obj, msm_obj->sgt); obj 921 drivers/gpu/drm/msm/msm_gem.c msm_gem_vunmap_locked(obj); obj 922 drivers/gpu/drm/msm/msm_gem.c put_pages(obj); obj 925 drivers/gpu/drm/msm/msm_gem.c drm_gem_object_release(obj); obj 959 drivers/gpu/drm/msm/msm_gem.c struct drm_gem_object *obj; obj 962 drivers/gpu/drm/msm/msm_gem.c obj = msm_gem_new(dev, size, flags); obj 964 drivers/gpu/drm/msm/msm_gem.c if (IS_ERR(obj)) obj 965 drivers/gpu/drm/msm/msm_gem.c return PTR_ERR(obj); obj 968 drivers/gpu/drm/msm/msm_gem.c msm_gem_object_set_name(obj, "%s", name); obj 970 drivers/gpu/drm/msm/msm_gem.c ret = drm_gem_handle_create(file, obj, handle); obj 973 drivers/gpu/drm/msm/msm_gem.c drm_gem_object_put_unlocked(obj); obj 980 drivers/gpu/drm/msm/msm_gem.c struct drm_gem_object **obj, obj 1018 drivers/gpu/drm/msm/msm_gem.c *obj = &msm_obj->base; obj 1027 drivers/gpu/drm/msm/msm_gem.c struct drm_gem_object *obj = NULL; obj 1047 drivers/gpu/drm/msm/msm_gem.c ret = msm_gem_new_impl(dev, size, flags, &obj, struct_mutex_locked); obj 1054 drivers/gpu/drm/msm/msm_gem.c struct msm_gem_object *msm_obj = to_msm_bo(obj); obj 1058 drivers/gpu/drm/msm/msm_gem.c vma = add_vma(obj, NULL); obj 1065 drivers/gpu/drm/msm/msm_gem.c to_msm_bo(obj)->vram_node = &vma->node; obj 1067 drivers/gpu/drm/msm/msm_gem.c drm_gem_private_object_init(dev, obj, size); obj 1069 drivers/gpu/drm/msm/msm_gem.c pages = get_pages(obj); obj 1075 drivers/gpu/drm/msm/msm_gem.c vma->iova = physaddr(obj); obj 1077 drivers/gpu/drm/msm/msm_gem.c ret = drm_gem_object_init(dev, obj, size); obj 1086 drivers/gpu/drm/msm/msm_gem.c mapping_set_gfp_mask(obj->filp->f_mapping, GFP_HIGHUSER); obj 1089 drivers/gpu/drm/msm/msm_gem.c return obj; obj 1092 drivers/gpu/drm/msm/msm_gem.c drm_gem_object_put_unlocked(obj); obj 1112 drivers/gpu/drm/msm/msm_gem.c struct drm_gem_object *obj; obj 1124 drivers/gpu/drm/msm/msm_gem.c ret = msm_gem_new_impl(dev, size, MSM_BO_WC, &obj, false); obj 1128 drivers/gpu/drm/msm/msm_gem.c drm_gem_private_object_init(dev, obj, size); obj 1132 drivers/gpu/drm/msm/msm_gem.c msm_obj = to_msm_bo(obj); obj 1149 drivers/gpu/drm/msm/msm_gem.c return obj; obj 1152 drivers/gpu/drm/msm/msm_gem.c drm_gem_object_put_unlocked(obj); obj 1161 drivers/gpu/drm/msm/msm_gem.c struct drm_gem_object *obj = _msm_gem_new(dev, size, flags, locked); obj 1164 drivers/gpu/drm/msm/msm_gem.c if (IS_ERR(obj)) obj 1165 drivers/gpu/drm/msm/msm_gem.c return ERR_CAST(obj); obj 1168 drivers/gpu/drm/msm/msm_gem.c ret = msm_gem_get_and_pin_iova(obj, aspace, iova); obj 1173 drivers/gpu/drm/msm/msm_gem.c vaddr = msm_gem_get_vaddr(obj); obj 1175 drivers/gpu/drm/msm/msm_gem.c msm_gem_unpin_iova(obj, aspace); obj 1181 drivers/gpu/drm/msm/msm_gem.c *bo = obj; obj 1186 drivers/gpu/drm/msm/msm_gem.c drm_gem_object_put(obj); obj 1188 drivers/gpu/drm/msm/msm_gem.c drm_gem_object_put_unlocked(obj); obj 121 drivers/gpu/drm/msm/msm_gem.h void msm_gem_purge(struct drm_gem_object *obj, enum msm_gem_lock subclass); obj 122 drivers/gpu/drm/msm/msm_gem.h void msm_gem_vunmap(struct drm_gem_object *obj, enum msm_gem_lock subclass); obj 156 drivers/gpu/drm/msm/msm_gem.h struct msm_gem_object *obj; obj 14 drivers/gpu/drm/msm/msm_gem_prime.c struct sg_table *msm_gem_prime_get_sg_table(struct drm_gem_object *obj) obj 16 drivers/gpu/drm/msm/msm_gem_prime.c struct msm_gem_object *msm_obj = to_msm_bo(obj); obj 17 drivers/gpu/drm/msm/msm_gem_prime.c int npages = 
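
The msm_gem_new_handle() entries above (obj 959-973) show the create-and-publish idiom: once drm_gem_handle_create() has run, the handle owns its own reference, so the local one is dropped unconditionally. A condensed sketch (example_new_handle() is a hypothetical name):

static int example_new_handle(struct drm_device *dev, struct drm_file *file,
                              uint32_t size, uint32_t flags, uint32_t *handle,
                              const char *name)
{
        struct drm_gem_object *obj;
        int ret;

        obj = msm_gem_new(dev, size, flags);
        if (IS_ERR(obj))
                return PTR_ERR(obj);

        if (name)
                msm_gem_object_set_name(obj, "%s", name);

        ret = drm_gem_handle_create(file, obj, handle);

        /* On success the handle holds a reference of its own; either
         * way the local reference is no longer needed. */
        drm_gem_object_put_unlocked(obj);

        return ret;
}
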
obj->size >> PAGE_SHIFT; obj 25 drivers/gpu/drm/msm/msm_gem_prime.c void *msm_gem_prime_vmap(struct drm_gem_object *obj) obj 27 drivers/gpu/drm/msm/msm_gem_prime.c return msm_gem_get_vaddr(obj); obj 30 drivers/gpu/drm/msm/msm_gem_prime.c void msm_gem_prime_vunmap(struct drm_gem_object *obj, void *vaddr) obj 32 drivers/gpu/drm/msm/msm_gem_prime.c msm_gem_put_vaddr(obj); obj 35 drivers/gpu/drm/msm/msm_gem_prime.c int msm_gem_prime_mmap(struct drm_gem_object *obj, struct vm_area_struct *vma) obj 39 drivers/gpu/drm/msm/msm_gem_prime.c ret = drm_gem_mmap_obj(obj, obj->size, vma); obj 52 drivers/gpu/drm/msm/msm_gem_prime.c int msm_gem_prime_pin(struct drm_gem_object *obj) obj 54 drivers/gpu/drm/msm/msm_gem_prime.c if (!obj->import_attach) obj 55 drivers/gpu/drm/msm/msm_gem_prime.c msm_gem_get_pages(obj); obj 59 drivers/gpu/drm/msm/msm_gem_prime.c void msm_gem_prime_unpin(struct drm_gem_object *obj) obj 61 drivers/gpu/drm/msm/msm_gem_prime.c if (!obj->import_attach) obj 62 drivers/gpu/drm/msm/msm_gem_prime.c msm_gem_put_pages(obj); obj 114 drivers/gpu/drm/msm/msm_gem_submit.c struct drm_gem_object *obj; obj 120 drivers/gpu/drm/msm/msm_gem_submit.c obj = idr_find(&file->object_idr, submit->bos[i].handle); obj 121 drivers/gpu/drm/msm/msm_gem_submit.c if (!obj) { obj 127 drivers/gpu/drm/msm/msm_gem_submit.c msm_obj = to_msm_bo(obj); obj 136 drivers/gpu/drm/msm/msm_gem_submit.c drm_gem_object_get(obj); obj 138 drivers/gpu/drm/msm/msm_gem_submit.c submit->bos[i].obj = msm_obj; obj 155 drivers/gpu/drm/msm/msm_gem_submit.c struct msm_gem_object *msm_obj = submit->bos[i].obj; obj 176 drivers/gpu/drm/msm/msm_gem_submit.c struct msm_gem_object *msm_obj = submit->bos[i].obj; obj 204 drivers/gpu/drm/msm/msm_gem_submit.c struct msm_gem_object *msm_obj = submit->bos[contended].obj; obj 223 drivers/gpu/drm/msm/msm_gem_submit.c struct msm_gem_object *msm_obj = submit->bos[i].obj; obj 257 drivers/gpu/drm/msm/msm_gem_submit.c struct msm_gem_object *msm_obj = submit->bos[i].obj; obj 283 drivers/gpu/drm/msm/msm_gem_submit.c struct msm_gem_object **obj, uint64_t *iova, bool *valid) obj 291 drivers/gpu/drm/msm/msm_gem_submit.c if (obj) obj 292 drivers/gpu/drm/msm/msm_gem_submit.c *obj = submit->bos[idx].obj; obj 302 drivers/gpu/drm/msm/msm_gem_submit.c static int submit_reloc(struct msm_gem_submit *submit, struct msm_gem_object *obj, obj 320 drivers/gpu/drm/msm/msm_gem_submit.c ptr = msm_gem_get_vaddr(&obj->base); obj 351 drivers/gpu/drm/msm/msm_gem_submit.c if ((off >= (obj->base.size / 4)) || obj 378 drivers/gpu/drm/msm/msm_gem_submit.c msm_gem_put_vaddr(&obj->base); obj 388 drivers/gpu/drm/msm/msm_gem_submit.c struct msm_gem_object *msm_obj = submit->bos[i].obj; obj 306 drivers/gpu/drm/msm/msm_gpu.c struct msm_gem_object *obj, u64 iova, u32 flags) obj 311 drivers/gpu/drm/msm/msm_gpu.c state_bo->size = obj->base.size; obj 315 drivers/gpu/drm/msm/msm_gpu.c if ((flags & MSM_SUBMIT_BO_READ) && !obj->base.import_attach) { obj 318 drivers/gpu/drm/msm/msm_gpu.c state_bo->data = kvmalloc(obj->base.size, GFP_KERNEL); obj 322 drivers/gpu/drm/msm/msm_gpu.c ptr = msm_gem_get_vaddr_active(&obj->base); obj 329 drivers/gpu/drm/msm/msm_gpu.c memcpy(state_bo->data, ptr, obj->base.size); obj 330 drivers/gpu/drm/msm/msm_gpu.c msm_gem_put_vaddr(&obj->base); obj 366 drivers/gpu/drm/msm/msm_gpu.c msm_gpu_crashstate_get_bo(state, submit->bos[idx].obj, obj 675 drivers/gpu/drm/msm/msm_gpu.c struct msm_gem_object *msm_obj = submit->bos[i].obj; obj 752 drivers/gpu/drm/msm/msm_gpu.c struct msm_gem_object *msm_obj = submit->bos[i].obj; obj 303 
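
The msm_gem_submit.c lookup entries above (obj 114-138) translate userspace handles into referenced objects: idr_find() returns an unreferenced pointer, so a reference must be taken before the table lock is released. A simplified sketch (error paths and the object_idr spinlock trimmed; example_lookup_bo() is a hypothetical name):

static int example_lookup_bo(struct msm_gem_submit *submit,
                             struct drm_file *file, int i)
{
        struct drm_gem_object *obj;

        /* idr_find() hands back a pointer without a reference... */
        obj = idr_find(&file->object_idr, submit->bos[i].handle);
        if (!obj)
                return -EINVAL;

        /* ...so take one before anything can drop the handle. */
        drm_gem_object_get(obj);
        submit->bos[i].obj = to_msm_bo(obj);

        return 0;
}
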
drivers/gpu/drm/msm/msm_rd.c struct msm_gem_object *obj = submit->bos[idx].obj; obj 311 drivers/gpu/drm/msm/msm_rd.c size = obj->base.size; obj 325 drivers/gpu/drm/msm/msm_rd.c buf = msm_gem_get_vaddr_active(&obj->base); obj 333 drivers/gpu/drm/msm/msm_rd.c msm_gem_put_vaddr(&obj->base); obj 75 drivers/gpu/drm/nouveau/nouveau_acpi.c union acpi_object *obj; obj 88 drivers/gpu/drm/nouveau/nouveau_acpi.c obj = acpi_evaluate_dsm_typed(handle, &nouveau_op_dsm_muid, 0x00000100, obj 90 drivers/gpu/drm/nouveau/nouveau_acpi.c if (!obj) { obj 94 drivers/gpu/drm/nouveau/nouveau_acpi.c if (obj->buffer.length == 4) { obj 95 drivers/gpu/drm/nouveau/nouveau_acpi.c *result |= obj->buffer.pointer[0]; obj 96 drivers/gpu/drm/nouveau/nouveau_acpi.c *result |= (obj->buffer.pointer[1] << 8); obj 97 drivers/gpu/drm/nouveau/nouveau_acpi.c *result |= (obj->buffer.pointer[2] << 16); obj 98 drivers/gpu/drm/nouveau/nouveau_acpi.c *result |= (obj->buffer.pointer[3] << 24); obj 100 drivers/gpu/drm/nouveau/nouveau_acpi.c ACPI_FREE(obj); obj 134 drivers/gpu/drm/nouveau/nouveau_acpi.c union acpi_object *obj; obj 140 drivers/gpu/drm/nouveau/nouveau_acpi.c obj = acpi_evaluate_dsm_typed(handle, &nouveau_dsm_muid, 0x00000102, obj 142 drivers/gpu/drm/nouveau/nouveau_acpi.c if (!obj) { obj 146 drivers/gpu/drm/nouveau/nouveau_acpi.c if (obj->integer.value == 0x80000002) obj 148 drivers/gpu/drm/nouveau/nouveau_acpi.c ACPI_FREE(obj); obj 393 drivers/gpu/drm/nouveau/nouveau_acpi.c union acpi_object rom_arg_elements[2], *obj; obj 412 drivers/gpu/drm/nouveau/nouveau_acpi.c obj = (union acpi_object *)buffer.pointer; obj 413 drivers/gpu/drm/nouveau/nouveau_acpi.c len = min(len, (int)obj->buffer.length); obj 414 drivers/gpu/drm/nouveau/nouveau_acpi.c memcpy(bios+offset, obj->buffer.pointer, len); obj 1194 drivers/gpu/drm/nouveau/nouveau_connector.c nouveau_connector_aux_xfer(struct drm_dp_aux *obj, struct drm_dp_aux_msg *msg) obj 1197 drivers/gpu/drm/nouveau/nouveau_connector.c container_of(obj, typeof(*nv_connector), aux); obj 30 drivers/gpu/drm/nouveau/nouveau_prime.c struct sg_table *nouveau_gem_prime_get_sg_table(struct drm_gem_object *obj) obj 32 drivers/gpu/drm/nouveau/nouveau_prime.c struct nouveau_bo *nvbo = nouveau_gem_object(obj); obj 38 drivers/gpu/drm/nouveau/nouveau_prime.c void *nouveau_gem_prime_vmap(struct drm_gem_object *obj) obj 40 drivers/gpu/drm/nouveau/nouveau_prime.c struct nouveau_bo *nvbo = nouveau_gem_object(obj); obj 51 drivers/gpu/drm/nouveau/nouveau_prime.c void nouveau_gem_prime_vunmap(struct drm_gem_object *obj, void *vaddr) obj 53 drivers/gpu/drm/nouveau/nouveau_prime.c struct nouveau_bo *nvbo = nouveau_gem_object(obj); obj 63 drivers/gpu/drm/nouveau/nouveau_prime.c struct drm_gem_object *obj; obj 76 drivers/gpu/drm/nouveau/nouveau_prime.c obj = ERR_CAST(nvbo); obj 87 drivers/gpu/drm/nouveau/nouveau_prime.c obj = ERR_PTR(-ENOMEM); obj 94 drivers/gpu/drm/nouveau/nouveau_prime.c obj = ERR_PTR(ret); obj 98 drivers/gpu/drm/nouveau/nouveau_prime.c obj = &nvbo->bo.base; obj 102 drivers/gpu/drm/nouveau/nouveau_prime.c return obj; obj 105 drivers/gpu/drm/nouveau/nouveau_prime.c int nouveau_gem_prime_pin(struct drm_gem_object *obj) obj 107 drivers/gpu/drm/nouveau/nouveau_prime.c struct nouveau_bo *nvbo = nouveau_gem_object(obj); obj 118 drivers/gpu/drm/nouveau/nouveau_prime.c void nouveau_gem_prime_unpin(struct drm_gem_object *obj) obj 120 drivers/gpu/drm/nouveau/nouveau_prime.c struct nouveau_bo *nvbo = nouveau_gem_object(obj); obj 552 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c struct nvkm_gpuobj *obj = 
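
The nouveau_acpi.c entries above show the acpi_evaluate_dsm_typed() pattern: request a typed return so the type check happens inside ACPICA, decode the payload, then ACPI_FREE() it. A hedged sketch for a 4-byte buffer result, with the GUID, revision, and function id left as caller-supplied assumptions:

static int example_eval_dsm_buffer(acpi_handle handle, const guid_t *guid,
                                   u64 rev, u64 func, u32 *result)
{
        union acpi_object *obj;

        /* The typed variant returns NULL unless the _DSM yields an
         * ACPI_TYPE_BUFFER, so no manual type check is needed. */
        obj = acpi_evaluate_dsm_typed(handle, guid, rev, func, NULL,
                                      ACPI_TYPE_BUFFER);
        if (!obj)
                return -ENODEV;

        if (obj->buffer.length == 4)
                *result = obj->buffer.pointer[0] |
                          (obj->buffer.pointer[1] << 8) |
                          (obj->buffer.pointer[2] << 16) |
                          (obj->buffer.pointer[3] << 24);

        ACPI_FREE(obj);
        return 0;
}
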
ctx->data; obj 585 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c nvkm_wo32(obj, offset * 4, 0x3f800000); obj 589 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c nvkm_wo32(obj, (offset + b0_offset + i) * 4, 0x00000001); obj 591 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c nvkm_wo32(obj, (offset + b1_offset + i) * 4, 0x3f800000); obj 200 drivers/gpu/drm/nouveau/nvkm/subdev/clk/nv40.c nv40_clk_tidy(struct nvkm_clk *obj) obj 76 drivers/gpu/drm/nouveau/nvkm/subdev/i2c/auxg94.c g94_i2c_aux_xfer(struct nvkm_i2c_aux *obj, bool retry, obj 79 drivers/gpu/drm/nouveau/nvkm/subdev/i2c/auxg94.c struct g94_i2c_aux *aux = g94_i2c_aux(obj); obj 76 drivers/gpu/drm/nouveau/nvkm/subdev/i2c/auxgm200.c gm200_i2c_aux_xfer(struct nvkm_i2c_aux *obj, bool retry, obj 79 drivers/gpu/drm/nouveau/nvkm/subdev/i2c/auxgm200.c struct gm200_i2c_aux *aux = gm200_i2c_aux(obj); obj 143 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/gk20a.c gk20a_instobj_iommu_recycle_vaddr(struct gk20a_instobj_iommu *obj) obj 145 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/gk20a.c struct gk20a_instmem *imem = obj->base.imem; obj 147 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/gk20a.c WARN_ON(obj->use_cpt); obj 148 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/gk20a.c list_del(&obj->vaddr_node); obj 149 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/gk20a.c vunmap(obj->base.vaddr); obj 150 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/gk20a.c obj->base.vaddr = NULL; obj 151 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/gk20a.c imem->vaddr_use -= nvkm_memory_size(&obj->base.memory); obj 93 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/base.c union acpi_object *obj; obj 107 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/base.c obj = acpi_evaluate_dsm(handle, &muid, rev, 0x00000010, &argv4); obj 108 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/base.c if (!obj) { obj 113 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/base.c if (obj->type == ACPI_TYPE_BUFFER) { obj 114 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/base.c mxm->mxms = kmemdup(obj->buffer.pointer, obj 115 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/base.c obj->buffer.length, GFP_KERNEL); obj 116 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/base.c } else if (obj->type == ACPI_TYPE_INTEGER) { obj 118 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/base.c obj->integer.value); obj 121 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/base.c ACPI_FREE(obj); obj 137 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/base.c union acpi_object *obj; obj 146 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/base.c obj = retn.pointer; obj 147 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/base.c if (obj->type == ACPI_TYPE_INTEGER) { obj 148 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/base.c version = obj->integer.value; obj 156 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/base.c kfree(obj); obj 167 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/base.c union acpi_object *obj; obj 187 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/base.c obj = retn.pointer; obj 188 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/base.c if (obj->type == ACPI_TYPE_BUFFER) { obj 189 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/base.c mxm->mxms = kmemdup(obj->buffer.pointer, obj 190 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/base.c obj->buffer.length, GFP_KERNEL); obj 193 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/base.c kfree(obj); obj 456 drivers/gpu/drm/omapdrm/omap_drv.c struct drm_gem_object *obj; obj 461 drivers/gpu/drm/omapdrm/omap_drv.c obj = drm_gem_object_lookup(file_priv, args->handle); obj 462 drivers/gpu/drm/omapdrm/omap_drv.c if (!obj) obj 465 drivers/gpu/drm/omapdrm/omap_drv.c args->size = omap_gem_mmap_size(obj); obj 466 
drivers/gpu/drm/omapdrm/omap_drv.c args->offset = omap_gem_mmap_offset(obj); obj 468 drivers/gpu/drm/omapdrm/omap_drv.c drm_gem_object_put_unlocked(obj); obj 98 drivers/gpu/drm/omapdrm/omap_fb.c return omap_gem_flags(fb->obj[0]) & OMAP_BO_TILED; obj 157 drivers/gpu/drm/omapdrm/omap_fb.c if (omap_gem_flags(fb->obj[0]) & OMAP_BO_TILED) { obj 182 drivers/gpu/drm/omapdrm/omap_fb.c omap_gem_rotated_dma_addr(fb->obj[0], orient, x, y, obj 187 drivers/gpu/drm/omapdrm/omap_fb.c info->screen_width = omap_gem_tiled_stride(fb->obj[0], orient); obj 215 drivers/gpu/drm/omapdrm/omap_fb.c WARN_ON(!(omap_gem_flags(fb->obj[1]) & OMAP_BO_TILED)); obj 216 drivers/gpu/drm/omapdrm/omap_fb.c omap_gem_rotated_dma_addr(fb->obj[1], orient, x/2, y/2, obj 242 drivers/gpu/drm/omapdrm/omap_fb.c ret = omap_gem_pin(fb->obj[i], &plane->dma_addr); obj 245 drivers/gpu/drm/omapdrm/omap_fb.c omap_gem_dma_sync_buffer(fb->obj[i], DMA_TO_DEVICE); obj 257 drivers/gpu/drm/omapdrm/omap_fb.c omap_gem_unpin(fb->obj[i]); obj 283 drivers/gpu/drm/omapdrm/omap_fb.c omap_gem_unpin(fb->obj[i]); obj 301 drivers/gpu/drm/omapdrm/omap_fb.c omap_gem_describe(fb->obj[i], m); obj 408 drivers/gpu/drm/omapdrm/omap_fb.c fb->obj[i] = bos[i]; obj 115 drivers/gpu/drm/omapdrm/omap_gem.c struct drm_gem_object *obj; /* the current pinned obj */ obj 134 drivers/gpu/drm/omapdrm/omap_gem.c u64 omap_gem_mmap_offset(struct drm_gem_object *obj) obj 136 drivers/gpu/drm/omapdrm/omap_gem.c struct drm_device *dev = obj->dev; obj 141 drivers/gpu/drm/omapdrm/omap_gem.c size = omap_gem_mmap_size(obj); obj 142 drivers/gpu/drm/omapdrm/omap_gem.c ret = drm_gem_create_mmap_offset_size(obj, size); obj 148 drivers/gpu/drm/omapdrm/omap_gem.c return drm_vma_node_offset_addr(&obj->vma_node); obj 166 drivers/gpu/drm/omapdrm/omap_gem.c static void omap_gem_evict_entry(struct drm_gem_object *obj, obj 169 drivers/gpu/drm/omapdrm/omap_gem.c struct omap_gem_object *omap_obj = to_omap_bo(obj); obj 170 drivers/gpu/drm/omapdrm/omap_gem.c struct omap_drm_private *priv = obj->dev->dev_private; obj 173 drivers/gpu/drm/omapdrm/omap_gem.c loff_t off = omap_gem_mmap_offset(obj) + obj 181 drivers/gpu/drm/omapdrm/omap_gem.c unmap_mapping_range(obj->dev->anon_inode->i_mapping, obj 186 drivers/gpu/drm/omapdrm/omap_gem.c unmap_mapping_range(obj->dev->anon_inode->i_mapping, obj 190 drivers/gpu/drm/omapdrm/omap_gem.c entry->obj = NULL; obj 194 drivers/gpu/drm/omapdrm/omap_gem.c static void omap_gem_evict(struct drm_gem_object *obj) obj 196 drivers/gpu/drm/omapdrm/omap_gem.c struct omap_gem_object *omap_obj = to_omap_bo(obj); obj 197 drivers/gpu/drm/omapdrm/omap_gem.c struct omap_drm_private *priv = obj->dev->dev_private; obj 207 drivers/gpu/drm/omapdrm/omap_gem.c if (entry->obj == obj) obj 208 drivers/gpu/drm/omapdrm/omap_gem.c omap_gem_evict_entry(obj, fmt, entry); obj 221 drivers/gpu/drm/omapdrm/omap_gem.c static int omap_gem_attach_pages(struct drm_gem_object *obj) obj 223 drivers/gpu/drm/omapdrm/omap_gem.c struct drm_device *dev = obj->dev; obj 224 drivers/gpu/drm/omapdrm/omap_gem.c struct omap_gem_object *omap_obj = to_omap_bo(obj); obj 226 drivers/gpu/drm/omapdrm/omap_gem.c int npages = obj->size >> PAGE_SHIFT; obj 239 drivers/gpu/drm/omapdrm/omap_gem.c pages = drm_gem_get_pages(obj); obj 241 drivers/gpu/drm/omapdrm/omap_gem.c dev_err(obj->dev->dev, "could not get pages: %ld\n", PTR_ERR(pages)); obj 288 drivers/gpu/drm/omapdrm/omap_gem.c drm_gem_put_pages(obj, pages, true, false); obj 294 drivers/gpu/drm/omapdrm/omap_gem.c static void omap_gem_detach_pages(struct drm_gem_object *obj) obj 296 
drivers/gpu/drm/omapdrm/omap_gem.c struct omap_gem_object *omap_obj = to_omap_bo(obj); obj 297 drivers/gpu/drm/omapdrm/omap_gem.c unsigned int npages = obj->size >> PAGE_SHIFT; obj 304 drivers/gpu/drm/omapdrm/omap_gem.c dma_unmap_page(obj->dev->dev, omap_obj->dma_addrs[i], obj 311 drivers/gpu/drm/omapdrm/omap_gem.c drm_gem_put_pages(obj, omap_obj->pages, true, false); obj 316 drivers/gpu/drm/omapdrm/omap_gem.c u32 omap_gem_flags(struct drm_gem_object *obj) obj 318 drivers/gpu/drm/omapdrm/omap_gem.c return to_omap_bo(obj)->flags; obj 322 drivers/gpu/drm/omapdrm/omap_gem.c size_t omap_gem_mmap_size(struct drm_gem_object *obj) obj 324 drivers/gpu/drm/omapdrm/omap_gem.c struct omap_gem_object *omap_obj = to_omap_bo(obj); obj 325 drivers/gpu/drm/omapdrm/omap_gem.c size_t size = obj->size; obj 346 drivers/gpu/drm/omapdrm/omap_gem.c static vm_fault_t omap_gem_fault_1d(struct drm_gem_object *obj, obj 349 drivers/gpu/drm/omapdrm/omap_gem.c struct omap_gem_object *omap_obj = to_omap_bo(obj); obj 357 drivers/gpu/drm/omapdrm/omap_gem.c omap_gem_cpu_sync_page(obj, pgoff); obj 372 drivers/gpu/drm/omapdrm/omap_gem.c static vm_fault_t omap_gem_fault_2d(struct drm_gem_object *obj, obj 375 drivers/gpu/drm/omapdrm/omap_gem.c struct omap_gem_object *omap_obj = to_omap_bo(obj); obj 376 drivers/gpu/drm/omapdrm/omap_gem.c struct omap_drm_private *priv = obj->dev->dev_private; obj 419 drivers/gpu/drm/omapdrm/omap_gem.c if (entry->obj) obj 420 drivers/gpu/drm/omapdrm/omap_gem.c omap_gem_evict_entry(entry->obj, fmt, entry); obj 422 drivers/gpu/drm/omapdrm/omap_gem.c entry->obj = obj; obj 453 drivers/gpu/drm/omapdrm/omap_gem.c dev_err(obj->dev->dev, "failed to pin: %d\n", err); obj 493 drivers/gpu/drm/omapdrm/omap_gem.c struct drm_gem_object *obj = vma->vm_private_data; obj 494 drivers/gpu/drm/omapdrm/omap_gem.c struct omap_gem_object *omap_obj = to_omap_bo(obj); obj 504 drivers/gpu/drm/omapdrm/omap_gem.c err = omap_gem_attach_pages(obj); obj 517 drivers/gpu/drm/omapdrm/omap_gem.c ret = omap_gem_fault_2d(obj, vma, vmf); obj 519 drivers/gpu/drm/omapdrm/omap_gem.c ret = omap_gem_fault_1d(obj, vma, vmf); obj 541 drivers/gpu/drm/omapdrm/omap_gem.c int omap_gem_mmap_obj(struct drm_gem_object *obj, obj 544 drivers/gpu/drm/omapdrm/omap_gem.c struct omap_gem_object *omap_obj = to_omap_bo(obj); obj 559 drivers/gpu/drm/omapdrm/omap_gem.c if (WARN_ON(!obj->filp)) obj 569 drivers/gpu/drm/omapdrm/omap_gem.c vma->vm_file = get_file(obj->filp); obj 620 drivers/gpu/drm/omapdrm/omap_gem.c struct drm_gem_object *obj; obj 624 drivers/gpu/drm/omapdrm/omap_gem.c obj = drm_gem_object_lookup(file, handle); obj 625 drivers/gpu/drm/omapdrm/omap_gem.c if (obj == NULL) { obj 630 drivers/gpu/drm/omapdrm/omap_gem.c *offset = omap_gem_mmap_offset(obj); obj 632 drivers/gpu/drm/omapdrm/omap_gem.c drm_gem_object_put_unlocked(obj); obj 644 drivers/gpu/drm/omapdrm/omap_gem.c int omap_gem_roll(struct drm_gem_object *obj, u32 roll) obj 646 drivers/gpu/drm/omapdrm/omap_gem.c struct omap_gem_object *omap_obj = to_omap_bo(obj); obj 647 drivers/gpu/drm/omapdrm/omap_gem.c u32 npages = obj->size >> PAGE_SHIFT; obj 651 drivers/gpu/drm/omapdrm/omap_gem.c dev_err(obj->dev->dev, "invalid roll: %d\n", roll); obj 661 drivers/gpu/drm/omapdrm/omap_gem.c ret = omap_gem_attach_pages(obj); obj 668 drivers/gpu/drm/omapdrm/omap_gem.c dev_err(obj->dev->dev, "could not repin: %d\n", ret); obj 691 drivers/gpu/drm/omapdrm/omap_gem.c static inline bool omap_gem_is_cached_coherent(struct drm_gem_object *obj) obj 693 drivers/gpu/drm/omapdrm/omap_gem.c struct omap_gem_object 
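
The omap_gem_attach_pages()/omap_gem_detach_pages() entries above show shmem pages being DMA-mapped one page at a time on attach, and the reverse on detach. A condensed sketch of the detach side (the direction is assumed DMA_BIDIRECTIONAL here; example_detach_pages() is a hypothetical name):

static void example_detach_pages(struct drm_gem_object *obj,
                                 struct page **pages, dma_addr_t *dma_addrs)
{
        unsigned int npages = obj->size >> PAGE_SHIFT;
        unsigned int i;

        for (i = 0; i < npages; i++) {
                if (dma_addrs[i])
                        dma_unmap_page(obj->dev->dev, dma_addrs[i],
                                       PAGE_SIZE, DMA_BIDIRECTIONAL);
        }

        /* dirty=true so shmem writes the pages back before dropping them. */
        drm_gem_put_pages(obj, pages, true, false);
}
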
*omap_obj = to_omap_bo(obj); obj 702 drivers/gpu/drm/omapdrm/omap_gem.c void omap_gem_cpu_sync_page(struct drm_gem_object *obj, int pgoff) obj 704 drivers/gpu/drm/omapdrm/omap_gem.c struct drm_device *dev = obj->dev; obj 705 drivers/gpu/drm/omapdrm/omap_gem.c struct omap_gem_object *omap_obj = to_omap_bo(obj); obj 707 drivers/gpu/drm/omapdrm/omap_gem.c if (omap_gem_is_cached_coherent(obj)) obj 718 drivers/gpu/drm/omapdrm/omap_gem.c void omap_gem_dma_sync_buffer(struct drm_gem_object *obj, obj 721 drivers/gpu/drm/omapdrm/omap_gem.c struct drm_device *dev = obj->dev; obj 722 drivers/gpu/drm/omapdrm/omap_gem.c struct omap_gem_object *omap_obj = to_omap_bo(obj); obj 723 drivers/gpu/drm/omapdrm/omap_gem.c int i, npages = obj->size >> PAGE_SHIFT; obj 727 drivers/gpu/drm/omapdrm/omap_gem.c if (omap_gem_is_cached_coherent(obj)) obj 748 drivers/gpu/drm/omapdrm/omap_gem.c unmap_mapping_range(obj->filp->f_mapping, 0, obj 749 drivers/gpu/drm/omapdrm/omap_gem.c omap_gem_mmap_size(obj), 1); obj 767 drivers/gpu/drm/omapdrm/omap_gem.c int omap_gem_pin(struct drm_gem_object *obj, dma_addr_t *dma_addr) obj 769 drivers/gpu/drm/omapdrm/omap_gem.c struct omap_drm_private *priv = obj->dev->dev_private; obj 770 drivers/gpu/drm/omapdrm/omap_gem.c struct omap_gem_object *omap_obj = to_omap_bo(obj); obj 777 drivers/gpu/drm/omapdrm/omap_gem.c u32 npages = obj->size >> PAGE_SHIFT; obj 783 drivers/gpu/drm/omapdrm/omap_gem.c ret = omap_gem_attach_pages(obj); obj 792 drivers/gpu/drm/omapdrm/omap_gem.c block = tiler_reserve_1d(obj->size); obj 797 drivers/gpu/drm/omapdrm/omap_gem.c dev_err(obj->dev->dev, obj 807 drivers/gpu/drm/omapdrm/omap_gem.c dev_err(obj->dev->dev, obj 842 drivers/gpu/drm/omapdrm/omap_gem.c void omap_gem_unpin(struct drm_gem_object *obj) obj 844 drivers/gpu/drm/omapdrm/omap_gem.c struct omap_gem_object *omap_obj = to_omap_bo(obj); obj 854 drivers/gpu/drm/omapdrm/omap_gem.c dev_err(obj->dev->dev, obj 859 drivers/gpu/drm/omapdrm/omap_gem.c dev_err(obj->dev->dev, obj 874 drivers/gpu/drm/omapdrm/omap_gem.c int omap_gem_rotated_dma_addr(struct drm_gem_object *obj, u32 orient, obj 877 drivers/gpu/drm/omapdrm/omap_gem.c struct omap_gem_object *omap_obj = to_omap_bo(obj); obj 894 drivers/gpu/drm/omapdrm/omap_gem.c int omap_gem_tiled_stride(struct drm_gem_object *obj, u32 orient) obj 896 drivers/gpu/drm/omapdrm/omap_gem.c struct omap_gem_object *omap_obj = to_omap_bo(obj); obj 913 drivers/gpu/drm/omapdrm/omap_gem.c int omap_gem_get_pages(struct drm_gem_object *obj, struct page ***pages, obj 916 drivers/gpu/drm/omapdrm/omap_gem.c struct omap_gem_object *omap_obj = to_omap_bo(obj); obj 922 drivers/gpu/drm/omapdrm/omap_gem.c ret = omap_gem_attach_pages(obj); obj 941 drivers/gpu/drm/omapdrm/omap_gem.c int omap_gem_put_pages(struct drm_gem_object *obj) obj 955 drivers/gpu/drm/omapdrm/omap_gem.c void *omap_gem_vaddr(struct drm_gem_object *obj) obj 957 drivers/gpu/drm/omapdrm/omap_gem.c struct omap_gem_object *omap_obj = to_omap_bo(obj); obj 964 drivers/gpu/drm/omapdrm/omap_gem.c ret = omap_gem_attach_pages(obj); obj 970 drivers/gpu/drm/omapdrm/omap_gem.c omap_obj->vaddr = vmap(omap_obj->pages, obj->size >> PAGE_SHIFT, obj 997 drivers/gpu/drm/omapdrm/omap_gem.c struct drm_gem_object *obj = &omap_obj->base; obj 998 drivers/gpu/drm/omapdrm/omap_gem.c u32 npages = obj->size >> PAGE_SHIFT; obj 1022 drivers/gpu/drm/omapdrm/omap_gem.c void omap_gem_describe(struct drm_gem_object *obj, struct seq_file *m) obj 1024 drivers/gpu/drm/omapdrm/omap_gem.c struct omap_gem_object *omap_obj = to_omap_bo(obj); obj 1027 
drivers/gpu/drm/omapdrm/omap_gem.c off = drm_vma_node_start(&obj->vma_node); obj 1032 drivers/gpu/drm/omapdrm/omap_gem.c omap_obj->flags, obj->name, kref_read(&obj->refcount), obj 1045 drivers/gpu/drm/omapdrm/omap_gem.c seq_printf(m, " %zu", obj->size); obj 1060 drivers/gpu/drm/omapdrm/omap_gem.c struct drm_gem_object *obj = &omap_obj->base; obj 1062 drivers/gpu/drm/omapdrm/omap_gem.c omap_gem_describe(obj, m); obj 1064 drivers/gpu/drm/omapdrm/omap_gem.c size += obj->size; obj 1075 drivers/gpu/drm/omapdrm/omap_gem.c void omap_gem_free_object(struct drm_gem_object *obj) obj 1077 drivers/gpu/drm/omapdrm/omap_gem.c struct drm_device *dev = obj->dev; obj 1079 drivers/gpu/drm/omapdrm/omap_gem.c struct omap_gem_object *omap_obj = to_omap_bo(obj); obj 1081 drivers/gpu/drm/omapdrm/omap_gem.c omap_gem_evict(obj); obj 1102 drivers/gpu/drm/omapdrm/omap_gem.c omap_gem_detach_pages(obj); obj 1106 drivers/gpu/drm/omapdrm/omap_gem.c dma_free_wc(dev->dev, obj->size, omap_obj->vaddr, obj 1110 drivers/gpu/drm/omapdrm/omap_gem.c } else if (obj->import_attach) { obj 1111 drivers/gpu/drm/omapdrm/omap_gem.c drm_prime_gem_destroy(obj, omap_obj->sgt); obj 1116 drivers/gpu/drm/omapdrm/omap_gem.c drm_gem_object_release(obj); obj 1129 drivers/gpu/drm/omapdrm/omap_gem.c struct drm_gem_object *obj; obj 1173 drivers/gpu/drm/omapdrm/omap_gem.c obj = &omap_obj->base; obj 1196 drivers/gpu/drm/omapdrm/omap_gem.c drm_gem_private_object_init(dev, obj, size); obj 1198 drivers/gpu/drm/omapdrm/omap_gem.c ret = drm_gem_object_init(dev, obj, size); obj 1202 drivers/gpu/drm/omapdrm/omap_gem.c mapping = obj->filp->f_mapping; obj 1219 drivers/gpu/drm/omapdrm/omap_gem.c return obj; obj 1222 drivers/gpu/drm/omapdrm/omap_gem.c drm_gem_object_release(obj); obj 1233 drivers/gpu/drm/omapdrm/omap_gem.c struct drm_gem_object *obj; obj 1241 drivers/gpu/drm/omapdrm/omap_gem.c obj = omap_gem_new(dev, gsize, OMAP_BO_MEM_DMABUF | OMAP_BO_WC); obj 1242 drivers/gpu/drm/omapdrm/omap_gem.c if (!obj) obj 1245 drivers/gpu/drm/omapdrm/omap_gem.c omap_obj = to_omap_bo(obj); obj 1263 drivers/gpu/drm/omapdrm/omap_gem.c omap_gem_free_object(obj); obj 1264 drivers/gpu/drm/omapdrm/omap_gem.c obj = ERR_PTR(-ENOMEM); obj 1277 drivers/gpu/drm/omapdrm/omap_gem.c omap_gem_free_object(obj); obj 1278 drivers/gpu/drm/omapdrm/omap_gem.c obj = ERR_PTR(-ENOMEM); obj 1285 drivers/gpu/drm/omapdrm/omap_gem.c return obj; obj 1292 drivers/gpu/drm/omapdrm/omap_gem.c struct drm_gem_object *obj; obj 1295 drivers/gpu/drm/omapdrm/omap_gem.c obj = omap_gem_new(dev, gsize, flags); obj 1296 drivers/gpu/drm/omapdrm/omap_gem.c if (!obj) obj 1299 drivers/gpu/drm/omapdrm/omap_gem.c ret = drm_gem_handle_create(file, obj, handle); obj 1301 drivers/gpu/drm/omapdrm/omap_gem.c omap_gem_free_object(obj); obj 1306 drivers/gpu/drm/omapdrm/omap_gem.c drm_gem_object_put_unlocked(obj); obj 40 drivers/gpu/drm/omapdrm/omap_gem.h void omap_gem_describe(struct drm_gem_object *obj, struct seq_file *m); obj 51 drivers/gpu/drm/omapdrm/omap_gem.h void omap_gem_free_object(struct drm_gem_object *obj); obj 52 drivers/gpu/drm/omapdrm/omap_gem.h void *omap_gem_vaddr(struct drm_gem_object *obj); obj 62 drivers/gpu/drm/omapdrm/omap_gem.h int omap_gem_mmap_obj(struct drm_gem_object *obj, obj 64 drivers/gpu/drm/omapdrm/omap_gem.h u64 omap_gem_mmap_offset(struct drm_gem_object *obj); obj 65 drivers/gpu/drm/omapdrm/omap_gem.h size_t omap_gem_mmap_size(struct drm_gem_object *obj); obj 68 drivers/gpu/drm/omapdrm/omap_gem.h struct dma_buf *omap_gem_prime_export(struct drm_gem_object *obj, int flags); obj 73 
drivers/gpu/drm/omapdrm/omap_gem.h int omap_gem_roll(struct drm_gem_object *obj, u32 roll); obj 74 drivers/gpu/drm/omapdrm/omap_gem.h void omap_gem_cpu_sync_page(struct drm_gem_object *obj, int pgoff); obj 75 drivers/gpu/drm/omapdrm/omap_gem.h void omap_gem_dma_sync_buffer(struct drm_gem_object *obj, obj 77 drivers/gpu/drm/omapdrm/omap_gem.h int omap_gem_pin(struct drm_gem_object *obj, dma_addr_t *dma_addr); obj 78 drivers/gpu/drm/omapdrm/omap_gem.h void omap_gem_unpin(struct drm_gem_object *obj); obj 79 drivers/gpu/drm/omapdrm/omap_gem.h int omap_gem_get_pages(struct drm_gem_object *obj, struct page ***pages, obj 81 drivers/gpu/drm/omapdrm/omap_gem.h int omap_gem_put_pages(struct drm_gem_object *obj); obj 83 drivers/gpu/drm/omapdrm/omap_gem.h u32 omap_gem_flags(struct drm_gem_object *obj); obj 84 drivers/gpu/drm/omapdrm/omap_gem.h int omap_gem_rotated_dma_addr(struct drm_gem_object *obj, u32 orient, obj 86 drivers/gpu/drm/omapdrm/omap_gem.h int omap_gem_tiled_stride(struct drm_gem_object *obj, u32 orient); obj 22 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c struct drm_gem_object *obj = attachment->dmabuf->priv; obj 34 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c ret = omap_gem_pin(obj, &dma_addr); obj 43 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c sg_dma_len(sg->sgl) = obj->size; obj 44 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c sg_set_page(sg->sgl, pfn_to_page(PFN_DOWN(dma_addr)), obj->size, 0); obj 48 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c omap_gem_dma_sync_buffer(obj, dir); obj 59 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c struct drm_gem_object *obj = attachment->dmabuf->priv; obj 60 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c omap_gem_unpin(obj); obj 68 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c struct drm_gem_object *obj = buffer->priv; obj 70 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c if (omap_gem_flags(obj) & OMAP_BO_TILED) { obj 77 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c return omap_gem_get_pages(obj, &pages, true); obj 83 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c struct drm_gem_object *obj = buffer->priv; obj 84 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c omap_gem_put_pages(obj); obj 91 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c struct drm_gem_object *obj = buffer->priv; obj 93 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c omap_gem_get_pages(obj, &pages, false); obj 94 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c omap_gem_cpu_sync_page(obj, page_num); obj 101 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c struct drm_gem_object *obj = buffer->priv; obj 103 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c omap_gem_get_pages(obj, &pages, false); obj 110 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c struct drm_gem_object *obj = buffer->priv; obj 113 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c ret = drm_gem_mmap_obj(obj, omap_gem_mmap_size(obj), vma); obj 117 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c return omap_gem_mmap_obj(obj, vma); obj 131 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c struct dma_buf *omap_gem_prime_export(struct drm_gem_object *obj, int flags) obj 136 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c exp_info.size = obj->size; obj 138 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c exp_info.priv = obj; obj 140 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c return drm_gem_dmabuf_export(obj->dev, &exp_info); obj 151 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c struct drm_gem_object *obj; obj 156 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c obj = dma_buf->priv; obj 157 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c if (obj->dev == dev) { obj 162 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c drm_gem_object_get(obj); obj 163 
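
The omap_gem_dmabuf.c map entries above export a physically contiguous BO as a single-entry sg_table: pin to obtain the DMA address, point one scatterlist entry at it, and sync. A hedged sketch with the attachment bookkeeping trimmed (example_map_dma_buf() is a hypothetical name):

static struct sg_table *example_map_dma_buf(struct dma_buf_attachment *attachment,
                                            enum dma_data_direction dir)
{
        struct drm_gem_object *obj = attachment->dmabuf->priv;
        struct sg_table *sg;
        dma_addr_t dma_addr;
        int ret;

        sg = kzalloc(sizeof(*sg), GFP_KERNEL);
        if (!sg)
                return ERR_PTR(-ENOMEM);

        /* Pinning yields one contiguous DMA address for the BO... */
        ret = omap_gem_pin(obj, &dma_addr);
        if (ret)
                goto out_free;

        /* ...so the whole object fits in a single scatterlist entry. */
        ret = sg_alloc_table(sg, 1, GFP_KERNEL);
        if (ret)
                goto out_unpin;

        sg_set_page(sg->sgl, pfn_to_page(PFN_DOWN(dma_addr)), obj->size, 0);
        sg_dma_address(sg->sgl) = dma_addr;
        sg_dma_len(sg->sgl) = obj->size;

        omap_gem_dma_sync_buffer(obj, dir);
        return sg;

out_unpin:
        omap_gem_unpin(obj);
out_free:
        kfree(sg);
        return ERR_PTR(ret);
}
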
drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c return obj; obj 179 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c obj = omap_gem_new_dmabuf(dev, dma_buf->size, sgt); obj 180 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c if (IS_ERR(obj)) { obj 181 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c ret = PTR_ERR(obj); obj 185 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c obj->import_attach = attach; obj 187 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c return obj; obj 156 drivers/gpu/drm/omapdrm/omap_plane.c struct drm_mode_object *obj) obj 170 drivers/gpu/drm/omapdrm/omap_plane.c if (plane->rotation_property && obj != &plane->base) obj 171 drivers/gpu/drm/omapdrm/omap_plane.c drm_object_attach_property(obj, plane->rotation_property, obj 175 drivers/gpu/drm/omapdrm/omap_plane.c drm_object_attach_property(obj, priv->zorder_prop, 0); obj 24 drivers/gpu/drm/omapdrm/omap_plane.h struct drm_mode_object *obj); obj 17 drivers/gpu/drm/panfrost/panfrost_gem.c static void panfrost_gem_free_object(struct drm_gem_object *obj) obj 19 drivers/gpu/drm/panfrost/panfrost_gem.c struct panfrost_gem_object *bo = to_panfrost_bo(obj); obj 20 drivers/gpu/drm/panfrost/panfrost_gem.c struct panfrost_device *pfdev = obj->dev->dev_private; obj 52 drivers/gpu/drm/panfrost/panfrost_gem.c drm_gem_shmem_free_object(obj); obj 96 drivers/gpu/drm/panfrost/panfrost_gem.c drm_gem_object_put_unlocked(&mapping->obj->base.base); obj 118 drivers/gpu/drm/panfrost/panfrost_gem.c int panfrost_gem_open(struct drm_gem_object *obj, struct drm_file *file_priv) obj 121 drivers/gpu/drm/panfrost/panfrost_gem.c size_t size = obj->size; obj 123 drivers/gpu/drm/panfrost/panfrost_gem.c struct panfrost_gem_object *bo = to_panfrost_bo(obj); obj 134 drivers/gpu/drm/panfrost/panfrost_gem.c drm_gem_object_get(obj); obj 135 drivers/gpu/drm/panfrost/panfrost_gem.c mapping->obj = bo; obj 173 drivers/gpu/drm/panfrost/panfrost_gem.c void panfrost_gem_close(struct drm_gem_object *obj, struct drm_file *file_priv) obj 176 drivers/gpu/drm/panfrost/panfrost_gem.c struct panfrost_gem_object *bo = to_panfrost_bo(obj); obj 192 drivers/gpu/drm/panfrost/panfrost_gem.c static int panfrost_gem_pin(struct drm_gem_object *obj) obj 194 drivers/gpu/drm/panfrost/panfrost_gem.c if (to_panfrost_bo(obj)->is_heap) obj 197 drivers/gpu/drm/panfrost/panfrost_gem.c return drm_gem_shmem_pin(obj); obj 223 drivers/gpu/drm/panfrost/panfrost_gem.c struct panfrost_gem_object *obj; obj 225 drivers/gpu/drm/panfrost/panfrost_gem.c obj = kzalloc(sizeof(*obj), GFP_KERNEL); obj 226 drivers/gpu/drm/panfrost/panfrost_gem.c if (!obj) obj 229 drivers/gpu/drm/panfrost/panfrost_gem.c INIT_LIST_HEAD(&obj->mappings.list); obj 230 drivers/gpu/drm/panfrost/panfrost_gem.c mutex_init(&obj->mappings.lock); obj 231 drivers/gpu/drm/panfrost/panfrost_gem.c obj->base.base.funcs = &panfrost_gem_funcs; obj 233 drivers/gpu/drm/panfrost/panfrost_gem.c return &obj->base.base; obj 276 drivers/gpu/drm/panfrost/panfrost_gem.c struct drm_gem_object *obj; obj 279 drivers/gpu/drm/panfrost/panfrost_gem.c obj = drm_gem_shmem_prime_import_sg_table(dev, attach, sgt); obj 280 drivers/gpu/drm/panfrost/panfrost_gem.c if (IS_ERR(obj)) obj 281 drivers/gpu/drm/panfrost/panfrost_gem.c return ERR_CAST(obj); obj 283 drivers/gpu/drm/panfrost/panfrost_gem.c bo = to_panfrost_bo(obj); obj 286 drivers/gpu/drm/panfrost/panfrost_gem.c return obj; obj 46 drivers/gpu/drm/panfrost/panfrost_gem.h struct panfrost_gem_object *obj; obj 53 drivers/gpu/drm/panfrost/panfrost_gem.h struct panfrost_gem_object *to_panfrost_bo(struct drm_gem_object *obj) obj 55 
drivers/gpu/drm/panfrost/panfrost_gem.h return container_of(to_drm_gem_shmem_obj(obj), struct panfrost_gem_object, base); obj 77 drivers/gpu/drm/panfrost/panfrost_gem.h int panfrost_gem_open(struct drm_gem_object *obj, struct drm_file *file_priv); obj 78 drivers/gpu/drm/panfrost/panfrost_gem.h void panfrost_gem_close(struct drm_gem_object *obj, obj 39 drivers/gpu/drm/panfrost/panfrost_gem_shrinker.c static bool panfrost_gem_purge(struct drm_gem_object *obj) obj 41 drivers/gpu/drm/panfrost/panfrost_gem_shrinker.c struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); obj 42 drivers/gpu/drm/panfrost/panfrost_gem_shrinker.c struct panfrost_gem_object *bo = to_panfrost_bo(obj); obj 51 drivers/gpu/drm/panfrost/panfrost_gem_shrinker.c drm_gem_shmem_purge_locked(obj); obj 277 drivers/gpu/drm/panfrost/panfrost_job.c atomic_dec(&job->mappings[i]->obj->gpu_usecount); obj 279 drivers/gpu/drm/panfrost/panfrost_mmu.c struct panfrost_gem_object *bo = mapping->obj; obj 280 drivers/gpu/drm/panfrost/panfrost_mmu.c struct drm_gem_object *obj = &bo->base.base; obj 281 drivers/gpu/drm/panfrost/panfrost_mmu.c struct panfrost_device *pfdev = to_panfrost_device(obj->dev); obj 291 drivers/gpu/drm/panfrost/panfrost_mmu.c sgt = drm_gem_shmem_get_pages_sgt(obj); obj 304 drivers/gpu/drm/panfrost/panfrost_mmu.c struct panfrost_gem_object *bo = mapping->obj; obj 305 drivers/gpu/drm/panfrost/panfrost_mmu.c struct drm_gem_object *obj = &bo->base.base; obj 306 drivers/gpu/drm/panfrost/panfrost_mmu.c struct panfrost_device *pfdev = to_panfrost_device(obj->dev); obj 461 drivers/gpu/drm/panfrost/panfrost_mmu.c bo = bomapping->obj; obj 191 drivers/gpu/drm/panfrost/panfrost_perfcnt.c drm_gem_shmem_vunmap(&perfcnt->mapping->obj->base.base, perfcnt->buf); obj 193 drivers/gpu/drm/panfrost/panfrost_perfcnt.c panfrost_gem_close(&perfcnt->mapping->obj->base.base, file_priv); obj 424 drivers/gpu/drm/qxl/qxl_display.c qobj = gem_to_qxl_bo(fb->obj[0]); obj 484 drivers/gpu/drm/qxl/qxl_display.c bo = gem_to_qxl_bo(state->fb->obj[0]); obj 540 drivers/gpu/drm/qxl/qxl_display.c struct qxl_bo *bo = gem_to_qxl_bo(plane->state->fb->obj[0]); obj 573 drivers/gpu/drm/qxl/qxl_display.c struct qxl_bo *bo = gem_to_qxl_bo(old_state->fb->obj[0]); obj 592 drivers/gpu/drm/qxl/qxl_display.c struct drm_gem_object *obj; obj 605 drivers/gpu/drm/qxl/qxl_display.c obj = fb->obj[0]; obj 606 drivers/gpu/drm/qxl/qxl_display.c user_bo = gem_to_qxl_bo(obj); obj 779 drivers/gpu/drm/qxl/qxl_display.c struct drm_gem_object *obj; obj 787 drivers/gpu/drm/qxl/qxl_display.c obj = new_state->fb->obj[0]; obj 788 drivers/gpu/drm/qxl/qxl_display.c user_bo = gem_to_qxl_bo(obj); obj 828 drivers/gpu/drm/qxl/qxl_display.c struct drm_gem_object *obj; obj 839 drivers/gpu/drm/qxl/qxl_display.c obj = old_state->fb->obj[0]; obj 840 drivers/gpu/drm/qxl/qxl_display.c user_bo = gem_to_qxl_bo(obj); obj 331 drivers/gpu/drm/qxl/qxl_drv.h struct drm_gem_object **obj); obj 340 drivers/gpu/drm/qxl/qxl_drv.h int qxl_gem_object_open(struct drm_gem_object *obj, struct drm_file *file_priv); obj 341 drivers/gpu/drm/qxl/qxl_drv.h void qxl_gem_object_close(struct drm_gem_object *obj, obj 447 drivers/gpu/drm/qxl/qxl_drv.h int qxl_gem_prime_pin(struct drm_gem_object *obj); obj 448 drivers/gpu/drm/qxl/qxl_drv.h void qxl_gem_prime_unpin(struct drm_gem_object *obj); obj 449 drivers/gpu/drm/qxl/qxl_drv.h struct sg_table *qxl_gem_prime_get_sg_table(struct drm_gem_object *obj); obj 453 drivers/gpu/drm/qxl/qxl_drv.h void *qxl_gem_prime_vmap(struct drm_gem_object *obj); obj 454 
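
The panfrost_gem_shrinker.c entries above suggest the purge-eligibility test: a BO may only be reclaimed when no running job still uses it. A sketch assuming the gpu_usecount atomic and the shmem pages_lock visible in the listing (the real driver also tears down GPU mappings before purging):

static bool example_panfrost_purge(struct drm_gem_object *obj)
{
        struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj);
        struct panfrost_gem_object *bo = to_panfrost_bo(obj);

        /* Never reclaim a BO a queued or running job still references. */
        if (atomic_read(&bo->gpu_usecount))
                return false;

        if (!mutex_trylock(&shmem->pages_lock))
                return false;

        drm_gem_shmem_purge_locked(obj);
        mutex_unlock(&shmem->pages_lock);

        return true;
}
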
drivers/gpu/drm/qxl/qxl_drv.h void qxl_gem_prime_vunmap(struct drm_gem_object *obj, void *vaddr); obj 455 drivers/gpu/drm/qxl/qxl_drv.h int qxl_gem_prime_mmap(struct drm_gem_object *obj, obj 49 drivers/gpu/drm/qxl/qxl_gem.c struct drm_gem_object **obj) obj 54 drivers/gpu/drm/qxl/qxl_gem.c *obj = NULL; obj 66 drivers/gpu/drm/qxl/qxl_gem.c *obj = &qbo->tbo.base; obj 104 drivers/gpu/drm/qxl/qxl_gem.c int qxl_gem_object_open(struct drm_gem_object *obj, struct drm_file *file_priv) obj 109 drivers/gpu/drm/qxl/qxl_gem.c void qxl_gem_object_close(struct drm_gem_object *obj, obj 31 drivers/gpu/drm/qxl/qxl_prime.c int qxl_gem_prime_pin(struct drm_gem_object *obj) obj 33 drivers/gpu/drm/qxl/qxl_prime.c struct qxl_bo *bo = gem_to_qxl_bo(obj); obj 38 drivers/gpu/drm/qxl/qxl_prime.c void qxl_gem_prime_unpin(struct drm_gem_object *obj) obj 40 drivers/gpu/drm/qxl/qxl_prime.c struct qxl_bo *bo = gem_to_qxl_bo(obj); obj 45 drivers/gpu/drm/qxl/qxl_prime.c struct sg_table *qxl_gem_prime_get_sg_table(struct drm_gem_object *obj) obj 57 drivers/gpu/drm/qxl/qxl_prime.c void *qxl_gem_prime_vmap(struct drm_gem_object *obj) obj 59 drivers/gpu/drm/qxl/qxl_prime.c struct qxl_bo *bo = gem_to_qxl_bo(obj); obj 70 drivers/gpu/drm/qxl/qxl_prime.c void qxl_gem_prime_vunmap(struct drm_gem_object *obj, void *vaddr) obj 72 drivers/gpu/drm/qxl/qxl_prime.c struct qxl_bo *bo = gem_to_qxl_bo(obj); obj 77 drivers/gpu/drm/qxl/qxl_prime.c int qxl_gem_prime_mmap(struct drm_gem_object *obj, obj 1152 drivers/gpu/drm/radeon/atombios_crtc.c struct drm_gem_object *obj; obj 1177 drivers/gpu/drm/radeon/atombios_crtc.c obj = target_fb->obj[0]; obj 1178 drivers/gpu/drm/radeon/atombios_crtc.c rbo = gem_to_radeon_bo(obj); obj 1452 drivers/gpu/drm/radeon/atombios_crtc.c rbo = gem_to_radeon_bo(fb->obj[0]); obj 1473 drivers/gpu/drm/radeon/atombios_crtc.c struct drm_gem_object *obj; obj 1495 drivers/gpu/drm/radeon/atombios_crtc.c obj = target_fb->obj[0]; obj 1496 drivers/gpu/drm/radeon/atombios_crtc.c rbo = gem_to_radeon_bo(obj); obj 1661 drivers/gpu/drm/radeon/atombios_crtc.c rbo = gem_to_radeon_bo(fb->obj[0]); obj 2170 drivers/gpu/drm/radeon/atombios_crtc.c rbo = gem_to_radeon_bo(crtc->primary->fb->obj[0]); obj 580 drivers/gpu/drm/radeon/radeon.h struct drm_gem_object **obj); obj 154 drivers/gpu/drm/radeon/radeon_bios.c union acpi_object atrm_arg_elements[2], *obj; obj 173 drivers/gpu/drm/radeon/radeon_bios.c obj = (union acpi_object *)buffer.pointer; obj 174 drivers/gpu/drm/radeon/radeon_bios.c memcpy(bios+offset, obj->buffer.pointer, obj->buffer.length); obj 175 drivers/gpu/drm/radeon/radeon_bios.c len = obj->buffer.length; obj 286 drivers/gpu/drm/radeon/radeon_cursor.c struct drm_gem_object *obj; obj 293 drivers/gpu/drm/radeon/radeon_cursor.c obj = NULL; obj 303 drivers/gpu/drm/radeon/radeon_cursor.c obj = drm_gem_object_lookup(file_priv, handle); obj 304 drivers/gpu/drm/radeon/radeon_cursor.c if (!obj) { obj 309 drivers/gpu/drm/radeon/radeon_cursor.c robj = gem_to_radeon_bo(obj); obj 312 drivers/gpu/drm/radeon/radeon_cursor.c drm_gem_object_put_unlocked(obj); obj 322 drivers/gpu/drm/radeon/radeon_cursor.c drm_gem_object_put_unlocked(obj); obj 360 drivers/gpu/drm/radeon/radeon_cursor.c radeon_crtc->cursor_bo = obj; obj 1604 drivers/gpu/drm/radeon/radeon_device.c if (fb == NULL || fb->obj[0] == NULL) { obj 1607 drivers/gpu/drm/radeon/radeon_device.c robj = gem_to_radeon_bo(fb->obj[0]); obj 490 drivers/gpu/drm/radeon/radeon_display.c struct drm_gem_object *obj; obj 511 drivers/gpu/drm/radeon/radeon_display.c obj = crtc->primary->fb->obj[0]; obj 
514 drivers/gpu/drm/radeon/radeon_display.c drm_gem_object_get(obj); obj 515 drivers/gpu/drm/radeon/radeon_display.c work->old_rbo = gem_to_radeon_bo(obj); obj 517 drivers/gpu/drm/radeon/radeon_display.c obj = fb->obj[0]; obj 518 drivers/gpu/drm/radeon/radeon_display.c new_rbo = gem_to_radeon_bo(obj); obj 1302 drivers/gpu/drm/radeon/radeon_display.c struct drm_gem_object *obj) obj 1305 drivers/gpu/drm/radeon/radeon_display.c fb->obj[0] = obj; obj 1309 drivers/gpu/drm/radeon/radeon_display.c fb->obj[0] = NULL; obj 1320 drivers/gpu/drm/radeon/radeon_display.c struct drm_gem_object *obj; obj 1324 drivers/gpu/drm/radeon/radeon_display.c obj = drm_gem_object_lookup(file_priv, mode_cmd->handles[0]); obj 1325 drivers/gpu/drm/radeon/radeon_display.c if (obj == NULL) { obj 1332 drivers/gpu/drm/radeon/radeon_display.c if (obj->import_attach) { obj 1339 drivers/gpu/drm/radeon/radeon_display.c drm_gem_object_put_unlocked(obj); obj 1343 drivers/gpu/drm/radeon/radeon_display.c ret = radeon_framebuffer_init(dev, fb, mode_cmd, obj); obj 1346 drivers/gpu/drm/radeon/radeon_display.c drm_gem_object_put_unlocked(obj); obj 130 drivers/gpu/drm/radeon/radeon_drv.c void radeon_gem_object_free(struct drm_gem_object *obj); obj 131 drivers/gpu/drm/radeon/radeon_drv.c int radeon_gem_object_open(struct drm_gem_object *obj, obj 133 drivers/gpu/drm/radeon/radeon_drv.c void radeon_gem_object_close(struct drm_gem_object *obj, obj 151 drivers/gpu/drm/radeon/radeon_drv.c struct sg_table *radeon_gem_prime_get_sg_table(struct drm_gem_object *obj); obj 155 drivers/gpu/drm/radeon/radeon_drv.c int radeon_gem_prime_pin(struct drm_gem_object *obj); obj 156 drivers/gpu/drm/radeon/radeon_drv.c void radeon_gem_prime_unpin(struct drm_gem_object *obj); obj 157 drivers/gpu/drm/radeon/radeon_drv.c void *radeon_gem_prime_vmap(struct drm_gem_object *obj); obj 158 drivers/gpu/drm/radeon/radeon_drv.c void radeon_gem_prime_vunmap(struct drm_gem_object *obj, void *vaddr); obj 315 drivers/gpu/drm/radeon/radeon_fb.c if (fb->obj[0]) { obj 316 drivers/gpu/drm/radeon/radeon_fb.c radeonfb_destroy_pinned_object(fb->obj[0]); obj 317 drivers/gpu/drm/radeon/radeon_fb.c fb->obj[0] = NULL; obj 403 drivers/gpu/drm/radeon/radeon_fb.c if (robj == gem_to_radeon_bo(rdev->mode_info.rfbdev->fb.obj[0])) obj 50 drivers/gpu/drm/radeon/radeon_gem.c struct drm_gem_object **obj) obj 56 drivers/gpu/drm/radeon/radeon_gem.c *obj = NULL; obj 86 drivers/gpu/drm/radeon/radeon_gem.c *obj = &robj->tbo.base; obj 148 drivers/gpu/drm/radeon/radeon_gem.c int radeon_gem_object_open(struct drm_gem_object *obj, struct drm_file *file_priv) obj 150 drivers/gpu/drm/radeon/radeon_gem.c struct radeon_bo *rbo = gem_to_radeon_bo(obj); obj 178 drivers/gpu/drm/radeon/radeon_gem.c void radeon_gem_object_close(struct drm_gem_object *obj, obj 181 drivers/gpu/drm/radeon/radeon_gem.c struct radeon_bo *rbo = gem_to_radeon_bo(obj); obj 381 drivers/gpu/drm/radeon/radeon_legacy_crtc.c struct drm_gem_object *obj; obj 424 drivers/gpu/drm/radeon/radeon_legacy_crtc.c obj = target_fb->obj[0]; obj 425 drivers/gpu/drm/radeon/radeon_legacy_crtc.c rbo = gem_to_radeon_bo(obj); obj 452 drivers/gpu/drm/radeon/radeon_legacy_crtc.c old_rbo = gem_to_radeon_bo(fb->obj[0]); obj 559 drivers/gpu/drm/radeon/radeon_legacy_crtc.c rbo = gem_to_radeon_bo(fb->obj[0]); obj 1095 drivers/gpu/drm/radeon/radeon_legacy_crtc.c rbo = gem_to_radeon_bo(crtc->primary->fb->obj[0]); obj 931 drivers/gpu/drm/radeon/radeon_mode.h struct drm_gem_object *obj); obj 34 drivers/gpu/drm/radeon/radeon_prime.c struct sg_table 
*radeon_gem_prime_get_sg_table(struct drm_gem_object *obj) obj 36 drivers/gpu/drm/radeon/radeon_prime.c struct radeon_bo *bo = gem_to_radeon_bo(obj); obj 42 drivers/gpu/drm/radeon/radeon_prime.c void *radeon_gem_prime_vmap(struct drm_gem_object *obj) obj 44 drivers/gpu/drm/radeon/radeon_prime.c struct radeon_bo *bo = gem_to_radeon_bo(obj); obj 55 drivers/gpu/drm/radeon/radeon_prime.c void radeon_gem_prime_vunmap(struct drm_gem_object *obj, void *vaddr) obj 57 drivers/gpu/drm/radeon/radeon_prime.c struct radeon_bo *bo = gem_to_radeon_bo(obj); obj 86 drivers/gpu/drm/radeon/radeon_prime.c int radeon_gem_prime_pin(struct drm_gem_object *obj) obj 88 drivers/gpu/drm/radeon/radeon_prime.c struct radeon_bo *bo = gem_to_radeon_bo(obj); obj 104 drivers/gpu/drm/radeon/radeon_prime.c void radeon_gem_prime_unpin(struct drm_gem_object *obj) obj 106 drivers/gpu/drm/radeon/radeon_prime.c struct radeon_bo *bo = gem_to_radeon_bo(obj); obj 29 drivers/gpu/drm/rockchip/rockchip_drm_fb.c struct drm_gem_object **obj, unsigned int num_planes) obj 42 drivers/gpu/drm/rockchip/rockchip_drm_fb.c fb->obj[i] = obj[i]; obj 64 drivers/gpu/drm/rockchip/rockchip_drm_fb.c struct drm_gem_object *obj; obj 74 drivers/gpu/drm/rockchip/rockchip_drm_fb.c obj = drm_gem_object_lookup(file_priv, mode_cmd->handles[i]); obj 75 drivers/gpu/drm/rockchip/rockchip_drm_fb.c if (!obj) { obj 86 drivers/gpu/drm/rockchip/rockchip_drm_fb.c if (obj->size < min_size) { obj 87 drivers/gpu/drm/rockchip/rockchip_drm_fb.c drm_gem_object_put_unlocked(obj); obj 91 drivers/gpu/drm/rockchip/rockchip_drm_fb.c objs[i] = obj; obj 122 drivers/gpu/drm/rockchip/rockchip_drm_fb.c struct drm_gem_object *obj) obj 126 drivers/gpu/drm/rockchip/rockchip_drm_fb.c fb = rockchip_fb_alloc(dev, mode_cmd, &obj, 1); obj 13 drivers/gpu/drm/rockchip/rockchip_drm_fb.h struct drm_gem_object *obj); obj 156 drivers/gpu/drm/rockchip/rockchip_drm_gem.c struct drm_gem_object *obj = &rk_obj->base; obj 157 drivers/gpu/drm/rockchip/rockchip_drm_gem.c struct drm_device *drm = obj->dev; obj 164 drivers/gpu/drm/rockchip/rockchip_drm_gem.c rk_obj->kvaddr = dma_alloc_attrs(drm->dev, obj->size, obj 168 drivers/gpu/drm/rockchip/rockchip_drm_gem.c DRM_ERROR("failed to allocate %zu byte dma buffer", obj->size); obj 178 drivers/gpu/drm/rockchip/rockchip_drm_gem.c struct drm_gem_object *obj = &rk_obj->base; obj 179 drivers/gpu/drm/rockchip/rockchip_drm_gem.c struct drm_device *drm = obj->dev; obj 197 drivers/gpu/drm/rockchip/rockchip_drm_gem.c struct drm_gem_object *obj = &rk_obj->base; obj 198 drivers/gpu/drm/rockchip/rockchip_drm_gem.c struct drm_device *drm = obj->dev; obj 200 drivers/gpu/drm/rockchip/rockchip_drm_gem.c dma_free_attrs(drm->dev, obj->size, rk_obj->kvaddr, rk_obj->dma_addr, obj 212 drivers/gpu/drm/rockchip/rockchip_drm_gem.c static int rockchip_drm_gem_object_mmap_iommu(struct drm_gem_object *obj, obj 215 drivers/gpu/drm/rockchip/rockchip_drm_gem.c struct rockchip_gem_object *rk_obj = to_rockchip_obj(obj); obj 216 drivers/gpu/drm/rockchip/rockchip_drm_gem.c unsigned int count = obj->size >> PAGE_SHIFT; obj 225 drivers/gpu/drm/rockchip/rockchip_drm_gem.c static int rockchip_drm_gem_object_mmap_dma(struct drm_gem_object *obj, obj 228 drivers/gpu/drm/rockchip/rockchip_drm_gem.c struct rockchip_gem_object *rk_obj = to_rockchip_obj(obj); obj 229 drivers/gpu/drm/rockchip/rockchip_drm_gem.c struct drm_device *drm = obj->dev; obj 232 drivers/gpu/drm/rockchip/rockchip_drm_gem.c obj->size, rk_obj->dma_attrs); obj 235 drivers/gpu/drm/rockchip/rockchip_drm_gem.c static int 
rockchip_drm_gem_object_mmap(struct drm_gem_object *obj, obj 239 drivers/gpu/drm/rockchip/rockchip_drm_gem.c struct rockchip_gem_object *rk_obj = to_rockchip_obj(obj); obj 248 drivers/gpu/drm/rockchip/rockchip_drm_gem.c ret = rockchip_drm_gem_object_mmap_iommu(obj, vma); obj 250 drivers/gpu/drm/rockchip/rockchip_drm_gem.c ret = rockchip_drm_gem_object_mmap_dma(obj, vma); obj 258 drivers/gpu/drm/rockchip/rockchip_drm_gem.c int rockchip_gem_mmap_buf(struct drm_gem_object *obj, obj 263 drivers/gpu/drm/rockchip/rockchip_drm_gem.c ret = drm_gem_mmap_obj(obj, obj->size, vma); obj 267 drivers/gpu/drm/rockchip/rockchip_drm_gem.c return rockchip_drm_gem_object_mmap(obj, vma); obj 273 drivers/gpu/drm/rockchip/rockchip_drm_gem.c struct drm_gem_object *obj; obj 286 drivers/gpu/drm/rockchip/rockchip_drm_gem.c obj = vma->vm_private_data; obj 288 drivers/gpu/drm/rockchip/rockchip_drm_gem.c return rockchip_drm_gem_object_mmap(obj, vma); obj 301 drivers/gpu/drm/rockchip/rockchip_drm_gem.c struct drm_gem_object *obj; obj 309 drivers/gpu/drm/rockchip/rockchip_drm_gem.c obj = &rk_obj->base; obj 311 drivers/gpu/drm/rockchip/rockchip_drm_gem.c drm_gem_object_init(drm, obj, size); obj 342 drivers/gpu/drm/rockchip/rockchip_drm_gem.c void rockchip_gem_free_object(struct drm_gem_object *obj) obj 344 drivers/gpu/drm/rockchip/rockchip_drm_gem.c struct drm_device *drm = obj->dev; obj 346 drivers/gpu/drm/rockchip/rockchip_drm_gem.c struct rockchip_gem_object *rk_obj = to_rockchip_obj(obj); obj 348 drivers/gpu/drm/rockchip/rockchip_drm_gem.c if (obj->import_attach) { obj 355 drivers/gpu/drm/rockchip/rockchip_drm_gem.c drm_prime_gem_destroy(obj, rk_obj->sgt); obj 376 drivers/gpu/drm/rockchip/rockchip_drm_gem.c struct drm_gem_object *obj; obj 383 drivers/gpu/drm/rockchip/rockchip_drm_gem.c obj = &rk_obj->base; obj 389 drivers/gpu/drm/rockchip/rockchip_drm_gem.c ret = drm_gem_handle_create(file_priv, obj, handle); obj 394 drivers/gpu/drm/rockchip/rockchip_drm_gem.c drm_gem_object_put_unlocked(obj); obj 399 drivers/gpu/drm/rockchip/rockchip_drm_gem.c rockchip_gem_free_object(obj); obj 436 drivers/gpu/drm/rockchip/rockchip_drm_gem.c struct sg_table *rockchip_gem_prime_get_sg_table(struct drm_gem_object *obj) obj 438 drivers/gpu/drm/rockchip/rockchip_drm_gem.c struct rockchip_gem_object *rk_obj = to_rockchip_obj(obj); obj 439 drivers/gpu/drm/rockchip/rockchip_drm_gem.c struct drm_device *drm = obj->dev; obj 451 drivers/gpu/drm/rockchip/rockchip_drm_gem.c rk_obj->dma_addr, obj->size, obj 542 drivers/gpu/drm/rockchip/rockchip_drm_gem.c void *rockchip_gem_prime_vmap(struct drm_gem_object *obj) obj 544 drivers/gpu/drm/rockchip/rockchip_drm_gem.c struct rockchip_gem_object *rk_obj = to_rockchip_obj(obj); obj 556 drivers/gpu/drm/rockchip/rockchip_drm_gem.c void rockchip_gem_prime_vunmap(struct drm_gem_object *obj, void *vaddr) obj 558 drivers/gpu/drm/rockchip/rockchip_drm_gem.c struct rockchip_gem_object *rk_obj = to_rockchip_obj(obj); obj 29 drivers/gpu/drm/rockchip/rockchip_drm_gem.h struct sg_table *rockchip_gem_prime_get_sg_table(struct drm_gem_object *obj); obj 34 drivers/gpu/drm/rockchip/rockchip_drm_gem.h void *rockchip_gem_prime_vmap(struct drm_gem_object *obj); obj 35 drivers/gpu/drm/rockchip/rockchip_drm_gem.h void rockchip_gem_prime_vunmap(struct drm_gem_object *obj, void *vaddr); obj 41 drivers/gpu/drm/rockchip/rockchip_drm_gem.h int rockchip_gem_mmap_buf(struct drm_gem_object *obj, obj 48 drivers/gpu/drm/rockchip/rockchip_drm_gem.h void rockchip_gem_free_object(struct drm_gem_object *obj); obj 794 
drivers/gpu/drm/rockchip/rockchip_drm_vop.c struct drm_gem_object *obj, *uv_obj; obj 819 drivers/gpu/drm/rockchip/rockchip_drm_vop.c obj = fb->obj[0]; obj 820 drivers/gpu/drm/rockchip/rockchip_drm_vop.c rk_obj = to_rockchip_obj(obj); obj 862 drivers/gpu/drm/rockchip/rockchip_drm_vop.c uv_obj = fb->obj[1]; obj 170 drivers/gpu/drm/sis/sis_mm.c struct sis_memblock *obj; obj 173 drivers/gpu/drm/sis/sis_mm.c obj = idr_find(&dev_priv->object_idr, mem->free); obj 174 drivers/gpu/drm/sis/sis_mm.c if (obj == NULL) { obj 180 drivers/gpu/drm/sis/sis_mm.c list_del(&obj->owner_list); obj 181 drivers/gpu/drm/sis/sis_mm.c if (drm_mm_node_allocated(&obj->mm_node)) obj 182 drivers/gpu/drm/sis/sis_mm.c drm_mm_remove_node(&obj->mm_node); obj 185 drivers/gpu/drm/sis/sis_mm.c sis_free(obj->req.offset); obj 187 drivers/gpu/drm/sis/sis_mm.c kfree(obj); obj 383 drivers/gpu/drm/tegra/drm.c struct tegra_bo *obj; obj 407 drivers/gpu/drm/tegra/drm.c obj = host1x_to_tegra_bo(bo); obj 408 drivers/gpu/drm/tegra/drm.c refs[num_refs++] = &obj->gem; obj 415 drivers/gpu/drm/tegra/drm.c if (offset & 3 || offset > obj->gem.size) { obj 428 drivers/gpu/drm/tegra/drm.c struct tegra_bo *obj; obj 437 drivers/gpu/drm/tegra/drm.c obj = host1x_to_tegra_bo(reloc->cmdbuf.bo); obj 438 drivers/gpu/drm/tegra/drm.c refs[num_refs++] = &obj->gem; obj 446 drivers/gpu/drm/tegra/drm.c reloc->cmdbuf.offset >= obj->gem.size) { obj 451 drivers/gpu/drm/tegra/drm.c obj = host1x_to_tegra_bo(reloc->target.bo); obj 452 drivers/gpu/drm/tegra/drm.c refs[num_refs++] = &obj->gem; obj 454 drivers/gpu/drm/tegra/drm.c if (reloc->target.offset >= obj->gem.size) { obj 116 drivers/gpu/drm/tegra/fb.c fb->obj[i] = &planes[i]->gem; obj 25 drivers/gpu/drm/tegra/gem.c struct tegra_bo *obj = host1x_to_tegra_bo(bo); obj 27 drivers/gpu/drm/tegra/gem.c drm_gem_object_put_unlocked(&obj->gem); obj 32 drivers/gpu/drm/tegra/gem.c struct tegra_bo *obj = host1x_to_tegra_bo(bo); obj 34 drivers/gpu/drm/tegra/gem.c *sgt = obj->sgt; obj 36 drivers/gpu/drm/tegra/gem.c return obj->paddr; obj 45 drivers/gpu/drm/tegra/gem.c struct tegra_bo *obj = host1x_to_tegra_bo(bo); obj 47 drivers/gpu/drm/tegra/gem.c if (obj->vaddr) obj 48 drivers/gpu/drm/tegra/gem.c return obj->vaddr; obj 49 drivers/gpu/drm/tegra/gem.c else if (obj->gem.import_attach) obj 50 drivers/gpu/drm/tegra/gem.c return dma_buf_vmap(obj->gem.import_attach->dmabuf); obj 52 drivers/gpu/drm/tegra/gem.c return vmap(obj->pages, obj->num_pages, VM_MAP, obj 58 drivers/gpu/drm/tegra/gem.c struct tegra_bo *obj = host1x_to_tegra_bo(bo); obj 60 drivers/gpu/drm/tegra/gem.c if (obj->vaddr) obj 62 drivers/gpu/drm/tegra/gem.c else if (obj->gem.import_attach) obj 63 drivers/gpu/drm/tegra/gem.c dma_buf_vunmap(obj->gem.import_attach->dmabuf, addr); obj 70 drivers/gpu/drm/tegra/gem.c struct tegra_bo *obj = host1x_to_tegra_bo(bo); obj 72 drivers/gpu/drm/tegra/gem.c if (obj->vaddr) obj 73 drivers/gpu/drm/tegra/gem.c return obj->vaddr + page * PAGE_SIZE; obj 74 drivers/gpu/drm/tegra/gem.c else if (obj->gem.import_attach) obj 75 drivers/gpu/drm/tegra/gem.c return dma_buf_kmap(obj->gem.import_attach->dmabuf, page); obj 77 drivers/gpu/drm/tegra/gem.c return vmap(obj->pages + page, 1, VM_MAP, obj 84 drivers/gpu/drm/tegra/gem.c struct tegra_bo *obj = host1x_to_tegra_bo(bo); obj 86 drivers/gpu/drm/tegra/gem.c if (obj->vaddr) obj 88 drivers/gpu/drm/tegra/gem.c else if (obj->gem.import_attach) obj 89 drivers/gpu/drm/tegra/gem.c dma_buf_kunmap(obj->gem.import_attach->dmabuf, page, addr); obj 96 drivers/gpu/drm/tegra/gem.c struct tegra_bo *obj = 
host1x_to_tegra_bo(bo); obj 98 drivers/gpu/drm/tegra/gem.c drm_gem_object_get(&obj->gem); obj 577 drivers/gpu/drm/tegra/hub.c tegra_display_hub_duplicate_state(struct drm_private_obj *obj) obj 581 drivers/gpu/drm/tegra/hub.c state = kmemdup(obj->state, sizeof(*state), GFP_KERNEL); obj 585 drivers/gpu/drm/tegra/hub.c __drm_atomic_helper_private_obj_duplicate_state(obj, &state->base); obj 590 drivers/gpu/drm/tegra/hub.c static void tegra_display_hub_destroy_state(struct drm_private_obj *obj, obj 281 drivers/gpu/drm/tiny/gm12u320.c vaddr = drm_gem_shmem_vmap(fb->obj[0]); obj 287 drivers/gpu/drm/tiny/gm12u320.c if (fb->obj[0]->import_attach) { obj 289 drivers/gpu/drm/tiny/gm12u320.c fb->obj[0]->import_attach->dmabuf, DMA_FROM_DEVICE); obj 330 drivers/gpu/drm/tiny/gm12u320.c if (fb->obj[0]->import_attach) { obj 331 drivers/gpu/drm/tiny/gm12u320.c ret = dma_buf_end_cpu_access(fb->obj[0]->import_attach->dmabuf, obj 337 drivers/gpu/drm/tiny/gm12u320.c drm_gem_shmem_vunmap(fb->obj[0], vaddr); obj 66 drivers/gpu/drm/udl/udl_dmabuf.c struct udl_gem_object *obj = to_udl_bo(attach->dmabuf->priv); obj 67 drivers/gpu/drm/udl/udl_dmabuf.c struct drm_device *dev = obj->base.dev; obj 82 drivers/gpu/drm/udl/udl_dmabuf.c if (!obj->pages) { obj 83 drivers/gpu/drm/udl/udl_dmabuf.c ret = udl_gem_get_pages(obj); obj 90 drivers/gpu/drm/udl/udl_dmabuf.c page_count = obj->base.size / PAGE_SIZE; obj 91 drivers/gpu/drm/udl/udl_dmabuf.c obj->sg = drm_prime_pages_to_sg(obj->pages, page_count); obj 92 drivers/gpu/drm/udl/udl_dmabuf.c if (IS_ERR(obj->sg)) { obj 94 drivers/gpu/drm/udl/udl_dmabuf.c return ERR_CAST(obj->sg); obj 99 drivers/gpu/drm/udl/udl_dmabuf.c ret = sg_alloc_table(sgt, obj->sg->orig_nents, GFP_KERNEL); obj 107 drivers/gpu/drm/udl/udl_dmabuf.c rd = obj->sg->sgl; obj 175 drivers/gpu/drm/udl/udl_dmabuf.c struct dma_buf *udl_gem_prime_export(struct drm_gem_object *obj, int flags) obj 180 drivers/gpu/drm/udl/udl_dmabuf.c exp_info.size = obj->size; obj 182 drivers/gpu/drm/udl/udl_dmabuf.c exp_info.priv = obj; obj 184 drivers/gpu/drm/udl/udl_dmabuf.c return drm_gem_dmabuf_export(obj->dev, &exp_info); obj 192 drivers/gpu/drm/udl/udl_dmabuf.c struct udl_gem_object *obj; obj 198 drivers/gpu/drm/udl/udl_dmabuf.c obj = udl_gem_alloc_object(dev, npages * PAGE_SIZE); obj 199 drivers/gpu/drm/udl/udl_dmabuf.c if (!obj) obj 202 drivers/gpu/drm/udl/udl_dmabuf.c obj->sg = sg; obj 203 drivers/gpu/drm/udl/udl_dmabuf.c obj->pages = kvmalloc_array(npages, sizeof(struct page *), GFP_KERNEL); obj 204 drivers/gpu/drm/udl/udl_dmabuf.c if (obj->pages == NULL) { obj 209 drivers/gpu/drm/udl/udl_dmabuf.c drm_prime_sg_to_page_addr_arrays(sg, obj->pages, NULL, npages); obj 211 drivers/gpu/drm/udl/udl_dmabuf.c *obj_p = obj; obj 91 drivers/gpu/drm/udl/udl_drv.h struct udl_gem_object *obj; obj 135 drivers/gpu/drm/udl/udl_drv.h struct dma_buf *udl_gem_prime_export(struct drm_gem_object *obj, int flags); obj 139 drivers/gpu/drm/udl/udl_drv.h int udl_gem_get_pages(struct udl_gem_object *obj); obj 140 drivers/gpu/drm/udl/udl_drv.h void udl_gem_put_pages(struct udl_gem_object *obj); obj 141 drivers/gpu/drm/udl/udl_drv.h int udl_gem_vmap(struct udl_gem_object *obj); obj 142 drivers/gpu/drm/udl/udl_drv.h void udl_gem_vunmap(struct udl_gem_object *obj); obj 97 drivers/gpu/drm/udl/udl_fb.c if (!fb->obj->vmapping) { obj 98 drivers/gpu/drm/udl/udl_fb.c ret = udl_gem_vmap(fb->obj); obj 103 drivers/gpu/drm/udl/udl_fb.c if (!fb->obj->vmapping) { obj 130 drivers/gpu/drm/udl/udl_fb.c (char *) fb->obj->vmapping, obj 292 drivers/gpu/drm/udl/udl_fb.c if 
(ufb->obj->base.import_attach) { obj 293 drivers/gpu/drm/udl/udl_fb.c ret = dma_buf_begin_cpu_access(ufb->obj->base.import_attach->dmabuf, obj 307 drivers/gpu/drm/udl/udl_fb.c if (ufb->obj->base.import_attach) { obj 308 drivers/gpu/drm/udl/udl_fb.c ret = dma_buf_end_cpu_access(ufb->obj->base.import_attach->dmabuf, obj 322 drivers/gpu/drm/udl/udl_fb.c if (ufb->obj) obj 323 drivers/gpu/drm/udl/udl_fb.c drm_gem_object_put_unlocked(&ufb->obj->base); obj 339 drivers/gpu/drm/udl/udl_fb.c struct udl_gem_object *obj) obj 343 drivers/gpu/drm/udl/udl_fb.c ufb->obj = obj; obj 359 drivers/gpu/drm/udl/udl_fb.c struct udl_gem_object *obj; obj 376 drivers/gpu/drm/udl/udl_fb.c obj = udl_gem_alloc_object(dev, size); obj 377 drivers/gpu/drm/udl/udl_fb.c if (!obj) obj 380 drivers/gpu/drm/udl/udl_fb.c ret = udl_gem_vmap(obj); obj 392 drivers/gpu/drm/udl/udl_fb.c ret = udl_framebuffer_init(dev, &ufbdev->ufb, &mode_cmd, obj); obj 400 drivers/gpu/drm/udl/udl_fb.c info->screen_base = ufbdev->ufb.obj->vmapping; obj 402 drivers/gpu/drm/udl/udl_fb.c info->fix.smem_start = (unsigned long)ufbdev->ufb.obj->vmapping; obj 409 drivers/gpu/drm/udl/udl_fb.c ufbdev->ufb.obj->vmapping); obj 413 drivers/gpu/drm/udl/udl_fb.c drm_gem_object_put_unlocked(&ufbdev->ufb.obj->base); obj 427 drivers/gpu/drm/udl/udl_fb.c if (ufbdev->ufb.obj) { obj 430 drivers/gpu/drm/udl/udl_fb.c drm_gem_object_put_unlocked(&ufbdev->ufb.obj->base); obj 500 drivers/gpu/drm/udl/udl_fb.c struct drm_gem_object *obj; obj 505 drivers/gpu/drm/udl/udl_fb.c obj = drm_gem_object_lookup(file, mode_cmd->handles[0]); obj 506 drivers/gpu/drm/udl/udl_fb.c if (obj == NULL) obj 512 drivers/gpu/drm/udl/udl_fb.c if (size > obj->size) { obj 513 drivers/gpu/drm/udl/udl_fb.c DRM_ERROR("object size not sufficient for fb %d %zu %d %d\n", size, obj->size, mode_cmd->pitches[0], mode_cmd->height); obj 521 drivers/gpu/drm/udl/udl_fb.c ret = udl_framebuffer_init(dev, ufb, mode_cmd, to_udl_bo(obj)); obj 17 drivers/gpu/drm/udl/udl_gem.c struct udl_gem_object *obj; obj 19 drivers/gpu/drm/udl/udl_gem.c obj = kzalloc(sizeof(*obj), GFP_KERNEL); obj 20 drivers/gpu/drm/udl/udl_gem.c if (obj == NULL) obj 23 drivers/gpu/drm/udl/udl_gem.c if (drm_gem_object_init(dev, &obj->base, size) != 0) { obj 24 drivers/gpu/drm/udl/udl_gem.c kfree(obj); obj 28 drivers/gpu/drm/udl/udl_gem.c obj->flags = UDL_BO_CACHEABLE; obj 29 drivers/gpu/drm/udl/udl_gem.c return obj; obj 38 drivers/gpu/drm/udl/udl_gem.c struct udl_gem_object *obj; obj 44 drivers/gpu/drm/udl/udl_gem.c obj = udl_gem_alloc_object(dev, size); obj 45 drivers/gpu/drm/udl/udl_gem.c if (obj == NULL) obj 48 drivers/gpu/drm/udl/udl_gem.c ret = drm_gem_handle_create(file, &obj->base, &handle); obj 50 drivers/gpu/drm/udl/udl_gem.c drm_gem_object_release(&obj->base); obj 51 drivers/gpu/drm/udl/udl_gem.c kfree(obj); obj 55 drivers/gpu/drm/udl/udl_gem.c drm_gem_object_put_unlocked(&obj->base); obj 60 drivers/gpu/drm/udl/udl_gem.c static void update_vm_cache_attr(struct udl_gem_object *obj, obj 63 drivers/gpu/drm/udl/udl_gem.c DRM_DEBUG_KMS("flags = 0x%x\n", obj->flags); obj 66 drivers/gpu/drm/udl/udl_gem.c if (obj->flags & UDL_BO_CACHEABLE) { obj 68 drivers/gpu/drm/udl/udl_gem.c } else if (obj->flags & UDL_BO_WC) { obj 106 drivers/gpu/drm/udl/udl_gem.c struct udl_gem_object *obj = to_udl_bo(vma->vm_private_data); obj 112 drivers/gpu/drm/udl/udl_gem.c if (!obj->pages) obj 115 drivers/gpu/drm/udl/udl_gem.c page = obj->pages[page_offset]; obj 119 drivers/gpu/drm/udl/udl_gem.c int udl_gem_get_pages(struct udl_gem_object *obj) obj 123 
drivers/gpu/drm/udl/udl_gem.c if (obj->pages) obj 126 drivers/gpu/drm/udl/udl_gem.c pages = drm_gem_get_pages(&obj->base); obj 130 drivers/gpu/drm/udl/udl_gem.c obj->pages = pages; obj 135 drivers/gpu/drm/udl/udl_gem.c void udl_gem_put_pages(struct udl_gem_object *obj) obj 137 drivers/gpu/drm/udl/udl_gem.c if (obj->base.import_attach) { obj 138 drivers/gpu/drm/udl/udl_gem.c kvfree(obj->pages); obj 139 drivers/gpu/drm/udl/udl_gem.c obj->pages = NULL; obj 143 drivers/gpu/drm/udl/udl_gem.c drm_gem_put_pages(&obj->base, obj->pages, false, false); obj 144 drivers/gpu/drm/udl/udl_gem.c obj->pages = NULL; obj 147 drivers/gpu/drm/udl/udl_gem.c int udl_gem_vmap(struct udl_gem_object *obj) obj 149 drivers/gpu/drm/udl/udl_gem.c int page_count = obj->base.size / PAGE_SIZE; obj 152 drivers/gpu/drm/udl/udl_gem.c if (obj->base.import_attach) { obj 153 drivers/gpu/drm/udl/udl_gem.c obj->vmapping = dma_buf_vmap(obj->base.import_attach->dmabuf); obj 154 drivers/gpu/drm/udl/udl_gem.c if (!obj->vmapping) obj 159 drivers/gpu/drm/udl/udl_gem.c ret = udl_gem_get_pages(obj); obj 163 drivers/gpu/drm/udl/udl_gem.c obj->vmapping = vmap(obj->pages, page_count, 0, PAGE_KERNEL); obj 164 drivers/gpu/drm/udl/udl_gem.c if (!obj->vmapping) obj 169 drivers/gpu/drm/udl/udl_gem.c void udl_gem_vunmap(struct udl_gem_object *obj) obj 171 drivers/gpu/drm/udl/udl_gem.c if (obj->base.import_attach) { obj 172 drivers/gpu/drm/udl/udl_gem.c dma_buf_vunmap(obj->base.import_attach->dmabuf, obj->vmapping); obj 176 drivers/gpu/drm/udl/udl_gem.c vunmap(obj->vmapping); obj 178 drivers/gpu/drm/udl/udl_gem.c udl_gem_put_pages(obj); obj 183 drivers/gpu/drm/udl/udl_gem.c struct udl_gem_object *obj = to_udl_bo(gem_obj); obj 185 drivers/gpu/drm/udl/udl_gem.c if (obj->vmapping) obj 186 drivers/gpu/drm/udl/udl_gem.c udl_gem_vunmap(obj); obj 189 drivers/gpu/drm/udl/udl_gem.c drm_prime_gem_destroy(gem_obj, obj->sg); obj 193 drivers/gpu/drm/udl/udl_gem.c if (obj->pages) obj 194 drivers/gpu/drm/udl/udl_gem.c udl_gem_put_pages(obj); obj 205 drivers/gpu/drm/udl/udl_gem.c struct drm_gem_object *obj; obj 210 drivers/gpu/drm/udl/udl_gem.c obj = drm_gem_object_lookup(file, handle); obj 211 drivers/gpu/drm/udl/udl_gem.c if (obj == NULL) { obj 215 drivers/gpu/drm/udl/udl_gem.c gobj = to_udl_bo(obj); obj 220 drivers/gpu/drm/udl/udl_gem.c ret = drm_gem_create_mmap_offset(obj); obj 31 drivers/gpu/drm/v3d/v3d_bo.c void v3d_free_object(struct drm_gem_object *obj) obj 33 drivers/gpu/drm/v3d/v3d_bo.c struct v3d_dev *v3d = to_v3d_dev(obj->dev); obj 34 drivers/gpu/drm/v3d/v3d_bo.c struct v3d_bo *bo = to_v3d_bo(obj); obj 40 drivers/gpu/drm/v3d/v3d_bo.c v3d->bo_stats.pages_allocated -= obj->size >> PAGE_SHIFT; obj 50 drivers/gpu/drm/v3d/v3d_bo.c drm_gem_shmem_free_object(obj); obj 70 drivers/gpu/drm/v3d/v3d_bo.c struct drm_gem_object *obj; obj 78 drivers/gpu/drm/v3d/v3d_bo.c obj = &bo->base.base; obj 80 drivers/gpu/drm/v3d/v3d_bo.c obj->funcs = &v3d_gem_funcs; obj 88 drivers/gpu/drm/v3d/v3d_bo.c v3d_bo_create_finish(struct drm_gem_object *obj) obj 90 drivers/gpu/drm/v3d/v3d_bo.c struct v3d_dev *v3d = to_v3d_dev(obj->dev); obj 91 drivers/gpu/drm/v3d/v3d_bo.c struct v3d_bo *bo = to_v3d_bo(obj); obj 108 drivers/gpu/drm/v3d/v3d_bo.c obj->size >> PAGE_SHIFT, obj 117 drivers/gpu/drm/v3d/v3d_bo.c v3d->bo_stats.pages_allocated += obj->size >> PAGE_SHIFT; obj 153 drivers/gpu/drm/v3d/v3d_bo.c struct drm_gem_object *obj; obj 156 drivers/gpu/drm/v3d/v3d_bo.c obj = drm_gem_shmem_prime_import_sg_table(dev, attach, sgt); obj 157 drivers/gpu/drm/v3d/v3d_bo.c if (IS_ERR(obj)) obj 158 
drivers/gpu/drm/v3d/v3d_bo.c return obj; obj 160 drivers/gpu/drm/v3d/v3d_bo.c ret = v3d_bo_create_finish(obj); obj 162 drivers/gpu/drm/v3d/v3d_bo.c drm_gem_shmem_free_object(obj); obj 166 drivers/gpu/drm/v3d/v3d_bo.c return obj; obj 43 drivers/gpu/drm/v3d/v3d_irq.c struct drm_gem_object *obj; obj 50 drivers/gpu/drm/v3d/v3d_irq.c obj = &bo->base.base; obj 67 drivers/gpu/drm/v3d/v3d_irq.c drm_gem_object_get(obj); obj 72 drivers/gpu/drm/v3d/v3d_irq.c V3D_CORE_WRITE(0, V3D_PTB_BPOS, obj->size); obj 75 drivers/gpu/drm/v3d/v3d_irq.c drm_gem_object_put_unlocked(obj); obj 53 drivers/gpu/drm/vboxvideo/vbox_drv.h struct drm_gem_object *obj; obj 158 drivers/gpu/drm/vboxvideo/vbox_drv.h struct drm_gem_object *obj); obj 168 drivers/gpu/drm/vboxvideo/vbox_drv.h u32 size, bool iskernel, struct drm_gem_object **obj); obj 136 drivers/gpu/drm/vboxvideo/vbox_fb.c if (afb->obj) { obj 137 drivers/gpu/drm/vboxvideo/vbox_fb.c struct drm_gem_vram_object *gbo = drm_gem_vram_of_gem(afb->obj); obj 142 drivers/gpu/drm/vboxvideo/vbox_fb.c drm_gem_object_put_unlocked(afb->obj); obj 143 drivers/gpu/drm/vboxvideo/vbox_fb.c afb->obj = NULL; obj 23 drivers/gpu/drm/vboxvideo/vbox_main.c if (vbox_fb->obj) obj 24 drivers/gpu/drm/vboxvideo/vbox_main.c drm_gem_object_put_unlocked(vbox_fb->obj); obj 107 drivers/gpu/drm/vboxvideo/vbox_main.c struct drm_gem_object *obj) obj 112 drivers/gpu/drm/vboxvideo/vbox_main.c vbox_fb->obj = obj; obj 275 drivers/gpu/drm/vboxvideo/vbox_main.c u32 size, bool iskernel, struct drm_gem_object **obj) obj 280 drivers/gpu/drm/vboxvideo/vbox_main.c *obj = NULL; obj 295 drivers/gpu/drm/vboxvideo/vbox_main.c *obj = &gbo->bo.base; obj 176 drivers/gpu/drm/vboxvideo/vbox_mode.c drm_gem_vram_of_gem(to_vbox_framebuffer(fb)->obj); obj 312 drivers/gpu/drm/vboxvideo/vbox_mode.c gbo = drm_gem_vram_of_gem(to_vbox_framebuffer(new_state->fb)->obj); obj 328 drivers/gpu/drm/vboxvideo/vbox_mode.c gbo = drm_gem_vram_of_gem(to_vbox_framebuffer(old_state->fb)->obj); obj 390 drivers/gpu/drm/vboxvideo/vbox_mode.c drm_gem_vram_of_gem(to_vbox_framebuffer(fb)->obj); obj 470 drivers/gpu/drm/vboxvideo/vbox_mode.c gbo = drm_gem_vram_of_gem(to_vbox_framebuffer(new_state->fb)->obj); obj 482 drivers/gpu/drm/vboxvideo/vbox_mode.c gbo = drm_gem_vram_of_gem(to_vbox_framebuffer(plane->state->fb)->obj); obj 866 drivers/gpu/drm/vboxvideo/vbox_mode.c struct drm_gem_object *obj; obj 870 drivers/gpu/drm/vboxvideo/vbox_mode.c obj = drm_gem_object_lookup(filp, mode_cmd->handles[0]); obj 871 drivers/gpu/drm/vboxvideo/vbox_mode.c if (!obj) obj 878 drivers/gpu/drm/vboxvideo/vbox_mode.c ret = vbox_framebuffer_init(vbox, vbox_fb, mode_cmd, obj); obj 887 drivers/gpu/drm/vboxvideo/vbox_mode.c drm_gem_object_put_unlocked(obj); obj 164 drivers/gpu/drm/vc4/vc4_bo.c struct drm_gem_object *obj = &bo->base.base; obj 165 drivers/gpu/drm/vc4/vc4_bo.c struct vc4_dev *vc4 = to_vc4_dev(obj->dev); obj 169 drivers/gpu/drm/vc4/vc4_bo.c vc4_bo_set_label(obj, -1); obj 178 drivers/gpu/drm/vc4/vc4_bo.c drm_gem_cma_free_object(obj); obj 286 drivers/gpu/drm/vc4/vc4_bo.c static void vc4_bo_purge(struct drm_gem_object *obj) obj 288 drivers/gpu/drm/vc4/vc4_bo.c struct vc4_bo *bo = to_vc4_bo(obj); obj 289 drivers/gpu/drm/vc4/vc4_bo.c struct drm_device *dev = obj->dev; obj 294 drivers/gpu/drm/vc4/vc4_bo.c drm_vma_node_unmap(&obj->vma_node, dev->anon_inode->i_mapping); obj 296 drivers/gpu/drm/vc4/vc4_bo.c dma_free_wc(dev->dev, obj->size, bo->base.vaddr, bo->base.paddr); obj 309 drivers/gpu/drm/vc4/vc4_bo.c struct drm_gem_object *obj = &bo->base.base; obj 335 
drivers/gpu/drm/vc4/vc4_bo.c vc4_bo_purge(obj); obj 658 drivers/gpu/drm/vc4/vc4_bo.c struct dma_buf * vc4_prime_export(struct drm_gem_object *obj, int flags) obj 660 drivers/gpu/drm/vc4/vc4_bo.c struct vc4_bo *bo = to_vc4_bo(obj); obj 680 drivers/gpu/drm/vc4/vc4_bo.c dmabuf = drm_gem_prime_export(obj, flags); obj 690 drivers/gpu/drm/vc4/vc4_bo.c struct drm_gem_object *obj = vma->vm_private_data; obj 691 drivers/gpu/drm/vc4/vc4_bo.c struct vc4_bo *bo = to_vc4_bo(obj); obj 758 drivers/gpu/drm/vc4/vc4_bo.c int vc4_prime_mmap(struct drm_gem_object *obj, struct vm_area_struct *vma) obj 760 drivers/gpu/drm/vc4/vc4_bo.c struct vc4_bo *bo = to_vc4_bo(obj); obj 767 drivers/gpu/drm/vc4/vc4_bo.c return drm_gem_cma_prime_mmap(obj, vma); obj 770 drivers/gpu/drm/vc4/vc4_bo.c void *vc4_prime_vmap(struct drm_gem_object *obj) obj 772 drivers/gpu/drm/vc4/vc4_bo.c struct vc4_bo *bo = to_vc4_bo(obj); obj 779 drivers/gpu/drm/vc4/vc4_bo.c return drm_gem_cma_prime_vmap(obj); obj 787 drivers/gpu/drm/vc4/vc4_bo.c struct drm_gem_object *obj; obj 789 drivers/gpu/drm/vc4/vc4_bo.c obj = drm_gem_cma_prime_import_sg_table(dev, attach, sgt); obj 790 drivers/gpu/drm/vc4/vc4_bo.c if (IS_ERR(obj)) obj 791 drivers/gpu/drm/vc4/vc4_bo.c return obj; obj 793 drivers/gpu/drm/vc4/vc4_bo.c return obj; obj 715 drivers/gpu/drm/vc4/vc4_drv.h struct dma_buf *vc4_prime_export(struct drm_gem_object *obj, int flags); obj 732 drivers/gpu/drm/vc4/vc4_drv.h int vc4_prime_mmap(struct drm_gem_object *obj, struct vm_area_struct *vma); obj 736 drivers/gpu/drm/vc4/vc4_drv.h void *vc4_prime_vmap(struct drm_gem_object *obj); obj 68 drivers/gpu/drm/vc4/vc4_kms.c vc4_ctm_duplicate_state(struct drm_private_obj *obj) obj 72 drivers/gpu/drm/vc4/vc4_kms.c state = kmemdup(obj->state, sizeof(*state), GFP_KERNEL); obj 76 drivers/gpu/drm/vc4/vc4_kms.c __drm_atomic_helper_private_obj_duplicate_state(obj, &state->base); obj 81 drivers/gpu/drm/vc4/vc4_kms.c static void vc4_ctm_destroy_state(struct drm_private_obj *obj, obj 461 drivers/gpu/drm/vc4/vc4_kms.c vc4_load_tracker_duplicate_state(struct drm_private_obj *obj) obj 465 drivers/gpu/drm/vc4/vc4_kms.c state = kmemdup(obj->state, sizeof(*state), GFP_KERNEL); obj 469 drivers/gpu/drm/vc4/vc4_kms.c __drm_atomic_helper_private_obj_duplicate_state(obj, &state->base); obj 474 drivers/gpu/drm/vc4/vc4_kms.c static void vc4_load_tracker_destroy_state(struct drm_private_obj *obj, obj 384 drivers/gpu/drm/vc4/vc4_render_cl.c struct drm_gem_cma_object *obj, obj 390 drivers/gpu/drm/vc4/vc4_render_cl.c if (surf->offset > obj->base.size) { obj 392 drivers/gpu/drm/vc4/vc4_render_cl.c surf->offset, obj->base.size); obj 396 drivers/gpu/drm/vc4/vc4_render_cl.c if ((obj->base.size - surf->offset) / VC4_TILE_BUFFER_SIZE < obj 401 drivers/gpu/drm/vc4/vc4_render_cl.c obj->base.size, obj 410 drivers/gpu/drm/vc4/vc4_render_cl.c struct drm_gem_cma_object **obj, obj 421 drivers/gpu/drm/vc4/vc4_render_cl.c *obj = vc4_use_bo(exec, surf->hindex); obj 422 drivers/gpu/drm/vc4/vc4_render_cl.c if (!*obj) obj 425 drivers/gpu/drm/vc4/vc4_render_cl.c exec->rcl_write_bo[exec->rcl_write_bo_count++] = *obj; obj 432 drivers/gpu/drm/vc4/vc4_render_cl.c return vc4_full_res_bounds_check(exec, *obj, surf); obj 436 drivers/gpu/drm/vc4/vc4_render_cl.c struct drm_gem_cma_object **obj, obj 457 drivers/gpu/drm/vc4/vc4_render_cl.c *obj = vc4_use_bo(exec, surf->hindex); obj 458 drivers/gpu/drm/vc4/vc4_render_cl.c if (!*obj) obj 462 drivers/gpu/drm/vc4/vc4_render_cl.c exec->rcl_write_bo[exec->rcl_write_bo_count++] = *obj; obj 476 
drivers/gpu/drm/vc4/vc4_render_cl.c ret = vc4_full_res_bounds_check(exec, *obj, surf); obj 525 drivers/gpu/drm/vc4/vc4_render_cl.c if (!vc4_check_tex_size(exec, *obj, surf->offset, tiling, obj 536 drivers/gpu/drm/vc4/vc4_render_cl.c struct drm_gem_cma_object **obj, obj 562 drivers/gpu/drm/vc4/vc4_render_cl.c *obj = vc4_use_bo(exec, surf->hindex); obj 563 drivers/gpu/drm/vc4/vc4_render_cl.c if (!*obj) obj 566 drivers/gpu/drm/vc4/vc4_render_cl.c exec->rcl_write_bo[exec->rcl_write_bo_count++] = *obj; obj 586 drivers/gpu/drm/vc4/vc4_render_cl.c if (!vc4_check_tex_size(exec, *obj, surf->offset, tiling, obj 108 drivers/gpu/drm/vc4/vc4_validate.c struct drm_gem_cma_object *obj; obj 116 drivers/gpu/drm/vc4/vc4_validate.c obj = exec->bo[hindex]; obj 117 drivers/gpu/drm/vc4/vc4_validate.c bo = to_vc4_bo(&obj->base); obj 125 drivers/gpu/drm/vc4/vc4_validate.c return obj; obj 57 drivers/gpu/drm/vgem/vgem_drv.c static void vgem_gem_free_object(struct drm_gem_object *obj) obj 59 drivers/gpu/drm/vgem/vgem_drv.c struct drm_vgem_gem_object *vgem_obj = to_vgem_bo(obj); obj 64 drivers/gpu/drm/vgem/vgem_drv.c if (obj->import_attach) obj 65 drivers/gpu/drm/vgem/vgem_drv.c drm_prime_gem_destroy(obj, vgem_obj->table); obj 67 drivers/gpu/drm/vgem/vgem_drv.c drm_gem_object_release(obj); obj 74 drivers/gpu/drm/vgem/vgem_drv.c struct drm_vgem_gem_object *obj = vma->vm_private_data; obj 82 drivers/gpu/drm/vgem/vgem_drv.c num_pages = DIV_ROUND_UP(obj->base.size, PAGE_SIZE); obj 87 drivers/gpu/drm/vgem/vgem_drv.c mutex_lock(&obj->pages_lock); obj 88 drivers/gpu/drm/vgem/vgem_drv.c if (obj->pages) { obj 89 drivers/gpu/drm/vgem/vgem_drv.c get_page(obj->pages[page_offset]); obj 90 drivers/gpu/drm/vgem/vgem_drv.c vmf->page = obj->pages[page_offset]; obj 93 drivers/gpu/drm/vgem/vgem_drv.c mutex_unlock(&obj->pages_lock); obj 98 drivers/gpu/drm/vgem/vgem_drv.c file_inode(obj->base.filp)->i_mapping, obj 162 drivers/gpu/drm/vgem/vgem_drv.c struct drm_vgem_gem_object *obj; obj 165 drivers/gpu/drm/vgem/vgem_drv.c obj = kzalloc(sizeof(*obj), GFP_KERNEL); obj 166 drivers/gpu/drm/vgem/vgem_drv.c if (!obj) obj 169 drivers/gpu/drm/vgem/vgem_drv.c ret = drm_gem_object_init(dev, &obj->base, roundup(size, PAGE_SIZE)); obj 171 drivers/gpu/drm/vgem/vgem_drv.c kfree(obj); obj 175 drivers/gpu/drm/vgem/vgem_drv.c mutex_init(&obj->pages_lock); obj 177 drivers/gpu/drm/vgem/vgem_drv.c return obj; obj 180 drivers/gpu/drm/vgem/vgem_drv.c static void __vgem_gem_destroy(struct drm_vgem_gem_object *obj) obj 182 drivers/gpu/drm/vgem/vgem_drv.c drm_gem_object_release(&obj->base); obj 183 drivers/gpu/drm/vgem/vgem_drv.c kfree(obj); obj 191 drivers/gpu/drm/vgem/vgem_drv.c struct drm_vgem_gem_object *obj; obj 194 drivers/gpu/drm/vgem/vgem_drv.c obj = __vgem_gem_create(dev, size); obj 195 drivers/gpu/drm/vgem/vgem_drv.c if (IS_ERR(obj)) obj 196 drivers/gpu/drm/vgem/vgem_drv.c return ERR_CAST(obj); obj 198 drivers/gpu/drm/vgem/vgem_drv.c ret = drm_gem_handle_create(file, &obj->base, handle); obj 200 drivers/gpu/drm/vgem/vgem_drv.c drm_gem_object_put_unlocked(&obj->base); obj 204 drivers/gpu/drm/vgem/vgem_drv.c return &obj->base; obj 235 drivers/gpu/drm/vgem/vgem_drv.c struct drm_gem_object *obj; obj 238 drivers/gpu/drm/vgem/vgem_drv.c obj = drm_gem_object_lookup(file, handle); obj 239 drivers/gpu/drm/vgem/vgem_drv.c if (!obj) obj 242 drivers/gpu/drm/vgem/vgem_drv.c if (!obj->filp) { obj 247 drivers/gpu/drm/vgem/vgem_drv.c ret = drm_gem_create_mmap_offset(obj); obj 251 drivers/gpu/drm/vgem/vgem_drv.c *offset = drm_vma_node_offset_addr(&obj->vma_node); obj 
253 drivers/gpu/drm/vgem/vgem_drv.c drm_gem_object_put_unlocked(obj); obj 320 drivers/gpu/drm/vgem/vgem_drv.c static int vgem_prime_pin(struct drm_gem_object *obj) obj 322 drivers/gpu/drm/vgem/vgem_drv.c struct drm_vgem_gem_object *bo = to_vgem_bo(obj); obj 323 drivers/gpu/drm/vgem/vgem_drv.c long n_pages = obj->size >> PAGE_SHIFT; obj 338 drivers/gpu/drm/vgem/vgem_drv.c static void vgem_prime_unpin(struct drm_gem_object *obj) obj 340 drivers/gpu/drm/vgem/vgem_drv.c struct drm_vgem_gem_object *bo = to_vgem_bo(obj); obj 345 drivers/gpu/drm/vgem/vgem_drv.c static struct sg_table *vgem_prime_get_sg_table(struct drm_gem_object *obj) obj 347 drivers/gpu/drm/vgem/vgem_drv.c struct drm_vgem_gem_object *bo = to_vgem_bo(obj); obj 363 drivers/gpu/drm/vgem/vgem_drv.c struct drm_vgem_gem_object *obj; obj 366 drivers/gpu/drm/vgem/vgem_drv.c obj = __vgem_gem_create(dev, attach->dmabuf->size); obj 367 drivers/gpu/drm/vgem/vgem_drv.c if (IS_ERR(obj)) obj 368 drivers/gpu/drm/vgem/vgem_drv.c return ERR_CAST(obj); obj 372 drivers/gpu/drm/vgem/vgem_drv.c obj->table = sg; obj 373 drivers/gpu/drm/vgem/vgem_drv.c obj->pages = kvmalloc_array(npages, sizeof(struct page *), GFP_KERNEL); obj 374 drivers/gpu/drm/vgem/vgem_drv.c if (!obj->pages) { obj 375 drivers/gpu/drm/vgem/vgem_drv.c __vgem_gem_destroy(obj); obj 379 drivers/gpu/drm/vgem/vgem_drv.c obj->pages_pin_count++; /* perma-pinned */ obj 380 drivers/gpu/drm/vgem/vgem_drv.c drm_prime_sg_to_page_addr_arrays(obj->table, obj->pages, NULL, obj 382 drivers/gpu/drm/vgem/vgem_drv.c return &obj->base; obj 385 drivers/gpu/drm/vgem/vgem_drv.c static void *vgem_prime_vmap(struct drm_gem_object *obj) obj 387 drivers/gpu/drm/vgem/vgem_drv.c struct drm_vgem_gem_object *bo = to_vgem_bo(obj); obj 388 drivers/gpu/drm/vgem/vgem_drv.c long n_pages = obj->size >> PAGE_SHIFT; obj 398 drivers/gpu/drm/vgem/vgem_drv.c static void vgem_prime_vunmap(struct drm_gem_object *obj, void *vaddr) obj 400 drivers/gpu/drm/vgem/vgem_drv.c struct drm_vgem_gem_object *bo = to_vgem_bo(obj); obj 406 drivers/gpu/drm/vgem/vgem_drv.c static int vgem_prime_mmap(struct drm_gem_object *obj, obj 411 drivers/gpu/drm/vgem/vgem_drv.c if (obj->size < vma->vm_end - vma->vm_start) obj 414 drivers/gpu/drm/vgem/vgem_drv.c if (!obj->filp) obj 417 drivers/gpu/drm/vgem/vgem_drv.c ret = call_mmap(obj->filp, vma); obj 422 drivers/gpu/drm/vgem/vgem_drv.c vma->vm_file = get_file(obj->filp); obj 132 drivers/gpu/drm/vgem/vgem_fence.c struct drm_gem_object *obj; obj 142 drivers/gpu/drm/vgem/vgem_fence.c obj = drm_gem_object_lookup(file, arg->handle); obj 143 drivers/gpu/drm/vgem/vgem_fence.c if (!obj) obj 153 drivers/gpu/drm/vgem/vgem_fence.c resv = obj->resv; obj 185 drivers/gpu/drm/vgem/vgem_fence.c drm_gem_object_put_unlocked(obj); obj 189 drivers/gpu/drm/via/via_mm.c struct via_memblock *obj; obj 192 drivers/gpu/drm/via/via_mm.c obj = idr_find(&dev_priv->object_idr, mem->index); obj 193 drivers/gpu/drm/via/via_mm.c if (obj == NULL) { obj 199 drivers/gpu/drm/via/via_mm.c list_del(&obj->owner_list); obj 200 drivers/gpu/drm/via/via_mm.c drm_mm_remove_node(&obj->mm_node); obj 201 drivers/gpu/drm/via/via_mm.c kfree(obj); obj 66 drivers/gpu/drm/virtio/virtgpu_display.c struct drm_gem_object *obj) obj 70 drivers/gpu/drm/virtio/virtgpu_display.c vgfb->base.obj[0] = obj; obj 76 drivers/gpu/drm/virtio/virtgpu_display.c vgfb->base.obj[0] = NULL; obj 297 drivers/gpu/drm/virtio/virtgpu_display.c struct drm_gem_object *obj = NULL; obj 306 drivers/gpu/drm/virtio/virtgpu_display.c obj = drm_gem_object_lookup(file_priv, 
mode_cmd->handles[0]); obj 307 drivers/gpu/drm/virtio/virtgpu_display.c if (!obj) obj 314 drivers/gpu/drm/virtio/virtgpu_display.c ret = virtio_gpu_framebuffer_init(dev, virtio_gpu_fb, mode_cmd, obj); obj 317 drivers/gpu/drm/virtio/virtgpu_display.c drm_gem_object_put_unlocked(obj); obj 239 drivers/gpu/drm/virtio/virtgpu_drv.h int virtio_gpu_gem_object_open(struct drm_gem_object *obj, obj 241 drivers/gpu/drm/virtio/virtgpu_drv.h void virtio_gpu_gem_object_close(struct drm_gem_object *obj, obj 278 drivers/gpu/drm/virtio/virtgpu_drv.h struct virtio_gpu_object *obj, obj 281 drivers/gpu/drm/virtio/virtgpu_drv.h struct virtio_gpu_object *obj); obj 332 drivers/gpu/drm/virtio/virtgpu_drv.h struct drm_gem_object *obj); obj 370 drivers/gpu/drm/virtio/virtgpu_drv.h struct sg_table *virtgpu_gem_prime_get_sg_table(struct drm_gem_object *obj); obj 374 drivers/gpu/drm/virtio/virtgpu_drv.h void *virtgpu_gem_prime_vmap(struct drm_gem_object *obj); obj 375 drivers/gpu/drm/virtio/virtgpu_drv.h void virtgpu_gem_prime_vunmap(struct drm_gem_object *obj, void *vaddr); obj 376 drivers/gpu/drm/virtio/virtgpu_drv.h int virtgpu_gem_prime_mmap(struct drm_gem_object *obj, obj 33 drivers/gpu/drm/virtio/virtgpu_gem.c struct virtio_gpu_object *obj = gem_to_virtio_gpu_obj(gem_obj); obj 35 drivers/gpu/drm/virtio/virtgpu_gem.c if (obj) obj 36 drivers/gpu/drm/virtio/virtgpu_gem.c virtio_gpu_object_unref(&obj); obj 45 drivers/gpu/drm/virtio/virtgpu_gem.c struct virtio_gpu_object *obj; obj 48 drivers/gpu/drm/virtio/virtgpu_gem.c ret = virtio_gpu_object_create(vgdev, params, &obj, fence); obj 52 drivers/gpu/drm/virtio/virtgpu_gem.c return obj; obj 61 drivers/gpu/drm/virtio/virtgpu_gem.c struct virtio_gpu_object *obj; obj 65 drivers/gpu/drm/virtio/virtgpu_gem.c obj = virtio_gpu_alloc_object(dev, params, NULL); obj 66 drivers/gpu/drm/virtio/virtgpu_gem.c if (IS_ERR(obj)) obj 67 drivers/gpu/drm/virtio/virtgpu_gem.c return PTR_ERR(obj); obj 69 drivers/gpu/drm/virtio/virtgpu_gem.c ret = drm_gem_handle_create(file, &obj->gem_base, &handle); obj 71 drivers/gpu/drm/virtio/virtgpu_gem.c drm_gem_object_release(&obj->gem_base); obj 75 drivers/gpu/drm/virtio/virtgpu_gem.c *obj_p = &obj->gem_base; obj 78 drivers/gpu/drm/virtio/virtgpu_gem.c drm_gem_object_put_unlocked(&obj->gem_base); obj 122 drivers/gpu/drm/virtio/virtgpu_gem.c struct virtio_gpu_object *obj; obj 128 drivers/gpu/drm/virtio/virtgpu_gem.c obj = gem_to_virtio_gpu_obj(gobj); obj 129 drivers/gpu/drm/virtio/virtgpu_gem.c *offset_p = virtio_gpu_object_mmap_offset(obj); obj 134 drivers/gpu/drm/virtio/virtgpu_gem.c int virtio_gpu_gem_object_open(struct drm_gem_object *obj, obj 137 drivers/gpu/drm/virtio/virtgpu_gem.c struct virtio_gpu_device *vgdev = obj->dev->dev_private; obj 139 drivers/gpu/drm/virtio/virtgpu_gem.c struct virtio_gpu_object *qobj = gem_to_virtio_gpu_obj(obj); obj 155 drivers/gpu/drm/virtio/virtgpu_gem.c void virtio_gpu_gem_object_close(struct drm_gem_object *obj, obj 158 drivers/gpu/drm/virtio/virtgpu_gem.c struct virtio_gpu_device *vgdev = obj->dev->dev_private; obj 160 drivers/gpu/drm/virtio/virtgpu_gem.c struct virtio_gpu_object *qobj = gem_to_virtio_gpu_obj(obj); obj 281 drivers/gpu/drm/virtio/virtgpu_ioctl.c struct drm_gem_object *obj; obj 323 drivers/gpu/drm/virtio/virtgpu_ioctl.c obj = &qobj->gem_base; obj 325 drivers/gpu/drm/virtio/virtgpu_ioctl.c ret = drm_gem_handle_create(file_priv, obj, &handle); obj 327 drivers/gpu/drm/virtio/virtgpu_ioctl.c drm_gem_object_release(obj); obj 330 drivers/gpu/drm/virtio/virtgpu_ioctl.c drm_gem_object_put_unlocked(obj); 
obj 109 drivers/gpu/drm/virtio/virtgpu_plane.c bo = gem_to_virtio_gpu_obj(vgfb->base.obj[0]); obj 155 drivers/gpu/drm/virtio/virtgpu_plane.c bo = gem_to_virtio_gpu_obj(vgfb->base.obj[0]); obj 200 drivers/gpu/drm/virtio/virtgpu_plane.c bo = gem_to_virtio_gpu_obj(vgfb->base.obj[0]); obj 33 drivers/gpu/drm/virtio/virtgpu_prime.c struct sg_table *virtgpu_gem_prime_get_sg_table(struct drm_gem_object *obj) obj 35 drivers/gpu/drm/virtio/virtgpu_prime.c struct virtio_gpu_object *bo = gem_to_virtio_gpu_obj(obj); obj 52 drivers/gpu/drm/virtio/virtgpu_prime.c void *virtgpu_gem_prime_vmap(struct drm_gem_object *obj) obj 54 drivers/gpu/drm/virtio/virtgpu_prime.c struct virtio_gpu_object *bo = gem_to_virtio_gpu_obj(obj); obj 63 drivers/gpu/drm/virtio/virtgpu_prime.c void virtgpu_gem_prime_vunmap(struct drm_gem_object *obj, void *vaddr) obj 65 drivers/gpu/drm/virtio/virtgpu_prime.c virtio_gpu_object_kunmap(gem_to_virtio_gpu_obj(obj)); obj 68 drivers/gpu/drm/virtio/virtgpu_prime.c int virtgpu_gem_prime_mmap(struct drm_gem_object *obj, obj 71 drivers/gpu/drm/virtio/virtgpu_prime.c return drm_gem_prime_mmap(obj, vma); obj 192 drivers/gpu/drm/virtio/virtgpu_ttm.c struct virtio_gpu_object *obj; obj 201 drivers/gpu/drm/virtio/virtgpu_ttm.c virtio_gpu_get_vgdev(gtt->obj->tbo.bdev); obj 203 drivers/gpu/drm/virtio/virtgpu_ttm.c virtio_gpu_object_attach(vgdev, gtt->obj, NULL); obj 212 drivers/gpu/drm/virtio/virtgpu_ttm.c virtio_gpu_get_vgdev(gtt->obj->tbo.bdev); obj 214 drivers/gpu/drm/virtio/virtgpu_ttm.c virtio_gpu_object_detach(vgdev, gtt->obj); obj 244 drivers/gpu/drm/virtio/virtgpu_ttm.c gtt->obj = container_of(bo, struct virtio_gpu_object, tbo); obj 962 drivers/gpu/drm/virtio/virtgpu_vq.c struct virtio_gpu_object *obj, obj 970 drivers/gpu/drm/virtio/virtgpu_vq.c if (WARN_ON_ONCE(!obj->created)) obj 973 drivers/gpu/drm/virtio/virtgpu_vq.c if (!obj->pages) { obj 976 drivers/gpu/drm/virtio/virtgpu_vq.c ret = virtio_gpu_object_get_sg_table(vgdev, obj); obj 982 drivers/gpu/drm/virtio/virtgpu_vq.c obj->mapped = dma_map_sg(vgdev->vdev->dev.parent, obj 983 drivers/gpu/drm/virtio/virtgpu_vq.c obj->pages->sgl, obj->pages->nents, obj 985 drivers/gpu/drm/virtio/virtgpu_vq.c nents = obj->mapped; obj 987 drivers/gpu/drm/virtio/virtgpu_vq.c nents = obj->pages->nents; obj 998 drivers/gpu/drm/virtio/virtgpu_vq.c for_each_sg(obj->pages->sgl, sg, nents, si) { obj 1006 drivers/gpu/drm/virtio/virtgpu_vq.c virtio_gpu_cmd_resource_attach_backing(vgdev, obj->hw_res_handle, obj 1013 drivers/gpu/drm/virtio/virtgpu_vq.c struct virtio_gpu_object *obj) obj 1017 drivers/gpu/drm/virtio/virtgpu_vq.c if (use_dma_api && obj->mapped) { obj 1020 drivers/gpu/drm/virtio/virtgpu_vq.c virtio_gpu_cmd_resource_inval_backing(vgdev, obj->hw_res_handle, fence); obj 1026 drivers/gpu/drm/virtio/virtgpu_vq.c obj->pages->sgl, obj->mapped, obj 1028 drivers/gpu/drm/virtio/virtgpu_vq.c obj->mapped = 0; obj 1030 drivers/gpu/drm/virtio/virtgpu_vq.c virtio_gpu_cmd_resource_inval_backing(vgdev, obj->hw_res_handle, NULL); obj 129 drivers/gpu/drm/vkms/vkms_drv.h void vkms_gem_free_object(struct drm_gem_object *obj); obj 131 drivers/gpu/drm/vkms/vkms_drv.h int vkms_gem_vmap(struct drm_gem_object *obj); obj 133 drivers/gpu/drm/vkms/vkms_drv.h void vkms_gem_vunmap(struct drm_gem_object *obj); obj 11 drivers/gpu/drm/vkms/vkms_gem.c struct vkms_gem_object *obj; obj 14 drivers/gpu/drm/vkms/vkms_gem.c obj = kzalloc(sizeof(*obj), GFP_KERNEL); obj 15 drivers/gpu/drm/vkms/vkms_gem.c if (!obj) obj 19 drivers/gpu/drm/vkms/vkms_gem.c ret = drm_gem_object_init(dev, &obj->gem, 
size); obj 21 drivers/gpu/drm/vkms/vkms_gem.c kfree(obj); obj 25 drivers/gpu/drm/vkms/vkms_gem.c mutex_init(&obj->pages_lock); obj 27 drivers/gpu/drm/vkms/vkms_gem.c return obj; obj 30 drivers/gpu/drm/vkms/vkms_gem.c void vkms_gem_free_object(struct drm_gem_object *obj) obj 32 drivers/gpu/drm/vkms/vkms_gem.c struct vkms_gem_object *gem = container_of(obj, struct vkms_gem_object, obj 39 drivers/gpu/drm/vkms/vkms_gem.c drm_gem_object_release(obj); obj 46 drivers/gpu/drm/vkms/vkms_gem.c struct vkms_gem_object *obj = vma->vm_private_data; obj 53 drivers/gpu/drm/vkms/vkms_gem.c num_pages = DIV_ROUND_UP(obj->gem.size, PAGE_SIZE); obj 58 drivers/gpu/drm/vkms/vkms_gem.c mutex_lock(&obj->pages_lock); obj 59 drivers/gpu/drm/vkms/vkms_gem.c if (obj->pages) { obj 60 drivers/gpu/drm/vkms/vkms_gem.c get_page(obj->pages[page_offset]); obj 61 drivers/gpu/drm/vkms/vkms_gem.c vmf->page = obj->pages[page_offset]; obj 64 drivers/gpu/drm/vkms/vkms_gem.c mutex_unlock(&obj->pages_lock); obj 69 drivers/gpu/drm/vkms/vkms_gem.c mapping = file_inode(obj->gem.filp)->i_mapping; obj 103 drivers/gpu/drm/vkms/vkms_gem.c struct vkms_gem_object *obj; obj 109 drivers/gpu/drm/vkms/vkms_gem.c obj = __vkms_gem_create(dev, size); obj 110 drivers/gpu/drm/vkms/vkms_gem.c if (IS_ERR(obj)) obj 111 drivers/gpu/drm/vkms/vkms_gem.c return ERR_CAST(obj); obj 113 drivers/gpu/drm/vkms/vkms_gem.c ret = drm_gem_handle_create(file, &obj->gem, handle); obj 117 drivers/gpu/drm/vkms/vkms_gem.c return &obj->gem; obj 166 drivers/gpu/drm/vkms/vkms_gem.c void vkms_gem_vunmap(struct drm_gem_object *obj) obj 168 drivers/gpu/drm/vkms/vkms_gem.c struct vkms_gem_object *vkms_obj = drm_gem_to_vkms_gem(obj); obj 183 drivers/gpu/drm/vkms/vkms_gem.c drm_gem_put_pages(obj, vkms_obj->pages, false, true); obj 190 drivers/gpu/drm/vkms/vkms_gem.c int vkms_gem_vmap(struct drm_gem_object *obj) obj 192 drivers/gpu/drm/vkms/vkms_gem.c struct vkms_gem_object *vkms_obj = drm_gem_to_vkms_gem(obj); obj 198 drivers/gpu/drm/vkms/vkms_gem.c unsigned int n_pages = obj->size >> PAGE_SHIFT; obj 216 drivers/gpu/drm/vkms/vkms_gem.c drm_gem_put_pages(obj, vkms_obj->pages, false, true); obj 127 drivers/gpu/drm/vmwgfx/ttm_object.c struct ttm_base_object *obj; obj 259 drivers/gpu/drm/vmwgfx/ttm_object.c return drm_hash_entry(hash, struct ttm_ref_object, hash)->obj; obj 275 drivers/gpu/drm/vmwgfx/ttm_object.c base = drm_hash_entry(hash, struct ttm_ref_object, hash)->obj; obj 326 drivers/gpu/drm/vmwgfx/ttm_object.c if (unlikely(base != ref->obj)) obj 392 drivers/gpu/drm/vmwgfx/ttm_object.c ref->obj = base; obj 424 drivers/gpu/drm/vmwgfx/ttm_object.c struct ttm_base_object *base = ref->obj; obj 437 drivers/gpu/drm/vmwgfx/ttm_object.c ttm_base_object_unref(&ref->obj); obj 129 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c SVGAScreenObject obj; obj 141 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c cmd->obj.structSize = sizeof(SVGAScreenObject); obj 142 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c cmd->obj.id = sou->base.unit; obj 143 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c cmd->obj.flags = SVGA_SCREEN_HAS_ROOT | obj 145 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c cmd->obj.size.width = mode->hdisplay; obj 146 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c cmd->obj.size.height = mode->vdisplay; obj 147 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c cmd->obj.root.x = x; obj 148 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c cmd->obj.root.y = y; obj 149 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c sou->base.set_gui_x = cmd->obj.root.x; obj 150 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c sou->base.set_gui_y = cmd->obj.root.y; obj 153 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c 
vmw_bo_get_guest_ptr(&sou->buffer->base, &cmd->obj.backingStore.ptr); obj 154 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c cmd->obj.backingStore.pitch = mode->hdisplay * 4; obj 387 drivers/gpu/drm/xen/xen_drm_front.c struct drm_gem_object *obj; obj 402 drivers/gpu/drm/xen/xen_drm_front.c obj = xen_drm_front_gem_create(dev, args->size); obj 403 drivers/gpu/drm/xen/xen_drm_front.c if (IS_ERR_OR_NULL(obj)) { obj 404 drivers/gpu/drm/xen/xen_drm_front.c ret = PTR_ERR(obj); obj 409 drivers/gpu/drm/xen/xen_drm_front.c xen_drm_front_dbuf_to_cookie(obj), obj 412 drivers/gpu/drm/xen/xen_drm_front.c xen_drm_front_gem_get_pages(obj)); obj 417 drivers/gpu/drm/xen/xen_drm_front.c ret = drm_gem_handle_create(filp, obj, &args->handle); obj 422 drivers/gpu/drm/xen/xen_drm_front.c drm_gem_object_put_unlocked(obj); obj 427 drivers/gpu/drm/xen/xen_drm_front.c xen_drm_front_dbuf_to_cookie(obj)); obj 430 drivers/gpu/drm/xen/xen_drm_front.c drm_gem_object_put_unlocked(obj); obj 436 drivers/gpu/drm/xen/xen_drm_front.c static void xen_drm_drv_free_object_unlocked(struct drm_gem_object *obj) obj 438 drivers/gpu/drm/xen/xen_drm_front.c struct xen_drm_front_drm_info *drm_info = obj->dev->dev_private; obj 441 drivers/gpu/drm/xen/xen_drm_front.c if (drm_dev_enter(obj->dev, &idx)) { obj 443 drivers/gpu/drm/xen/xen_drm_front.c xen_drm_front_dbuf_to_cookie(obj)); obj 447 drivers/gpu/drm/xen/xen_drm_front.c xen_drm_front_dbuf_to_cookie(obj)); obj 450 drivers/gpu/drm/xen/xen_drm_front.c xen_drm_front_gem_free_object_unlocked(obj); obj 31 drivers/gpu/drm/xen/xen_drm_front_gem.h struct page **xen_drm_front_gem_get_pages(struct drm_gem_object *obj); obj 901 drivers/hid/i2c-hid/i2c-hid-core.c union acpi_object *obj; obj 914 drivers/hid/i2c-hid/i2c-hid-core.c obj = acpi_evaluate_dsm_typed(handle, &i2c_hid_guid, 1, 1, NULL, obj 916 drivers/hid/i2c-hid/i2c-hid-core.c if (!obj) { obj 921 drivers/hid/i2c-hid/i2c-hid-core.c pdata->hid_descriptor_address = obj->integer.value; obj 922 drivers/hid/i2c-hid/i2c-hid-core.c ACPI_FREE(obj); obj 524 drivers/hwmon/acpi_power_meter.c struct acpi_device *obj = resource->domain_devices[i]; obj 525 drivers/hwmon/acpi_power_meter.c if (!obj) obj 529 drivers/hwmon/acpi_power_meter.c kobject_name(&obj->dev.kobj)); obj 530 drivers/hwmon/acpi_power_meter.c put_device(&obj->dev); obj 583 drivers/hwmon/acpi_power_meter.c struct acpi_device *obj; obj 596 drivers/hwmon/acpi_power_meter.c obj = resource->domain_devices[i]; obj 597 drivers/hwmon/acpi_power_meter.c get_device(&obj->dev); obj 599 drivers/hwmon/acpi_power_meter.c res = sysfs_create_link(resource->holders_dir, &obj->dev.kobj, obj 600 drivers/hwmon/acpi_power_meter.c kobject_name(&obj->dev.kobj)); obj 602 drivers/hwmon/acpi_power_meter.c put_device(&obj->dev); obj 191 drivers/hwmon/asus_atk0110.c static void atk_print_sensor(struct atk_data *data, union acpi_object *obj); obj 327 drivers/hwmon/asus_atk0110.c static int validate_hwmon_pack(struct atk_data *data, union acpi_object *obj) obj 335 drivers/hwmon/asus_atk0110.c if (obj->type != ACPI_TYPE_PACKAGE) { obj 336 drivers/hwmon/asus_atk0110.c dev_warn(dev, "Invalid type: %d\n", obj->type); obj 340 drivers/hwmon/asus_atk0110.c if (obj->package.count != expected_size) { obj 342 drivers/hwmon/asus_atk0110.c obj->package.count, expected_size); obj 346 drivers/hwmon/asus_atk0110.c tmp = atk_get_pack_member(data, obj, HWMON_PACK_FLAGS); obj 352 drivers/hwmon/asus_atk0110.c tmp = atk_get_pack_member(data, obj, HWMON_PACK_NAME); obj 360 drivers/hwmon/asus_atk0110.c tmp = 
&obj->package.elements[HWMON_PACK_UNK1]; obj 366 drivers/hwmon/asus_atk0110.c tmp = &obj->package.elements[HWMON_PACK_UNK2]; obj 373 drivers/hwmon/asus_atk0110.c tmp = atk_get_pack_member(data, obj, HWMON_PACK_LIMIT1); obj 379 drivers/hwmon/asus_atk0110.c tmp = atk_get_pack_member(data, obj, HWMON_PACK_LIMIT2); obj 385 drivers/hwmon/asus_atk0110.c tmp = atk_get_pack_member(data, obj, HWMON_PACK_ENABLE); obj 391 drivers/hwmon/asus_atk0110.c atk_print_sensor(data, obj); obj 421 drivers/hwmon/asus_atk0110.c static void atk_print_sensor(struct atk_data *data, union acpi_object *obj) obj 432 drivers/hwmon/asus_atk0110.c flags = atk_get_pack_member(data, obj, HWMON_PACK_FLAGS); obj 433 drivers/hwmon/asus_atk0110.c name = atk_get_pack_member(data, obj, HWMON_PACK_NAME); obj 434 drivers/hwmon/asus_atk0110.c limit1 = atk_get_pack_member(data, obj, HWMON_PACK_LIMIT1); obj 435 drivers/hwmon/asus_atk0110.c limit2 = atk_get_pack_member(data, obj, HWMON_PACK_LIMIT2); obj 436 drivers/hwmon/asus_atk0110.c enable = atk_get_pack_member(data, obj, HWMON_PACK_ENABLE); obj 530 drivers/hwmon/asus_atk0110.c union acpi_object *obj; obj 552 drivers/hwmon/asus_atk0110.c obj = ret.pointer; obj 555 drivers/hwmon/asus_atk0110.c if (obj->buffer.length < 8) { obj 557 drivers/hwmon/asus_atk0110.c obj->buffer.length); obj 558 drivers/hwmon/asus_atk0110.c ACPI_FREE(obj); obj 561 drivers/hwmon/asus_atk0110.c return obj; obj 571 drivers/hwmon/asus_atk0110.c union acpi_object *obj; obj 589 drivers/hwmon/asus_atk0110.c obj = ret.pointer; obj 592 drivers/hwmon/asus_atk0110.c if (obj->buffer.length < 8) { obj 594 drivers/hwmon/asus_atk0110.c obj->buffer.length); obj 595 drivers/hwmon/asus_atk0110.c ACPI_FREE(obj); obj 598 drivers/hwmon/asus_atk0110.c return obj; obj 605 drivers/hwmon/asus_atk0110.c union acpi_object *obj; obj 609 drivers/hwmon/asus_atk0110.c obj = atk_gitm(data, sensor->id); obj 610 drivers/hwmon/asus_atk0110.c if (IS_ERR(obj)) obj 611 drivers/hwmon/asus_atk0110.c return PTR_ERR(obj); obj 613 drivers/hwmon/asus_atk0110.c buf = (struct atk_acpi_ret_buffer *)obj->buffer.pointer; obj 627 drivers/hwmon/asus_atk0110.c ACPI_FREE(obj); obj 687 drivers/hwmon/asus_atk0110.c static int atk_acpi_print(char *buf, size_t sz, union acpi_object *obj) obj 691 drivers/hwmon/asus_atk0110.c switch (obj->type) { obj 693 drivers/hwmon/asus_atk0110.c ret = snprintf(buf, sz, "0x%08llx\n", obj->integer.value); obj 696 drivers/hwmon/asus_atk0110.c ret = snprintf(buf, sz, "%s\n", obj->string.pointer); obj 709 drivers/hwmon/asus_atk0110.c union acpi_object *obj = &pack->package.elements[i]; obj 711 drivers/hwmon/asus_atk0110.c ret = atk_acpi_print(buf, sz, obj); obj 820 drivers/hwmon/asus_atk0110.c static int atk_add_sensor(struct atk_data *data, union acpi_object *obj) obj 837 drivers/hwmon/asus_atk0110.c if (obj->type != ACPI_TYPE_PACKAGE) { obj 840 drivers/hwmon/asus_atk0110.c obj->type); obj 844 drivers/hwmon/asus_atk0110.c err = validate_hwmon_pack(data, obj); obj 849 drivers/hwmon/asus_atk0110.c type = atk_get_pack_member(data, obj, HWMON_PACK_FLAGS)->integer.value obj 879 drivers/hwmon/asus_atk0110.c enable = atk_get_pack_member(data, obj, HWMON_PACK_ENABLE); obj 884 drivers/hwmon/asus_atk0110.c flags = atk_get_pack_member(data, obj, HWMON_PACK_FLAGS); obj 885 drivers/hwmon/asus_atk0110.c name = atk_get_pack_member(data, obj, HWMON_PACK_NAME); obj 886 drivers/hwmon/asus_atk0110.c limit1 = atk_get_pack_member(data, obj, HWMON_PACK_LIMIT1); obj 887 drivers/hwmon/asus_atk0110.c limit2 = atk_get_pack_member(data, obj, 
HWMON_PACK_LIMIT2); obj 960 drivers/hwmon/asus_atk0110.c union acpi_object *obj = &pack->package.elements[i]; obj 962 drivers/hwmon/asus_atk0110.c ret = atk_add_sensor(data, obj); obj 981 drivers/hwmon/asus_atk0110.c union acpi_object *obj = &pack->package.elements[i]; obj 983 drivers/hwmon/asus_atk0110.c ret = atk_add_sensor(data, obj); obj 1002 drivers/hwmon/asus_atk0110.c union acpi_object *obj = &pack->package.elements[i]; obj 1004 drivers/hwmon/asus_atk0110.c ret = atk_add_sensor(data, obj); obj 1034 drivers/hwmon/asus_atk0110.c union acpi_object *obj = &pack->package.elements[i]; obj 1037 drivers/hwmon/asus_atk0110.c if (obj->type != ACPI_TYPE_PACKAGE) obj 1040 drivers/hwmon/asus_atk0110.c id = &obj->package.elements[0]; obj 1045 drivers/hwmon/asus_atk0110.c ec = obj; obj 1062 drivers/hwmon/asus_atk0110.c union acpi_object *obj; obj 1066 drivers/hwmon/asus_atk0110.c obj = atk_gitm(data, ATK_EC_ID); obj 1067 drivers/hwmon/asus_atk0110.c if (IS_ERR(obj)) { obj 1069 drivers/hwmon/asus_atk0110.c return PTR_ERR(obj); obj 1071 drivers/hwmon/asus_atk0110.c buf = (struct atk_acpi_ret_buffer *)obj->buffer.pointer; obj 1082 drivers/hwmon/asus_atk0110.c ACPI_FREE(obj); obj 1089 drivers/hwmon/asus_atk0110.c union acpi_object *obj; obj 1098 drivers/hwmon/asus_atk0110.c obj = atk_sitm(data, &sitm); obj 1099 drivers/hwmon/asus_atk0110.c if (IS_ERR(obj)) { obj 1102 drivers/hwmon/asus_atk0110.c return PTR_ERR(obj); obj 1104 drivers/hwmon/asus_atk0110.c ec_ret = (struct atk_acpi_ret_buffer *)obj->buffer.pointer; obj 1114 drivers/hwmon/asus_atk0110.c ACPI_FREE(obj); obj 1149 drivers/hwmon/asus_atk0110.c union acpi_object *obj = &pack->package.elements[i]; obj 1151 drivers/hwmon/asus_atk0110.c atk_add_sensor(data, obj); obj 1278 drivers/hwmon/asus_atk0110.c union acpi_object *obj; obj 1298 drivers/hwmon/asus_atk0110.c obj = buf.pointer; obj 1299 drivers/hwmon/asus_atk0110.c if (obj->package.count >= 2) { obj 1300 drivers/hwmon/asus_atk0110.c union acpi_object *id = &obj->package.elements[1]; obj 326 drivers/hwtracing/coresight/coresight-platform.c static inline bool is_acpi_guid(const union acpi_object *obj) obj 328 drivers/hwtracing/coresight/coresight-platform.c return (obj->type == ACPI_TYPE_BUFFER) && (obj->buffer.length == 16); obj 335 drivers/hwtracing/coresight/coresight-platform.c static inline bool acpi_guid_matches(const union acpi_object *obj, obj 338 drivers/hwtracing/coresight/coresight-platform.c return is_acpi_guid(obj) && obj 339 drivers/hwtracing/coresight/coresight-platform.c guid_equal((guid_t *)obj->buffer.pointer, guid); obj 342 drivers/hwtracing/coresight/coresight-platform.c static inline bool is_acpi_dsd_graph_guid(const union acpi_object *obj) obj 344 drivers/hwtracing/coresight/coresight-platform.c return acpi_guid_matches(obj, &acpi_graph_uuid); obj 347 drivers/hwtracing/coresight/coresight-platform.c static inline bool is_acpi_coresight_graph_guid(const union acpi_object *obj) obj 349 drivers/hwtracing/coresight/coresight-platform.c return acpi_guid_matches(obj, &coresight_graph_uuid); obj 352 drivers/hwtracing/coresight/coresight-platform.c static inline bool is_acpi_coresight_graph(const union acpi_object *obj) obj 356 drivers/hwtracing/coresight/coresight-platform.c if (obj->type != ACPI_TYPE_PACKAGE || obj 357 drivers/hwtracing/coresight/coresight-platform.c obj->package.count < 3) obj 360 drivers/hwtracing/coresight/coresight-platform.c graphid = &obj->package.elements[0]; obj 361 drivers/hwtracing/coresight/coresight-platform.c guid = &obj->package.elements[1]; obj 362 
obj 473 drivers/hwtracing/coresight/coresight-platform.c const union acpi_object *obj = &graph->package.elements[i];
obj 475 drivers/hwtracing/coresight/coresight-platform.c if (obj->type != ACPI_TYPE_PACKAGE ||
obj 476 drivers/hwtracing/coresight/coresight-platform.c obj->package.count < 3)
obj 63 drivers/i2c/busses/i2c-designware-platdrv.c union acpi_object *obj;
obj 71 drivers/i2c/busses/i2c-designware-platdrv.c obj = (union acpi_object *)buf.pointer;
obj 72 drivers/i2c/busses/i2c-designware-platdrv.c if (obj->type == ACPI_TYPE_PACKAGE && obj->package.count == 3) {
obj 73 drivers/i2c/busses/i2c-designware-platdrv.c const union acpi_object *objs = obj->package.elements;
obj 92 drivers/i2c/busses/i2c-scmi.c union acpi_object *obj;
obj 190 drivers/i2c/busses/i2c-scmi.c obj = pkg->package.elements;
obj 196 drivers/i2c/busses/i2c-scmi.c if (obj == NULL || obj->type != ACPI_TYPE_INTEGER) {
obj 202 drivers/i2c/busses/i2c-scmi.c result = obj->integer.value;
obj 227 drivers/i2c/busses/i2c-scmi.c obj = pkg->package.elements + 1;
obj 228 drivers/i2c/busses/i2c-scmi.c if (obj->type != ACPI_TYPE_INTEGER) {
obj 234 drivers/i2c/busses/i2c-scmi.c len = obj->integer.value;
obj 235 drivers/i2c/busses/i2c-scmi.c obj = pkg->package.elements + 2;
obj 240 drivers/i2c/busses/i2c-scmi.c if (obj->type != ACPI_TYPE_INTEGER) {
obj 247 drivers/i2c/busses/i2c-scmi.c data->word = obj->integer.value;
obj 249 drivers/i2c/busses/i2c-scmi.c data->byte = obj->integer.value;
obj 252 drivers/i2c/busses/i2c-scmi.c if (obj->type != ACPI_TYPE_BUFFER) {
obj 261 drivers/i2c/busses/i2c-scmi.c memcpy(data->block + 1, obj->buffer.pointer, len);
obj 305 drivers/i2c/busses/i2c-scmi.c union acpi_object *obj;
obj 318 drivers/i2c/busses/i2c-scmi.c obj = buffer.pointer;
obj 319 drivers/i2c/busses/i2c-scmi.c if (obj && obj->type == ACPI_TYPE_PACKAGE)
obj 320 drivers/i2c/busses/i2c-scmi.c obj = obj->package.elements;
obj 327 drivers/i2c/busses/i2c-scmi.c if (obj->type != ACPI_TYPE_INTEGER) {
obj 333 drivers/i2c/busses/i2c-scmi.c (int)obj->integer.value);
obj 169 drivers/infiniband/core/cm.c struct kobject obj;
obj 4250 drivers/infiniband/core/cm.c static ssize_t cm_show_counter(struct kobject *obj, struct attribute *attr,
obj 4256 drivers/infiniband/core/cm.c group = container_of(obj, struct cm_counter_group, obj);
obj 4293 drivers/infiniband/core/cm.c &port->counter_group[i].obj,
obj 4304 drivers/infiniband/core/cm.c ib_port_unregister_module_stat(&port->counter_group[i].obj);
obj 4314 drivers/infiniband/core/cm.c ib_port_unregister_module_stat(&port->counter_group[i].obj);
obj 235 drivers/infiniband/core/rdma_core.c struct ib_uobject *__uobj_get_destroy(const struct uverbs_api_object *obj,
obj 241 drivers/infiniband/core/rdma_core.c uobj = rdma_lookup_get_uobject(obj, attrs->ufile, id,
obj 259 drivers/infiniband/core/rdma_core.c int __uobj_perform_destroy(const struct uverbs_api_object *obj, u32 id,
obj 264 drivers/infiniband/core/rdma_core.c uobj = __uobj_get_destroy(obj, id, attrs);
obj 273 drivers/infiniband/core/rdma_core.c const struct uverbs_api_object *obj)
obj 282 drivers/infiniband/core/rdma_core.c uobj = kzalloc(obj->type_attrs->obj_size, GFP_KERNEL);
obj 292 drivers/infiniband/core/rdma_core.c uobj->uapi_object = obj;
obj 317 drivers/infiniband/core/rdma_core.c lookup_get_idr_uobject(const struct uverbs_api_object *obj,
obj 341 drivers/infiniband/core/rdma_core.c lookup_get_fd_uobject(const struct uverbs_api_object *obj,
obj 356 drivers/infiniband/core/rdma_core.c if (!obj->type_attrs)
obj 359 drivers/infiniband/core/rdma_core.c container_of(obj->type_attrs, struct uverbs_obj_fd_type, type);
obj 380 drivers/infiniband/core/rdma_core.c struct ib_uobject *rdma_lookup_get_uobject(const struct uverbs_api_object *obj,
obj 388 drivers/infiniband/core/rdma_core.c if (obj == ERR_PTR(-ENOMSG)) {
obj 394 drivers/infiniband/core/rdma_core.c if (IS_ERR(obj))
obj 397 drivers/infiniband/core/rdma_core.c uobj = obj->type_class->lookup_get(obj, ufile, id, mode);
obj 401 drivers/infiniband/core/rdma_core.c if (uobj->uapi_object != obj) {
obj 432 drivers/infiniband/core/rdma_core.c alloc_begin_idr_uobject(const struct uverbs_api_object *obj,
obj 438 drivers/infiniband/core/rdma_core.c uobj = alloc_uobj(ufile, obj);
obj 461 drivers/infiniband/core/rdma_core.c alloc_begin_fd_uobject(const struct uverbs_api_object *obj,
obj 471 drivers/infiniband/core/rdma_core.c uobj = alloc_uobj(ufile, obj);
obj 483 drivers/infiniband/core/rdma_core.c struct ib_uobject *rdma_alloc_begin_uobject(const struct uverbs_api_object *obj,
obj 489 drivers/infiniband/core/rdma_core.c if (IS_ERR(obj))
obj 500 drivers/infiniband/core/rdma_core.c ret = obj->type_class->alloc_begin(obj, ufile);
obj 834 drivers/infiniband/core/rdma_core.c struct ib_uobject *obj, *next_obj;
obj 847 drivers/infiniband/core/rdma_core.c list_for_each_entry_safe(obj, next_obj, &ufile->uobjects, list) {
obj 848 drivers/infiniband/core/rdma_core.c attrs.context = obj->context;
obj 853 drivers/infiniband/core/rdma_core.c WARN_ON(uverbs_try_lock_object(obj, UVERBS_LOOKUP_WRITE));
obj 854 drivers/infiniband/core/rdma_core.c if (!uverbs_destroy_uobject(obj, reason, &attrs))
obj 857 drivers/infiniband/core/rdma_core.c atomic_set(&obj->usecnt, 0);
obj 936 drivers/infiniband/core/rdma_core.c const struct uverbs_api_object *obj =
obj 941 drivers/infiniband/core/rdma_core.c return rdma_lookup_get_uobject(obj, attrs->ufile, id,
obj 945 drivers/infiniband/core/rdma_core.c return rdma_lookup_get_uobject(obj, attrs->ufile, id,
obj 948 drivers/infiniband/core/rdma_core.c return rdma_lookup_get_uobject(obj, attrs->ufile, id,
obj 951 drivers/infiniband/core/rdma_core.c return rdma_alloc_begin_uobject(obj, attrs->ufile, attrs);
obj 561 drivers/infiniband/core/uverbs_cmd.c struct ib_uxrcd_object *obj;
obj 597 drivers/infiniband/core/uverbs_cmd.c obj = (struct ib_uxrcd_object *)uobj_alloc(UVERBS_OBJECT_XRCD, attrs,
obj 599 drivers/infiniband/core/uverbs_cmd.c if (IS_ERR(obj)) {
obj 600 drivers/infiniband/core/uverbs_cmd.c ret = PTR_ERR(obj);
obj 619 drivers/infiniband/core/uverbs_cmd.c atomic_set(&obj->refcnt, 0);
obj 620 drivers/infiniband/core/uverbs_cmd.c obj->uobject.object = xrcd;
obj 622 drivers/infiniband/core/uverbs_cmd.c resp.xrcd_handle = obj->uobject.id;
obj 643 drivers/infiniband/core/uverbs_cmd.c return uobj_alloc_commit(&obj->uobject, attrs);
obj 656 drivers/infiniband/core/uverbs_cmd.c uobj_alloc_abort(&obj->uobject, attrs);
obj 987 drivers/infiniband/core/uverbs_cmd.c struct ib_ucq_object *obj;
obj 998 drivers/infiniband/core/uverbs_cmd.c obj = (struct ib_ucq_object *)uobj_alloc(UVERBS_OBJECT_CQ, attrs,
obj 1000 drivers/infiniband/core/uverbs_cmd.c if (IS_ERR(obj))
obj 1001 drivers/infiniband/core/uverbs_cmd.c return obj;
obj 1011 drivers/infiniband/core/uverbs_cmd.c obj->uobject.user_handle = cmd->user_handle;
obj 1012 drivers/infiniband/core/uverbs_cmd.c obj->comp_events_reported = 0;
obj 1013 drivers/infiniband/core/uverbs_cmd.c obj->async_events_reported = 0;
obj 1014 drivers/infiniband/core/uverbs_cmd.c INIT_LIST_HEAD(&obj->comp_list);
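The cm.c entry at line 4256 recovers the enclosing cm_counter_group from an embedded kobject with container_of(), the same trick used throughout the uverbs entries below. A self-contained sketch of how that macro works, reduced to the offsetof arithmetic (struct names here are illustrative):

        #include <stdio.h>
        #include <stddef.h>

        /* Reduced form of the kernel's container_of(): subtract the member's
         * offset from the member's address to get back the enclosing struct. */
        #define container_of(ptr, type, member) \
                ((type *)((char *)(ptr) - offsetof(type, member)))

        struct kobj { int refcount; };

        struct counter_group {
                long counters[4];
                struct kobj obj;        /* embedded, as in struct cm_counter_group */
        };

        int main(void)
        {
                struct counter_group grp = { .counters = { 1, 2, 3, 4 } };
                struct kobj *kp = &grp.obj;     /* what a sysfs callback receives */

                /* Recover the container exactly as cm_show_counter() does. */
                struct counter_group *g = container_of(kp, struct counter_group, obj);

                printf("counters[2] = %ld (same object: %d)\n",
                       g->counters[2], g == &grp);
                return 0;
        }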
obj 1015 drivers/infiniband/core/uverbs_cmd.c INIT_LIST_HEAD(&obj->async_list);
obj 1027 drivers/infiniband/core/uverbs_cmd.c cq->uobject = &obj->uobject;
obj 1037 drivers/infiniband/core/uverbs_cmd.c obj->uobject.object = cq;
obj 1039 drivers/infiniband/core/uverbs_cmd.c resp.base.cq_handle = obj->uobject.id;
obj 1050 drivers/infiniband/core/uverbs_cmd.c ret = uobj_alloc_commit(&obj->uobject, attrs);
obj 1053 drivers/infiniband/core/uverbs_cmd.c return obj;
obj 1062 drivers/infiniband/core/uverbs_cmd.c ib_uverbs_release_ucq(attrs->ufile, ev_file, obj);
obj 1065 drivers/infiniband/core/uverbs_cmd.c uobj_alloc_abort(&obj->uobject, attrs);
obj 1074 drivers/infiniband/core/uverbs_cmd.c struct ib_ucq_object *obj;
obj 1087 drivers/infiniband/core/uverbs_cmd.c obj = create_cq(attrs, &cmd_ex);
obj 1088 drivers/infiniband/core/uverbs_cmd.c return PTR_ERR_OR_ZERO(obj);
obj 1094 drivers/infiniband/core/uverbs_cmd.c struct ib_ucq_object *obj;
obj 1107 drivers/infiniband/core/uverbs_cmd.c obj = create_cq(attrs, &cmd);
obj 1108 drivers/infiniband/core/uverbs_cmd.c return PTR_ERR_OR_ZERO(obj);
obj 1248 drivers/infiniband/core/uverbs_cmd.c struct ib_ucq_object *obj;
obj 1259 drivers/infiniband/core/uverbs_cmd.c obj = container_of(uobj, struct ib_ucq_object, uobject);
obj 1261 drivers/infiniband/core/uverbs_cmd.c resp.comp_events_reported = obj->comp_events_reported;
obj 1262 drivers/infiniband/core/uverbs_cmd.c resp.async_events_reported = obj->async_events_reported;
obj 1272 drivers/infiniband/core/uverbs_cmd.c struct ib_uqp_object *obj;
obj 1290 drivers/infiniband/core/uverbs_cmd.c obj = (struct ib_uqp_object *)uobj_alloc(UVERBS_OBJECT_QP, attrs,
obj 1292 drivers/infiniband/core/uverbs_cmd.c if (IS_ERR(obj))
obj 1293 drivers/infiniband/core/uverbs_cmd.c return PTR_ERR(obj);
obj 1294 drivers/infiniband/core/uverbs_cmd.c obj->uxrcd = NULL;
obj 1295 drivers/infiniband/core/uverbs_cmd.c obj->uevent.uobject.user_handle = cmd->user_handle;
obj 1296 drivers/infiniband/core/uverbs_cmd.c mutex_init(&obj->mcast_lock);
obj 1392 drivers/infiniband/core/uverbs_cmd.c obj->uevent.events_reported = 0;
obj 1393 drivers/infiniband/core/uverbs_cmd.c INIT_LIST_HEAD(&obj->uevent.event_list);
obj 1394 drivers/infiniband/core/uverbs_cmd.c INIT_LIST_HEAD(&obj->mcast_list);
obj 1422 drivers/infiniband/core/uverbs_cmd.c &obj->uevent.uobject);
obj 1445 drivers/infiniband/core/uverbs_cmd.c qp->uobject = &obj->uevent.uobject;
obj 1448 drivers/infiniband/core/uverbs_cmd.c obj->uevent.uobject.object = qp;
obj 1452 drivers/infiniband/core/uverbs_cmd.c resp.base.qp_handle = obj->uevent.uobject.id;
obj 1465 drivers/infiniband/core/uverbs_cmd.c obj->uxrcd = container_of(xrcd_uobj, struct ib_uxrcd_object,
obj 1467 drivers/infiniband/core/uverbs_cmd.c atomic_inc(&obj->uxrcd->refcnt);
obj 1482 drivers/infiniband/core/uverbs_cmd.c return uobj_alloc_commit(&obj->uevent.uobject, attrs);
obj 1500 drivers/infiniband/core/uverbs_cmd.c uobj_alloc_abort(&obj->uevent.uobject, attrs);
obj 1554 drivers/infiniband/core/uverbs_cmd.c struct ib_uqp_object *obj;
obj 1566 drivers/infiniband/core/uverbs_cmd.c obj = (struct ib_uqp_object *)uobj_alloc(UVERBS_OBJECT_QP, attrs,
obj 1568 drivers/infiniband/core/uverbs_cmd.c if (IS_ERR(obj))
obj 1569 drivers/infiniband/core/uverbs_cmd.c return PTR_ERR(obj);
obj 1588 drivers/infiniband/core/uverbs_cmd.c obj->uevent.events_reported = 0;
obj 1589 drivers/infiniband/core/uverbs_cmd.c INIT_LIST_HEAD(&obj->uevent.event_list);
obj 1590 drivers/infiniband/core/uverbs_cmd.c INIT_LIST_HEAD(&obj->mcast_list);
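Each create_* handler in the uverbs_cmd.c entries above follows the same lifecycle: uobj_alloc(), initialize the object, uobj_alloc_commit() on the success path, uobj_alloc_abort() on every failure path. A minimal userspace model of that commit/abort idiom with a goto error ladder (alloc_obj, commit_obj and abort_obj are hypothetical stand-ins, not the uverbs API):

        #include <stdio.h>
        #include <stdlib.h>

        struct obj { int id; int committed; };

        static struct obj *alloc_obj(void) { return calloc(1, sizeof(struct obj)); }
        static int commit_obj(struct obj *o) { o->committed = 1; return 0; }
        static void abort_obj(struct obj *o) { free(o); }      /* undo the alloc */

        static int create_thing(int fail_init, struct obj **out)
        {
                struct obj *o = alloc_obj();
                int ret;

                if (!o)
                        return -1;

                o->id = 42;
                if (fail_init) {        /* any mid-construction failure... */
                        ret = -2;
                        goto err_abort; /* ...unwinds through abort, never commit */
                }

                *out = o;
                return commit_obj(o);   /* success path: commit makes it visible */

        err_abort:
                abort_obj(o);
                return ret;
        }

        int main(void)
        {
                struct obj *o = NULL;

                printf("ok path: %d\n", create_thing(0, &o));
                printf("err path: %d\n", create_thing(1, &o));
                free(o);        /* only the committed object is left to free */
                return 0;
        }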
obj 1598 drivers/infiniband/core/uverbs_cmd.c obj->uevent.uobject.object = qp;
obj 1599 drivers/infiniband/core/uverbs_cmd.c obj->uevent.uobject.user_handle = cmd.user_handle;
obj 1603 drivers/infiniband/core/uverbs_cmd.c resp.qp_handle = obj->uevent.uobject.id;
obj 1609 drivers/infiniband/core/uverbs_cmd.c obj->uxrcd = container_of(xrcd_uobj, struct ib_uxrcd_object, uobject);
obj 1610 drivers/infiniband/core/uverbs_cmd.c atomic_inc(&obj->uxrcd->refcnt);
obj 1611 drivers/infiniband/core/uverbs_cmd.c qp->uobject = &obj->uevent.uobject;
obj 1614 drivers/infiniband/core/uverbs_cmd.c return uobj_alloc_commit(&obj->uevent.uobject, attrs);
obj 1621 drivers/infiniband/core/uverbs_cmd.c uobj_alloc_abort(&obj->uevent.uobject, attrs);
obj 1969 drivers/infiniband/core/uverbs_cmd.c struct ib_uqp_object *obj;
obj 1980 drivers/infiniband/core/uverbs_cmd.c obj = container_of(uobj, struct ib_uqp_object, uevent.uobject);
obj 1982 drivers/infiniband/core/uverbs_cmd.c resp.events_reported = obj->uevent.events_reported;
obj 2486 drivers/infiniband/core/uverbs_cmd.c struct ib_uqp_object *obj;
obj 2498 drivers/infiniband/core/uverbs_cmd.c obj = container_of(qp->uobject, struct ib_uqp_object, uevent.uobject);
obj 2500 drivers/infiniband/core/uverbs_cmd.c mutex_lock(&obj->mcast_lock);
obj 2501 drivers/infiniband/core/uverbs_cmd.c list_for_each_entry(mcast, &obj->mcast_list, list)
obj 2519 drivers/infiniband/core/uverbs_cmd.c list_add_tail(&mcast->list, &obj->mcast_list);
obj 2524 drivers/infiniband/core/uverbs_cmd.c mutex_unlock(&obj->mcast_lock);
obj 2533 drivers/infiniband/core/uverbs_cmd.c struct ib_uqp_object *obj;
obj 2547 drivers/infiniband/core/uverbs_cmd.c obj = container_of(qp->uobject, struct ib_uqp_object, uevent.uobject);
obj 2548 drivers/infiniband/core/uverbs_cmd.c mutex_lock(&obj->mcast_lock);
obj 2550 drivers/infiniband/core/uverbs_cmd.c list_for_each_entry(mcast, &obj->mcast_list, list)
obj 2567 drivers/infiniband/core/uverbs_cmd.c mutex_unlock(&obj->mcast_lock);
obj 2894 drivers/infiniband/core/uverbs_cmd.c struct ib_uwq_object *obj;
obj 2909 drivers/infiniband/core/uverbs_cmd.c obj = (struct ib_uwq_object *)uobj_alloc(UVERBS_OBJECT_WQ, attrs,
obj 2911 drivers/infiniband/core/uverbs_cmd.c if (IS_ERR(obj))
obj 2912 drivers/infiniband/core/uverbs_cmd.c return PTR_ERR(obj);
obj 2933 drivers/infiniband/core/uverbs_cmd.c obj->uevent.events_reported = 0;
obj 2934 drivers/infiniband/core/uverbs_cmd.c INIT_LIST_HEAD(&obj->uevent.event_list);
obj 2942 drivers/infiniband/core/uverbs_cmd.c wq->uobject = &obj->uevent.uobject;
obj 2943 drivers/infiniband/core/uverbs_cmd.c obj->uevent.uobject.object = wq;
obj 2952 drivers/infiniband/core/uverbs_cmd.c wq->uobject = &obj->uevent.uobject;
obj 2953 drivers/infiniband/core/uverbs_cmd.c obj->uevent.uobject.object = wq;
obj 2956 drivers/infiniband/core/uverbs_cmd.c resp.wq_handle = obj->uevent.uobject.id;
obj 2967 drivers/infiniband/core/uverbs_cmd.c return uobj_alloc_commit(&obj->uevent.uobject, attrs);
obj 2976 drivers/infiniband/core/uverbs_cmd.c uobj_alloc_abort(&obj->uevent.uobject, attrs);
obj 2986 drivers/infiniband/core/uverbs_cmd.c struct ib_uwq_object *obj;
obj 3001 drivers/infiniband/core/uverbs_cmd.c obj = container_of(uobj, struct ib_uwq_object, uevent.uobject);
obj 3002 drivers/infiniband/core/uverbs_cmd.c resp.events_reported = obj->uevent.events_reported;
obj 3357 drivers/infiniband/core/uverbs_cmd.c struct ib_usrq_object *obj;
obj 3365 drivers/infiniband/core/uverbs_cmd.c obj = (struct ib_usrq_object *)uobj_alloc(UVERBS_OBJECT_SRQ, attrs,
obj 3367 drivers/infiniband/core/uverbs_cmd.c if (IS_ERR(obj))
obj 3368 drivers/infiniband/core/uverbs_cmd.c return PTR_ERR(obj);
obj 3387 drivers/infiniband/core/uverbs_cmd.c obj->uxrcd = container_of(xrcd_uobj, struct ib_uxrcd_object, uobject);
obj 3388 drivers/infiniband/core/uverbs_cmd.c atomic_inc(&obj->uxrcd->refcnt);
obj 3413 drivers/infiniband/core/uverbs_cmd.c obj->uevent.events_reported = 0;
obj 3414 drivers/infiniband/core/uverbs_cmd.c INIT_LIST_HEAD(&obj->uevent.event_list);
obj 3425 drivers/infiniband/core/uverbs_cmd.c srq->uobject = &obj->uevent.uobject;
obj 3446 drivers/infiniband/core/uverbs_cmd.c obj->uevent.uobject.object = srq;
obj 3447 drivers/infiniband/core/uverbs_cmd.c obj->uevent.uobject.user_handle = cmd->user_handle;
obj 3450 drivers/infiniband/core/uverbs_cmd.c resp.srq_handle = obj->uevent.uobject.id;
obj 3467 drivers/infiniband/core/uverbs_cmd.c return uobj_alloc_commit(&obj->uevent.uobject, attrs);
obj 3484 drivers/infiniband/core/uverbs_cmd.c atomic_dec(&obj->uxrcd->refcnt);
obj 3489 drivers/infiniband/core/uverbs_cmd.c uobj_alloc_abort(&obj->uevent.uobject, attrs);
obj 3590 drivers/infiniband/core/uverbs_cmd.c struct ib_uevent_object *obj;
obj 3601 drivers/infiniband/core/uverbs_cmd.c obj = container_of(uobj, struct ib_uevent_object, uobject);
obj 3603 drivers/infiniband/core/uverbs_cmd.c resp.events_reported = obj->events_reported;
obj 328 drivers/infiniband/core/uverbs_ioctl.c spec->u.obj.obj_type, spec->u.obj.access,
obj 334 drivers/infiniband/core/uverbs_ioctl.c if (spec->u.obj.access == UVERBS_ACCESS_NEW) {
obj 514 drivers/infiniband/core/uverbs_ioctl.c attr->obj_attr.attr_elm->spec.u.obj.access, commit,
obj 64 drivers/infiniband/core/uverbs_std_types_cq.c struct ib_ucq_object *obj = container_of(
obj 66 drivers/infiniband/core/uverbs_std_types_cq.c typeof(*obj), uobject);
obj 109 drivers/infiniband/core/uverbs_std_types_cq.c obj->comp_events_reported = 0;
obj 110 drivers/infiniband/core/uverbs_std_types_cq.c obj->async_events_reported = 0;
obj 111 drivers/infiniband/core/uverbs_std_types_cq.c INIT_LIST_HEAD(&obj->comp_list);
obj 112 drivers/infiniband/core/uverbs_std_types_cq.c INIT_LIST_HEAD(&obj->async_list);
obj 121 drivers/infiniband/core/uverbs_std_types_cq.c cq->uobject = &obj->uobject;
obj 132 drivers/infiniband/core/uverbs_std_types_cq.c obj->uobject.object = cq;
obj 133 drivers/infiniband/core/uverbs_std_types_cq.c obj->uobject.user_handle = user_handle;
obj 184 drivers/infiniband/core/uverbs_std_types_cq.c struct ib_ucq_object *obj =
obj 187 drivers/infiniband/core/uverbs_std_types_cq.c .comp_events_reported = obj->comp_events_reported,
obj 188 drivers/infiniband/core/uverbs_std_types_cq.c .async_events_reported = obj->async_events_reported
obj 64 drivers/infiniband/core/uverbs_std_types_device.c struct ib_uobject *obj;
obj 76 drivers/infiniband/core/uverbs_std_types_device.c list_for_each_entry(obj, &ufile->uobjects, list) {
obj 77 drivers/infiniband/core/uverbs_std_types_device.c u32 obj_id = obj->id;
obj 79 drivers/infiniband/core/uverbs_std_types_device.c if (obj->uapi_object != uapi_object)
obj 171 drivers/infiniband/core/uverbs_uapi.c const struct uverbs_object_def *obj,
obj 180 drivers/infiniband/core/uverbs_uapi.c obj_key = uapi_key_obj(obj->id);
obj 185 drivers/infiniband/core/uverbs_uapi.c if (obj->type_attrs) {
obj 189 drivers/infiniband/core/uverbs_uapi.c obj_elm->id = obj->id;
obj 190 drivers/infiniband/core/uverbs_uapi.c obj_elm->type_attrs = obj->type_attrs;
obj 191 drivers/infiniband/core/uverbs_uapi.c obj_elm->type_class = obj->type_attrs->type_class;
obj 203 drivers/infiniband/core/uverbs_uapi.c obj->type_attrs->type_class != &uverbs_idr_class &&
obj 204 drivers/infiniband/core/uverbs_uapi.c obj->type_attrs->type_class != &uverbs_fd_class))
obj 208 drivers/infiniband/core/uverbs_uapi.c if (!obj->methods)
obj 211 drivers/infiniband/core/uverbs_uapi.c for (i = 0; i != obj->num_methods; i++) {
obj 212 drivers/infiniband/core/uverbs_uapi.c const struct uverbs_method_def *method = (*obj->methods)[i];
obj 386 drivers/infiniband/core/uverbs_uapi.c u8 access = elm->spec.u.obj.access;
obj 503 drivers/infiniband/core/uverbs_uapi.c return spec->u.obj.obj_type;
obj 812 drivers/infiniband/hw/bnxt_re/main.c void *obj)
obj 817 drivers/infiniband/hw/bnxt_re/main.c if (!obj)
obj 822 drivers/infiniband/hw/bnxt_re/main.c struct bnxt_qplib_qp *lib_qp = obj;
obj 831 drivers/infiniband/hw/bnxt_re/main.c void *aeqe, void *obj)
obj 844 drivers/infiniband/hw/bnxt_re/main.c rc = bnxt_re_handle_affi_async_event(affi_async, obj);
obj 284 drivers/infiniband/hw/bnxt_re/qplib_rcfw.h void *aeqe, void *obj));
obj 47 drivers/infiniband/hw/cxgb4/id_table.c u32 obj;
obj 51 drivers/infiniband/hw/cxgb4/id_table.c obj = find_next_zero_bit(alloc->table, alloc->max, alloc->last);
obj 52 drivers/infiniband/hw/cxgb4/id_table.c if (obj >= alloc->max)
obj 53 drivers/infiniband/hw/cxgb4/id_table.c obj = find_first_zero_bit(alloc->table, alloc->max);
obj 55 drivers/infiniband/hw/cxgb4/id_table.c if (obj < alloc->max) {
obj 59 drivers/infiniband/hw/cxgb4/id_table.c alloc->last = obj + 1;
obj 62 drivers/infiniband/hw/cxgb4/id_table.c set_bit(obj, alloc->table);
obj 63 drivers/infiniband/hw/cxgb4/id_table.c obj += alloc->start;
obj 65 drivers/infiniband/hw/cxgb4/id_table.c obj = -1;
obj 68 drivers/infiniband/hw/cxgb4/id_table.c return obj;
obj 71 drivers/infiniband/hw/cxgb4/id_table.c void c4iw_id_free(struct c4iw_id_table *alloc, u32 obj)
obj 75 drivers/infiniband/hw/cxgb4/id_table.c obj -= alloc->start;
obj 78 drivers/infiniband/hw/cxgb4/id_table.c clear_bit(obj, alloc->table);
obj 938 drivers/infiniband/hw/cxgb4/iw_cxgb4.h void c4iw_id_free(struct c4iw_id_table *alloc, u32 obj);
obj 31 drivers/infiniband/hw/efa/efa_verbs.c void *obj;
obj 194 drivers/infiniband/hw/efa/efa_verbs.c entry->obj, get_mmap_key(entry), entry->address,
obj 221 drivers/infiniband/hw/efa/efa_verbs.c entry->obj, key, entry->address, entry->length);
obj 231 drivers/infiniband/hw/efa/efa_verbs.c void *obj, u64 address, u64 length, u8 mmap_flag)
obj 241 drivers/infiniband/hw/efa/efa_verbs.c entry->obj = obj;
obj 264 drivers/infiniband/hw/efa/efa_verbs.c entry->obj, entry->address, entry->length, get_mmap_key(entry));
obj 39 drivers/infiniband/hw/hns/hns_roce_alloc.c int hns_roce_bitmap_alloc(struct hns_roce_bitmap *bitmap, unsigned long *obj)
obj 44 drivers/infiniband/hw/hns/hns_roce_alloc.c *obj = find_next_zero_bit(bitmap->table, bitmap->max, bitmap->last);
obj 45 drivers/infiniband/hw/hns/hns_roce_alloc.c if (*obj >= bitmap->max) {
obj 48 drivers/infiniband/hw/hns/hns_roce_alloc.c *obj = find_first_zero_bit(bitmap->table, bitmap->max);
obj 51 drivers/infiniband/hw/hns/hns_roce_alloc.c if (*obj < bitmap->max) {
obj 52 drivers/infiniband/hw/hns/hns_roce_alloc.c set_bit(*obj, bitmap->table);
obj 53 drivers/infiniband/hw/hns/hns_roce_alloc.c bitmap->last = (*obj + 1);
obj 56 drivers/infiniband/hw/hns/hns_roce_alloc.c *obj |= bitmap->top;
obj 66 drivers/infiniband/hw/hns/hns_roce_alloc.c void hns_roce_bitmap_free(struct hns_roce_bitmap *bitmap, unsigned long obj,
obj 69 drivers/infiniband/hw/hns/hns_roce_alloc.c hns_roce_bitmap_free_range(bitmap, obj, 1, rr);
obj 73 drivers/infiniband/hw/hns/hns_roce_alloc.c int align, unsigned long *obj)
obj 79 drivers/infiniband/hw/hns/hns_roce_alloc.c return hns_roce_bitmap_alloc(bitmap, obj);
obj 83 drivers/infiniband/hw/hns/hns_roce_alloc.c *obj = bitmap_find_next_zero_area(bitmap->table, bitmap->max,
obj 85 drivers/infiniband/hw/hns/hns_roce_alloc.c if (*obj >= bitmap->max) {
obj 88 drivers/infiniband/hw/hns/hns_roce_alloc.c *obj = bitmap_find_next_zero_area(bitmap->table, bitmap->max, 0,
obj 92 drivers/infiniband/hw/hns/hns_roce_alloc.c if (*obj < bitmap->max) {
obj 94 drivers/infiniband/hw/hns/hns_roce_alloc.c set_bit(*obj + i, bitmap->table);
obj 96 drivers/infiniband/hw/hns/hns_roce_alloc.c if (*obj == bitmap->last) {
obj 97 drivers/infiniband/hw/hns/hns_roce_alloc.c bitmap->last = (*obj + cnt);
obj 101 drivers/infiniband/hw/hns/hns_roce_alloc.c *obj |= bitmap->top;
obj 112 drivers/infiniband/hw/hns/hns_roce_alloc.c unsigned long obj, int cnt,
obj 117 drivers/infiniband/hw/hns/hns_roce_alloc.c obj &= bitmap->max + bitmap->reserved_top - 1;
obj 121 drivers/infiniband/hw/hns/hns_roce_alloc.c clear_bit(obj + i, bitmap->table);
obj 124 drivers/infiniband/hw/hns/hns_roce_alloc.c bitmap->last = min(bitmap->last, obj);
obj 957 drivers/infiniband/hw/hns/hns_roce_device.h struct hns_roce_hem_table *table, int obj, int step_idx);
obj 959 drivers/infiniband/hw/hns/hns_roce_device.h struct hns_roce_hem_table *table, int obj,
obj 1167 drivers/infiniband/hw/hns/hns_roce_device.h int hns_roce_bitmap_alloc(struct hns_roce_bitmap *bitmap, unsigned long *obj);
obj 1168 drivers/infiniband/hw/hns/hns_roce_device.h void hns_roce_bitmap_free(struct hns_roce_bitmap *bitmap, unsigned long obj,
obj 1175 drivers/infiniband/hw/hns/hns_roce_device.h int align, unsigned long *obj);
obj 1177 drivers/infiniband/hw/hns/hns_roce_device.h unsigned long obj, int cnt,
obj 228 drivers/infiniband/hw/hns/hns_roce_hem.c struct hns_roce_hem_table *table, unsigned long *obj,
obj 240 drivers/infiniband/hw/hns/hns_roce_hem.c if (!obj)
obj 251 drivers/infiniband/hw/hns/hns_roce_hem.c table_idx = (*obj & (table->num_obj - 1)) /
obj 363 drivers/infiniband/hw/hns/hns_roce_hem.c struct hns_roce_hem_table *table, unsigned long obj)
obj 378 drivers/infiniband/hw/hns/hns_roce_hem.c unsigned long i = (obj & (table->num_obj - 1)) /
obj 394 drivers/infiniband/hw/hns/hns_roce_hem.c ROCEE_BT_CMD_H_ROCEE_BT_CMD_IN_MDF_S, obj);
obj 439 drivers/infiniband/hw/hns/hns_roce_hem.c unsigned long obj)
obj 454 drivers/infiniband/hw/hns/hns_roce_hem.c unsigned long mhop_obj = obj;
obj 519 drivers/infiniband/hw/hns/hns_roce_hem.c if (hr_dev->hw->set_hem(hr_dev, table, obj, step_idx)) {
obj 543 drivers/infiniband/hw/hns/hns_roce_hem.c if (hr_dev->hw->set_hem(hr_dev, table, obj, step_idx)) {
obj 583 drivers/infiniband/hw/hns/hns_roce_hem.c if (hr_dev->hw->set_hem(hr_dev, table, obj, step_idx)) {
obj 615 drivers/infiniband/hw/hns/hns_roce_hem.c struct hns_roce_hem_table *table, unsigned long obj)
obj 622 drivers/infiniband/hw/hns/hns_roce_hem.c return hns_roce_table_mhop_get(hr_dev, table, obj);
obj 624 drivers/infiniband/hw/hns/hns_roce_hem.c i = (obj & (table->num_obj - 1)) / (table->table_chunk_size /
obj 645 drivers/infiniband/hw/hns/hns_roce_hem.c if (hns_roce_set_hem(hr_dev, table, obj)) {
obj 661 drivers/infiniband/hw/hns/hns_roce_hem.c unsigned long obj,
obj 666 drivers/infiniband/hw/hns/hns_roce_hem.c unsigned long mhop_obj = obj;
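The cxgb4 id_table.c and hns_roce_alloc.c fragments above (and the mthca_allocator.c and pvrdma_doorbell.c ones further down) all implement the same round-robin bitmap ID allocator: scan for a zero bit starting at `last`, wrap to the beginning on a miss, set the bit, and advance `last` so consecutive allocations rotate through the ID space. A self-contained sketch of that pattern, using plain C bit scanning in place of the kernel's find_next_zero_bit()/set_bit() helpers:

        #include <stdio.h>

        #define MAX_IDS 8

        struct id_table {
                unsigned long bits;     /* one bit per ID; MAX_IDS fits in a long */
                unsigned int last;      /* round-robin starting point */
        };

        static unsigned int find_zero_from(unsigned long bits, unsigned int from)
        {
                for (unsigned int i = from; i < MAX_IDS; i++)
                        if (!(bits & (1UL << i)))
                                return i;
                return MAX_IDS;         /* none found, like find_next_zero_bit() */
        }

        static int id_alloc(struct id_table *t)
        {
                unsigned int id = find_zero_from(t->bits, t->last);

                if (id >= MAX_IDS)                      /* wrap, as c4iw_id_alloc() does */
                        id = find_zero_from(t->bits, 0);
                if (id >= MAX_IDS)
                        return -1;                      /* table full */

                t->bits |= 1UL << id;
                t->last = (id + 1) % MAX_IDS;           /* rotate the search start */
                return (int)id;
        }

        static void id_free(struct id_table *t, unsigned int id)
        {
                t->bits &= ~(1UL << id);
                if (id < t->last)       /* mirror "last = min(last, obj)" in mthca_free() */
                        t->last = id;
        }

        int main(void)
        {
                struct id_table t = { 0, 0 };

                for (int i = 0; i < 4; i++)
                        printf("alloc -> %d\n", id_alloc(&t));
                id_free(&t, 1);
                printf("after free(1): alloc -> %d\n", id_alloc(&t)); /* reuses slot 1 */
                return 0;
        }

The rotation spreads allocations across the space so recently freed IDs are not immediately reused, which several of these drivers rely on to avoid stale-handle races.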
obj 711 drivers/infiniband/hw/hns/hns_roce_hem.c if (hr_dev->hw->clear_hem(hr_dev, table, obj, 1))
obj 714 drivers/infiniband/hw/hns/hns_roce_hem.c if (hr_dev->hw->clear_hem(hr_dev, table, obj, 2))
obj 718 drivers/infiniband/hw/hns/hns_roce_hem.c if (hr_dev->hw->clear_hem(hr_dev, table, obj, 0))
obj 734 drivers/infiniband/hw/hns/hns_roce_hem.c hr_dev->hw->clear_hem(hr_dev, table, obj, 0))
obj 747 drivers/infiniband/hw/hns/hns_roce_hem.c if (hr_dev->hw->clear_hem(hr_dev, table, obj, 1))
obj 758 drivers/infiniband/hw/hns/hns_roce_hem.c if (hr_dev->hw->clear_hem(hr_dev, table, obj,
obj 774 drivers/infiniband/hw/hns/hns_roce_hem.c struct hns_roce_hem_table *table, unsigned long obj)
obj 780 drivers/infiniband/hw/hns/hns_roce_hem.c hns_roce_table_mhop_put(hr_dev, table, obj, 1);
obj 784 drivers/infiniband/hw/hns/hns_roce_hem.c i = (obj & (table->num_obj - 1)) /
obj 791 drivers/infiniband/hw/hns/hns_roce_hem.c if (hr_dev->hw->clear_hem(hr_dev, table, obj, 0))
obj 803 drivers/infiniband/hw/hns/hns_roce_hem.c unsigned long obj, dma_addr_t *dma_handle)
obj 809 drivers/infiniband/hw/hns/hns_roce_hem.c unsigned long mhop_obj = obj;
obj 824 drivers/infiniband/hw/hns/hns_roce_hem.c hem = table->hem[(obj & (table->num_obj - 1)) / obj_per_chunk];
obj 825 drivers/infiniband/hw/hns/hns_roce_hem.c idx_offset = (obj & (table->num_obj - 1)) % obj_per_chunk;
obj 842 drivers/infiniband/hw/hns/hns_roce_hem.c dma_offset = offset = (obj & (table->num_obj - 1)) * seg_size %
obj 1037 drivers/infiniband/hw/hns/hns_roce_hem.c u64 obj;
obj 1045 drivers/infiniband/hw/hns/hns_roce_hem.c obj = i * buf_chunk_size / table->obj_size;
obj 1047 drivers/infiniband/hw/hns/hns_roce_hem.c hns_roce_table_mhop_put(hr_dev, table, obj, 0);
obj 112 drivers/infiniband/hw/hns/hns_roce_hem.h struct hns_roce_hem_table *table, unsigned long obj);
obj 114 drivers/infiniband/hw/hns/hns_roce_hem.h struct hns_roce_hem_table *table, unsigned long obj);
obj 116 drivers/infiniband/hw/hns/hns_roce_hem.h struct hns_roce_hem_table *table, unsigned long obj,
obj 132 drivers/infiniband/hw/hns/hns_roce_hem.h struct hns_roce_hem_table *table, unsigned long *obj,
obj 2421 drivers/infiniband/hw/hns/hns_roce_hw_v1.c struct hns_roce_hem_table *table, int obj,
obj 2453 drivers/infiniband/hw/hns/hns_roce_hw_v1.c ROCEE_BT_CMD_H_ROCEE_BT_CMD_IN_MDF_S, obj);
obj 3037 drivers/infiniband/hw/hns/hns_roce_hw_v2.c struct hns_roce_hem_table *table, int obj,
obj 3044 drivers/infiniband/hw/hns/hns_roce_hw_v2.c unsigned long mhop_obj = obj;
obj 3083 drivers/infiniband/hw/hns/hns_roce_hw_v2.c obj = mhop.l0_idx;
obj 3093 drivers/infiniband/hw/hns/hns_roce_hw_v2.c obj, 0, op,
obj 3103 drivers/infiniband/hw/hns/hns_roce_hw_v2.c ret = hns_roce_cmd_mbox(hr_dev, bt_ba, mailbox->dma, obj,
obj 3112 drivers/infiniband/hw/hns/hns_roce_hw_v2.c struct hns_roce_hem_table *table, int obj,
obj 3158 drivers/infiniband/hw/hns/hns_roce_hw_v2.c ret = hns_roce_cmd_mbox(hr_dev, 0, mailbox->dma, obj, 0, op,
obj 173 drivers/infiniband/hw/mlx5/devx.c bool mlx5_ib_devx_is_flow_dest(void *obj, int *dest_id, int *dest_type)
obj 175 drivers/infiniband/hw/mlx5/devx.c struct devx_obj *devx_obj = obj;
obj 195 drivers/infiniband/hw/mlx5/devx.c bool mlx5_ib_devx_is_flow_counter(void *obj, u32 *counter_id)
obj 197 drivers/infiniband/hw/mlx5/devx.c struct devx_obj *devx_obj = obj;
obj 260 drivers/infiniband/hw/mlx5/devx.c static u16 get_dec_obj_type(struct devx_obj *obj, u16 event_num)
obj 264 drivers/infiniband/hw/mlx5/devx.c opcode = (obj->obj_id >> 32) & 0xffff;
obj 271 drivers/infiniband/hw/mlx5/devx.c return (obj->obj_id >> 48);
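The hns_roce_hem.c entries at source lines 824-825 (and the mthca_memfree.c ones further down) split an object index into a chunk number and an offset within the chunk: `(obj & (num_obj - 1)) / obj_per_chunk` and `% obj_per_chunk`, with the AND mask assuming num_obj is a power of two. A worked standalone example of that arithmetic (the values below are arbitrary, chosen only to make the numbers concrete):

        #include <stdio.h>

        int main(void)
        {
                unsigned long num_obj = 1024;           /* must be a power of two for the mask */
                unsigned long chunk_size = 4096;        /* bytes per chunk */
                unsigned long obj_size = 64;            /* bytes per object */
                unsigned long obj_per_chunk = chunk_size / obj_size;    /* 64 */
                unsigned long obj = 1500;               /* wraps: 1500 & 1023 == 476 */

                unsigned long idx = obj & (num_obj - 1);
                unsigned long chunk = idx / obj_per_chunk;      /* which chunk holds it */
                unsigned long offset = idx % obj_per_chunk;     /* slot inside the chunk */

                printf("obj %lu -> idx %lu -> chunk %lu, slot %lu (byte %lu)\n",
                       obj, idx, chunk, offset, offset * obj_size);
                return 0;
        }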
obj 1248 drivers/infiniband/hw/mlx5/devx.c static int devx_handle_mkey_indirect(struct devx_obj *obj,
obj 1252 drivers/infiniband/hw/mlx5/devx.c struct mlx5_ib_devx_mr *devx_mr = &obj->devx_mr;
obj 1273 drivers/infiniband/hw/mlx5/devx.c struct devx_obj *obj,
obj 1293 drivers/infiniband/hw/mlx5/devx.c obj->flags |= DEVX_OBJ_FLAGS_INDIRECT_MKEY;
obj 1336 drivers/infiniband/hw/mlx5/devx.c struct devx_obj *obj = uobject->object;
obj 1342 drivers/infiniband/hw/mlx5/devx.c if (obj->flags & DEVX_OBJ_FLAGS_INDIRECT_MKEY) {
obj 1348 drivers/infiniband/hw/mlx5/devx.c xa_erase(&obj->ib_dev->mdev->priv.mkey_table,
obj 1349 drivers/infiniband/hw/mlx5/devx.c mlx5_base_mkey(obj->devx_mr.mmkey.key));
obj 1353 drivers/infiniband/hw/mlx5/devx.c if (obj->flags & DEVX_OBJ_FLAGS_DCT)
obj 1354 drivers/infiniband/hw/mlx5/devx.c ret = mlx5_core_destroy_dct(obj->ib_dev->mdev, &obj->core_dct);
obj 1355 drivers/infiniband/hw/mlx5/devx.c else if (obj->flags & DEVX_OBJ_FLAGS_CQ)
obj 1356 drivers/infiniband/hw/mlx5/devx.c ret = mlx5_core_destroy_cq(obj->ib_dev->mdev, &obj->core_cq);
obj 1358 drivers/infiniband/hw/mlx5/devx.c ret = mlx5_cmd_exec(obj->ib_dev->mdev, obj->dinbox,
obj 1359 drivers/infiniband/hw/mlx5/devx.c obj->dinlen, out, sizeof(out));
obj 1366 drivers/infiniband/hw/mlx5/devx.c list_for_each_entry_safe(sub_entry, tmp, &obj->event_sub, obj_list)
obj 1370 drivers/infiniband/hw/mlx5/devx.c kfree(obj);
obj 1376 drivers/infiniband/hw/mlx5/devx.c struct devx_obj *obj = container_of(mcq, struct devx_obj, core_cq);
obj 1382 drivers/infiniband/hw/mlx5/devx.c table = &obj->ib_dev->devx_event_table;
obj 1412 drivers/infiniband/hw/mlx5/devx.c struct devx_obj *obj;
obj 1433 drivers/infiniband/hw/mlx5/devx.c obj = kzalloc(sizeof(struct devx_obj), GFP_KERNEL);
obj 1434 drivers/infiniband/hw/mlx5/devx.c if (!obj)
obj 1439 drivers/infiniband/hw/mlx5/devx.c err = devx_handle_mkey_create(dev, obj, cmd_in, cmd_in_len);
obj 1447 drivers/infiniband/hw/mlx5/devx.c obj->flags |= DEVX_OBJ_FLAGS_DCT;
obj 1448 drivers/infiniband/hw/mlx5/devx.c err = mlx5_core_create_dct(dev->mdev, &obj->core_dct,
obj 1452 drivers/infiniband/hw/mlx5/devx.c obj->flags |= DEVX_OBJ_FLAGS_CQ;
obj 1453 drivers/infiniband/hw/mlx5/devx.c obj->core_cq.comp = devx_cq_comp;
obj 1454 drivers/infiniband/hw/mlx5/devx.c err = mlx5_core_create_cq(dev->mdev, &obj->core_cq,
obj 1466 drivers/infiniband/hw/mlx5/devx.c uobj->object = obj;
obj 1467 drivers/infiniband/hw/mlx5/devx.c INIT_LIST_HEAD(&obj->event_sub);
obj 1468 drivers/infiniband/hw/mlx5/devx.c obj->ib_dev = dev;
obj 1469 drivers/infiniband/hw/mlx5/devx.c devx_obj_build_destroy_cmd(cmd_in, cmd_out, obj->dinbox, &obj->dinlen,
obj 1471 drivers/infiniband/hw/mlx5/devx.c WARN_ON(obj->dinlen > MLX5_MAX_DESTROY_INBOX_SIZE_DW * sizeof(u32));
obj 1479 drivers/infiniband/hw/mlx5/devx.c obj->obj_id = get_enc_obj_id(opcode | obj_type << 16, obj_id);
obj 1481 drivers/infiniband/hw/mlx5/devx.c if (obj->flags & DEVX_OBJ_FLAGS_INDIRECT_MKEY) {
obj 1482 drivers/infiniband/hw/mlx5/devx.c err = devx_handle_mkey_indirect(obj, dev, cmd_in, cmd_out);
obj 1489 drivers/infiniband/hw/mlx5/devx.c if (obj->flags & DEVX_OBJ_FLAGS_DCT)
obj 1490 drivers/infiniband/hw/mlx5/devx.c mlx5_core_destroy_dct(obj->ib_dev->mdev, &obj->core_dct);
obj 1491 drivers/infiniband/hw/mlx5/devx.c else if (obj->flags & DEVX_OBJ_FLAGS_CQ)
obj 1492 drivers/infiniband/hw/mlx5/devx.c mlx5_core_destroy_cq(obj->ib_dev->mdev, &obj->core_cq);
obj 1494 drivers/infiniband/hw/mlx5/devx.c mlx5_cmd_exec(obj->ib_dev->mdev, obj->dinbox, obj->dinlen, out,
obj 1497 drivers/infiniband/hw/mlx5/devx.c kfree(obj);
obj 1849 drivers/infiniband/hw/mlx5/devx.c struct devx_obj *obj)
obj 1854 drivers/infiniband/hw/mlx5/devx.c if (obj) {
obj 1869 drivers/infiniband/hw/mlx5/devx.c struct devx_obj *obj)
obj 1884 drivers/infiniband/hw/mlx5/devx.c obj);
obj 1894 drivers/infiniband/hw/mlx5/devx.c if (obj) {
obj 1925 drivers/infiniband/hw/mlx5/devx.c struct devx_obj *obj = NULL;
obj 1945 drivers/infiniband/hw/mlx5/devx.c obj = (struct devx_obj *)devx_uobj->object;
obj 1946 drivers/infiniband/hw/mlx5/devx.c if (obj)
obj 1947 drivers/infiniband/hw/mlx5/devx.c obj_id = get_dec_obj_id(obj->obj_id);
obj 1992 drivers/infiniband/hw/mlx5/devx.c if (!is_valid_events(dev->mdev, num_events, event_type_num_list, obj))
obj 2004 drivers/infiniband/hw/mlx5/devx.c if (obj)
obj 2005 drivers/infiniband/hw/mlx5/devx.c obj_type = get_dec_obj_type(obj,
obj 2011 drivers/infiniband/hw/mlx5/devx.c obj,
obj 2061 drivers/infiniband/hw/mlx5/devx.c if (!obj) {
obj 2072 drivers/infiniband/hw/mlx5/devx.c &obj->event_sub);
obj 2084 drivers/infiniband/hw/mlx5/devx.c obj,
obj 2099 drivers/infiniband/hw/mlx5/devx.c struct devx_umem *obj)
obj 2124 drivers/infiniband/hw/mlx5/devx.c obj->umem = ib_umem_get(&attrs->driver_udata, addr, size, access, 0);
obj 2125 drivers/infiniband/hw/mlx5/devx.c if (IS_ERR(obj->umem))
obj 2126 drivers/infiniband/hw/mlx5/devx.c return PTR_ERR(obj->umem);
obj 2128 drivers/infiniband/hw/mlx5/devx.c mlx5_ib_cont_pages(obj->umem, obj->umem->address,
obj 2130 drivers/infiniband/hw/mlx5/devx.c &obj->page_shift, &obj->ncont, NULL);
obj 2133 drivers/infiniband/hw/mlx5/devx.c ib_umem_release(obj->umem);
obj 2137 drivers/infiniband/hw/mlx5/devx.c page_mask = (1 << obj->page_shift) - 1;
obj 2138 drivers/infiniband/hw/mlx5/devx.c obj->page_offset = obj->umem->address & page_mask;
obj 2144 drivers/infiniband/hw/mlx5/devx.c struct devx_umem *obj,
obj 2148 drivers/infiniband/hw/mlx5/devx.c (MLX5_ST_SZ_BYTES(mtt) * obj->ncont);
obj 2154 drivers/infiniband/hw/mlx5/devx.c struct devx_umem *obj,
obj 2164 drivers/infiniband/hw/mlx5/devx.c MLX5_SET64(umem, umem, num_of_mtt, obj->ncont);
obj 2165 drivers/infiniband/hw/mlx5/devx.c MLX5_SET(umem, umem, log_page_size, obj->page_shift -
obj 2167 drivers/infiniband/hw/mlx5/devx.c MLX5_SET(umem, umem, page_offset, obj->page_offset);
obj 2168 drivers/infiniband/hw/mlx5/devx.c mlx5_ib_populate_pas(dev, obj->umem, obj->page_shift, mtt,
obj 2169 drivers/infiniband/hw/mlx5/devx.c (obj->umem->writable ? MLX5_IB_MTT_WRITE : 0) |
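The devx.c entries at source lines 2137-2138 compute the offset of a user buffer within its first page: build a mask from the page shift, then AND it with the address. The same two lines of arithmetic, worked through standalone (the address value is arbitrary):

        #include <stdio.h>

        int main(void)
        {
                unsigned int page_shift = 12;                   /* 4 KiB pages */
                unsigned long address = 0x7f3a12345678UL;       /* arbitrary user address */

                unsigned long page_mask = (1UL << page_shift) - 1;      /* 0xfff */
                unsigned long page_offset = address & page_mask;        /* low 12 bits */

                printf("address 0x%lx -> page offset 0x%lx (%lu bytes into the page)\n",
                       address, page_offset, page_offset);
                return 0;
        }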
obj 2177 drivers/infiniband/hw/mlx5/devx.c struct devx_umem *obj;
obj 2189 drivers/infiniband/hw/mlx5/devx.c obj = kzalloc(sizeof(struct devx_umem), GFP_KERNEL);
obj 2190 drivers/infiniband/hw/mlx5/devx.c if (!obj)
obj 2193 drivers/infiniband/hw/mlx5/devx.c err = devx_umem_get(dev, &c->ibucontext, attrs, obj);
obj 2197 drivers/infiniband/hw/mlx5/devx.c err = devx_umem_reg_cmd_alloc(attrs, obj, &cmd);
obj 2201 drivers/infiniband/hw/mlx5/devx.c devx_umem_reg_cmd_build(dev, obj, &cmd);
obj 2209 drivers/infiniband/hw/mlx5/devx.c obj->mdev = dev->mdev;
obj 2210 drivers/infiniband/hw/mlx5/devx.c uobj->object = obj;
obj 2211 drivers/infiniband/hw/mlx5/devx.c devx_obj_build_destroy_cmd(cmd.in, cmd.out, obj->dinbox, &obj->dinlen, &obj_id);
obj 2219 drivers/infiniband/hw/mlx5/devx.c mlx5_cmd_exec(obj->mdev, obj->dinbox, obj->dinlen, cmd.out, sizeof(cmd.out));
obj 2221 drivers/infiniband/hw/mlx5/devx.c ib_umem_release(obj->umem);
obj 2223 drivers/infiniband/hw/mlx5/devx.c kfree(obj);
obj 2231 drivers/infiniband/hw/mlx5/devx.c struct devx_umem *obj = uobject->object;
obj 2235 drivers/infiniband/hw/mlx5/devx.c err = mlx5_cmd_exec(obj->mdev, obj->dinbox, obj->dinlen, out, sizeof(out));
obj 2239 drivers/infiniband/hw/mlx5/devx.c ib_umem_release(obj->umem);
obj 2240 drivers/infiniband/hw/mlx5/devx.c kfree(obj);
obj 218 drivers/infiniband/hw/mlx5/flow.c struct mlx5_ib_flow_matcher *obj = uobject->object;
obj 221 drivers/infiniband/hw/mlx5/flow.c ret = ib_destroy_usecnt(&obj->usecnt, why, uobject);
obj 225 drivers/infiniband/hw/mlx5/flow.c kfree(obj);
obj 230 drivers/infiniband/hw/mlx5/flow.c struct mlx5_ib_flow_matcher *obj)
obj 250 drivers/infiniband/hw/mlx5/flow.c err = mlx5_ib_ft_type_to_namespace(ft_type, &obj->ns_type);
obj 267 drivers/infiniband/hw/mlx5/flow.c &obj->ns_type);
obj 272 drivers/infiniband/hw/mlx5/flow.c obj->ns_type = MLX5_FLOW_NAMESPACE_BYPASS;
obj 283 drivers/infiniband/hw/mlx5/flow.c struct mlx5_ib_flow_matcher *obj;
obj 286 drivers/infiniband/hw/mlx5/flow.c obj = kzalloc(sizeof(struct mlx5_ib_flow_matcher), GFP_KERNEL);
obj 287 drivers/infiniband/hw/mlx5/flow.c if (!obj)
obj 290 drivers/infiniband/hw/mlx5/flow.c obj->mask_len = uverbs_attr_get_len(
obj 292 drivers/infiniband/hw/mlx5/flow.c err = uverbs_copy_from(&obj->matcher_mask,
obj 298 drivers/infiniband/hw/mlx5/flow.c obj->flow_type = uverbs_attr_get_enum_id(
obj 301 drivers/infiniband/hw/mlx5/flow.c if (obj->flow_type == MLX5_IB_FLOW_TYPE_NORMAL) {
obj 302 drivers/infiniband/hw/mlx5/flow.c err = uverbs_copy_from(&obj->priority,
obj 309 drivers/infiniband/hw/mlx5/flow.c err = uverbs_copy_from(&obj->match_criteria_enable,
obj 315 drivers/infiniband/hw/mlx5/flow.c err = mlx5_ib_matcher_ns(attrs, obj);
obj 319 drivers/infiniband/hw/mlx5/flow.c uobj->object = obj;
obj 320 drivers/infiniband/hw/mlx5/flow.c obj->mdev = dev->mdev;
obj 321 drivers/infiniband/hw/mlx5/flow.c atomic_set(&obj->usecnt, 0);
obj 325 drivers/infiniband/hw/mlx5/flow.c kfree(obj);
obj 1352 drivers/infiniband/hw/mlx5/mlx5_ib.h bool mlx5_ib_devx_is_flow_dest(void *obj, int *dest_id, int *dest_type);
obj 1353 drivers/infiniband/hw/mlx5/mlx5_ib.h bool mlx5_ib_devx_is_flow_counter(void *obj, u32 *counter_id);
obj 1363 drivers/infiniband/hw/mlx5/mlx5_ib.h static inline bool mlx5_ib_devx_is_flow_dest(void *obj, int *dest_id,
obj 43 drivers/infiniband/hw/mthca/mthca_allocator.c u32 obj;
obj 47 drivers/infiniband/hw/mthca/mthca_allocator.c obj = find_next_zero_bit(alloc->table, alloc->max, alloc->last);
obj 48 drivers/infiniband/hw/mthca/mthca_allocator.c if (obj >= alloc->max) {
obj 50 drivers/infiniband/hw/mthca/mthca_allocator.c obj = find_first_zero_bit(alloc->table, alloc->max);
obj 53 drivers/infiniband/hw/mthca/mthca_allocator.c if (obj < alloc->max) {
obj 54 drivers/infiniband/hw/mthca/mthca_allocator.c set_bit(obj, alloc->table);
obj 55 drivers/infiniband/hw/mthca/mthca_allocator.c obj |= alloc->top;
obj 57 drivers/infiniband/hw/mthca/mthca_allocator.c obj = -1;
obj 61 drivers/infiniband/hw/mthca/mthca_allocator.c return obj;
obj 64 drivers/infiniband/hw/mthca/mthca_allocator.c void mthca_free(struct mthca_alloc *alloc, u32 obj)
obj 68 drivers/infiniband/hw/mthca/mthca_allocator.c obj &= alloc->max - 1;
obj 72 drivers/infiniband/hw/mthca/mthca_allocator.c clear_bit(obj, alloc->table);
obj 73 drivers/infiniband/hw/mthca/mthca_allocator.c alloc->last = min(alloc->last, obj);
obj 415 drivers/infiniband/hw/mthca/mthca_dev.h void mthca_free(struct mthca_alloc *alloc, u32 obj);
obj 222 drivers/infiniband/hw/mthca/mthca_memfree.c int mthca_table_get(struct mthca_dev *dev, struct mthca_icm_table *table, int obj)
obj 224 drivers/infiniband/hw/mthca/mthca_memfree.c int i = (obj & (table->num_obj - 1)) * table->obj_size / MTHCA_TABLE_CHUNK_SIZE;
obj 257 drivers/infiniband/hw/mthca/mthca_memfree.c void mthca_table_put(struct mthca_dev *dev, struct mthca_icm_table *table, int obj)
obj 264 drivers/infiniband/hw/mthca/mthca_memfree.c i = (obj & (table->num_obj - 1)) * table->obj_size / MTHCA_TABLE_CHUNK_SIZE;
obj 278 drivers/infiniband/hw/mthca/mthca_memfree.c void *mthca_table_find(struct mthca_icm_table *table, int obj, dma_addr_t *dma_handle)
obj 290 drivers/infiniband/hw/mthca/mthca_memfree.c idx = (obj & (table->num_obj - 1)) * table->obj_size;
obj 91 drivers/infiniband/hw/mthca/mthca_memfree.h int mthca_table_get(struct mthca_dev *dev, struct mthca_icm_table *table, int obj);
obj 92 drivers/infiniband/hw/mthca/mthca_memfree.h void mthca_table_put(struct mthca_dev *dev, struct mthca_icm_table *table, int obj);
obj 93 drivers/infiniband/hw/mthca/mthca_memfree.h void *mthca_table_find(struct mthca_icm_table *table, int obj, dma_addr_t *dma_handle);
obj 76 drivers/infiniband/hw/usnic/usnic_ib_main.c static int usnic_ib_dump_vf_hdr(void *obj, char *buf, int buf_sz)
obj 78 drivers/infiniband/hw/usnic/usnic_ib_main.c struct usnic_ib_vf *vf = obj;
obj 77 drivers/infiniband/hw/usnic/usnic_ib_qp_grp.c int usnic_ib_qp_grp_dump_rows(void *obj, char *buf, int buf_sz)
obj 79 drivers/infiniband/hw/usnic/usnic_ib_qp_grp.c struct usnic_ib_qp_grp *qp_grp = obj;
obj 81 drivers/infiniband/hw/usnic/usnic_ib_qp_grp.c if (obj) {
obj 91 drivers/infiniband/hw/usnic/usnic_ib_qp_grp.h int usnic_ib_qp_grp_dump_rows(void *obj, char *buf, int buf_sz);
obj 87 drivers/infiniband/hw/vmw_pvrdma/pvrdma_doorbell.c u32 obj;
obj 92 drivers/infiniband/hw/vmw_pvrdma/pvrdma_doorbell.c obj = find_next_zero_bit(tbl->table, tbl->max, tbl->last);
obj 93 drivers/infiniband/hw/vmw_pvrdma/pvrdma_doorbell.c if (obj >= tbl->max) {
obj 95 drivers/infiniband/hw/vmw_pvrdma/pvrdma_doorbell.c obj = find_first_zero_bit(tbl->table, tbl->max);
obj 98 drivers/infiniband/hw/vmw_pvrdma/pvrdma_doorbell.c if (obj >= tbl->max) {
obj 103 drivers/infiniband/hw/vmw_pvrdma/pvrdma_doorbell.c set_bit(obj, tbl->table);
obj 104 drivers/infiniband/hw/vmw_pvrdma/pvrdma_doorbell.c obj |= tbl->top;
obj 108 drivers/infiniband/hw/vmw_pvrdma/pvrdma_doorbell.c uar->index = obj;
obj 119 drivers/infiniband/hw/vmw_pvrdma/pvrdma_doorbell.c u32 obj;
obj 121 drivers/infiniband/hw/vmw_pvrdma/pvrdma_doorbell.c obj = uar->index & (tbl->max - 1);
obj 123 drivers/infiniband/hw/vmw_pvrdma/pvrdma_doorbell.c clear_bit(obj, tbl->table);
obj 124 drivers/infiniband/hw/vmw_pvrdma/pvrdma_doorbell.c tbl->last = min(tbl->last, obj);
obj 81 drivers/infiniband/sw/rdmavt/mmap.c vfree(ip->obj);
obj 137 drivers/infiniband/sw/rdmavt/mmap.c ret = remap_vmalloc_range(vma, ip->obj, 0);
obj 160 drivers/infiniband/sw/rdmavt/mmap.c struct ib_udata *udata, void *obj)
obj 185 drivers/infiniband/sw/rdmavt/mmap.c ip->obj = obj;
obj 199 drivers/infiniband/sw/rdmavt/mmap.c u32 size, void *obj)
obj 211 drivers/infiniband/sw/rdmavt/mmap.c ip->obj = obj;
obj 57 drivers/infiniband/sw/rdmavt/mmap.h struct ib_udata *udata, void *obj);
obj 59 drivers/infiniband/sw/rdmavt/mmap.h u32 size, void *obj);
obj 88 drivers/infiniband/sw/rxe/rxe_loc.h void *obj;
obj 96 drivers/infiniband/sw/rxe/rxe_loc.h struct ib_udata *udata, void *obj);
obj 58 drivers/infiniband/sw/rxe/rxe_mmap.c vfree(ip->obj); /* buf */
obj 128 drivers/infiniband/sw/rxe/rxe_mmap.c ret = remap_vmalloc_range(vma, ip->obj, 0);
obj 145 drivers/infiniband/sw/rxe/rxe_mmap.c struct ib_udata *udata, void *obj)
obj 173 drivers/infiniband/sw/rxe/rxe_mmap.c ip->obj = obj;
obj 66 drivers/infiniband/sw/rxe/rxe_pool.h void (*cleanup)(struct rxe_pool_entry *obj);
obj 97 drivers/infiniband/sw/rxe/rxe_pool.h void (*cleanup)(struct rxe_pool_entry *obj);
obj 117 drivers/infiniband/sw/rxe/rxe_task.c int rxe_init_task(void *obj, struct rxe_task *task,
obj 120 drivers/infiniband/sw/rxe/rxe_task.c task->obj = obj;
obj 49 drivers/infiniband/sw/rxe/rxe_task.h void *obj;
obj 65 drivers/infiniband/sw/rxe/rxe_task.h int rxe_init_task(void *obj, struct rxe_task *task,
obj 280 drivers/infiniband/ulp/opa_vnic/opa_vnic_internal.h #define vnic_hash_for_each_safe(name, bkt, tmp, obj, member) \
obj 281 drivers/infiniband/ulp/opa_vnic/opa_vnic_internal.h for ((bkt) = 0, obj = NULL; \
obj 282 drivers/infiniband/ulp/opa_vnic/opa_vnic_internal.h !obj && (bkt) < OPA_VNIC_MAC_TBL_SIZE; (bkt)++) \
obj 283 drivers/infiniband/ulp/opa_vnic/opa_vnic_internal.h hlist_for_each_entry_safe(obj, tmp, &name[bkt], member)
obj 285 drivers/infiniband/ulp/opa_vnic/opa_vnic_internal.h #define vnic_hash_for_each_possible(name, obj, member, key) \
obj 286 drivers/infiniband/ulp/opa_vnic/opa_vnic_internal.h hlist_for_each_entry(obj, \
obj 289 drivers/infiniband/ulp/opa_vnic/opa_vnic_internal.h #define vnic_hash_for_each(name, bkt, obj, member) \
obj 290 drivers/infiniband/ulp/opa_vnic/opa_vnic_internal.h for ((bkt) = 0, obj = NULL; \
obj 291 drivers/infiniband/ulp/opa_vnic/opa_vnic_internal.h !obj && (bkt) < OPA_VNIC_MAC_TBL_SIZE; (bkt)++) \
obj 292 drivers/infiniband/ulp/opa_vnic/opa_vnic_internal.h hlist_for_each_entry(obj, &name[bkt], member)
obj 148 drivers/input/misc/soc_button_array.c static int soc_button_get_acpi_object_int(const union acpi_object *obj)
obj 150 drivers/input/misc/soc_button_array.c if (obj->type != ACPI_TYPE_INTEGER)
obj 153 drivers/input/misc/soc_button_array.c return obj->integer.value;
obj 23 drivers/input/rmi4/rmi_2d_sensor.c struct rmi_2d_sensor_abs_object *obj,
obj 29 drivers/input/rmi4/rmi_2d_sensor.c if (obj->type == RMI_2D_OBJECT_NONE)
obj 33 drivers/input/rmi4/rmi_2d_sensor.c obj->x = sensor->max_x - obj->x;
obj 36 drivers/input/rmi4/rmi_2d_sensor.c obj->y = sensor->max_y - obj->y;
obj 39 drivers/input/rmi4/rmi_2d_sensor.c swap(obj->x, obj->y);
obj 49 drivers/input/rmi4/rmi_2d_sensor.c obj->x += axis_align->offset_x;
obj 50 drivers/input/rmi4/rmi_2d_sensor.c obj->y += axis_align->offset_y;
obj 52 drivers/input/rmi4/rmi_2d_sensor.c obj->x = max(axis_align->clip_x_low, obj->x);
obj 53 drivers/input/rmi4/rmi_2d_sensor.c obj->y = max(axis_align->clip_y_low, obj->y);
obj 56 drivers/input/rmi4/rmi_2d_sensor.c obj->x = min(sensor->max_x, obj->x);
obj 59 drivers/input/rmi4/rmi_2d_sensor.c obj->y = min(sensor->max_y, obj->y);
obj 61 drivers/input/rmi4/rmi_2d_sensor.c sensor->tracking_pos[slot].x = obj->x;
obj 62 drivers/input/rmi4/rmi_2d_sensor.c sensor->tracking_pos[slot].y = obj->y;
obj 67 drivers/input/rmi4/rmi_2d_sensor.c struct rmi_2d_sensor_abs_object *obj,
obj 79 drivers/input/rmi4/rmi_2d_sensor.c input_mt_report_slot_state(input, obj->mt_tool,
obj 80 drivers/input/rmi4/rmi_2d_sensor.c obj->type != RMI_2D_OBJECT_NONE);
obj 82 drivers/input/rmi4/rmi_2d_sensor.c if (obj->type != RMI_2D_OBJECT_NONE) {
obj 83 drivers/input/rmi4/rmi_2d_sensor.c obj->x = sensor->tracking_pos[slot].x;
obj 84 drivers/input/rmi4/rmi_2d_sensor.c obj->y = sensor->tracking_pos[slot].y;
obj 87 drivers/input/rmi4/rmi_2d_sensor.c swap(obj->wx, obj->wy);
obj 89 drivers/input/rmi4/rmi_2d_sensor.c wide = (obj->wx > obj->wy);
obj 90 drivers/input/rmi4/rmi_2d_sensor.c major = max(obj->wx, obj->wy);
obj 91 drivers/input/rmi4/rmi_2d_sensor.c minor = min(obj->wx, obj->wy);
obj 93 drivers/input/rmi4/rmi_2d_sensor.c if (obj->type == RMI_2D_OBJECT_STYLUS) {
obj 98 drivers/input/rmi4/rmi_2d_sensor.c input_event(sensor->input, EV_ABS, ABS_MT_POSITION_X, obj->x);
obj 99 drivers/input/rmi4/rmi_2d_sensor.c input_event(sensor->input, EV_ABS, ABS_MT_POSITION_Y, obj->y);
obj 101 drivers/input/rmi4/rmi_2d_sensor.c input_event(sensor->input, EV_ABS, ABS_MT_PRESSURE, obj->z);
obj 107 drivers/input/rmi4/rmi_2d_sensor.c __func__, slot, obj->type, obj->x, obj->y, obj->z,
obj 108 drivers/input/rmi4/rmi_2d_sensor.c obj->wx, obj->wy);
obj 75 drivers/input/rmi4/rmi_2d_sensor.h struct rmi_2d_sensor_abs_object *obj,
obj 79 drivers/input/rmi4/rmi_2d_sensor.h struct rmi_2d_sensor_abs_object *obj,
obj 536 drivers/input/rmi4/rmi_f11.c struct rmi_2d_sensor_abs_object *obj,
obj 546 drivers/input/rmi4/rmi_f11.c obj->type = RMI_2D_OBJECT_FINGER;
obj 549 drivers/input/rmi4/rmi_f11.c obj->type = RMI_2D_OBJECT_NONE;
obj 552 drivers/input/rmi4/rmi_f11.c obj->mt_tool = tool_type;
obj 553 drivers/input/rmi4/rmi_f11.c obj->x = (pos_data[0] << 4) | (pos_data[2] & 0x0F);
obj 554 drivers/input/rmi4/rmi_f11.c obj->y = (pos_data[1] << 4) | (pos_data[2] >> 4);
obj 555 drivers/input/rmi4/rmi_f11.c obj->z = pos_data[4];
obj 556 drivers/input/rmi4/rmi_f11.c obj->wx = pos_data[3] & 0x0f;
obj 557 drivers/input/rmi4/rmi_f11.c obj->wy = pos_data[3] >> 4;
obj 559 drivers/input/rmi4/rmi_f11.c rmi_2d_sensor_abs_process(sensor, obj, n_finger);
obj 154 drivers/input/rmi4/rmi_f12.c struct rmi_2d_sensor_abs_object *obj = &sensor->objs[i];
obj 156 drivers/input/rmi4/rmi_f12.c obj->type = RMI_2D_OBJECT_NONE;
obj 157 drivers/input/rmi4/rmi_f12.c obj->mt_tool = MT_TOOL_FINGER;
obj 161 drivers/input/rmi4/rmi_f12.c obj->type = RMI_2D_OBJECT_FINGER;
obj 164 drivers/input/rmi4/rmi_f12.c obj->type = RMI_2D_OBJECT_STYLUS;
obj 165 drivers/input/rmi4/rmi_f12.c obj->mt_tool = MT_TOOL_PEN;
obj 168 drivers/input/rmi4/rmi_f12.c obj->type = RMI_2D_OBJECT_PALM;
obj 169 drivers/input/rmi4/rmi_f12.c obj->mt_tool = MT_TOOL_PALM;
obj 172 drivers/input/rmi4/rmi_f12.c obj->type = RMI_2D_OBJECT_UNCLASSIFIED;
obj 176 drivers/input/rmi4/rmi_f12.c obj->x = (data1[2] << 8) | data1[1];
obj 177 drivers/input/rmi4/rmi_f12.c obj->y = (data1[4] << 8) | data1[3];
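The rmi_2d_sensor.c run above (source lines 33-59) applies a fixed transform order to every reported touch: flip X/Y against the sensor maxima, optionally swap the axes, add alignment offsets, then clip to the low and high bounds. A compact standalone version of that pipeline (struct and field names are simplified stand-ins for the rmi_2d_axis_alignment fields):

        #include <stdio.h>

        struct point { int x, y; };

        struct align {
                int flip_x, flip_y, swap_xy;
                int offset_x, offset_y;
                int clip_x_low, clip_y_low;
        };

        static int clamp(int v, int lo, int hi) { return v < lo ? lo : v > hi ? hi : v; }

        /* Same transform order as rmi_2d_sensor_abs_process(): flip, swap,
         * offset, then clip against [clip_low, sensor max]. */
        static struct point align_point(struct point p, const struct align *a,
                                        int max_x, int max_y)
        {
                if (a->flip_x) p.x = max_x - p.x;
                if (a->flip_y) p.y = max_y - p.y;
                if (a->swap_xy) { int t = p.x; p.x = p.y; p.y = t; }
                p.x = clamp(p.x + a->offset_x, a->clip_x_low, max_x);
                p.y = clamp(p.y + a->offset_y, a->clip_y_low, max_y);
                return p;
        }

        int main(void)
        {
                struct align a = { .flip_y = 1, .offset_x = 10 };
                struct point p = align_point((struct point){ 100, 30 }, &a, 1023, 767);

                printf("aligned: (%d, %d)\n", p.x, p.y);        /* (110, 737) */
                return 0;
        }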
obj 178 drivers/input/rmi4/rmi_f12.c obj->z = data1[5];
obj 179 drivers/input/rmi4/rmi_f12.c obj->wx = data1[6];
obj 180 drivers/input/rmi4/rmi_f12.c obj->wy = data1[7];
obj 182 drivers/input/rmi4/rmi_f12.c rmi_2d_sensor_abs_process(sensor, obj, i);
obj 346 drivers/input/touchscreen/atmel_mxt_ts.c static size_t mxt_obj_size(const struct mxt_object *obj)
obj 348 drivers/input/touchscreen/atmel_mxt_ts.c return obj->size_minus_one + 1;
obj 351 drivers/input/touchscreen/atmel_mxt_ts.c static size_t mxt_obj_instances(const struct mxt_object *obj)
obj 353 drivers/input/touchscreen/atmel_mxt_ts.c return obj->instances_minus_one + 1;
obj 370 drivers/input/touchscreen/chipone_icn8505.c union acpi_object *obj;
obj 379 drivers/input/touchscreen/chipone_icn8505.c obj = buffer.pointer;
obj 380 drivers/input/touchscreen/chipone_icn8505.c if (obj->type == ACPI_TYPE_STRING)
obj 381 drivers/input/touchscreen/chipone_icn8505.c subsys = obj->string.pointer;
obj 1933 drivers/iommu/dmar.c union acpi_object *obj;
obj 1945 drivers/iommu/dmar.c obj = acpi_evaluate_dsm_typed(handle, &dmar_hp_guid, DMAR_DSM_REV_ID,
obj 1947 drivers/iommu/dmar.c if (!obj)
obj 1953 drivers/iommu/dmar.c start = (struct acpi_dmar_header *)obj->buffer.pointer;
obj 1954 drivers/iommu/dmar.c ret = dmar_walk_remapping_entries(start, obj->buffer.length, &callback);
obj 1956 drivers/iommu/dmar.c ACPI_FREE(obj);
obj 25 drivers/iommu/omap-iommu-debug.c static inline bool is_omap_iommu_detached(struct omap_iommu *obj)
obj 27 drivers/iommu/omap-iommu-debug.c return !obj->domain;
obj 36 drivers/iommu/omap-iommu-debug.c iommu_read_reg(obj, MMU_##name)); \
obj 44 drivers/iommu/omap-iommu-debug.c omap2_iommu_dump_ctx(struct omap_iommu *obj, char *buf, ssize_t len)
obj 68 drivers/iommu/omap-iommu-debug.c static ssize_t omap_iommu_dump_ctx(struct omap_iommu *obj, char *buf,
obj 71 drivers/iommu/omap-iommu-debug.c if (!obj || !buf)
obj 74 drivers/iommu/omap-iommu-debug.c pm_runtime_get_sync(obj->dev);
obj 76 drivers/iommu/omap-iommu-debug.c bytes = omap2_iommu_dump_ctx(obj, buf, bytes);
obj 78 drivers/iommu/omap-iommu-debug.c pm_runtime_put_sync(obj->dev);
obj 86 drivers/iommu/omap-iommu-debug.c struct omap_iommu *obj = file->private_data;
obj 90 drivers/iommu/omap-iommu-debug.c if (is_omap_iommu_detached(obj))
obj 100 drivers/iommu/omap-iommu-debug.c bytes = omap_iommu_dump_ctx(obj, p, count);
obj 110 drivers/iommu/omap-iommu-debug.c __dump_tlb_entries(struct omap_iommu *obj, struct cr_regs *crs, int num)
obj 117 drivers/iommu/omap-iommu-debug.c pm_runtime_get_sync(obj->dev);
obj 118 drivers/iommu/omap-iommu-debug.c iotlb_lock_get(obj, &saved);
obj 120 drivers/iommu/omap-iommu-debug.c for_each_iotlb_cr(obj, num, i, tmp) {
obj 126 drivers/iommu/omap-iommu-debug.c iotlb_lock_set(obj, &saved);
obj 127 drivers/iommu/omap-iommu-debug.c pm_runtime_put_sync(obj->dev);
obj 132 drivers/iommu/omap-iommu-debug.c static ssize_t iotlb_dump_cr(struct omap_iommu *obj, struct cr_regs *cr,
obj 140 drivers/iommu/omap-iommu-debug.c static size_t omap_dump_tlb_entries(struct omap_iommu *obj, struct seq_file *s)
obj 145 drivers/iommu/omap-iommu-debug.c num = obj->nr_tlb_entries;
obj 151 drivers/iommu/omap-iommu-debug.c num = __dump_tlb_entries(obj, cr, num);
obj 153 drivers/iommu/omap-iommu-debug.c iotlb_dump_cr(obj, cr + i, s);
obj 161 drivers/iommu/omap-iommu-debug.c struct omap_iommu *obj = s->private;
obj 163 drivers/iommu/omap-iommu-debug.c if (is_omap_iommu_detached(obj))
obj 170 drivers/iommu/omap-iommu-debug.c omap_dump_tlb_entries(obj, s);
obj 182 drivers/iommu/omap-iommu-debug.c struct omap_iommu *obj = s->private;
obj 184 drivers/iommu/omap-iommu-debug.c spin_lock(&obj->page_table_lock);
obj 186 drivers/iommu/omap-iommu-debug.c iopgd = iopgd_offset(obj, 0);
obj 207 drivers/iommu/omap-iommu-debug.c spin_unlock(&obj->page_table_lock);
obj 212 drivers/iommu/omap-iommu-debug.c struct omap_iommu *obj = s->private;
obj 214 drivers/iommu/omap-iommu-debug.c if (is_omap_iommu_detached(obj))
obj 239 drivers/iommu/omap-iommu-debug.c void omap_iommu_debugfs_add(struct omap_iommu *obj)
obj 246 drivers/iommu/omap-iommu-debug.c d = debugfs_create_dir(obj->name, iommu_debug_root);
obj 247 drivers/iommu/omap-iommu-debug.c obj->debug_dir = d;
obj 249 drivers/iommu/omap-iommu-debug.c debugfs_create_u32("nr_tlb_entries", 0400, d, &obj->nr_tlb_entries);
obj 250 drivers/iommu/omap-iommu-debug.c debugfs_create_file("regs", 0400, d, obj, &regs_fops);
obj 251 drivers/iommu/omap-iommu-debug.c debugfs_create_file("tlb", 0400, d, obj, &tlb_fops);
obj 252 drivers/iommu/omap-iommu-debug.c debugfs_create_file("pagetable", 0400, d, obj, &pagetable_fops);
obj 255 drivers/iommu/omap-iommu-debug.c void omap_iommu_debugfs_remove(struct omap_iommu *obj)
obj 257 drivers/iommu/omap-iommu-debug.c if (!obj->debug_dir)
obj 260 drivers/iommu/omap-iommu-debug.c debugfs_remove_recursive(obj->debug_dir);
obj 86 drivers/iommu/omap-iommu.c struct omap_iommu *obj;
obj 94 drivers/iommu/omap-iommu.c obj = arch_data->iommu_dev;
obj 95 drivers/iommu/omap-iommu.c p = obj->ctx;
obj 97 drivers/iommu/omap-iommu.c p[i] = iommu_read_reg(obj, i * sizeof(u32));
obj 98 drivers/iommu/omap-iommu.c dev_dbg(obj->dev, "%s\t[%02d] %08x\n", __func__, i,
obj 116 drivers/iommu/omap-iommu.c struct omap_iommu *obj;
obj 124 drivers/iommu/omap-iommu.c obj = arch_data->iommu_dev;
obj 125 drivers/iommu/omap-iommu.c p = obj->ctx;
obj 127 drivers/iommu/omap-iommu.c iommu_write_reg(obj, p[i], i * sizeof(u32));
obj 128 drivers/iommu/omap-iommu.c dev_dbg(obj->dev, "%s\t[%02d] %08x\n", __func__, i,
obj 136 drivers/iommu/omap-iommu.c static void dra7_cfg_dspsys_mmu(struct omap_iommu *obj, bool enable)
obj 140 drivers/iommu/omap-iommu.c if (!obj->syscfg)
obj 143 drivers/iommu/omap-iommu.c mask = (1 << (obj->id * DSP_SYS_MMU_CONFIG_EN_SHIFT));
obj 145 drivers/iommu/omap-iommu.c regmap_update_bits(obj->syscfg, DSP_SYS_MMU_CONFIG, mask, val);
obj 148 drivers/iommu/omap-iommu.c static void __iommu_set_twl(struct omap_iommu *obj, bool on)
obj 150 drivers/iommu/omap-iommu.c u32 l = iommu_read_reg(obj, MMU_CNTL);
obj 153 drivers/iommu/omap-iommu.c iommu_write_reg(obj, MMU_IRQ_TWL_MASK, MMU_IRQENABLE);
obj 155 drivers/iommu/omap-iommu.c iommu_write_reg(obj, MMU_IRQ_TLB_MISS_MASK, MMU_IRQENABLE);
obj 163 drivers/iommu/omap-iommu.c iommu_write_reg(obj, l, MMU_CNTL);
obj 166 drivers/iommu/omap-iommu.c static int omap2_iommu_enable(struct omap_iommu *obj)
obj 170 drivers/iommu/omap-iommu.c if (!obj->iopgd || !IS_ALIGNED((u32)obj->iopgd, SZ_16K))
obj 173 drivers/iommu/omap-iommu.c pa = virt_to_phys(obj->iopgd);
obj 177 drivers/iommu/omap-iommu.c l = iommu_read_reg(obj, MMU_REVISION);
obj 178 drivers/iommu/omap-iommu.c dev_info(obj->dev, "%s: version %d.%d\n", obj->name,
obj 181 drivers/iommu/omap-iommu.c iommu_write_reg(obj, pa, MMU_TTB);
obj 183 drivers/iommu/omap-iommu.c dra7_cfg_dspsys_mmu(obj, true);
obj 185 drivers/iommu/omap-iommu.c if (obj->has_bus_err_back)
obj 186 drivers/iommu/omap-iommu.c iommu_write_reg(obj, MMU_GP_REG_BUS_ERR_BACK_EN, MMU_GP_REG);
obj 188 drivers/iommu/omap-iommu.c __iommu_set_twl(obj, true);
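The omap-iommu.c entries above lean on one recurring MMIO idiom: read a control register, modify a bit or two, write it back (see __iommu_set_twl() and omap2_iommu_enable() at source lines 148-188). A tiny model of that read-modify-write sequence against a fake register file; the register names, bit values, and the in-memory "device" are stand-ins, not the OMAP register map:

        #include <stdio.h>
        #include <stdint.h>

        /* Fake MMIO: an in-memory register file standing in for
         * iommu_read_reg()/iommu_write_reg() against real hardware. */
        enum { MMU_CNTL, MMU_IRQENABLE, NUM_REGS };
        #define MMU_CNTL_MMU_EN (1u << 1)
        #define MMU_CNTL_TWL_EN (1u << 2)

        static uint32_t regs[NUM_REGS];

        static uint32_t read_reg(int r) { return regs[r]; }
        static void write_reg(int r, uint32_t v) { regs[r] = v; }

        /* Read-modify-write, as __iommu_set_twl() does for the TWL enable bit:
         * only the targeted bits change, everything else is preserved. */
        static void set_twl(int on)
        {
                uint32_t l = read_reg(MMU_CNTL);

                if (on)
                        l |= MMU_CNTL_TWL_EN | MMU_CNTL_MMU_EN;
                else
                        l &= ~MMU_CNTL_TWL_EN;
                write_reg(MMU_CNTL, l);
        }

        int main(void)
        {
                set_twl(1);
                printf("MMU_CNTL after enable:  0x%x\n", read_reg(MMU_CNTL));
                set_twl(0);
                printf("MMU_CNTL after disable: 0x%x\n", read_reg(MMU_CNTL));
                return 0;
        }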
obj 193 drivers/iommu/omap-iommu.c static void omap2_iommu_disable(struct omap_iommu *obj)
obj 195 drivers/iommu/omap-iommu.c u32 l = iommu_read_reg(obj, MMU_CNTL);
obj 198 drivers/iommu/omap-iommu.c iommu_write_reg(obj, l, MMU_CNTL);
obj 199 drivers/iommu/omap-iommu.c dra7_cfg_dspsys_mmu(obj, false);
obj 201 drivers/iommu/omap-iommu.c dev_dbg(obj->dev, "%s is shutting down\n", obj->name);
obj 204 drivers/iommu/omap-iommu.c static int iommu_enable(struct omap_iommu *obj)
obj 208 drivers/iommu/omap-iommu.c ret = pm_runtime_get_sync(obj->dev);
obj 210 drivers/iommu/omap-iommu.c pm_runtime_put_noidle(obj->dev);
obj 215 drivers/iommu/omap-iommu.c static void iommu_disable(struct omap_iommu *obj)
obj 217 drivers/iommu/omap-iommu.c pm_runtime_put_sync(obj->dev);
obj 243 drivers/iommu/omap-iommu.c static u32 iommu_report_fault(struct omap_iommu *obj, u32 *da)
obj 247 drivers/iommu/omap-iommu.c status = iommu_read_reg(obj, MMU_IRQSTATUS);
obj 254 drivers/iommu/omap-iommu.c fault_addr = iommu_read_reg(obj, MMU_FAULT_AD);
obj 257 drivers/iommu/omap-iommu.c iommu_write_reg(obj, status, MMU_IRQSTATUS);
obj 262 drivers/iommu/omap-iommu.c void iotlb_lock_get(struct omap_iommu *obj, struct iotlb_lock *l)
obj 266 drivers/iommu/omap-iommu.c val = iommu_read_reg(obj, MMU_LOCK);
obj 272 drivers/iommu/omap-iommu.c void iotlb_lock_set(struct omap_iommu *obj, struct iotlb_lock *l)
obj 279 drivers/iommu/omap-iommu.c iommu_write_reg(obj, val, MMU_LOCK);
obj 282 drivers/iommu/omap-iommu.c static void iotlb_read_cr(struct omap_iommu *obj, struct cr_regs *cr)
obj 284 drivers/iommu/omap-iommu.c cr->cam = iommu_read_reg(obj, MMU_READ_CAM);
obj 285 drivers/iommu/omap-iommu.c cr->ram = iommu_read_reg(obj, MMU_READ_RAM);
obj 288 drivers/iommu/omap-iommu.c static void iotlb_load_cr(struct omap_iommu *obj, struct cr_regs *cr)
obj 290 drivers/iommu/omap-iommu.c iommu_write_reg(obj, cr->cam | MMU_CAM_V, MMU_CAM);
obj 291 drivers/iommu/omap-iommu.c iommu_write_reg(obj, cr->ram, MMU_RAM);
obj 293 drivers/iommu/omap-iommu.c iommu_write_reg(obj, 1, MMU_FLUSH_ENTRY);
obj 294 drivers/iommu/omap-iommu.c iommu_write_reg(obj, 1, MMU_LD_TLB);
obj 298 drivers/iommu/omap-iommu.c struct cr_regs __iotlb_read_cr(struct omap_iommu *obj, int n)
obj 303 drivers/iommu/omap-iommu.c iotlb_lock_get(obj, &l);
obj 305 drivers/iommu/omap-iommu.c iotlb_lock_set(obj, &l);
obj 306 drivers/iommu/omap-iommu.c iotlb_read_cr(obj, &cr);
obj 312 drivers/iommu/omap-iommu.c static struct cr_regs *iotlb_alloc_cr(struct omap_iommu *obj,
obj 321 drivers/iommu/omap-iommu.c dev_err(obj->dev, "%s:\twrong alignment: %08x\n", __func__,
obj 341 drivers/iommu/omap-iommu.c static int load_iotlb_entry(struct omap_iommu *obj, struct iotlb_entry *e)
obj 347 drivers/iommu/omap-iommu.c if (!obj || !obj->nr_tlb_entries || !e)
obj 350 drivers/iommu/omap-iommu.c pm_runtime_get_sync(obj->dev);
obj 352 drivers/iommu/omap-iommu.c iotlb_lock_get(obj, &l);
obj 353 drivers/iommu/omap-iommu.c if (l.base == obj->nr_tlb_entries) {
obj 354 drivers/iommu/omap-iommu.c dev_warn(obj->dev, "%s: preserve entries full\n", __func__);
obj 362 drivers/iommu/omap-iommu.c for_each_iotlb_cr(obj, obj->nr_tlb_entries, i, tmp)
obj 366 drivers/iommu/omap-iommu.c if (i == obj->nr_tlb_entries) {
obj 367 drivers/iommu/omap-iommu.c dev_dbg(obj->dev, "%s: full: no entry\n", __func__);
obj 372 drivers/iommu/omap-iommu.c iotlb_lock_get(obj, &l);
obj 375 drivers/iommu/omap-iommu.c iotlb_lock_set(obj, &l);
obj 378 drivers/iommu/omap-iommu.c cr = iotlb_alloc_cr(obj, e);
obj 380 drivers/iommu/omap-iommu.c pm_runtime_put_sync(obj->dev);
pm_runtime_put_sync(obj->dev); obj 384 drivers/iommu/omap-iommu.c iotlb_load_cr(obj, cr); obj 390 drivers/iommu/omap-iommu.c if (++l.vict == obj->nr_tlb_entries) obj 392 drivers/iommu/omap-iommu.c iotlb_lock_set(obj, &l); obj 394 drivers/iommu/omap-iommu.c pm_runtime_put_sync(obj->dev); obj 400 drivers/iommu/omap-iommu.c static int load_iotlb_entry(struct omap_iommu *obj, struct iotlb_entry *e) obj 407 drivers/iommu/omap-iommu.c static int prefetch_iotlb_entry(struct omap_iommu *obj, struct iotlb_entry *e) obj 409 drivers/iommu/omap-iommu.c return load_iotlb_entry(obj, e); obj 419 drivers/iommu/omap-iommu.c static void flush_iotlb_page(struct omap_iommu *obj, u32 da) obj 424 drivers/iommu/omap-iommu.c pm_runtime_get_sync(obj->dev); obj 426 drivers/iommu/omap-iommu.c for_each_iotlb_cr(obj, obj->nr_tlb_entries, i, cr) { obj 437 drivers/iommu/omap-iommu.c dev_dbg(obj->dev, "%s: %08x<=%08x(%x)\n", obj 439 drivers/iommu/omap-iommu.c iotlb_load_cr(obj, &cr); obj 440 drivers/iommu/omap-iommu.c iommu_write_reg(obj, 1, MMU_FLUSH_ENTRY); obj 444 drivers/iommu/omap-iommu.c pm_runtime_put_sync(obj->dev); obj 446 drivers/iommu/omap-iommu.c if (i == obj->nr_tlb_entries) obj 447 drivers/iommu/omap-iommu.c dev_dbg(obj->dev, "%s: no page for %08x\n", __func__, da); obj 454 drivers/iommu/omap-iommu.c static void flush_iotlb_all(struct omap_iommu *obj) obj 458 drivers/iommu/omap-iommu.c pm_runtime_get_sync(obj->dev); obj 462 drivers/iommu/omap-iommu.c iotlb_lock_set(obj, &l); obj 464 drivers/iommu/omap-iommu.c iommu_write_reg(obj, 1, MMU_GFLUSH); obj 466 drivers/iommu/omap-iommu.c pm_runtime_put_sync(obj->dev); obj 480 drivers/iommu/omap-iommu.c static void iopte_free(struct omap_iommu *obj, u32 *iopte, bool dma_valid) obj 488 drivers/iommu/omap-iommu.c dma_unmap_single(obj->dev, pt_dma, IOPTE_TABLE_SIZE, obj 496 drivers/iommu/omap-iommu.c static u32 *iopte_alloc(struct omap_iommu *obj, u32 *iopgd, obj 509 drivers/iommu/omap-iommu.c spin_unlock(&obj->page_table_lock); obj 511 drivers/iommu/omap-iommu.c spin_lock(&obj->page_table_lock); obj 517 drivers/iommu/omap-iommu.c *pt_dma = dma_map_single(obj->dev, iopte, IOPTE_TABLE_SIZE, obj 519 drivers/iommu/omap-iommu.c if (dma_mapping_error(obj->dev, *pt_dma)) { obj 520 drivers/iommu/omap-iommu.c dev_err(obj->dev, "DMA map error for L2 table\n"); obj 521 drivers/iommu/omap-iommu.c iopte_free(obj, iopte, false); obj 530 drivers/iommu/omap-iommu.c dev_err(obj->dev, "DMA translation error for L2 table\n"); obj 531 drivers/iommu/omap-iommu.c dma_unmap_single(obj->dev, *pt_dma, IOPTE_TABLE_SIZE, obj 533 drivers/iommu/omap-iommu.c iopte_free(obj, iopte, false); obj 539 drivers/iommu/omap-iommu.c flush_iopte_range(obj->dev, obj->pd_dma, offset, 1); obj 540 drivers/iommu/omap-iommu.c dev_vdbg(obj->dev, "%s: a new pte:%p\n", __func__, iopte); obj 543 drivers/iommu/omap-iommu.c iopte_free(obj, iopte, false); obj 549 drivers/iommu/omap-iommu.c dev_vdbg(obj->dev, obj 556 drivers/iommu/omap-iommu.c static int iopgd_alloc_section(struct omap_iommu *obj, u32 da, u32 pa, u32 prot) obj 558 drivers/iommu/omap-iommu.c u32 *iopgd = iopgd_offset(obj, da); obj 562 drivers/iommu/omap-iommu.c dev_err(obj->dev, "%s: %08x:%08x should aligned on %08lx\n", obj 568 drivers/iommu/omap-iommu.c flush_iopte_range(obj->dev, obj->pd_dma, offset, 1); obj 572 drivers/iommu/omap-iommu.c static int iopgd_alloc_super(struct omap_iommu *obj, u32 da, u32 pa, u32 prot) obj 574 drivers/iommu/omap-iommu.c u32 *iopgd = iopgd_offset(obj, da); obj 579 drivers/iommu/omap-iommu.c dev_err(obj->dev, "%s: %08x:%08x 
should aligned on %08lx\n", obj 586 drivers/iommu/omap-iommu.c flush_iopte_range(obj->dev, obj->pd_dma, offset, 16); obj 590 drivers/iommu/omap-iommu.c static int iopte_alloc_page(struct omap_iommu *obj, u32 da, u32 pa, u32 prot) obj 592 drivers/iommu/omap-iommu.c u32 *iopgd = iopgd_offset(obj, da); obj 594 drivers/iommu/omap-iommu.c u32 *iopte = iopte_alloc(obj, iopgd, &pt_dma, da); obj 601 drivers/iommu/omap-iommu.c flush_iopte_range(obj->dev, pt_dma, offset, 1); obj 603 drivers/iommu/omap-iommu.c dev_vdbg(obj->dev, "%s: da:%08x pa:%08x pte:%p *pte:%08x\n", obj 609 drivers/iommu/omap-iommu.c static int iopte_alloc_large(struct omap_iommu *obj, u32 da, u32 pa, u32 prot) obj 611 drivers/iommu/omap-iommu.c u32 *iopgd = iopgd_offset(obj, da); obj 613 drivers/iommu/omap-iommu.c u32 *iopte = iopte_alloc(obj, iopgd, &pt_dma, da); obj 618 drivers/iommu/omap-iommu.c dev_err(obj->dev, "%s: %08x:%08x should aligned on %08lx\n", obj 628 drivers/iommu/omap-iommu.c flush_iopte_range(obj->dev, pt_dma, offset, 16); obj 633 drivers/iommu/omap-iommu.c iopgtable_store_entry_core(struct omap_iommu *obj, struct iotlb_entry *e) obj 639 drivers/iommu/omap-iommu.c if (!obj || !e) obj 665 drivers/iommu/omap-iommu.c spin_lock(&obj->page_table_lock); obj 666 drivers/iommu/omap-iommu.c err = fn(obj, e->da, e->pa, prot); obj 667 drivers/iommu/omap-iommu.c spin_unlock(&obj->page_table_lock); obj 678 drivers/iommu/omap-iommu.c omap_iopgtable_store_entry(struct omap_iommu *obj, struct iotlb_entry *e) obj 682 drivers/iommu/omap-iommu.c flush_iotlb_page(obj, e->da); obj 683 drivers/iommu/omap-iommu.c err = iopgtable_store_entry_core(obj, e); obj 685 drivers/iommu/omap-iommu.c prefetch_iotlb_entry(obj, e); obj 697 drivers/iommu/omap-iommu.c iopgtable_lookup_entry(struct omap_iommu *obj, u32 da, u32 **ppgd, u32 **ppte) obj 701 drivers/iommu/omap-iommu.c iopgd = iopgd_offset(obj, da); obj 712 drivers/iommu/omap-iommu.c static size_t iopgtable_clear_entry_core(struct omap_iommu *obj, u32 da) obj 715 drivers/iommu/omap-iommu.c u32 *iopgd = iopgd_offset(obj, da); obj 737 drivers/iommu/omap-iommu.c flush_iopte_range(obj->dev, pt_dma, pt_offset, nent); obj 747 drivers/iommu/omap-iommu.c iopte_free(obj, iopte, true); obj 754 drivers/iommu/omap-iommu.c iopgd = iopgd_offset(obj, (da & IOSUPER_MASK)); obj 759 drivers/iommu/omap-iommu.c flush_iopte_range(obj->dev, obj->pd_dma, pd_offset, nent); obj 769 drivers/iommu/omap-iommu.c static size_t iopgtable_clear_entry(struct omap_iommu *obj, u32 da) obj 773 drivers/iommu/omap-iommu.c spin_lock(&obj->page_table_lock); obj 775 drivers/iommu/omap-iommu.c bytes = iopgtable_clear_entry_core(obj, da); obj 776 drivers/iommu/omap-iommu.c flush_iotlb_page(obj, da); obj 778 drivers/iommu/omap-iommu.c spin_unlock(&obj->page_table_lock); obj 783 drivers/iommu/omap-iommu.c static void iopgtable_clear_entry_all(struct omap_iommu *obj) obj 788 drivers/iommu/omap-iommu.c spin_lock(&obj->page_table_lock); obj 795 drivers/iommu/omap-iommu.c iopgd = iopgd_offset(obj, da); obj 802 drivers/iommu/omap-iommu.c iopte_free(obj, iopte_offset(iopgd, 0), true); obj 805 drivers/iommu/omap-iommu.c flush_iopte_range(obj->dev, obj->pd_dma, offset, 1); obj 808 drivers/iommu/omap-iommu.c flush_iotlb_all(obj); obj 810 drivers/iommu/omap-iommu.c spin_unlock(&obj->page_table_lock); obj 820 drivers/iommu/omap-iommu.c struct omap_iommu *obj = data; obj 821 drivers/iommu/omap-iommu.c struct iommu_domain *domain = obj->domain; obj 827 drivers/iommu/omap-iommu.c errs = iommu_report_fault(obj, &da); obj 832 
drivers/iommu/omap-iommu.c if (!report_iommu_fault(domain, obj->dev, da, 0)) obj 835 drivers/iommu/omap-iommu.c iommu_write_reg(obj, 0, MMU_IRQENABLE); obj 837 drivers/iommu/omap-iommu.c iopgd = iopgd_offset(obj, da); obj 840 drivers/iommu/omap-iommu.c dev_err(obj->dev, "%s: errs:0x%08x da:0x%08x pgd:0x%p *pgd:px%08x\n", obj 841 drivers/iommu/omap-iommu.c obj->name, errs, da, iopgd, *iopgd); obj 847 drivers/iommu/omap-iommu.c dev_err(obj->dev, "%s: errs:0x%08x da:0x%08x pgd:0x%p *pgd:0x%08x pte:0x%p *pte:0x%08x\n", obj 848 drivers/iommu/omap-iommu.c obj->name, errs, da, iopgd, *iopgd, iopte, *iopte); obj 858 drivers/iommu/omap-iommu.c static int omap_iommu_attach(struct omap_iommu *obj, u32 *iopgd) obj 862 drivers/iommu/omap-iommu.c spin_lock(&obj->iommu_lock); obj 864 drivers/iommu/omap-iommu.c obj->pd_dma = dma_map_single(obj->dev, iopgd, IOPGD_TABLE_SIZE, obj 866 drivers/iommu/omap-iommu.c if (dma_mapping_error(obj->dev, obj->pd_dma)) { obj 867 drivers/iommu/omap-iommu.c dev_err(obj->dev, "DMA map error for L1 table\n"); obj 872 drivers/iommu/omap-iommu.c obj->iopgd = iopgd; obj 873 drivers/iommu/omap-iommu.c err = iommu_enable(obj); obj 876 drivers/iommu/omap-iommu.c flush_iotlb_all(obj); obj 878 drivers/iommu/omap-iommu.c spin_unlock(&obj->iommu_lock); obj 880 drivers/iommu/omap-iommu.c dev_dbg(obj->dev, "%s: %s\n", __func__, obj->name); obj 885 drivers/iommu/omap-iommu.c spin_unlock(&obj->iommu_lock); obj 894 drivers/iommu/omap-iommu.c static void omap_iommu_detach(struct omap_iommu *obj) obj 896 drivers/iommu/omap-iommu.c if (!obj || IS_ERR(obj)) obj 899 drivers/iommu/omap-iommu.c spin_lock(&obj->iommu_lock); obj 901 drivers/iommu/omap-iommu.c dma_unmap_single(obj->dev, obj->pd_dma, IOPGD_TABLE_SIZE, obj 903 drivers/iommu/omap-iommu.c obj->pd_dma = 0; obj 904 drivers/iommu/omap-iommu.c obj->iopgd = NULL; obj 905 drivers/iommu/omap-iommu.c iommu_disable(obj); obj 907 drivers/iommu/omap-iommu.c spin_unlock(&obj->iommu_lock); obj 909 drivers/iommu/omap-iommu.c dev_dbg(obj->dev, "%s: %s\n", __func__, obj->name); obj 912 drivers/iommu/omap-iommu.c static void omap_iommu_save_tlb_entries(struct omap_iommu *obj) obj 920 drivers/iommu/omap-iommu.c iotlb_lock_get(obj, &lock); obj 921 drivers/iommu/omap-iommu.c obj->num_cr_ctx = lock.base; obj 922 drivers/iommu/omap-iommu.c if (!obj->num_cr_ctx) obj 925 drivers/iommu/omap-iommu.c tmp = obj->cr_ctx; obj 926 drivers/iommu/omap-iommu.c for_each_iotlb_cr(obj, obj->num_cr_ctx, i, cr) obj 930 drivers/iommu/omap-iommu.c static void omap_iommu_restore_tlb_entries(struct omap_iommu *obj) obj 937 drivers/iommu/omap-iommu.c if (!obj->num_cr_ctx) obj 941 drivers/iommu/omap-iommu.c tmp = obj->cr_ctx; obj 942 drivers/iommu/omap-iommu.c for (i = 0; i < obj->num_cr_ctx; i++, tmp++) { obj 944 drivers/iommu/omap-iommu.c iotlb_lock_set(obj, &l); obj 945 drivers/iommu/omap-iommu.c iotlb_load_cr(obj, tmp); obj 947 drivers/iommu/omap-iommu.c l.base = obj->num_cr_ctx; obj 949 drivers/iommu/omap-iommu.c iotlb_lock_set(obj, &l); obj 1026 drivers/iommu/omap-iommu.c struct omap_iommu *obj = to_iommu(dev); obj 1030 drivers/iommu/omap-iommu.c if (obj->domain && obj->iopgd) obj 1031 drivers/iommu/omap-iommu.c omap_iommu_save_tlb_entries(obj); obj 1033 drivers/iommu/omap-iommu.c omap2_iommu_disable(obj); obj 1042 drivers/iommu/omap-iommu.c ret = pdata->set_pwrdm_constraint(pdev, false, &obj->pwrst); obj 1044 drivers/iommu/omap-iommu.c dev_warn(obj->dev, "pwrdm_constraint failed to be reset, status = %d\n", obj 1068 drivers/iommu/omap-iommu.c struct omap_iommu *obj = 
to_iommu(dev); obj 1072 drivers/iommu/omap-iommu.c ret = pdata->set_pwrdm_constraint(pdev, true, &obj->pwrst); obj 1074 drivers/iommu/omap-iommu.c dev_warn(obj->dev, "pwrdm_constraint failed to be set, status = %d\n", obj 1091 drivers/iommu/omap-iommu.c if (obj->domain) obj 1092 drivers/iommu/omap-iommu.c omap_iommu_restore_tlb_entries(obj); obj 1094 drivers/iommu/omap-iommu.c ret = omap2_iommu_enable(obj); obj 1135 drivers/iommu/omap-iommu.c struct omap_iommu *obj) obj 1148 drivers/iommu/omap-iommu.c obj->syscfg = obj 1150 drivers/iommu/omap-iommu.c if (IS_ERR(obj->syscfg)) { obj 1152 drivers/iommu/omap-iommu.c ret = PTR_ERR(obj->syscfg); obj 1157 drivers/iommu/omap-iommu.c &obj->id)) { obj 1162 drivers/iommu/omap-iommu.c if (obj->id != 0 && obj->id != 1) { obj 1177 drivers/iommu/omap-iommu.c struct omap_iommu *obj; obj 1187 drivers/iommu/omap-iommu.c obj = devm_kzalloc(&pdev->dev, sizeof(*obj) + MMU_REG_SIZE, GFP_KERNEL); obj 1188 drivers/iommu/omap-iommu.c if (!obj) obj 1200 drivers/iommu/omap-iommu.c obj->name = dev_name(&pdev->dev); obj 1201 drivers/iommu/omap-iommu.c obj->nr_tlb_entries = 32; obj 1202 drivers/iommu/omap-iommu.c err = of_property_read_u32(of, "ti,#tlb-entries", &obj->nr_tlb_entries); obj 1205 drivers/iommu/omap-iommu.c if (obj->nr_tlb_entries != 32 && obj->nr_tlb_entries != 8) obj 1208 drivers/iommu/omap-iommu.c obj->has_bus_err_back = MMU_GP_REG_BUS_ERR_BACK_EN; obj 1210 drivers/iommu/omap-iommu.c obj->dev = &pdev->dev; obj 1211 drivers/iommu/omap-iommu.c obj->ctx = (void *)obj + sizeof(*obj); obj 1212 drivers/iommu/omap-iommu.c obj->cr_ctx = devm_kzalloc(&pdev->dev, obj 1213 drivers/iommu/omap-iommu.c sizeof(*obj->cr_ctx) * obj->nr_tlb_entries, obj 1215 drivers/iommu/omap-iommu.c if (!obj->cr_ctx) obj 1218 drivers/iommu/omap-iommu.c spin_lock_init(&obj->iommu_lock); obj 1219 drivers/iommu/omap-iommu.c spin_lock_init(&obj->page_table_lock); obj 1222 drivers/iommu/omap-iommu.c obj->regbase = devm_ioremap_resource(obj->dev, res); obj 1223 drivers/iommu/omap-iommu.c if (IS_ERR(obj->regbase)) obj 1224 drivers/iommu/omap-iommu.c return PTR_ERR(obj->regbase); obj 1226 drivers/iommu/omap-iommu.c err = omap_iommu_dra7_get_dsp_system_cfg(pdev, obj); obj 1234 drivers/iommu/omap-iommu.c err = devm_request_irq(obj->dev, irq, iommu_fault_handler, IRQF_SHARED, obj 1235 drivers/iommu/omap-iommu.c dev_name(obj->dev), obj); obj 1238 drivers/iommu/omap-iommu.c platform_set_drvdata(pdev, obj); obj 1241 drivers/iommu/omap-iommu.c obj->group = iommu_group_alloc(); obj 1242 drivers/iommu/omap-iommu.c if (IS_ERR(obj->group)) obj 1243 drivers/iommu/omap-iommu.c return PTR_ERR(obj->group); obj 1245 drivers/iommu/omap-iommu.c err = iommu_device_sysfs_add(&obj->iommu, obj->dev, NULL, obj 1246 drivers/iommu/omap-iommu.c obj->name); obj 1250 drivers/iommu/omap-iommu.c iommu_device_set_ops(&obj->iommu, &omap_iommu_ops); obj 1252 drivers/iommu/omap-iommu.c err = iommu_device_register(&obj->iommu); obj 1257 drivers/iommu/omap-iommu.c pm_runtime_enable(obj->dev); obj 1259 drivers/iommu/omap-iommu.c omap_iommu_debugfs_add(obj); obj 1261 drivers/iommu/omap-iommu.c dev_info(&pdev->dev, "%s registered\n", obj->name); obj 1274 drivers/iommu/omap-iommu.c iommu_device_sysfs_remove(&obj->iommu); obj 1276 drivers/iommu/omap-iommu.c iommu_group_put(obj->group); obj 1282 drivers/iommu/omap-iommu.c struct omap_iommu *obj = platform_get_drvdata(pdev); obj 1284 drivers/iommu/omap-iommu.c if (obj->group) { obj 1285 drivers/iommu/omap-iommu.c iommu_group_put(obj->group); obj 1286 drivers/iommu/omap-iommu.c 
obj->group = NULL; obj 1288 drivers/iommu/omap-iommu.c iommu_device_sysfs_remove(&obj->iommu); obj 1289 drivers/iommu/omap-iommu.c iommu_device_unregister(&obj->iommu); obj 1292 drivers/iommu/omap-iommu.c omap_iommu_debugfs_remove(obj); obj 1294 drivers/iommu/omap-iommu.c pm_runtime_disable(obj->dev); obj 1296 drivers/iommu/omap-iommu.c dev_info(&pdev->dev, "%s removed\n", obj->name); obj 16 drivers/iommu/omap-iommu.h #define for_each_iotlb_cr(obj, n, __i, cr) \ obj 18 drivers/iommu/omap-iommu.h (__i < (n)) && (cr = __iotlb_read_cr((obj), __i), true); \ obj 235 drivers/iommu/omap-iommu.h struct cr_regs __iotlb_read_cr(struct omap_iommu *obj, int n); obj 236 drivers/iommu/omap-iommu.h void iotlb_lock_get(struct omap_iommu *obj, struct iotlb_lock *l); obj 237 drivers/iommu/omap-iommu.h void iotlb_lock_set(struct omap_iommu *obj, struct iotlb_lock *l); obj 243 drivers/iommu/omap-iommu.h void omap_iommu_debugfs_add(struct omap_iommu *obj); obj 244 drivers/iommu/omap-iommu.h void omap_iommu_debugfs_remove(struct omap_iommu *obj); obj 249 drivers/iommu/omap-iommu.h static inline void omap_iommu_debugfs_add(struct omap_iommu *obj) { } obj 250 drivers/iommu/omap-iommu.h static inline void omap_iommu_debugfs_remove(struct omap_iommu *obj) { } obj 256 drivers/iommu/omap-iommu.h static inline u32 iommu_read_reg(struct omap_iommu *obj, size_t offs) obj 258 drivers/iommu/omap-iommu.h return __raw_readl(obj->regbase + offs); obj 261 drivers/iommu/omap-iommu.h static inline void iommu_write_reg(struct omap_iommu *obj, u32 val, size_t offs) obj 263 drivers/iommu/omap-iommu.h __raw_writel(val, obj->regbase + offs); obj 90 drivers/iommu/omap-iopgtable.h #define iopgd_offset(obj, da) ((obj)->iopgd + iopgd_index(da)) obj 1330 drivers/media/common/videobuf2/videobuf2-core.c static int vb2_req_prepare(struct media_request_object *obj) obj 1332 drivers/media/common/videobuf2/videobuf2-core.c struct vb2_buffer *vb = container_of(obj, struct vb2_buffer, req_obj); obj 1346 drivers/media/common/videobuf2/videobuf2-core.c static void vb2_req_unprepare(struct media_request_object *obj) obj 1348 drivers/media/common/videobuf2/videobuf2-core.c struct vb2_buffer *vb = container_of(obj, struct vb2_buffer, req_obj); obj 1360 drivers/media/common/videobuf2/videobuf2-core.c static void vb2_req_queue(struct media_request_object *obj) obj 1362 drivers/media/common/videobuf2/videobuf2-core.c struct vb2_buffer *vb = container_of(obj, struct vb2_buffer, req_obj); obj 1369 drivers/media/common/videobuf2/videobuf2-core.c static void vb2_req_unbind(struct media_request_object *obj) obj 1371 drivers/media/common/videobuf2/videobuf2-core.c struct vb2_buffer *vb = container_of(obj, struct vb2_buffer, req_obj); obj 1377 drivers/media/common/videobuf2/videobuf2-core.c static void vb2_req_release(struct media_request_object *obj) obj 1379 drivers/media/common/videobuf2/videobuf2-core.c struct vb2_buffer *vb = container_of(obj, struct vb2_buffer, req_obj); obj 1397 drivers/media/common/videobuf2/videobuf2-core.c bool vb2_request_object_is_buffer(struct media_request_object *obj) obj 1399 drivers/media/common/videobuf2/videobuf2-core.c return obj->ops == &vb2_core_req_ops; obj 1405 drivers/media/common/videobuf2/videobuf2-core.c struct media_request_object *obj; obj 1410 drivers/media/common/videobuf2/videobuf2-core.c list_for_each_entry(obj, &req->objects, list) obj 1411 drivers/media/common/videobuf2/videobuf2-core.c if (vb2_request_object_is_buffer(obj)) obj 1170 drivers/media/common/videobuf2/videobuf2-v4l2.c struct media_request_object 
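
The omap-iommu entries above all funnel through the iopgd_offset()/iopte two-level walk. Below is a minimal, standalone userspace sketch of that address split; the shift values follow the ARM short-descriptor layout the driver targets (1 MiB first-level sections, 4 KiB second-level pages), but the names and constants here are illustrative rather than copied from omap-iopgtable.h.

#include <stdint.h>
#include <stdio.h>

/* Illustrative constants; the driver's real ones live in omap-iopgtable.h. */
#define IOPGD_SHIFT 20                          /* 1 MiB per first-level entry */
#define IOPTE_SHIFT 12                          /* 4 KiB per second-level entry */
#define PTRS_PER_IOPTE (1 << (IOPGD_SHIFT - IOPTE_SHIFT))

static uint32_t iopgd_index(uint32_t da) { return da >> IOPGD_SHIFT; }
static uint32_t iopte_index(uint32_t da)
{
        return (da >> IOPTE_SHIFT) & (PTRS_PER_IOPTE - 1);
}

int main(void)
{
        uint32_t da = 0x12345678;               /* device address to translate */

        /* iopgd_offset(obj, da) is obj->iopgd + iopgd_index(da): index the
         * first-level table, then use iopte_index() within the second level. */
        printf("da %#x -> pgd[%u], pte[%u]\n", da,
               (unsigned)iopgd_index(da), (unsigned)iopte_index(da));
        return 0;
}
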
obj 1330 drivers/media/common/videobuf2/videobuf2-core.c static int vb2_req_prepare(struct media_request_object *obj)
obj 1332 drivers/media/common/videobuf2/videobuf2-core.c struct vb2_buffer *vb = container_of(obj, struct vb2_buffer, req_obj);
obj 1346 drivers/media/common/videobuf2/videobuf2-core.c static void vb2_req_unprepare(struct media_request_object *obj)
obj 1348 drivers/media/common/videobuf2/videobuf2-core.c struct vb2_buffer *vb = container_of(obj, struct vb2_buffer, req_obj);
obj 1360 drivers/media/common/videobuf2/videobuf2-core.c static void vb2_req_queue(struct media_request_object *obj)
obj 1362 drivers/media/common/videobuf2/videobuf2-core.c struct vb2_buffer *vb = container_of(obj, struct vb2_buffer, req_obj);
obj 1369 drivers/media/common/videobuf2/videobuf2-core.c static void vb2_req_unbind(struct media_request_object *obj)
obj 1371 drivers/media/common/videobuf2/videobuf2-core.c struct vb2_buffer *vb = container_of(obj, struct vb2_buffer, req_obj);
obj 1377 drivers/media/common/videobuf2/videobuf2-core.c static void vb2_req_release(struct media_request_object *obj)
obj 1379 drivers/media/common/videobuf2/videobuf2-core.c struct vb2_buffer *vb = container_of(obj, struct vb2_buffer, req_obj);
obj 1397 drivers/media/common/videobuf2/videobuf2-core.c bool vb2_request_object_is_buffer(struct media_request_object *obj)
obj 1399 drivers/media/common/videobuf2/videobuf2-core.c return obj->ops == &vb2_core_req_ops;
obj 1405 drivers/media/common/videobuf2/videobuf2-core.c struct media_request_object *obj;
obj 1410 drivers/media/common/videobuf2/videobuf2-core.c list_for_each_entry(obj, &req->objects, list)
obj 1411 drivers/media/common/videobuf2/videobuf2-core.c if (vb2_request_object_is_buffer(obj))
obj 1170 drivers/media/common/videobuf2/videobuf2-v4l2.c struct media_request_object *obj;
obj 1176 drivers/media/common/videobuf2/videobuf2-v4l2.c list_for_each_entry(obj, &req->objects, list) {
obj 1177 drivers/media/common/videobuf2/videobuf2-v4l2.c if (!obj->ops->prepare)
obj 1180 drivers/media/common/videobuf2/videobuf2-v4l2.c ret = obj->ops->prepare(obj);
obj 1186 drivers/media/common/videobuf2/videobuf2-v4l2.c list_for_each_entry_continue_reverse(obj, &req->objects, list)
obj 1187 drivers/media/common/videobuf2/videobuf2-v4l2.c if (obj->ops->unprepare)
obj 1188 drivers/media/common/videobuf2/videobuf2-v4l2.c obj->ops->unprepare(obj);
obj 1197 drivers/media/common/videobuf2/videobuf2-v4l2.c struct media_request_object *obj, *obj_safe;
obj 1207 drivers/media/common/videobuf2/videobuf2-v4l2.c list_for_each_entry_safe(obj, obj_safe, &req->objects, list)
obj 1208 drivers/media/common/videobuf2/videobuf2-v4l2.c if (obj->ops->queue)
obj 1209 drivers/media/common/videobuf2/videobuf2-v4l2.c obj->ops->queue(obj);
obj 41 drivers/media/mc/mc-request.c struct media_request_object *obj, *obj_safe;
obj 48 drivers/media/mc/mc-request.c list_for_each_entry_safe(obj, obj_safe, &req->objects, list) {
obj 49 drivers/media/mc/mc-request.c media_request_object_unbind(obj);
obj 50 drivers/media/mc/mc-request.c media_request_object_put(obj);
obj 350 drivers/media/mc/mc-request.c struct media_request_object *obj =
obj 352 drivers/media/mc/mc-request.c struct media_request *req = obj->req;
obj 355 drivers/media/mc/mc-request.c media_request_object_unbind(obj);
obj 356 drivers/media/mc/mc-request.c obj->ops->release(obj);
obj 364 drivers/media/mc/mc-request.c struct media_request_object *obj;
obj 372 drivers/media/mc/mc-request.c list_for_each_entry(obj, &req->objects, list) {
obj 373 drivers/media/mc/mc-request.c if (obj->ops == ops && obj->priv == priv) {
obj 374 drivers/media/mc/mc-request.c media_request_object_get(obj);
obj 375 drivers/media/mc/mc-request.c found = obj;
obj 384 drivers/media/mc/mc-request.c void media_request_object_put(struct media_request_object *obj)
obj 386 drivers/media/mc/mc-request.c kref_put(&obj->kref, media_request_object_release);
obj 390 drivers/media/mc/mc-request.c void media_request_object_init(struct media_request_object *obj)
obj 392 drivers/media/mc/mc-request.c obj->ops = NULL;
obj 393 drivers/media/mc/mc-request.c obj->req = NULL;
obj 394 drivers/media/mc/mc-request.c obj->priv = NULL;
obj 395 drivers/media/mc/mc-request.c obj->completed = false;
obj 396 drivers/media/mc/mc-request.c INIT_LIST_HEAD(&obj->list);
obj 397 drivers/media/mc/mc-request.c kref_init(&obj->kref);
obj 404 drivers/media/mc/mc-request.c struct media_request_object *obj)
obj 417 drivers/media/mc/mc-request.c obj->req = req;
obj 418 drivers/media/mc/mc-request.c obj->ops = ops;
obj 419 drivers/media/mc/mc-request.c obj->priv = priv;
obj 422 drivers/media/mc/mc-request.c list_add_tail(&obj->list, &req->objects);
obj 424 drivers/media/mc/mc-request.c list_add(&obj->list, &req->objects);
obj 434 drivers/media/mc/mc-request.c void media_request_object_unbind(struct media_request_object *obj)
obj 436 drivers/media/mc/mc-request.c struct media_request *req = obj->req;
obj 444 drivers/media/mc/mc-request.c list_del(&obj->list);
obj 445 drivers/media/mc/mc-request.c obj->req = NULL;
obj 454 drivers/media/mc/mc-request.c if (!obj->completed)
obj 472 drivers/media/mc/mc-request.c if (obj->ops->unbind)
obj 473 drivers/media/mc/mc-request.c obj->ops->unbind(obj);
obj 479 drivers/media/mc/mc-request.c void media_request_object_complete(struct media_request_object *obj)
obj 481 drivers/media/mc/mc-request.c struct media_request *req = obj->req;
obj 486 drivers/media/mc/mc-request.c if (obj->completed)
obj 488 drivers/media/mc/mc-request.c obj->completed = true;
obj 507 drivers/media/platform/ti-vpe/vpe.c #define GET_OFFSET_TOP(ctx, obj, reg) \
obj 508 drivers/media/platform/ti-vpe/vpe.c ((obj)->res->start - ctx->dev->res->start + reg)
obj 2013 drivers/media/platform/vicodec/vicodec-core.c struct media_request_object *obj;
obj 2019 drivers/media/platform/vicodec/vicodec-core.c list_for_each_entry(obj, &req->objects, list) {
obj 2022 drivers/media/platform/vicodec/vicodec-core.c if (vb2_request_object_is_buffer(obj)) {
obj 2023 drivers/media/platform/vicodec/vicodec-core.c vb = container_of(obj, struct vb2_buffer, req_obj);
obj 3139 drivers/media/v4l2-core/v4l2-ctrls.c static void v4l2_ctrl_request_queue(struct media_request_object *obj)
obj 3142 drivers/media/v4l2-core/v4l2-ctrls.c container_of(obj, struct v4l2_ctrl_handler, req_obj);
obj 3143 drivers/media/v4l2-core/v4l2-ctrls.c struct v4l2_ctrl_handler *main_hdl = obj->priv;
obj 3182 drivers/media/v4l2-core/v4l2-ctrls.c static void v4l2_ctrl_request_unbind(struct media_request_object *obj)
obj 3185 drivers/media/v4l2-core/v4l2-ctrls.c container_of(obj, struct v4l2_ctrl_handler, req_obj);
obj 3186 drivers/media/v4l2-core/v4l2-ctrls.c struct v4l2_ctrl_handler *main_hdl = obj->priv;
obj 3197 drivers/media/v4l2-core/v4l2-ctrls.c static void v4l2_ctrl_request_release(struct media_request_object *obj)
obj 3200 drivers/media/v4l2-core/v4l2-ctrls.c container_of(obj, struct v4l2_ctrl_handler, req_obj);
obj 3215 drivers/media/v4l2-core/v4l2-ctrls.c struct media_request_object *obj;
obj 3221 drivers/media/v4l2-core/v4l2-ctrls.c obj = media_request_object_find(req, &req_ops, parent);
obj 3222 drivers/media/v4l2-core/v4l2-ctrls.c if (obj)
obj 3223 drivers/media/v4l2-core/v4l2-ctrls.c return container_of(obj, struct v4l2_ctrl_handler, req_obj);
obj 3507 drivers/media/v4l2-core/v4l2-ctrls.c struct media_request_object *obj;
obj 3517 drivers/media/v4l2-core/v4l2-ctrls.c obj = media_request_object_find(req, &req_ops, hdl);
obj 3518 drivers/media/v4l2-core/v4l2-ctrls.c if (obj)
obj 3519 drivers/media/v4l2-core/v4l2-ctrls.c return obj;
obj 3527 drivers/media/v4l2-core/v4l2-ctrls.c obj = &new_hdl->req_obj;
obj 3537 drivers/media/v4l2-core/v4l2-ctrls.c media_request_object_get(obj);
obj 3538 drivers/media/v4l2-core/v4l2-ctrls.c return obj;
obj 3544 drivers/media/v4l2-core/v4l2-ctrls.c struct media_request_object *obj = NULL;
obj 3567 drivers/media/v4l2-core/v4l2-ctrls.c obj = v4l2_ctrls_find_req_obj(hdl, req, false);
obj 3568 drivers/media/v4l2-core/v4l2-ctrls.c if (IS_ERR(obj)) {
obj 3571 drivers/media/v4l2-core/v4l2-ctrls.c return PTR_ERR(obj);
obj 3574 drivers/media/v4l2-core/v4l2-ctrls.c hdl = container_of(obj, struct v4l2_ctrl_handler,
obj 3580 drivers/media/v4l2-core/v4l2-ctrls.c if (obj) {
obj 3582 drivers/media/v4l2-core/v4l2-ctrls.c media_request_object_put(obj);
obj 3913 drivers/media/v4l2-core/v4l2-ctrls.c struct media_request_object *obj = NULL;
obj 3945 drivers/media/v4l2-core/v4l2-ctrls.c obj = v4l2_ctrls_find_req_obj(hdl, req, set);
obj 3946 drivers/media/v4l2-core/v4l2-ctrls.c if (IS_ERR(obj)) {
obj 3953 drivers/media/v4l2-core/v4l2-ctrls.c return PTR_ERR(obj);
obj 3955 drivers/media/v4l2-core/v4l2-ctrls.c hdl = container_of(obj, struct v4l2_ctrl_handler,
obj 3965 drivers/media/v4l2-core/v4l2-ctrls.c if (obj) {
obj 3967 drivers/media/v4l2-core/v4l2-ctrls.c media_request_object_put(obj);
obj 4091 drivers/media/v4l2-core/v4l2-ctrls.c struct media_request_object *obj;
obj 4103 drivers/media/v4l2-core/v4l2-ctrls.c obj = media_request_object_find(req, &req_ops, main_hdl);
obj 4104 drivers/media/v4l2-core/v4l2-ctrls.c if (!obj)
obj 4106 drivers/media/v4l2-core/v4l2-ctrls.c hdl = container_of(obj, struct v4l2_ctrl_handler, req_obj);
obj 4141 drivers/media/v4l2-core/v4l2-ctrls.c media_request_object_complete(obj);
obj 4142 drivers/media/v4l2-core/v4l2-ctrls.c media_request_object_put(obj);
obj 4149 drivers/media/v4l2-core/v4l2-ctrls.c struct media_request_object *obj;
obj 4165 drivers/media/v4l2-core/v4l2-ctrls.c obj = media_request_object_find(req, &req_ops, main_hdl);
obj 4166 drivers/media/v4l2-core/v4l2-ctrls.c if (!obj)
obj 4168 drivers/media/v4l2-core/v4l2-ctrls.c if (obj->completed) {
obj 4169 drivers/media/v4l2-core/v4l2-ctrls.c media_request_object_put(obj);
obj 4172 drivers/media/v4l2-core/v4l2-ctrls.c hdl = container_of(obj, struct v4l2_ctrl_handler, req_obj);
obj 4245 drivers/media/v4l2-core/v4l2-ctrls.c media_request_object_put(obj);
obj 1001 drivers/media/v4l2-core/v4l2-mem2mem.c struct media_request_object *obj, *obj_safe;
obj 1012 drivers/media/v4l2-core/v4l2-mem2mem.c list_for_each_entry_safe(obj, obj_safe, &req->objects, list) {
obj 1016 drivers/media/v4l2-core/v4l2-mem2mem.c if (!obj->ops->queue)
obj 1019 drivers/media/v4l2-core/v4l2-mem2mem.c if (vb2_request_object_is_buffer(obj)) {
obj 1021 drivers/media/v4l2-core/v4l2-mem2mem.c vb = container_of(obj, struct vb2_buffer, req_obj);
obj 1035 drivers/media/v4l2-core/v4l2-mem2mem.c obj->ops->queue(obj);
obj 514 drivers/misc/cxl/sysfs.c #define to_cr(obj) container_of(obj, struct afu_config_record, kobj)
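
The media-request entries above keep recovering an enclosing object from a pointer to an embedded member: the vb2_req_* callbacks via req_obj, and wrappers such as to_cr() above (and to_slave() further down) via kobj. A minimal userspace sketch of that container_of() pattern, with pared-down stand-in types (the kernel's macro additionally type-checks via typeof):

#include <stddef.h>
#include <stdio.h>

/* Simplified container_of(): recover the enclosing struct from a
 * pointer to one of its members. */
#define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

/* Pared-down stand-ins for the vb2 types indexed above. */
struct media_request_object { int completed; };

struct vb2_buffer {
        int index;
        struct media_request_object req_obj;    /* embedded, not a pointer */
};

int main(void)
{
        struct vb2_buffer buf = { .index = 3 };
        struct media_request_object *obj = &buf.req_obj;

        /* What vb2_req_queue() does with the obj it is handed: */
        struct vb2_buffer *vb = container_of(obj, struct vb2_buffer, req_obj);
        printf("buffer index %d\n", vb->index); /* prints 3 */
        return 0;
}
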
obj 112 drivers/mmc/host/sdhci-acpi.c union acpi_object *obj;
obj 115 drivers/mmc/host/sdhci-acpi.c obj = acpi_evaluate_dsm(ACPI_HANDLE(dev), &intel_dsm_guid, 0, fn, NULL);
obj 116 drivers/mmc/host/sdhci-acpi.c if (!obj)
obj 119 drivers/mmc/host/sdhci-acpi.c if (obj->type == ACPI_TYPE_INTEGER) {
obj 120 drivers/mmc/host/sdhci-acpi.c *result = obj->integer.value;
obj 121 drivers/mmc/host/sdhci-acpi.c } else if (obj->type == ACPI_TYPE_BUFFER && obj->buffer.length > 0) {
obj 122 drivers/mmc/host/sdhci-acpi.c size_t len = min_t(size_t, obj->buffer.length, 4);
obj 125 drivers/mmc/host/sdhci-acpi.c memcpy(result, obj->buffer.pointer, len);
obj 128 drivers/mmc/host/sdhci-acpi.c __func__, fn, obj->type, obj->buffer.length);
obj 132 drivers/mmc/host/sdhci-acpi.c ACPI_FREE(obj);
obj 524 drivers/mmc/host/sdhci-pci-core.c union acpi_object *obj;
obj 528 drivers/mmc/host/sdhci-pci-core.c obj = acpi_evaluate_dsm(ACPI_HANDLE(dev), &intel_dsm_guid, 0, fn, NULL);
obj 529 drivers/mmc/host/sdhci-pci-core.c if (!obj)
obj 532 drivers/mmc/host/sdhci-pci-core.c if (obj->type != ACPI_TYPE_BUFFER || obj->buffer.length < 1) {
obj 537 drivers/mmc/host/sdhci-pci-core.c len = min_t(size_t, obj->buffer.length, 4);
obj 540 drivers/mmc/host/sdhci-pci-core.c memcpy(result, obj->buffer.pointer, len);
obj 542 drivers/mmc/host/sdhci-pci-core.c ACPI_FREE(obj);
obj 124 drivers/net/bonding/bond_sysfs_slave.c #define to_slave(obj) container_of(obj, struct slave, kobj)
obj 255 drivers/net/can/c_can/c_can.c static void c_can_obj_update(struct net_device *dev, int iface, u32 cmd, u32 obj)
obj 260 drivers/net/can/c_can/c_can.c priv->write_reg32(priv, reg, (cmd << 16) | obj);
obj 272 drivers/net/can/c_can/c_can.c u32 obj, u32 cmd)
obj 274 drivers/net/can/c_can/c_can.c c_can_obj_update(dev, iface, cmd, obj);
obj 278 drivers/net/can/c_can/c_can.c u32 obj, u32 cmd)
obj 280 drivers/net/can/c_can/c_can.c c_can_obj_update(dev, iface, cmd | IF_COMM_WR, obj);
obj 288 drivers/net/can/c_can/c_can.c static void c_can_inval_tx_object(struct net_device *dev, int iface, int obj)
obj 293 drivers/net/can/c_can/c_can.c c_can_object_put(dev, iface, obj, IF_COMM_INVAL);
obj 296 drivers/net/can/c_can/c_can.c static void c_can_inval_msg_object(struct net_device *dev, int iface, int obj)
obj 302 drivers/net/can/c_can/c_can.c c_can_inval_tx_object(dev, iface, obj);
obj 329 drivers/net/can/c_can/c_can.c u32 obj = idx + C_CAN_MSG_OBJ_TX_FIRST;
obj 331 drivers/net/can/c_can/c_can.c c_can_inval_msg_object(dev, iface, obj);
obj 448 drivers/net/can/c_can/c_can.c u32 obj, u32 mask, u32 id, u32 mcont)
obj 459 drivers/net/can/c_can/c_can.c c_can_object_put(dev, iface, obj, IF_COMM_RCV_SETUP);
obj 467 drivers/net/can/c_can/c_can.c u32 idx, obj;
obj 476 drivers/net/can/c_can/c_can.c obj = idx + C_CAN_MSG_OBJ_TX_FIRST;
obj 493 drivers/net/can/c_can/c_can.c c_can_object_put(dev, IF_TX, obj, IF_COMM_TX);
obj 736 drivers/net/can/c_can/c_can.c u32 idx, obj, pkts = 0, bytes = 0, pend, clr;
obj 743 drivers/net/can/c_can/c_can.c obj = idx + C_CAN_MSG_OBJ_TX_FIRST;
obj 744 drivers/net/can/c_can/c_can.c c_can_inval_tx_object(dev, IF_RX, obj);
obj 796 drivers/net/can/c_can/c_can.c struct c_can_priv *priv, u32 obj)
obj 798 drivers/net/can/c_can/c_can.c c_can_object_get(dev, IF_RX, obj, priv->comm_rcv_high);
obj 802 drivers/net/can/c_can/c_can.c struct c_can_priv *priv, u32 obj)
obj 805 drivers/net/can/c_can/c_can.c c_can_object_get(dev, IF_RX, obj, IF_COMM_CLR_NEWDAT);
obj 811 drivers/net/can/c_can/c_can.c u32 pkts = 0, ctrl, obj;
obj 813 drivers/net/can/c_can/c_can.c while ((obj = ffs(pend)) && quota > 0) {
obj 814 drivers/net/can/c_can/c_can.c pend &= ~BIT(obj - 1);
obj 816 drivers/net/can/c_can/c_can.c c_can_rx_object_get(dev, priv, obj);
obj 820 drivers/net/can/c_can/c_can.c int n = c_can_handle_lost_msg_obj(dev, IF_RX, obj, ctrl);
obj 838 drivers/net/can/c_can/c_can.c c_can_rx_finalize(dev, priv, obj);
obj 1995 drivers/net/ethernet/broadcom/bnx2x/bnx2x.h struct bnx2x_vlan_mac_obj *obj, bool set,
obj 1999 drivers/net/ethernet/broadcom/bnx2x/bnx2x.h struct bnx2x_vlan_mac_obj *obj, bool set,
obj 8411 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c struct bnx2x_vlan_mac_obj *obj, bool set,
obj 8420 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c ramrod_param.vlan_mac_obj = obj;
obj 8449 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c struct bnx2x_vlan_mac_obj *obj, bool set,
obj 8458 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c ramrod_param.vlan_mac_obj = obj;
obj 4699 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c static int bnx2x_queue_set_pending(struct bnx2x_queue_sp_obj *obj,
obj 4713 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c set_bit(bit, &obj->pending);
obj 5144 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c struct bnx2x_queue_sp_obj *obj,
obj 5149 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c data->client_id = obj->cl_id;
obj 5152 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c data->func_id = obj->func_id;
obj 5291 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c struct bnx2x_queue_sp_obj *obj,
obj 5295 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c data->client_id = obj->cl_id;
obj 5659 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c struct bnx2x_queue_sp_obj *obj,
obj 5664 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c memset(obj, 0, sizeof(*obj));
obj 5669 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c memcpy(obj->cids, cids, sizeof(obj->cids[0]) * cid_cnt);
obj 5670 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c obj->max_cos = cid_cnt;
obj 5671 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c obj->cl_id = cl_id;
obj 5672 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c obj->func_id = func_id;
obj 5673 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c obj->rdata = rdata;
obj 5674 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c obj->rdata_mapping = rdata_mapping;
obj 5675 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c obj->type = type;
obj 5676 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c obj->next_state = BNX2X_Q_STATE_MAX;
obj 5679 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c obj->send_cmd = bnx2x_queue_send_cmd_e1x;
obj 5681 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c obj->send_cmd = bnx2x_queue_send_cmd_e2;
obj 5683 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c obj->check_transition = bnx2x_queue_chk_transition;
obj 5685 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c obj->complete_cmd = bnx2x_queue_comp_cmd;
obj 5686 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c obj->wait_comp = bnx2x_queue_wait_comp;
obj 5687 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c obj->set_pending = bnx2x_queue_set_pending;
obj 5692 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c struct bnx2x_queue_sp_obj *obj)
obj 5694 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c switch (obj->state) {
obj 6467 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c struct bnx2x_func_sp_obj *obj,
obj 6472 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c memset(obj, 0, sizeof(*obj));
obj 6474 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c mutex_init(&obj->one_pending_mutex);
obj 6476 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c obj->rdata = rdata;
obj 6477 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c obj->rdata_mapping = rdata_mapping;
obj 6478 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c obj->afex_rdata = afex_rdata;
obj 6479 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c obj->afex_rdata_mapping = afex_rdata_mapping;
obj 6480 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c obj->send_cmd = bnx2x_func_send_cmd;
obj 6481 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c obj->check_transition = bnx2x_func_chk_transition;
obj 6482 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c obj->complete_cmd = bnx2x_func_comp_cmd;
obj 6483 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c obj->wait_comp = bnx2x_func_wait_comp;
obj 6485 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c obj->drv = drv_iface;
obj 1391 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.h struct bnx2x_func_sp_obj *obj,
obj 1403 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.h struct bnx2x_queue_sp_obj *obj, u8 cl_id, u32 *cids,
obj 1411 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.h struct bnx2x_queue_sp_obj *obj);
obj 335 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sriov.c struct bnx2x_vlan_mac_obj *obj,
obj 342 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sriov.c read_lock = bnx2x_vlan_mac_h_read_lock(bp, obj);
obj 346 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sriov.c list_for_each(pos, &obj->head)
obj 350 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sriov.c bnx2x_vlan_mac_h_read_unlock(bp, obj);
obj 61 drivers/net/ethernet/hisilicon/hns/hns_dsaf_misc.c union acpi_object *obj;
obj 75 drivers/net/ethernet/hisilicon/hns/hns_dsaf_misc.c obj = acpi_evaluate_dsm(ACPI_HANDLE(mac_cb->dev),
obj 77 drivers/net/ethernet/hisilicon/hns/hns_dsaf_misc.c if (!obj) {
obj 83 drivers/net/ethernet/hisilicon/hns/hns_dsaf_misc.c ACPI_FREE(obj);
obj 91 drivers/net/ethernet/hisilicon/hns/hns_dsaf_misc.c union acpi_object *obj;
obj 102 drivers/net/ethernet/hisilicon/hns/hns_dsaf_misc.c obj = acpi_evaluate_dsm(ACPI_HANDLE(mac_cb->dev),
obj 104 drivers/net/ethernet/hisilicon/hns/hns_dsaf_misc.c if (!obj) {
obj 110 drivers/net/ethernet/hisilicon/hns/hns_dsaf_misc.c ACPI_FREE(obj);
obj 253 drivers/net/ethernet/hisilicon/hns/hns_dsaf_misc.c union acpi_object *obj;
obj 267 drivers/net/ethernet/hisilicon/hns/hns_dsaf_misc.c obj = acpi_evaluate_dsm(ACPI_HANDLE(dsaf_dev->dev),
obj 269 drivers/net/ethernet/hisilicon/hns/hns_dsaf_misc.c if (!obj) {
obj 275 drivers/net/ethernet/hisilicon/hns/hns_dsaf_misc.c ACPI_FREE(obj);
obj 541 drivers/net/ethernet/hisilicon/hns/hns_dsaf_misc.c union acpi_object *obj;
obj 551 drivers/net/ethernet/hisilicon/hns/hns_dsaf_misc.c obj = acpi_evaluate_dsm(ACPI_HANDLE(mac_cb->dev),
obj 555 drivers/net/ethernet/hisilicon/hns/hns_dsaf_misc.c if (!obj || obj->type != ACPI_TYPE_INTEGER)
obj 558 drivers/net/ethernet/hisilicon/hns/hns_dsaf_misc.c phy_if = obj->integer.value ?
obj 563 drivers/net/ethernet/hisilicon/hns/hns_dsaf_misc.c ACPI_FREE(obj);
obj 588 drivers/net/ethernet/hisilicon/hns/hns_dsaf_misc.c union acpi_object *obj;
obj 598 drivers/net/ethernet/hisilicon/hns/hns_dsaf_misc.c obj = acpi_evaluate_dsm(ACPI_HANDLE(mac_cb->dev),
obj 602 drivers/net/ethernet/hisilicon/hns/hns_dsaf_misc.c if (!obj || obj->type != ACPI_TYPE_INTEGER)
obj 605 drivers/net/ethernet/hisilicon/hns/hns_dsaf_misc.c *sfp_prsnt = obj->integer.value;
obj 607 drivers/net/ethernet/hisilicon/hns/hns_dsaf_misc.c ACPI_FREE(obj);
obj 680 drivers/net/ethernet/hisilicon/hns/hns_dsaf_misc.c union acpi_object *obj;
obj 692 drivers/net/ethernet/hisilicon/hns/hns_dsaf_misc.c obj = acpi_evaluate_dsm(ACPI_HANDLE(mac_cb->dsaf_dev->dev),
obj 695 drivers/net/ethernet/hisilicon/hns/hns_dsaf_misc.c if (!obj) {
obj 702 drivers/net/ethernet/hisilicon/hns/hns_dsaf_misc.c ACPI_FREE(obj);
obj 81 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c struct i40e_hmc_obj_info *obj, *full_obj;
obj 105 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c obj = &hw->hmc.hmc_obj[I40E_HMC_LAN_TX];
obj 106 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c obj->max_cnt = rd32(hw, I40E_GLHMC_LANQMAX);
obj 107 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c obj->cnt = txq_num;
obj 108 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c obj->base = 0;
obj 110 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c obj->size = BIT_ULL(size_exp);
obj 113 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c if (txq_num > obj->max_cnt) {
obj 116 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c txq_num, obj->max_cnt, ret_code);
obj 121 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c full_obj->max_cnt += obj->max_cnt;
obj 122 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c full_obj->cnt += obj->cnt;
obj 125 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c obj = &hw->hmc.hmc_obj[I40E_HMC_LAN_RX];
obj 126 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c obj->max_cnt = rd32(hw, I40E_GLHMC_LANQMAX);
obj 127 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c obj->cnt = rxq_num;
obj 128 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c obj->base = hw->hmc.hmc_obj[I40E_HMC_LAN_TX].base +
obj 131 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c obj->base = i40e_align_l2obj_base(obj->base);
obj 133 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c obj->size = BIT_ULL(size_exp);
obj 136 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c if (rxq_num > obj->max_cnt) {
obj 139 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c rxq_num, obj->max_cnt, ret_code);
obj 144 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c full_obj->max_cnt += obj->max_cnt;
obj 145 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c full_obj->cnt += obj->cnt;
obj 148 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c obj = &hw->hmc.hmc_obj[I40E_HMC_FCOE_CTX];
obj 149 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c obj->max_cnt = rd32(hw, I40E_GLHMC_FCOEMAX);
obj 150 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c obj->cnt = fcoe_cntx_num;
obj 151 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c obj->base = hw->hmc.hmc_obj[I40E_HMC_LAN_RX].base +
obj 154 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c obj->base = i40e_align_l2obj_base(obj->base);
obj 156 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c obj->size = BIT_ULL(size_exp);
obj 159 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c if (fcoe_cntx_num > obj->max_cnt) {
obj 162 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c fcoe_cntx_num, obj->max_cnt, ret_code);
obj 167 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c full_obj->max_cnt += obj->max_cnt;
obj 168 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c full_obj->cnt += obj->cnt;
obj 171 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c obj = &hw->hmc.hmc_obj[I40E_HMC_FCOE_FILT];
obj 172 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c obj->max_cnt = rd32(hw, I40E_GLHMC_FCOEFMAX);
obj 173 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c obj->cnt = fcoe_filt_num;
obj 174 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c obj->base = hw->hmc.hmc_obj[I40E_HMC_FCOE_CTX].base +
obj 177 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c obj->base = i40e_align_l2obj_base(obj->base);
obj 179 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c obj->size = BIT_ULL(size_exp);
obj 182 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c if (fcoe_filt_num > obj->max_cnt) {
obj 185 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c fcoe_filt_num, obj->max_cnt, ret_code);
obj 190 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c full_obj->max_cnt += obj->max_cnt;
obj 191 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c full_obj->cnt += obj->cnt;
obj 444 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c struct i40e_hmc_obj_info *obj;
obj 486 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c obj = &hw->hmc.hmc_obj[I40E_HMC_LAN_TX];
obj 488 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c (u32)((obj->base & I40E_GLHMC_LANTXBASE_FPMLANTXBASE_MASK) / 512));
obj 489 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c wr32(hw, I40E_GLHMC_LANTXCNT(hmc_fn_id), obj->cnt);
obj 492 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c obj = &hw->hmc.hmc_obj[I40E_HMC_LAN_RX];
obj 494 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c (u32)((obj->base & I40E_GLHMC_LANRXBASE_FPMLANRXBASE_MASK) / 512));
obj 495 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c wr32(hw, I40E_GLHMC_LANRXCNT(hmc_fn_id), obj->cnt);
obj 498 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c obj = &hw->hmc.hmc_obj[I40E_HMC_FCOE_CTX];
obj 500 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c (u32)((obj->base & I40E_GLHMC_FCOEDDPBASE_FPMFCOEDDPBASE_MASK) / 512));
obj 501 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c wr32(hw, I40E_GLHMC_FCOEDDPCNT(hmc_fn_id), obj->cnt);
obj 504 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c obj = &hw->hmc.hmc_obj[I40E_HMC_FCOE_FILT];
obj 506 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c (u32)((obj->base & I40E_GLHMC_FCOEFBASE_FPMFCOEFBASE_MASK) / 512));
obj 507 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c wr32(hw, I40E_GLHMC_FCOEFCNT(hmc_fn_id), obj->cnt);
obj 46 drivers/net/ethernet/mellanox/mlx4/alloc.c u32 obj;
obj 50 drivers/net/ethernet/mellanox/mlx4/alloc.c obj = find_next_zero_bit(bitmap->table, bitmap->max, bitmap->last);
obj 51 drivers/net/ethernet/mellanox/mlx4/alloc.c if (obj >= bitmap->max) {
obj 54 drivers/net/ethernet/mellanox/mlx4/alloc.c obj = find_first_zero_bit(bitmap->table, bitmap->max);
obj 57 drivers/net/ethernet/mellanox/mlx4/alloc.c if (obj < bitmap->max) {
obj 58 drivers/net/ethernet/mellanox/mlx4/alloc.c set_bit(obj, bitmap->table);
obj 59 drivers/net/ethernet/mellanox/mlx4/alloc.c bitmap->last = (obj + 1);
obj 62 drivers/net/ethernet/mellanox/mlx4/alloc.c obj |= bitmap->top;
obj 64 drivers/net/ethernet/mellanox/mlx4/alloc.c obj = -1;
obj 66 drivers/net/ethernet/mellanox/mlx4/alloc.c if (obj != -1)
obj 71 drivers/net/ethernet/mellanox/mlx4/alloc.c return obj;
obj 74 drivers/net/ethernet/mellanox/mlx4/alloc.c void mlx4_bitmap_free(struct mlx4_bitmap *bitmap, u32 obj, int use_rr)
obj 76 drivers/net/ethernet/mellanox/mlx4/alloc.c mlx4_bitmap_free_range(bitmap, obj, 1, use_rr);
obj 112 drivers/net/ethernet/mellanox/mlx4/alloc.c u32 obj;
obj 119 drivers/net/ethernet/mellanox/mlx4/alloc.c obj = find_aligned_range(bitmap->table, bitmap->last,
obj 121 drivers/net/ethernet/mellanox/mlx4/alloc.c if (obj >= bitmap->max) {
obj 124 drivers/net/ethernet/mellanox/mlx4/alloc.c obj = find_aligned_range(bitmap->table, 0, bitmap->max,
obj 128 drivers/net/ethernet/mellanox/mlx4/alloc.c if (obj < bitmap->max) {
obj 129 drivers/net/ethernet/mellanox/mlx4/alloc.c bitmap_set(bitmap->table, obj, cnt);
obj 130 drivers/net/ethernet/mellanox/mlx4/alloc.c if (obj == bitmap->last) {
obj 131 drivers/net/ethernet/mellanox/mlx4/alloc.c bitmap->last = (obj + cnt);
obj 135 drivers/net/ethernet/mellanox/mlx4/alloc.c obj |= bitmap->top;
obj 137 drivers/net/ethernet/mellanox/mlx4/alloc.c obj = -1;
obj 139 drivers/net/ethernet/mellanox/mlx4/alloc.c if (obj != -1)
obj 144 drivers/net/ethernet/mellanox/mlx4/alloc.c return obj;
obj 152 drivers/net/ethernet/mellanox/mlx4/alloc.c static u32 mlx4_bitmap_masked_value(struct mlx4_bitmap *bitmap, u32 obj)
obj 154 drivers/net/ethernet/mellanox/mlx4/alloc.c return obj & (bitmap->max + bitmap->reserved_top - 1);
obj 157 drivers/net/ethernet/mellanox/mlx4/alloc.c void mlx4_bitmap_free_range(struct mlx4_bitmap *bitmap, u32 obj, int cnt,
obj 160 drivers/net/ethernet/mellanox/mlx4/alloc.c obj &= bitmap->max + bitmap->reserved_top - 1;
obj 164 drivers/net/ethernet/mellanox/mlx4/alloc.c bitmap->last = min(bitmap->last, obj);
obj 168 drivers/net/ethernet/mellanox/mlx4/alloc.c bitmap_clear(bitmap->table, obj, cnt);
obj 418 drivers/net/ethernet/mellanox/mlx4/alloc.c static void __mlx4_free_from_zone(struct mlx4_zone_entry *zone, u32 obj,
obj 421 drivers/net/ethernet/mellanox/mlx4/alloc.c mlx4_bitmap_free_range(zone->bitmap, obj - zone->offset, count, zone->use_rr);
obj 479 drivers/net/ethernet/mellanox/mlx4/alloc.c struct mlx4_zone_allocator *zones, u32 obj)
obj 491 drivers/net/ethernet/mellanox/mlx4/alloc.c if (obj >= zone->offset) {
obj 492 drivers/net/ethernet/mellanox/mlx4/alloc.c u32 mobj = (obj - zone->offset) & zones->mask;
obj 529 drivers/net/ethernet/mellanox/mlx4/alloc.c u32 mlx4_zone_free_entries(struct mlx4_zone_allocator *zones, u32 uid, u32 obj, u32 count)
obj 543 drivers/net/ethernet/mellanox/mlx4/alloc.c __mlx4_free_from_zone(zone, obj, count);
obj 551 drivers/net/ethernet/mellanox/mlx4/alloc.c u32 mlx4_zone_free_entries_unique(struct mlx4_zone_allocator *zones, u32 obj, u32 count)
obj 561 drivers/net/ethernet/mellanox/mlx4/alloc.c zone = __mlx4_find_zone_by_uid_unique(zones, obj);
obj 568 drivers/net/ethernet/mellanox/mlx4/alloc.c __mlx4_free_from_zone(zone, obj, count);
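
The mlx4/alloc.c entries above show a round-robin bitmap allocator: scan for a free bit starting at bitmap->last, wrap once to the start, and remember where the search stopped so successive allocations spread across the range. A toy, single-threaded restatement of that policy (the kernel version additionally ORs in bitmap->top, masks the result, and holds bitmap->lock; find_next_zero_bit() is replaced here by a plain array scan for clarity):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define MAX_OBJS 8

static bool table[MAX_OBJS];    /* one flag per object, true = in use */
static uint32_t last;           /* where the previous search stopped */

static int bitmap_alloc(void)
{
        uint32_t i, obj;

        for (i = 0; i < MAX_OBJS; i++) {
                obj = (last + i) % MAX_OBJS;    /* wrap to the start once */
                if (!table[obj]) {
                        table[obj] = true;
                        last = obj + 1;         /* resume after this slot */
                        return (int)obj;
                }
        }
        return -1;                              /* bitmap full */
}

static void bitmap_free(uint32_t obj)
{
        table[obj] = false;
}

int main(void)
{
        printf("%d %d %d\n", bitmap_alloc(), bitmap_alloc(), bitmap_alloc());
        bitmap_free(1);
        printf("%d\n", bitmap_alloc());         /* 3, not 1: round-robin */
        return 0;
}
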
obj 258 drivers/net/ethernet/mellanox/mlx4/icm.c int mlx4_table_get(struct mlx4_dev *dev, struct mlx4_icm_table *table, u32 obj)
obj 260 drivers/net/ethernet/mellanox/mlx4/icm.c u32 i = (obj & (table->num_obj - 1)) /
obj 294 drivers/net/ethernet/mellanox/mlx4/icm.c void mlx4_table_put(struct mlx4_dev *dev, struct mlx4_icm_table *table, u32 obj)
obj 299 drivers/net/ethernet/mellanox/mlx4/icm.c i = (obj & (table->num_obj - 1)) / (MLX4_TABLE_CHUNK_SIZE / table->obj_size);
obj 314 drivers/net/ethernet/mellanox/mlx4/icm.c void *mlx4_table_find(struct mlx4_icm_table *table, u32 obj,
obj 328 drivers/net/ethernet/mellanox/mlx4/icm.c idx = (u64) (obj & (table->num_obj - 1)) * table->obj_size;
obj 84 drivers/net/ethernet/mellanox/mlx4/icm.h int mlx4_table_get(struct mlx4_dev *dev, struct mlx4_icm_table *table, u32 obj);
obj 85 drivers/net/ethernet/mellanox/mlx4/icm.h void mlx4_table_put(struct mlx4_dev *dev, struct mlx4_icm_table *table, u32 obj);
obj 94 drivers/net/ethernet/mellanox/mlx4/icm.h void *mlx4_table_find(struct mlx4_icm_table *table, u32 obj, dma_addr_t *dma_handle);
obj 939 drivers/net/ethernet/mellanox/mlx4/mlx4.h void mlx4_bitmap_free(struct mlx4_bitmap *bitmap, u32 obj, int use_rr);
obj 942 drivers/net/ethernet/mellanox/mlx4/mlx4.h void mlx4_bitmap_free_range(struct mlx4_bitmap *bitmap, u32 obj, int cnt,
obj 1474 drivers/net/ethernet/mellanox/mlx4/mlx4.h u32 uid, u32 obj, u32 count);
obj 1480 drivers/net/ethernet/mellanox/mlx4/mlx4.h u32 mlx4_zone_free_entries_unique(struct mlx4_zone_allocator *zones, u32 obj, u32 count);
obj 16 drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c void *obj, *key_p;
obj 19 drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c obj = MLX5_ADDR_OF(create_encryption_key_in, in, encryption_key_object);
obj 20 drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c key_p = MLX5_ADDR_OF(encryption_key_obj, obj, key);
obj 43 drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c MLX5_SET(encryption_key_obj, obj, key_size, general_obj_key_size);
obj 44 drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c MLX5_SET(encryption_key_obj, obj, key_type,
obj 50 drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c MLX5_SET(encryption_key_obj, obj, pd, mdev->mlx5e_res.pdn);
obj 1205 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_erp.c const void *obj)
obj 1208 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_erp.c const struct mlxsw_sp_acl_erp_key *key = obj;
obj 1231 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_erp.c void *obj)
obj 1236 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_erp.c struct mlxsw_sp_acl_erp_key *key = obj;
obj 1284 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_erp.c static void *mlxsw_sp_acl_erp_root_create(void *priv, void *obj,
obj 1289 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_erp.c struct mlxsw_sp_acl_erp_key *key = obj;
obj 685 drivers/net/ethernet/mellanox/mlxsw/spectrum_ptp.c void *obj;
obj 692 drivers/net/ethernet/mellanox/mlxsw/spectrum_ptp.c while ((obj = rhashtable_walk_next(&iter))) {
obj 693 drivers/net/ethernet/mellanox/mlxsw/spectrum_ptp.c if (IS_ERR(obj))
obj 696 drivers/net/ethernet/mellanox/mlxsw/spectrum_ptp.c unmatched = obj;
obj 1146 drivers/net/ethernet/mellanox/mlxsw/spectrum_switchdev.c struct net_device *orig_dev = vlan->obj.orig_dev;
obj 1624 drivers/net/ethernet/mellanox/mlxsw/spectrum_switchdev.c struct net_device *orig_dev = mdb->obj.orig_dev;
obj 1746 drivers/net/ethernet/mellanox/mlxsw/spectrum_switchdev.c const struct switchdev_obj *obj,
obj 1754 drivers/net/ethernet/mellanox/mlxsw/spectrum_switchdev.c switch (obj->id) {
obj 1756 drivers/net/ethernet/mellanox/mlxsw/spectrum_switchdev.c vlan = SWITCHDEV_OBJ_PORT_VLAN(obj);
obj 1771 drivers/net/ethernet/mellanox/mlxsw/spectrum_switchdev.c SWITCHDEV_OBJ_PORT_MDB(obj),
obj 1803 drivers/net/ethernet/mellanox/mlxsw/spectrum_switchdev.c struct net_device *orig_dev = vlan->obj.orig_dev;
obj 1849 drivers/net/ethernet/mellanox/mlxsw/spectrum_switchdev.c struct net_device *orig_dev = mdb->obj.orig_dev;
obj 1900 drivers/net/ethernet/mellanox/mlxsw/spectrum_switchdev.c const struct switchdev_obj *obj)
obj 1905 drivers/net/ethernet/mellanox/mlxsw/spectrum_switchdev.c switch (obj->id) {
obj 1908 drivers/net/ethernet/mellanox/mlxsw/spectrum_switchdev.c SWITCHDEV_OBJ_PORT_VLAN(obj));
obj 1912 drivers/net/ethernet/mellanox/mlxsw/spectrum_switchdev.c SWITCHDEV_OBJ_PORT_MDB(obj));
obj 3323 drivers/net/ethernet/mellanox/mlxsw/spectrum_switchdev.c SWITCHDEV_OBJ_PORT_VLAN(port_obj_info->obj);
obj 3374 drivers/net/ethernet/mellanox/mlxsw/spectrum_switchdev.c SWITCHDEV_OBJ_PORT_VLAN(port_obj_info->obj);
obj 3409 drivers/net/ethernet/mellanox/mlxsw/spectrum_switchdev.c switch (port_obj_info->obj->id) {
obj 3426 drivers/net/ethernet/mellanox/mlxsw/spectrum_switchdev.c switch (port_obj_info->obj->id) {
obj 1461 drivers/net/ethernet/mscc/ocelot.c const struct switchdev_obj *obj,
obj 1467 drivers/net/ethernet/mscc/ocelot.c switch (obj->id) {
obj 1470 drivers/net/ethernet/mscc/ocelot.c SWITCHDEV_OBJ_PORT_VLAN(obj),
obj 1474 drivers/net/ethernet/mscc/ocelot.c ret = ocelot_port_obj_add_mdb(dev, SWITCHDEV_OBJ_PORT_MDB(obj),
obj 1485 drivers/net/ethernet/mscc/ocelot.c const struct switchdev_obj *obj)
obj 1489 drivers/net/ethernet/mscc/ocelot.c switch (obj->id) {
obj 1492 drivers/net/ethernet/mscc/ocelot.c SWITCHDEV_OBJ_PORT_VLAN(obj));
obj 1495 drivers/net/ethernet/mscc/ocelot.c ret = ocelot_port_obj_del_mdb(dev, SWITCHDEV_OBJ_PORT_MDB(obj));
obj 447 drivers/net/ethernet/netronome/nfp/flower/metadata.c const void *obj)
obj 450 drivers/net/ethernet/netronome/nfp/flower/metadata.c const struct nfp_fl_payload *flow_entry = obj;
obj 1658 drivers/net/ethernet/rocker/rocker_main.c if (netif_is_bridge_master(vlan->obj.orig_dev))
obj 2102 drivers/net/ethernet/rocker/rocker_main.c const struct switchdev_obj *obj,
obj 2108 drivers/net/ethernet/rocker/rocker_main.c switch (obj->id) {
obj 2111 drivers/net/ethernet/rocker/rocker_main.c SWITCHDEV_OBJ_PORT_VLAN(obj),
obj 2123 drivers/net/ethernet/rocker/rocker_main.c const struct switchdev_obj *obj)
obj 2128 drivers/net/ethernet/rocker/rocker_main.c switch (obj->id) {
obj 2131 drivers/net/ethernet/rocker/rocker_main.c SWITCHDEV_OBJ_PORT_VLAN(obj));
obj 2850 drivers/net/ethernet/rocker/rocker_main.c err = rocker_port_obj_add(netdev, port_obj_info->obj,
obj 2854 drivers/net/ethernet/rocker/rocker_main.c err = rocker_port_obj_del(netdev, port_obj_info->obj);
obj 285 drivers/net/phy/mdio-xgene.c const union acpi_object *obj;
obj 291 drivers/net/phy/mdio-xgene.c if (acpi_dev_get_property(adev, "phy-channel", ACPI_TYPE_INTEGER, &obj))
obj 293 drivers/net/phy/mdio-xgene.c phy_addr = obj->integer.value;
obj 1235 drivers/net/usb/r8152.c union acpi_object *obj;
obj 1262 drivers/net/usb/r8152.c obj = (union acpi_object *)buffer.pointer;
obj 1265 drivers/net/usb/r8152.c if (obj->type != ACPI_TYPE_BUFFER || obj->string.length != 0x17) {
obj 1268 drivers/net/usb/r8152.c obj->type, obj->string.length);
obj 1271 drivers/net/usb/r8152.c if (strncmp(obj->string.pointer, "_AUXMAC_#", 9) != 0 ||
obj 1272 drivers/net/usb/r8152.c strncmp(obj->string.pointer + 0x15, "#", 1) != 0) {
obj 1277 drivers/net/usb/r8152.c ret = hex2bin(buf, obj->string.pointer + 9, 6);
obj 1290 drivers/net/usb/r8152.c kfree(obj);
obj 124 drivers/parisc/pdc_stable.c #define to_pdcspath_entry(obj) container_of(obj, struct pdcspath_entry, kobj)
obj 1114 drivers/pci/pci-acpi.c union acpi_object *obj;
obj 1130 drivers/pci/pci-acpi.c obj = acpi_evaluate_dsm(ACPI_HANDLE(bus->bridge), &pci_acpi_dsm_guid, 3,
obj 1132 drivers/pci/pci-acpi.c if (!obj)
obj 1135 drivers/pci/pci-acpi.c if (obj->type == ACPI_TYPE_INTEGER && obj->integer.value == 1) {
obj 1139 drivers/pci/pci-acpi.c ACPI_FREE(obj);
obj 1190 drivers/pci/pci-acpi.c union acpi_object *obj, *elements;
obj 1195 drivers/pci/pci-acpi.c obj = acpi_evaluate_dsm(handle, &pci_acpi_dsm_guid, 3,
obj 1197 drivers/pci/pci-acpi.c if (!obj)
obj 1200 drivers/pci/pci-acpi.c if (obj->type == ACPI_TYPE_PACKAGE && obj->package.count == 5) {
obj 1201 drivers/pci/pci-acpi.c elements = obj->package.elements;
obj 1213 drivers/pci/pci-acpi.c ACPI_FREE(obj);
obj 159 drivers/pci/pci-label.c static void dsm_label_utf16s_to_utf8s(union acpi_object *obj, char *buf)
obj 162 drivers/pci/pci-label.c len = utf16s_to_utf8s((const wchar_t *)obj->buffer.pointer,
obj 163 drivers/pci/pci-label.c obj->buffer.length,
obj 173 drivers/pci/pci-label.c union acpi_object *obj, *tmp;
obj 180 drivers/pci/pci-label.c obj = acpi_evaluate_dsm(handle, &pci_acpi_dsm_guid, 0x2,
obj 182 drivers/pci/pci-label.c if (!obj)
obj 185 drivers/pci/pci-label.c tmp = obj->package.elements;
obj 186 drivers/pci/pci-label.c if (obj->type == ACPI_TYPE_PACKAGE && obj->package.count == 2 &&
obj 207 drivers/pci/pci-label.c ACPI_FREE(obj);
obj 1480 drivers/perf/xgene_pmu.c const union acpi_object *obj;
obj 1508 drivers/perf/xgene_pmu.c ACPI_TYPE_INTEGER, &obj);
obj 1512 drivers/perf/xgene_pmu.c enable_bit = (int) obj->integer.value;
obj 265 drivers/platform/chrome/wilco_ec/event.c union acpi_object *obj;
obj 282 drivers/platform/chrome/wilco_ec/event.c obj = (union acpi_object *)event_buffer.pointer;
obj 283 drivers/platform/chrome/wilco_ec/event.c if (!obj) {
obj 288 drivers/platform/chrome/wilco_ec/event.c if (obj->type != ACPI_TYPE_BUFFER) {
obj 291 drivers/platform/chrome/wilco_ec/event.c kfree(obj);
obj 294 drivers/platform/chrome/wilco_ec/event.c if (obj->buffer.length < sizeof(struct ec_event)) {
obj 296 drivers/platform/chrome/wilco_ec/event.c obj->buffer.length, EC_ACPI_GET_EVENT);
obj 297 drivers/platform/chrome/wilco_ec/event.c kfree(obj);
obj 301 drivers/platform/chrome/wilco_ec/event.c enqueue_events(adev, obj->buffer.pointer, obj->buffer.length);
obj 302 drivers/platform/chrome/wilco_ec/event.c kfree(obj);
obj 811 drivers/platform/x86/acer-wmi.c union acpi_object *obj;
obj 820 drivers/platform/x86/acer-wmi.c obj = (union acpi_object *) out.pointer;
obj 821 drivers/platform/x86/acer-wmi.c if (obj && obj->type == ACPI_TYPE_BUFFER &&
obj 822 drivers/platform/x86/acer-wmi.c obj->buffer.length == sizeof(struct wmab_ret)) {
obj 823 drivers/platform/x86/acer-wmi.c ret = *((struct wmab_ret *) obj->buffer.pointer);
obj 863 drivers/platform/x86/acer-wmi.c union acpi_object *obj;
obj 886 drivers/platform/x86/acer-wmi.c obj = out.pointer;
obj 887 drivers/platform/x86/acer-wmi.c if (obj && obj->type == ACPI_TYPE_BUFFER &&
obj 888 drivers/platform/x86/acer-wmi.c obj->buffer.length == sizeof(struct wmab_ret)) {
obj 889 drivers/platform/x86/acer-wmi.c ret = *((struct wmab_ret *) obj->buffer.pointer);
obj 909 drivers/platform/x86/acer-wmi.c obj = (union acpi_object *) out.pointer;
obj 910 drivers/platform/x86/acer-wmi.c if (obj && obj->type == ACPI_TYPE_BUFFER
obj 911 drivers/platform/x86/acer-wmi.c && obj->buffer.length == sizeof(struct wmab_ret)) {
obj 912 drivers/platform/x86/acer-wmi.c ret = *((struct wmab_ret *) obj->buffer.pointer);
obj 951 drivers/platform/x86/acer-wmi.c union acpi_object *obj;
obj 960 drivers/platform/x86/acer-wmi.c obj = (union acpi_object *) result.pointer;
obj 961 drivers/platform/x86/acer-wmi.c if (obj) {
obj 962 drivers/platform/x86/acer-wmi.c if (obj->type == ACPI_TYPE_BUFFER &&
obj 963 drivers/platform/x86/acer-wmi.c (obj->buffer.length == sizeof(u32) ||
obj 964 drivers/platform/x86/acer-wmi.c obj->buffer.length == sizeof(u64))) {
obj 965 drivers/platform/x86/acer-wmi.c tmp = *((u32 *) obj->buffer.pointer);
obj 966 drivers/platform/x86/acer-wmi.c } else if (obj->type == ACPI_TYPE_INTEGER) {
obj 967 drivers/platform/x86/acer-wmi.c tmp = (u32) obj->integer.value;
obj 1063 drivers/platform/x86/acer-wmi.c union acpi_object *obj;
obj 1079 drivers/platform/x86/acer-wmi.c obj = output.pointer;
obj 1081 drivers/platform/x86/acer-wmi.c if (!obj)
obj 1083 drivers/platform/x86/acer-wmi.c else if (obj->type != ACPI_TYPE_BUFFER) {
obj 1084 drivers/platform/x86/acer-wmi.c kfree(obj);
obj 1087 drivers/platform/x86/acer-wmi.c if (obj->buffer.length != 8) {
obj 1088 drivers/platform/x86/acer-wmi.c pr_warn("Unknown buffer length %d\n", obj->buffer.length);
obj 1089 drivers/platform/x86/acer-wmi.c kfree(obj);
obj 1093 drivers/platform/x86/acer-wmi.c return_value = *((struct wmid3_gds_return_value *)obj->buffer.pointer);
obj 1094 drivers/platform/x86/acer-wmi.c kfree(obj);
obj 1131 drivers/platform/x86/acer-wmi.c union acpi_object *obj;
obj 1158 drivers/platform/x86/acer-wmi.c obj = output.pointer;
obj 1160 drivers/platform/x86/acer-wmi.c if (!obj)
obj 1162 drivers/platform/x86/acer-wmi.c else if (obj->type != ACPI_TYPE_BUFFER) {
obj 1163 drivers/platform/x86/acer-wmi.c kfree(obj);
obj 1166 drivers/platform/x86/acer-wmi.c if (obj->buffer.length != 8) {
obj 1167 drivers/platform/x86/acer-wmi.c pr_warn("Unknown buffer length %d\n", obj->buffer.length);
obj 1168 drivers/platform/x86/acer-wmi.c kfree(obj);
obj 1172 drivers/platform/x86/acer-wmi.c return_value = *((struct wmid3_gds_return_value *)obj->buffer.pointer);
obj 1173 drivers/platform/x86/acer-wmi.c kfree(obj);
obj 1189 drivers/platform/x86/acer-wmi.c obj = output2.pointer;
obj 1191 drivers/platform/x86/acer-wmi.c if (!obj)
obj 1193 drivers/platform/x86/acer-wmi.c else if (obj->type != ACPI_TYPE_BUFFER) {
obj 1194 drivers/platform/x86/acer-wmi.c kfree(obj);
obj 1197 drivers/platform/x86/acer-wmi.c if (obj->buffer.length != 4) {
obj 1198 drivers/platform/x86/acer-wmi.c pr_warn("Unknown buffer length %d\n", obj->buffer.length);
obj 1199 drivers/platform/x86/acer-wmi.c kfree(obj);
obj 1203 drivers/platform/x86/acer-wmi.c return_value = *((struct wmid3_gds_return_value *)obj->buffer.pointer);
obj 1204 drivers/platform/x86/acer-wmi.c kfree(obj);
obj 1266 drivers/platform/x86/acer-wmi.c union acpi_object *obj;
obj 1274 drivers/platform/x86/acer-wmi.c obj = (union acpi_object *) out.pointer;
obj 1275 drivers/platform/x86/acer-wmi.c if (obj) {
obj 1276 drivers/platform/x86/acer-wmi.c if (obj->type == ACPI_TYPE_BUFFER &&
obj 1277 drivers/platform/x86/acer-wmi.c (obj->buffer.length == sizeof(u32) ||
obj 1278 drivers/platform/x86/acer-wmi.c obj->buffer.length == sizeof(u64))) {
obj 1279 drivers/platform/x86/acer-wmi.c devices = *((u32 *) obj->buffer.pointer);
obj 1280 drivers/platform/x86/acer-wmi.c } else if (obj->type == ACPI_TYPE_INTEGER) {
obj 1281 drivers/platform/x86/acer-wmi.c devices = (u32) obj->integer.value;
obj 1709 drivers/platform/x86/acer-wmi.c union acpi_object *obj;
obj 1722 drivers/platform/x86/acer-wmi.c obj = (union acpi_object *)response.pointer;
obj 1724 drivers/platform/x86/acer-wmi.c if (!obj)
obj 1726 drivers/platform/x86/acer-wmi.c if (obj->type != ACPI_TYPE_BUFFER) {
obj 1727 drivers/platform/x86/acer-wmi.c pr_warn("Unknown response received %d\n", obj->type);
obj 1728 drivers/platform/x86/acer-wmi.c kfree(obj);
obj 1731 drivers/platform/x86/acer-wmi.c if (obj->buffer.length != 8) {
obj 1732 drivers/platform/x86/acer-wmi.c pr_warn("Unknown buffer length %d\n", obj->buffer.length);
obj 1733 drivers/platform/x86/acer-wmi.c kfree(obj);
obj 1737 drivers/platform/x86/acer-wmi.c return_value = *((struct event_return_value *)obj->buffer.pointer);
obj 1738 drivers/platform/x86/acer-wmi.c kfree(obj);
obj 1787 drivers/platform/x86/acer-wmi.c union acpi_object *obj;
obj 1796 drivers/platform/x86/acer-wmi.c obj = output.pointer;
obj 1798 drivers/platform/x86/acer-wmi.c if (!obj)
obj 1800 drivers/platform/x86/acer-wmi.c else if (obj->type != ACPI_TYPE_BUFFER) {
obj 1801 drivers/platform/x86/acer-wmi.c kfree(obj);
obj 1804 drivers/platform/x86/acer-wmi.c if (obj->buffer.length != 4) {
obj 1805 drivers/platform/x86/acer-wmi.c pr_warn("Unknown buffer length %d\n", obj->buffer.length);
obj 1806 drivers/platform/x86/acer-wmi.c kfree(obj);
obj 1810 drivers/platform/x86/acer-wmi.c *return_value = *((struct func_return_value *)obj->buffer.pointer);
obj 1811 drivers/platform/x86/acer-wmi.c kfree(obj);
obj 1977 drivers/platform/x86/acer-wmi.c union acpi_object *obj;
obj 1985 drivers/platform/x86/acer-wmi.c obj = (union acpi_object *) out.pointer;
obj 1986 drivers/platform/x86/acer-wmi.c if (obj) {
obj 1987 drivers/platform/x86/acer-wmi.c if (obj->type == ACPI_TYPE_BUFFER &&
obj 1988 drivers/platform/x86/acer-wmi.c (obj->buffer.length == sizeof(u32) ||
obj 1989 drivers/platform/x86/acer-wmi.c obj->buffer.length == sizeof(u64))) {
obj 1990 drivers/platform/x86/acer-wmi.c devices = *((u32 *) obj->buffer.pointer);
obj 1991 drivers/platform/x86/acer-wmi.c } else if (obj->type == ACPI_TYPE_INTEGER) {
obj 1992 drivers/platform/x86/acer-wmi.c devices = (u32) obj->integer.value;
union acpi_object *obj; obj 1940 drivers/platform/x86/asus-wmi.c obj = (union acpi_object *)response.pointer; obj 1942 drivers/platform/x86/asus-wmi.c if (obj && obj->type == ACPI_TYPE_INTEGER) obj 1943 drivers/platform/x86/asus-wmi.c code = (int)(obj->integer.value & WMI_EVENT_MASK); obj 1947 drivers/platform/x86/asus-wmi.c kfree(obj); obj 2329 drivers/platform/x86/asus-wmi.c union acpi_object *obj; obj 2339 drivers/platform/x86/asus-wmi.c obj = (union acpi_object *)output.pointer; obj 2340 drivers/platform/x86/asus-wmi.c if (obj && obj->type == ACPI_TYPE_INTEGER) obj 2343 drivers/platform/x86/asus-wmi.c (u32) obj->integer.value); obj 2347 drivers/platform/x86/asus-wmi.c obj ? obj->type : -1); obj 2349 drivers/platform/x86/asus-wmi.c kfree(obj); obj 172 drivers/platform/x86/classmate-laptop.c union acpi_object *obj; obj 173 drivers/platform/x86/classmate-laptop.c obj = output.pointer; obj 174 drivers/platform/x86/classmate-laptop.c locs = (int16_t *) obj->buffer.pointer; obj 515 drivers/platform/x86/classmate-laptop.c union acpi_object *obj; obj 516 drivers/platform/x86/classmate-laptop.c obj = output.pointer; obj 517 drivers/platform/x86/classmate-laptop.c locs = obj->buffer.pointer; obj 51 drivers/platform/x86/dell-smbios-wmi.c union acpi_object *obj; obj 66 drivers/platform/x86/dell-smbios-wmi.c obj = (union acpi_object *)output.pointer; obj 67 drivers/platform/x86/dell-smbios-wmi.c if (obj->type != ACPI_TYPE_BUFFER) { obj 68 drivers/platform/x86/dell-smbios-wmi.c dev_dbg(&wdev->dev, "received type: %d\n", obj->type); obj 69 drivers/platform/x86/dell-smbios-wmi.c if (obj->type == ACPI_TYPE_INTEGER) obj 71 drivers/platform/x86/dell-smbios-wmi.c obj->integer.value); obj 74 drivers/platform/x86/dell-smbios-wmi.c memcpy(&priv->buf->std, obj->buffer.pointer, obj->buffer.length); obj 76 drivers/platform/x86/dell-wmi-aio.c union acpi_object *obj; obj 86 drivers/platform/x86/dell-wmi-aio.c obj = (union acpi_object *)response.pointer; obj 87 drivers/platform/x86/dell-wmi-aio.c if (obj) { obj 90 drivers/platform/x86/dell-wmi-aio.c switch (obj->type) { obj 93 drivers/platform/x86/dell-wmi-aio.c scancode = obj->integer.value; obj 98 drivers/platform/x86/dell-wmi-aio.c if (dell_wmi_aio_event_check(obj->buffer.pointer, obj 99 drivers/platform/x86/dell-wmi-aio.c obj->buffer.length)) { obj 101 drivers/platform/x86/dell-wmi-aio.c obj->buffer.pointer; obj 106 drivers/platform/x86/dell-wmi-aio.c if (obj->buffer.pointer && obj 107 drivers/platform/x86/dell-wmi-aio.c obj->buffer.length > 0) obj 108 drivers/platform/x86/dell-wmi-aio.c scancode = obj->buffer.pointer[0]; obj 117 drivers/platform/x86/dell-wmi-aio.c kfree(obj); obj 104 drivers/platform/x86/dell-wmi-descriptor.c union acpi_object *obj = NULL; obj 109 drivers/platform/x86/dell-wmi-descriptor.c obj = wmidev_block_query(wdev, 0); obj 110 drivers/platform/x86/dell-wmi-descriptor.c if (!obj) { obj 116 drivers/platform/x86/dell-wmi-descriptor.c if (obj->type != ACPI_TYPE_BUFFER) { obj 126 drivers/platform/x86/dell-wmi-descriptor.c if (obj->buffer.length != 128) { obj 129 drivers/platform/x86/dell-wmi-descriptor.c obj->buffer.length); obj 135 drivers/platform/x86/dell-wmi-descriptor.c buffer = (u32 *)obj->buffer.pointer; obj 137 drivers/platform/x86/dell-wmi-descriptor.c if (strncmp(obj->string.pointer, "DELL WMI", 8) != 0) { obj 173 drivers/platform/x86/dell-wmi-descriptor.c kfree(obj); obj 55 drivers/platform/x86/dell-wmi-led.c union acpi_object *obj; obj 75 drivers/platform/x86/dell-wmi-led.c obj = output.pointer; obj 77 
drivers/platform/x86/dell-wmi-led.c if (!obj) obj 79 drivers/platform/x86/dell-wmi-led.c if (obj->type != ACPI_TYPE_BUFFER) { obj 80 drivers/platform/x86/dell-wmi-led.c kfree(obj); obj 84 drivers/platform/x86/dell-wmi-led.c bios_return = ((struct bios_args *)obj->buffer.pointer); obj 87 drivers/platform/x86/dell-wmi-led.c kfree(obj); obj 355 drivers/platform/x86/dell-wmi.c union acpi_object *obj) obj 362 drivers/platform/x86/dell-wmi.c if (obj->type != ACPI_TYPE_BUFFER) { obj 363 drivers/platform/x86/dell-wmi.c pr_warn("bad response type %x\n", obj->type); obj 368 drivers/platform/x86/dell-wmi.c obj->buffer.length, obj->buffer.pointer); obj 370 drivers/platform/x86/dell-wmi.c buffer_entry = (u16 *)obj->buffer.pointer; obj 371 drivers/platform/x86/dell-wmi.c buffer_size = obj->buffer.length/2; obj 213 drivers/platform/x86/hp-wmi.c union acpi_object *obj; obj 235 drivers/platform/x86/hp-wmi.c obj = output.pointer; obj 237 drivers/platform/x86/hp-wmi.c if (!obj) obj 240 drivers/platform/x86/hp-wmi.c if (obj->type != ACPI_TYPE_BUFFER) { obj 245 drivers/platform/x86/hp-wmi.c bios_return = (struct bios_return *)obj->buffer.pointer; obj 258 drivers/platform/x86/hp-wmi.c actual_outsize = min(outsize, (int)(obj->buffer.length - sizeof(*bios_return))); obj 259 drivers/platform/x86/hp-wmi.c memcpy(buffer, obj->buffer.pointer + sizeof(*bios_return), actual_outsize); obj 263 drivers/platform/x86/hp-wmi.c kfree(obj); obj 520 drivers/platform/x86/hp-wmi.c union acpi_object *obj; obj 531 drivers/platform/x86/hp-wmi.c obj = (union acpi_object *)response.pointer; obj 533 drivers/platform/x86/hp-wmi.c if (!obj) obj 535 drivers/platform/x86/hp-wmi.c if (obj->type != ACPI_TYPE_BUFFER) { obj 536 drivers/platform/x86/hp-wmi.c pr_info("Unknown response received %d\n", obj->type); obj 537 drivers/platform/x86/hp-wmi.c kfree(obj); obj 545 drivers/platform/x86/hp-wmi.c location = (u32 *)obj->buffer.pointer; obj 546 drivers/platform/x86/hp-wmi.c if (obj->buffer.length == 8) { obj 549 drivers/platform/x86/hp-wmi.c } else if (obj->buffer.length == 16) { obj 553 drivers/platform/x86/hp-wmi.c pr_info("Unknown buffer length %d\n", obj->buffer.length); obj 554 drivers/platform/x86/hp-wmi.c kfree(obj); obj 557 drivers/platform/x86/hp-wmi.c kfree(obj); obj 116 drivers/platform/x86/huawei-wmi.c union acpi_object *obj; obj 123 drivers/platform/x86/huawei-wmi.c obj = (union acpi_object *)response.pointer; obj 124 drivers/platform/x86/huawei-wmi.c if (obj && obj->type == ACPI_TYPE_INTEGER) obj 125 drivers/platform/x86/huawei-wmi.c code = obj->integer.value; obj 140 drivers/platform/x86/huawei-wmi.c union acpi_object *obj) obj 142 drivers/platform/x86/huawei-wmi.c if (obj->type == ACPI_TYPE_INTEGER) obj 143 drivers/platform/x86/huawei-wmi.c huawei_wmi_process_key(wdev, obj->integer.value); obj 145 drivers/platform/x86/huawei-wmi.c dev_info(&wdev->dev, "Bad response type %d\n", obj->type); obj 125 drivers/platform/x86/intel-hid.c union acpi_object *obj, argv4, req; obj 146 drivers/platform/x86/intel-hid.c obj = acpi_evaluate_dsm(handle, &intel_dsm_guid, 1, fn_index, &argv4); obj 147 drivers/platform/x86/intel-hid.c if (obj) { obj 150 drivers/platform/x86/intel-hid.c ACPI_FREE(obj); obj 166 drivers/platform/x86/intel-hid.c union acpi_object *obj; obj 179 drivers/platform/x86/intel-hid.c obj = acpi_evaluate_dsm_typed(handle, &intel_dsm_guid, obj 182 drivers/platform/x86/intel-hid.c if (obj) { obj 183 drivers/platform/x86/intel-hid.c *result = obj->integer.value; obj 187 drivers/platform/x86/intel-hid.c ACPI_FREE(obj); obj 201 
drivers/platform/x86/intel-hid.c union acpi_object *obj; obj 205 drivers/platform/x86/intel-hid.c obj = acpi_evaluate_dsm_typed(handle, &intel_dsm_guid, 1, 0, NULL, obj 207 drivers/platform/x86/intel-hid.c if (obj) { obj 208 drivers/platform/x86/intel-hid.c intel_hid_dsm_fn_mask = *obj->buffer.pointer; obj 209 drivers/platform/x86/intel-hid.c ACPI_FREE(obj); obj 122 drivers/platform/x86/intel-vbtn.c union acpi_object *obj; obj 133 drivers/platform/x86/intel-vbtn.c obj = vgbs_output.pointer; obj 134 drivers/platform/x86/intel-vbtn.c if (!(obj && obj->type == ACPI_TYPE_INTEGER)) obj 137 drivers/platform/x86/intel-vbtn.c m = !(obj->integer.value & TABLET_MODE_FLAG); obj 139 drivers/platform/x86/intel-vbtn.c m = (obj->integer.value & DOCK_MODE_FLAG) ? 1 : 0; obj 200 drivers/platform/x86/lg-laptop.c union acpi_object *obj; obj 211 drivers/platform/x86/lg-laptop.c obj = (union acpi_object *)response.pointer; obj 212 drivers/platform/x86/lg-laptop.c if (!obj) obj 215 drivers/platform/x86/lg-laptop.c if (obj->type == ACPI_TYPE_INTEGER) { obj 216 drivers/platform/x86/lg-laptop.c int eventcode = obj->integer.value; obj 225 drivers/platform/x86/lg-laptop.c pr_debug("Type: %i Eventcode: 0x%llx\n", obj->type, obj 226 drivers/platform/x86/lg-laptop.c obj->integer.value); obj 94 drivers/platform/x86/msi-wmi.c union acpi_object *obj; obj 100 drivers/platform/x86/msi-wmi.c obj = output.pointer; obj 102 drivers/platform/x86/msi-wmi.c if (!obj || obj->type != ACPI_TYPE_INTEGER) { obj 103 drivers/platform/x86/msi-wmi.c if (obj) { obj 105 drivers/platform/x86/msi-wmi.c "type: %d - buffer length:%d\n", obj->type, obj 106 drivers/platform/x86/msi-wmi.c obj->type == ACPI_TYPE_BUFFER ? obj 107 drivers/platform/x86/msi-wmi.c obj->buffer.length : 0); obj 109 drivers/platform/x86/msi-wmi.c kfree(obj); obj 112 drivers/platform/x86/msi-wmi.c *ret = obj->integer.value; obj 113 drivers/platform/x86/msi-wmi.c kfree(obj); obj 175 drivers/platform/x86/msi-wmi.c union acpi_object *obj; obj 184 drivers/platform/x86/msi-wmi.c obj = (union acpi_object *)response.pointer; obj 186 drivers/platform/x86/msi-wmi.c if (obj && obj->type == ACPI_TYPE_INTEGER) { obj 187 drivers/platform/x86/msi-wmi.c int eventcode = obj->integer.value; obj 34 drivers/platform/x86/peaq-wmi.c union acpi_object obj; obj 39 drivers/platform/x86/peaq-wmi.c struct acpi_buffer output = { sizeof(obj), &obj }; obj 47 drivers/platform/x86/peaq-wmi.c if (obj.type != ACPI_TYPE_INTEGER) { obj 53 drivers/platform/x86/peaq-wmi.c if (!obj.integer.value) obj 61 drivers/platform/x86/surface3-wmi.c union acpi_object *obj; obj 67 drivers/platform/x86/surface3-wmi.c obj = output.pointer; obj 69 drivers/platform/x86/surface3-wmi.c if (!obj || obj->type != ACPI_TYPE_INTEGER) { obj 70 drivers/platform/x86/surface3-wmi.c if (obj) { obj 72 drivers/platform/x86/surface3-wmi.c obj->type, obj 73 drivers/platform/x86/surface3-wmi.c obj->type == ACPI_TYPE_BUFFER ? 
obj 74 drivers/platform/x86/surface3-wmi.c obj->buffer.length : 0);
obj 79 drivers/platform/x86/surface3-wmi.c *ret = obj->integer.value;
obj 81 drivers/platform/x86/surface3-wmi.c kfree(obj);
obj 51 drivers/platform/x86/tc1100-wmi.c union acpi_object *obj;
obj 63 drivers/platform/x86/tc1100-wmi.c obj = (union acpi_object *) result.pointer;
obj 64 drivers/platform/x86/tc1100-wmi.c if (obj && obj->type == ACPI_TYPE_INTEGER) {
obj 65 drivers/platform/x86/tc1100-wmi.c tmp = obj->integer.value;
obj 6855 drivers/platform/x86/thinkpad_acpi.c union acpi_object *obj;
obj 6869 drivers/platform/x86/thinkpad_acpi.c obj = (union acpi_object *)buffer.pointer;
obj 6870 drivers/platform/x86/thinkpad_acpi.c if (!obj || (obj->type != ACPI_TYPE_PACKAGE)) {
obj 6875 drivers/platform/x86/thinkpad_acpi.c rc = obj->package.count;
obj 38 drivers/platform/x86/toshiba-wmi.c union acpi_object *obj;
obj 47 drivers/platform/x86/toshiba-wmi.c obj = (union acpi_object *)response.pointer;
obj 48 drivers/platform/x86/toshiba-wmi.c if (!obj)
obj 52 drivers/platform/x86/toshiba-wmi.c pr_debug("Unknown event received, obj type %x\n", obj->type);
obj 476 drivers/platform/x86/wmi.c union acpi_object *obj;
obj 485 drivers/platform/x86/wmi.c obj = (union acpi_object *)response.pointer;
obj 487 drivers/platform/x86/wmi.c if (!obj)
obj 491 drivers/platform/x86/wmi.c switch(obj->type) {
obj 493 drivers/platform/x86/wmi.c pr_cont("BUFFER_TYPE - length %d\n", obj->buffer.length);
obj 496 drivers/platform/x86/wmi.c pr_cont("STRING_TYPE - %s\n", obj->string.pointer);
obj 499 drivers/platform/x86/wmi.c pr_cont("INTEGER_TYPE - %llu\n", obj->integer.value);
obj 502 drivers/platform/x86/wmi.c pr_cont("PACKAGE_TYPE - %d elements\n", obj->package.count);
obj 505 drivers/platform/x86/wmi.c pr_cont("object type 0x%X\n", obj->type);
obj 507 drivers/platform/x86/wmi.c kfree(obj);
obj 1148 drivers/platform/x86/wmi.c union acpi_object *obj;
obj 1157 drivers/platform/x86/wmi.c obj = (union acpi_object *) out.pointer;
obj 1158 drivers/platform/x86/wmi.c if (!obj)
obj 1161 drivers/platform/x86/wmi.c if (obj->type != ACPI_TYPE_BUFFER) {
obj 1166 drivers/platform/x86/wmi.c gblock = (const struct guid_block *)obj->buffer.pointer;
obj 1167 drivers/platform/x86/wmi.c total = obj->buffer.length / sizeof(struct guid_block);
obj 2236 drivers/scsi/aacraid/aacraid.h __le32 obj; /* FT_FILESYS, etc. */
obj 883 drivers/scsi/hisi_sas/hisi_sas_v3_hw.c union acpi_object *obj;
obj 902 drivers/scsi/hisi_sas/hisi_sas_v3_hw.c obj = acpi_evaluate_dsm(ACPI_HANDLE(dev), &guid, 0,
obj 904 drivers/scsi/hisi_sas/hisi_sas_v3_hw.c if (!obj)
obj 907 drivers/scsi/hisi_sas/hisi_sas_v3_hw.c ACPI_FREE(obj);
obj 120 drivers/scsi/sd.h #define to_scsi_disk(obj) container_of(obj,struct scsi_disk,dev)
obj 222 drivers/scsi/st.c #define to_scsi_tape(obj) container_of(obj, struct scsi_tape, kref)
obj 112 drivers/soc/fsl/dpio/dpio-service.c struct dpaa2_io *obj = kmalloc(sizeof(*obj), GFP_KERNEL);
obj 114 drivers/soc/fsl/dpio/dpio-service.c if (!obj)
obj 119 drivers/soc/fsl/dpio/dpio-service.c kfree(obj);
obj 123 drivers/soc/fsl/dpio/dpio-service.c obj->dpio_desc = *desc;
obj 124 drivers/soc/fsl/dpio/dpio-service.c obj->swp_desc.cena_bar = obj->dpio_desc.regs_cena;
obj 125 drivers/soc/fsl/dpio/dpio-service.c obj->swp_desc.cinh_bar = obj->dpio_desc.regs_cinh;
obj 126 drivers/soc/fsl/dpio/dpio-service.c obj->swp_desc.qman_version = obj->dpio_desc.qman_version;
obj 127 drivers/soc/fsl/dpio/dpio-service.c obj->swp = qbman_swp_init(&obj->swp_desc);
obj 129 drivers/soc/fsl/dpio/dpio-service.c if (!obj->swp) {
obj 130 drivers/soc/fsl/dpio/dpio-service.c kfree(obj);
obj 134 drivers/soc/fsl/dpio/dpio-service.c INIT_LIST_HEAD(&obj->node);
obj 135 drivers/soc/fsl/dpio/dpio-service.c spin_lock_init(&obj->lock_mgmt_cmd);
obj 136 drivers/soc/fsl/dpio/dpio-service.c spin_lock_init(&obj->lock_notifications);
obj 137 drivers/soc/fsl/dpio/dpio-service.c INIT_LIST_HEAD(&obj->notifications);
obj 140 drivers/soc/fsl/dpio/dpio-service.c qbman_swp_interrupt_set_trigger(obj->swp,
obj 142 drivers/soc/fsl/dpio/dpio-service.c qbman_swp_interrupt_clear_status(obj->swp, 0xffffffff);
obj 143 drivers/soc/fsl/dpio/dpio-service.c if (obj->dpio_desc.receives_notifications)
obj 144 drivers/soc/fsl/dpio/dpio-service.c qbman_swp_push_set(obj->swp, 0, 1);
obj 147 drivers/soc/fsl/dpio/dpio-service.c list_add_tail(&obj->node, &dpio_list);
obj 149 drivers/soc/fsl/dpio/dpio-service.c dpio_by_cpu[desc->cpu] = obj;
obj 152 drivers/soc/fsl/dpio/dpio-service.c obj->dev = dev;
obj 154 drivers/soc/fsl/dpio/dpio-service.c return obj;
obj 186 drivers/soc/fsl/dpio/dpio-service.c irqreturn_t dpaa2_io_irq(struct dpaa2_io *obj)
obj 193 drivers/soc/fsl/dpio/dpio-service.c swp = obj->swp;
obj 1888 drivers/spi/spi.c const union acpi_object *obj;
obj 1893 drivers/spi/spi.c if (!acpi_dev_get_property(dev, "spiSclkPeriod", ACPI_TYPE_BUFFER, &obj)
obj 1894 drivers/spi/spi.c && obj->buffer.length >= 4)
obj 1895 drivers/spi/spi.c lookup->max_speed_hz = NSEC_PER_SEC / *(u32 *)obj->buffer.pointer;
obj 1897 drivers/spi/spi.c if (!acpi_dev_get_property(dev, "spiWordSize", ACPI_TYPE_BUFFER, &obj)
obj 1898 drivers/spi/spi.c && obj->buffer.length == 8)
obj 1899 drivers/spi/spi.c lookup->bits_per_word = *(u64 *)obj->buffer.pointer;
obj 1901 drivers/spi/spi.c if (!acpi_dev_get_property(dev, "spiBitOrder", ACPI_TYPE_BUFFER, &obj)
obj 1902 drivers/spi/spi.c && obj->buffer.length == 8 && !*(u64 *)obj->buffer.pointer)
obj 1905 drivers/spi/spi.c if (!acpi_dev_get_property(dev, "spiSPO", ACPI_TYPE_BUFFER, &obj)
obj 1906 drivers/spi/spi.c && obj->buffer.length == 8 && *(u64 *)obj->buffer.pointer)
obj 1909 drivers/spi/spi.c if (!acpi_dev_get_property(dev, "spiSPH", ACPI_TYPE_BUFFER, &obj)
obj 1910 drivers/spi/spi.c && obj->buffer.length == 8 && *(u64 *)obj->buffer.pointer)
obj 957 drivers/staging/fsl-dpaa2/ethsw/ethsw.c const struct switchdev_obj *obj,
obj 962
drivers/staging/fsl-dpaa2/ethsw/ethsw.c switch (obj->id) { obj 965 drivers/staging/fsl-dpaa2/ethsw/ethsw.c SWITCHDEV_OBJ_PORT_VLAN(obj), obj 970 drivers/staging/fsl-dpaa2/ethsw/ethsw.c SWITCHDEV_OBJ_PORT_MDB(obj), obj 1044 drivers/staging/fsl-dpaa2/ethsw/ethsw.c if (netif_is_bridge_master(vlan->obj.orig_dev)) obj 1079 drivers/staging/fsl-dpaa2/ethsw/ethsw.c const struct switchdev_obj *obj) obj 1083 drivers/staging/fsl-dpaa2/ethsw/ethsw.c switch (obj->id) { obj 1085 drivers/staging/fsl-dpaa2/ethsw/ethsw.c err = port_vlans_del(netdev, SWITCHDEV_OBJ_PORT_VLAN(obj)); obj 1088 drivers/staging/fsl-dpaa2/ethsw/ethsw.c err = port_mdb_del(netdev, SWITCHDEV_OBJ_PORT_MDB(obj)); obj 1290 drivers/staging/fsl-dpaa2/ethsw/ethsw.c err = swdev_port_obj_add(netdev, port_obj_info->obj, obj 1294 drivers/staging/fsl-dpaa2/ethsw/ethsw.c err = swdev_port_obj_del(netdev, port_obj_info->obj); obj 156 drivers/staging/media/sunxi/cedrus/cedrus.c struct media_request_object *obj; obj 163 drivers/staging/media/sunxi/cedrus/cedrus.c list_for_each_entry(obj, &req->objects, list) { obj 166 drivers/staging/media/sunxi/cedrus/cedrus.c if (vb2_request_object_is_buffer(obj)) { obj 167 drivers/staging/media/sunxi/cedrus/cedrus.c vb = container_of(obj, struct vb2_buffer, req_obj); obj 38 drivers/staging/rtl8188eu/core/rtw_cmd.c static int _rtw_enqueue_cmd(struct __queue *queue, struct cmd_obj *obj) obj 42 drivers/staging/rtl8188eu/core/rtw_cmd.c if (!obj) obj 47 drivers/staging/rtl8188eu/core/rtw_cmd.c list_add_tail(&obj->list, &queue->queue); obj 59 drivers/staging/rtl8188eu/core/rtw_cmd.c struct cmd_obj *obj; obj 62 drivers/staging/rtl8188eu/core/rtw_cmd.c obj = list_first_entry_or_null(&queue->queue, struct cmd_obj, list); obj 63 drivers/staging/rtl8188eu/core/rtw_cmd.c if (obj) obj 64 drivers/staging/rtl8188eu/core/rtw_cmd.c list_del_init(&obj->list); obj 67 drivers/staging/rtl8188eu/core/rtw_cmd.c return obj; obj 51 drivers/staging/rtl8188eu/include/rtw_cmd.h u32 rtw_enqueue_cmd(struct cmd_priv *pcmdpriv, struct cmd_obj *obj); obj 113 drivers/staging/rtl8712/rtl871x_cmd.c void r8712_enqueue_cmd(struct cmd_priv *pcmdpriv, struct cmd_obj *obj) obj 120 drivers/staging/rtl8712/rtl871x_cmd.c if (!obj) obj 124 drivers/staging/rtl8712/rtl871x_cmd.c list_add_tail(&obj->list, &queue->queue); obj 132 drivers/staging/rtl8712/rtl871x_cmd.c struct cmd_obj *obj; obj 135 drivers/staging/rtl8712/rtl871x_cmd.c obj = list_first_entry_or_null(&queue->queue, obj 137 drivers/staging/rtl8712/rtl871x_cmd.c if (obj) obj 138 drivers/staging/rtl8712/rtl871x_cmd.c list_del_init(&obj->list); obj 140 drivers/staging/rtl8712/rtl871x_cmd.c return obj; obj 143 drivers/staging/rtl8712/rtl871x_cmd.c void r8712_enqueue_cmd_ex(struct cmd_priv *pcmdpriv, struct cmd_obj *obj) obj 148 drivers/staging/rtl8712/rtl871x_cmd.c if (!obj) obj 154 drivers/staging/rtl8712/rtl871x_cmd.c list_add_tail(&obj->list, &queue->queue); obj 82 drivers/staging/rtl8712/rtl871x_cmd.h void r8712_enqueue_cmd(struct cmd_priv *pcmdpriv, struct cmd_obj *obj); obj 83 drivers/staging/rtl8712/rtl871x_cmd.h void r8712_enqueue_cmd_ex(struct cmd_priv *pcmdpriv, struct cmd_obj *obj); obj 260 drivers/staging/rtl8723bs/core/rtw_cmd.c int _rtw_enqueue_cmd(struct __queue *queue, struct cmd_obj *obj) obj 264 drivers/staging/rtl8723bs/core/rtw_cmd.c if (obj == NULL) obj 270 drivers/staging/rtl8723bs/core/rtw_cmd.c list_add_tail(&obj->list, &queue->queue); obj 282 drivers/staging/rtl8723bs/core/rtw_cmd.c struct cmd_obj *obj; obj 287 drivers/staging/rtl8723bs/core/rtw_cmd.c obj = NULL; obj 289 
drivers/staging/rtl8723bs/core/rtw_cmd.c obj = LIST_CONTAINOR(get_next(&(queue->queue)), struct cmd_obj, list); obj 290 drivers/staging/rtl8723bs/core/rtw_cmd.c list_del_init(&obj->list); obj 296 drivers/staging/rtl8723bs/core/rtw_cmd.c return obj; obj 15 drivers/staging/rtl8723bs/include/cmd_osdep.h int _rtw_enqueue_cmd(struct __queue *queue, struct cmd_obj *obj); obj 125 drivers/staging/rtl8723bs/include/rtw_cmd.h int rtw_enqueue_cmd(struct cmd_priv *pcmdpriv, struct cmd_obj *obj); obj 55 drivers/thermal/intel/int340x_thermal/int3403_thermal.c struct int3403_sensor *obj; obj 60 drivers/thermal/intel/int340x_thermal/int3403_thermal.c obj = priv->priv; obj 61 drivers/thermal/intel/int340x_thermal/int3403_thermal.c if (priv->type != INT3403_TYPE_SENSOR || !obj) obj 68 drivers/thermal/intel/int340x_thermal/int3403_thermal.c int340x_thermal_zone_device_update(obj->int340x_zone, obj 72 drivers/thermal/intel/int340x_thermal/int3403_thermal.c int340x_thermal_read_trips(obj->int340x_zone); obj 73 drivers/thermal/intel/int340x_thermal/int3403_thermal.c int340x_thermal_zone_device_update(obj->int340x_zone, obj 85 drivers/thermal/intel/int340x_thermal/int3403_thermal.c struct int3403_sensor *obj; obj 87 drivers/thermal/intel/int340x_thermal/int3403_thermal.c obj = devm_kzalloc(&priv->pdev->dev, sizeof(*obj), GFP_KERNEL); obj 88 drivers/thermal/intel/int340x_thermal/int3403_thermal.c if (!obj) obj 91 drivers/thermal/intel/int340x_thermal/int3403_thermal.c priv->priv = obj; obj 93 drivers/thermal/intel/int340x_thermal/int3403_thermal.c obj->int340x_zone = int340x_thermal_zone_add(priv->adev, NULL); obj 94 drivers/thermal/intel/int340x_thermal/int3403_thermal.c if (IS_ERR(obj->int340x_zone)) obj 95 drivers/thermal/intel/int340x_thermal/int3403_thermal.c return PTR_ERR(obj->int340x_zone); obj 106 drivers/thermal/intel/int340x_thermal/int3403_thermal.c int340x_thermal_zone_remove(obj->int340x_zone); obj 112 drivers/thermal/intel/int340x_thermal/int3403_thermal.c struct int3403_sensor *obj = priv->priv; obj 116 drivers/thermal/intel/int340x_thermal/int3403_thermal.c int340x_thermal_zone_remove(obj->int340x_zone); obj 126 drivers/thermal/intel/int340x_thermal/int3403_thermal.c struct int3403_cdev *obj = priv->priv; obj 128 drivers/thermal/intel/int340x_thermal/int3403_thermal.c *state = obj->max_state; obj 170 drivers/thermal/intel/int340x_thermal/int3403_thermal.c struct int3403_cdev *obj; obj 174 drivers/thermal/intel/int340x_thermal/int3403_thermal.c obj = devm_kzalloc(&priv->pdev->dev, sizeof(*obj), GFP_KERNEL); obj 175 drivers/thermal/intel/int340x_thermal/int3403_thermal.c if (!obj) obj 189 drivers/thermal/intel/int340x_thermal/int3403_thermal.c priv->priv = obj; obj 190 drivers/thermal/intel/int340x_thermal/int3403_thermal.c obj->max_state = p->package.count - 1; obj 191 drivers/thermal/intel/int340x_thermal/int3403_thermal.c obj->cdev = obj 194 drivers/thermal/intel/int340x_thermal/int3403_thermal.c if (IS_ERR(obj->cdev)) obj 195 drivers/thermal/intel/int340x_thermal/int3403_thermal.c result = PTR_ERR(obj->cdev); obj 205 drivers/thermal/intel/int340x_thermal/int3403_thermal.c struct int3403_cdev *obj = priv->priv; obj 207 drivers/thermal/intel/int340x_thermal/int3403_thermal.c thermal_cooling_device_unregister(obj->cdev); obj 368 drivers/usb/dwc3/dwc3-pci.c union acpi_object *obj; obj 378 drivers/usb/dwc3/dwc3-pci.c obj = acpi_evaluate_dsm(ACPI_HANDLE(&dwc->pci->dev), &dwc->guid, obj 380 drivers/usb/dwc3/dwc3-pci.c if (!obj) { obj 385 drivers/usb/dwc3/dwc3-pci.c ACPI_FREE(obj); obj 277 
drivers/usb/host/xhci-pci.c union acpi_object *obj;
obj 279 drivers/usb/host/xhci-pci.c obj = acpi_evaluate_dsm(ACPI_HANDLE(&dev->dev), &intel_dsm_guid, 3, 1,
obj 281 drivers/usb/host/xhci-pci.c ACPI_FREE(obj);
obj 182 drivers/usb/typec/tcpm/wcove.c union acpi_object *obj;
obj 189 drivers/usb/typec/tcpm/wcove.c obj = acpi_evaluate_dsm(ACPI_HANDLE(wcove->dev), &wcove->guid, 1, func,
obj 191 drivers/usb/typec/tcpm/wcove.c if (!obj) {
obj 196 drivers/usb/typec/tcpm/wcove.c ACPI_FREE(obj);
obj 28 drivers/usb/typec/ucsi/ucsi_acpi.c union acpi_object *obj;
obj 30 drivers/usb/typec/ucsi/ucsi_acpi.c obj = acpi_evaluate_dsm(ACPI_HANDLE(ua->dev), &ua->guid, 1, func,
obj 32 drivers/usb/typec/ucsi/ucsi_acpi.c if (!obj) {
obj 38 drivers/usb/typec/ucsi/ucsi_acpi.c ACPI_FREE(obj);
obj 101 drivers/xen/gntdev-dmabuf.c struct gntdev_dmabuf_wait_obj *obj;
obj 103 drivers/xen/gntdev-dmabuf.c obj = kzalloc(sizeof(*obj), GFP_KERNEL);
obj 104 drivers/xen/gntdev-dmabuf.c if (!obj)
obj 107 drivers/xen/gntdev-dmabuf.c init_completion(&obj->completion);
obj 108 drivers/xen/gntdev-dmabuf.c obj->gntdev_dmabuf = gntdev_dmabuf;
obj 111 drivers/xen/gntdev-dmabuf.c list_add(&obj->next, &priv->exp_wait_list);
obj 115 drivers/xen/gntdev-dmabuf.c return obj;
obj 119 drivers/xen/gntdev-dmabuf.c struct gntdev_dmabuf_wait_obj *obj)
obj 122 drivers/xen/gntdev-dmabuf.c list_del(&obj->next);
obj 124 drivers/xen/gntdev-dmabuf.c kfree(obj);
obj 127 drivers/xen/gntdev-dmabuf.c static int dmabuf_exp_wait_obj_wait(struct gntdev_dmabuf_wait_obj *obj,
obj 130 drivers/xen/gntdev-dmabuf.c if (wait_for_completion_timeout(&obj->completion,
obj 140 drivers/xen/gntdev-dmabuf.c struct gntdev_dmabuf_wait_obj *obj;
obj 142 drivers/xen/gntdev-dmabuf.c list_for_each_entry(obj, &priv->exp_wait_list, next)
obj 143 drivers/xen/gntdev-dmabuf.c if (obj->gntdev_dmabuf == gntdev_dmabuf) {
obj 145 drivers/xen/gntdev-dmabuf.c complete_all(&obj->completion);
obj 171 drivers/xen/gntdev-dmabuf.c struct gntdev_dmabuf_wait_obj *obj;
obj 189 drivers/xen/gntdev-dmabuf.c obj = dmabuf_exp_wait_obj_new(priv, gntdev_dmabuf);
obj 190 drivers/xen/gntdev-dmabuf.c if (IS_ERR(obj))
obj 191 drivers/xen/gntdev-dmabuf.c return PTR_ERR(obj);
obj 193 drivers/xen/gntdev-dmabuf.c ret = dmabuf_exp_wait_obj_wait(obj, wait_to_ms);
obj 194 drivers/xen/gntdev-dmabuf.c dmabuf_exp_wait_obj_free(priv, obj);
obj 142 drivers/xen/xen-acpi-cpuhotplug.c union acpi_object *obj;
obj 152 drivers/xen/xen-acpi-cpuhotplug.c obj = buffer.pointer;
obj 153 drivers/xen/xen-acpi-cpuhotplug.c if (obj->type != ACPI_TYPE_BUFFER ||
obj 154 drivers/xen/xen-acpi-cpuhotplug.c obj->buffer.length < sizeof(*lapic)) {
obj 159 drivers/xen/xen-acpi-cpuhotplug.c lapic = (struct acpi_madt_local_apic *)obj->buffer.pointer;
obj 126 fs/adfs/adfs.h int (*getnext)(struct adfs_dir *dir, struct object_info *obj);
obj 127 fs/adfs/adfs.h int (*update)(struct adfs_dir *dir, struct object_info *obj);
obj 128 fs/adfs/adfs.h int (*create)(struct adfs_dir *dir, struct object_info *obj);
obj 129 fs/adfs/adfs.h int (*remove)(struct adfs_dir *dir, struct object_info *obj);
obj 142 fs/adfs/adfs.h struct inode *adfs_iget(struct super_block *sb, struct object_info *obj);
obj 170 fs/adfs/adfs.h void adfs_object_fixup(struct adfs_dir *dir, struct object_info *obj);
obj 171 fs/adfs/adfs.h extern int adfs_dir_update(struct super_block *sb, struct object_info *obj,
obj 16 fs/adfs/dir.c void adfs_object_fixup(struct adfs_dir *dir, struct object_info *obj)
obj 29 fs/adfs/dir.c for (i = dots = 0; i < obj->name_len; i++)
obj 30 fs/adfs/dir.c if (obj->name[i] == '/') {
obj 31 fs/adfs/dir.c obj->name[i] = '.';
obj 35 fs/adfs/dir.c if (obj->name_len <= 2 && dots == obj->name_len)
obj 36 fs/adfs/dir.c obj->name[0] = '^';
obj 42 fs/adfs/dir.c if (!(obj->attr & ADFS_NDA_DIRECTORY) && ADFS_SB(dir->sb)->s_ftsuffix) {
obj 43 fs/adfs/dir.c u16 filetype = adfs_filetype(obj->loadaddr);
obj 46 fs/adfs/dir.c obj->name[obj->name_len++] = ',';
obj 47 fs/adfs/dir.c obj->name[obj->name_len++] = hex_asc_lo(filetype >> 8);
obj 48 fs/adfs/dir.c obj->name[obj->name_len++] = hex_asc_lo(filetype >> 4);
obj 49 fs/adfs/dir.c obj->name[obj->name_len++] = hex_asc_lo(filetype >> 0);
obj 60 fs/adfs/dir.c struct object_info obj;
obj 87 fs/adfs/dir.c while (ops->getnext(&dir, &obj) == 0) {
obj 88 fs/adfs/dir.c if (!dir_emit(ctx, obj.name, obj.name_len,
obj 89 fs/adfs/dir.c obj.indaddr, DT_UNKNOWN))
obj 103 fs/adfs/dir.c adfs_dir_update(struct super_block *sb, struct object_info *obj, int wait)
obj 111 fs/adfs/dir.c obj->indaddr, obj->parent_id);
obj 118 fs/adfs/dir.c ret = ops->read(sb, obj->parent_id, 0, &dir);
obj 123 fs/adfs/dir.c ret = ops->update(&dir, obj);
obj 161 fs/adfs/dir.c struct object_info *obj)
obj 182 fs/adfs/dir.c obj->parent_id = inode->i_ino;
obj 193 fs/adfs/dir.c while (ops->getnext(&dir, obj) == 0) {
obj 194 fs/adfs/dir.c if (!__adfs_compare(name, name_len, obj->name, obj->name_len)) {
obj 255 fs/adfs/dir.c struct object_info obj;
obj 258 fs/adfs/dir.c error = adfs_dir_lookup_byname(dir, &dentry->d_name, &obj);
obj 264 fs/adfs/dir.c inode = adfs_iget(dir->i_sb, &obj);
obj 193 fs/adfs/dir_f.c adfs_dir2obj(struct adfs_dir *dir, struct object_info *obj,
obj 202 fs/adfs/dir_f.c obj->name[name_len] = de->dirobname[name_len];
obj 205 fs/adfs/dir_f.c obj->name_len = name_len;
obj 206 fs/adfs/dir_f.c obj->indaddr = adfs_readval(de->dirinddiscadd, 3);
obj 207 fs/adfs/dir_f.c obj->loadaddr = adfs_readval(de->dirload, 4);
obj 208 fs/adfs/dir_f.c obj->execaddr = adfs_readval(de->direxec, 4);
obj 209 fs/adfs/dir_f.c obj->size = adfs_readval(de->dirlen, 4);
obj 210 fs/adfs/dir_f.c obj->attr = de->newdiratts;
obj 212 fs/adfs/dir_f.c adfs_object_fixup(dir, obj);
obj 219 fs/adfs/dir_f.c adfs_obj2dir(struct adfs_direntry *de, struct object_info *obj)
obj 221 fs/adfs/dir_f.c adfs_writeval(de->dirinddiscadd, 3, obj->indaddr);
obj 222 fs/adfs/dir_f.c adfs_writeval(de->dirload, 4, obj->loadaddr);
obj 223 fs/adfs/dir_f.c adfs_writeval(de->direxec, 4, obj->execaddr);
obj 224 fs/adfs/dir_f.c adfs_writeval(de->dirlen, 4, obj->size);
obj 225 fs/adfs/dir_f.c de->newdiratts = obj->attr;
obj 233 fs/adfs/dir_f.c __adfs_dir_get(struct adfs_dir *dir, int pos, struct object_info *obj)
obj 257 fs/adfs/dir_f.c adfs_dir2obj(dir, obj, &de);
obj 263 fs/adfs/dir_f.c __adfs_dir_put(struct adfs_dir *dir, int pos, struct object_info *obj)
obj 290 fs/adfs/dir_f.c adfs_obj2dir(&de, obj);
obj 314 fs/adfs/dir_f.c struct object_info obj;
obj 316 fs/adfs/dir_f.c if (!__adfs_dir_get(dir, pos, &obj))
obj 319 fs/adfs/dir_f.c if (obj.indaddr == indaddr) {
obj 356 fs/adfs/dir_f.c adfs_f_getnext(struct adfs_dir *dir, struct object_info *obj)
obj 360 fs/adfs/dir_f.c ret = __adfs_dir_get(dir, dir->pos, obj);
obj 368 fs/adfs/dir_f.c adfs_f_update(struct adfs_dir *dir, struct object_info *obj)
obj 373 fs/adfs/dir_f.c ret = adfs_dir_find_entry(dir, obj->indaddr);
obj 379 fs/adfs/dir_f.c __adfs_dir_put(dir, ret, obj);
obj 160 fs/adfs/dir_fplus.c adfs_fplus_getnext(struct adfs_dir *dir, struct object_info *obj)
obj 177 fs/adfs/dir_fplus.c obj->loadaddr = le32_to_cpu(bde.bigdirload);
obj 178
fs/adfs/dir_fplus.c obj->execaddr = le32_to_cpu(bde.bigdirexec); obj 179 fs/adfs/dir_fplus.c obj->size = le32_to_cpu(bde.bigdirlen); obj 180 fs/adfs/dir_fplus.c obj->indaddr = le32_to_cpu(bde.bigdirindaddr); obj 181 fs/adfs/dir_fplus.c obj->attr = le32_to_cpu(bde.bigdirattr); obj 182 fs/adfs/dir_fplus.c obj->name_len = le32_to_cpu(bde.bigdirobnamelen); obj 189 fs/adfs/dir_fplus.c dir_memcpy(dir, offset, obj->name, obj->name_len); obj 190 fs/adfs/dir_fplus.c adfs_object_fixup(dir, obj); obj 240 fs/adfs/inode.c adfs_iget(struct super_block *sb, struct object_info *obj) obj 250 fs/adfs/inode.c inode->i_ino = obj->indaddr; obj 251 fs/adfs/inode.c inode->i_size = obj->size; obj 262 fs/adfs/inode.c ADFS_I(inode)->parent_id = obj->parent_id; obj 263 fs/adfs/inode.c ADFS_I(inode)->loadaddr = obj->loadaddr; obj 264 fs/adfs/inode.c ADFS_I(inode)->execaddr = obj->execaddr; obj 265 fs/adfs/inode.c ADFS_I(inode)->attr = obj->attr; obj 353 fs/adfs/inode.c struct object_info obj; obj 356 fs/adfs/inode.c obj.indaddr = inode->i_ino; obj 357 fs/adfs/inode.c obj.name_len = 0; obj 358 fs/adfs/inode.c obj.parent_id = ADFS_I(inode)->parent_id; obj 359 fs/adfs/inode.c obj.loadaddr = ADFS_I(inode)->loadaddr; obj 360 fs/adfs/inode.c obj.execaddr = ADFS_I(inode)->execaddr; obj 361 fs/adfs/inode.c obj.attr = ADFS_I(inode)->attr; obj 362 fs/adfs/inode.c obj.size = inode->i_size; obj 364 fs/adfs/inode.c ret = adfs_dir_update(sb, &obj, wbc->sync_mode == WB_SYNC_ALL); obj 1119 fs/aio.c (void __user *)(unsigned long)iocb->ki_res.obj, obj 1830 fs/aio.c req->ki_res.obj = (u64)(unsigned long)user_iocb; obj 2016 fs/aio.c u64 obj = (u64)(unsigned long)iocb; obj 2030 fs/aio.c if (kiocb->ki_res.obj == obj) { obj 650 fs/ecryptfs/main.c void (*ctor)(void *obj); obj 74 fs/erofs/zdata.c atomic_set(&pcl->obj.refcount, 1); obj 172 fs/erofs/zdata.c pgoff_t index = pcl->obj.index + (pages - pcl->compressed_pages); obj 215 fs/erofs/zdata.c container_of(grp, struct z_erofs_pcluster, obj); obj 255 fs/erofs/zdata.c if (erofs_workgroup_try_to_freeze(&pcl->obj, 1)) { obj 265 fs/erofs/zdata.c erofs_workgroup_unfreeze(&pcl->obj, 1); obj 354 fs/erofs/zdata.c pcl = container_of(grp, struct z_erofs_pcluster, obj); obj 414 fs/erofs/zdata.c pcl->obj.index = map->m_pa >> PAGE_SHIFT; obj 441 fs/erofs/zdata.c err = erofs_register_workgroup(inode->i_sb, &pcl->obj, 0); obj 511 fs/erofs/zdata.c container_of(grp, struct z_erofs_pcluster, obj); obj 522 fs/erofs/zdata.c erofs_workgroup_put(&pcl->obj); obj 999 fs/erofs/zdata.c const pgoff_t index = pcl->obj.index; obj 1249 fs/erofs/zdata.c first_index = pcl->obj.index; obj 56 fs/erofs/zdata.h struct erofs_workgroup obj; obj 42 fs/fscache/object-list.c void fscache_objlist_add(struct fscache_object *obj) obj 47 fs/fscache/object-list.c ASSERT(RB_EMPTY_NODE(&obj->objlist_link)); obj 55 fs/fscache/object-list.c if (obj < xobj) obj 57 fs/fscache/object-list.c else if (obj > xobj) obj 63 fs/fscache/object-list.c rb_link_node(&obj->objlist_link, parent, p); obj 64 fs/fscache/object-list.c rb_insert_color(&obj->objlist_link, &fscache_object_list); obj 72 fs/fscache/object-list.c void fscache_objlist_remove(struct fscache_object *obj) obj 74 fs/fscache/object-list.c if (RB_EMPTY_NODE(&obj->objlist_link)) obj 80 fs/fscache/object-list.c rb_erase(&obj->objlist_link, &fscache_object_list); obj 90 fs/fscache/object-list.c struct fscache_object *pobj, *obj = NULL, *minobj = NULL; obj 108 fs/fscache/object-list.c obj = rb_entry(p, struct fscache_object, objlist_link); obj 109 fs/fscache/object-list.c if (pobj < obj) { obj 
110 fs/fscache/object-list.c if (!minobj || minobj > obj) obj 111 fs/fscache/object-list.c minobj = obj; obj 113 fs/fscache/object-list.c } else if (pobj > obj) { obj 116 fs/fscache/object-list.c minobj = obj; obj 119 fs/fscache/object-list.c obj = NULL; obj 124 fs/fscache/object-list.c else if (minobj != obj) obj 163 fs/fscache/object-list.c struct fscache_object *obj = v; obj 213 fs/fscache/object-list.c cookie = obj->cookie; obj 217 fs/fscache/object-list.c FILTER(fscache_object_is_active(obj) || obj 218 fs/fscache/object-list.c obj->n_ops != 0 || obj 219 fs/fscache/object-list.c obj->n_obj_ops != 0 || obj 220 fs/fscache/object-list.c obj->flags || obj 221 fs/fscache/object-list.c !list_empty(&obj->dependents), obj 223 fs/fscache/object-list.c FILTER(test_bit(FSCACHE_OBJECT_PENDING_WRITE, &obj->flags), obj 225 fs/fscache/object-list.c FILTER(atomic_read(&obj->n_reads), obj 227 fs/fscache/object-list.c FILTER(obj->events & obj->event_mask, obj 229 fs/fscache/object-list.c FILTER(work_busy(&obj->work), WORK, NOWORK); obj 234 fs/fscache/object-list.c obj->debug_id, obj 235 fs/fscache/object-list.c obj->parent ? obj->parent->debug_id : -1, obj 236 fs/fscache/object-list.c obj->state->short_name, obj 237 fs/fscache/object-list.c obj->n_children, obj 238 fs/fscache/object-list.c obj->n_ops, obj 239 fs/fscache/object-list.c obj->n_obj_ops, obj 240 fs/fscache/object-list.c obj->n_in_progress, obj 241 fs/fscache/object-list.c obj->n_exclusive, obj 242 fs/fscache/object-list.c atomic_read(&obj->n_reads), obj 243 fs/fscache/object-list.c obj->event_mask, obj 244 fs/fscache/object-list.c obj->events, obj 245 fs/fscache/object-list.c obj->flags, obj 246 fs/fscache/object-list.c work_busy(&obj->work)); obj 248 fs/fscache/object-list.c if (fscache_use_cookie(obj)) { obj 294 fs/fscache/object-list.c fscache_unuse_cookie(obj); obj 565 fs/jffs2/nodemgmt.c static inline int on_list(struct list_head *obj, struct list_head *head) obj 570 fs/jffs2/nodemgmt.c if (this == obj) { obj 571 fs/jffs2/nodemgmt.c jffs2_dbg(1, "%p is on list at %p\n", obj, head); obj 118 fs/lockd/clnt4xdr.c struct xdr_netobj *obj) obj 122 fs/lockd/clnt4xdr.c ret = xdr_stream_decode_opaque_inline(xdr, (void *)&obj->data, obj 126 fs/lockd/clnt4xdr.c obj->len = ret; obj 115 fs/lockd/clntxdr.c struct xdr_netobj *obj) obj 119 fs/lockd/clntxdr.c ret = xdr_stream_decode_opaque_inline(xdr, (void *)&obj->data, obj 123 fs/lockd/clntxdr.c obj->len = ret; obj 1370 fs/nilfs2/super.c static void nilfs_inode_init_once(void *obj) obj 1372 fs/nilfs2/super.c struct nilfs_inode_info *ii = obj; obj 1383 fs/nilfs2/super.c static void nilfs_segbuf_init_once(void *obj) obj 1385 fs/nilfs2/super.c memset(obj, 0, sizeof(struct nilfs_segment_buffer)); obj 15 fs/notify/fsnotify.h return container_of(conn->obj, struct inode, i_fsnotify_marks); obj 21 fs/notify/fsnotify.h return container_of(conn->obj, struct mount, mnt_fsnotify_marks); obj 27 fs/notify/fsnotify.h return container_of(conn->obj, struct super_block, s_fsnotify_marks); obj 192 fs/notify/mark.c rcu_assign_pointer(*(conn->obj), NULL); obj 193 fs/notify/mark.c conn->obj = NULL; obj 485 fs/notify/mark.c conn->obj = connp; obj 434 fs/ocfs2/alloc.c void *obj, obj 441 fs/ocfs2/alloc.c if (!obj) obj 442 fs/ocfs2/alloc.c obj = (void *)bh->b_data; obj 443 fs/ocfs2/alloc.c et->et_object = obj; obj 127 fs/ocfs2/cluster/masklog.c static ssize_t mlog_show(struct kobject *obj, struct attribute *attr, obj 135 fs/ocfs2/cluster/masklog.c static ssize_t mlog_store(struct kobject *obj, struct attribute *attr, obj 2312 
fs/ubifs/super.c static void inode_slab_ctor(void *obj) obj 2314 fs/ubifs/super.c struct ubifs_inode *ui = obj; obj 495 fs/xfs/xfs_buf.c const void *obj) obj 498 fs/xfs/xfs_buf.c const struct xfs_buf *bp = obj; obj 1940 fs/xfs/xfs_inode.c const void *obj) obj 1943 fs/xfs/xfs_inode.c const struct xfs_iunlink *iu = obj; obj 209 include/acpi/acoutput.h #define ACPI_DEBUG_OBJECT(obj,l,i) acpi_ex_do_debug_object(obj,l,i) obj 222 include/acpi/acoutput.h #define ACPI_DEBUG_OBJECT(obj,l,i) obj 60 include/acpi/acpi_bus.h union acpi_object *obj; obj 62 include/acpi/acpi_bus.h obj = acpi_evaluate_dsm(handle, guid, rev, func, argv4); obj 63 include/acpi/acpi_bus.h if (obj && obj->type != type) { obj 64 include/acpi/acpi_bus.h ACPI_FREE(obj); obj 65 include/acpi/acpi_bus.h obj = NULL; obj 68 include/acpi/acpi_bus.h return obj; obj 210 include/drm/drm_atomic.h struct drm_private_state *(*atomic_duplicate_state)(struct drm_private_obj *obj); obj 217 include/drm/drm_atomic.h void (*atomic_destroy_state)(struct drm_private_obj *obj, obj 447 include/drm/drm_atomic.h struct drm_private_obj *obj, obj 450 include/drm/drm_atomic.h void drm_atomic_private_obj_fini(struct drm_private_obj *obj); obj 454 include/drm/drm_atomic.h struct drm_private_obj *obj); obj 457 include/drm/drm_atomic.h struct drm_private_obj *obj); obj 460 include/drm/drm_atomic.h struct drm_private_obj *obj); obj 890 include/drm/drm_atomic.h #define for_each_oldnew_private_obj_in_state(__state, obj, old_obj_state, new_obj_state, __i) \ obj 893 include/drm/drm_atomic.h ((obj) = (__state)->private_objs[__i].ptr, \ obj 909 include/drm/drm_atomic.h #define for_each_old_private_obj_in_state(__state, obj, old_obj_state, __i) \ obj 912 include/drm/drm_atomic.h ((obj) = (__state)->private_objs[__i].ptr, \ obj 927 include/drm/drm_atomic.h #define for_each_new_private_obj_in_state(__state, obj, new_obj_state, __i) \ obj 930 include/drm/drm_atomic.h ((obj) = (__state)->private_objs[__i].ptr, \ obj 75 include/drm/drm_atomic_state_helper.h void __drm_atomic_helper_private_obj_duplicate_state(struct drm_private_obj *obj, obj 501 include/drm/drm_drv.h void (*gem_free_object) (struct drm_gem_object *obj); obj 511 include/drm/drm_drv.h void (*gem_free_object_unlocked) (struct drm_gem_object *obj); obj 546 include/drm/drm_drv.h const struct drm_gem_object *obj); obj 584 include/drm/drm_drv.h struct dma_buf * (*gem_prime_export)(struct drm_gem_object *obj, obj 601 include/drm/drm_drv.h int (*gem_prime_pin)(struct drm_gem_object *obj); obj 608 include/drm/drm_drv.h void (*gem_prime_unpin)(struct drm_gem_object *obj); obj 616 include/drm/drm_drv.h struct sg_table *(*gem_prime_get_sg_table)(struct drm_gem_object *obj); obj 634 include/drm/drm_drv.h void *(*gem_prime_vmap)(struct drm_gem_object *obj); obj 642 include/drm/drm_drv.h void (*gem_prime_vunmap)(struct drm_gem_object *obj, void *vaddr); obj 653 include/drm/drm_drv.h int (*gem_prime_mmap)(struct drm_gem_object *obj, obj 213 include/drm/drm_framebuffer.h struct drm_gem_object *obj[4]; obj 55 include/drm/drm_gem.h void (*free)(struct drm_gem_object *obj); obj 64 include/drm/drm_gem.h int (*open)(struct drm_gem_object *obj, struct drm_file *file); obj 73 include/drm/drm_gem.h void (*close)(struct drm_gem_object *obj, struct drm_file *file); obj 89 include/drm/drm_gem.h const struct drm_gem_object *obj); obj 99 include/drm/drm_gem.h struct dma_buf *(*export)(struct drm_gem_object *obj, int flags); obj 108 include/drm/drm_gem.h int (*pin)(struct drm_gem_object *obj); obj 117 include/drm/drm_gem.h void 
(*unpin)(struct drm_gem_object *obj); obj 131 include/drm/drm_gem.h struct sg_table *(*get_sg_table)(struct drm_gem_object *obj); obj 141 include/drm/drm_gem.h void *(*vmap)(struct drm_gem_object *obj); obj 151 include/drm/drm_gem.h void (*vunmap)(struct drm_gem_object *obj, void *vaddr); obj 328 include/drm/drm_gem.h void drm_gem_object_release(struct drm_gem_object *obj); obj 331 include/drm/drm_gem.h struct drm_gem_object *obj, size_t size); obj 333 include/drm/drm_gem.h struct drm_gem_object *obj, size_t size); obj 336 include/drm/drm_gem.h int drm_gem_mmap_obj(struct drm_gem_object *obj, unsigned long obj_size, obj 347 include/drm/drm_gem.h static inline void drm_gem_object_get(struct drm_gem_object *obj) obj 349 include/drm/drm_gem.h kref_get(&obj->refcount); obj 368 include/drm/drm_gem.h __drm_gem_object_put(struct drm_gem_object *obj) obj 370 include/drm/drm_gem.h kref_put(&obj->refcount, drm_gem_object_free); obj 373 include/drm/drm_gem.h void drm_gem_object_put_unlocked(struct drm_gem_object *obj); obj 374 include/drm/drm_gem.h void drm_gem_object_put(struct drm_gem_object *obj); obj 377 include/drm/drm_gem.h struct drm_gem_object *obj, obj 382 include/drm/drm_gem.h void drm_gem_free_mmap_offset(struct drm_gem_object *obj); obj 383 include/drm/drm_gem.h int drm_gem_create_mmap_offset(struct drm_gem_object *obj); obj 384 include/drm/drm_gem.h int drm_gem_create_mmap_offset_size(struct drm_gem_object *obj, size_t size); obj 386 include/drm/drm_gem.h struct page **drm_gem_get_pages(struct drm_gem_object *obj); obj 387 include/drm/drm_gem.h void drm_gem_put_pages(struct drm_gem_object *obj, struct page **pages, obj 402 include/drm/drm_gem.h struct drm_gem_object *obj, obj 97 include/drm/drm_gem_cma_helper.h const struct drm_gem_object *obj); obj 99 include/drm/drm_gem_cma_helper.h struct sg_table *drm_gem_cma_prime_get_sg_table(struct drm_gem_object *obj); obj 104 include/drm/drm_gem_cma_helper.h int drm_gem_cma_prime_mmap(struct drm_gem_object *obj, obj 106 include/drm/drm_gem_cma_helper.h void *drm_gem_cma_prime_vmap(struct drm_gem_object *obj); obj 107 include/drm/drm_gem_cma_helper.h void drm_gem_cma_prime_vunmap(struct drm_gem_object *obj, void *vaddr); obj 101 include/drm/drm_gem_shmem_helper.h #define to_drm_gem_shmem_obj(obj) \ obj 102 include/drm/drm_gem_shmem_helper.h container_of(obj, struct drm_gem_shmem_object, base) obj 131 include/drm/drm_gem_shmem_helper.h void drm_gem_shmem_free_object(struct drm_gem_object *obj); obj 135 include/drm/drm_gem_shmem_helper.h int drm_gem_shmem_pin(struct drm_gem_object *obj); obj 136 include/drm/drm_gem_shmem_helper.h void drm_gem_shmem_unpin(struct drm_gem_object *obj); obj 137 include/drm/drm_gem_shmem_helper.h void *drm_gem_shmem_vmap(struct drm_gem_object *obj); obj 138 include/drm/drm_gem_shmem_helper.h void drm_gem_shmem_vunmap(struct drm_gem_object *obj, void *vaddr); obj 140 include/drm/drm_gem_shmem_helper.h int drm_gem_shmem_madvise(struct drm_gem_object *obj, int madv); obj 149 include/drm/drm_gem_shmem_helper.h void drm_gem_shmem_purge_locked(struct drm_gem_object *obj); obj 150 include/drm/drm_gem_shmem_helper.h bool drm_gem_shmem_purge(struct drm_gem_object *obj); obj 164 include/drm/drm_gem_shmem_helper.h const struct drm_gem_object *obj); obj 166 include/drm/drm_gem_shmem_helper.h struct sg_table *drm_gem_shmem_get_sg_table(struct drm_gem_object *obj); obj 172 include/drm/drm_gem_shmem_helper.h struct sg_table *drm_gem_shmem_get_pages_sgt(struct drm_gem_object *obj); obj 120 include/drm/drm_mode_object.h void 
drm_mode_object_get(struct drm_mode_object *obj); obj 121 include/drm/drm_mode_object.h void drm_mode_object_put(struct drm_mode_object *obj); obj 123 include/drm/drm_mode_object.h int drm_object_property_set_value(struct drm_mode_object *obj, obj 126 include/drm/drm_mode_object.h int drm_object_property_get_value(struct drm_mode_object *obj, obj 130 include/drm/drm_mode_object.h void drm_object_attach_property(struct drm_mode_object *obj, obj 90 include/drm/drm_prime.h int drm_gem_prime_mmap(struct drm_gem_object *obj, struct vm_area_struct *vma); obj 94 include/drm/drm_prime.h struct dma_buf *drm_gem_prime_export(struct drm_gem_object *obj, obj 104 include/drm/drm_prime.h void drm_prime_gem_destroy(struct drm_gem_object *obj, struct sg_table *sg); obj 76 include/drm/drm_syncobj.h drm_syncobj_get(struct drm_syncobj *obj) obj 78 include/drm/drm_syncobj.h kref_get(&obj->refcount); obj 86 include/drm/drm_syncobj.h drm_syncobj_put(struct drm_syncobj *obj) obj 88 include/drm/drm_syncobj.h kref_put(&obj->refcount, drm_syncobj_free); obj 1032 include/linux/acpi.h acpi_object_type type, const union acpi_object **obj); obj 1127 include/linux/acpi.h const union acpi_object **obj) obj 195 include/linux/backlight.h #define to_backlight_device(obj) container_of(obj, struct backlight_device, dev) obj 46 include/linux/btf.h void btf_type_seq_show(const struct btf *btf, u32 type_id, void *obj, obj 73 include/linux/compiler-gcc.h #define __compiletime_object_size(obj) __builtin_object_size(obj, 0) obj 317 include/linux/compiler.h # define __compiletime_object_size(obj) -1 obj 27 include/linux/cpu_rmap.h void **obj; obj 38 include/linux/cpu_rmap.h extern int cpu_rmap_add(struct cpu_rmap *rmap, void *obj); obj 49 include/linux/cpu_rmap.h return rmap->obj[rmap->near[cpu].index]; obj 597 include/linux/device-mapper.h #define dm_array_too_big(fixed, obj, num) \ obj 598 include/linux/device-mapper.h ((num) > (UINT_MAX - (fixed)) / (obj)) obj 80 include/linux/dma-resv.h #define dma_resv_held(obj) lockdep_is_held(&(obj)->lock.base) obj 81 include/linux/dma-resv.h #define dma_resv_assert_held(obj) lockdep_assert_held(&(obj)->lock.base) obj 91 include/linux/dma-resv.h static inline struct dma_resv_list *dma_resv_get_list(struct dma_resv *obj) obj 93 include/linux/dma-resv.h return rcu_dereference_protected(obj->fence, obj 94 include/linux/dma-resv.h dma_resv_held(obj)); obj 112 include/linux/dma-resv.h static inline int dma_resv_lock(struct dma_resv *obj, obj 115 include/linux/dma-resv.h return ww_mutex_lock(&obj->lock, ctx); obj 133 include/linux/dma-resv.h static inline int dma_resv_lock_interruptible(struct dma_resv *obj, obj 136 include/linux/dma-resv.h return ww_mutex_lock_interruptible(&obj->lock, ctx); obj 148 include/linux/dma-resv.h static inline void dma_resv_lock_slow(struct dma_resv *obj, obj 151 include/linux/dma-resv.h ww_mutex_lock_slow(&obj->lock, ctx); obj 164 include/linux/dma-resv.h static inline int dma_resv_lock_slow_interruptible(struct dma_resv *obj, obj 167 include/linux/dma-resv.h return ww_mutex_lock_slow_interruptible(&obj->lock, ctx); obj 184 include/linux/dma-resv.h static inline bool __must_check dma_resv_trylock(struct dma_resv *obj) obj 186 include/linux/dma-resv.h return ww_mutex_trylock(&obj->lock); obj 195 include/linux/dma-resv.h static inline bool dma_resv_is_locked(struct dma_resv *obj) obj 197 include/linux/dma-resv.h return ww_mutex_is_locked(&obj->lock); obj 207 include/linux/dma-resv.h static inline struct ww_acquire_ctx *dma_resv_locking_ctx(struct dma_resv *obj) obj 209 
include/linux/dma-resv.h return READ_ONCE(obj->lock.ctx); obj 218 include/linux/dma-resv.h static inline void dma_resv_unlock(struct dma_resv *obj) obj 222 include/linux/dma-resv.h if (rcu_access_pointer(obj->fence)) { obj 223 include/linux/dma-resv.h struct dma_resv_list *fence = dma_resv_get_list(obj); obj 228 include/linux/dma-resv.h ww_mutex_unlock(&obj->lock); obj 244 include/linux/dma-resv.h dma_resv_get_excl(struct dma_resv *obj) obj 246 include/linux/dma-resv.h return rcu_dereference_protected(obj->fence_excl, obj 247 include/linux/dma-resv.h dma_resv_held(obj)); obj 262 include/linux/dma-resv.h dma_resv_get_excl_rcu(struct dma_resv *obj) obj 266 include/linux/dma-resv.h if (!rcu_access_pointer(obj->fence_excl)) obj 270 include/linux/dma-resv.h fence = dma_fence_get_rcu_safe(&obj->fence_excl); obj 276 include/linux/dma-resv.h void dma_resv_init(struct dma_resv *obj); obj 277 include/linux/dma-resv.h void dma_resv_fini(struct dma_resv *obj); obj 278 include/linux/dma-resv.h int dma_resv_reserve_shared(struct dma_resv *obj, unsigned int num_fences); obj 279 include/linux/dma-resv.h void dma_resv_add_shared_fence(struct dma_resv *obj, struct dma_fence *fence); obj 281 include/linux/dma-resv.h void dma_resv_add_excl_fence(struct dma_resv *obj, struct dma_fence *fence); obj 283 include/linux/dma-resv.h int dma_resv_get_fences_rcu(struct dma_resv *obj, obj 290 include/linux/dma-resv.h long dma_resv_wait_timeout_rcu(struct dma_resv *obj, bool wait_all, bool intr, obj 293 include/linux/dma-resv.h bool dma_resv_test_signaled_rcu(struct dma_resv *obj, bool test_all); obj 302 include/linux/extcon.h static inline int extcon_register_interest(struct extcon_specific_cable_nb *obj, obj 309 include/linux/extcon.h static inline int extcon_unregister_interest(struct extcon_specific_cable_nb *obj) obj 300 include/linux/fsnotify_backend.h fsnotify_connp_t *obj; obj 126 include/linux/hashtable.h #define hash_for_each(name, bkt, obj, member) \ obj 127 include/linux/hashtable.h for ((bkt) = 0, obj = NULL; obj == NULL && (bkt) < HASH_SIZE(name);\ obj 129 include/linux/hashtable.h hlist_for_each_entry(obj, &name[bkt], member) obj 138 include/linux/hashtable.h #define hash_for_each_rcu(name, bkt, obj, member) \ obj 139 include/linux/hashtable.h for ((bkt) = 0, obj = NULL; obj == NULL && (bkt) < HASH_SIZE(name);\ obj 141 include/linux/hashtable.h hlist_for_each_entry_rcu(obj, &name[bkt], member) obj 152 include/linux/hashtable.h #define hash_for_each_safe(name, bkt, tmp, obj, member) \ obj 153 include/linux/hashtable.h for ((bkt) = 0, obj = NULL; obj == NULL && (bkt) < HASH_SIZE(name);\ obj 155 include/linux/hashtable.h hlist_for_each_entry_safe(obj, tmp, &name[bkt], member) obj 165 include/linux/hashtable.h #define hash_for_each_possible(name, obj, member, key) \ obj 166 include/linux/hashtable.h hlist_for_each_entry(obj, &name[hash_min(key, HASH_BITS(name))], member) obj 176 include/linux/hashtable.h #define hash_for_each_possible_rcu(name, obj, member, key) \ obj 177 include/linux/hashtable.h hlist_for_each_entry_rcu(obj, &name[hash_min(key, HASH_BITS(name))],\ obj 191 include/linux/hashtable.h #define hash_for_each_possible_rcu_notrace(name, obj, member, key) \ obj 192 include/linux/hashtable.h hlist_for_each_entry_rcu_notrace(obj, \ obj 204 include/linux/hashtable.h #define hash_for_each_possible_safe(name, obj, tmp, member, key) \ obj 205 include/linux/hashtable.h hlist_for_each_entry_safe(obj, tmp,\ obj 631 include/linux/ide.h #define to_ide_drv(obj, cont_type) \ obj 632 include/linux/ide.h 
container_of(obj, struct cont_type, dev) obj 113 include/linux/lcd.h #define to_lcd_device(obj) container_of(obj, struct lcd_device, dev) obj 98 include/linux/livepatch.h int (*pre_patch)(struct klp_object *obj); obj 99 include/linux/livepatch.h void (*post_patch)(struct klp_object *obj); obj 100 include/linux/livepatch.h void (*pre_unpatch)(struct klp_object *obj); obj 101 include/linux/livepatch.h void (*post_unpatch)(struct klp_object *obj); obj 162 include/linux/livepatch.h #define klp_for_each_object_static(patch, obj) \ obj 163 include/linux/livepatch.h for (obj = patch->objs; obj->funcs || obj->name; obj++) obj 165 include/linux/livepatch.h #define klp_for_each_object_safe(patch, obj, tmp_obj) \ obj 166 include/linux/livepatch.h list_for_each_entry_safe(obj, tmp_obj, &patch->obj_list, node) obj 168 include/linux/livepatch.h #define klp_for_each_object(patch, obj) \ obj 169 include/linux/livepatch.h list_for_each_entry(obj, &patch->obj_list, node) obj 171 include/linux/livepatch.h #define klp_for_each_func_static(obj, func) \ obj 172 include/linux/livepatch.h for (func = obj->funcs; \ obj 176 include/linux/livepatch.h #define klp_for_each_func_safe(obj, func, tmp_func) \ obj 177 include/linux/livepatch.h list_for_each_entry_safe(func, tmp_func, &obj->func_list, node) obj 179 include/linux/livepatch.h #define klp_for_each_func(obj, func) \ obj 180 include/linux/livepatch.h list_for_each_entry(func, &obj->func_list, node) obj 185 include/linux/livepatch.h struct klp_object *obj); obj 205 include/linux/livepatch.h typedef int (*klp_shadow_ctor_t)(void *obj, obj 208 include/linux/livepatch.h typedef void (*klp_shadow_dtor_t)(void *obj, void *shadow_data); obj 210 include/linux/livepatch.h void *klp_shadow_get(void *obj, unsigned long id); obj 211 include/linux/livepatch.h void *klp_shadow_alloc(void *obj, unsigned long id, obj 214 include/linux/livepatch.h void *klp_shadow_get_or_alloc(void *obj, unsigned long id, obj 217 include/linux/livepatch.h void klp_shadow_free(void *obj, unsigned long id, klp_shadow_dtor_t dtor); obj 10 include/linux/objagg.h const void *obj); obj 12 include/linux/objagg.h void * (*delta_create)(void *priv, void *parent_obj, void *obj); obj 14 include/linux/objagg.h void * (*root_create)(void *priv, void *obj, unsigned int root_id); obj 27 include/linux/objagg.h struct objagg_obj *objagg_obj_get(struct objagg *objagg, void *obj); obj 41 include/linux/rhashtable-types.h const void *obj); obj 243 include/linux/rhashtable.h struct rhash_head *obj); obj 390 include/linux/rhashtable.h struct rhash_head *obj) obj 394 include/linux/rhashtable.h if (rht_is_a_nulls(obj)) obj 395 include/linux/rhashtable.h obj = NULL; obj 396 include/linux/rhashtable.h rcu_assign_pointer(*p, (void *)((unsigned long)obj | BIT(0))); obj 401 include/linux/rhashtable.h struct rhash_head *obj) obj 405 include/linux/rhashtable.h if (rht_is_a_nulls(obj)) obj 406 include/linux/rhashtable.h obj = NULL; obj 408 include/linux/rhashtable.h rcu_assign_pointer(*p, obj); obj 579 include/linux/rhashtable.h const void *obj) obj 582 include/linux/rhashtable.h const char *ptr = obj; obj 668 include/linux/rhashtable.h void *obj; obj 671 include/linux/rhashtable.h obj = rhashtable_lookup(ht, key, params); obj 674 include/linux/rhashtable.h return obj; obj 705 include/linux/rhashtable.h struct rhashtable *ht, const void *key, struct rhash_head *obj, obj 723 include/linux/rhashtable.h hash = rht_head_hashfn(ht, tbl, obj, params); obj 736 include/linux/rhashtable.h return rhashtable_insert_slow(ht, key, obj); obj 
758 include/linux/rhashtable.h list = container_of(obj, struct rhlist_head, rhead); obj 765 include/linux/rhashtable.h rcu_assign_pointer(*pprev, obj); obj 768 include/linux/rhashtable.h rht_assign_unlock(tbl, bkt, obj); obj 786 include/linux/rhashtable.h RCU_INIT_POINTER(obj->next, head); obj 790 include/linux/rhashtable.h list = container_of(obj, struct rhlist_head, rhead); obj 795 include/linux/rhashtable.h rht_assign_unlock(tbl, bkt, obj); obj 827 include/linux/rhashtable.h struct rhashtable *ht, struct rhash_head *obj, obj 832 include/linux/rhashtable.h ret = __rhashtable_insert_fast(ht, NULL, obj, params, false); obj 904 include/linux/rhashtable.h struct rhashtable *ht, struct rhash_head *obj, obj 907 include/linux/rhashtable.h const char *key = rht_obj(ht, obj); obj 912 include/linux/rhashtable.h ret = __rhashtable_insert_fast(ht, key + ht->p.key_offset, obj, params, obj 931 include/linux/rhashtable.h struct rhashtable *ht, struct rhash_head *obj, obj 934 include/linux/rhashtable.h const char *key = rht_obj(ht, obj); obj 938 include/linux/rhashtable.h return __rhashtable_insert_fast(ht, key + ht->p.key_offset, obj, params, obj 958 include/linux/rhashtable.h struct rhashtable *ht, const void *key, struct rhash_head *obj, obj 965 include/linux/rhashtable.h ret = __rhashtable_insert_fast(ht, key, obj, params, false); obj 984 include/linux/rhashtable.h struct rhashtable *ht, const void *key, struct rhash_head *obj, obj 989 include/linux/rhashtable.h return __rhashtable_insert_fast(ht, key, obj, params, false); obj 995 include/linux/rhashtable.h struct rhash_head *obj, const struct rhashtable_params params, obj 1004 include/linux/rhashtable.h hash = rht_head_hashfn(ht, tbl, obj, params); obj 1016 include/linux/rhashtable.h if (he != obj) { obj 1028 include/linux/rhashtable.h } while (list && obj != &list->rhead); obj 1039 include/linux/rhashtable.h obj = rht_dereference_bucket(obj->next, tbl, hash); obj 1045 include/linux/rhashtable.h RCU_INIT_POINTER(list->rhead.next, obj); obj 1046 include/linux/rhashtable.h obj = &list->rhead; obj 1052 include/linux/rhashtable.h rcu_assign_pointer(*pprev, obj); obj 1055 include/linux/rhashtable.h rht_assign_unlock(tbl, bkt, obj); obj 1075 include/linux/rhashtable.h struct rhashtable *ht, struct rhash_head *obj, obj 1090 include/linux/rhashtable.h while ((err = __rhashtable_remove_fast_one(ht, tbl, obj, params, obj 1116 include/linux/rhashtable.h struct rhashtable *ht, struct rhash_head *obj, obj 1119 include/linux/rhashtable.h return __rhashtable_remove_fast(ht, obj, params, false); obj 81 include/linux/sched/task_stack.h static inline int object_is_on_stack(const void *obj) obj 85 include/linux/sched/task_stack.h return (obj >= stack) && (obj < (stack + THREAD_SIZE)); obj 39 include/linux/slab_def.h void (*ctor)(void *obj); obj 111 include/linux/slab_def.h const struct page *page, void *obj) obj 113 include/linux/slab_def.h u32 offset = (obj - page->s_mem); obj 247 include/linux/sunrpc/xdr.h const void *obj); obj 249 include/linux/sunrpc/xdr.h void *obj); obj 105 include/linux/thread_info.h const void *obj, unsigned long len) obj 48 include/linux/zsmalloc.h void zs_free(struct zs_pool *pool, unsigned long obj); obj 287 include/media/media-request.h static inline void media_request_object_get(struct media_request_object *obj) obj 289 include/media/media-request.h kref_get(&obj->kref); obj 300 include/media/media-request.h void media_request_object_put(struct media_request_object *obj); obj 331 include/media/media-request.h void 
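
In the rhashtable entries above, obj is consistently a struct rhash_head *: the intrusive hook embedded in the caller's structure, never the containing structure itself. The params argument is passed by value so the compiler can constant-fold the offsets. A hedged sketch with invented struct and parameter names:

#include <linux/rhashtable.h>

struct myobj {
        u32 key;
        struct rhash_head node;         /* this hook is the "obj" */
};

static const struct rhashtable_params my_params = {
        .key_len     = sizeof(u32),
        .key_offset  = offsetof(struct myobj, key),
        .head_offset = offsetof(struct myobj, node),
        .automatic_shrinking = true,
};

static int add_one(struct rhashtable *ht, struct myobj *o)
{
        return rhashtable_insert_fast(ht, &o->node, my_params);
}

static struct myobj *find_one(struct rhashtable *ht, u32 key)
{
        return rhashtable_lookup_fast(ht, &key, my_params);
}

static int del_one(struct rhashtable *ht, struct myobj *o)
{
        return rhashtable_remove_fast(ht, &o->node, my_params);
}
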
media_request_object_init(struct media_request_object *obj); obj 359 include/media/media-request.h struct media_request_object *obj); obj 368 include/media/media-request.h void media_request_object_unbind(struct media_request_object *obj); obj 378 include/media/media-request.h void media_request_object_complete(struct media_request_object *obj); obj 402 include/media/media-request.h static inline void media_request_object_get(struct media_request_object *obj) obj 406 include/media/media-request.h static inline void media_request_object_put(struct media_request_object *obj) obj 418 include/media/media-request.h static inline void media_request_object_init(struct media_request_object *obj) obj 420 include/media/media-request.h obj->ops = NULL; obj 421 include/media/media-request.h obj->req = NULL; obj 427 include/media/media-request.h struct media_request_object *obj) obj 432 include/media/media-request.h static inline void media_request_object_unbind(struct media_request_object *obj) obj 436 include/media/media-request.h static inline void media_request_object_complete(struct media_request_object *obj) obj 1216 include/media/videobuf2-core.h bool vb2_request_object_is_buffer(struct media_request_object *obj); obj 1081 include/net/netfilter/nf_tables.h static inline void *nft_obj_data(const struct nft_object *obj) obj 1083 include/net/netfilter/nf_tables.h return (void *)obj->data; obj 1094 include/net/netfilter/nf_tables.h struct nft_object *obj, u32 portid, u32 seq, obj 1130 include/net/netfilter/nf_tables.h void (*eval)(struct nft_object *obj, obj 1136 include/net/netfilter/nf_tables.h struct nft_object *obj); obj 1138 include/net/netfilter/nf_tables.h struct nft_object *obj); obj 1140 include/net/netfilter/nf_tables.h struct nft_object *obj, obj 1142 include/net/netfilter/nf_tables.h void (*update)(struct nft_object *obj, obj 1433 include/net/netfilter/nf_tables.h struct nft_object *obj; obj 1439 include/net/netfilter/nf_tables.h (((struct nft_trans_obj *)trans->data)->obj) obj 71 include/net/phonet/phonet.h u16 obj = pn_object(ph->pn_sdev, ph->pn_sobj); obj 74 include/net/phonet/phonet.h pn_sockaddr_set_object(sa, obj); obj 83 include/net/phonet/phonet.h u16 obj = pn_object(ph->pn_rdev, ph->pn_robj); obj 86 include/net/phonet/phonet.h pn_sockaddr_set_object(sa, obj); obj 185 include/net/sctp/command.h union sctp_arg obj; obj 214 include/net/sctp/command.h enum sctp_verb verb, union sctp_arg obj) obj 221 include/net/sctp/command.h cmd->obj = obj; obj 78 include/net/switchdev.h struct switchdev_obj obj; obj 85 include/net/switchdev.h container_of((OBJ), struct switchdev_obj_port_vlan, obj) obj 89 include/net/switchdev.h struct switchdev_obj obj; obj 95 include/net/switchdev.h container_of((OBJ), struct switchdev_obj_port_mdb, obj) obj 97 include/net/switchdev.h typedef int switchdev_obj_dump_cb_t(struct switchdev_obj *obj); obj 132 include/net/switchdev.h const struct switchdev_obj *obj; obj 162 include/net/switchdev.h const struct switchdev_obj *obj, obj 165 include/net/switchdev.h const struct switchdev_obj *obj); obj 187 include/net/switchdev.h const struct switchdev_obj *obj, obj 194 include/net/switchdev.h const struct switchdev_obj *obj)); obj 215 include/net/switchdev.h const struct switchdev_obj *obj, obj 222 include/net/switchdev.h const struct switchdev_obj *obj) obj 271 include/net/switchdev.h const struct switchdev_obj *obj, obj 283 include/net/switchdev.h const struct switchdev_obj *obj)) obj 218 include/rdma/rdma_vt.h void *obj; obj 100 include/rdma/uverbs_ioctl.h } obj; obj 
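
The switchdev entries above show the embedding pattern used throughout: each concrete object type (port_vlan, port_mdb) embeds a generic struct switchdev_obj as its obj member, callers pass &v.obj, and the SWITCHDEV_OBJ_PORT_*() macros recover the container. A caller-side sketch mirroring the br_switchdev.c entries further down; the function name and single-VLAN usage are illustrative:

#include <net/switchdev.h>

static int port_vlan_add(struct net_device *dev, u16 vid,
                         struct netlink_ext_ack *extack)
{
        struct switchdev_obj_port_vlan v = {
                .obj.orig_dev = dev,
                .obj.id = SWITCHDEV_OBJ_ID_PORT_VLAN,
                .vid_begin = vid,               /* single-VLAN range */
                .vid_end = vid,
        };

        /* Drivers see only &v.obj and use container_of to get v back. */
        return switchdev_port_obj_add(dev, &v.obj, extack);
}
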
538 include/rdma/uverbs_ioctl.h .u.obj.obj_type = _idr_type, \ obj 539 include/rdma/uverbs_ioctl.h .u.obj.access = _access, \ obj 548 include/rdma/uverbs_ioctl.h .u.obj.obj_type = _fd_type, \ obj 549 include/rdma/uverbs_ioctl.h .u.obj.access = _access, \ obj 76 include/rdma/uverbs_std_types.h int __uobj_perform_destroy(const struct uverbs_api_object *obj, u32 id, obj 82 include/rdma/uverbs_std_types.h struct ib_uobject *__uobj_get_destroy(const struct uverbs_api_object *obj, obj 124 include/rdma/uverbs_std_types.h __uobj_alloc(const struct uverbs_api_object *obj, obj 128 include/rdma/uverbs_std_types.h rdma_alloc_begin_uobject(obj, attrs->ufile, attrs); obj 85 include/rdma/uverbs_types.h struct ib_uobject *(*alloc_begin)(const struct uverbs_api_object *obj, obj 92 include/rdma/uverbs_types.h struct ib_uobject *(*lookup_get)(const struct uverbs_api_object *obj, obj 134 include/rdma/uverbs_types.h struct ib_uobject *rdma_lookup_get_uobject(const struct uverbs_api_object *obj, obj 140 include/rdma/uverbs_types.h struct ib_uobject *rdma_alloc_begin_uobject(const struct uverbs_api_object *obj, obj 65 include/soc/fsl/dpaa2-io.h irqreturn_t dpaa2_io_irq(struct dpaa2_io *obj); obj 49 include/trace/events/btrfs.h #define __show_root_type(obj) \ obj 50 include/trace/events/btrfs.h __print_symbolic_u64(obj, \ obj 65 include/trace/events/btrfs.h #define show_root_type(obj) \ obj 66 include/trace/events/btrfs.h obj, ((obj >= BTRFS_DATA_RELOC_TREE_OBJECTID) || \ obj 67 include/trace/events/btrfs.h (obj >= BTRFS_ROOT_TREE_OBJECTID && \ obj 68 include/trace/events/btrfs.h obj <= BTRFS_QUOTA_TREE_OBJECTID)) ? __show_root_type(obj) : "-" obj 72 include/trace/events/cachefiles.h TP_PROTO(struct cachefiles_object *obj, obj 77 include/trace/events/cachefiles.h TP_ARGS(obj, cookie, why, usage), obj 81 include/trace/events/cachefiles.h __field(struct cachefiles_object *, obj ) obj 88 include/trace/events/cachefiles.h __entry->obj = obj; obj 95 include/trace/events/cachefiles.h __entry->cookie, __entry->obj, __entry->usage, obj 100 include/trace/events/cachefiles.h TP_PROTO(struct cachefiles_object *obj, obj 104 include/trace/events/cachefiles.h TP_ARGS(obj, de, inode), obj 107 include/trace/events/cachefiles.h __field(struct cachefiles_object *, obj ) obj 113 include/trace/events/cachefiles.h __entry->obj = obj; obj 119 include/trace/events/cachefiles.h __entry->obj, __entry->de, __entry->inode) obj 123 include/trace/events/cachefiles.h TP_PROTO(struct cachefiles_object *obj, obj 126 include/trace/events/cachefiles.h TP_ARGS(obj, de, ret), obj 129 include/trace/events/cachefiles.h __field(struct cachefiles_object *, obj ) obj 135 include/trace/events/cachefiles.h __entry->obj = obj; obj 141 include/trace/events/cachefiles.h __entry->obj, __entry->de, __entry->ret) obj 145 include/trace/events/cachefiles.h TP_PROTO(struct cachefiles_object *obj, obj 148 include/trace/events/cachefiles.h TP_ARGS(obj, de, ret), obj 151 include/trace/events/cachefiles.h __field(struct cachefiles_object *, obj ) obj 157 include/trace/events/cachefiles.h __entry->obj = obj; obj 163 include/trace/events/cachefiles.h __entry->obj, __entry->de, __entry->ret) obj 167 include/trace/events/cachefiles.h TP_PROTO(struct cachefiles_object *obj, obj 171 include/trace/events/cachefiles.h TP_ARGS(obj, de, why), obj 175 include/trace/events/cachefiles.h __field(struct cachefiles_object *, obj ) obj 181 include/trace/events/cachefiles.h __entry->obj = obj; obj 187 include/trace/events/cachefiles.h __entry->obj, __entry->de, obj 192 
include/trace/events/cachefiles.h TP_PROTO(struct cachefiles_object *obj, obj 197 include/trace/events/cachefiles.h TP_ARGS(obj, de, to, why), obj 201 include/trace/events/cachefiles.h __field(struct cachefiles_object *, obj ) obj 208 include/trace/events/cachefiles.h __entry->obj = obj; obj 215 include/trace/events/cachefiles.h __entry->obj, __entry->de, __entry->to, obj 220 include/trace/events/cachefiles.h TP_PROTO(struct cachefiles_object *obj, obj 223 include/trace/events/cachefiles.h TP_ARGS(obj, de), obj 227 include/trace/events/cachefiles.h __field(struct cachefiles_object *, obj ) obj 232 include/trace/events/cachefiles.h __entry->obj = obj; obj 237 include/trace/events/cachefiles.h __entry->obj, __entry->de) obj 241 include/trace/events/cachefiles.h TP_PROTO(struct cachefiles_object *obj, obj 245 include/trace/events/cachefiles.h TP_ARGS(obj, de, xobj), obj 249 include/trace/events/cachefiles.h __field(struct cachefiles_object *, obj ) obj 257 include/trace/events/cachefiles.h __entry->obj = obj; obj 265 include/trace/events/cachefiles.h __entry->obj, __entry->de, __entry->xobj, obj 270 include/trace/events/cachefiles.h TP_PROTO(struct cachefiles_object *obj, obj 274 include/trace/events/cachefiles.h TP_ARGS(obj, de, inode), obj 278 include/trace/events/cachefiles.h __field(struct cachefiles_object *, obj ) obj 284 include/trace/events/cachefiles.h __entry->obj = obj; obj 290 include/trace/events/cachefiles.h __entry->obj, __entry->de, __entry->inode) obj 294 include/trace/events/cachefiles.h TP_PROTO(struct cachefiles_object *obj, obj 298 include/trace/events/cachefiles.h TP_ARGS(obj, de, why), obj 302 include/trace/events/cachefiles.h __field(struct cachefiles_object *, obj ) obj 308 include/trace/events/cachefiles.h __entry->obj = obj; obj 314 include/trace/events/cachefiles.h __entry->obj, __entry->de, obj 49 include/trace/events/objagg.h const struct objagg_obj *obj), obj 51 include/trace/events/objagg.h TP_ARGS(objagg, obj), obj 55 include/trace/events/objagg.h __field(const void *, obj) obj 60 include/trace/events/objagg.h __entry->obj = obj; obj 63 include/trace/events/objagg.h TP_printk("objagg %p, obj %p", __entry->objagg, __entry->obj) obj 68 include/trace/events/objagg.h const struct objagg_obj *obj), obj 70 include/trace/events/objagg.h TP_ARGS(objagg, obj), obj 74 include/trace/events/objagg.h __field(const void *, obj) obj 79 include/trace/events/objagg.h __entry->obj = obj; obj 82 include/trace/events/objagg.h TP_printk("objagg %p, obj %p", __entry->objagg, __entry->obj) obj 87 include/trace/events/objagg.h const struct objagg_obj *obj, obj 90 include/trace/events/objagg.h TP_ARGS(objagg, obj, refcount), obj 94 include/trace/events/objagg.h __field(const void *, obj) obj 100 include/trace/events/objagg.h __entry->obj = obj; obj 105 include/trace/events/objagg.h __entry->objagg, __entry->obj, __entry->refcount) obj 110 include/trace/events/objagg.h const struct objagg_obj *obj, obj 113 include/trace/events/objagg.h TP_ARGS(objagg, obj, refcount), obj 117 include/trace/events/objagg.h __field(const void *, obj) obj 123 include/trace/events/objagg.h __entry->obj = obj; obj 128 include/trace/events/objagg.h __entry->objagg, __entry->obj, __entry->refcount) obj 133 include/trace/events/objagg.h const struct objagg_obj *obj, obj 137 include/trace/events/objagg.h TP_ARGS(objagg, obj, parent, parent_refcount), obj 141 include/trace/events/objagg.h __field(const void *, obj) obj 148 include/trace/events/objagg.h __entry->obj = obj; obj 154 include/trace/events/objagg.h 
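
The cachefiles trace events above, like the objagg ones continuing just below, all record the object the same way: the pointer value is stored in the ring buffer via __field()/TP_fast_assign() and printed with %p, never dereferenced, because TP_printk runs when the buffer is read, possibly long after the object is freed. A generic sketch; the event name and struct are invented, and the TRACE_SYSTEM header boilerplate around a real definition is elided:

#include <linux/tracepoint.h>

struct myobj;

TRACE_EVENT(myobj_ref,
        TP_PROTO(struct myobj *obj, int usage),

        TP_ARGS(obj, usage),

        TP_STRUCT__entry(
                __field(struct myobj *, obj     )
                __field(int,            usage   )
        ),

        TP_fast_assign(
                __entry->obj    = obj;
                __entry->usage  = usage;
        ),

        /* Only the stored fields are used here, never *obj. */
        TP_printk("obj=%p usage=%d", __entry->obj, __entry->usage)
);
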
__entry->objagg, __entry->obj, obj 160 include/trace/events/objagg.h const struct objagg_obj *obj, obj 164 include/trace/events/objagg.h TP_ARGS(objagg, obj, parent, parent_refcount), obj 168 include/trace/events/objagg.h __field(const void *, obj) obj 175 include/trace/events/objagg.h __entry->obj = obj; obj 181 include/trace/events/objagg.h __entry->objagg, __entry->obj, obj 187 include/trace/events/objagg.h const struct objagg_obj *obj), obj 189 include/trace/events/objagg.h TP_ARGS(objagg, obj), obj 193 include/trace/events/objagg.h __field(const void *, obj) obj 198 include/trace/events/objagg.h __entry->obj = obj; obj 202 include/trace/events/objagg.h __entry->objagg, __entry->obj) obj 207 include/trace/events/objagg.h const struct objagg_obj *obj), obj 209 include/trace/events/objagg.h TP_ARGS(objagg, obj), obj 213 include/trace/events/objagg.h __field(const void *, obj) obj 218 include/trace/events/objagg.h __entry->obj = obj; obj 222 include/trace/events/objagg.h __entry->objagg, __entry->obj) obj 62 include/uapi/linux/aio_abi.h __u64 obj; /* what iocb this event came from */ obj 3370 kernel/bpf/btf.c void btf_type_seq_show(const struct btf *btf, u32 type_id, void *obj, obj 3375 kernel/bpf/btf.c btf_type_ops(t)->seq_show(btf, t, type_id, obj, 0, m); obj 47 kernel/bpf/cpumap.c struct bpf_cpu_map_entry *obj; obj 357 kernel/bpf/cpumap.c bq->obj = rcpu; obj 604 kernel/bpf/cpumap.c struct bpf_cpu_map_entry *rcpu = bq->obj; obj 694 kernel/bpf/cpumap.c wake_up_process(bq->obj->kthread); obj 62 kernel/bpf/devmap.c struct bpf_dtab_netdev *obj; obj 352 kernel/bpf/devmap.c struct bpf_dtab_netdev *obj = bq->obj; obj 353 kernel/bpf/devmap.c struct net_device *dev = obj->dev; obj 376 kernel/bpf/devmap.c trace_xdp_devmap_xmit(&obj->dtab->map, obj->idx, obj 424 kernel/bpf/devmap.c struct bpf_dtab_netdev *obj; obj 429 kernel/bpf/devmap.c obj = READ_ONCE(dtab->netdev_map[key]); obj 430 kernel/bpf/devmap.c return obj; obj 436 kernel/bpf/devmap.c static int bq_enqueue(struct bpf_dtab_netdev *obj, struct xdp_frame *xdpf, obj 440 kernel/bpf/devmap.c struct list_head *flush_list = this_cpu_ptr(obj->dtab->flush_list); obj 441 kernel/bpf/devmap.c struct xdp_bulk_queue *bq = this_cpu_ptr(obj->bulkq); obj 498 kernel/bpf/devmap.c struct bpf_dtab_netdev *obj = __dev_map_lookup_elem(map, *(u32 *)key); obj 499 kernel/bpf/devmap.c struct net_device *dev = obj ? obj->dev : NULL; obj 506 kernel/bpf/devmap.c struct bpf_dtab_netdev *obj = __dev_map_hash_lookup_elem(map, obj 508 kernel/bpf/devmap.c struct net_device *dev = obj ? 
obj->dev : NULL; obj 607 kernel/bpf/devmap.c bq->obj = dev; obj 46 kernel/livepatch/core.c static bool klp_is_module(struct klp_object *obj) obj 48 kernel/livepatch/core.c return obj->name; obj 52 kernel/livepatch/core.c static void klp_find_object_module(struct klp_object *obj) obj 56 kernel/livepatch/core.c if (!klp_is_module(obj)) obj 65 kernel/livepatch/core.c mod = find_module(obj->name); obj 74 kernel/livepatch/core.c obj->mod = mod; obj 84 kernel/livepatch/core.c static struct klp_func *klp_find_func(struct klp_object *obj, obj 89 kernel/livepatch/core.c klp_for_each_func(obj, func) { obj 102 kernel/livepatch/core.c struct klp_object *obj; obj 104 kernel/livepatch/core.c klp_for_each_object(patch, obj) { obj 106 kernel/livepatch/core.c if (klp_is_module(obj) && obj 107 kernel/livepatch/core.c strcmp(old_obj->name, obj->name) == 0) { obj 108 kernel/livepatch/core.c return obj; obj 110 kernel/livepatch/core.c } else if (!klp_is_module(obj)) { obj 111 kernel/livepatch/core.c return obj; obj 249 kernel/livepatch/core.c struct klp_object *obj) obj 256 kernel/livepatch/core.c if (WARN_ON(!klp_is_object_loaded(obj))) obj 259 kernel/livepatch/core.c objname = klp_is_module(obj) ? obj->name : "vmlinux"; obj 413 kernel/livepatch/core.c static void klp_free_object_dynamic(struct klp_object *obj) obj 415 kernel/livepatch/core.c kfree(obj->name); obj 416 kernel/livepatch/core.c kfree(obj); obj 419 kernel/livepatch/core.c static void klp_init_func_early(struct klp_object *obj, obj 422 kernel/livepatch/core.c struct klp_object *obj); obj 427 kernel/livepatch/core.c struct klp_object *obj; obj 429 kernel/livepatch/core.c obj = kzalloc(sizeof(*obj), GFP_KERNEL); obj 430 kernel/livepatch/core.c if (!obj) obj 434 kernel/livepatch/core.c obj->name = kstrdup(name, GFP_KERNEL); obj 435 kernel/livepatch/core.c if (!obj->name) { obj 436 kernel/livepatch/core.c kfree(obj); obj 441 kernel/livepatch/core.c klp_init_object_early(patch, obj); obj 442 kernel/livepatch/core.c obj->dynamic = true; obj 444 kernel/livepatch/core.c return obj; obj 454 kernel/livepatch/core.c struct klp_object *obj) obj 470 kernel/livepatch/core.c klp_init_func_early(obj, func); obj 484 kernel/livepatch/core.c struct klp_object *obj; obj 487 kernel/livepatch/core.c obj = klp_find_object(patch, old_obj); obj 489 kernel/livepatch/core.c if (!obj) { obj 490 kernel/livepatch/core.c obj = klp_alloc_object_dynamic(old_obj->name, patch); obj 491 kernel/livepatch/core.c if (!obj) obj 496 kernel/livepatch/core.c func = klp_find_func(obj, old_func); obj 500 kernel/livepatch/core.c func = klp_alloc_func_nop(old_func, obj); obj 547 kernel/livepatch/core.c struct klp_object *obj; obj 549 kernel/livepatch/core.c obj = container_of(kobj, struct klp_object, kobj); obj 551 kernel/livepatch/core.c if (obj->dynamic) obj 552 kernel/livepatch/core.c klp_free_object_dynamic(obj); obj 575 kernel/livepatch/core.c static void __klp_free_funcs(struct klp_object *obj, bool nops_only) obj 579 kernel/livepatch/core.c klp_for_each_func_safe(obj, func, tmp_func) { obj 589 kernel/livepatch/core.c static void klp_free_object_loaded(struct klp_object *obj) obj 593 kernel/livepatch/core.c obj->mod = NULL; obj 595 kernel/livepatch/core.c klp_for_each_func(obj, func) { obj 605 kernel/livepatch/core.c struct klp_object *obj, *tmp_obj; obj 607 kernel/livepatch/core.c klp_for_each_object_safe(patch, obj, tmp_obj) { obj 608 kernel/livepatch/core.c __klp_free_funcs(obj, nops_only); obj 610 kernel/livepatch/core.c if (nops_only && !obj->dynamic) obj 613 kernel/livepatch/core.c 
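
The kernel/bpf/cpumap.c and devmap.c entries above share one idiom: each per-CPU bulk queue stores an obj back-pointer to the map entry that owns it (bq->obj = rcpu, bq->obj = dev), so the flush path can recover the destination without another map lookup. A skeletal restatement with invented names:

struct net_device;                      /* opaque here; sketch only */

#define BQ_BATCH 16

struct my_dest {
        struct net_device *dev;
};

struct my_bulk_queue {
        void *frames[BQ_BATCH];         /* items staged for one flush */
        unsigned int count;
        struct my_dest *obj;            /* back-pointer to the owner */
};

static void bq_flush(struct my_bulk_queue *bq)
{
        struct my_dest *dest = bq->obj; /* destination from the queue */

        if (!dest->dev)
                return;                 /* owner already torn down */
        /* ... send bq->frames[0..count) to dest->dev as one batch ... */
        bq->count = 0;
}
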
list_del(&obj->node); obj 614 kernel/livepatch/core.c kobject_put(&obj->kobj); obj 680 kernel/livepatch/core.c static int klp_init_func(struct klp_object *obj, struct klp_func *func) obj 704 kernel/livepatch/core.c return kobject_add(&func->kobj, &obj->kobj, "%s,%lu", obj 711 kernel/livepatch/core.c struct klp_object *obj) obj 717 kernel/livepatch/core.c struct klp_object *obj) obj 725 kernel/livepatch/core.c ret = klp_write_object_relocations(patch->mod, obj); obj 732 kernel/livepatch/core.c arch_klp_init_object_loaded(patch, obj); obj 737 kernel/livepatch/core.c klp_for_each_func(obj, func) { obj 738 kernel/livepatch/core.c ret = klp_find_object_symbol(obj->name, func->old_name, obj 767 kernel/livepatch/core.c static int klp_init_object(struct klp_patch *patch, struct klp_object *obj) obj 773 kernel/livepatch/core.c if (klp_is_module(obj) && strlen(obj->name) >= MODULE_NAME_LEN) obj 776 kernel/livepatch/core.c obj->patched = false; obj 777 kernel/livepatch/core.c obj->mod = NULL; obj 779 kernel/livepatch/core.c klp_find_object_module(obj); obj 781 kernel/livepatch/core.c name = klp_is_module(obj) ? obj->name : "vmlinux"; obj 782 kernel/livepatch/core.c ret = kobject_add(&obj->kobj, &patch->kobj, "%s", name); obj 786 kernel/livepatch/core.c klp_for_each_func(obj, func) { obj 787 kernel/livepatch/core.c ret = klp_init_func(obj, func); obj 792 kernel/livepatch/core.c if (klp_is_object_loaded(obj)) obj 793 kernel/livepatch/core.c ret = klp_init_object_loaded(patch, obj); obj 798 kernel/livepatch/core.c static void klp_init_func_early(struct klp_object *obj, obj 802 kernel/livepatch/core.c list_add_tail(&func->node, &obj->func_list); obj 806 kernel/livepatch/core.c struct klp_object *obj) obj 808 kernel/livepatch/core.c INIT_LIST_HEAD(&obj->func_list); obj 809 kernel/livepatch/core.c kobject_init(&obj->kobj, &klp_ktype_object); obj 810 kernel/livepatch/core.c list_add_tail(&obj->node, &patch->obj_list); obj 815 kernel/livepatch/core.c struct klp_object *obj; obj 829 kernel/livepatch/core.c klp_for_each_object_static(patch, obj) { obj 830 kernel/livepatch/core.c if (!obj->funcs) obj 833 kernel/livepatch/core.c klp_init_object_early(patch, obj); obj 835 kernel/livepatch/core.c klp_for_each_func_static(obj, func) { obj 836 kernel/livepatch/core.c klp_init_func_early(obj, func); obj 848 kernel/livepatch/core.c struct klp_object *obj; obj 861 kernel/livepatch/core.c klp_for_each_object(patch, obj) { obj 862 kernel/livepatch/core.c ret = klp_init_object(patch, obj); obj 874 kernel/livepatch/core.c struct klp_object *obj; obj 884 kernel/livepatch/core.c klp_for_each_object(patch, obj) obj 885 kernel/livepatch/core.c if (obj->patched) obj 886 kernel/livepatch/core.c klp_pre_unpatch_callback(obj); obj 906 kernel/livepatch/core.c struct klp_object *obj; obj 928 kernel/livepatch/core.c klp_for_each_object(patch, obj) { obj 929 kernel/livepatch/core.c if (!klp_is_object_loaded(obj)) obj 932 kernel/livepatch/core.c ret = klp_pre_patch_callback(obj); obj 935 kernel/livepatch/core.c klp_is_module(obj) ? obj->name : "vmlinux"); obj 939 kernel/livepatch/core.c ret = klp_patch_object(obj); obj 942 kernel/livepatch/core.c klp_is_module(obj) ? 
obj->name : "vmlinux"); obj 1084 kernel/livepatch/core.c struct klp_object *obj; obj 1090 kernel/livepatch/core.c klp_for_each_object(patch, obj) { obj 1091 kernel/livepatch/core.c if (!klp_is_module(obj) || strcmp(obj->name, mod->name)) obj 1095 kernel/livepatch/core.c klp_pre_unpatch_callback(obj); obj 1098 kernel/livepatch/core.c patch->mod->name, obj->mod->name); obj 1099 kernel/livepatch/core.c klp_unpatch_object(obj); obj 1101 kernel/livepatch/core.c klp_post_unpatch_callback(obj); obj 1103 kernel/livepatch/core.c klp_free_object_loaded(obj); obj 1113 kernel/livepatch/core.c struct klp_object *obj; obj 1127 kernel/livepatch/core.c klp_for_each_object(patch, obj) { obj 1128 kernel/livepatch/core.c if (!klp_is_module(obj) || strcmp(obj->name, mod->name)) obj 1131 kernel/livepatch/core.c obj->mod = mod; obj 1133 kernel/livepatch/core.c ret = klp_init_object_loaded(patch, obj); obj 1136 kernel/livepatch/core.c patch->mod->name, obj->mod->name, ret); obj 1141 kernel/livepatch/core.c patch->mod->name, obj->mod->name); obj 1143 kernel/livepatch/core.c ret = klp_pre_patch_callback(obj); obj 1146 kernel/livepatch/core.c obj->name); obj 1150 kernel/livepatch/core.c ret = klp_patch_object(obj); obj 1153 kernel/livepatch/core.c patch->mod->name, obj->mod->name, ret); obj 1155 kernel/livepatch/core.c klp_post_unpatch_callback(obj); obj 1160 kernel/livepatch/core.c klp_post_patch_callback(obj); obj 1176 kernel/livepatch/core.c patch->mod->name, obj->mod->name, obj->mod->name); obj 1178 kernel/livepatch/core.c obj->mod = NULL; obj 20 kernel/livepatch/core.h static inline bool klp_is_object_loaded(struct klp_object *obj) obj 22 kernel/livepatch/core.h return !obj->name || obj->mod; obj 25 kernel/livepatch/core.h static inline int klp_pre_patch_callback(struct klp_object *obj) obj 29 kernel/livepatch/core.h if (obj->callbacks.pre_patch) obj 30 kernel/livepatch/core.h ret = (*obj->callbacks.pre_patch)(obj); obj 32 kernel/livepatch/core.h obj->callbacks.post_unpatch_enabled = !ret; obj 37 kernel/livepatch/core.h static inline void klp_post_patch_callback(struct klp_object *obj) obj 39 kernel/livepatch/core.h if (obj->callbacks.post_patch) obj 40 kernel/livepatch/core.h (*obj->callbacks.post_patch)(obj); obj 43 kernel/livepatch/core.h static inline void klp_pre_unpatch_callback(struct klp_object *obj) obj 45 kernel/livepatch/core.h if (obj->callbacks.pre_unpatch) obj 46 kernel/livepatch/core.h (*obj->callbacks.pre_unpatch)(obj); obj 49 kernel/livepatch/core.h static inline void klp_post_unpatch_callback(struct klp_object *obj) obj 51 kernel/livepatch/core.h if (obj->callbacks.post_unpatch_enabled && obj 52 kernel/livepatch/core.h obj->callbacks.post_unpatch) obj 53 kernel/livepatch/core.h (*obj->callbacks.post_unpatch)(obj); obj 55 kernel/livepatch/core.h obj->callbacks.post_unpatch_enabled = false; obj 237 kernel/livepatch/patch.c static void __klp_unpatch_object(struct klp_object *obj, bool nops_only) obj 241 kernel/livepatch/patch.c klp_for_each_func(obj, func) { obj 249 kernel/livepatch/patch.c if (obj->dynamic || !nops_only) obj 250 kernel/livepatch/patch.c obj->patched = false; obj 254 kernel/livepatch/patch.c void klp_unpatch_object(struct klp_object *obj) obj 256 kernel/livepatch/patch.c __klp_unpatch_object(obj, false); obj 259 kernel/livepatch/patch.c int klp_patch_object(struct klp_object *obj) obj 264 kernel/livepatch/patch.c if (WARN_ON(obj->patched)) obj 267 kernel/livepatch/patch.c klp_for_each_func(obj, func) { obj 270 kernel/livepatch/patch.c klp_unpatch_object(obj); obj 274 
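
The kernel/livepatch/core.h inlines above define the callback contract: pre_patch may veto by returning non-zero, and post_unpatch only runs if pre_patch succeeded (post_unpatch_enabled). A sketch of wiring these into a patch, in the style of the lib/livepatch/test_klp_callbacks_demo.c entries further down; the target name, symbol, and replacement body are placeholders:

#include <linux/kernel.h>
#include <linux/livepatch.h>

static void my_replacement(void)
{
        /* placeholder for the patched implementation */
}

static int demo_pre_patch(struct klp_object *obj)
{
        pr_info("about to patch %s\n", obj->name ? obj->name : "vmlinux");
        return 0;       /* non-zero would abort patching this object */
}

static void demo_post_unpatch(struct klp_object *obj)
{
        pr_info("done unpatching %s\n", obj->name ? obj->name : "vmlinux");
}

static struct klp_func funcs[] = {
        {
                .old_name = "target_function",  /* placeholder symbol */
                .new_func = my_replacement,
        },
        { }
};

static struct klp_object objs[] = {
        {
                .name = "target_module",        /* NULL means vmlinux */
                .funcs = funcs,
                .callbacks = {
                        .pre_patch      = demo_pre_patch,
                        .post_unpatch   = demo_post_unpatch,
                },
        },
        { }
};
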
kernel/livepatch/patch.c obj->patched = true; obj 281 kernel/livepatch/patch.c struct klp_object *obj; obj 283 kernel/livepatch/patch.c klp_for_each_object(patch, obj) obj 284 kernel/livepatch/patch.c if (obj->patched) obj 285 kernel/livepatch/patch.c __klp_unpatch_object(obj, nops_only); obj 30 kernel/livepatch/patch.h int klp_patch_object(struct klp_object *obj); obj 31 kernel/livepatch/patch.h void klp_unpatch_object(struct klp_object *obj); obj 57 kernel/livepatch/shadow.c void *obj; obj 70 kernel/livepatch/shadow.c static inline bool klp_shadow_match(struct klp_shadow *shadow, void *obj, obj 73 kernel/livepatch/shadow.c return shadow->obj == obj && shadow->id == id; obj 83 kernel/livepatch/shadow.c void *klp_shadow_get(void *obj, unsigned long id) obj 90 kernel/livepatch/shadow.c (unsigned long)obj) { obj 92 kernel/livepatch/shadow.c if (klp_shadow_match(shadow, obj, id)) { obj 104 kernel/livepatch/shadow.c static void *__klp_shadow_get_or_alloc(void *obj, unsigned long id, obj 114 kernel/livepatch/shadow.c shadow_data = klp_shadow_get(obj, id); obj 129 kernel/livepatch/shadow.c shadow_data = klp_shadow_get(obj, id); obj 140 kernel/livepatch/shadow.c new_shadow->obj = obj; obj 146 kernel/livepatch/shadow.c err = ctor(obj, new_shadow->data, ctor_data); obj 151 kernel/livepatch/shadow.c obj, id, err); obj 158 kernel/livepatch/shadow.c (unsigned long)new_shadow->obj); obj 165 kernel/livepatch/shadow.c WARN(1, "Duplicate shadow variable <%p, %lx>\n", obj, id); obj 196 kernel/livepatch/shadow.c void *klp_shadow_alloc(void *obj, unsigned long id, obj 200 kernel/livepatch/shadow.c return __klp_shadow_get_or_alloc(obj, id, size, gfp_flags, obj 225 kernel/livepatch/shadow.c void *klp_shadow_get_or_alloc(void *obj, unsigned long id, obj 229 kernel/livepatch/shadow.c return __klp_shadow_get_or_alloc(obj, id, size, gfp_flags, obj 239 kernel/livepatch/shadow.c dtor(shadow->obj, shadow->data); obj 253 kernel/livepatch/shadow.c void klp_shadow_free(void *obj, unsigned long id, klp_shadow_dtor_t dtor) obj 262 kernel/livepatch/shadow.c (unsigned long)obj) { obj 264 kernel/livepatch/shadow.c if (klp_shadow_match(shadow, obj, id)) { obj 293 kernel/livepatch/shadow.c if (klp_shadow_match(shadow, shadow->obj, id)) obj 71 kernel/livepatch/transition.c struct klp_object *obj; obj 101 kernel/livepatch/transition.c klp_for_each_object(klp_transition_patch, obj) obj 102 kernel/livepatch/transition.c klp_for_each_func(obj, func) obj 122 kernel/livepatch/transition.c klp_for_each_object(klp_transition_patch, obj) { obj 123 kernel/livepatch/transition.c if (!klp_is_object_loaded(obj)) obj 126 kernel/livepatch/transition.c klp_post_patch_callback(obj); obj 128 kernel/livepatch/transition.c klp_post_unpatch_callback(obj); obj 245 kernel/livepatch/transition.c struct klp_object *obj; obj 258 kernel/livepatch/transition.c klp_for_each_object(klp_transition_patch, obj) { obj 259 kernel/livepatch/transition.c if (!obj->patched) obj 261 kernel/livepatch/transition.c klp_for_each_func(obj, func) { obj 508 kernel/livepatch/transition.c struct klp_object *obj; obj 567 kernel/livepatch/transition.c klp_for_each_object(patch, obj) obj 568 kernel/livepatch/transition.c klp_for_each_func(obj, func) obj 736 kernel/locking/lockdep.c static int static_obj(const void *obj) obj 740 kernel/locking/lockdep.c addr = (unsigned long) obj; obj 259 kernel/sched/topology.c struct em_perf_domain *obj = em_cpu_get(cpu); obj 262 kernel/sched/topology.c if (!obj) { obj 271 kernel/sched/topology.c pd->em_pd = obj; obj 39 lib/cpu_rmap.c rmap = 
kzalloc(obj_offset + size * sizeof(rmap->obj[0]), flags); obj 44 lib/cpu_rmap.c rmap->obj = (void **)((char *)rmap + obj_offset); obj 138 lib/cpu_rmap.c int cpu_rmap_add(struct cpu_rmap *rmap, void *obj) obj 144 lib/cpu_rmap.c rmap->obj[index] = obj; obj 234 lib/cpu_rmap.c glue = rmap->obj[index]; obj 132 lib/debugobjects.c struct debug_obj *obj; obj 153 lib/debugobjects.c obj = hlist_entry(obj_to_free.first, typeof(*obj), node); obj 154 lib/debugobjects.c hlist_del(&obj->node); obj 156 lib/debugobjects.c hlist_add_head(&obj->node, &obj_pool); obj 192 lib/debugobjects.c struct debug_obj *obj; obj 195 lib/debugobjects.c hlist_for_each_entry(obj, &b->list, node) { obj 197 lib/debugobjects.c if (obj->object == addr) obj 198 lib/debugobjects.c return obj; obj 211 lib/debugobjects.c struct debug_obj *obj = NULL; obj 214 lib/debugobjects.c obj = hlist_entry(list->first, typeof(*obj), node); obj 215 lib/debugobjects.c hlist_del(&obj->node); obj 218 lib/debugobjects.c return obj; obj 229 lib/debugobjects.c struct debug_obj *obj; obj 232 lib/debugobjects.c obj = __alloc_object(&percpu_pool->free_objs); obj 233 lib/debugobjects.c if (obj) { obj 240 lib/debugobjects.c obj = __alloc_object(&obj_pool); obj 241 lib/debugobjects.c if (obj) { obj 275 lib/debugobjects.c if (obj) { obj 276 lib/debugobjects.c obj->object = addr; obj 277 lib/debugobjects.c obj->descr = descr; obj 278 lib/debugobjects.c obj->state = ODEBUG_STATE_NONE; obj 279 lib/debugobjects.c obj->astate = 0; obj 280 lib/debugobjects.c hlist_add_head(&obj->node, &b->list); obj 282 lib/debugobjects.c return obj; obj 294 lib/debugobjects.c struct debug_obj *obj; obj 313 lib/debugobjects.c obj = hlist_entry(obj_to_free.first, typeof(*obj), node); obj 314 lib/debugobjects.c hlist_del(&obj->node); obj 315 lib/debugobjects.c hlist_add_head(&obj->node, &obj_pool); obj 335 lib/debugobjects.c hlist_for_each_entry_safe(obj, tmp, &tofree, node) { obj 336 lib/debugobjects.c hlist_del(&obj->node); obj 337 lib/debugobjects.c kmem_cache_free(obj_cache, obj); obj 341 lib/debugobjects.c static void __free_object(struct debug_obj *obj) obj 358 lib/debugobjects.c hlist_add_head(&obj->node, &percpu_pool->free_objs); obj 383 lib/debugobjects.c hlist_add_head(&obj->node, &obj_to_free); obj 401 lib/debugobjects.c obj = __alloc_object(&obj_pool); obj 402 lib/debugobjects.c hlist_add_head(&obj->node, &obj_to_free); obj 409 lib/debugobjects.c hlist_add_head(&obj->node, &obj_pool); obj 427 lib/debugobjects.c static void free_object(struct debug_obj *obj) obj 429 lib/debugobjects.c __free_object(obj); obj 445 lib/debugobjects.c struct debug_obj *obj; obj 457 lib/debugobjects.c hlist_for_each_entry_safe(obj, tmp, &freelist, node) { obj 458 lib/debugobjects.c hlist_del(&obj->node); obj 459 lib/debugobjects.c free_object(obj); obj 476 lib/debugobjects.c static void debug_print_object(struct debug_obj *obj, char *msg) obj 478 lib/debugobjects.c struct debug_obj_descr *descr = obj->descr; obj 483 lib/debugobjects.c descr->debug_hint(obj->object) : NULL; obj 487 lib/debugobjects.c msg, obj_states[obj->state], obj->astate, obj 537 lib/debugobjects.c struct debug_obj *obj; obj 546 lib/debugobjects.c obj = lookup_object(addr, db); obj 547 lib/debugobjects.c if (!obj) { obj 548 lib/debugobjects.c obj = alloc_object(addr, db, descr); obj 549 lib/debugobjects.c if (!obj) { obj 558 lib/debugobjects.c switch (obj->state) { obj 562 lib/debugobjects.c obj->state = ODEBUG_STATE_INIT; obj 566 lib/debugobjects.c state = obj->state; obj 568 lib/debugobjects.c debug_print_object(obj, 
"init"); obj 574 lib/debugobjects.c debug_print_object(obj, "init"); obj 624 lib/debugobjects.c struct debug_obj *obj; obj 638 lib/debugobjects.c obj = lookup_object(addr, db); obj 639 lib/debugobjects.c if (obj) { obj 642 lib/debugobjects.c switch (obj->state) { obj 645 lib/debugobjects.c obj->state = ODEBUG_STATE_ACTIVE; obj 650 lib/debugobjects.c state = obj->state; obj 652 lib/debugobjects.c debug_print_object(obj, "activate"); obj 666 lib/debugobjects.c debug_print_object(obj, "activate"); obj 701 lib/debugobjects.c struct debug_obj *obj; obj 712 lib/debugobjects.c obj = lookup_object(addr, db); obj 713 lib/debugobjects.c if (obj) { obj 714 lib/debugobjects.c switch (obj->state) { obj 718 lib/debugobjects.c if (!obj->astate) obj 719 lib/debugobjects.c obj->state = ODEBUG_STATE_INACTIVE; obj 733 lib/debugobjects.c if (!obj) { obj 740 lib/debugobjects.c debug_print_object(obj, "deactivate"); obj 754 lib/debugobjects.c struct debug_obj *obj; obj 765 lib/debugobjects.c obj = lookup_object(addr, db); obj 766 lib/debugobjects.c if (!obj) obj 769 lib/debugobjects.c switch (obj->state) { obj 773 lib/debugobjects.c obj->state = ODEBUG_STATE_DESTROYED; obj 776 lib/debugobjects.c state = obj->state; obj 778 lib/debugobjects.c debug_print_object(obj, "destroy"); obj 791 lib/debugobjects.c debug_print_object(obj, "destroy"); obj 804 lib/debugobjects.c struct debug_obj *obj; obj 814 lib/debugobjects.c obj = lookup_object(addr, db); obj 815 lib/debugobjects.c if (!obj) obj 818 lib/debugobjects.c switch (obj->state) { obj 820 lib/debugobjects.c state = obj->state; obj 822 lib/debugobjects.c debug_print_object(obj, "free"); obj 826 lib/debugobjects.c hlist_del(&obj->node); obj 828 lib/debugobjects.c free_object(obj); obj 844 lib/debugobjects.c struct debug_obj *obj; obj 854 lib/debugobjects.c obj = lookup_object(addr, db); obj 855 lib/debugobjects.c if (!obj) { obj 893 lib/debugobjects.c struct debug_obj *obj; obj 904 lib/debugobjects.c obj = lookup_object(addr, db); obj 905 lib/debugobjects.c if (obj) { obj 906 lib/debugobjects.c switch (obj->state) { obj 908 lib/debugobjects.c if (obj->astate == expect) obj 909 lib/debugobjects.c obj->astate = next; obj 921 lib/debugobjects.c if (!obj) { obj 928 lib/debugobjects.c debug_print_object(obj, "active_state"); obj 941 lib/debugobjects.c struct debug_obj *obj; obj 956 lib/debugobjects.c hlist_for_each_entry_safe(obj, tmp, &db->list, node) { obj 958 lib/debugobjects.c oaddr = (unsigned long) obj->object; obj 962 lib/debugobjects.c switch (obj->state) { obj 964 lib/debugobjects.c descr = obj->descr; obj 965 lib/debugobjects.c state = obj->state; obj 967 lib/debugobjects.c debug_print_object(obj, "free"); obj 972 lib/debugobjects.c hlist_del(&obj->node); obj 973 lib/debugobjects.c __free_object(obj); obj 1070 lib/debugobjects.c struct self_test *obj = addr; obj 1072 lib/debugobjects.c return obj->static_init; obj 1081 lib/debugobjects.c struct self_test *obj = addr; obj 1085 lib/debugobjects.c debug_object_deactivate(obj, &descr_type_test); obj 1086 lib/debugobjects.c debug_object_init(obj, &descr_type_test); obj 1100 lib/debugobjects.c struct self_test *obj = addr; obj 1106 lib/debugobjects.c debug_object_deactivate(obj, &descr_type_test); obj 1107 lib/debugobjects.c debug_object_activate(obj, &descr_type_test); obj 1121 lib/debugobjects.c struct self_test *obj = addr; obj 1125 lib/debugobjects.c debug_object_deactivate(obj, &descr_type_test); obj 1126 lib/debugobjects.c debug_object_destroy(obj, &descr_type_test); obj 1139 lib/debugobjects.c struct 
self_test *obj = addr; obj 1143 lib/debugobjects.c debug_object_deactivate(obj, &descr_type_test); obj 1144 lib/debugobjects.c debug_object_free(obj, &descr_type_test); obj 1155 lib/debugobjects.c struct debug_obj *obj; obj 1163 lib/debugobjects.c obj = lookup_object(addr, db); obj 1164 lib/debugobjects.c if (!obj && state != ODEBUG_STATE_NONE) { obj 1168 lib/debugobjects.c if (obj && obj->state != state) { obj 1170 lib/debugobjects.c obj->state, state); obj 1200 lib/debugobjects.c static __initdata struct self_test obj = { .static_init = 0 }; obj 1213 lib/debugobjects.c debug_object_init(&obj, &descr_type_test); obj 1214 lib/debugobjects.c if (check_results(&obj, ODEBUG_STATE_INIT, fixups, warnings)) obj 1216 lib/debugobjects.c debug_object_activate(&obj, &descr_type_test); obj 1217 lib/debugobjects.c if (check_results(&obj, ODEBUG_STATE_ACTIVE, fixups, warnings)) obj 1219 lib/debugobjects.c debug_object_activate(&obj, &descr_type_test); obj 1220 lib/debugobjects.c if (check_results(&obj, ODEBUG_STATE_ACTIVE, ++fixups, ++warnings)) obj 1222 lib/debugobjects.c debug_object_deactivate(&obj, &descr_type_test); obj 1223 lib/debugobjects.c if (check_results(&obj, ODEBUG_STATE_INACTIVE, fixups, warnings)) obj 1225 lib/debugobjects.c debug_object_destroy(&obj, &descr_type_test); obj 1226 lib/debugobjects.c if (check_results(&obj, ODEBUG_STATE_DESTROYED, fixups, warnings)) obj 1228 lib/debugobjects.c debug_object_init(&obj, &descr_type_test); obj 1229 lib/debugobjects.c if (check_results(&obj, ODEBUG_STATE_DESTROYED, fixups, ++warnings)) obj 1231 lib/debugobjects.c debug_object_activate(&obj, &descr_type_test); obj 1232 lib/debugobjects.c if (check_results(&obj, ODEBUG_STATE_DESTROYED, fixups, ++warnings)) obj 1234 lib/debugobjects.c debug_object_deactivate(&obj, &descr_type_test); obj 1235 lib/debugobjects.c if (check_results(&obj, ODEBUG_STATE_DESTROYED, fixups, ++warnings)) obj 1237 lib/debugobjects.c debug_object_free(&obj, &descr_type_test); obj 1238 lib/debugobjects.c if (check_results(&obj, ODEBUG_STATE_NONE, fixups, warnings)) obj 1241 lib/debugobjects.c obj.static_init = 1; obj 1242 lib/debugobjects.c debug_object_activate(&obj, &descr_type_test); obj 1243 lib/debugobjects.c if (check_results(&obj, ODEBUG_STATE_ACTIVE, fixups, warnings)) obj 1245 lib/debugobjects.c debug_object_init(&obj, &descr_type_test); obj 1246 lib/debugobjects.c if (check_results(&obj, ODEBUG_STATE_INIT, ++fixups, ++warnings)) obj 1248 lib/debugobjects.c debug_object_free(&obj, &descr_type_test); obj 1249 lib/debugobjects.c if (check_results(&obj, ODEBUG_STATE_NONE, fixups, warnings)) obj 1253 lib/debugobjects.c debug_object_init(&obj, &descr_type_test); obj 1254 lib/debugobjects.c if (check_results(&obj, ODEBUG_STATE_INIT, fixups, warnings)) obj 1256 lib/debugobjects.c debug_object_activate(&obj, &descr_type_test); obj 1257 lib/debugobjects.c if (check_results(&obj, ODEBUG_STATE_ACTIVE, fixups, warnings)) obj 1259 lib/debugobjects.c __debug_check_no_obj_freed(&obj, sizeof(obj)); obj 1260 lib/debugobjects.c if (check_results(&obj, ODEBUG_STATE_NONE, ++fixups, ++warnings)) obj 1299 lib/debugobjects.c struct debug_obj *obj, *new; obj 1304 lib/debugobjects.c obj = kmem_cache_zalloc(obj_cache, GFP_KERNEL); obj 1305 lib/debugobjects.c if (!obj) obj 1307 lib/debugobjects.c hlist_add_head(&obj->node, &objects); obj 1317 lib/debugobjects.c hlist_for_each_entry_safe(obj, tmp, &obj_pool, node) obj 1318 lib/debugobjects.c hlist_del(&obj->node); obj 1326 lib/debugobjects.c hlist_for_each_entry(obj, &objects, node) { obj 1327 
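
The lib/debugobjects.c entries above implement a state machine (NONE, INIT, ACTIVE, INACTIVE, DESTROYED) keyed by the object's address; the selftest sequence shows the expected transitions and how misuse bumps the fixups/warnings counters. A lifecycle sketch for a hypothetical tracked type:

#include <linux/debugobjects.h>

static struct debug_obj_descr my_descr = {
        .name = "my_timer",
};

struct my_timer { int armed; };

static void my_timer_setup(struct my_timer *t)
{
        debug_object_init(t, &my_descr);        /* NONE -> INIT */
}

static void my_timer_arm(struct my_timer *t)
{
        debug_object_activate(t, &my_descr);    /* INIT -> ACTIVE */
        t->armed = 1;
}

static void my_timer_disarm(struct my_timer *t)
{
        t->armed = 0;
        debug_object_deactivate(t, &my_descr);  /* ACTIVE -> INACTIVE */
}

static void my_timer_release(struct my_timer *t)
{
        debug_object_free(t, &my_descr);        /* forget the tracker entry */
}
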
lib/debugobjects.c new = hlist_entry(obj_pool.first, typeof(*obj), node); obj 1330 lib/debugobjects.c *new = *obj; obj 1340 lib/debugobjects.c hlist_for_each_entry_safe(obj, tmp, &objects, node) { obj 1341 lib/debugobjects.c hlist_del(&obj->node); obj 1342 lib/debugobjects.c kmem_cache_free(obj_cache, obj); obj 21 lib/livepatch/test_klp_callbacks_demo.c static void callback_info(const char *callback, struct klp_object *obj) obj 23 lib/livepatch/test_klp_callbacks_demo.c if (obj->mod) obj 24 lib/livepatch/test_klp_callbacks_demo.c pr_info("%s: %s -> %s\n", callback, obj->mod->name, obj 25 lib/livepatch/test_klp_callbacks_demo.c module_state[obj->mod->state]); obj 31 lib/livepatch/test_klp_callbacks_demo.c static int pre_patch_callback(struct klp_object *obj) obj 33 lib/livepatch/test_klp_callbacks_demo.c callback_info(__func__, obj); obj 38 lib/livepatch/test_klp_callbacks_demo.c static void post_patch_callback(struct klp_object *obj) obj 40 lib/livepatch/test_klp_callbacks_demo.c callback_info(__func__, obj); obj 44 lib/livepatch/test_klp_callbacks_demo.c static void pre_unpatch_callback(struct klp_object *obj) obj 46 lib/livepatch/test_klp_callbacks_demo.c callback_info(__func__, obj); obj 50 lib/livepatch/test_klp_callbacks_demo.c static void post_unpatch_callback(struct klp_object *obj) obj 52 lib/livepatch/test_klp_callbacks_demo.c callback_info(__func__, obj); obj 21 lib/livepatch/test_klp_callbacks_demo2.c static void callback_info(const char *callback, struct klp_object *obj) obj 23 lib/livepatch/test_klp_callbacks_demo2.c if (obj->mod) obj 24 lib/livepatch/test_klp_callbacks_demo2.c pr_info("%s: %s -> %s\n", callback, obj->mod->name, obj 25 lib/livepatch/test_klp_callbacks_demo2.c module_state[obj->mod->state]); obj 31 lib/livepatch/test_klp_callbacks_demo2.c static int pre_patch_callback(struct klp_object *obj) obj 33 lib/livepatch/test_klp_callbacks_demo2.c callback_info(__func__, obj); obj 38 lib/livepatch/test_klp_callbacks_demo2.c static void post_patch_callback(struct klp_object *obj) obj 40 lib/livepatch/test_klp_callbacks_demo2.c callback_info(__func__, obj); obj 44 lib/livepatch/test_klp_callbacks_demo2.c static void pre_unpatch_callback(struct klp_object *obj) obj 46 lib/livepatch/test_klp_callbacks_demo2.c callback_info(__func__, obj); obj 50 lib/livepatch/test_klp_callbacks_demo2.c static void post_unpatch_callback(struct klp_object *obj) obj 52 lib/livepatch/test_klp_callbacks_demo2.c callback_info(__func__, obj); obj 61 lib/livepatch/test_klp_shadow_vars.c static void *shadow_get(void *obj, unsigned long id) obj 63 lib/livepatch/test_klp_shadow_vars.c void *ret = klp_shadow_get(obj, id); obj 66 lib/livepatch/test_klp_shadow_vars.c __func__, ptr_id(obj), id, ptr_id(ret)); obj 71 lib/livepatch/test_klp_shadow_vars.c static void *shadow_alloc(void *obj, unsigned long id, size_t size, obj 75 lib/livepatch/test_klp_shadow_vars.c void *ret = klp_shadow_alloc(obj, id, size, gfp_flags, ctor, obj 78 lib/livepatch/test_klp_shadow_vars.c __func__, ptr_id(obj), id, size, &gfp_flags, ptr_id(ctor), obj 83 lib/livepatch/test_klp_shadow_vars.c static void *shadow_get_or_alloc(void *obj, unsigned long id, size_t size, obj 87 lib/livepatch/test_klp_shadow_vars.c void *ret = klp_shadow_get_or_alloc(obj, id, size, gfp_flags, ctor, obj 90 lib/livepatch/test_klp_shadow_vars.c __func__, ptr_id(obj), id, size, &gfp_flags, ptr_id(ctor), obj 95 lib/livepatch/test_klp_shadow_vars.c static void shadow_free(void *obj, unsigned long id, klp_shadow_dtor_t dtor) obj 97 
lib/livepatch/test_klp_shadow_vars.c klp_shadow_free(obj, id, dtor); obj 99 lib/livepatch/test_klp_shadow_vars.c __func__, ptr_id(obj), id, ptr_id(dtor)); obj 111 lib/livepatch/test_klp_shadow_vars.c static int shadow_ctor(void *obj, void *shadow_data, void *ctor_data) obj 121 lib/livepatch/test_klp_shadow_vars.c static void shadow_dtor(void *obj, void *shadow_data) obj 124 lib/livepatch/test_klp_shadow_vars.c __func__, ptr_id(obj), ptr_id(shadow_data)); obj 129 lib/livepatch/test_klp_shadow_vars.c void *obj = THIS_MODULE; obj 149 lib/livepatch/test_klp_shadow_vars.c ret = shadow_get(obj, id); obj 156 lib/livepatch/test_klp_shadow_vars.c sv1 = shadow_alloc(obj, id, size, gfp_flags, shadow_ctor, &var1); obj 160 lib/livepatch/test_klp_shadow_vars.c sv2 = shadow_alloc(obj + 1, id, size, gfp_flags, shadow_ctor, &var2); obj 164 lib/livepatch/test_klp_shadow_vars.c sv3 = shadow_alloc(obj, id + 1, size, gfp_flags, shadow_ctor, &var3); obj 172 lib/livepatch/test_klp_shadow_vars.c ret = shadow_get(obj, id); obj 179 lib/livepatch/test_klp_shadow_vars.c ret = shadow_get(obj + 1, id); obj 185 lib/livepatch/test_klp_shadow_vars.c ret = shadow_get(obj, id + 1); obj 196 lib/livepatch/test_klp_shadow_vars.c sv4 = shadow_get_or_alloc(obj + 2, id, size, gfp_flags, shadow_ctor, &var4); obj 200 lib/livepatch/test_klp_shadow_vars.c ret = shadow_get_or_alloc(obj + 2, id, size, gfp_flags, shadow_ctor, &var4); obj 211 lib/livepatch/test_klp_shadow_vars.c shadow_free(obj, id, shadow_dtor); /* sv1 */ obj 212 lib/livepatch/test_klp_shadow_vars.c ret = shadow_get(obj, id); obj 216 lib/livepatch/test_klp_shadow_vars.c shadow_free(obj + 1, id, shadow_dtor); /* sv2 */ obj 217 lib/livepatch/test_klp_shadow_vars.c ret = shadow_get(obj + 1, id); obj 221 lib/livepatch/test_klp_shadow_vars.c shadow_free(obj + 2, id, shadow_dtor); /* sv4 */ obj 222 lib/livepatch/test_klp_shadow_vars.c ret = shadow_get(obj + 2, id); obj 229 lib/livepatch/test_klp_shadow_vars.c ret = shadow_get(obj, id + 1); obj 240 lib/livepatch/test_klp_shadow_vars.c ret = shadow_get(obj, id); obj 31 lib/objagg.c unsigned long obj[0]; obj 35 lib/objagg.c objagg_hints_lookup(struct objagg_hints *objagg_hints, void *obj) obj 39 lib/objagg.c return rhashtable_lookup_fast(&objagg_hints->node_ht, obj, obj 69 lib/objagg.c unsigned long obj[0]; obj 154 lib/objagg.c return objagg_obj->obj; obj 158 lib/objagg.c static struct objagg_obj *objagg_obj_lookup(struct objagg *objagg, void *obj) obj 160 lib/objagg.c return rhashtable_lookup_fast(&objagg->obj_ht, obj, objagg->ht_params); obj 170 lib/objagg.c delta_priv = objagg->ops->delta_create(objagg->priv, parent->obj, obj 171 lib/objagg.c objagg_obj->obj); obj 271 lib/objagg.c objagg_obj->obj, obj 293 lib/objagg.c static struct objagg_obj *__objagg_obj_get(struct objagg *objagg, void *obj); obj 303 lib/objagg.c hnode = objagg_hints_lookup(objagg->hints, objagg_obj->obj); obj 313 lib/objagg.c parent = __objagg_obj_get(objagg, hnode->parent->obj); obj 364 lib/objagg.c static struct objagg_obj *objagg_obj_create(struct objagg *objagg, void *obj) obj 374 lib/objagg.c memcpy(objagg_obj->obj, obj, objagg->ops->obj_size); obj 397 lib/objagg.c static struct objagg_obj *__objagg_obj_get(struct objagg *objagg, void *obj) obj 404 lib/objagg.c objagg_obj = objagg_obj_lookup(objagg, obj); obj 410 lib/objagg.c return objagg_obj_create(objagg, obj); obj 437 lib/objagg.c struct objagg_obj *objagg_obj_get(struct objagg *objagg, void *obj) obj 441 lib/objagg.c objagg_obj = __objagg_obj_get(objagg, obj); obj 537 lib/objagg.c 
objagg->ht_params.key_offset = offsetof(struct objagg_obj, obj); obj 660 lib/objagg.c memcpy(hnode->obj, &objagg_obj->obj, obj_size); obj 818 lib/objagg.c pnode->objagg_obj->obj, obj 819 lib/objagg.c node->objagg_obj->obj)) { obj 910 lib/objagg.c const void *obj) obj 916 lib/objagg.c const char *ptr = obj; obj 958 lib/objagg.c offsetof(struct objagg_hints_node, obj); obj 481 lib/rhashtable.c const void *key, struct rhash_head *obj) obj 508 lib/rhashtable.c list = container_of(obj, struct rhlist_head, rhead); obj 515 lib/rhashtable.c rcu_assign_pointer(*pprev, obj); obj 518 lib/rhashtable.c rht_assign_locked(bkt, obj); obj 533 lib/rhashtable.c struct rhash_head *obj, obj 560 lib/rhashtable.c RCU_INIT_POINTER(obj->next, head); obj 564 lib/rhashtable.c list = container_of(obj, struct rhlist_head, rhead); obj 571 lib/rhashtable.c rht_assign_locked(bkt, obj); obj 581 lib/rhashtable.c struct rhash_head *obj) obj 593 lib/rhashtable.c hash = rht_head_hashfn(ht, tbl, obj, ht->p); obj 605 lib/rhashtable.c hash, key, obj); obj 607 lib/rhashtable.c hash, obj, data); obj 623 lib/rhashtable.c struct rhash_head *obj) obj 629 lib/rhashtable.c data = rhashtable_try_insert(ht, key, obj); obj 1089 lib/rhashtable.c static void rhashtable_free_one(struct rhashtable *ht, struct rhash_head *obj, obj 1096 lib/rhashtable.c free_fn(rht_obj(ht, obj), arg); obj 1100 lib/rhashtable.c list = container_of(obj, struct rhlist_head, rhead); obj 1102 lib/rhashtable.c obj = &list->rhead; obj 1104 lib/rhashtable.c free_fn(rht_obj(ht, obj), arg); obj 151 lib/test_meminit.c static void test_ctor(void *obj) obj 153 lib/test_meminit.c *(unsigned int *)obj = CTOR_PATTERN; obj 90 lib/test_objagg.c static bool delta_check(void *priv, const void *parent_obj, const void *obj) obj 93 lib/test_objagg.c const struct tokey *key = obj; obj 99 lib/test_objagg.c static void *delta_create(void *priv, void *parent_obj, void *obj) obj 103 lib/test_objagg.c struct tokey *key = obj; obj 107 lib/test_objagg.c if (!delta_check(priv, parent_obj, obj)) obj 127 lib/test_objagg.c static void *root_create(void *priv, void *obj, unsigned int id) obj 130 lib/test_objagg.c struct tokey *key = obj; obj 281 lib/test_objagg.c const void *obj) obj 286 lib/test_objagg.c static void *delta_create_dummy(void *priv, void *parent_obj, void *obj) obj 81 lib/test_rhashtable.c const struct test_obj_rhl *obj = data; obj 83 lib/test_rhashtable.c return (obj->value.id % 10); obj 86 lib/test_rhashtable.c static int my_cmpfn(struct rhashtable_compare_arg *arg, const void *obj) obj 88 lib/test_rhashtable.c const struct test_obj_rhl *test_obj = obj; obj 115 lib/test_rhashtable.c static int insert_retry(struct rhashtable *ht, struct test_obj *obj, obj 123 lib/test_rhashtable.c err = rhashtable_insert_fast(ht, &obj->node, params); obj 143 lib/test_rhashtable.c struct test_obj *obj; obj 152 lib/test_rhashtable.c obj = rhashtable_lookup_fast(ht, &key, test_rht_params); obj 154 lib/test_rhashtable.c if (expected && !obj) { obj 157 lib/test_rhashtable.c } else if (!expected && obj) { obj 161 lib/test_rhashtable.c } else if (expected && obj) { obj 162 lib/test_rhashtable.c if (obj->value.id != i) { obj 164 lib/test_rhashtable.c obj->value.id, i); obj 211 lib/test_rhashtable.c struct test_obj *obj; obj 223 lib/test_rhashtable.c struct test_obj *obj = &array[i]; obj 225 lib/test_rhashtable.c obj->value.id = i * 2; obj 226 lib/test_rhashtable.c err = insert_retry(ht, obj, test_rht_params); obj 251 lib/test_rhashtable.c obj = rhashtable_lookup_fast(ht, &key, test_rht_params); obj 252 
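
The lib/objagg.c and lib/test_objagg.c entries above outline the consumer contract: objagg keeps one canonical obj_size-byte copy of each key (note the memcpy into objagg_obj->obj and the rhashtable key_offset pointing at it), and the ops decide whether a new object becomes a root or a delta off a parent. A hedged sketch reusing the callback names from the test; struct tokey is the test's key type and the callback bodies are as defined there:

#include <linux/err.h>
#include <linux/objagg.h>

static const struct objagg_ops my_ops = {
        .obj_size       = sizeof(struct tokey),
        .delta_check    = delta_check,  /* may obj be a delta of parent? */
        .delta_create   = delta_create,
        .delta_destroy  = delta_destroy,
        .root_create    = root_create,
        .root_destroy   = root_destroy,
};

static int use_objagg(void *priv, struct tokey *key)
{
        struct objagg_obj *oo;
        struct objagg *agg;

        agg = objagg_create(&my_ops, NULL /* no hints */, priv);
        if (IS_ERR(agg))
                return PTR_ERR(agg);

        oo = objagg_obj_get(agg, key);  /* take a reference, or create */
        if (IS_ERR(oo)) {
                objagg_destroy(agg);
                return PTR_ERR(oo);
        }

        /* ... use objagg_obj_root_priv()/objagg_obj_delta_priv() ... */

        objagg_obj_put(agg, oo);
        objagg_destroy(agg);
        return 0;
}
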
lib/test_rhashtable.c BUG_ON(!obj); obj 254 lib/test_rhashtable.c rhashtable_remove_fast(ht, &obj->node, test_rht_params); obj 312 lib/test_rhashtable.c struct test_obj_rhl *obj; obj 327 lib/test_rhashtable.c rhl_for_each_entry_rcu(obj, pos, h, list_node) { obj 337 lib/test_rhashtable.c rhl_for_each_entry_rcu(obj, pos, h, list_node) { obj 460 lib/test_rhashtable.c struct test_obj *obj = &array[i]; obj 462 lib/test_rhashtable.c obj->value.id = i * 2; obj 463 lib/test_rhashtable.c err = insert_retry(&ht, obj, test_rht_params); obj 607 lib/test_rhashtable.c struct test_obj *obj; obj 613 lib/test_rhashtable.c obj = rhashtable_lookup_fast(&ht, &key, test_rht_params); obj 614 lib/test_rhashtable.c if (obj && (tdata->objs[i].value.id == TEST_INSERT_FAIL)) { obj 617 lib/test_rhashtable.c } else if (!obj && (tdata->objs[i].value.id != TEST_INSERT_FAIL)) { obj 620 lib/test_rhashtable.c } else if (obj && memcmp(&obj->value, &key, sizeof(key))) { obj 622 lib/test_rhashtable.c obj->value.tid, obj->value.id, key.tid, key.id); obj 1644 mm/kmemleak.c struct kmemleak_object *obj = prev_obj; obj 1648 mm/kmemleak.c list_for_each_entry_continue_rcu(obj, &object_list, object_list) { obj 1649 mm/kmemleak.c if (get_object(obj)) { obj 1650 mm/kmemleak.c next_obj = obj; obj 44 mm/mempool.c u8 *obj = element; obj 50 mm/mempool.c if (obj[i] != exp) { obj 55 mm/mempool.c memset(obj, POISON_INUSE, size); obj 76 mm/mempool.c u8 *obj = element; obj 78 mm/mempool.c memset(obj, POISON_FREE, size - 1); obj 79 mm/mempool.c obj[size - 1] = POISON_END; obj 2691 mm/slab.c static inline void verify_redzone_free(struct kmem_cache *cache, void *obj) obj 2695 mm/slab.c redzone1 = *dbg_redzone1(cache, obj); obj 2696 mm/slab.c redzone2 = *dbg_redzone2(cache, obj); obj 2710 mm/slab.c obj, redzone1, redzone2); obj 2857 mm/slab.c void *obj; obj 2870 mm/slab.c obj = slab_get_obj(cachep, page); obj 2878 mm/slab.c return obj; obj 2965 mm/slab.c void *obj = cache_alloc_pfmemalloc(cachep, n, flags); obj 2967 mm/slab.c if (obj) obj 2968 mm/slab.c return obj; obj 3109 mm/slab.c void *obj = NULL; obj 3132 mm/slab.c obj = ____cache_alloc_node(cache, obj 3134 mm/slab.c if (obj) obj 3139 mm/slab.c if (!obj) { obj 3150 mm/slab.c obj = ____cache_alloc_node(cache, obj 3157 mm/slab.c if (!obj) obj 3162 mm/slab.c if (unlikely(!obj && read_mems_allowed_retry(cpuset_mems_cookie))) obj 3164 mm/slab.c return obj; obj 3175 mm/slab.c void *obj = NULL; obj 3196 mm/slab.c obj = slab_get_obj(cachep, page); obj 3203 mm/slab.c return obj; obj 3210 mm/slab.c obj = slab_get_obj(cachep, page); obj 3214 mm/slab.c return obj ? 
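
The mm/mempool.c entries above show the classic poison scheme: while an element sits free in the pool it is filled with POISON_FREE and terminated with a distinct POISON_END byte, and on reuse every byte is checked so a use-after-free write is caught. Restated as a self-contained sketch:

#include <linux/poison.h>
#include <linux/string.h>
#include <linux/types.h>

static void poison_element(void *element, size_t size)
{
        u8 *obj = element;

        memset(obj, POISON_FREE, size - 1);
        obj[size - 1] = POISON_END;     /* distinct end marker */
}

static bool element_clean(const void *element, size_t size)
{
        const u8 *obj = element;
        size_t i;

        for (i = 0; i < size; i++) {
                u8 exp = (i == size - 1) ? POISON_END : POISON_FREE;

                if (obj[i] != exp)
                        return false;   /* modified while free */
        }
        return true;
}
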
obj : fallback_alloc(cachep, flags); obj 468 mm/slab.h static inline struct kmem_cache *virt_to_cache(const void *obj) obj 472 mm/slab.h page = virt_to_head_page(obj); obj 2666 mm/slub.c void *obj) obj 2668 mm/slub.c if (unlikely(slab_want_init_on_free(s)) && obj) obj 2669 mm/slub.c memset((void *)((char *)obj + s->offset), 0, sizeof(void *)); obj 34 mm/usercopy.c static noinline int check_stack_object(const void *obj, unsigned long len) obj 41 mm/usercopy.c if (obj + len <= stack || stackend <= obj) obj 49 mm/usercopy.c if (obj < stack || stackend < obj + len) obj 53 mm/usercopy.c ret = arch_within_stack_frames(stack, stackend, obj, len); obj 373 mm/zsmalloc.c static void record_obj(unsigned long handle, unsigned long obj) obj 380 mm/zsmalloc.c WRITE_ONCE(*(unsigned long *)handle, obj); obj 509 mm/zsmalloc.c static inline void set_freeobj(struct zspage *zspage, unsigned int obj) obj 511 mm/zsmalloc.c zspage->freeobj = obj; obj 846 mm/zsmalloc.c static void obj_to_location(unsigned long obj, struct page **page, obj 849 mm/zsmalloc.c obj >>= OBJ_TAG_BITS; obj 850 mm/zsmalloc.c *page = pfn_to_page(obj >> OBJ_INDEX_BITS); obj 851 mm/zsmalloc.c *obj_idx = (obj & OBJ_INDEX_MASK); obj 861 mm/zsmalloc.c unsigned long obj; obj 863 mm/zsmalloc.c obj = page_to_pfn(page) << OBJ_INDEX_BITS; obj 864 mm/zsmalloc.c obj |= obj_idx & OBJ_INDEX_MASK; obj 865 mm/zsmalloc.c obj <<= OBJ_TAG_BITS; obj 867 mm/zsmalloc.c return obj; obj 875 mm/zsmalloc.c static unsigned long obj_to_head(struct page *page, void *obj) obj 881 mm/zsmalloc.c return *(unsigned long *)obj; obj 1296 mm/zsmalloc.c unsigned long obj, off; obj 1316 mm/zsmalloc.c obj = handle_to_obj(handle); obj 1317 mm/zsmalloc.c obj_to_location(obj, &page, &obj_idx); obj 1354 mm/zsmalloc.c unsigned long obj, off; obj 1362 mm/zsmalloc.c obj = handle_to_obj(handle); obj 1363 mm/zsmalloc.c obj_to_location(obj, &page, &obj_idx); obj 1411 mm/zsmalloc.c unsigned long obj; obj 1419 mm/zsmalloc.c obj = get_freeobj(zspage); obj 1421 mm/zsmalloc.c offset = obj * class->size; obj 1443 mm/zsmalloc.c obj = location_to_obj(m_page, obj); obj 1445 mm/zsmalloc.c return obj; obj 1461 mm/zsmalloc.c unsigned long handle, obj; obj 1480 mm/zsmalloc.c obj = obj_malloc(class, zspage, handle); obj 1483 mm/zsmalloc.c record_obj(handle, obj); obj 1498 mm/zsmalloc.c obj = obj_malloc(class, zspage, handle); obj 1502 mm/zsmalloc.c record_obj(handle, obj); obj 1515 mm/zsmalloc.c static void obj_free(struct size_class *class, unsigned long obj) obj 1524 mm/zsmalloc.c obj &= ~OBJ_ALLOCATED_TAG; obj 1525 mm/zsmalloc.c obj_to_location(obj, &f_page, &f_objidx); obj 1544 mm/zsmalloc.c unsigned long obj; obj 1555 mm/zsmalloc.c obj = handle_to_obj(handle); obj 1556 mm/zsmalloc.c obj_to_location(obj, &f_page, &f_objidx); obj 1565 mm/zsmalloc.c obj_free(class, obj); obj 56 net/batman-adv/sysfs.c static struct net_device *batadv_kobj_to_netdev(struct kobject *obj) obj 58 net/batman-adv/sysfs.c struct device *dev = container_of(obj->parent, struct device, kobj); obj 63 net/batman-adv/sysfs.c static struct batadv_priv *batadv_kobj_to_batpriv(struct kobject *obj) obj 65 net/batman-adv/sysfs.c struct net_device *net_dev = batadv_kobj_to_netdev(obj); obj 76 net/batman-adv/sysfs.c static struct batadv_priv *batadv_vlan_kobj_to_batpriv(struct kobject *obj) obj 81 net/batman-adv/sysfs.c if (!strcmp(BATADV_SYSFS_IF_MESH_SUBDIR, obj->name)) obj 82 net/batman-adv/sysfs.c return batadv_kobj_to_batpriv(obj); obj 87 net/batman-adv/sysfs.c return batadv_kobj_to_batpriv(obj->parent); obj 98 
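
The mm/zsmalloc.c entries above define the object-reference encoding: <pfn, index within the zspage> packed into one unsigned long, shifted left by OBJ_TAG_BITS so the low bits can carry tags such as OBJ_ALLOCATED_TAG (note the obj &= ~OBJ_ALLOCATED_TAG in obj_free). Restated over raw pfns rather than struct page, with the constants left symbolic, so the round trip is visible:

static unsigned long location_to_obj(unsigned long pfn, unsigned int obj_idx)
{
        unsigned long obj;

        obj = pfn << OBJ_INDEX_BITS;
        obj |= obj_idx & OBJ_INDEX_MASK;
        obj <<= OBJ_TAG_BITS;           /* leave room for the tag bits */
        return obj;
}

static void obj_to_location(unsigned long obj, unsigned long *pfn,
                            unsigned int *obj_idx)
{
        obj >>= OBJ_TAG_BITS;           /* strip the tags first */
        *pfn = obj >> OBJ_INDEX_BITS;
        *obj_idx = obj & OBJ_INDEX_MASK;
}
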
net/batman-adv/sysfs.c batadv_kobj_to_vlan(struct batadv_priv *bat_priv, struct kobject *obj) obj 104 net/batman-adv/sysfs.c if (vlan_tmp->kobj != obj) obj 347 net/bridge/br_mdb.c .obj = { obj 361 net/bridge/br_mdb.c mdb.obj.orig_dev = dev; obj 364 net/bridge/br_mdb.c switchdev_port_obj_add(lower_dev, &mdb.obj, NULL); obj 367 net/bridge/br_mdb.c switchdev_port_obj_del(lower_dev, &mdb.obj); obj 387 net/bridge/br_mdb.c .obj = { obj 406 net/bridge/br_mdb.c mdb.obj.orig_dev = port_dev; obj 412 net/bridge/br_mdb.c mdb.obj.complete_priv = complete_info; obj 413 net/bridge/br_mdb.c mdb.obj.complete = br_mdb_complete; obj 414 net/bridge/br_mdb.c if (switchdev_port_obj_add(port_dev, &mdb.obj, NULL)) obj 418 net/bridge/br_mdb.c switchdev_port_obj_del(port_dev, &mdb.obj); obj 283 net/bridge/br_private.h #define kobj_to_brport(obj) container_of(obj, struct net_bridge_port, kobj) obj 149 net/bridge/br_switchdev.c .obj.orig_dev = dev, obj 150 net/bridge/br_switchdev.c .obj.id = SWITCHDEV_OBJ_ID_PORT_VLAN, obj 156 net/bridge/br_switchdev.c return switchdev_port_obj_add(dev, &v.obj, extack); obj 162 net/bridge/br_switchdev.c .obj.orig_dev = dev, obj 163 net/bridge/br_switchdev.c .obj.id = SWITCHDEV_OBJ_ID_PORT_VLAN, obj 168 net/bridge/br_switchdev.c return switchdev_port_obj_del(dev, &v.obj); obj 654 net/core/net-sysfs.c #define to_rx_queue(obj) container_of(obj, struct netdev_rx_queue, kobj) obj 997 net/core/net-sysfs.c #define to_netdev_queue(obj) container_of(obj, struct netdev_queue, kobj) obj 134 net/core/skbuff.c void *obj; obj 141 net/core/skbuff.c obj = kmalloc_node_track_caller(size, obj 144 net/core/skbuff.c if (obj || !(gfp_pfmemalloc_allowed(flags))) obj 149 net/core/skbuff.c obj = kmalloc_node_track_caller(size, flags, node); obj 155 net/core/skbuff.c return obj; obj 393 net/dsa/port.c .obj.id = SWITCHDEV_OBJ_ID_PORT_VLAN, obj 414 net/dsa/port.c .obj.id = SWITCHDEV_OBJ_ID_PORT_VLAN, obj 312 net/dsa/slave.c const struct switchdev_obj *obj, obj 319 net/dsa/slave.c if (obj->orig_dev != dev) obj 325 net/dsa/slave.c vlan = *SWITCHDEV_OBJ_PORT_VLAN(obj); obj 345 net/dsa/slave.c const struct switchdev_obj *obj, obj 357 net/dsa/slave.c switch (obj->id) { obj 359 net/dsa/slave.c if (obj->orig_dev != dev) obj 361 net/dsa/slave.c err = dsa_port_mdb_add(dp, SWITCHDEV_OBJ_PORT_MDB(obj), trans); obj 367 net/dsa/slave.c err = dsa_port_mdb_add(dp->cpu_dp, SWITCHDEV_OBJ_PORT_MDB(obj), obj 371 net/dsa/slave.c err = dsa_slave_vlan_add(dev, obj, trans); obj 382 net/dsa/slave.c const struct switchdev_obj *obj) obj 386 net/dsa/slave.c if (obj->orig_dev != dev) obj 395 net/dsa/slave.c return dsa_port_vlan_del(dp, SWITCHDEV_OBJ_PORT_VLAN(obj)); obj 399 net/dsa/slave.c const struct switchdev_obj *obj) obj 404 net/dsa/slave.c switch (obj->id) { obj 406 net/dsa/slave.c if (obj->orig_dev != dev) obj 408 net/dsa/slave.c err = dsa_port_mdb_del(dp, SWITCHDEV_OBJ_PORT_MDB(obj)); obj 414 net/dsa/slave.c err = dsa_port_mdb_del(dp->cpu_dp, SWITCHDEV_OBJ_PORT_MDB(obj)); obj 417 net/dsa/slave.c err = dsa_slave_vlan_del(dev, obj); obj 68 net/ipv6/ila/ila_xlat.c const void *obj) obj 70 net/ipv6/ila/ila_xlat.c const struct ila_map *ila = obj; obj 47 net/ipv6/seg6_hmac.c static int seg6_hmac_cmpfn(struct rhashtable_compare_arg *arg, const void *obj) obj 49 net/ipv6/seg6_hmac.c const struct seg6_hmac_info *hinfo = obj; obj 381 net/netfilter/nf_tables_api.c struct nft_object *obj) obj 390 net/netfilter/nf_tables_api.c nft_activate_next(ctx->net, obj); obj 392 net/netfilter/nf_tables_api.c nft_trans_obj(trans) = obj; obj 398 
net/netfilter/nf_tables_api.c static int nft_delobj(struct nft_ctx *ctx, struct nft_object *obj) obj 402 net/netfilter/nf_tables_api.c err = nft_trans_obj_add(ctx, NFT_MSG_DELOBJ, obj); obj 406 net/netfilter/nf_tables_api.c nft_deactivate_next(ctx->net, obj); obj 880 net/netfilter/nf_tables_api.c const struct nft_object *obj = data; obj 882 net/netfilter/nf_tables_api.c return nft_objname_hash(&obj->key, 0, seed); obj 889 net/netfilter/nf_tables_api.c const struct nft_object *obj = ptr; obj 891 net/netfilter/nf_tables_api.c if (obj->key.table != k->table) obj 894 net/netfilter/nf_tables_api.c return strcmp(obj->key.name, k->name); obj 977 net/netfilter/nf_tables_api.c struct nft_object *obj, *ne; obj 1014 net/netfilter/nf_tables_api.c list_for_each_entry_safe(obj, ne, &ctx->table->objects, list) { obj 1015 net/netfilter/nf_tables_api.c if (!nft_is_active_next(ctx->net, obj)) obj 1018 net/netfilter/nf_tables_api.c err = nft_delobj(ctx, obj); obj 4501 net/netfilter/nf_tables_api.c struct nft_object *obj = NULL; obj 4588 net/netfilter/nf_tables_api.c obj = nft_obj_lookup(ctx->net, ctx->table, obj 4591 net/netfilter/nf_tables_api.c if (IS_ERR(obj)) { obj 4592 net/netfilter/nf_tables_api.c err = PTR_ERR(obj); obj 4662 net/netfilter/nf_tables_api.c if (obj) { obj 4663 net/netfilter/nf_tables_api.c *nft_set_ext_obj(ext) = obj; obj 4664 net/netfilter/nf_tables_api.c obj->use++; obj 4711 net/netfilter/nf_tables_api.c if (obj) obj 4712 net/netfilter/nf_tables_api.c obj->use--; obj 5032 net/netfilter/nf_tables_api.c struct nft_object *obj; obj 5045 net/netfilter/nf_tables_api.c rhl_for_each_entry_rcu(obj, tmp, list, rhlhead) { obj 5046 net/netfilter/nf_tables_api.c if (objtype == obj->ops->type->type && obj 5047 net/netfilter/nf_tables_api.c nft_active_genmask(obj, genmask)) { obj 5049 net/netfilter/nf_tables_api.c return obj; obj 5062 net/netfilter/nf_tables_api.c struct nft_object *obj; obj 5064 net/netfilter/nf_tables_api.c list_for_each_entry(obj, &table->objects, list) { obj 5065 net/netfilter/nf_tables_api.c if (be64_to_cpu(nla_get_be64(nla)) == obj->handle && obj 5066 net/netfilter/nf_tables_api.c objtype == obj->ops->type->type && obj 5067 net/netfilter/nf_tables_api.c nft_active_genmask(obj, genmask)) obj 5068 net/netfilter/nf_tables_api.c return obj; obj 5089 net/netfilter/nf_tables_api.c struct nft_object *obj; obj 5116 net/netfilter/nf_tables_api.c obj = kzalloc(sizeof(*obj) + ops->size, GFP_KERNEL); obj 5117 net/netfilter/nf_tables_api.c if (!obj) obj 5120 net/netfilter/nf_tables_api.c err = ops->init(ctx, (const struct nlattr * const *)tb, obj); obj 5124 net/netfilter/nf_tables_api.c obj->ops = ops; obj 5127 net/netfilter/nf_tables_api.c return obj; obj 5129 net/netfilter/nf_tables_api.c kfree(obj); obj 5137 net/netfilter/nf_tables_api.c struct nft_object *obj, bool reset) obj 5144 net/netfilter/nf_tables_api.c if (obj->ops->dump(skb, obj, reset) < 0) obj 5186 net/netfilter/nf_tables_api.c struct nft_object *obj) obj 5203 net/netfilter/nf_tables_api.c nft_trans_obj(trans) = obj; obj 5225 net/netfilter/nf_tables_api.c struct nft_object *obj; obj 5242 net/netfilter/nf_tables_api.c obj = nft_obj_lookup(net, table, nla[NFTA_OBJ_NAME], objtype, genmask); obj 5243 net/netfilter/nf_tables_api.c if (IS_ERR(obj)) { obj 5244 net/netfilter/nf_tables_api.c err = PTR_ERR(obj); obj 5260 net/netfilter/nf_tables_api.c return nf_tables_updobj(&ctx, type, nla[NFTA_OBJ_DATA], obj); obj 5269 net/netfilter/nf_tables_api.c obj = nft_obj_init(&ctx, type, nla[NFTA_OBJ_DATA]); obj 5270 
net/netfilter/nf_tables_api.c if (IS_ERR(obj)) { obj 5271 net/netfilter/nf_tables_api.c err = PTR_ERR(obj); obj 5274 net/netfilter/nf_tables_api.c obj->key.table = table; obj 5275 net/netfilter/nf_tables_api.c obj->handle = nf_tables_alloc_handle(table); obj 5277 net/netfilter/nf_tables_api.c obj->key.name = nla_strdup(nla[NFTA_OBJ_NAME], GFP_KERNEL); obj 5278 net/netfilter/nf_tables_api.c if (!obj->key.name) { obj 5283 net/netfilter/nf_tables_api.c err = nft_trans_obj_add(&ctx, NFT_MSG_NEWOBJ, obj); obj 5287 net/netfilter/nf_tables_api.c err = rhltable_insert(&nft_objname_ht, &obj->rhlhead, obj 5292 net/netfilter/nf_tables_api.c list_add_tail_rcu(&obj->list, &table->objects); obj 5297 net/netfilter/nf_tables_api.c INIT_LIST_HEAD(&obj->list); obj 5300 net/netfilter/nf_tables_api.c kfree(obj->key.name); obj 5302 net/netfilter/nf_tables_api.c if (obj->ops->destroy) obj 5303 net/netfilter/nf_tables_api.c obj->ops->destroy(&ctx, obj); obj 5304 net/netfilter/nf_tables_api.c kfree(obj); obj 5313 net/netfilter/nf_tables_api.c struct nft_object *obj, bool reset) obj 5329 net/netfilter/nf_tables_api.c nla_put_string(skb, NFTA_OBJ_NAME, obj->key.name) || obj 5330 net/netfilter/nf_tables_api.c nla_put_be32(skb, NFTA_OBJ_TYPE, htonl(obj->ops->type->type)) || obj 5331 net/netfilter/nf_tables_api.c nla_put_be32(skb, NFTA_OBJ_USE, htonl(obj->use)) || obj 5332 net/netfilter/nf_tables_api.c nft_object_dump(skb, NFTA_OBJ_DATA, obj, reset) || obj 5333 net/netfilter/nf_tables_api.c nla_put_be64(skb, NFTA_OBJ_HANDLE, cpu_to_be64(obj->handle), obj 5358 net/netfilter/nf_tables_api.c struct nft_object *obj; obj 5371 net/netfilter/nf_tables_api.c list_for_each_entry_rcu(obj, &table->objects, list) { obj 5372 net/netfilter/nf_tables_api.c if (!nft_is_active(net, obj)) obj 5384 net/netfilter/nf_tables_api.c obj->ops->type->type != filter->type) obj 5392 net/netfilter/nf_tables_api.c obj, reset) < 0) obj 5455 net/netfilter/nf_tables_api.c struct nft_object *obj; obj 5484 net/netfilter/nf_tables_api.c obj = nft_obj_lookup(net, table, nla[NFTA_OBJ_NAME], objtype, genmask); obj 5485 net/netfilter/nf_tables_api.c if (IS_ERR(obj)) { obj 5487 net/netfilter/nf_tables_api.c return PTR_ERR(obj); obj 5499 net/netfilter/nf_tables_api.c family, table, obj, reset); obj 5509 net/netfilter/nf_tables_api.c static void nft_obj_destroy(const struct nft_ctx *ctx, struct nft_object *obj) obj 5511 net/netfilter/nf_tables_api.c if (obj->ops->destroy) obj 5512 net/netfilter/nf_tables_api.c obj->ops->destroy(ctx, obj); obj 5514 net/netfilter/nf_tables_api.c module_put(obj->ops->type->owner); obj 5515 net/netfilter/nf_tables_api.c kfree(obj->key.name); obj 5516 net/netfilter/nf_tables_api.c kfree(obj); obj 5529 net/netfilter/nf_tables_api.c struct nft_object *obj; obj 5546 net/netfilter/nf_tables_api.c obj = nft_obj_lookup_byhandle(table, attr, objtype, genmask); obj 5549 net/netfilter/nf_tables_api.c obj = nft_obj_lookup(net, table, attr, objtype, genmask); obj 5552 net/netfilter/nf_tables_api.c if (IS_ERR(obj)) { obj 5554 net/netfilter/nf_tables_api.c return PTR_ERR(obj); obj 5556 net/netfilter/nf_tables_api.c if (obj->use > 0) { obj 5563 net/netfilter/nf_tables_api.c return nft_delobj(&ctx, obj); obj 5567 net/netfilter/nf_tables_api.c struct nft_object *obj, u32 portid, u32 seq, int event, obj 5582 net/netfilter/nf_tables_api.c table, obj, false); obj 5596 net/netfilter/nf_tables_api.c struct nft_object *obj, int event) obj 5598 net/netfilter/nf_tables_api.c nft_obj_notify(ctx->net, ctx->table, obj, ctx->portid, ctx->seq, event, obj 6539 
net/netfilter/nf_tables_api.c struct nft_object *obj; obj 6541 net/netfilter/nf_tables_api.c obj = nft_trans_obj(trans); obj 6544 net/netfilter/nf_tables_api.c if (obj->ops->update) obj 6545 net/netfilter/nf_tables_api.c obj->ops->update(obj, newobj); obj 6724 net/netfilter/nf_tables_api.c static void nft_obj_del(struct nft_object *obj) obj 6726 net/netfilter/nf_tables_api.c rhltable_remove(&nft_objname_ht, &obj->rhlhead, nft_objname_ht_params); obj 6727 net/netfilter/nf_tables_api.c list_del_rcu(&obj->list); obj 7693 net/netfilter/nf_tables_api.c struct nft_object *obj, *ne; obj 7726 net/netfilter/nf_tables_api.c list_for_each_entry_safe(obj, ne, &table->objects, list) { obj 7727 net/netfilter/nf_tables_api.c nft_obj_del(obj); obj 7729 net/netfilter/nf_tables_api.c nft_obj_destroy(&ctx, obj); obj 108 net/netfilter/nft_connlimit.c static inline void nft_connlimit_obj_eval(struct nft_object *obj, obj 112 net/netfilter/nft_connlimit.c struct nft_connlimit *priv = nft_obj_data(obj); obj 119 net/netfilter/nft_connlimit.c struct nft_object *obj) obj 121 net/netfilter/nft_connlimit.c struct nft_connlimit *priv = nft_obj_data(obj); obj 127 net/netfilter/nft_connlimit.c struct nft_object *obj) obj 129 net/netfilter/nft_connlimit.c struct nft_connlimit *priv = nft_obj_data(obj); obj 135 net/netfilter/nft_connlimit.c struct nft_object *obj, bool reset) obj 137 net/netfilter/nft_connlimit.c struct nft_connlimit *priv = nft_obj_data(obj); obj 48 net/netfilter/nft_counter.c static inline void nft_counter_obj_eval(struct nft_object *obj, obj 52 net/netfilter/nft_counter.c struct nft_counter_percpu_priv *priv = nft_obj_data(obj); obj 84 net/netfilter/nft_counter.c struct nft_object *obj) obj 86 net/netfilter/nft_counter.c struct nft_counter_percpu_priv *priv = nft_obj_data(obj); obj 97 net/netfilter/nft_counter.c struct nft_object *obj) obj 99 net/netfilter/nft_counter.c struct nft_counter_percpu_priv *priv = nft_obj_data(obj); obj 164 net/netfilter/nft_counter.c struct nft_object *obj, bool reset) obj 166 net/netfilter/nft_counter.c struct nft_counter_percpu_priv *priv = nft_obj_data(obj); obj 826 net/netfilter/nft_ct.c static void nft_ct_timeout_obj_eval(struct nft_object *obj, obj 830 net/netfilter/nft_ct.c const struct nft_ct_timeout_obj *priv = nft_obj_data(obj); obj 862 net/netfilter/nft_ct.c struct nft_object *obj) obj 864 net/netfilter/nft_ct.c struct nft_ct_timeout_obj *priv = nft_obj_data(obj); obj 917 net/netfilter/nft_ct.c struct nft_object *obj) obj 919 net/netfilter/nft_ct.c struct nft_ct_timeout_obj *priv = nft_obj_data(obj); obj 928 net/netfilter/nft_ct.c struct nft_object *obj, bool reset) obj 930 net/netfilter/nft_ct.c const struct nft_ct_timeout_obj *priv = nft_obj_data(obj); obj 978 net/netfilter/nft_ct.c struct nft_object *obj) obj 980 net/netfilter/nft_ct.c struct nft_ct_helper_obj *priv = nft_obj_data(obj); obj 1050 net/netfilter/nft_ct.c struct nft_object *obj) obj 1052 net/netfilter/nft_ct.c struct nft_ct_helper_obj *priv = nft_obj_data(obj); obj 1062 net/netfilter/nft_ct.c static void nft_ct_helper_obj_eval(struct nft_object *obj, obj 1066 net/netfilter/nft_ct.c const struct nft_ct_helper_obj *priv = nft_obj_data(obj); obj 1103 net/netfilter/nft_ct.c struct nft_object *obj, bool reset) obj 1105 net/netfilter/nft_ct.c const struct nft_ct_helper_obj *priv = nft_obj_data(obj); obj 1167 net/netfilter/nft_ct.c struct nft_object *obj) obj 1169 net/netfilter/nft_ct.c struct nft_ct_expect_obj *priv = nft_obj_data(obj); obj 1190 net/netfilter/nft_ct.c struct nft_object *obj) obj 1196 
net/netfilter/nft_ct.c struct nft_object *obj, bool reset) obj 1198 net/netfilter/nft_ct.c const struct nft_ct_expect_obj *priv = nft_obj_data(obj); obj 1210 net/netfilter/nft_ct.c static void nft_ct_expect_obj_eval(struct nft_object *obj, obj 1214 net/netfilter/nft_ct.c const struct nft_ct_expect_obj *priv = nft_obj_data(obj); obj 239 net/netfilter/nft_limit.c static void nft_limit_obj_pkts_eval(struct nft_object *obj, obj 243 net/netfilter/nft_limit.c struct nft_limit_pkts *priv = nft_obj_data(obj); obj 251 net/netfilter/nft_limit.c struct nft_object *obj) obj 253 net/netfilter/nft_limit.c struct nft_limit_pkts *priv = nft_obj_data(obj); obj 265 net/netfilter/nft_limit.c struct nft_object *obj, obj 268 net/netfilter/nft_limit.c const struct nft_limit_pkts *priv = nft_obj_data(obj); obj 282 net/netfilter/nft_limit.c static void nft_limit_obj_bytes_eval(struct nft_object *obj, obj 286 net/netfilter/nft_limit.c struct nft_limit *priv = nft_obj_data(obj); obj 295 net/netfilter/nft_limit.c struct nft_object *obj) obj 297 net/netfilter/nft_limit.c struct nft_limit *priv = nft_obj_data(obj); obj 303 net/netfilter/nft_limit.c struct nft_object *obj, obj 306 net/netfilter/nft_limit.c const struct nft_limit *priv = nft_obj_data(obj); obj 638 net/netfilter/nft_meta.c static void nft_secmark_obj_eval(struct nft_object *obj, struct nft_regs *regs, obj 641 net/netfilter/nft_meta.c const struct nft_secmark *priv = nft_obj_data(obj); obj 649 net/netfilter/nft_meta.c struct nft_object *obj) obj 651 net/netfilter/nft_meta.c struct nft_secmark *priv = nft_obj_data(obj); obj 672 net/netfilter/nft_meta.c static int nft_secmark_obj_dump(struct sk_buff *skb, struct nft_object *obj, obj 675 net/netfilter/nft_meta.c struct nft_secmark *priv = nft_obj_data(obj); obj 690 net/netfilter/nft_meta.c static void nft_secmark_obj_destroy(const struct nft_ctx *ctx, struct nft_object *obj) obj 692 net/netfilter/nft_meta.c struct nft_secmark *priv = nft_obj_data(obj); obj 20 net/netfilter/nft_objref.c struct nft_object *obj = nft_objref_priv(expr); obj 22 net/netfilter/nft_objref.c obj->ops->eval(obj, regs, pkt); obj 29 net/netfilter/nft_objref.c struct nft_object *obj = nft_objref_priv(expr); obj 38 net/netfilter/nft_objref.c obj = nft_obj_lookup(ctx->net, ctx->table, obj 41 net/netfilter/nft_objref.c if (IS_ERR(obj)) obj 44 net/netfilter/nft_objref.c nft_objref_priv(expr) = obj; obj 45 net/netfilter/nft_objref.c obj->use++; obj 52 net/netfilter/nft_objref.c const struct nft_object *obj = nft_objref_priv(expr); obj 54 net/netfilter/nft_objref.c if (nla_put_string(skb, NFTA_OBJREF_IMM_NAME, obj->key.name) || obj 56 net/netfilter/nft_objref.c htonl(obj->ops->type->type))) obj 69 net/netfilter/nft_objref.c struct nft_object *obj = nft_objref_priv(expr); obj 74 net/netfilter/nft_objref.c obj->use--; obj 80 net/netfilter/nft_objref.c struct nft_object *obj = nft_objref_priv(expr); obj 82 net/netfilter/nft_objref.c obj->use++; obj 109 net/netfilter/nft_objref.c struct nft_object *obj; obj 118 net/netfilter/nft_objref.c obj = *nft_set_ext_obj(ext); obj 119 net/netfilter/nft_objref.c obj->ops->eval(obj, regs, pkt); obj 49 net/netfilter/nft_quota.c static void nft_quota_obj_eval(struct nft_object *obj, obj 53 net/netfilter/nft_quota.c struct nft_quota *priv = nft_obj_data(obj); obj 62 net/netfilter/nft_quota.c nft_obj_notify(nft_net(pkt), obj->key.table, obj, 0, 0, obj 102 net/netfilter/nft_quota.c struct nft_object *obj) obj 104 net/netfilter/nft_quota.c struct nft_quota *priv = nft_obj_data(obj); obj 109 net/netfilter/nft_quota.c 
static void nft_quota_obj_update(struct nft_object *obj, obj 113 net/netfilter/nft_quota.c struct nft_quota *priv = nft_obj_data(obj); obj 157 net/netfilter/nft_quota.c static int nft_quota_obj_dump(struct sk_buff *skb, struct nft_object *obj, obj 160 net/netfilter/nft_quota.c struct nft_quota *priv = nft_obj_data(obj); obj 301 net/netfilter/nft_synproxy.c struct nft_object *obj) obj 303 net/netfilter/nft_synproxy.c struct nft_synproxy *priv = nft_obj_data(obj); obj 309 net/netfilter/nft_synproxy.c struct nft_object *obj) obj 315 net/netfilter/nft_synproxy.c struct nft_object *obj, bool reset) obj 317 net/netfilter/nft_synproxy.c struct nft_synproxy *priv = nft_obj_data(obj); obj 322 net/netfilter/nft_synproxy.c static void nft_synproxy_obj_eval(struct nft_object *obj, obj 326 net/netfilter/nft_synproxy.c const struct nft_synproxy *priv = nft_obj_data(obj); obj 331 net/netfilter/nft_synproxy.c static void nft_synproxy_obj_update(struct nft_object *obj, obj 335 net/netfilter/nft_synproxy.c struct nft_synproxy *priv = nft_obj_data(obj); obj 349 net/netfilter/nft_tunnel.c struct nft_object *obj) obj 351 net/netfilter/nft_tunnel.c struct nft_tunnel_obj *priv = nft_obj_data(obj); obj 430 net/netfilter/nft_tunnel.c static inline void nft_tunnel_obj_eval(struct nft_object *obj, obj 434 net/netfilter/nft_tunnel.c struct nft_tunnel_obj *priv = nft_obj_data(obj); obj 536 net/netfilter/nft_tunnel.c struct nft_object *obj, bool reset) obj 538 net/netfilter/nft_tunnel.c struct nft_tunnel_obj *priv = nft_obj_data(obj); obj 558 net/netfilter/nft_tunnel.c struct nft_object *obj) obj 560 net/netfilter/nft_tunnel.c struct nft_tunnel_obj *priv = nft_obj_data(obj); obj 2588 net/netlink/af_netlink.c void *obj = SEQ_START_TOKEN; obj 2595 net/netlink/af_netlink.c for (pos = *posp; pos && obj && !IS_ERR(obj); pos--) obj 2596 net/netlink/af_netlink.c obj = __netlink_seq_next(seq); obj 2598 net/netlink/af_netlink.c return obj; obj 1400 net/openvswitch/datapath.c u32 bucket, obj; obj 1403 net/openvswitch/datapath.c obj = cb->args[1]; obj 1404 net/openvswitch/datapath.c flow = ovs_flow_tbl_dump_next(ti, &bucket, &obj); obj 1415 net/openvswitch/datapath.c cb->args[1] = obj; obj 57 net/phonet/socket.c static struct hlist_head *pn_hash_list(u16 obj) obj 59 net/phonet/socket.c return pnsocks.hlist + (obj & PN_HASHMASK); obj 70 net/phonet/socket.c u16 obj = pn_sockaddr_get_object(spn); obj 72 net/phonet/socket.c struct hlist_head *hlist = pn_hash_list(obj); obj 81 net/phonet/socket.c if (pn_port(obj)) { obj 83 net/phonet/socket.c if (pn_port(pn->sobject) != pn_port(obj)) obj 91 net/phonet/socket.c pn_addr(pn->sobject) != pn_addr(obj)) obj 150 net/sched/cls_bpf.c struct cls_bpf_prog *obj; obj 155 net/sched/cls_bpf.c obj = prog ?: oldprog; obj 157 net/sched/cls_bpf.c tc_cls_common_offload_init(&cls_bpf.common, tp, obj->gen_flags, extack); obj 159 net/sched/cls_bpf.c cls_bpf.exts = &obj->exts; obj 162 net/sched/cls_bpf.c cls_bpf.name = obj->bpf_name; obj 163 net/sched/cls_bpf.c cls_bpf.exts_integrated = obj->exts_integrated; obj 1298 net/sctp/sm_sideeffect.c asoc = cmd->obj.asoc; obj 1304 net/sctp/sm_sideeffect.c sctp_cmd_assoc_update(commands, asoc, cmd->obj.asoc); obj 1323 net/sctp/sm_sideeffect.c sctp_cmd_new_state(commands, asoc, cmd->obj.state); obj 1329 net/sctp/sm_sideeffect.c cmd->obj.u32, NULL); obj 1333 net/sctp/sm_sideeffect.c asoc->stream.si->report_ftsn(&asoc->ulpq, cmd->obj.u32); obj 1338 net/sctp/sm_sideeffect.c cmd->obj.chunk); obj 1347 net/sctp/sm_sideeffect.c force = cmd->obj.i32; obj 1354 
net/sctp/sm_sideeffect.c cmd->obj.chunk); obj 1377 net/sctp/sm_sideeffect.c cmd->obj.init, gfp); obj 1384 net/sctp/sm_sideeffect.c if (cmd->obj.chunk) obj 1385 net/sctp/sm_sideeffect.c sctp_chunk_free(cmd->obj.chunk); obj 1395 net/sctp/sm_sideeffect.c if (cmd->obj.chunk) obj 1397 net/sctp/sm_sideeffect.c SCTP_CHUNK(cmd->obj.chunk)); obj 1445 net/sctp/sm_sideeffect.c __func__, cmd->obj.chunk, &asoc->ulpq); obj 1448 net/sctp/sm_sideeffect.c cmd->obj.chunk, obj 1455 net/sctp/sm_sideeffect.c __func__, cmd->obj.ulpevent, &asoc->ulpq); obj 1458 net/sctp/sm_sideeffect.c cmd->obj.ulpevent); obj 1468 net/sctp/sm_sideeffect.c sctp_outq_tail(&asoc->outqueue, cmd->obj.chunk, gfp); obj 1473 net/sctp/sm_sideeffect.c packet = cmd->obj.packet; obj 1480 net/sctp/sm_sideeffect.c sctp_retransmit(&asoc->outqueue, cmd->obj.transport, obj 1486 net/sctp/sm_sideeffect.c sctp_retransmit(&asoc->outqueue, cmd->obj.transport, obj 1492 net/sctp/sm_sideeffect.c sctp_do_ecn_ce_work(asoc, cmd->obj.u32); obj 1497 net/sctp/sm_sideeffect.c new_obj = sctp_do_ecn_ecne_work(asoc, cmd->obj.u32, obj 1506 net/sctp/sm_sideeffect.c sctp_do_ecn_cwr_work(asoc, cmd->obj.u32); obj 1510 net/sctp/sm_sideeffect.c sctp_cmd_setup_t2(commands, asoc, cmd->obj.chunk); obj 1514 net/sctp/sm_sideeffect.c timer = &asoc->timers[cmd->obj.to]; obj 1521 net/sctp/sm_sideeffect.c timer = &asoc->timers[cmd->obj.to]; obj 1522 net/sctp/sm_sideeffect.c timeout = asoc->timeouts[cmd->obj.to]; obj 1539 net/sctp/sm_sideeffect.c timer = &asoc->timers[cmd->obj.to]; obj 1540 net/sctp/sm_sideeffect.c timeout = asoc->timeouts[cmd->obj.to]; obj 1546 net/sctp/sm_sideeffect.c timer = &asoc->timers[cmd->obj.to]; obj 1552 net/sctp/sm_sideeffect.c chunk = cmd->obj.chunk; obj 1603 net/sctp/sm_sideeffect.c sctp_cmd_init_failed(commands, asoc, cmd->obj.u32); obj 1608 net/sctp/sm_sideeffect.c subtype, chunk, cmd->obj.u32); obj 1626 net/sctp/sm_sideeffect.c cmd->obj.u32); obj 1636 net/sctp/sm_sideeffect.c cmd->obj.transport, 0); obj 1640 net/sctp/sm_sideeffect.c t = cmd->obj.transport; obj 1645 net/sctp/sm_sideeffect.c t = cmd->obj.transport; obj 1652 net/sctp/sm_sideeffect.c t = cmd->obj.transport; obj 1661 net/sctp/sm_sideeffect.c t = cmd->obj.transport; obj 1670 net/sctp/sm_sideeffect.c error = cmd->obj.error; obj 1675 net/sctp/sm_sideeffect.c sackh.cum_tsn_ack = cmd->obj.be32; obj 1698 net/sctp/sm_sideeffect.c t = cmd->obj.transport; obj 1708 net/sctp/sm_sideeffect.c cmd->obj.chunk, obj 1713 net/sctp/sm_sideeffect.c sctp_cmd_setup_t4(commands, asoc, cmd->obj.chunk); obj 1736 net/sctp/sm_sideeffect.c sctp_cmd_set_sk_err(asoc, cmd->obj.error); obj 1740 net/sctp/sm_sideeffect.c cmd->obj.u8); obj 1754 net/sctp/sm_sideeffect.c asoc->peer.i.init_tag = cmd->obj.u32; obj 1761 net/sctp/sm_sideeffect.c sctp_cmd_send_msg(asoc, cmd->obj.msg, gfp); obj 1772 net/sctp/sm_sideeffect.c asoc = cmd->obj.asoc; obj 1785 net/sctp/sm_sideeffect.c sctp_chunk_free(cmd->obj.chunk); obj 154 net/sunrpc/auth_gss/gss_mech_switch.c struct gss_api_mech *gss_mech_get_by_OID(struct rpcsec_gss_oid *obj) obj 159 net/sunrpc/auth_gss/gss_mech_switch.c if (sprint_oid(obj->data, obj->len, buf, sizeof(buf)) < 0) obj 166 net/sunrpc/auth_gss/gss_mech_switch.c if (obj->len == pos->gm_oid.len) { obj 167 net/sunrpc/auth_gss/gss_mech_switch.c if (0 == memcmp(obj->data, pos->gm_oid.data, obj->len)) { obj 838 net/sunrpc/auth_gss/svcauth_gss.c read_u32_from_xdr_buf(struct xdr_buf *buf, int base, u32 *obj) obj 843 net/sunrpc/auth_gss/svcauth_gss.c status = read_bytes_from_xdr_buf(buf, base, &raw, sizeof(*obj)); obj 846 
net/sunrpc/auth_gss/svcauth_gss.c *obj = ntohl(raw); obj 2710 net/sunrpc/clnt.c const void *obj) obj 2715 net/sunrpc/clnt.c void *obj) obj 26 net/sunrpc/xdr.c xdr_encode_netobj(__be32 *p, const struct xdr_netobj *obj) obj 28 net/sunrpc/xdr.c unsigned int quadlen = XDR_QUADLEN(obj->len); obj 31 net/sunrpc/xdr.c *p++ = cpu_to_be32(obj->len); obj 32 net/sunrpc/xdr.c memcpy(p, obj->data, obj->len); obj 33 net/sunrpc/xdr.c return p + XDR_QUADLEN(obj->len); obj 38 net/sunrpc/xdr.c xdr_decode_netobj(__be32 *p, struct xdr_netobj *obj) obj 44 net/sunrpc/xdr.c obj->len = len; obj 45 net/sunrpc/xdr.c obj->data = (u8 *) p; obj 1194 net/sunrpc/xdr.c static void __read_bytes_from_xdr_buf(struct xdr_buf *subbuf, void *obj, unsigned int len) obj 1199 net/sunrpc/xdr.c memcpy(obj, subbuf->head[0].iov_base, this_len); obj 1201 net/sunrpc/xdr.c obj += this_len; obj 1204 net/sunrpc/xdr.c _copy_from_pages(obj, subbuf->pages, subbuf->page_base, this_len); obj 1206 net/sunrpc/xdr.c obj += this_len; obj 1208 net/sunrpc/xdr.c memcpy(obj, subbuf->tail[0].iov_base, this_len); obj 1212 net/sunrpc/xdr.c int read_bytes_from_xdr_buf(struct xdr_buf *buf, unsigned int base, void *obj, unsigned int len) obj 1220 net/sunrpc/xdr.c __read_bytes_from_xdr_buf(&subbuf, obj, len); obj 1225 net/sunrpc/xdr.c static void __write_bytes_to_xdr_buf(struct xdr_buf *subbuf, void *obj, unsigned int len) obj 1230 net/sunrpc/xdr.c memcpy(subbuf->head[0].iov_base, obj, this_len); obj 1232 net/sunrpc/xdr.c obj += this_len; obj 1235 net/sunrpc/xdr.c _copy_to_pages(subbuf->pages, subbuf->page_base, obj, this_len); obj 1237 net/sunrpc/xdr.c obj += this_len; obj 1239 net/sunrpc/xdr.c memcpy(subbuf->tail[0].iov_base, obj, this_len); obj 1243 net/sunrpc/xdr.c int write_bytes_to_xdr_buf(struct xdr_buf *buf, unsigned int base, void *obj, unsigned int len) obj 1251 net/sunrpc/xdr.c __write_bytes_to_xdr_buf(&subbuf, obj, len); obj 1257 net/sunrpc/xdr.c xdr_decode_word(struct xdr_buf *buf, unsigned int base, u32 *obj) obj 1262 net/sunrpc/xdr.c status = read_bytes_from_xdr_buf(buf, base, &raw, sizeof(*obj)); obj 1265 net/sunrpc/xdr.c *obj = be32_to_cpu(raw); obj 1271 net/sunrpc/xdr.c xdr_encode_word(struct xdr_buf *buf, unsigned int base, u32 obj) obj 1273 net/sunrpc/xdr.c __be32 raw = cpu_to_be32(obj); obj 1275 net/sunrpc/xdr.c return write_bytes_to_xdr_buf(buf, base, &raw, sizeof(obj)); obj 206 net/switchdev/switchdev.c static size_t switchdev_obj_size(const struct switchdev_obj *obj) obj 208 net/switchdev/switchdev.c switch (obj->id) { obj 223 net/switchdev/switchdev.c const struct switchdev_obj *obj, obj 231 net/switchdev/switchdev.c .obj = obj, obj 248 net/switchdev/switchdev.c const struct switchdev_obj *obj, obj 265 net/switchdev/switchdev.c dev, obj, &trans, extack); obj 276 net/switchdev/switchdev.c dev, obj, &trans, extack); obj 277 net/switchdev/switchdev.c WARN(err, "%s: Commit of object (id=%d) failed.\n", dev->name, obj->id); obj 285 net/switchdev/switchdev.c const struct switchdev_obj *obj = data; obj 288 net/switchdev/switchdev.c err = switchdev_port_obj_add_now(dev, obj, NULL); obj 291 net/switchdev/switchdev.c err, obj->id); obj 292 net/switchdev/switchdev.c if (obj->complete) obj 293 net/switchdev/switchdev.c obj->complete(dev, err, obj->complete_priv); obj 297 net/switchdev/switchdev.c const struct switchdev_obj *obj) obj 299 net/switchdev/switchdev.c return switchdev_deferred_enqueue(dev, obj, switchdev_obj_size(obj), obj 318 net/switchdev/switchdev.c const struct switchdev_obj *obj, obj 321 net/switchdev/switchdev.c if (obj->flags & 
SWITCHDEV_F_DEFER) obj 322 net/switchdev/switchdev.c return switchdev_port_obj_add_defer(dev, obj); obj 324 net/switchdev/switchdev.c return switchdev_port_obj_add_now(dev, obj, extack); obj 329 net/switchdev/switchdev.c const struct switchdev_obj *obj) obj 332 net/switchdev/switchdev.c dev, obj, NULL, NULL); obj 338 net/switchdev/switchdev.c const struct switchdev_obj *obj = data; obj 341 net/switchdev/switchdev.c err = switchdev_port_obj_del_now(dev, obj); obj 344 net/switchdev/switchdev.c err, obj->id); obj 345 net/switchdev/switchdev.c if (obj->complete) obj 346 net/switchdev/switchdev.c obj->complete(dev, err, obj->complete_priv); obj 350 net/switchdev/switchdev.c const struct switchdev_obj *obj) obj 352 net/switchdev/switchdev.c return switchdev_deferred_enqueue(dev, obj, switchdev_obj_size(obj), obj 367 net/switchdev/switchdev.c const struct switchdev_obj *obj) obj 369 net/switchdev/switchdev.c if (obj->flags & SWITCHDEV_F_DEFER) obj 370 net/switchdev/switchdev.c return switchdev_port_obj_del_defer(dev, obj); obj 372 net/switchdev/switchdev.c return switchdev_port_obj_del_now(dev, obj); obj 452 net/switchdev/switchdev.c const struct switchdev_obj *obj, obj 466 net/switchdev/switchdev.c return add_cb(dev, port_obj_info->obj, port_obj_info->trans, obj 491 net/switchdev/switchdev.c const struct switchdev_obj *obj, obj 509 net/switchdev/switchdev.c const struct switchdev_obj *obj)) obj 518 net/switchdev/switchdev.c return del_cb(dev, port_obj_info->obj); obj 542 net/switchdev/switchdev.c const struct switchdev_obj *obj)) obj 61 samples/bpf/fds_example.c struct bpf_object *obj; obj 66 samples/bpf/fds_example.c &obj, &prog_fd)); obj 73 samples/bpf/hbm.c struct bpf_object *obj; obj 138 samples/bpf/hbm.c if (bpf_prog_load_xattr(&prog_load_attr, &obj, &bpfprog_fd)) obj 141 samples/bpf/hbm.c map = bpf_object__find_map_by_name(obj, "queue_stats"); obj 13 samples/bpf/sockex1_user.c struct bpf_object *obj; obj 22 samples/bpf/sockex1_user.c &obj, &prog_fd)) obj 25 samples/bpf/sockex1_user.c map_fd = bpf_object__find_map_fd_by_name(obj, "my_map"); obj 20 samples/bpf/sockex2_user.c struct bpf_object *obj; obj 30 samples/bpf/sockex2_user.c &obj, &prog_fd)) obj 33 samples/bpf/sockex2_user.c map_fd = bpf_object__find_map_fd_by_name(obj, "hash_map"); obj 90 samples/bpf/xdp1_user.c struct bpf_object *obj; obj 131 samples/bpf/xdp1_user.c if (bpf_prog_load_xattr(&prog_load_attr, &obj, &prog_fd)) obj 134 samples/bpf/xdp1_user.c map = bpf_map__next(NULL, obj); obj 93 samples/bpf/xdp_adjust_tail_user.c struct bpf_object *obj; obj 150 samples/bpf/xdp_adjust_tail_user.c if (bpf_prog_load_xattr(&prog_load_attr, &obj, &prog_fd)) obj 153 samples/bpf/xdp_adjust_tail_user.c map = bpf_map__next(NULL, obj); obj 81 samples/bpf/xdp_fwd_user.c struct bpf_object *obj; obj 115 samples/bpf/xdp_fwd_user.c err = bpf_prog_load_xattr(&prog_load_attr, &obj, &prog_fd); obj 124 samples/bpf/xdp_fwd_user.c prog = bpf_object__find_program_by_title(obj, prog_name); obj 130 samples/bpf/xdp_fwd_user.c map_fd = bpf_map__fd(bpf_object__find_map_by_name(obj, obj 106 samples/bpf/xdp_redirect_cpu_user.c static void print_avail_progs(struct bpf_object *obj) obj 110 samples/bpf/xdp_redirect_cpu_user.c bpf_object__for_each_program(pos, obj) { obj 116 samples/bpf/xdp_redirect_cpu_user.c static void usage(char *argv[], struct bpf_object *obj) obj 135 samples/bpf/xdp_redirect_cpu_user.c print_avail_progs(obj); obj 603 samples/bpf/xdp_redirect_cpu_user.c static struct bpf_link * attach_tp(struct bpf_object *obj, obj 617 
samples/bpf/xdp_redirect_cpu_user.c prog = bpf_object__find_program_by_title(obj, sec_name); obj 630 samples/bpf/xdp_redirect_cpu_user.c static void init_tracepoints(struct bpf_object *obj) { obj 631 samples/bpf/xdp_redirect_cpu_user.c tp_links[tp_cnt++] = attach_tp(obj, "xdp", "xdp_redirect_err"); obj 632 samples/bpf/xdp_redirect_cpu_user.c tp_links[tp_cnt++] = attach_tp(obj, "xdp", "xdp_redirect_map_err"); obj 633 samples/bpf/xdp_redirect_cpu_user.c tp_links[tp_cnt++] = attach_tp(obj, "xdp", "xdp_exception"); obj 634 samples/bpf/xdp_redirect_cpu_user.c tp_links[tp_cnt++] = attach_tp(obj, "xdp", "xdp_cpumap_enqueue"); obj 635 samples/bpf/xdp_redirect_cpu_user.c tp_links[tp_cnt++] = attach_tp(obj, "xdp", "xdp_cpumap_kthread"); obj 638 samples/bpf/xdp_redirect_cpu_user.c static int init_map_fds(struct bpf_object *obj) obj 642 samples/bpf/xdp_redirect_cpu_user.c bpf_object__find_map_fd_by_name(obj, "redirect_err_cnt"); obj 644 samples/bpf/xdp_redirect_cpu_user.c bpf_object__find_map_fd_by_name(obj, "exception_cnt"); obj 646 samples/bpf/xdp_redirect_cpu_user.c bpf_object__find_map_fd_by_name(obj, "cpumap_enqueue_cnt"); obj 648 samples/bpf/xdp_redirect_cpu_user.c bpf_object__find_map_fd_by_name(obj, "cpumap_kthread_cnt"); obj 651 samples/bpf/xdp_redirect_cpu_user.c rx_cnt_map_fd = bpf_object__find_map_fd_by_name(obj, "rx_cnt"); obj 652 samples/bpf/xdp_redirect_cpu_user.c cpu_map_fd = bpf_object__find_map_fd_by_name(obj, "cpu_map"); obj 654 samples/bpf/xdp_redirect_cpu_user.c bpf_object__find_map_fd_by_name(obj, "cpus_available"); obj 655 samples/bpf/xdp_redirect_cpu_user.c cpus_count_map_fd = bpf_object__find_map_fd_by_name(obj, "cpus_count"); obj 657 samples/bpf/xdp_redirect_cpu_user.c bpf_object__find_map_fd_by_name(obj, "cpus_iterator"); obj 681 samples/bpf/xdp_redirect_cpu_user.c struct bpf_object *obj; obj 707 samples/bpf/xdp_redirect_cpu_user.c if (bpf_prog_load_xattr(&prog_load_attr, &obj, &prog_fd)) obj 715 samples/bpf/xdp_redirect_cpu_user.c init_tracepoints(obj); obj 716 samples/bpf/xdp_redirect_cpu_user.c if (init_map_fds(obj) < 0) { obj 778 samples/bpf/xdp_redirect_cpu_user.c usage(argv, obj); obj 785 samples/bpf/xdp_redirect_cpu_user.c usage(argv, obj); obj 792 samples/bpf/xdp_redirect_cpu_user.c usage(argv, obj); obj 800 samples/bpf/xdp_redirect_cpu_user.c prog = bpf_object__find_program_by_title(obj, prog_name); obj 108 samples/bpf/xdp_redirect_map_user.c struct bpf_object *obj; obj 153 samples/bpf/xdp_redirect_map_user.c if (bpf_prog_load_xattr(&prog_load_attr, &obj, &prog_fd)) obj 156 samples/bpf/xdp_redirect_map_user.c prog = bpf_program__next(NULL, obj); obj 157 samples/bpf/xdp_redirect_map_user.c dummy_prog = bpf_program__next(prog, obj); obj 171 samples/bpf/xdp_redirect_map_user.c tx_port_map_fd = bpf_object__find_map_fd_by_name(obj, "tx_port"); obj 172 samples/bpf/xdp_redirect_map_user.c rxcnt_map_fd = bpf_object__find_map_fd_by_name(obj, "rxcnt"); obj 109 samples/bpf/xdp_redirect_user.c struct bpf_object *obj; obj 154 samples/bpf/xdp_redirect_user.c if (bpf_prog_load_xattr(&prog_load_attr, &obj, &prog_fd)) obj 157 samples/bpf/xdp_redirect_user.c prog = bpf_program__next(NULL, obj); obj 158 samples/bpf/xdp_redirect_user.c dummy_prog = bpf_program__next(prog, obj); obj 172 samples/bpf/xdp_redirect_user.c tx_port_map_fd = bpf_object__find_map_fd_by_name(obj, "tx_port"); obj 173 samples/bpf/xdp_redirect_user.c rxcnt_map_fd = bpf_object__find_map_fd_by_name(obj, "rxcnt"); obj 635 samples/bpf/xdp_router_ipv4_user.c struct bpf_object *obj; obj 675 
samples/bpf/xdp_router_ipv4_user.c if (bpf_prog_load_xattr(&prog_load_attr, &obj, &prog_fd)) obj 684 samples/bpf/xdp_router_ipv4_user.c lpm_map_fd = bpf_object__find_map_fd_by_name(obj, "lpm_map"); obj 685 samples/bpf/xdp_router_ipv4_user.c rxcnt_map_fd = bpf_object__find_map_fd_by_name(obj, "rxcnt"); obj 686 samples/bpf/xdp_router_ipv4_user.c arp_table_map_fd = bpf_object__find_map_fd_by_name(obj, "arp_table"); obj 687 samples/bpf/xdp_router_ipv4_user.c exact_match_map_fd = bpf_object__find_map_fd_by_name(obj, obj 689 samples/bpf/xdp_router_ipv4_user.c tx_port_map_fd = bpf_object__find_map_fd_by_name(obj, "tx_port"); obj 469 samples/bpf/xdp_rxq_info_user.c struct bpf_object *obj; obj 489 samples/bpf/xdp_rxq_info_user.c if (bpf_prog_load_xattr(&prog_load_attr, &obj, &prog_fd)) obj 492 samples/bpf/xdp_rxq_info_user.c map = bpf_object__find_map_by_name(obj, "config_map"); obj 493 samples/bpf/xdp_rxq_info_user.c stats_global_map = bpf_object__find_map_by_name(obj, "stats_global_map"); obj 494 samples/bpf/xdp_rxq_info_user.c rx_queue_index_map = bpf_object__find_map_by_name(obj, "rx_queue_index_map"); obj 120 samples/bpf/xdp_sample_pkts_user.c struct bpf_object *obj; obj 149 samples/bpf/xdp_sample_pkts_user.c if (bpf_prog_load_xattr(&prog_load_attr, &obj, &prog_fd)) obj 157 samples/bpf/xdp_sample_pkts_user.c map = bpf_map__next(NULL, obj); obj 166 samples/bpf/xdp_tx_iptunnel_user.c struct bpf_object *obj; obj 267 samples/bpf/xdp_tx_iptunnel_user.c if (bpf_prog_load_xattr(&prog_load_attr, &obj, &prog_fd)) obj 275 samples/bpf/xdp_tx_iptunnel_user.c rxcnt_map_fd = bpf_object__find_map_fd_by_name(obj, "rxcnt"); obj 276 samples/bpf/xdp_tx_iptunnel_user.c vip2tnl_map_fd = bpf_object__find_map_fd_by_name(obj, "vip2tnl"); obj 98 samples/livepatch/livepatch-callbacks-demo.c static void callback_info(const char *callback, struct klp_object *obj) obj 100 samples/livepatch/livepatch-callbacks-demo.c if (obj->mod) obj 101 samples/livepatch/livepatch-callbacks-demo.c pr_info("%s: %s -> %s\n", callback, obj->mod->name, obj 102 samples/livepatch/livepatch-callbacks-demo.c module_state[obj->mod->state]); obj 108 samples/livepatch/livepatch-callbacks-demo.c static int pre_patch_callback(struct klp_object *obj) obj 110 samples/livepatch/livepatch-callbacks-demo.c callback_info(__func__, obj); obj 115 samples/livepatch/livepatch-callbacks-demo.c static void post_patch_callback(struct klp_object *obj) obj 117 samples/livepatch/livepatch-callbacks-demo.c callback_info(__func__, obj); obj 121 samples/livepatch/livepatch-callbacks-demo.c static void pre_unpatch_callback(struct klp_object *obj) obj 123 samples/livepatch/livepatch-callbacks-demo.c callback_info(__func__, obj); obj 127 samples/livepatch/livepatch-callbacks-demo.c static void post_unpatch_callback(struct klp_object *obj) obj 129 samples/livepatch/livepatch-callbacks-demo.c callback_info(__func__, obj); obj 53 samples/livepatch/livepatch-shadow-fix1.c static int shadow_leak_ctor(void *obj, void *shadow_data, void *ctor_data) obj 94 samples/livepatch/livepatch-shadow-fix1.c static void livepatch_fix1_dummy_leak_dtor(void *obj, void *shadow_data) obj 96 samples/livepatch/livepatch-shadow-fix1.c void *d = obj; obj 59 samples/livepatch/livepatch-shadow-fix2.c static void livepatch_fix2_dummy_leak_dtor(void *obj, void *shadow_data) obj 61 samples/livepatch/livepatch-shadow-fix2.c void *d = obj; obj 657 security/apparmor/policy_unpack.c static int datacmp(struct rhashtable_compare_arg *arg, const void *obj) obj 659 security/apparmor/policy_unpack.c const struct 
aa_data *data = obj; obj 154 security/tomoyo/audit.c struct tomoyo_obj_info *obj = r->obj; obj 179 security/tomoyo/audit.c if (!obj) obj 181 security/tomoyo/audit.c if (!obj->validate_done) { obj 182 security/tomoyo/audit.c tomoyo_get_attributes(obj); obj 183 security/tomoyo/audit.c obj->validate_done = true; obj 190 security/tomoyo/audit.c if (!obj->stat_valid[i]) obj 192 security/tomoyo/audit.c stat = &obj->stat[i]; obj 269 security/tomoyo/audit.c } else if (r->obj && r->obj->symlink_target) { obj 270 security/tomoyo/audit.c symlink = r->obj->symlink_target->name; obj 425 security/tomoyo/common.h struct tomoyo_obj_info *obj; obj 623 security/tomoyo/common.h struct tomoyo_obj_info obj; obj 1054 security/tomoyo/common.h void tomoyo_get_attributes(struct tomoyo_obj_info *obj); obj 713 security/tomoyo/condition.c void tomoyo_get_attributes(struct tomoyo_obj_info *obj) obj 723 security/tomoyo/condition.c dentry = obj->path1.dentry; obj 728 security/tomoyo/condition.c dentry = obj->path2.dentry; obj 740 security/tomoyo/condition.c struct tomoyo_mini_stat *stat = &obj->stat[i]; obj 748 security/tomoyo/condition.c obj->stat_valid[i] = true; obj 776 security/tomoyo/condition.c struct tomoyo_obj_info *obj; obj 787 security/tomoyo/condition.c obj = r->obj; obj 818 security/tomoyo/condition.c symlink = obj ? obj->symlink_target : NULL; obj 941 security/tomoyo/condition.c if (!obj) obj 943 security/tomoyo/condition.c if (!obj->validate_done) { obj 944 security/tomoyo/condition.c tomoyo_get_attributes(obj); obj 945 security/tomoyo/condition.c obj->validate_done = true; obj 991 security/tomoyo/condition.c if (!obj->stat_valid[stat_index]) obj 993 security/tomoyo/condition.c stat = &obj->stat[stat_index]; obj 725 security/tomoyo/domain.c ee->r.obj = &ee->obj; obj 726 security/tomoyo/domain.c ee->obj.path1 = bprm->file->f_path; obj 712 security/tomoyo/file.c struct tomoyo_obj_info obj = { obj 725 security/tomoyo/file.c r.obj = &obj; obj 760 security/tomoyo/file.c struct tomoyo_obj_info obj = { obj 775 security/tomoyo/file.c r.obj = &obj; obj 806 security/tomoyo/file.c struct tomoyo_obj_info obj = { obj 824 security/tomoyo/file.c r.obj = &obj; obj 835 security/tomoyo/file.c obj.symlink_target = &symlink_target; obj 863 security/tomoyo/file.c struct tomoyo_obj_info obj = { obj 876 security/tomoyo/file.c r.obj = &obj; obj 910 security/tomoyo/file.c struct tomoyo_obj_info obj = { obj 936 security/tomoyo/file.c r.obj = &obj; obj 82 security/tomoyo/mount.c struct tomoyo_obj_info obj = { }; obj 94 security/tomoyo/mount.c r->obj = &obj; obj 104 security/tomoyo/mount.c obj.path2 = *dir; obj 140 security/tomoyo/mount.c obj.path1 = path; obj 175 security/tomoyo/mount.c if (obj.path1.dentry) obj 176 security/tomoyo/mount.c path_put(&obj.path1); obj 17 sound/hda/intel-nhlt.c union acpi_object *obj; obj 27 sound/hda/intel-nhlt.c obj = acpi_evaluate_dsm(handle, &osc_guid, 1, 1, NULL); obj 29 sound/hda/intel-nhlt.c if (!obj) obj 32 sound/hda/intel-nhlt.c if (obj->type != ACPI_TYPE_BUFFER) { obj 34 sound/hda/intel-nhlt.c ACPI_FREE(obj); obj 38 sound/hda/intel-nhlt.c nhlt_ptr = (struct nhlt_resource_desc *)obj->buffer.pointer; obj 43 sound/hda/intel-nhlt.c ACPI_FREE(obj); obj 837 sound/oss/dmasound/dmasound_atari.c static void AtaFree(void *obj, unsigned int size) obj 839 sound/oss/dmasound/dmasound_atari.c atari_stram_free( obj ); obj 75 sound/oss/dmasound/dmasound_paula.c static void AmiFree(void *obj, unsigned int size); obj 324 sound/oss/dmasound/dmasound_paula.c static void AmiFree(void *obj, unsigned int size) obj 
326 sound/oss/dmasound/dmasound_paula.c amiga_chip_free (obj); obj 166 sound/pci/asihpi/asihpi.c unsigned int obj = hpi_handle_object(h_stream); obj 170 sound/pci/asihpi/asihpi.c hpi_init_message_response(&hm, &hr, obj, obj 171 sound/pci/asihpi/asihpi.c obj == HPI_OBJ_OSTREAM ? obj 189 sound/pci/asihpi/asihpi.c unsigned int obj = hpi_handle_object(h_stream); obj 194 sound/pci/asihpi/asihpi.c hpi_init_message_response(&hm, &hr, obj, obj 195 sound/pci/asihpi/asihpi.c obj == HPI_OBJ_OSTREAM ? obj 378 sound/pci/asihpi/hpi_internal.h #define HPI_FUNC_ID(obj, i) (HPI_OBJ_##obj * HPI_OBJ_FUNCTION_SPACING + i) obj 119 sound/pci/asihpi/hpios.h #define hpios_msgxlock_init(obj) spin_lock_init(&(obj)->lock) obj 120 sound/pci/asihpi/hpios.h #define hpios_msgxlock_lock(obj) cond_lock(obj) obj 121 sound/pci/asihpi/hpios.h #define hpios_msgxlock_unlock(obj) cond_unlock(obj) obj 123 sound/pci/asihpi/hpios.h #define hpios_dsplock_init(obj) spin_lock_init(&(obj)->dsp_lock.lock) obj 124 sound/pci/asihpi/hpios.h #define hpios_dsplock_lock(obj) cond_lock(&(obj)->dsp_lock) obj 125 sound/pci/asihpi/hpios.h #define hpios_dsplock_unlock(obj) cond_unlock(&(obj)->dsp_lock) obj 132 sound/pci/asihpi/hpios.h #define hpios_alistlock_init(obj) spin_lock_init(&((obj)->list_lock.lock)) obj 133 sound/pci/asihpi/hpios.h #define hpios_alistlock_lock(obj) spin_lock(&((obj)->list_lock.lock)) obj 134 sound/pci/asihpi/hpios.h #define hpios_alistlock_unlock(obj) spin_unlock(&((obj)->list_lock.lock)) obj 74 sound/usb/clock.c #define DEFINE_FIND_HELPER(name, obj, validator, type) \ obj 75 sound/usb/clock.c static obj *name(struct usb_host_interface *iface, int id) \ obj 565 tools/bpf/bpftool/btf.c struct btf_attach_point *obj; obj 570 tools/bpf/bpftool/btf.c hash_for_each_safe(tab->table, bkt, tmp, obj, hash) { obj 571 tools/bpf/bpftool/btf.c hash_del(&obj->hash); obj 572 tools/bpf/bpftool/btf.c free(obj); obj 707 tools/bpf/bpftool/btf.c struct btf_attach_point *obj; obj 714 tools/bpf/bpftool/btf.c hash_for_each_possible(btf_prog_table->table, obj, hash, info->id) { obj 715 tools/bpf/bpftool/btf.c if (obj->btf_id == info->id) obj 717 tools/bpf/bpftool/btf.c obj->obj_id); obj 721 tools/bpf/bpftool/btf.c hash_for_each_possible(btf_map_table->table, obj, hash, info->id) { obj 722 tools/bpf/bpftool/btf.c if (obj->btf_id == info->id) obj 724 tools/bpf/bpftool/btf.c obj->obj_id); obj 735 tools/bpf/bpftool/btf.c struct btf_attach_point *obj; obj 743 tools/bpf/bpftool/btf.c hash_for_each_possible(btf_prog_table->table, obj, hash, obj 745 tools/bpf/bpftool/btf.c if (obj->btf_id == info->id) obj 746 tools/bpf/bpftool/btf.c jsonw_uint(json_wtr, obj->obj_id); obj 752 tools/bpf/bpftool/btf.c hash_for_each_possible(btf_map_table->table, obj, hash, obj 754 tools/bpf/bpftool/btf.c if (obj->btf_id == info->id) obj 755 tools/bpf/bpftool/btf.c jsonw_uint(json_wtr, obj->obj_id); obj 425 tools/bpf/bpftool/common.c struct pinned_obj *obj; obj 429 tools/bpf/bpftool/common.c hash_for_each_safe(tab->table, bkt, tmp, obj, hash) { obj 430 tools/bpf/bpftool/common.c hash_del(&obj->hash); obj 431 tools/bpf/bpftool/common.c free(obj->path); obj 432 tools/bpf/bpftool/common.c free(obj); obj 548 tools/bpf/bpftool/map.c struct pinned_obj *obj; obj 552 tools/bpf/bpftool/map.c hash_for_each_possible(map_table.table, obj, hash, info->id) { obj 553 tools/bpf/bpftool/map.c if (obj->id == info->id) obj 554 tools/bpf/bpftool/map.c jsonw_string(json_wtr, obj->path); obj 616 tools/bpf/bpftool/map.c struct pinned_obj *obj; obj 618 tools/bpf/bpftool/map.c 
hash_for_each_possible(map_table.table, obj, hash, info->id) { obj 619 tools/bpf/bpftool/map.c if (obj->id == info->id) obj 620 tools/bpf/bpftool/map.c printf("\n\tpinned %s", obj->path); obj 257 tools/bpf/bpftool/prog.c struct pinned_obj *obj; obj 261 tools/bpf/bpftool/prog.c hash_for_each_possible(prog_table.table, obj, hash, info->id) { obj 262 tools/bpf/bpftool/prog.c if (obj->id == info->id) obj 263 tools/bpf/bpftool/prog.c jsonw_string(json_wtr, obj->path); obj 318 tools/bpf/bpftool/prog.c struct pinned_obj *obj; obj 320 tools/bpf/bpftool/prog.c hash_for_each_possible(prog_table.table, obj, hash, info->id) { obj 321 tools/bpf/bpftool/prog.c if (obj->id == info->id) obj 322 tools/bpf/bpftool/prog.c printf("\n\tpinned %s", obj->path); obj 1103 tools/bpf/bpftool/prog.c struct bpf_object *obj; obj 1227 tools/bpf/bpftool/prog.c obj = __bpf_object__open_xattr(&open_attr, bpf_flags); obj 1228 tools/bpf/bpftool/prog.c if (IS_ERR_OR_NULL(obj)) { obj 1233 tools/bpf/bpftool/prog.c bpf_object__for_each_program(pos, obj) { obj 1259 tools/bpf/bpftool/prog.c bpf_object__for_each_map(map, obj) { obj 1280 tools/bpf/bpftool/prog.c bpf_object__for_each_map(map, obj) { obj 1306 tools/bpf/bpftool/prog.c load_attr.obj = obj; obj 1322 tools/bpf/bpftool/prog.c prog = bpf_program__next(NULL, obj); obj 1335 tools/bpf/bpftool/prog.c err = bpf_object__pin_programs(obj, pinfile); obj 1343 tools/bpf/bpftool/prog.c err = bpf_object__pin_maps(obj, pinmaps); obj 1353 tools/bpf/bpftool/prog.c bpf_object__close(obj); obj 1364 tools/bpf/bpftool/prog.c bpf_object__unpin_programs(obj, pinfile); obj 1366 tools/bpf/bpftool/prog.c bpf_object__close(obj); obj 105 tools/include/linux/hashtable.h #define hash_for_each(name, bkt, obj, member) \ obj 106 tools/include/linux/hashtable.h for ((bkt) = 0, obj = NULL; obj == NULL && (bkt) < HASH_SIZE(name);\ obj 108 tools/include/linux/hashtable.h hlist_for_each_entry(obj, &name[bkt], member) obj 119 tools/include/linux/hashtable.h #define hash_for_each_safe(name, bkt, tmp, obj, member) \ obj 120 tools/include/linux/hashtable.h for ((bkt) = 0, obj = NULL; obj == NULL && (bkt) < HASH_SIZE(name);\ obj 122 tools/include/linux/hashtable.h hlist_for_each_entry_safe(obj, tmp, &name[bkt], member) obj 132 tools/include/linux/hashtable.h #define hash_for_each_possible(name, obj, member, key) \ obj 133 tools/include/linux/hashtable.h hlist_for_each_entry(obj, &name[hash_min(key, HASH_BITS(name))], member) obj 144 tools/include/linux/hashtable.h #define hash_for_each_possible_safe(name, obj, tmp, member, key) \ obj 145 tools/include/linux/hashtable.h hlist_for_each_entry_safe(obj, tmp,\ obj 505 tools/lib/bpf/btf.c static int btf_fixup_datasec(struct bpf_object *obj, struct btf *btf, obj 520 tools/lib/bpf/btf.c ret = bpf_object__section_size(obj, name, &size); obj 546 tools/lib/bpf/btf.c ret = bpf_object__variable_offset(obj, name, &off); obj 560 tools/lib/bpf/btf.c int btf__finalize_data(struct bpf_object *obj, struct btf *btf) obj 574 tools/lib/bpf/btf.c err = btf_fixup_datasec(obj, btf, t); obj 71 tools/lib/bpf/btf.h LIBBPF_API int btf__finalize_data(struct bpf_object *obj, struct btf *btf); obj 185 tools/lib/bpf/libbpf.c struct bpf_object *obj; obj 406 tools/lib/bpf/libbpf.c bpf_object__add_program(struct bpf_object *obj, void *data, size_t size, obj 416 tools/lib/bpf/libbpf.c prog.caps = &obj->caps; obj 417 tools/lib/bpf/libbpf.c progs = obj->programs; obj 418 tools/lib/bpf/libbpf.c nr_progs = obj->nr_programs; obj 434 tools/lib/bpf/libbpf.c obj->programs = progs; obj 435 
tools/lib/bpf/libbpf.c obj->nr_programs = nr_progs + 1; obj 436 tools/lib/bpf/libbpf.c prog.obj = obj; obj 442 tools/lib/bpf/libbpf.c bpf_object__init_prog_names(struct bpf_object *obj) obj 444 tools/lib/bpf/libbpf.c Elf_Data *symbols = obj->efile.symbols; obj 448 tools/lib/bpf/libbpf.c for (pi = 0; pi < obj->nr_programs; pi++) { obj 451 tools/lib/bpf/libbpf.c prog = &obj->programs[pi]; obj 464 tools/lib/bpf/libbpf.c name = elf_strptr(obj->efile.elf, obj 465 tools/lib/bpf/libbpf.c obj->efile.strtabidx, obj 474 tools/lib/bpf/libbpf.c if (!name && prog->idx == obj->efile.text_shndx) obj 498 tools/lib/bpf/libbpf.c struct bpf_object *obj; obj 501 tools/lib/bpf/libbpf.c obj = calloc(1, sizeof(struct bpf_object) + strlen(path) + 1); obj 502 tools/lib/bpf/libbpf.c if (!obj) { obj 507 tools/lib/bpf/libbpf.c strcpy(obj->path, path); obj 509 tools/lib/bpf/libbpf.c strncpy(obj->name, basename((void *)path), sizeof(obj->name) - 1); obj 510 tools/lib/bpf/libbpf.c end = strchr(obj->name, '.'); obj 514 tools/lib/bpf/libbpf.c obj->efile.fd = -1; obj 521 tools/lib/bpf/libbpf.c obj->efile.obj_buf = obj_buf; obj 522 tools/lib/bpf/libbpf.c obj->efile.obj_buf_sz = obj_buf_sz; obj 523 tools/lib/bpf/libbpf.c obj->efile.maps_shndx = -1; obj 524 tools/lib/bpf/libbpf.c obj->efile.btf_maps_shndx = -1; obj 525 tools/lib/bpf/libbpf.c obj->efile.data_shndx = -1; obj 526 tools/lib/bpf/libbpf.c obj->efile.rodata_shndx = -1; obj 527 tools/lib/bpf/libbpf.c obj->efile.bss_shndx = -1; obj 529 tools/lib/bpf/libbpf.c obj->loaded = false; obj 531 tools/lib/bpf/libbpf.c INIT_LIST_HEAD(&obj->list); obj 532 tools/lib/bpf/libbpf.c list_add(&obj->list, &bpf_objects_list); obj 533 tools/lib/bpf/libbpf.c return obj; obj 536 tools/lib/bpf/libbpf.c static void bpf_object__elf_finish(struct bpf_object *obj) obj 538 tools/lib/bpf/libbpf.c if (!obj_elf_valid(obj)) obj 541 tools/lib/bpf/libbpf.c if (obj->efile.elf) { obj 542 tools/lib/bpf/libbpf.c elf_end(obj->efile.elf); obj 543 tools/lib/bpf/libbpf.c obj->efile.elf = NULL; obj 545 tools/lib/bpf/libbpf.c obj->efile.symbols = NULL; obj 546 tools/lib/bpf/libbpf.c obj->efile.data = NULL; obj 547 tools/lib/bpf/libbpf.c obj->efile.rodata = NULL; obj 548 tools/lib/bpf/libbpf.c obj->efile.bss = NULL; obj 550 tools/lib/bpf/libbpf.c zfree(&obj->efile.reloc); obj 551 tools/lib/bpf/libbpf.c obj->efile.nr_reloc = 0; obj 552 tools/lib/bpf/libbpf.c zclose(obj->efile.fd); obj 553 tools/lib/bpf/libbpf.c obj->efile.obj_buf = NULL; obj 554 tools/lib/bpf/libbpf.c obj->efile.obj_buf_sz = 0; obj 557 tools/lib/bpf/libbpf.c static int bpf_object__elf_init(struct bpf_object *obj) obj 562 tools/lib/bpf/libbpf.c if (obj_elf_valid(obj)) { obj 567 tools/lib/bpf/libbpf.c if (obj->efile.obj_buf_sz > 0) { obj 572 tools/lib/bpf/libbpf.c obj->efile.elf = elf_memory(obj->efile.obj_buf, obj 573 tools/lib/bpf/libbpf.c obj->efile.obj_buf_sz); obj 575 tools/lib/bpf/libbpf.c obj->efile.fd = open(obj->path, O_RDONLY); obj 576 tools/lib/bpf/libbpf.c if (obj->efile.fd < 0) { obj 581 tools/lib/bpf/libbpf.c pr_warning("failed to open %s: %s\n", obj->path, cp); obj 585 tools/lib/bpf/libbpf.c obj->efile.elf = elf_begin(obj->efile.fd, obj 589 tools/lib/bpf/libbpf.c if (!obj->efile.elf) { obj 590 tools/lib/bpf/libbpf.c pr_warning("failed to open %s as ELF file\n", obj->path); obj 595 tools/lib/bpf/libbpf.c if (!gelf_getehdr(obj->efile.elf, &obj->efile.ehdr)) { obj 596 tools/lib/bpf/libbpf.c pr_warning("failed to get EHDR from %s\n", obj->path); obj 600 tools/lib/bpf/libbpf.c ep = &obj->efile.ehdr; obj 605 tools/lib/bpf/libbpf.c 
pr_warning("%s is not an eBPF object file\n", obj->path); obj 612 tools/lib/bpf/libbpf.c bpf_object__elf_finish(obj); obj 616 tools/lib/bpf/libbpf.c static int bpf_object__check_endianness(struct bpf_object *obj) obj 619 tools/lib/bpf/libbpf.c if (obj->efile.ehdr.e_ident[EI_DATA] == ELFDATA2LSB) obj 622 tools/lib/bpf/libbpf.c if (obj->efile.ehdr.e_ident[EI_DATA] == ELFDATA2MSB) obj 632 tools/lib/bpf/libbpf.c bpf_object__init_license(struct bpf_object *obj, void *data, size_t size) obj 634 tools/lib/bpf/libbpf.c memcpy(obj->license, data, min(size, sizeof(obj->license) - 1)); obj 635 tools/lib/bpf/libbpf.c pr_debug("license of %s is %s\n", obj->path, obj->license); obj 640 tools/lib/bpf/libbpf.c bpf_object__init_kversion(struct bpf_object *obj, void *data, size_t size) obj 645 tools/lib/bpf/libbpf.c pr_warning("invalid kver section in %s\n", obj->path); obj 649 tools/lib/bpf/libbpf.c obj->kern_version = kver; obj 650 tools/lib/bpf/libbpf.c pr_debug("kernel version of %s is %x\n", obj->path, obj->kern_version); obj 672 tools/lib/bpf/libbpf.c static int bpf_object_search_section_size(const struct bpf_object *obj, obj 675 tools/lib/bpf/libbpf.c const GElf_Ehdr *ep = &obj->efile.ehdr; obj 676 tools/lib/bpf/libbpf.c Elf *elf = obj->efile.elf; obj 688 tools/lib/bpf/libbpf.c idx, obj->path); obj 695 tools/lib/bpf/libbpf.c idx, obj->path); obj 705 tools/lib/bpf/libbpf.c idx, name, obj->path); obj 716 tools/lib/bpf/libbpf.c int bpf_object__section_size(const struct bpf_object *obj, const char *name, obj 726 tools/lib/bpf/libbpf.c if (obj->efile.data) obj 727 tools/lib/bpf/libbpf.c *size = obj->efile.data->d_size; obj 729 tools/lib/bpf/libbpf.c if (obj->efile.bss) obj 730 tools/lib/bpf/libbpf.c *size = obj->efile.bss->d_size; obj 732 tools/lib/bpf/libbpf.c if (obj->efile.rodata) obj 733 tools/lib/bpf/libbpf.c *size = obj->efile.rodata->d_size; obj 735 tools/lib/bpf/libbpf.c ret = bpf_object_search_section_size(obj, name, &d_size); obj 743 tools/lib/bpf/libbpf.c int bpf_object__variable_offset(const struct bpf_object *obj, const char *name, obj 746 tools/lib/bpf/libbpf.c Elf_Data *symbols = obj->efile.symbols; obj 762 tools/lib/bpf/libbpf.c sname = elf_strptr(obj->efile.elf, obj->efile.strtabidx, obj 778 tools/lib/bpf/libbpf.c static struct bpf_map *bpf_object__add_map(struct bpf_object *obj) obj 784 tools/lib/bpf/libbpf.c if (obj->nr_maps < obj->maps_cap) obj 785 tools/lib/bpf/libbpf.c return &obj->maps[obj->nr_maps++]; obj 787 tools/lib/bpf/libbpf.c new_cap = max((size_t)4, obj->maps_cap * 3 / 2); obj 788 tools/lib/bpf/libbpf.c new_maps = realloc(obj->maps, new_cap * sizeof(*obj->maps)); obj 794 tools/lib/bpf/libbpf.c obj->maps_cap = new_cap; obj 795 tools/lib/bpf/libbpf.c obj->maps = new_maps; obj 798 tools/lib/bpf/libbpf.c memset(obj->maps + obj->nr_maps, 0, obj 799 tools/lib/bpf/libbpf.c (obj->maps_cap - obj->nr_maps) * sizeof(*obj->maps)); obj 804 tools/lib/bpf/libbpf.c for (i = obj->nr_maps; i < obj->maps_cap; i++) { obj 805 tools/lib/bpf/libbpf.c obj->maps[i].fd = -1; obj 806 tools/lib/bpf/libbpf.c obj->maps[i].inner_map_fd = -1; obj 809 tools/lib/bpf/libbpf.c return &obj->maps[obj->nr_maps++]; obj 813 tools/lib/bpf/libbpf.c bpf_object__init_internal_map(struct bpf_object *obj, enum libbpf_map_type type, obj 820 tools/lib/bpf/libbpf.c map = bpf_object__add_map(obj); obj 827 tools/lib/bpf/libbpf.c snprintf(map_name, sizeof(map_name), "%.8s%.7s", obj->name, obj 853 tools/lib/bpf/libbpf.c pr_debug("map %td is \"%s\"\n", map - obj->maps, map->name); obj 857 tools/lib/bpf/libbpf.c static int 
bpf_object__init_global_data_maps(struct bpf_object *obj) obj 861 tools/lib/bpf/libbpf.c if (!obj->caps.global_data) obj 866 tools/lib/bpf/libbpf.c if (obj->efile.data_shndx >= 0) { obj 867 tools/lib/bpf/libbpf.c err = bpf_object__init_internal_map(obj, LIBBPF_MAP_DATA, obj 868 tools/lib/bpf/libbpf.c obj->efile.data_shndx, obj 869 tools/lib/bpf/libbpf.c obj->efile.data, obj 870 tools/lib/bpf/libbpf.c &obj->sections.data); obj 874 tools/lib/bpf/libbpf.c if (obj->efile.rodata_shndx >= 0) { obj 875 tools/lib/bpf/libbpf.c err = bpf_object__init_internal_map(obj, LIBBPF_MAP_RODATA, obj 876 tools/lib/bpf/libbpf.c obj->efile.rodata_shndx, obj 877 tools/lib/bpf/libbpf.c obj->efile.rodata, obj 878 tools/lib/bpf/libbpf.c &obj->sections.rodata); obj 882 tools/lib/bpf/libbpf.c if (obj->efile.bss_shndx >= 0) { obj 883 tools/lib/bpf/libbpf.c err = bpf_object__init_internal_map(obj, LIBBPF_MAP_BSS, obj 884 tools/lib/bpf/libbpf.c obj->efile.bss_shndx, obj 885 tools/lib/bpf/libbpf.c obj->efile.bss, NULL); obj 892 tools/lib/bpf/libbpf.c static int bpf_object__init_user_maps(struct bpf_object *obj, bool strict) obj 894 tools/lib/bpf/libbpf.c Elf_Data *symbols = obj->efile.symbols; obj 899 tools/lib/bpf/libbpf.c if (obj->efile.maps_shndx < 0) obj 905 tools/lib/bpf/libbpf.c scn = elf_getscn(obj->efile.elf, obj->efile.maps_shndx); obj 910 tools/lib/bpf/libbpf.c obj->efile.maps_shndx); obj 927 tools/lib/bpf/libbpf.c if (sym.st_shndx != obj->efile.maps_shndx) obj 933 tools/lib/bpf/libbpf.c obj->path, nr_maps, data->d_size); obj 939 tools/lib/bpf/libbpf.c obj->path, nr_maps, data->d_size); obj 952 tools/lib/bpf/libbpf.c if (sym.st_shndx != obj->efile.maps_shndx) obj 955 tools/lib/bpf/libbpf.c map = bpf_object__add_map(obj); obj 959 tools/lib/bpf/libbpf.c map_name = elf_strptr(obj->efile.elf, obj->efile.strtabidx, obj 963 tools/lib/bpf/libbpf.c i, obj->path); obj 974 tools/lib/bpf/libbpf.c obj->path, map_name); obj 1007 tools/lib/bpf/libbpf.c obj->path, map_name); obj 1072 tools/lib/bpf/libbpf.c static int bpf_object__init_user_btf_map(struct bpf_object *obj, obj 1086 tools/lib/bpf/libbpf.c var = btf__type_by_id(obj->btf, vi->type); obj 1088 tools/lib/bpf/libbpf.c map_name = btf__name_by_offset(obj->btf, var->name_off); obj 1111 tools/lib/bpf/libbpf.c def = skip_mods_and_typedefs(obj->btf, var->type, NULL); obj 1122 tools/lib/bpf/libbpf.c map = bpf_object__add_map(obj); obj 1140 tools/lib/bpf/libbpf.c const char *name = btf__name_by_offset(obj->btf, m->name_off); obj 1148 tools/lib/bpf/libbpf.c if (!get_map_field_int(map_name, obj->btf, def, m, obj 1154 tools/lib/bpf/libbpf.c if (!get_map_field_int(map_name, obj->btf, def, m, obj 1160 tools/lib/bpf/libbpf.c if (!get_map_field_int(map_name, obj->btf, def, m, obj 1168 tools/lib/bpf/libbpf.c if (!get_map_field_int(map_name, obj->btf, def, m, obj 1182 tools/lib/bpf/libbpf.c t = btf__type_by_id(obj->btf, m->type); obj 1193 tools/lib/bpf/libbpf.c sz = btf__resolve_size(obj->btf, t->type); obj 1211 tools/lib/bpf/libbpf.c if (!get_map_field_int(map_name, obj->btf, def, m, obj 1225 tools/lib/bpf/libbpf.c t = btf__type_by_id(obj->btf, m->type); obj 1236 tools/lib/bpf/libbpf.c sz = btf__resolve_size(obj->btf, t->type); obj 1270 tools/lib/bpf/libbpf.c static int bpf_object__init_user_btf_maps(struct bpf_object *obj, bool strict) obj 1279 tools/lib/bpf/libbpf.c if (obj->efile.btf_maps_shndx < 0) obj 1282 tools/lib/bpf/libbpf.c scn = elf_getscn(obj->efile.elf, obj->efile.btf_maps_shndx); obj 1287 tools/lib/bpf/libbpf.c obj->efile.maps_shndx, MAPS_ELF_SEC); obj 1291 
obj 1293 tools/lib/bpf/libbpf.c t = btf__type_by_id(obj->btf, i);
obj 1296 tools/lib/bpf/libbpf.c name = btf__name_by_offset(obj->btf, t->name_off);
obj 1310 tools/lib/bpf/libbpf.c err = bpf_object__init_user_btf_map(obj, sec, i,
obj 1311 tools/lib/bpf/libbpf.c obj->efile.btf_maps_shndx,
obj 1320 tools/lib/bpf/libbpf.c static int bpf_object__init_maps(struct bpf_object *obj, int flags)
obj 1325 tools/lib/bpf/libbpf.c err = bpf_object__init_user_maps(obj, strict);
obj 1329 tools/lib/bpf/libbpf.c err = bpf_object__init_user_btf_maps(obj, strict);
obj 1333 tools/lib/bpf/libbpf.c err = bpf_object__init_global_data_maps(obj);
obj 1337 tools/lib/bpf/libbpf.c if (obj->nr_maps) {
obj 1338 tools/lib/bpf/libbpf.c qsort(obj->maps, obj->nr_maps, sizeof(obj->maps[0]),
obj 1344 tools/lib/bpf/libbpf.c static bool section_have_execinstr(struct bpf_object *obj, int idx)
obj 1349 tools/lib/bpf/libbpf.c scn = elf_getscn(obj->efile.elf, idx);
obj 1362 tools/lib/bpf/libbpf.c static void bpf_object__sanitize_btf(struct bpf_object *obj)
obj 1364 tools/lib/bpf/libbpf.c bool has_datasec = obj->caps.btf_datasec;
obj 1365 tools/lib/bpf/libbpf.c bool has_func = obj->caps.btf_func;
obj 1366 tools/lib/bpf/libbpf.c struct btf *btf = obj->btf;
obj 1370 tools/lib/bpf/libbpf.c if (!obj->btf || (has_func && has_datasec))
obj 1422 tools/lib/bpf/libbpf.c static void bpf_object__sanitize_btf_ext(struct bpf_object *obj)
obj 1424 tools/lib/bpf/libbpf.c if (!obj->btf_ext)
obj 1427 tools/lib/bpf/libbpf.c if (!obj->caps.btf_func) {
obj 1428 tools/lib/bpf/libbpf.c btf_ext__free(obj->btf_ext);
obj 1429 tools/lib/bpf/libbpf.c obj->btf_ext = NULL;
obj 1433 tools/lib/bpf/libbpf.c static bool bpf_object__is_btf_mandatory(const struct bpf_object *obj)
obj 1435 tools/lib/bpf/libbpf.c return obj->efile.btf_maps_shndx >= 0;
obj 1438 tools/lib/bpf/libbpf.c static int bpf_object__init_btf(struct bpf_object *obj,
obj 1442 tools/lib/bpf/libbpf.c bool btf_required = bpf_object__is_btf_mandatory(obj);
obj 1446 tools/lib/bpf/libbpf.c obj->btf = btf__new(btf_data->d_buf, btf_data->d_size);
obj 1447 tools/lib/bpf/libbpf.c if (IS_ERR(obj->btf)) {
obj 1452 tools/lib/bpf/libbpf.c err = btf__finalize_data(obj, obj->btf);
obj 1460 tools/lib/bpf/libbpf.c if (!obj->btf) {
obj 1465 tools/lib/bpf/libbpf.c obj->btf_ext = btf_ext__new(btf_ext_data->d_buf,
obj 1467 tools/lib/bpf/libbpf.c if (IS_ERR(obj->btf_ext)) {
obj 1469 tools/lib/bpf/libbpf.c BTF_EXT_ELF_SEC, PTR_ERR(obj->btf_ext));
obj 1470 tools/lib/bpf/libbpf.c obj->btf_ext = NULL;
obj 1475 tools/lib/bpf/libbpf.c if (err || IS_ERR(obj->btf)) {
obj 1477 tools/lib/bpf/libbpf.c err = err ? : PTR_ERR(obj->btf);
obj 1480 tools/lib/bpf/libbpf.c if (!IS_ERR_OR_NULL(obj->btf))
obj 1481 tools/lib/bpf/libbpf.c btf__free(obj->btf);
obj 1482 tools/lib/bpf/libbpf.c obj->btf = NULL;
obj 1484 tools/lib/bpf/libbpf.c if (btf_required && !obj->btf) {
obj 1491 tools/lib/bpf/libbpf.c static int bpf_object__sanitize_and_load_btf(struct bpf_object *obj)
obj 1495 tools/lib/bpf/libbpf.c if (!obj->btf)
obj 1498 tools/lib/bpf/libbpf.c bpf_object__sanitize_btf(obj);
obj 1499 tools/lib/bpf/libbpf.c bpf_object__sanitize_btf_ext(obj);
obj 1501 tools/lib/bpf/libbpf.c err = btf__load(obj->btf);
obj 1505 tools/lib/bpf/libbpf.c btf__free(obj->btf);
obj 1506 tools/lib/bpf/libbpf.c obj->btf = NULL;
obj 1508 tools/lib/bpf/libbpf.c if (obj->btf_ext) {
obj 1509 tools/lib/bpf/libbpf.c btf_ext__free(obj->btf_ext);
obj 1510 tools/lib/bpf/libbpf.c obj->btf_ext = NULL;
obj 1513 tools/lib/bpf/libbpf.c if (bpf_object__is_btf_mandatory(obj))
obj 1519 tools/lib/bpf/libbpf.c static int bpf_object__elf_collect(struct bpf_object *obj, int flags)
obj 1521 tools/lib/bpf/libbpf.c Elf *elf = obj->efile.elf;
obj 1522 tools/lib/bpf/libbpf.c GElf_Ehdr *ep = &obj->efile.ehdr;
obj 1530 tools/lib/bpf/libbpf.c pr_warning("failed to get e_shstrndx from %s\n", obj->path);
obj 1542 tools/lib/bpf/libbpf.c idx, obj->path);
obj 1549 tools/lib/bpf/libbpf.c idx, obj->path);
obj 1556 tools/lib/bpf/libbpf.c idx, name, obj->path);
obj 1565 tools/lib/bpf/libbpf.c err = bpf_object__init_license(obj,
obj 1571 tools/lib/bpf/libbpf.c err = bpf_object__init_kversion(obj,
obj 1577 tools/lib/bpf/libbpf.c obj->efile.maps_shndx = idx;
obj 1579 tools/lib/bpf/libbpf.c obj->efile.btf_maps_shndx = idx;
obj 1585 tools/lib/bpf/libbpf.c if (obj->efile.symbols) {
obj 1587 tools/lib/bpf/libbpf.c obj->path);
obj 1590 tools/lib/bpf/libbpf.c obj->efile.symbols = data;
obj 1591 tools/lib/bpf/libbpf.c obj->efile.strtabidx = sh.sh_link;
obj 1595 tools/lib/bpf/libbpf.c obj->efile.text_shndx = idx;
obj 1596 tools/lib/bpf/libbpf.c err = bpf_object__add_program(obj, data->d_buf,
obj 1604 tools/lib/bpf/libbpf.c name, obj->path, cp);
obj 1608 tools/lib/bpf/libbpf.c obj->efile.data = data;
obj 1609 tools/lib/bpf/libbpf.c obj->efile.data_shndx = idx;
obj 1611 tools/lib/bpf/libbpf.c obj->efile.rodata = data;
obj 1612 tools/lib/bpf/libbpf.c obj->efile.rodata_shndx = idx;
obj 1617 tools/lib/bpf/libbpf.c int nr_reloc = obj->efile.nr_reloc;
obj 1618 tools/lib/bpf/libbpf.c void *reloc = obj->efile.reloc;
obj 1622 tools/lib/bpf/libbpf.c if (!section_have_execinstr(obj, sec)) {
obj 1629 tools/lib/bpf/libbpf.c sizeof(*obj->efile.reloc));
obj 1635 tools/lib/bpf/libbpf.c obj->efile.reloc = reloc;
obj 1636 tools/lib/bpf/libbpf.c obj->efile.nr_reloc++;
obj 1638 tools/lib/bpf/libbpf.c obj->efile.reloc[nr_reloc].shdr = sh;
obj 1639 tools/lib/bpf/libbpf.c obj->efile.reloc[nr_reloc].data = data;
obj 1641 tools/lib/bpf/libbpf.c obj->efile.bss = data;
obj 1642 tools/lib/bpf/libbpf.c obj->efile.bss_shndx = idx;
obj 1648 tools/lib/bpf/libbpf.c if (!obj->efile.strtabidx || obj->efile.strtabidx >= idx) {
obj 1652 tools/lib/bpf/libbpf.c err = bpf_object__init_btf(obj, btf_data, btf_ext_data);
obj 1654 tools/lib/bpf/libbpf.c err = bpf_object__init_maps(obj, flags);
obj 1656 tools/lib/bpf/libbpf.c err = bpf_object__sanitize_and_load_btf(obj);
obj 1658 tools/lib/bpf/libbpf.c err = bpf_object__init_prog_names(obj);
obj 1663 tools/lib/bpf/libbpf.c bpf_object__find_prog_by_idx(struct bpf_object *obj, int idx)
obj 1668 tools/lib/bpf/libbpf.c for (i = 0; i < obj->nr_programs; i++) {
obj 1669 tools/lib/bpf/libbpf.c prog = &obj->programs[i];
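
bpf_object__elf_collect() above dispatches on ELF section names: "license" and "version" feed bpf_object__init_license()/bpf_object__init_kversion(), while .text, .data, .rodata, .bss and the maps sections are recorded in obj->efile. A minimal BPF C source that produces those sections could look like this sketch (section names match the listing; the program body and its section are placeholders):

    #include <linux/bpf.h>
    #include <linux/version.h>
    #include "bpf_helpers.h"   /* assumed: provides SEC() */

    /* Consumed by bpf_object__init_license(): copied into obj->license. */
    char _license[] SEC("license") = "GPL";

    /* Consumed by bpf_object__init_kversion(): becomes obj->kern_version. */
    __u32 _version SEC("version") = LINUX_VERSION_CODE;

    SEC("kprobe/sys_nanosleep")
    int probe(void *ctx)
    {
            return 0;
    }
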
obj 1677 tools/lib/bpf/libbpf.c bpf_object__find_program_by_title(const struct bpf_object *obj,
obj 1682 tools/lib/bpf/libbpf.c bpf_object__for_each_program(pos, obj) {
obj 1689 tools/lib/bpf/libbpf.c static bool bpf_object__shndx_is_data(const struct bpf_object *obj,
obj 1692 tools/lib/bpf/libbpf.c return shndx == obj->efile.data_shndx ||
obj 1693 tools/lib/bpf/libbpf.c shndx == obj->efile.bss_shndx ||
obj 1694 tools/lib/bpf/libbpf.c shndx == obj->efile.rodata_shndx;
obj 1697 tools/lib/bpf/libbpf.c static bool bpf_object__shndx_is_maps(const struct bpf_object *obj,
obj 1700 tools/lib/bpf/libbpf.c return shndx == obj->efile.maps_shndx ||
obj 1701 tools/lib/bpf/libbpf.c shndx == obj->efile.btf_maps_shndx;
obj 1704 tools/lib/bpf/libbpf.c static bool bpf_object__relo_in_known_section(const struct bpf_object *obj,
obj 1707 tools/lib/bpf/libbpf.c return shndx == obj->efile.text_shndx ||
obj 1708 tools/lib/bpf/libbpf.c bpf_object__shndx_is_maps(obj, shndx) ||
obj 1709 tools/lib/bpf/libbpf.c bpf_object__shndx_is_data(obj, shndx);
obj 1713 tools/lib/bpf/libbpf.c bpf_object__section_to_libbpf_map_type(const struct bpf_object *obj, int shndx)
obj 1715 tools/lib/bpf/libbpf.c if (shndx == obj->efile.data_shndx)
obj 1717 tools/lib/bpf/libbpf.c else if (shndx == obj->efile.bss_shndx)
obj 1719 tools/lib/bpf/libbpf.c else if (shndx == obj->efile.rodata_shndx)
obj 1727 tools/lib/bpf/libbpf.c Elf_Data *data, struct bpf_object *obj)
obj 1729 tools/lib/bpf/libbpf.c Elf_Data *symbols = obj->efile.symbols;
obj 1730 tools/lib/bpf/libbpf.c struct bpf_map *maps = obj->maps;
obj 1731 tools/lib/bpf/libbpf.c size_t nr_maps = obj->nr_maps;
obj 1765 tools/lib/bpf/libbpf.c name = elf_strptr(obj->efile.elf, obj->efile.strtabidx,
obj 1783 tools/lib/bpf/libbpf.c if (!bpf_object__relo_in_known_section(obj, shdr_idx)) {
obj 1797 tools/lib/bpf/libbpf.c obj->has_pseudo_calls = true;
obj 1807 tools/lib/bpf/libbpf.c if (bpf_object__shndx_is_maps(obj, shdr_idx) ||
obj 1808 tools/lib/bpf/libbpf.c bpf_object__shndx_is_data(obj, shdr_idx)) {
obj 1809 tools/lib/bpf/libbpf.c type = bpf_object__section_to_libbpf_map_type(obj, shdr_idx);
obj 1816 tools/lib/bpf/libbpf.c if (!obj->caps.global_data) {
obj 1853 tools/lib/bpf/libbpf.c static int bpf_map_find_btf_info(struct bpf_object *obj, struct bpf_map *map)
obj 1860 tools/lib/bpf/libbpf.c if (map->sec_idx == obj->efile.btf_maps_shndx)
obj 1864 tools/lib/bpf/libbpf.c ret = btf__get_map_kv_tids(obj->btf, map->name, def->key_size,
obj 1872 tools/lib/bpf/libbpf.c ret = btf__find_by_name(obj->btf,
obj 1952 tools/lib/bpf/libbpf.c bpf_object__probe_name(struct bpf_object *obj)
obj 1984 tools/lib/bpf/libbpf.c obj->caps.name = 1;
obj 1992 tools/lib/bpf/libbpf.c bpf_object__probe_global_data(struct bpf_object *obj)
obj 2029 tools/lib/bpf/libbpf.c obj->caps.global_data = 1;
obj 2037 tools/lib/bpf/libbpf.c static int bpf_object__probe_btf_func(struct bpf_object *obj)
obj 2055 tools/lib/bpf/libbpf.c obj->caps.btf_func = 1;
obj 2063 tools/lib/bpf/libbpf.c static int bpf_object__probe_btf_datasec(struct bpf_object *obj)
obj 2082 tools/lib/bpf/libbpf.c obj->caps.btf_datasec = 1;
obj 2091 tools/lib/bpf/libbpf.c bpf_object__probe_caps(struct bpf_object *obj)
obj 2093 tools/lib/bpf/libbpf.c int (*probe_fn[])(struct bpf_object *obj) = {
obj 2102 tools/lib/bpf/libbpf.c ret = probe_fn[i](obj);
obj 2111 tools/lib/bpf/libbpf.c bpf_object__populate_internal_map(struct bpf_object *obj, struct bpf_map *map)
obj 2122 tools/lib/bpf/libbpf.c obj->sections.data : obj->sections.rodata;
obj 2139 tools/lib/bpf/libbpf.c bpf_object__create_maps(struct bpf_object *obj)
obj 2146 tools/lib/bpf/libbpf.c for (i = 0; i < obj->nr_maps; i++) {
obj 2147 tools/lib/bpf/libbpf.c struct bpf_map *map = &obj->maps[i];
obj 2158 tools/lib/bpf/libbpf.c if (obj->caps.name)
obj 2188 tools/lib/bpf/libbpf.c if (obj->btf && !bpf_map_find_btf_info(obj, map)) {
obj 2189 tools/lib/bpf/libbpf.c create_attr.btf_fd = btf__fd(obj->btf);
obj 2218 tools/lib/bpf/libbpf.c zclose(obj->maps[j].fd);
obj 2223 tools/lib/bpf/libbpf.c err = bpf_object__populate_internal_map(obj, map);
obj 2265 tools/lib/bpf/libbpf.c bpf_program_reloc_btf_ext(struct bpf_program *prog, struct bpf_object *obj,
obj 2277 tools/lib/bpf/libbpf.c err = btf_ext__reloc_func_info(obj->btf, obj->btf_ext,
obj 2286 tools/lib/bpf/libbpf.c prog->func_info_rec_size = btf_ext__func_info_rec_size(obj->btf_ext);
obj 2290 tools/lib/bpf/libbpf.c err = btf_ext__reloc_line_info(obj->btf, obj->btf_ext,
obj 2299 tools/lib/bpf/libbpf.c prog->line_info_rec_size = btf_ext__line_info_rec_size(obj->btf_ext);
obj 3116 tools/lib/bpf/libbpf.c bpf_core_reloc_offsets(struct bpf_object *obj, const char *targ_btf_path)
obj 3144 tools/lib/bpf/libbpf.c seg = &obj->btf_ext->offset_reloc_info;
obj 3146 tools/lib/bpf/libbpf.c sec_name = btf__name_by_offset(obj->btf, sec->sec_name_off);
obj 3151 tools/lib/bpf/libbpf.c prog = bpf_object__find_program_by_title(obj, sec_name);
obj 3163 tools/lib/bpf/libbpf.c err = bpf_core_reloc_offset(prog, rec, i, obj->btf,
obj 3185 tools/lib/bpf/libbpf.c bpf_object__relocate_core(struct bpf_object *obj, const char *targ_btf_path)
obj 3189 tools/lib/bpf/libbpf.c if (obj->btf_ext->offset_reloc_info.len)
obj 3190 tools/lib/bpf/libbpf.c err = bpf_core_reloc_offsets(obj, targ_btf_path);
obj 3196 tools/lib/bpf/libbpf.c bpf_program__reloc_text(struct bpf_program *prog, struct bpf_object *obj,
obj 3207 tools/lib/bpf/libbpf.c if (prog->idx == obj->efile.text_shndx) {
obj 3214 tools/lib/bpf/libbpf.c text = bpf_object__find_prog_by_idx(obj, obj->efile.text_shndx);
obj 3227 tools/lib/bpf/libbpf.c if (obj->btf_ext) {
obj 3228 tools/lib/bpf/libbpf.c err = bpf_program_reloc_btf_ext(prog, obj,
obj 3249 tools/lib/bpf/libbpf.c bpf_program__relocate(struct bpf_program *prog, struct bpf_object *obj)
obj 3256 tools/lib/bpf/libbpf.c if (obj->btf_ext) {
obj 3257 tools/lib/bpf/libbpf.c err = bpf_program_reloc_btf_ext(prog, obj,
obj 3288 tools/lib/bpf/libbpf.c insns[insn_idx].imm = obj->maps[map_idx].fd;
obj 3290 tools/lib/bpf/libbpf.c err = bpf_program__reloc_text(prog, obj,
obj 3303 tools/lib/bpf/libbpf.c bpf_object__relocate(struct bpf_object *obj, const char *targ_btf_path)
obj 3309 tools/lib/bpf/libbpf.c if (obj->btf_ext) {
obj 3310 tools/lib/bpf/libbpf.c err = bpf_object__relocate_core(obj, targ_btf_path);
obj 3317 tools/lib/bpf/libbpf.c for (i = 0; i < obj->nr_programs; i++) {
obj 3318 tools/lib/bpf/libbpf.c prog = &obj->programs[i];
obj 3320 tools/lib/bpf/libbpf.c err = bpf_program__relocate(prog, obj);
obj 3330 tools/lib/bpf/libbpf.c static int bpf_object__collect_reloc(struct bpf_object *obj)
obj 3334 tools/lib/bpf/libbpf.c if (!obj_elf_valid(obj)) {
obj 3339 tools/lib/bpf/libbpf.c for (i = 0; i < obj->efile.nr_reloc; i++) {
obj 3340 tools/lib/bpf/libbpf.c GElf_Shdr *shdr = &obj->efile.reloc[i].shdr;
obj 3341 tools/lib/bpf/libbpf.c Elf_Data *data = obj->efile.reloc[i].data;
obj 3350 tools/lib/bpf/libbpf.c prog = bpf_object__find_prog_by_idx(obj, idx);
obj 3356 tools/lib/bpf/libbpf.c err = bpf_program__collect_reloc(prog, shdr, data, obj);
obj 3387 tools/lib/bpf/libbpf.c if (prog->obj->btf_ext)
obj 3388 tools/lib/bpf/libbpf.c btf_fd = bpf_object__btf_fd(prog->obj);
obj 3538 tools/lib/bpf/libbpf.c const struct bpf_object *obj)
obj 3540 tools/lib/bpf/libbpf.c return prog->idx == obj->efile.text_shndx && obj->has_pseudo_calls;
obj 3544 tools/lib/bpf/libbpf.c bpf_object__load_progs(struct bpf_object *obj, int log_level)
obj 3549 tools/lib/bpf/libbpf.c for (i = 0; i < obj->nr_programs; i++) {
obj 3550 tools/lib/bpf/libbpf.c if (bpf_program__is_function_storage(&obj->programs[i], obj))
obj 3552 tools/lib/bpf/libbpf.c obj->programs[i].log_level |= log_level;
obj 3553 tools/lib/bpf/libbpf.c err = bpf_program__load(&obj->programs[i],
obj 3554 tools/lib/bpf/libbpf.c obj->license,
obj 3555 tools/lib/bpf/libbpf.c obj->kern_version);
obj 3597 tools/lib/bpf/libbpf.c static int bpf_object__validate(struct bpf_object *obj, bool needs_kver)
obj 3599 tools/lib/bpf/libbpf.c if (needs_kver && obj->kern_version == 0) {
obj 3601 tools/lib/bpf/libbpf.c obj->path);
obj 3611 tools/lib/bpf/libbpf.c struct bpf_object *obj;
obj 3619 tools/lib/bpf/libbpf.c obj = bpf_object__new(path, obj_buf, obj_buf_sz);
obj 3620 tools/lib/bpf/libbpf.c if (IS_ERR(obj))
obj 3621 tools/lib/bpf/libbpf.c return obj;
obj 3623 tools/lib/bpf/libbpf.c CHECK_ERR(bpf_object__elf_init(obj), err, out);
obj 3624 tools/lib/bpf/libbpf.c CHECK_ERR(bpf_object__check_endianness(obj), err, out);
obj 3625 tools/lib/bpf/libbpf.c CHECK_ERR(bpf_object__probe_caps(obj), err, out);
obj 3626 tools/lib/bpf/libbpf.c CHECK_ERR(bpf_object__elf_collect(obj, flags), err, out);
obj 3627 tools/lib/bpf/libbpf.c CHECK_ERR(bpf_object__collect_reloc(obj), err, out);
obj 3628 tools/lib/bpf/libbpf.c CHECK_ERR(bpf_object__validate(obj, needs_kver), err, out);
obj 3630 tools/lib/bpf/libbpf.c bpf_object__elf_finish(obj);
obj 3631 tools/lib/bpf/libbpf.c return obj;
obj 3633 tools/lib/bpf/libbpf.c bpf_object__close(obj);
obj 3687 tools/lib/bpf/libbpf.c int bpf_object__unload(struct bpf_object *obj)
obj 3691 tools/lib/bpf/libbpf.c if (!obj)
obj 3694 tools/lib/bpf/libbpf.c for (i = 0; i < obj->nr_maps; i++)
obj 3695 tools/lib/bpf/libbpf.c zclose(obj->maps[i].fd);
obj 3697 tools/lib/bpf/libbpf.c for (i = 0; i < obj->nr_programs; i++)
obj 3698 tools/lib/bpf/libbpf.c bpf_program__unload(&obj->programs[i]);
obj 3705 tools/lib/bpf/libbpf.c struct bpf_object *obj;
obj 3710 tools/lib/bpf/libbpf.c obj = attr->obj;
obj 3711 tools/lib/bpf/libbpf.c if (!obj)
obj 3714 tools/lib/bpf/libbpf.c if (obj->loaded) {
obj 3719 tools/lib/bpf/libbpf.c obj->loaded = true;
obj 3721 tools/lib/bpf/libbpf.c CHECK_ERR(bpf_object__create_maps(obj), err, out);
obj 3722 tools/lib/bpf/libbpf.c CHECK_ERR(bpf_object__relocate(obj, attr->target_btf_path), err, out);
obj 3723 tools/lib/bpf/libbpf.c CHECK_ERR(bpf_object__load_progs(obj, attr->log_level), err, out);
obj 3727 tools/lib/bpf/libbpf.c bpf_object__unload(obj);
obj 3728 tools/lib/bpf/libbpf.c pr_warning("failed to load object '%s'\n", obj->path);
obj 3732 tools/lib/bpf/libbpf.c int bpf_object__load(struct bpf_object *obj)
obj 3735 tools/lib/bpf/libbpf.c .obj = obj,
obj 4004 tools/lib/bpf/libbpf.c int bpf_object__pin_maps(struct bpf_object *obj, const char *path)
obj 4009 tools/lib/bpf/libbpf.c if (!obj)
obj 4012 tools/lib/bpf/libbpf.c if (!obj->loaded) {
obj 4021 tools/lib/bpf/libbpf.c bpf_object__for_each_map(map, obj) {
obj 4043 tools/lib/bpf/libbpf.c while ((map = bpf_map__prev(map, obj))) {
obj 4060 tools/lib/bpf/libbpf.c int bpf_object__unpin_maps(struct bpf_object *obj, const char *path)
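
The open path above (obj 3619-3631) parses the ELF, probes kernel capabilities, and collects sections and relocations; the load path (obj 3719-3723) then creates maps, relocates, and loads programs. From the caller's side that whole lifecycle reduces to three public calls. A hedged sketch, assuming the installed header path bpf/libbpf.h and a placeholder object file name:

    #include <bpf/libbpf.h>
    #include <stdio.h>

    int main(void)
    {
            /* "prog.o" is a placeholder path for this sketch. */
            struct bpf_object *obj = bpf_object__open("prog.o");

            if (libbpf_get_error(obj)) {
                    fprintf(stderr, "failed to open object\n");
                    return 1;
            }

            /* Creates maps, performs relocations, loads all programs. */
            if (bpf_object__load(obj)) {
                    fprintf(stderr, "failed to load object\n");
                    bpf_object__close(obj);
                    return 1;
            }

            /* Unloads programs, closes map fds, frees BTF data. */
            bpf_object__close(obj);
            return 0;
    }
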
obj 4065 tools/lib/bpf/libbpf.c if (!obj)
obj 4068 tools/lib/bpf/libbpf.c bpf_object__for_each_map(map, obj) {
obj 4087 tools/lib/bpf/libbpf.c int bpf_object__pin_programs(struct bpf_object *obj, const char *path)
obj 4092 tools/lib/bpf/libbpf.c if (!obj)
obj 4095 tools/lib/bpf/libbpf.c if (!obj->loaded) {
obj 4104 tools/lib/bpf/libbpf.c bpf_object__for_each_program(prog, obj) {
obj 4126 tools/lib/bpf/libbpf.c while ((prog = bpf_program__prev(prog, obj))) {
obj 4143 tools/lib/bpf/libbpf.c int bpf_object__unpin_programs(struct bpf_object *obj, const char *path)
obj 4148 tools/lib/bpf/libbpf.c if (!obj)
obj 4151 tools/lib/bpf/libbpf.c bpf_object__for_each_program(prog, obj) {
obj 4170 tools/lib/bpf/libbpf.c int bpf_object__pin(struct bpf_object *obj, const char *path)
obj 4174 tools/lib/bpf/libbpf.c err = bpf_object__pin_maps(obj, path);
obj 4178 tools/lib/bpf/libbpf.c err = bpf_object__pin_programs(obj, path);
obj 4180 tools/lib/bpf/libbpf.c bpf_object__unpin_maps(obj, path);
obj 4187 tools/lib/bpf/libbpf.c void bpf_object__close(struct bpf_object *obj)
obj 4191 tools/lib/bpf/libbpf.c if (!obj)
obj 4194 tools/lib/bpf/libbpf.c if (obj->clear_priv)
obj 4195 tools/lib/bpf/libbpf.c obj->clear_priv(obj, obj->priv);
obj 4197 tools/lib/bpf/libbpf.c bpf_object__elf_finish(obj);
obj 4198 tools/lib/bpf/libbpf.c bpf_object__unload(obj);
obj 4199 tools/lib/bpf/libbpf.c btf__free(obj->btf);
obj 4200 tools/lib/bpf/libbpf.c btf_ext__free(obj->btf_ext);
obj 4202 tools/lib/bpf/libbpf.c for (i = 0; i < obj->nr_maps; i++) {
obj 4203 tools/lib/bpf/libbpf.c zfree(&obj->maps[i].name);
obj 4204 tools/lib/bpf/libbpf.c if (obj->maps[i].clear_priv)
obj 4205 tools/lib/bpf/libbpf.c obj->maps[i].clear_priv(&obj->maps[i],
obj 4206 tools/lib/bpf/libbpf.c obj->maps[i].priv);
obj 4207 tools/lib/bpf/libbpf.c obj->maps[i].priv = NULL;
obj 4208 tools/lib/bpf/libbpf.c obj->maps[i].clear_priv = NULL;
obj 4211 tools/lib/bpf/libbpf.c zfree(&obj->sections.rodata);
obj 4212 tools/lib/bpf/libbpf.c zfree(&obj->sections.data);
obj 4213 tools/lib/bpf/libbpf.c zfree(&obj->maps);
obj 4214 tools/lib/bpf/libbpf.c obj->nr_maps = 0;
obj 4216 tools/lib/bpf/libbpf.c if (obj->programs && obj->nr_programs) {
obj 4217 tools/lib/bpf/libbpf.c for (i = 0; i < obj->nr_programs; i++)
obj 4218 tools/lib/bpf/libbpf.c bpf_program__exit(&obj->programs[i]);
obj 4220 tools/lib/bpf/libbpf.c zfree(&obj->programs);
obj 4222 tools/lib/bpf/libbpf.c list_del(&obj->list);
obj 4223 tools/lib/bpf/libbpf.c free(obj);
obj 4245 tools/lib/bpf/libbpf.c const char *bpf_object__name(const struct bpf_object *obj)
obj 4247 tools/lib/bpf/libbpf.c return obj ? obj->path : ERR_PTR(-EINVAL);
obj 4250 tools/lib/bpf/libbpf.c unsigned int bpf_object__kversion(const struct bpf_object *obj)
obj 4252 tools/lib/bpf/libbpf.c return obj ? obj->kern_version : 0;
obj 4255 tools/lib/bpf/libbpf.c struct btf *bpf_object__btf(const struct bpf_object *obj)
obj 4257 tools/lib/bpf/libbpf.c return obj ? obj->btf : NULL;
obj 4260 tools/lib/bpf/libbpf.c int bpf_object__btf_fd(const struct bpf_object *obj)
obj 4262 tools/lib/bpf/libbpf.c return obj->btf ? btf__fd(obj->btf) : -1;
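
bpf_object__pin() above (obj 4170-4180) pins maps first and rolls the maps back if pinning programs then fails. The same call sequence written out by hand, with a placeholder bpffs path:

    /* Sketch only: mirrors the internal ordering of bpf_object__pin(). */
    static int pin_all(struct bpf_object *obj)
    {
            int err = bpf_object__pin_maps(obj, "/sys/fs/bpf/myobj");
            if (err)
                    return err;

            err = bpf_object__pin_programs(obj, "/sys/fs/bpf/myobj");
            if (err)
                    /* Undo the map pins so a failed pin leaves no residue. */
                    bpf_object__unpin_maps(obj, "/sys/fs/bpf/myobj");
            return err;
    }
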
obj 4265 tools/lib/bpf/libbpf.c int bpf_object__set_priv(struct bpf_object *obj, void *priv,
obj 4268 tools/lib/bpf/libbpf.c if (obj->priv && obj->clear_priv)
obj 4269 tools/lib/bpf/libbpf.c obj->clear_priv(obj, obj->priv);
obj 4271 tools/lib/bpf/libbpf.c obj->priv = priv;
obj 4272 tools/lib/bpf/libbpf.c obj->clear_priv = clear_priv;
obj 4276 tools/lib/bpf/libbpf.c void *bpf_object__priv(const struct bpf_object *obj)
obj 4278 tools/lib/bpf/libbpf.c return obj ? obj->priv : ERR_PTR(-EINVAL);
obj 4282 tools/lib/bpf/libbpf.c __bpf_program__iter(const struct bpf_program *p, const struct bpf_object *obj,
obj 4285 tools/lib/bpf/libbpf.c size_t nr_programs = obj->nr_programs;
obj 4293 tools/lib/bpf/libbpf.c return forward ? &obj->programs[0] :
obj 4294 tools/lib/bpf/libbpf.c &obj->programs[nr_programs - 1];
obj 4296 tools/lib/bpf/libbpf.c if (p->obj != obj) {
obj 4301 tools/lib/bpf/libbpf.c idx = (p - obj->programs) + (forward ? 1 : -1);
obj 4302 tools/lib/bpf/libbpf.c if (idx >= obj->nr_programs || idx < 0)
obj 4304 tools/lib/bpf/libbpf.c return &obj->programs[idx];
obj 4308 tools/lib/bpf/libbpf.c bpf_program__next(struct bpf_program *prev, const struct bpf_object *obj)
obj 4313 tools/lib/bpf/libbpf.c prog = __bpf_program__iter(prog, obj, true);
obj 4314 tools/lib/bpf/libbpf.c } while (prog && bpf_program__is_function_storage(prog, obj));
obj 4320 tools/lib/bpf/libbpf.c bpf_program__prev(struct bpf_program *next, const struct bpf_object *obj)
obj 4325 tools/lib/bpf/libbpf.c prog = __bpf_program__iter(prog, obj, false);
obj 4326 tools/lib/bpf/libbpf.c } while (prog && bpf_program__is_function_storage(prog, obj));
obj 4726 tools/lib/bpf/libbpf.c __bpf_map__iter(const struct bpf_map *m, const struct bpf_object *obj, int i)
obj 4731 tools/lib/bpf/libbpf.c if (!obj || !obj->maps)
obj 4734 tools/lib/bpf/libbpf.c s = obj->maps;
obj 4735 tools/lib/bpf/libbpf.c e = obj->maps + obj->nr_maps;
obj 4743 tools/lib/bpf/libbpf.c idx = (m - obj->maps) + i;
obj 4744 tools/lib/bpf/libbpf.c if (idx >= obj->nr_maps || idx < 0)
obj 4746 tools/lib/bpf/libbpf.c return &obj->maps[idx];
obj 4750 tools/lib/bpf/libbpf.c bpf_map__next(const struct bpf_map *prev, const struct bpf_object *obj)
obj 4753 tools/lib/bpf/libbpf.c return obj->maps;
obj 4755 tools/lib/bpf/libbpf.c return __bpf_map__iter(prev, obj, 1);
obj 4759 tools/lib/bpf/libbpf.c bpf_map__prev(const struct bpf_map *next, const struct bpf_object *obj)
obj 4762 tools/lib/bpf/libbpf.c if (!obj->nr_maps)
obj 4764 tools/lib/bpf/libbpf.c return obj->maps + obj->nr_maps - 1;
obj 4767 tools/lib/bpf/libbpf.c return __bpf_map__iter(next, obj, -1);
obj 4771 tools/lib/bpf/libbpf.c bpf_object__find_map_by_name(const struct bpf_object *obj, const char *name)
obj 4775 tools/lib/bpf/libbpf.c bpf_object__for_each_map(pos, obj) {
obj 4783 tools/lib/bpf/libbpf.c bpf_object__find_map_fd_by_name(const struct bpf_object *obj, const char *name)
obj 4785 tools/lib/bpf/libbpf.c return bpf_map__fd(bpf_object__find_map_by_name(obj, name));
obj 4789 tools/lib/bpf/libbpf.c bpf_object__find_map_by_offset(struct bpf_object *obj, size_t offset)
obj 4819 tools/lib/bpf/libbpf.c struct bpf_object *obj;
obj 4831 tools/lib/bpf/libbpf.c obj = bpf_object__open_xattr(&open_attr);
obj 4832 tools/lib/bpf/libbpf.c if (IS_ERR_OR_NULL(obj))
obj 4835 tools/lib/bpf/libbpf.c bpf_object__for_each_program(prog, obj) {
obj 4847 tools/lib/bpf/libbpf.c bpf_object__close(obj);
obj 4862 tools/lib/bpf/libbpf.c bpf_object__for_each_map(map, obj) {
obj 4869 tools/lib/bpf/libbpf.c bpf_object__close(obj);
obj 4873 tools/lib/bpf/libbpf.c err = bpf_object__load(obj);
obj 4875 tools/lib/bpf/libbpf.c bpf_object__close(obj);
obj 4879 tools/lib/bpf/libbpf.c *pobj = obj;
obj 78 tools/lib/bpf/libbpf.h int bpf_object__section_size(const struct bpf_object *obj, const char *name,
obj 80 tools/lib/bpf/libbpf.h int bpf_object__variable_offset(const struct bpf_object *obj, const char *name,
obj 82 tools/lib/bpf/libbpf.h LIBBPF_API int bpf_object__pin_maps(struct bpf_object *obj, const char *path);
obj 83 tools/lib/bpf/libbpf.h LIBBPF_API int bpf_object__unpin_maps(struct bpf_object *obj,
obj 85 tools/lib/bpf/libbpf.h LIBBPF_API int bpf_object__pin_programs(struct bpf_object *obj,
obj 87 tools/lib/bpf/libbpf.h LIBBPF_API int bpf_object__unpin_programs(struct bpf_object *obj,
obj 93 tools/lib/bpf/libbpf.h struct bpf_object *obj;
obj 99 tools/lib/bpf/libbpf.h LIBBPF_API int bpf_object__load(struct bpf_object *obj);
obj 101 tools/lib/bpf/libbpf.h LIBBPF_API int bpf_object__unload(struct bpf_object *obj);
obj 102 tools/lib/bpf/libbpf.h LIBBPF_API const char *bpf_object__name(const struct bpf_object *obj);
obj 103 tools/lib/bpf/libbpf.h LIBBPF_API unsigned int bpf_object__kversion(const struct bpf_object *obj);
obj 106 tools/lib/bpf/libbpf.h LIBBPF_API struct btf *bpf_object__btf(const struct bpf_object *obj);
obj 107 tools/lib/bpf/libbpf.h LIBBPF_API int bpf_object__btf_fd(const struct bpf_object *obj);
obj 110 tools/lib/bpf/libbpf.h bpf_object__find_program_by_title(const struct bpf_object *obj,
obj 121 tools/lib/bpf/libbpf.h LIBBPF_API int bpf_object__set_priv(struct bpf_object *obj, void *priv,
obj 134 tools/lib/bpf/libbpf.h const struct bpf_object *obj);
obj 136 tools/lib/bpf/libbpf.h #define bpf_object__for_each_program(pos, obj) \
obj 137 tools/lib/bpf/libbpf.h for ((pos) = bpf_program__next(NULL, (obj)); \
obj 139 tools/lib/bpf/libbpf.h (pos) = bpf_program__next((pos), (obj)))
obj 142 tools/lib/bpf/libbpf.h const struct bpf_object *obj);
obj 300 tools/lib/bpf/libbpf.h bpf_object__find_map_by_name(const struct bpf_object *obj, const char *name);
obj 303 tools/lib/bpf/libbpf.h bpf_object__find_map_fd_by_name(const struct bpf_object *obj, const char *name);
obj 310 tools/lib/bpf/libbpf.h bpf_object__find_map_by_offset(struct bpf_object *obj, size_t offset);
obj 313 tools/lib/bpf/libbpf.h bpf_map__next(const struct bpf_map *map, const struct bpf_object *obj);
obj 314 tools/lib/bpf/libbpf.h #define bpf_object__for_each_map(pos, obj) \
obj 315 tools/lib/bpf/libbpf.h for ((pos) = bpf_map__next(NULL, (obj)); \
obj 317 tools/lib/bpf/libbpf.h (pos) = bpf_map__next((pos), (obj)))
obj 321 tools/lib/bpf/libbpf.h bpf_map__prev(const struct bpf_map *map, const struct bpf_object *obj);
obj 544 tools/perf/pmu-events/jevents.c jsmntok_t *obj = tok++;
obj 546 tools/perf/pmu-events/jevents.c EXPECT(obj->type == JSMN_OBJECT, obj, "expected object");
obj 547 tools/perf/pmu-events/jevents.c for (j = 0; j < obj->size; j += 2) {
obj 23 tools/perf/scripts/python/Perf-Trace-Util/Context.c static PyObject *perf_trace_context_common_pc(PyObject *obj, PyObject *args)
obj 38 tools/perf/scripts/python/Perf-Trace-Util/Context.c static PyObject *perf_trace_context_common_flags(PyObject *obj,
obj 54 tools/perf/scripts/python/Perf-Trace-Util/Context.c static PyObject *perf_trace_context_common_lock_depth(PyObject *obj,
obj 110 tools/perf/tests/bpf.c static int do_test(struct bpf_object *obj, int (*func)(void),
obj 136 tools/perf/tests/bpf.c err = parse_events_load_bpf_obj(&parse_state, &parse_state.list, obj, NULL);
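
The libbpf.h entries above show the iteration macros (obj 136-139 and 314-317) expanding to bpf_program__next()/bpf_map__next() loops over an object. A fragment showing their typical use, assuming obj is an opened struct bpf_object * and using accessor names from this libbpf version:

    struct bpf_program *prog;
    struct bpf_map *map;

    /* Expands to bpf_program__next(NULL, obj) / bpf_program__next(pos, obj). */
    bpf_object__for_each_program(prog, obj)
            printf("prog: %s\n", bpf_program__title(prog, false));

    /* Same shape for maps, via bpf_map__next(). */
    bpf_object__for_each_map(map, obj)
            printf("map: %s (fd %d)\n", bpf_map__name(map), bpf_map__fd(map));
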
obj 214 tools/perf/tests/bpf.c struct bpf_object *obj;
obj 216 tools/perf/tests/bpf.c obj = bpf__prepare_load_buffer(obj_buf, obj_buf_sz, name);
obj 217 tools/perf/tests/bpf.c if (IS_ERR(obj)) {
obj 221 tools/perf/tests/bpf.c return obj;
obj 229 tools/perf/tests/bpf.c struct bpf_object *obj;
obj 243 tools/perf/tests/bpf.c obj = prepare_bpf(obj_buf, obj_buf_sz,
obj 245 tools/perf/tests/bpf.c if ((!!bpf_testcase_table[idx].target_func) != (!!obj)) {
obj 246 tools/perf/tests/bpf.c if (!obj)
obj 256 tools/perf/tests/bpf.c if (obj) {
obj 257 tools/perf/tests/bpf.c ret = do_test(obj,
obj 277 tools/perf/tests/bpf.c if (bpf_object__pin(obj, PERF_TEST_BPF_PATH))
obj 14 tools/perf/tests/llvm.c struct bpf_object *obj;
obj 16 tools/perf/tests/llvm.c obj = bpf_object__open_buffer(obj_buf, obj_buf_sz, NULL);
obj 17 tools/perf/tests/llvm.c if (libbpf_get_error(obj))
obj 19 tools/perf/tests/llvm.c bpf_object__close(obj);
obj 110 tools/perf/ui/browsers/annotate.c .obj = browser,
obj 27 tools/perf/ui/libslang.h #define sltt_set_color(obj, name, fg, bg) \
obj 28 tools/perf/ui/libslang.h SLtt_set_color(obj,(char *)(name), (char *)(fg), (char *)(bg))
obj 2411 tools/perf/util/annotate.c .obj = fp,
obj 2762 tools/perf/util/annotate.c void *obj, char *bf, size_t size,
obj 2763 tools/perf/util/annotate.c void (*obj__printf)(void *obj, const char *fmt, ...),
obj 2764 tools/perf/util/annotate.c void (*obj__write_graph)(void *obj, int graph))
obj 2773 tools/perf/util/annotate.c obj__write_graph(obj, fwd ? DARROW_CHAR : UARROW_CHAR);
obj 2774 tools/perf/util/annotate.c obj__printf(obj, " ");
obj 2777 tools/perf/util/annotate.c obj__write_graph(obj, RARROW_CHAR);
obj 2778 tools/perf/util/annotate.c obj__printf(obj, " ");
obj 2780 tools/perf/util/annotate.c obj__write_graph(obj, LARROW_CHAR);
obj 2781 tools/perf/util/annotate.c obj__printf(obj, " ");
obj 2783 tools/perf/util/annotate.c obj__printf(obj, " ");
obj 2786 tools/perf/util/annotate.c obj__printf(obj, " ");
obj 2810 tools/perf/util/annotate.c void *obj, unsigned int percent_type,
obj 2811 tools/perf/util/annotate.c int (*obj__set_color)(void *obj, int color),
obj 2812 tools/perf/util/annotate.c void (*obj__set_percent_color)(void *obj, double percent, bool current),
obj 2813 tools/perf/util/annotate.c int (*obj__set_jumps_percent_color)(void *obj, int nr, bool current),
obj 2814 tools/perf/util/annotate.c void (*obj__printf)(void *obj, const char *fmt, ...),
obj 2815 tools/perf/util/annotate.c void (*obj__write_graph)(void *obj, int graph))
obj 2841 tools/perf/util/annotate.c obj__set_percent_color(obj, percent, current_entry);
obj 2843 tools/perf/util/annotate.c obj__printf(obj, "%11" PRIu64 " ", al->data[i].he.period);
obj 2845 tools/perf/util/annotate.c obj__printf(obj, "%6" PRIu64 " ",
obj 2848 tools/perf/util/annotate.c obj__printf(obj, "%6.2f ", percent);
obj 2852 tools/perf/util/annotate.c obj__set_percent_color(obj, 0, current_entry);
obj 2855 tools/perf/util/annotate.c obj__printf(obj, "%-*s", pcnt_width, " ");
obj 2857 tools/perf/util/annotate.c obj__printf(obj, "%-*s", pcnt_width,
obj 2865 tools/perf/util/annotate.c obj__printf(obj, "%*.2f ", ANNOTATION__IPC_WIDTH - 1, al->ipc);
obj 2867 tools/perf/util/annotate.c obj__printf(obj, "%*s", ANNOTATION__IPC_WIDTH, " ");
obj 2869 tools/perf/util/annotate.c obj__printf(obj, "%*s ", ANNOTATION__IPC_WIDTH - 1, "IPC");
obj 2873 tools/perf/util/annotate.c obj__printf(obj, "%*" PRIu64 " ",
obj 2876 tools/perf/util/annotate.c obj__printf(obj, "%*s",
obj 2879 tools/perf/util/annotate.c obj__printf(obj, "%*s ",
"%*s ", obj 2891 tools/perf/util/annotate.c obj__printf(obj, "%*s ", obj 2895 tools/perf/util/annotate.c obj__printf(obj, "%*s", obj 2899 tools/perf/util/annotate.c obj__printf(obj, "%*s ", obj 2906 tools/perf/util/annotate.c obj__printf(obj, "%*s", ANNOTATION__AVG_IPC_WIDTH, bf); obj 2910 tools/perf/util/annotate.c obj__printf(obj, " "); obj 2913 tools/perf/util/annotate.c obj__printf(obj, "%-*s", width - pcnt_width - cycles_width, " "); obj 2919 tools/perf/util/annotate.c obj__printf(obj, bf); obj 2920 tools/perf/util/annotate.c obj__printf(obj, "%-*s", width - printed - pcnt_width - cycles_width + 1, al->line); obj 2938 tools/perf/util/annotate.c prev = obj__set_jumps_percent_color(obj, al->jump_sources, obj 2940 tools/perf/util/annotate.c obj__printf(obj, bf); obj 2941 tools/perf/util/annotate.c obj__set_color(obj, prev); obj 2958 tools/perf/util/annotate.c color = obj__set_color(obj, HE_COLORSET_ADDR); obj 2959 tools/perf/util/annotate.c obj__printf(obj, bf); obj 2961 tools/perf/util/annotate.c obj__set_color(obj, color); obj 2963 tools/perf/util/annotate.c disasm_line__write(disasm_line(al), notes, obj, bf, sizeof(bf), obj__printf, obj__write_graph); obj 2965 tools/perf/util/annotate.c obj__printf(obj, "%-*s", width - pcnt_width - cycles_width - 3 - printed, bf); obj 2975 tools/perf/util/annotate.c wops->change_color, wops->width, wops->obj, obj 205 tools/perf/util/annotate.h void *obj; obj 206 tools/perf/util/annotate.h int (*set_color)(void *obj, int color); obj 207 tools/perf/util/annotate.h void (*set_percent_color)(void *obj, double percent, bool current); obj 208 tools/perf/util/annotate.h int (*set_jumps_percent_color)(void *obj, int nr, bool current); obj 209 tools/perf/util/annotate.h void (*printf)(void *obj, const char *fmt, ...); obj 210 tools/perf/util/annotate.h void (*write_graph)(void *obj, int graph); obj 54 tools/perf/util/bpf-loader.c struct bpf_object *obj; obj 61 tools/perf/util/bpf-loader.c obj = bpf_object__open_buffer(obj_buf, obj_buf_sz, name); obj 62 tools/perf/util/bpf-loader.c if (IS_ERR_OR_NULL(obj)) { obj 67 tools/perf/util/bpf-loader.c return obj; obj 72 tools/perf/util/bpf-loader.c struct bpf_object *obj; obj 94 tools/perf/util/bpf-loader.c obj = bpf_object__open_buffer(obj_buf, obj_buf_sz, filename); obj 96 tools/perf/util/bpf-loader.c if (!IS_ERR_OR_NULL(obj) && llvm_param.dump_obj) obj 101 tools/perf/util/bpf-loader.c obj = bpf_object__open(filename); obj 103 tools/perf/util/bpf-loader.c if (IS_ERR_OR_NULL(obj)) { obj 105 tools/perf/util/bpf-loader.c return obj; obj 108 tools/perf/util/bpf-loader.c return obj; obj 113 tools/perf/util/bpf-loader.c struct bpf_object *obj, *tmp; obj 115 tools/perf/util/bpf-loader.c bpf_object__for_each_safe(obj, tmp) { obj 116 tools/perf/util/bpf-loader.c bpf__unprobe(obj); obj 117 tools/perf/util/bpf-loader.c bpf_object__close(obj); obj 634 tools/perf/util/bpf-loader.c int bpf__probe(struct bpf_object *obj) obj 647 tools/perf/util/bpf-loader.c bpf_object__for_each_program(prog, obj) { obj 695 tools/perf/util/bpf-loader.c int bpf__unprobe(struct bpf_object *obj) obj 700 tools/perf/util/bpf-loader.c bpf_object__for_each_program(prog, obj) { obj 735 tools/perf/util/bpf-loader.c int bpf__load(struct bpf_object *obj) obj 739 tools/perf/util/bpf-loader.c err = bpf_object__load(obj); obj 749 tools/perf/util/bpf-loader.c int bpf__foreach_event(struct bpf_object *obj, obj 756 tools/perf/util/bpf-loader.c bpf_object__for_each_program(prog, obj) { obj 769 tools/perf/util/bpf-loader.c err = (*func)(priv->sys_name, priv->evt_name, 
obj 794 tools/perf/util/bpf-loader.c err = (*func)(tev->group, tev->event, fd, obj, arg);
obj 1173 tools/perf/util/bpf-loader.c bpf__obj_config_map(struct bpf_object *obj,
obj 1200 tools/perf/util/bpf-loader.c map = bpf_object__find_map_by_name(obj, map_name);
obj 1232 tools/perf/util/bpf-loader.c int bpf__config_obj(struct bpf_object *obj,
obj 1240 tools/perf/util/bpf-loader.c if (!obj || !term || !term->config)
obj 1245 tools/perf/util/bpf-loader.c err = bpf__obj_config_map(obj, term, evlist, &key_scan_pos);
obj 1492 tools/perf/util/bpf-loader.c apply_obj_config_object(struct bpf_object *obj)
obj 1497 tools/perf/util/bpf-loader.c bpf_object__for_each_map(map, obj) {
obj 1507 tools/perf/util/bpf-loader.c struct bpf_object *obj, *tmp;
obj 1510 tools/perf/util/bpf-loader.c bpf_object__for_each_safe(obj, tmp) {
obj 1511 tools/perf/util/bpf-loader.c err = apply_obj_config_object(obj);
obj 1519 tools/perf/util/bpf-loader.c #define bpf__for_each_map(pos, obj, objtmp) \
obj 1520 tools/perf/util/bpf-loader.c bpf_object__for_each_safe(obj, objtmp) \
obj 1521 tools/perf/util/bpf-loader.c bpf_object__for_each_map(pos, obj)
obj 1523 tools/perf/util/bpf-loader.c #define bpf__for_each_map_named(pos, obj, objtmp, name) \
obj 1524 tools/perf/util/bpf-loader.c bpf__for_each_map(pos, obj, objtmp) \
obj 1532 tools/perf/util/bpf-loader.c struct bpf_object *obj, *tmp;
obj 1538 tools/perf/util/bpf-loader.c bpf__for_each_map_named(map, obj, tmp, name) {
obj 1574 tools/perf/util/bpf-loader.c bpf__for_each_map_named(map, obj, tmp, name) {
obj 1716 tools/perf/util/bpf-loader.c int bpf__strerror_probe(struct bpf_object *obj __maybe_unused,
obj 1732 tools/perf/util/bpf-loader.c int bpf__strerror_load(struct bpf_object *obj,
obj 1737 tools/perf/util/bpf-loader.c unsigned int obj_kver = bpf_object__kversion(obj);
obj 1760 tools/perf/util/bpf-loader.c int bpf__strerror_config_obj(struct bpf_object *obj __maybe_unused,
obj 49 tools/perf/util/bpf-loader.h int fd, struct bpf_object *obj, void *arg);
obj 61 tools/perf/util/bpf-loader.h int bpf__probe(struct bpf_object *obj);
obj 62 tools/perf/util/bpf-loader.h int bpf__unprobe(struct bpf_object *obj);
obj 63 tools/perf/util/bpf-loader.h int bpf__strerror_probe(struct bpf_object *obj, int err,
obj 66 tools/perf/util/bpf-loader.h int bpf__load(struct bpf_object *obj);
obj 67 tools/perf/util/bpf-loader.h int bpf__strerror_load(struct bpf_object *obj, int err,
obj 69 tools/perf/util/bpf-loader.h int bpf__foreach_event(struct bpf_object *obj,
obj 72 tools/perf/util/bpf-loader.h int bpf__config_obj(struct bpf_object *obj, struct parse_events_term *term,
obj 74 tools/perf/util/bpf-loader.h int bpf__strerror_config_obj(struct bpf_object *obj,
obj 107 tools/perf/util/bpf-loader.h static inline int bpf__probe(struct bpf_object *obj __maybe_unused) { return 0;}
obj 108 tools/perf/util/bpf-loader.h static inline int bpf__unprobe(struct bpf_object *obj __maybe_unused) { return 0;}
obj 109 tools/perf/util/bpf-loader.h static inline int bpf__load(struct bpf_object *obj __maybe_unused) { return 0; }
obj 112 tools/perf/util/bpf-loader.h bpf__foreach_event(struct bpf_object *obj __maybe_unused,
obj 120 tools/perf/util/bpf-loader.h bpf__config_obj(struct bpf_object *obj __maybe_unused,
obj 168 tools/perf/util/bpf-loader.h bpf__strerror_probe(struct bpf_object *obj __maybe_unused,
obj 175 tools/perf/util/bpf-loader.h static inline int bpf__strerror_load(struct bpf_object *obj __maybe_unused,
obj 183 tools/perf/util/bpf-loader.h bpf__strerror_config_obj(struct bpf_object *obj __maybe_unused,
obj 635 tools/perf/util/parse-events.c static int add_bpf_event(const char *group, const char *event, int fd, struct bpf_object *obj,
obj 677 tools/perf/util/parse-events.c pos->bpf_obj = obj;
obj 685 tools/perf/util/parse-events.c struct bpf_object *obj,
obj 693 tools/perf/util/parse-events.c if (IS_ERR(obj) || !obj) {
obj 710 tools/perf/util/parse-events.c err = bpf__probe(obj);
obj 712 tools/perf/util/parse-events.c bpf__strerror_probe(obj, err, errbuf, sizeof(errbuf));
obj 716 tools/perf/util/parse-events.c err = bpf__load(obj);
obj 718 tools/perf/util/parse-events.c bpf__strerror_load(obj, err, errbuf, sizeof(errbuf));
obj 722 tools/perf/util/parse-events.c err = bpf__foreach_event(obj, add_bpf_event, &param);
obj 738 tools/perf/util/parse-events.c struct bpf_object *obj,
obj 761 tools/perf/util/parse-events.c err = bpf__config_obj(obj, term, parse_state->evlist, &error_pos);
obj 763 tools/perf/util/parse-events.c bpf__strerror_config_obj(obj, term, parse_state->evlist,
obj 820 tools/perf/util/parse-events.c struct bpf_object *obj;
obj 826 tools/perf/util/parse-events.c obj = bpf__prepare_load(bpf_file_name, source);
obj 827 tools/perf/util/parse-events.c if (IS_ERR(obj)) {
obj 830 tools/perf/util/parse-events.c err = PTR_ERR(obj);
obj 846 tools/perf/util/parse-events.c err = parse_events_load_bpf_obj(parse_state, list, obj, head_config);
obj 849 tools/perf/util/parse-events.c err = parse_events_config_bpf(parse_state, obj, &obj_head_config);
obj 159 tools/perf/util/parse-events.h struct bpf_object *obj,
obj 420 tools/perf/util/python.c PyObject *obj = NULL;
obj 423 tools/perf/util/python.c obj = get_tracepoint_field(pevent, attr_name);
obj 425 tools/perf/util/python.c return obj ?: PyObject_GenericGetAttr((PyObject *) pevent, attr_name);
obj 575 tools/perf/util/python.c static Py_ssize_t pyrf_cpu_map__length(PyObject *obj)
obj 577 tools/perf/util/python.c struct pyrf_cpu_map *pcpus = (void *)obj;
obj 582 tools/perf/util/python.c static PyObject *pyrf_cpu_map__item(PyObject *obj, Py_ssize_t i)
obj 584 tools/perf/util/python.c struct pyrf_cpu_map *pcpus = (void *)obj;
obj 644 tools/perf/util/python.c static Py_ssize_t pyrf_thread_map__length(PyObject *obj)
obj 646 tools/perf/util/python.c struct pyrf_thread_map *pthreads = (void *)obj;
obj 651 tools/perf/util/python.c static PyObject *pyrf_thread_map__item(PyObject *obj, Py_ssize_t i)
obj 653 tools/perf/util/python.c struct pyrf_thread_map *pthreads = (void *)obj;
obj 1122 tools/perf/util/python.c static Py_ssize_t pyrf_evlist__length(PyObject *obj)
obj 1124 tools/perf/util/python.c struct pyrf_evlist *pevlist = (void *)obj;
obj 1129 tools/perf/util/python.c static PyObject *pyrf_evlist__item(PyObject *obj, Py_ssize_t i)
obj 1131 tools/perf/util/python.c struct pyrf_evlist *pevlist = (void *)obj;
obj 1288 tools/perf/util/python.c PyObject *obj;
obj 1367 tools/perf/util/python.c obj = _PyLong_FromLong(perf__constants[i].value);
obj 1368 tools/perf/util/python.c if (obj == NULL)
obj 1370 tools/perf/util/python.c PyDict_SetItemString(dict, perf__constants[i].name, obj);
obj 1371 tools/perf/util/python.c Py_DECREF(obj);
obj 343 tools/perf/util/scripting-engines/trace-event-python.c PyObject *obj = NULL, *list = NULL;
obj 363 tools/perf/util/scripting-engines/trace-event-python.c obj = _PyLong_FromLong(val);
obj 365 tools/perf/util/scripting-engines/trace-event-python.c obj = PyLong_FromLongLong(val);
obj 368 tools/perf/util/scripting-engines/trace-event-python.c obj = _PyLong_FromLong(val);
obj 370 tools/perf/util/scripting-engines/trace-event-python.c obj = PyLong_FromUnsignedLongLong(val);
obj 373 tools/perf/util/scripting-engines/trace-event-python.c PyList_SET_ITEM(list, i, obj);
obj 376 tools/perf/util/scripting-engines/trace-event-python.c obj = list;
obj 377 tools/perf/util/scripting-engines/trace-event-python.c return obj;
obj 797 tools/perf/util/scripting-engines/trace-event-python.c PyObject *handler, *context, *t, *obj = NULL, *callchain;
obj 885 tools/perf/util/scripting-engines/trace-event-python.c obj = _PyUnicode_FromString((char *) data + offset);
obj 887 tools/perf/util/scripting-engines/trace-event-python.c obj = PyByteArray_FromStringAndSize((const char *) data + offset, len);
obj 891 tools/perf/util/scripting-engines/trace-event-python.c obj = get_field_numeric_entry(event, field, data);
obj 894 tools/perf/util/scripting-engines/trace-event-python.c PyTuple_SetItem(t, n++, obj);
obj 896 tools/perf/util/scripting-engines/trace-event-python.c pydict_set_item_string_decref(dict, field->name, obj);
obj 401 tools/testing/nvdimm/test/iomap.c union acpi_object **obj;
obj 406 tools/testing/nvdimm/test/iomap.c obj = nfit_res->buf;
obj 408 tools/testing/nvdimm/test/iomap.c buf->pointer = *obj;
obj 416 tools/testing/nvdimm/test/iomap.c union acpi_object *obj = ERR_PTR(-ENXIO);
obj 422 tools/testing/nvdimm/test/iomap.c obj = ops->evaluate_dsm(handle, guid, rev, func, argv4);
obj 425 tools/testing/nvdimm/test/iomap.c if (IS_ERR(obj))
obj 427 tools/testing/nvdimm/test/iomap.c return obj;
obj 2915 tools/testing/nvdimm/test/nfit.c union acpi_object *obj;
obj 3004 tools/testing/nvdimm/test/nfit.c obj = kzalloc(sizeof(*obj), GFP_KERNEL);
obj 3005 tools/testing/nvdimm/test/nfit.c if (!obj)
obj 3007 tools/testing/nvdimm/test/nfit.c obj->type = ACPI_TYPE_BUFFER;
obj 3008 tools/testing/nvdimm/test/nfit.c obj->buffer.length = nfit_test->nfit_size;
obj 3009 tools/testing/nvdimm/test/nfit.c obj->buffer.pointer = nfit_test->nfit_buf;
obj 3010 tools/testing/nvdimm/test/nfit.c *(nfit_test->_fit) = obj;
obj 26 tools/testing/selftests/bpf/flow_dissector_load.c struct bpf_object *obj;
obj 28 tools/testing/selftests/bpf/flow_dissector_load.c ret = bpf_flow_load(&obj, cfg_path_name, cfg_section_name,
obj 37 tools/testing/selftests/bpf/flow_dissector_load.c ret = bpf_object__pin(obj, cfg_pin_path);
obj 8 tools/testing/selftests/bpf/flow_dissector_load.h static inline int bpf_flow_load(struct bpf_object **obj,
obj 21 tools/testing/selftests/bpf/flow_dissector_load.h ret = bpf_prog_load(path, BPF_PROG_TYPE_FLOW_DISSECTOR, obj,
obj 26 tools/testing/selftests/bpf/flow_dissector_load.h main_prog = bpf_object__find_program_by_title(*obj, section_name);
obj 34 tools/testing/selftests/bpf/flow_dissector_load.h prog_array = bpf_object__find_map_by_name(*obj, map_name);
obj 43 tools/testing/selftests/bpf/flow_dissector_load.h keys = bpf_object__find_map_by_name(*obj, keys_map_name);
obj 53 tools/testing/selftests/bpf/flow_dissector_load.h bpf_object__for_each_program(prog, *obj) {
obj 35 tools/testing/selftests/bpf/get_cgroup_id_user.c static int bpf_find_map(const char *test, struct bpf_object *obj,
obj 40 tools/testing/selftests/bpf/get_cgroup_id_user.c map = bpf_object__find_map_by_name(obj, name);
obj 55 tools/testing/selftests/bpf/get_cgroup_id_user.c struct bpf_object *obj;
obj 75 tools/testing/selftests/bpf/get_cgroup_id_user.c err = bpf_prog_load(file, BPF_PROG_TYPE_TRACEPOINT, &obj, &prog_fd);
obj 79 tools/testing/selftests/bpf/get_cgroup_id_user.c cgidmap_fd = bpf_find_map(__func__, obj, "cg_ids");
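
The selftests referenced from here on mostly share one skeleton: bpf_prog_load() to open and load an object in one call, map fd lookup by name, then bpf_object__close(). A condensed sketch of that skeleton; the object file and map names below are placeholders:

    #include <bpf/libbpf.h>

    static void run_one(void)
    {
            struct bpf_object *obj;
            int prog_fd, map_fd;

            /* Open + load in one step, as the prog_tests do. */
            if (bpf_prog_load("test_prog.o", BPF_PROG_TYPE_SCHED_CLS,
                              &obj, &prog_fd))
                    return;

            /* Mirrors bpf_object__find_map_fd_by_name() (obj 4785 above):
             * bpf_map__fd() tolerates a NULL map and returns an error. */
            map_fd = bpf_map__fd(bpf_object__find_map_by_name(obj, "some_map"));
            if (map_fd >= 0) {
                    /* ... exercise prog_fd / map_fd ... */
            }

            bpf_object__close(obj);
    }
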
"cg_ids"); obj 84 tools/testing/selftests/bpf/get_cgroup_id_user.c pidmap_fd = bpf_find_map(__func__, obj, "pidmap"); obj 145 tools/testing/selftests/bpf/get_cgroup_id_user.c bpf_object__close(obj); obj 36 tools/testing/selftests/bpf/prog_tests/attach_probe.c struct bpf_object *obj; obj 53 tools/testing/selftests/bpf/prog_tests/attach_probe.c err = bpf_prog_load(file, BPF_PROG_TYPE_KPROBE, &obj, &prog_fd); obj 57 tools/testing/selftests/bpf/prog_tests/attach_probe.c kprobe_prog = bpf_object__find_program_by_title(obj, kprobe_name); obj 61 tools/testing/selftests/bpf/prog_tests/attach_probe.c kretprobe_prog = bpf_object__find_program_by_title(obj, kretprobe_name); obj 65 tools/testing/selftests/bpf/prog_tests/attach_probe.c uprobe_prog = bpf_object__find_program_by_title(obj, uprobe_name); obj 69 tools/testing/selftests/bpf/prog_tests/attach_probe.c uretprobe_prog = bpf_object__find_program_by_title(obj, uretprobe_name); obj 75 tools/testing/selftests/bpf/prog_tests/attach_probe.c results_map_fd = bpf_find_map(__func__, obj, "results_map"); obj 160 tools/testing/selftests/bpf/prog_tests/attach_probe.c bpf_object__close(obj); obj 21 tools/testing/selftests/bpf/prog_tests/bpf_verif_scale.c struct bpf_object *obj = NULL; obj 29 tools/testing/selftests/bpf/prog_tests/bpf_verif_scale.c err = bpf_prog_load_xattr(&attr, &obj, &prog_fd); obj 30 tools/testing/selftests/bpf/prog_tests/bpf_verif_scale.c bpf_object__close(obj); obj 298 tools/testing/selftests/bpf/prog_tests/core_reloc.c struct bpf_object *obj; obj 308 tools/testing/selftests/bpf/prog_tests/core_reloc.c obj = bpf_object__open(test_case->bpf_obj_file); obj 309 tools/testing/selftests/bpf/prog_tests/core_reloc.c if (CHECK(IS_ERR_OR_NULL(obj), "obj_open", obj 311 tools/testing/selftests/bpf/prog_tests/core_reloc.c test_case->bpf_obj_file, PTR_ERR(obj))) obj 314 tools/testing/selftests/bpf/prog_tests/core_reloc.c prog = bpf_object__find_program_by_title(obj, probe_name); obj 320 tools/testing/selftests/bpf/prog_tests/core_reloc.c load_attr.obj = obj; obj 340 tools/testing/selftests/bpf/prog_tests/core_reloc.c data_map = bpf_object__find_map_by_name(obj, "test_cor.bss"); obj 383 tools/testing/selftests/bpf/prog_tests/core_reloc.c bpf_object__close(obj); obj 449 tools/testing/selftests/bpf/prog_tests/flow_dissector.c struct bpf_object *obj; obj 452 tools/testing/selftests/bpf/prog_tests/flow_dissector.c err = bpf_flow_load(&obj, "./bpf_flow.o", "flow_dissector", obj 527 tools/testing/selftests/bpf/prog_tests/flow_dissector.c bpf_object__close(obj); obj 93 tools/testing/selftests/bpf/prog_tests/get_stack_raw_tp.c struct bpf_object *obj; obj 97 tools/testing/selftests/bpf/prog_tests/get_stack_raw_tp.c err = bpf_prog_load(file_err, BPF_PROG_TYPE_RAW_TRACEPOINT, &obj, &prog_fd); obj 101 tools/testing/selftests/bpf/prog_tests/get_stack_raw_tp.c err = bpf_prog_load(file, BPF_PROG_TYPE_RAW_TRACEPOINT, &obj, &prog_fd); obj 105 tools/testing/selftests/bpf/prog_tests/get_stack_raw_tp.c prog = bpf_object__find_program_by_title(obj, prog_name); obj 109 tools/testing/selftests/bpf/prog_tests/get_stack_raw_tp.c map = bpf_object__find_map_by_name(obj, "perfmap"); obj 148 tools/testing/selftests/bpf/prog_tests/get_stack_raw_tp.c bpf_object__close(obj); obj 4 tools/testing/selftests/bpf/prog_tests/global_data.c static void test_global_data_number(struct bpf_object *obj, __u32 duration) obj 9 tools/testing/selftests/bpf/prog_tests/global_data.c map_fd = bpf_find_map(__func__, obj, "result_number"); obj 39 tools/testing/selftests/bpf/prog_tests/global_data.c 
obj 44 tools/testing/selftests/bpf/prog_tests/global_data.c map_fd = bpf_find_map(__func__, obj, "result_string");
obj 74 tools/testing/selftests/bpf/prog_tests/global_data.c static void test_global_data_struct(struct bpf_object *obj, __u32 duration)
obj 79 tools/testing/selftests/bpf/prog_tests/global_data.c map_fd = bpf_find_map(__func__, obj, "result_struct");
obj 102 tools/testing/selftests/bpf/prog_tests/global_data.c static void test_global_data_rdonly(struct bpf_object *obj, __u32 duration)
obj 108 tools/testing/selftests/bpf/prog_tests/global_data.c map = bpf_object__find_map_by_name(obj, "test_glo.rodata");
obj 128 tools/testing/selftests/bpf/prog_tests/global_data.c struct bpf_object *obj;
obj 131 tools/testing/selftests/bpf/prog_tests/global_data.c err = bpf_prog_load(file, BPF_PROG_TYPE_SCHED_CLS, &obj, &prog_fd);
obj 141 tools/testing/selftests/bpf/prog_tests/global_data.c test_global_data_number(obj, duration);
obj 142 tools/testing/selftests/bpf/prog_tests/global_data.c test_global_data_string(obj, duration);
obj 143 tools/testing/selftests/bpf/prog_tests/global_data.c test_global_data_struct(obj, duration);
obj 144 tools/testing/selftests/bpf/prog_tests/global_data.c test_global_data_rdonly(obj, duration);
obj 146 tools/testing/selftests/bpf/prog_tests/global_data.c bpf_object__close(obj);
obj 28 tools/testing/selftests/bpf/prog_tests/l4lb_all.c struct bpf_object *obj;
obj 32 tools/testing/selftests/bpf/prog_tests/l4lb_all.c err = bpf_prog_load(file, BPF_PROG_TYPE_SCHED_CLS, &obj, &prog_fd);
obj 36 tools/testing/selftests/bpf/prog_tests/l4lb_all.c map_fd = bpf_find_map(__func__, obj, "vip_map");
obj 41 tools/testing/selftests/bpf/prog_tests/l4lb_all.c map_fd = bpf_find_map(__func__, obj, "ch_rings");
obj 46 tools/testing/selftests/bpf/prog_tests/l4lb_all.c map_fd = bpf_find_map(__func__, obj, "reals");
obj 65 tools/testing/selftests/bpf/prog_tests/l4lb_all.c map_fd = bpf_find_map(__func__, obj, "stats");
obj 77 tools/testing/selftests/bpf/prog_tests/l4lb_all.c bpf_object__close(obj);
obj 38 tools/testing/selftests/bpf/prog_tests/map_lock.c struct bpf_object *obj = NULL;
obj 42 tools/testing/selftests/bpf/prog_tests/map_lock.c err = bpf_prog_load(file, BPF_PROG_TYPE_CGROUP_SKB, &obj, &prog_fd);
obj 47 tools/testing/selftests/bpf/prog_tests/map_lock.c map_fd[0] = bpf_find_map(__func__, obj, "hash_map");
obj 50 tools/testing/selftests/bpf/prog_tests/map_lock.c map_fd[1] = bpf_find_map(__func__, obj, "array_map");
obj 74 tools/testing/selftests/bpf/prog_tests/map_lock.c bpf_object__close(obj);
obj 30 tools/testing/selftests/bpf/prog_tests/perf_buffer.c struct bpf_object *obj;
obj 49 tools/testing/selftests/bpf/prog_tests/perf_buffer.c err = bpf_prog_load(file, BPF_PROG_TYPE_KPROBE, &obj, &prog_fd);
obj 51 tools/testing/selftests/bpf/prog_tests/perf_buffer.c obj = NULL;
obj 55 tools/testing/selftests/bpf/prog_tests/perf_buffer.c prog = bpf_object__find_program_by_title(obj, prog_name);
obj 60 tools/testing/selftests/bpf/prog_tests/perf_buffer.c perf_buf_map = bpf_object__find_map_by_name(obj, "perf_buf_map");
obj 111 tools/testing/selftests/bpf/prog_tests/perf_buffer.c bpf_object__close(obj);
obj 7 tools/testing/selftests/bpf/prog_tests/pkt_access.c struct bpf_object *obj;
obj 11 tools/testing/selftests/bpf/prog_tests/pkt_access.c err = bpf_prog_load(file, BPF_PROG_TYPE_SCHED_CLS, &obj, &prog_fd);
obj 26 tools/testing/selftests/bpf/prog_tests/pkt_access.c bpf_object__close(obj);
obj 7 tools/testing/selftests/bpf/prog_tests/pkt_md_access.c struct bpf_object *obj;
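
The test_global_data_* helpers above read back values the program stored in its global-data maps. A hedged fragment of that read-back step using bpf_map_lookup_elem() from tools/lib/bpf/bpf.h; the key and value types here are illustrative, not copied from the tests:

    #include <bpf/bpf.h>
    #include <bpf/libbpf.h>
    #include <stdio.h>

    static void dump_result(struct bpf_object *obj)
    {
            __u32 key = 0;
            __u64 value = 0;

            /* Library equivalent of the tests' local bpf_find_map() helper. */
            int map_fd = bpf_object__find_map_fd_by_name(obj, "result_number");

            if (map_fd >= 0 && !bpf_map_lookup_elem(map_fd, &key, &value))
                    printf("result: %llu\n", (unsigned long long)value);
    }
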
obj 11 tools/testing/selftests/bpf/prog_tests/pkt_md_access.c err = bpf_prog_load(file, BPF_PROG_TYPE_SCHED_CLS, &obj, &prog_fd);
obj 21 tools/testing/selftests/bpf/prog_tests/pkt_md_access.c bpf_object__close(obj);
obj 7 tools/testing/selftests/bpf/prog_tests/prog_run_xattr.c struct bpf_object *obj;
obj 18 tools/testing/selftests/bpf/prog_tests/prog_run_xattr.c err = bpf_prog_load(file, BPF_PROG_TYPE_SCHED_CLS, &obj,
obj 48 tools/testing/selftests/bpf/prog_tests/prog_run_xattr.c bpf_object__close(obj);
obj 15 tools/testing/selftests/bpf/prog_tests/queue_stack_map.c struct bpf_object *obj;
obj 29 tools/testing/selftests/bpf/prog_tests/queue_stack_map.c err = bpf_prog_load(file, BPF_PROG_TYPE_SCHED_CLS, &obj, &prog_fd);
obj 33 tools/testing/selftests/bpf/prog_tests/queue_stack_map.c map_in_fd = bpf_find_map(__func__, obj, "map_in");
obj 37 tools/testing/selftests/bpf/prog_tests/queue_stack_map.c map_out_fd = bpf_find_map(__func__, obj, "map_out");
obj 92 tools/testing/selftests/bpf/prog_tests/queue_stack_map.c bpf_object__close(obj);
obj 7 tools/testing/selftests/bpf/prog_tests/reference_tracking.c struct bpf_object *obj;
obj 12 tools/testing/selftests/bpf/prog_tests/reference_tracking.c obj = bpf_object__open(file);
obj 13 tools/testing/selftests/bpf/prog_tests/reference_tracking.c if (CHECK_FAIL(IS_ERR(obj)))
obj 16 tools/testing/selftests/bpf/prog_tests/reference_tracking.c bpf_object__for_each_program(prog, obj) {
obj 38 tools/testing/selftests/bpf/prog_tests/reference_tracking.c bpf_object__close(obj);
obj 17 tools/testing/selftests/bpf/prog_tests/send_signal.c struct bpf_object *obj = NULL;
obj 76 tools/testing/selftests/bpf/prog_tests/send_signal.c err = bpf_prog_load(file, prog_type, &obj, &prog_fd);
obj 100 tools/testing/selftests/bpf/prog_tests/send_signal.c info_map_fd = bpf_object__find_map_fd_by_name(obj, "info_map");
obj 104 tools/testing/selftests/bpf/prog_tests/send_signal.c status_map_fd = bpf_object__find_map_fd_by_name(obj, "status_map");
obj 136 tools/testing/selftests/bpf/prog_tests/send_signal.c bpf_object__close(obj);
obj 22 tools/testing/selftests/bpf/prog_tests/skb_ctx.c struct bpf_object *obj;
obj 26 tools/testing/selftests/bpf/prog_tests/skb_ctx.c err = bpf_prog_load("./test_skb_ctx.o", BPF_PROG_TYPE_SCHED_CLS, &obj,
obj 139 tools/testing/selftests/bpf/prog_tests/sockopt_inherit.c static int prog_attach(struct bpf_object *obj, int cgroup_fd, const char *title)
obj 152 tools/testing/selftests/bpf/prog_tests/sockopt_inherit.c prog = bpf_object__find_program_by_title(obj, title);
obj 174 tools/testing/selftests/bpf/prog_tests/sockopt_inherit.c struct bpf_object *obj;
obj 180 tools/testing/selftests/bpf/prog_tests/sockopt_inherit.c err = bpf_prog_load_xattr(&attr, &obj, &ignored);
obj 184 tools/testing/selftests/bpf/prog_tests/sockopt_inherit.c err = prog_attach(obj, cgroup_fd, "cgroup/getsockopt");
obj 188 tools/testing/selftests/bpf/prog_tests/sockopt_inherit.c err = prog_attach(obj, cgroup_fd, "cgroup/setsockopt");
obj 222 tools/testing/selftests/bpf/prog_tests/sockopt_inherit.c bpf_object__close(obj);
obj 5 tools/testing/selftests/bpf/prog_tests/sockopt_multi.c static int prog_attach(struct bpf_object *obj, int cgroup_fd, const char *title)
obj 18 tools/testing/selftests/bpf/prog_tests/sockopt_multi.c prog = bpf_object__find_program_by_title(obj, title);
obj 34 tools/testing/selftests/bpf/prog_tests/sockopt_multi.c static int prog_detach(struct bpf_object *obj, int cgroup_fd, const char *title)
obj 45 tools/testing/selftests/bpf/prog_tests/sockopt_multi.c prog = bpf_object__find_program_by_title(obj, title);
obj 57 tools/testing/selftests/bpf/prog_tests/sockopt_multi.c static int run_getsockopt_test(struct bpf_object *obj, int cg_parent,
obj 92 tools/testing/selftests/bpf/prog_tests/sockopt_multi.c err = prog_attach(obj, cg_child, "cgroup/getsockopt/child");
obj 116 tools/testing/selftests/bpf/prog_tests/sockopt_multi.c err = prog_attach(obj, cg_parent, "cgroup/getsockopt/parent");
obj 159 tools/testing/selftests/bpf/prog_tests/sockopt_multi.c err = prog_detach(obj, cg_child, "cgroup/getsockopt/child");
obj 200 tools/testing/selftests/bpf/prog_tests/sockopt_multi.c prog_detach(obj, cg_child, "cgroup/getsockopt/child");
obj 201 tools/testing/selftests/bpf/prog_tests/sockopt_multi.c prog_detach(obj, cg_parent, "cgroup/getsockopt/parent");
obj 206 tools/testing/selftests/bpf/prog_tests/sockopt_multi.c static int run_setsockopt_test(struct bpf_object *obj, int cg_parent,
obj 238 tools/testing/selftests/bpf/prog_tests/sockopt_multi.c err = prog_attach(obj, cg_child, "cgroup/setsockopt");
obj 265 tools/testing/selftests/bpf/prog_tests/sockopt_multi.c err = prog_attach(obj, cg_parent, "cgroup/setsockopt");
obj 291 tools/testing/selftests/bpf/prog_tests/sockopt_multi.c prog_detach(obj, cg_child, "cgroup/setsockopt");
obj 292 tools/testing/selftests/bpf/prog_tests/sockopt_multi.c prog_detach(obj, cg_parent, "cgroup/setsockopt");
obj 303 tools/testing/selftests/bpf/prog_tests/sockopt_multi.c struct bpf_object *obj = NULL;
obj 316 tools/testing/selftests/bpf/prog_tests/sockopt_multi.c err = bpf_prog_load_xattr(&attr, &obj, &ignored);
obj 324 tools/testing/selftests/bpf/prog_tests/sockopt_multi.c CHECK_FAIL(run_getsockopt_test(obj, cg_parent, cg_child, sock_fd));
obj 325 tools/testing/selftests/bpf/prog_tests/sockopt_multi.c CHECK_FAIL(run_setsockopt_test(obj, cg_parent, cg_child, sock_fd));
obj 329 tools/testing/selftests/bpf/prog_tests/sockopt_multi.c bpf_object__close(obj);
obj 134 tools/testing/selftests/bpf/prog_tests/sockopt_sk.c static int prog_attach(struct bpf_object *obj, int cgroup_fd, const char *title)
obj 147 tools/testing/selftests/bpf/prog_tests/sockopt_sk.c prog = bpf_object__find_program_by_title(obj, title);
obj 168 tools/testing/selftests/bpf/prog_tests/sockopt_sk.c struct bpf_object *obj;
obj 172 tools/testing/selftests/bpf/prog_tests/sockopt_sk.c err = bpf_prog_load_xattr(&attr, &obj, &ignored);
obj 176 tools/testing/selftests/bpf/prog_tests/sockopt_sk.c err = prog_attach(obj, cgroup_fd, "cgroup/getsockopt");
obj 180 tools/testing/selftests/bpf/prog_tests/sockopt_sk.c err = prog_attach(obj, cgroup_fd, "cgroup/setsockopt");
obj 187 tools/testing/selftests/bpf/prog_tests/sockopt_sk.c bpf_object__close(obj);
obj 8 tools/testing/selftests/bpf/prog_tests/spinlock.c struct bpf_object *obj = NULL;
obj 13 tools/testing/selftests/bpf/prog_tests/spinlock.c err = bpf_prog_load(file, BPF_PROG_TYPE_CGROUP_SKB, &obj, &prog_fd);
obj 28 tools/testing/selftests/bpf/prog_tests/spinlock.c bpf_object__close(obj);
obj 12 tools/testing/selftests/bpf/prog_tests/stacktrace_build_id.c struct bpf_object *obj;
obj 21 tools/testing/selftests/bpf/prog_tests/stacktrace_build_id.c err = bpf_prog_load(file, BPF_PROG_TYPE_TRACEPOINT, &obj, &prog_fd);
obj 25 tools/testing/selftests/bpf/prog_tests/stacktrace_build_id.c prog = bpf_object__find_program_by_title(obj, prog_name);
obj 34 tools/testing/selftests/bpf/prog_tests/stacktrace_build_id.c control_map_fd = bpf_find_map(__func__, obj, "control_map");
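
The sockopt tests' prog_attach() helpers above find a program by its section title and attach its fd to a cgroup. A sketch of that pattern; the helper name here is invented, and BPF_F_ALLOW_MULTI is the flag that lets the multi-attach test stack parent and child cgroup programs:

    #include <bpf/bpf.h>
    #include <bpf/libbpf.h>

    /* e.g. attach_by_title(obj, cg_fd, "cgroup/getsockopt",
     *                      BPF_CGROUP_GETSOCKOPT); */
    static int attach_by_title(struct bpf_object *obj, int cgroup_fd,
                               const char *title, enum bpf_attach_type type)
    {
            struct bpf_program *prog;

            prog = bpf_object__find_program_by_title(obj, title);
            if (!prog)
                    return -1;

            /* BPF_F_ALLOW_MULTI permits coexisting programs in the
             * cgroup hierarchy, as run_getsockopt_test() relies on. */
            return bpf_prog_attach(bpf_program__fd(prog), cgroup_fd, type,
                                   BPF_F_ALLOW_MULTI);
    }
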
"control_map"); obj 39 tools/testing/selftests/bpf/prog_tests/stacktrace_build_id.c stackid_hmap_fd = bpf_find_map(__func__, obj, "stackid_hmap"); obj 44 tools/testing/selftests/bpf/prog_tests/stacktrace_build_id.c stackmap_fd = bpf_find_map(__func__, obj, "stackmap"); obj 49 tools/testing/selftests/bpf/prog_tests/stacktrace_build_id.c stack_amap_fd = bpf_find_map(__func__, obj, "stack_amap"); obj 112 tools/testing/selftests/bpf/prog_tests/stacktrace_build_id.c bpf_object__close(obj); obj 132 tools/testing/selftests/bpf/prog_tests/stacktrace_build_id.c bpf_object__close(obj); obj 30 tools/testing/selftests/bpf/prog_tests/stacktrace_build_id_nmi.c struct bpf_object *obj; obj 41 tools/testing/selftests/bpf/prog_tests/stacktrace_build_id_nmi.c err = bpf_prog_load(file, BPF_PROG_TYPE_PERF_EVENT, &obj, &prog_fd); obj 45 tools/testing/selftests/bpf/prog_tests/stacktrace_build_id_nmi.c prog = bpf_object__find_program_by_title(obj, prog_name); obj 69 tools/testing/selftests/bpf/prog_tests/stacktrace_build_id_nmi.c control_map_fd = bpf_find_map(__func__, obj, "control_map"); obj 74 tools/testing/selftests/bpf/prog_tests/stacktrace_build_id_nmi.c stackid_hmap_fd = bpf_find_map(__func__, obj, "stackid_hmap"); obj 79 tools/testing/selftests/bpf/prog_tests/stacktrace_build_id_nmi.c stackmap_fd = bpf_find_map(__func__, obj, "stackmap"); obj 84 tools/testing/selftests/bpf/prog_tests/stacktrace_build_id_nmi.c stack_amap_fd = bpf_find_map(__func__, obj, "stack_amap"); obj 147 tools/testing/selftests/bpf/prog_tests/stacktrace_build_id_nmi.c bpf_object__close(obj); obj 167 tools/testing/selftests/bpf/prog_tests/stacktrace_build_id_nmi.c bpf_object__close(obj); obj 12 tools/testing/selftests/bpf/prog_tests/stacktrace_map.c struct bpf_object *obj; obj 15 tools/testing/selftests/bpf/prog_tests/stacktrace_map.c err = bpf_prog_load(file, BPF_PROG_TYPE_TRACEPOINT, &obj, &prog_fd); obj 19 tools/testing/selftests/bpf/prog_tests/stacktrace_map.c prog = bpf_object__find_program_by_title(obj, prog_name); obj 28 tools/testing/selftests/bpf/prog_tests/stacktrace_map.c control_map_fd = bpf_find_map(__func__, obj, "control_map"); obj 32 tools/testing/selftests/bpf/prog_tests/stacktrace_map.c stackid_hmap_fd = bpf_find_map(__func__, obj, "stackid_hmap"); obj 36 tools/testing/selftests/bpf/prog_tests/stacktrace_map.c stackmap_fd = bpf_find_map(__func__, obj, "stackmap"); obj 40 tools/testing/selftests/bpf/prog_tests/stacktrace_map.c stack_amap_fd = bpf_find_map(__func__, obj, "stack_amap"); obj 74 tools/testing/selftests/bpf/prog_tests/stacktrace_map.c bpf_object__close(obj); obj 12 tools/testing/selftests/bpf/prog_tests/stacktrace_map_raw_tp.c struct bpf_object *obj; obj 15 tools/testing/selftests/bpf/prog_tests/stacktrace_map_raw_tp.c err = bpf_prog_load(file, BPF_PROG_TYPE_RAW_TRACEPOINT, &obj, &prog_fd); obj 19 tools/testing/selftests/bpf/prog_tests/stacktrace_map_raw_tp.c prog = bpf_object__find_program_by_title(obj, prog_name); obj 28 tools/testing/selftests/bpf/prog_tests/stacktrace_map_raw_tp.c control_map_fd = bpf_find_map(__func__, obj, "control_map"); obj 32 tools/testing/selftests/bpf/prog_tests/stacktrace_map_raw_tp.c stackid_hmap_fd = bpf_find_map(__func__, obj, "stackid_hmap"); obj 36 tools/testing/selftests/bpf/prog_tests/stacktrace_map_raw_tp.c stackmap_fd = bpf_find_map(__func__, obj, "stackmap"); obj 64 tools/testing/selftests/bpf/prog_tests/stacktrace_map_raw_tp.c bpf_object__close(obj); obj 9 tools/testing/selftests/bpf/prog_tests/task_fd_query_rawtp.c struct bpf_object *obj; obj 14 
obj 9 tools/testing/selftests/bpf/prog_tests/task_fd_query_rawtp.c struct bpf_object *obj;
obj 14 tools/testing/selftests/bpf/prog_tests/task_fd_query_rawtp.c err = bpf_prog_load(file, BPF_PROG_TYPE_RAW_TRACEPOINT, &obj, &prog_fd);
obj 74 tools/testing/selftests/bpf/prog_tests/task_fd_query_rawtp.c bpf_object__close(obj);
obj 12 tools/testing/selftests/bpf/prog_tests/task_fd_query_tp.c struct bpf_object *obj = NULL;
obj 16 tools/testing/selftests/bpf/prog_tests/task_fd_query_tp.c err = bpf_prog_load(file, BPF_PROG_TYPE_TRACEPOINT, &obj, &prog_fd);
obj 68 tools/testing/selftests/bpf/prog_tests/task_fd_query_tp.c bpf_object__close(obj);
obj 8 tools/testing/selftests/bpf/prog_tests/tcp_estats.c struct bpf_object *obj;
obj 11 tools/testing/selftests/bpf/prog_tests/tcp_estats.c err = bpf_prog_load(file, BPF_PROG_TYPE_TRACEPOINT, &obj, &prog_fd);
obj 16 tools/testing/selftests/bpf/prog_tests/tcp_estats.c bpf_object__close(obj);
obj 126 tools/testing/selftests/bpf/prog_tests/tcp_rtt.c struct bpf_object *obj;
obj 133 tools/testing/selftests/bpf/prog_tests/tcp_rtt.c err = bpf_prog_load_xattr(&attr, &obj, &prog_fd);
obj 139 tools/testing/selftests/bpf/prog_tests/tcp_rtt.c map = bpf_map__next(NULL, obj);
obj 179 tools/testing/selftests/bpf/prog_tests/tcp_rtt.c bpf_object__close(obj);
obj 12 tools/testing/selftests/bpf/prog_tests/tp_attach_query.c struct bpf_object *obj[num_progs];
obj 17 tools/testing/selftests/bpf/prog_tests/tp_attach_query.c obj[i] = NULL;
obj 38 tools/testing/selftests/bpf/prog_tests/tp_attach_query.c err = bpf_prog_load(file, BPF_PROG_TYPE_TRACEPOINT, &obj[i],
obj 132 tools/testing/selftests/bpf/prog_tests/tp_attach_query.c bpf_object__close(obj[i]);
obj 11 tools/testing/selftests/bpf/prog_tests/xdp.c struct bpf_object *obj;
obj 18 tools/testing/selftests/bpf/prog_tests/xdp.c err = bpf_prog_load(file, BPF_PROG_TYPE_XDP, &obj, &prog_fd);
obj 22 tools/testing/selftests/bpf/prog_tests/xdp.c map_fd = bpf_find_map(__func__, obj, "vip2tnl");
obj 43 tools/testing/selftests/bpf/prog_tests/xdp.c bpf_object__close(obj);
obj 7 tools/testing/selftests/bpf/prog_tests/xdp_adjust_tail.c struct bpf_object *obj;
obj 12 tools/testing/selftests/bpf/prog_tests/xdp_adjust_tail.c err = bpf_prog_load(file, BPF_PROG_TYPE_XDP, &obj, &prog_fd);
obj 28 tools/testing/selftests/bpf/prog_tests/xdp_adjust_tail.c bpf_object__close(obj);
obj 29 tools/testing/selftests/bpf/prog_tests/xdp_noinline.c struct bpf_object *obj;
obj 33 tools/testing/selftests/bpf/prog_tests/xdp_noinline.c err = bpf_prog_load(file, BPF_PROG_TYPE_XDP, &obj, &prog_fd);
obj 37 tools/testing/selftests/bpf/prog_tests/xdp_noinline.c map_fd = bpf_find_map(__func__, obj, "vip_map");
obj 42 tools/testing/selftests/bpf/prog_tests/xdp_noinline.c map_fd = bpf_find_map(__func__, obj, "ch_rings");
obj 47 tools/testing/selftests/bpf/prog_tests/xdp_noinline.c map_fd = bpf_find_map(__func__, obj, "reals");
obj 66 tools/testing/selftests/bpf/prog_tests/xdp_noinline.c map_fd = bpf_find_map(__func__, obj, "stats");
obj 80 tools/testing/selftests/bpf/prog_tests/xdp_noinline.c bpf_object__close(obj);
obj 4120 tools/testing/selftests/bpf/test_btf.c struct bpf_object *obj = NULL;
obj 4147 tools/testing/selftests/bpf/test_btf.c obj = bpf_object__open(test->file);
obj 4148 tools/testing/selftests/bpf/test_btf.c if (CHECK(IS_ERR(obj), "obj: %ld", PTR_ERR(obj)))
obj 4149 tools/testing/selftests/bpf/test_btf.c return PTR_ERR(obj);
obj 4151 tools/testing/selftests/bpf/test_btf.c err = bpf_object__btf_fd(obj);
obj 4155 tools/testing/selftests/bpf/test_btf.c prog = bpf_program__next(NULL, obj);
obj 4162 tools/testing/selftests/bpf/test_btf.c err = bpf_object__load(obj);
obj 4167 tools/testing/selftests/bpf/test_btf.c map = bpf_object__find_map_by_name(obj, "btf_map");
obj 4274 tools/testing/selftests/bpf/test_btf.c bpf_object__close(obj);
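The test_btf entries show the split open/load flow, where the object is opened first so its programs and maps can be inspected or adjusted before anything reaches the kernel. A condensed sketch of that flow, using "btf_map" from the entries above; error handling goes through libbpf's IS_ERR-style pointers as in the original.

#include <bpf/libbpf.h>

static int open_load_lookup(const char *file)
{
	struct bpf_object *obj;
	struct bpf_program *prog;
	struct bpf_map *map;
	int err;

	obj = bpf_object__open(file);
	err = libbpf_get_error(obj);	/* obj encodes the error on failure */
	if (err)
		return err;

	prog = bpf_program__next(NULL, obj);	/* first program in the object */
	if (!prog) {
		err = -1;
		goto out;
	}

	err = bpf_object__load(obj);	/* creates maps, loads programs */
	if (err)
		goto out;

	map = bpf_object__find_map_by_name(obj, "btf_map");
	if (!map)
		err = -1;
out:
	bpf_object__close(obj);
	return err;
}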
obj 25 tools/testing/selftests/bpf/test_dev_cgroup.c struct bpf_object *obj;
obj 31 tools/testing/selftests/bpf/test_dev_cgroup.c &obj, &prog_fd)) {
obj 53 tools/testing/selftests/bpf/test_libbpf_open.c int test_walk_progs(struct bpf_object *obj, bool verbose)
obj 58 tools/testing/selftests/bpf/test_libbpf_open.c bpf_object__for_each_program(prog, obj) {
obj 67 tools/testing/selftests/bpf/test_libbpf_open.c int test_walk_maps(struct bpf_object *obj, bool verbose)
obj 72 tools/testing/selftests/bpf/test_libbpf_open.c bpf_object__for_each_map(map, obj) {
obj 50 tools/testing/selftests/bpf/test_lirc_mode2_user.c struct bpf_object *obj;
obj 62 tools/testing/selftests/bpf/test_lirc_mode2_user.c BPF_PROG_TYPE_LIRC_MODE2, &obj, &progfd);
obj 659 tools/testing/selftests/bpf/test_maps.c struct bpf_object *obj;
obj 821 tools/testing/selftests/bpf/test_maps.c BPF_PROG_TYPE_SK_SKB, &obj, &parse_prog);
obj 828 tools/testing/selftests/bpf/test_maps.c BPF_PROG_TYPE_SK_MSG, &obj, &msg_prog);
obj 835 tools/testing/selftests/bpf/test_maps.c BPF_PROG_TYPE_SK_SKB, &obj, &verdict_prog);
obj 841 tools/testing/selftests/bpf/test_maps.c bpf_map_rx = bpf_object__find_map_by_name(obj, "sock_map_rx");
obj 853 tools/testing/selftests/bpf/test_maps.c bpf_map_tx = bpf_object__find_map_by_name(obj, "sock_map_tx");
obj 865 tools/testing/selftests/bpf/test_maps.c bpf_map_msg = bpf_object__find_map_by_name(obj, "sock_map_msg");
obj 877 tools/testing/selftests/bpf/test_maps.c bpf_map_break = bpf_object__find_map_by_name(obj, "sock_map_break");
obj 1123 tools/testing/selftests/bpf/test_maps.c bpf_object__close(obj);
obj 1146 tools/testing/selftests/bpf/test_maps.c struct bpf_object *obj;
obj 1151 tools/testing/selftests/bpf/test_maps.c obj = bpf_object__open(MAPINMAP_PROG);
obj 1160 tools/testing/selftests/bpf/test_maps.c map = bpf_object__find_map_by_name(obj, "mim_array");
obj 1171 tools/testing/selftests/bpf/test_maps.c map = bpf_object__find_map_by_name(obj, "mim_hash");
obj 1182 tools/testing/selftests/bpf/test_maps.c bpf_object__for_each_program(prog, obj) {
obj 1185 tools/testing/selftests/bpf/test_maps.c bpf_object__load(obj);
obj 1187 tools/testing/selftests/bpf/test_maps.c map = bpf_object__find_map_by_name(obj, "mim_array");
obj 1204 tools/testing/selftests/bpf/test_maps.c map = bpf_object__find_map_by_name(obj, "mim_hash");
obj 1222 tools/testing/selftests/bpf/test_maps.c bpf_object__close(obj);
obj 21 tools/testing/selftests/bpf/test_netcnt.c static int bpf_find_map(const char *test, struct bpf_object *obj,
obj 26 tools/testing/selftests/bpf/test_netcnt.c map = bpf_object__find_map_by_name(obj, name);
obj 41 tools/testing/selftests/bpf/test_netcnt.c struct bpf_object *obj;
obj 56 tools/testing/selftests/bpf/test_netcnt.c &obj, &prog_fd)) {
obj 95 tools/testing/selftests/bpf/test_netcnt.c map_fd = bpf_find_map(__func__, obj, "netcnt");
obj 101 tools/testing/selftests/bpf/test_netcnt.c percpu_map_fd = bpf_find_map(__func__, obj, "percpu_netcnt");
obj 184 tools/testing/selftests/bpf/test_progs.c int bpf_find_map(const char *test, struct bpf_object *obj, const char *name)
obj 188 tools/testing/selftests/bpf/test_progs.c map = bpf_object__find_map_by_name(obj, name);
obj 131 tools/testing/selftests/bpf/test_progs.h int bpf_find_map(const char *test, struct bpf_object *obj, const char *name);
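test_progs.c/test_progs.h export the bpf_find_map() helper that many entries above call, and test_netcnt.c and test_tcpbpf_user.c carry private copies. Its shape, as suggested by the find_map_by_name lines in the listing (the exact error message is an assumption): resolve a map by name and hand back its fd, logging the failing test on a miss.

#include <stdio.h>
#include <bpf/libbpf.h>

int bpf_find_map(const char *test, struct bpf_object *obj, const char *name)
{
	struct bpf_map *map;

	map = bpf_object__find_map_by_name(obj, name);
	if (!map) {
		fprintf(stderr, "%s: failed to find map %s\n", test, name);
		return -1;
	}
	return bpf_map__fd(map);	/* valid once the object is loaded */
}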
obj 38 tools/testing/selftests/bpf/test_select_reuseport.c static struct bpf_object *obj;
obj 95 tools/testing/selftests/bpf/test_select_reuseport.c obj = bpf_object__open_xattr(&attr);
obj 96 tools/testing/selftests/bpf/test_select_reuseport.c CHECK(IS_ERR_OR_NULL(obj), "open test_select_reuseport_kern.o",
obj 97 tools/testing/selftests/bpf/test_select_reuseport.c "obj:%p PTR_ERR(obj):%ld\n", obj, PTR_ERR(obj));
obj 99 tools/testing/selftests/bpf/test_select_reuseport.c prog = bpf_program__next(NULL, obj);
obj 103 tools/testing/selftests/bpf/test_select_reuseport.c map = bpf_object__find_map_by_name(obj, "outer_map");
obj 108 tools/testing/selftests/bpf/test_select_reuseport.c err = bpf_object__load(obj);
obj 115 tools/testing/selftests/bpf/test_select_reuseport.c map = bpf_object__find_map_by_name(obj, "result_map");
obj 121 tools/testing/selftests/bpf/test_select_reuseport.c map = bpf_object__find_map_by_name(obj, "tmp_index_ovr_map");
obj 127 tools/testing/selftests/bpf/test_select_reuseport.c map = bpf_object__find_map_by_name(obj, "linum_map");
obj 133 tools/testing/selftests/bpf/test_select_reuseport.c map = bpf_object__find_map_by_name(obj, "data_check_map");
obj 692 tools/testing/selftests/bpf/test_select_reuseport.c bpf_object__close(obj);
obj 832 tools/testing/selftests/bpf/test_sock_addr.c struct bpf_object *obj;
obj 841 tools/testing/selftests/bpf/test_sock_addr.c if (bpf_prog_load_xattr(&attr, &obj, &prog_fd)) {
obj 421 tools/testing/selftests/bpf/test_sock_fields.c struct bpf_object *obj;
obj 438 tools/testing/selftests/bpf/test_sock_fields.c err = bpf_prog_load_xattr(&attr, &obj, &egress_fd);
obj 441 tools/testing/selftests/bpf/test_sock_fields.c ingress_prog = bpf_object__find_program_by_title(obj,
obj 458 tools/testing/selftests/bpf/test_sock_fields.c map = bpf_object__find_map_by_name(obj, "addr_map");
obj 462 tools/testing/selftests/bpf/test_sock_fields.c map = bpf_object__find_map_by_name(obj, "sock_result_map");
obj 466 tools/testing/selftests/bpf/test_sock_fields.c map = bpf_object__find_map_by_name(obj, "tcp_sock_result_map");
obj 470 tools/testing/selftests/bpf/test_sock_fields.c map = bpf_object__find_map_by_name(obj, "linum_map");
obj 474 tools/testing/selftests/bpf/test_sock_fields.c map = bpf_object__find_map_by_name(obj, "sk_pkt_out_cnt");
obj 478 tools/testing/selftests/bpf/test_sock_fields.c map = bpf_object__find_map_by_name(obj, "sk_pkt_out_cnt10");
obj 484 tools/testing/selftests/bpf/test_sock_fields.c bpf_object__close(obj);
obj 1623 tools/testing/selftests/bpf/test_sockmap.c struct bpf_object *obj;
obj 1627 tools/testing/selftests/bpf/test_sockmap.c obj = bpf_object__open(bpf_file);
obj 1628 tools/testing/selftests/bpf/test_sockmap.c err = libbpf_get_error(obj);
obj 1638 tools/testing/selftests/bpf/test_sockmap.c bpf_object__for_each_program(prog, obj) {
obj 1645 tools/testing/selftests/bpf/test_sockmap.c i = bpf_object__load(obj);
obj 1647 tools/testing/selftests/bpf/test_sockmap.c bpf_object__for_each_program(prog, obj) {
obj 1653 tools/testing/selftests/bpf/test_sockmap.c maps[i] = bpf_object__find_map_by_name(obj, map_names[i]);
obj 1457 tools/testing/selftests/bpf/test_sysctl.c struct bpf_object *obj;
obj 1464 tools/testing/selftests/bpf/test_sysctl.c if (bpf_prog_load_xattr(&attr, &obj, &prog_fd)) {
obj 77 tools/testing/selftests/bpf/test_tcpbpf_user.c static int bpf_find_map(const char *test, struct bpf_object *obj,
obj 82 tools/testing/selftests/bpf/test_tcpbpf_user.c map = bpf_object__find_map_by_name(obj, name);
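Several entries here load via bpf_prog_load_xattr(), which passes load parameters through struct bpf_prog_load_attr rather than positionally. A minimal sketch; the .file path and prog type are placeholders, not the values any one of these tests uses.

#include <bpf/libbpf.h>

static int load_with_xattr(struct bpf_object **obj, int *prog_fd)
{
	struct bpf_prog_load_attr attr = {
		.file		= "./test_sysctl_prog.o",	/* hypothetical */
		.prog_type	= BPF_PROG_TYPE_CGROUP_SYSCTL,
	};

	/* Fills *obj with the parsed object and *prog_fd with the fd of
	 * the first loaded program. */
	return bpf_prog_load_xattr(&attr, obj, prog_fd);
}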
obj 97 tools/testing/selftests/bpf/test_tcpbpf_user.c struct bpf_object *obj;
obj 112 tools/testing/selftests/bpf/test_tcpbpf_user.c if (bpf_prog_load(file, BPF_PROG_TYPE_SOCK_OPS, &obj, &prog_fd)) {
obj 129 tools/testing/selftests/bpf/test_tcpbpf_user.c map_fd = bpf_find_map(__func__, obj, "global_map");
obj 133 tools/testing/selftests/bpf/test_tcpbpf_user.c sock_map_fd = bpf_find_map(__func__, obj, "sockopt_results");
obj 80 tools/testing/selftests/bpf/test_tcpnotify_user.c struct bpf_object *obj;
obj 99 tools/testing/selftests/bpf/test_tcpnotify_user.c if (bpf_prog_load(file, BPF_PROG_TYPE_SOCK_OPS, &obj, &prog_fd)) {
obj 111 tools/testing/selftests/bpf/test_tcpnotify_user.c perf_map = bpf_object__find_map_by_name(obj, "perf_event_map");
obj 117 tools/testing/selftests/bpf/test_tcpnotify_user.c global_map = bpf_object__find_map_by_name(obj, "global_map");
obj 98 tools/testing/selftests/bpf/xdping.c struct bpf_object *obj;
obj 176 tools/testing/selftests/bpf/xdping.c if (bpf_prog_load(filename, BPF_PROG_TYPE_XDP, &obj, &prog_fd)) {
obj 181 tools/testing/selftests/bpf/xdping.c main_prog = bpf_object__find_program_by_title(obj,
obj 191 tools/testing/selftests/bpf/xdping.c map = bpf_map__next(NULL, obj);
obj 314 tools/usb/ffs-aio-example/simple/device_app/aio_simple.c if (e[i].obj->aio_fildes == ep[0]) {
obj 317 tools/usb/ffs-aio-example/simple/device_app/aio_simple.c } else if (e[i].obj->aio_fildes == ep[1]) {
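The final two entries are not BPF at all: in the FunctionFS AIO sample, completed io_events are matched back to endpoints through e[i].obj, libaio's pointer to the submitting iocb. A hedged sketch of that dispatch loop, assuming ep[] holds already-open endpoint fds and ctx an initialized io_context_t.

#include <libaio.h>

static void dispatch(io_context_t ctx, int ep[2])
{
	struct io_event e[2];
	int i, n;

	/* Block until at least one of up to two completions arrives. */
	n = io_getevents(ctx, 1, 2, e, NULL);
	for (i = 0; i < n; i++) {
		/* e[i].obj is the iocb that was submitted; its aio_fildes
		 * identifies which endpoint completed. */
		if (e[i].obj->aio_fildes == ep[0]) {
			/* completion on ep0: e.g. resubmit a read */
		} else if (e[i].obj->aio_fildes == ep[1]) {
			/* completion on ep1 */
		}
	}
}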