Lines matching references to the identifier "entry" (DMA-API debugging code, lib/dma-debug.c)
172 static inline void dump_entry_trace(struct dma_debug_entry *entry) in dump_entry_trace() argument
175 if (entry) { in dump_entry_trace()
177 print_stack_trace(&entry->stacktrace, 0); in dump_entry_trace()
223 #define err_printk(dev, entry, format, arg...) do { \ argument
230 dump_entry_trace(entry); \
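
Only the opening line and the dump_entry_trace() call of the err_printk() macro appear in this listing. A rough sketch of the elided middle: the error_count / show_num_errors counters and the driver_filter() gate are assumptions inferred from the macro's rate-limited reporting behavior.

    #define err_printk(dev, entry, format, arg...) do {            \
            error_count += 1;                                      \
            if (driver_filter(dev) &&                              \
                (show_all_errors || show_num_errors > 0)) {        \
                    WARN(1, "%s %s: " format,                      \
                         dev_driver_string(dev),                   \
                         dev_name(dev), ## arg);                   \
                    dump_entry_trace(entry);   /* see line 172 */  \
            }                                                      \
            if (!show_all_errors && show_num_errors > 0)           \
                    show_num_errors -= 1;                          \
    } while (0);
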
242 static int hash_fn(struct dma_debug_entry *entry) in hash_fn() argument
248 return (entry->dev_addr >> HASH_FN_SHIFT) & HASH_FN_MASK; in hash_fn()
254 static struct hash_bucket *get_hash_bucket(struct dma_debug_entry *entry, in get_hash_bucket() argument
257 int idx = hash_fn(entry); in get_hash_bucket()
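
hash_fn() derives the bucket index from the device address alone, so every lookup below keys off ref->dev_addr. A minimal sketch of how get_hash_bucket() plausibly completes; the per-bucket spinlock and the put_hash_bucket() counterpart are assumptions consistent with the *flags out-parameter:

    static struct hash_bucket *get_hash_bucket(struct dma_debug_entry *entry,
                                               unsigned long *flags)
    {
            int idx = hash_fn(entry);
            unsigned long __flags;

            /* Lock the bucket; the caller releases it (and restores
             * IRQ state) through the matching put_hash_bucket(). */
            spin_lock_irqsave(&dma_entry_hash[idx].lock, __flags);
            *flags = __flags;
            return &dma_entry_hash[idx];
    }
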
302 struct dma_debug_entry *entry, *ret = NULL; in __hash_bucket_find() local
305 list_for_each_entry(entry, &bucket->list, list) { in __hash_bucket_find()
306 if (!match(ref, entry)) in __hash_bucket_find()
321 entry->size == ref->size ? ++match_lvl : 0; in __hash_bucket_find()
322 entry->type == ref->type ? ++match_lvl : 0; in __hash_bucket_find()
323 entry->direction == ref->direction ? ++match_lvl : 0; in __hash_bucket_find()
324 entry->sg_call_ents == ref->sg_call_ents ? ++match_lvl : 0; in __hash_bucket_find()
328 return entry; in __hash_bucket_find()
335 ret = entry; in __hash_bucket_find()
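
The bare ternary statements above are a scoring trick: each field that matches the reference bumps match_lvl by one, and the loop prefers the highest-scoring entry over the first hit, because without an IOMMU the same dev_addr can legitimately be hashed several times. A condensed sketch of the loop, with the last_lvl bookkeeping assumed from the ret/entry fragments above:

    list_for_each_entry(entry, &bucket->list, list) {
            if (!match(ref, entry))
                    continue;

            match_lvl = 0;
            entry->size         == ref->size         ? ++match_lvl : 0;
            entry->type         == ref->type         ? ++match_lvl : 0;
            entry->direction    == ref->direction    ? ++match_lvl : 0;
            entry->sg_call_ents == ref->sg_call_ents ? ++match_lvl : 0;

            if (match_lvl == 4)
                    return entry;           /* perfect fit */
            if (match_lvl > last_lvl) {
                    last_lvl = match_lvl;   /* best fit so far */
                    ret = entry;
            }
    }
    return ret;
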
360 struct dma_debug_entry *entry, index = *ref; in bucket_find_contain() local
364 entry = __hash_bucket_find(*bucket, ref, containing_match); in bucket_find_contain()
366 if (entry) in bucket_find_contain()
367 return entry; in bucket_find_contain()
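
The unusual declaration "struct dma_debug_entry *entry, index = *ref" copies the reference by value so its dev_addr can be walked backwards: a mapping that contains ref->dev_addr may start in an earlier bucket. A sketch of the surrounding loop; bounding the walk with dma_get_max_seg_size() is an assumption:

    unsigned int max_range = dma_get_max_seg_size(ref->dev);
    unsigned int range = 0;

    while (range <= max_range) {
            entry = __hash_bucket_find(*bucket, ref, containing_match);
            if (entry)
                    return entry;

            /* Nothing found: release this bucket and retry one
             * HASH_FN_SHIFT-sized address block lower. */
            put_hash_bucket(*bucket, flags);
            range          += (1 << HASH_FN_SHIFT);
            index.dev_addr -= (1 << HASH_FN_SHIFT);
            *bucket = get_hash_bucket(&index, flags);
    }
    return NULL;
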
385 struct dma_debug_entry *entry) in hash_bucket_add() argument
387 list_add_tail(&entry->list, &bucket->list); in hash_bucket_add()
393 static void hash_bucket_del(struct dma_debug_entry *entry) in hash_bucket_del() argument
395 list_del(&entry->list); in hash_bucket_del()
398 static unsigned long long phys_addr(struct dma_debug_entry *entry) in phys_addr() argument
400 return page_to_phys(pfn_to_page(entry->pfn)) + entry->offset; in phys_addr()
412 struct dma_debug_entry *entry; in debug_dma_dump_mappings() local
417 list_for_each_entry(entry, &bucket->list, list) { in debug_dma_dump_mappings()
418 if (!dev || dev == entry->dev) { in debug_dma_dump_mappings()
419 dev_info(entry->dev, in debug_dma_dump_mappings()
421 type2name[entry->type], idx, in debug_dma_dump_mappings()
422 phys_addr(entry), entry->pfn, in debug_dma_dump_mappings()
423 entry->dev_addr, entry->size, in debug_dma_dump_mappings()
424 dir2name[entry->direction], in debug_dma_dump_mappings()
425 maperr2str[entry->map_err_type]); in debug_dma_dump_mappings()
463 static phys_addr_t to_cacheline_number(struct dma_debug_entry *entry) in to_cacheline_number() argument
465 return (entry->pfn << CACHELINE_PER_PAGE_SHIFT) + in to_cacheline_number()
466 (entry->offset >> L1_CACHE_SHIFT); in to_cacheline_number()
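
Worked example, assuming CACHELINE_PER_PAGE_SHIFT is PAGE_SHIFT - L1_CACHE_SHIFT, i.e. 12 - 6 = 6 for 4 KiB pages with 64-byte cache lines:

    /* pfn 0x1234, offset 0x80 within the page: */
    cln = (0x1234 << 6) + (0x80 >> 6);  /* 0x48d00 + 0x2 = 0x48d02 */
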
519 static int active_cacheline_insert(struct dma_debug_entry *entry) in active_cacheline_insert() argument
521 phys_addr_t cln = to_cacheline_number(entry); in active_cacheline_insert()
529 if (entry->direction == DMA_TO_DEVICE) in active_cacheline_insert()
533 rc = radix_tree_insert(&dma_active_cacheline, cln, entry); in active_cacheline_insert()
541 static void active_cacheline_remove(struct dma_debug_entry *entry) in active_cacheline_remove() argument
543 phys_addr_t cln = to_cacheline_number(entry); in active_cacheline_remove()
547 if (entry->direction == DMA_TO_DEVICE) in active_cacheline_remove()
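
Both paths skip DMA_TO_DEVICE mappings: the device only reads those cachelines, so concurrent CPU access is harmless and needs no tracking. A sketch of the insert path around the radix_tree_insert() fragment; radix_lock and the -EEXIST overlap accounting are assumptions consistent with one tree slot per active cacheline:

    spin_lock_irqsave(&radix_lock, flags);
    rc = radix_tree_insert(&dma_active_cacheline, cln, entry);
    if (rc == -EEXIST)
            /* The cacheline is already mapped: count the overlap so
             * removing one mapping keeps tracking for the survivor. */
            active_cacheline_inc_overlap(cln);
    spin_unlock_irqrestore(&radix_lock, flags);
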
571 struct dma_debug_entry *entry = NULL; in debug_dma_assert_idle() local
591 entry = ents[i]; in debug_dma_assert_idle()
598 if (!entry) in debug_dma_assert_idle()
601 cln = to_cacheline_number(entry); in debug_dma_assert_idle()
602 err_printk(entry->dev, entry, in debug_dma_assert_idle()
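
debug_dma_assert_idle() is the consumer of that radix tree: given a page, it looks up any active entry covering the page's cachelines and, via the err_printk() above, reports the CPU touching an active DMA-mapped cacheline. A hypothetical call site, modelled on the copy-on-write path of kernels of this vintage:

    /* Before the CPU copies a page, complain if a device may still
     * be writing it through an active (non-TO_DEVICE) mapping. */
    debug_dma_assert_idle(src_page);
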
611 static void add_dma_entry(struct dma_debug_entry *entry) in add_dma_entry() argument
617 bucket = get_hash_bucket(entry, &flags); in add_dma_entry()
618 hash_bucket_add(bucket, entry); in add_dma_entry()
621 rc = active_cacheline_insert(entry); in add_dma_entry()
634 struct dma_debug_entry *entry; in __dma_entry_alloc() local
636 entry = list_entry(free_entries.next, struct dma_debug_entry, list); in __dma_entry_alloc()
637 list_del(&entry->list); in __dma_entry_alloc()
638 memset(entry, 0, sizeof(*entry)); in __dma_entry_alloc()
644 return entry; in __dma_entry_alloc()
654 struct dma_debug_entry *entry; in dma_entry_alloc() local
666 entry = __dma_entry_alloc(); in dma_entry_alloc()
671 entry->stacktrace.max_entries = DMA_DEBUG_STACKTRACE_ENTRIES; in dma_entry_alloc()
672 entry->stacktrace.entries = entry->st_entries; in dma_entry_alloc()
673 entry->stacktrace.skip = 2; in dma_entry_alloc()
674 save_stack_trace(&entry->stacktrace); in dma_entry_alloc()
677 return entry; in dma_entry_alloc()
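
Entries come from a preallocated free list rather than the slab, so allocation is safe in atomic context; if the pool runs dry, dma-debug disables itself rather than silently lose track of mappings. A sketch of dma_entry_alloc() around the fragments above; free_entries_lock and the global_disable fallback are assumptions:

    spin_lock_irqsave(&free_entries_lock, flags);
    if (list_empty(&free_entries)) {
            global_disable = true;  /* pool exhausted: stop checking */
            spin_unlock_irqrestore(&free_entries_lock, flags);
            pr_err("DMA-API: debugging out of memory - disabling\n");
            return NULL;
    }
    entry = __dma_entry_alloc();
    spin_unlock_irqrestore(&free_entries_lock, flags);

    /* Record where the mapping was created (CONFIG_STACKTRACE only);
     * skip = 2 drops dma_entry_alloc() and its direct caller. */
    entry->stacktrace.max_entries = DMA_DEBUG_STACKTRACE_ENTRIES;
    entry->stacktrace.entries     = entry->st_entries;
    entry->stacktrace.skip        = 2;
    save_stack_trace(&entry->stacktrace);

    return entry;
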
680 static void dma_entry_free(struct dma_debug_entry *entry) in dma_entry_free() argument
684 active_cacheline_remove(entry); in dma_entry_free()
691 list_add(&entry->list, &free_entries); in dma_entry_free()
700 struct dma_debug_entry *entry; in dma_debug_resize_entries() local
711 entry = kzalloc(sizeof(*entry), GFP_KERNEL); in dma_debug_resize_entries()
712 if (!entry) in dma_debug_resize_entries()
715 list_add_tail(&entry->list, &tmp); in dma_debug_resize_entries()
727 entry = __dma_entry_alloc(); in dma_debug_resize_entries()
728 kfree(entry); in dma_debug_resize_entries()
753 struct dma_debug_entry *entry, *next_entry; in prealloc_memory() local
757 entry = kzalloc(sizeof(*entry), GFP_KERNEL); in prealloc_memory()
758 if (!entry) in prealloc_memory()
761 list_add_tail(&entry->list, &free_entries); in prealloc_memory()
773 list_for_each_entry_safe(entry, next_entry, &free_entries, list) { in prealloc_memory()
774 list_del(&entry->list); in prealloc_memory()
775 kfree(entry); in prealloc_memory()
931 struct dma_debug_entry *entry; in device_dma_allocations() local
939 list_for_each_entry(entry, &dma_entry_hash[i].list, list) { in device_dma_allocations()
940 if (entry->dev == dev) { in device_dma_allocations()
942 *out_entry = entry; in device_dma_allocations()
956 struct dma_debug_entry *uninitialized_var(entry); in dma_debug_device_change()
964 count = device_dma_allocations(dev, &entry); in dma_debug_device_change()
967 err_printk(dev, entry, "DMA-API: device driver has pending " in dma_debug_device_change()
973 count, entry->dev_addr, entry->size, in dma_debug_device_change()
974 dir2name[entry->direction], type2name[entry->type]); in dma_debug_device_change()
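
dma_debug_device_change() runs when a driver is released from its device and walks every hash bucket for entries still owned by it; anything found is a leaked mapping. A sketch of how the notifier is plausibly wired up, modelled on dma_debug_add_bus(); the exact notifier event is an assumption:

    /* Registered per bus so the unbind notification reaches the
     * leak check above. */
    nb->notifier_call = dma_debug_device_change;
    bus_register_notifier(bus, nb);
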
1076 struct dma_debug_entry *entry; in check_unmap() local
1081 entry = bucket_find_exact(bucket, ref); in check_unmap()
1083 if (!entry) { in check_unmap()
1101 if (ref->size != entry->size) { in check_unmap()
1102 err_printk(ref->dev, entry, "DMA-API: device driver frees " in check_unmap()
1106 ref->dev_addr, entry->size, ref->size); in check_unmap()
1109 if (ref->type != entry->type) { in check_unmap()
1110 err_printk(ref->dev, entry, "DMA-API: device driver frees " in check_unmap()
1115 type2name[entry->type], type2name[ref->type]); in check_unmap()
1116 } else if ((entry->type == dma_debug_coherent) && in check_unmap()
1117 (phys_addr(ref) != phys_addr(entry))) { in check_unmap()
1118 err_printk(ref->dev, entry, "DMA-API: device driver frees " in check_unmap()
1124 phys_addr(entry), in check_unmap()
1129 ref->sg_call_ents != entry->sg_call_ents) { in check_unmap()
1130 err_printk(ref->dev, entry, "DMA-API: device driver frees " in check_unmap()
1133 entry->sg_call_ents, ref->sg_call_ents); in check_unmap()
1140 if (ref->direction != entry->direction) { in check_unmap()
1141 err_printk(ref->dev, entry, "DMA-API: device driver frees " in check_unmap()
1146 dir2name[entry->direction], in check_unmap()
1150 if (entry->map_err_type == MAP_ERR_NOT_CHECKED) { in check_unmap()
1151 err_printk(ref->dev, entry, in check_unmap()
1156 type2name[entry->type]); in check_unmap()
1159 hash_bucket_del(entry); in check_unmap()
1160 dma_entry_free(entry); in check_unmap()
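
Each err_printk() branch above corresponds to one class of driver bug. A hypothetical fragment that trips the size check, for example:

    dma_addr_t handle = dma_map_single(dev, buf, 4096, DMA_TO_DEVICE);
    /* ... device consumes the buffer ... */
    /* BUG: unmap size differs from map size; check_unmap() reports
     * the driver freeing DMA memory with a different size. */
    dma_unmap_single(dev, handle, 2048, DMA_TO_DEVICE);
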
1193 struct dma_debug_entry *entry; in check_sync() local
1199 entry = bucket_find_contain(&bucket, ref, &flags); in check_sync()
1201 if (!entry) { in check_sync()
1209 if (ref->size > entry->size) { in check_sync()
1210 err_printk(dev, entry, "DMA-API: device driver syncs" in check_sync()
1215 entry->dev_addr, entry->size, in check_sync()
1219 if (entry->direction == DMA_BIDIRECTIONAL) in check_sync()
1222 if (ref->direction != entry->direction) { in check_sync()
1223 err_printk(dev, entry, "DMA-API: device driver syncs " in check_sync()
1227 (unsigned long long)ref->dev_addr, entry->size, in check_sync()
1228 dir2name[entry->direction], in check_sync()
1232 if (to_cpu && !(entry->direction == DMA_FROM_DEVICE) && in check_sync()
1234 err_printk(dev, entry, "DMA-API: device driver syncs " in check_sync()
1238 (unsigned long long)ref->dev_addr, entry->size, in check_sync()
1239 dir2name[entry->direction], in check_sync()
1242 if (!to_cpu && !(entry->direction == DMA_TO_DEVICE) && in check_sync()
1244 err_printk(dev, entry, "DMA-API: device driver syncs " in check_sync()
1248 (unsigned long long)ref->dev_addr, entry->size, in check_sync()
1249 dir2name[entry->direction], in check_sync()
1253 ref->sg_call_ents != entry->sg_call_ents) { in check_sync()
1254 err_printk(ref->dev, entry, "DMA-API: device driver syncs " in check_sync()
1257 entry->sg_call_ents, ref->sg_call_ents); in check_sync()
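
check_sync() exempts DMA_BIDIRECTIONAL mappings from the direction tests (the early return above); everything else must be synced with the direction it was mapped with, in a way consistent with to_cpu. A hypothetical sequence that trips the direction check:

    dma_addr_t handle = dma_map_single(dev, buf, len, DMA_FROM_DEVICE);
    /* BUG: mapping is device-to-CPU, but the sync claims the CPU
     * wrote it; check_sync() flags the mismatched direction. */
    dma_sync_single_for_device(dev, handle, len, DMA_TO_DEVICE);
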
1268 struct dma_debug_entry *entry; in debug_dma_map_page() local
1276 entry = dma_entry_alloc(); in debug_dma_map_page()
1277 if (!entry) in debug_dma_map_page()
1280 entry->dev = dev; in debug_dma_map_page()
1281 entry->type = dma_debug_page; in debug_dma_map_page()
1282 entry->pfn = page_to_pfn(page); in debug_dma_map_page()
1283 entry->offset = offset; in debug_dma_map_page()
1284 entry->dev_addr = dma_addr; in debug_dma_map_page()
1285 entry->size = size; in debug_dma_map_page()
1286 entry->direction = direction; in debug_dma_map_page()
1287 entry->map_err_type = MAP_ERR_NOT_CHECKED; in debug_dma_map_page()
1290 entry->type = dma_debug_single; in debug_dma_map_page()
1299 add_dma_entry(entry); in debug_dma_map_page()
1306 struct dma_debug_entry *entry; in debug_dma_mapping_error() local
1317 list_for_each_entry(entry, &bucket->list, list) { in debug_dma_mapping_error()
1318 if (!exact_match(&ref, entry)) in debug_dma_mapping_error()
1331 if (entry->map_err_type == MAP_ERR_NOT_CHECKED) { in debug_dma_mapping_error()
1332 entry->map_err_type = MAP_ERR_CHECKED; in debug_dma_mapping_error()
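
This hook is what clears the MAP_ERR_NOT_CHECKED state that check_unmap() complains about at line 1150 above: every map must be followed by a dma_mapping_error() check, which funnels into debug_dma_mapping_error(). Typical correct driver usage:

    dma_addr_t handle = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
    if (dma_mapping_error(dev, handle))  /* marks the entry MAP_ERR_CHECKED */
            return -ENOMEM;
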
1365 struct dma_debug_entry *entry; in debug_dma_map_sg() local
1373 entry = dma_entry_alloc(); in debug_dma_map_sg()
1374 if (!entry) in debug_dma_map_sg()
1377 entry->type = dma_debug_sg; in debug_dma_map_sg()
1378 entry->dev = dev; in debug_dma_map_sg()
1379 entry->pfn = page_to_pfn(sg_page(s)); in debug_dma_map_sg()
1380 entry->offset = s->offset; in debug_dma_map_sg()
1381 entry->size = sg_dma_len(s); in debug_dma_map_sg()
1382 entry->dev_addr = sg_dma_address(s); in debug_dma_map_sg()
1383 entry->direction = direction; in debug_dma_map_sg()
1384 entry->sg_call_ents = nents; in debug_dma_map_sg()
1385 entry->sg_mapped_ents = mapped_ents; in debug_dma_map_sg()
1392 add_dma_entry(entry); in debug_dma_map_sg()
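
One debug entry is allocated per mapped scatterlist element, and each records both counts: sg_call_ents (nents as passed by the driver) and sg_mapped_ents (what the mapping actually produced), which the sg_call_ents comparisons in check_unmap() and check_sync() later consult. The surrounding loop is plausibly:

    for_each_sg(sg, s, mapped_ents, i) {
            entry = dma_entry_alloc();
            if (!entry)
                    return;
            /* ... fill in the fields shown above ... */
            add_dma_entry(entry);
    }
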
1400 struct dma_debug_entry *entry; in get_nr_mapped_entries() local
1406 entry = bucket_find_exact(bucket, ref); in get_nr_mapped_entries()
1409 if (entry) in get_nr_mapped_entries()
1410 mapped_ents = entry->sg_mapped_ents; in get_nr_mapped_entries()
1452 struct dma_debug_entry *entry; in debug_dma_alloc_coherent() local
1460 entry = dma_entry_alloc(); in debug_dma_alloc_coherent()
1461 if (!entry) in debug_dma_alloc_coherent()
1464 entry->type = dma_debug_coherent; in debug_dma_alloc_coherent()
1465 entry->dev = dev; in debug_dma_alloc_coherent()
1466 entry->pfn = page_to_pfn(virt_to_page(virt)); in debug_dma_alloc_coherent()
1467 entry->offset = (size_t) virt & ~PAGE_MASK; in debug_dma_alloc_coherent()
1468 entry->size = size; in debug_dma_alloc_coherent()
1469 entry->dev_addr = dma_addr; in debug_dma_alloc_coherent()
1470 entry->direction = DMA_BIDIRECTIONAL; in debug_dma_alloc_coherent()
1472 add_dma_entry(entry); in debug_dma_alloc_coherent()
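
All of these hooks compile in only with CONFIG_DMA_API_DEBUG=y. The size of the preallocated entry pool (see prealloc_memory() above) and the checks as a whole are tunable from the kernel command line:

    # kernel command line
    dma_debug_entries=65536   # grow the preallocated entry pool
    dma_debug=off             # leave the hooks compiled in but disabled
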