Lines Matching refs:kdev

43 static struct knav_device *kdev;  variable
60 #define knav_queue_idx_to_inst(kdev, idx) \ argument
61 (kdev->instances + (idx << kdev->inst_shift))
66 #define for_each_instance(idx, inst, kdev) \ argument
67 for (idx = 0, inst = kdev->instances; \
68 idx < (kdev)->num_queues_in_use; \
69 idx++, inst = knav_queue_idx_to_inst(kdev, idx))
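
The two macros above (source lines 60-69) are the driver's index-to-instance lookup: judging from the allocation shown later at source lines 1648-1650, queue instances sit back to back in one buffer, each padded to a power-of-two stride, so (idx << kdev->inst_shift) is a byte offset and the lookup is a shift instead of a multiply. The stand-alone C sketch below reproduces only that arithmetic under that assumed reading; the stub structs, field set, and names are simplified stand-ins, not the driver's real knav_device/knav_queue_inst layout.

/*
 * Stand-alone sketch of the shift-based instance lookup, assuming
 * (idx << inst_shift) is a byte offset into one contiguous, power-of-two
 * padded instance array.  All types and names here are made up for
 * illustration only.
 */
#include <stdio.h>
#include <stdlib.h>

struct inst_stub {              /* stand-in for struct knav_queue_inst */
	unsigned id;
};

struct kdev_stub {              /* stand-in for struct knav_device */
	void *instances;        /* base of the padded instance array */
	unsigned inst_shift;    /* log2 of the per-instance stride in bytes */
	unsigned num_queues_in_use;
};

#define stub_idx_to_inst(kdev, idx) \
	((struct inst_stub *)((char *)(kdev)->instances + ((idx) << (kdev)->inst_shift)))

#define stub_for_each_instance(idx, inst, kdev) \
	for (idx = 0, inst = stub_idx_to_inst(kdev, 0); \
	     idx < (kdev)->num_queues_in_use; \
	     idx++, inst = stub_idx_to_inst(kdev, idx))

int main(void)
{
	struct kdev_stub kdev = { .inst_shift = 6, .num_queues_in_use = 4 };
	struct inst_stub *inst;
	unsigned idx;

	kdev.instances = calloc(kdev.num_queues_in_use, 1u << kdev.inst_shift);
	if (!kdev.instances)
		return 1;

	stub_for_each_instance(idx, inst, &kdev)
		inst->id = idx;
	stub_for_each_instance(idx, inst, &kdev)
		printf("instance %u at byte offset %u\n",
		       inst->id, idx << kdev.inst_shift);

	free(kdev.instances);
	return 0;
}
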
122 dev_warn(range->kdev->dev, in knav_queue_setup_irq()
187 knav_queue_match_id_to_inst(struct knav_device *kdev, unsigned id) in knav_queue_match_id_to_inst() argument
192 for_each_instance(idx, inst, kdev) { in knav_queue_match_id_to_inst()
201 if (kdev->base_id <= id && in knav_queue_find_by_id()
202 kdev->base_id + kdev->num_queues > id) { in knav_queue_find_by_id()
203 id -= kdev->base_id; in knav_queue_find_by_id()
204 return knav_queue_match_id_to_inst(kdev, id); in knav_queue_find_by_id()
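
The knav_queue_find_by_id() entries (source lines 201-204) show the other half of the addressing scheme: a global queue id is accepted only if it lies in [base_id, base_id + num_queues), and the device-local index handed to the matcher is the id minus base_id. A minimal, self-contained sketch of that check follows; the struct, function name, and example values are hypothetical.

/*
 * Hypothetical sketch of the range check at source lines 201-204: map a
 * global queue id to a device-local index, or return -1 if the id is not
 * owned by this device.
 */
#include <stdio.h>

struct qdev_stub {
	unsigned base_id;       /* first hardware queue id of this device */
	unsigned num_queues;    /* how many queues the device manages */
};

static int global_id_to_index(const struct qdev_stub *qdev, unsigned id)
{
	if (qdev->base_id <= id && id < qdev->base_id + qdev->num_queues)
		return (int)(id - qdev->base_id);  /* local index into the instance array */
	return -1;                                 /* id outside this device's range */
}

int main(void)
{
	struct qdev_stub qdev = { .base_id = 8192, .num_queues = 64 };  /* made-up values */

	printf("%d %d\n",
	       global_id_to_index(&qdev, 8200),   /* inside the range  -> 8  */
	       global_id_to_index(&qdev, 100));   /* outside the range -> -1 */
	return 0;
}
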
216 qh = devm_kzalloc(inst->kdev->dev, sizeof(*qh), GFP_KERNEL); in __knav_queue_open()
236 devm_kfree(inst->kdev->dev, qh); in __knav_queue_open()
283 for_each_instance(idx, inst, kdev) { in knav_queue_open_by_type()
415 struct knav_device *kdev = inst->kdev; in knav_queue_debug_show_instance() local
422 kdev->base_id + inst->id, inst->name); in knav_queue_debug_show_instance()
447 dev_name(kdev->dev), kdev->base_id, in knav_queue_debug_show()
448 kdev->base_id + kdev->num_queues - 1); in knav_queue_debug_show()
449 for_each_instance(idx, inst, kdev) in knav_queue_debug_show()
553 devm_kfree(inst->kdev->dev, qh); in knav_queue_close()
574 ret = qh->inst->kdev->base_id + qh->inst->id; in knav_queue_device_control()
706 dev_dbg(pool->kdev->dev, in kdesc_empty_pool()
751 if (!kdev->dev) in knav_pool_create()
754 pool = devm_kzalloc(kdev->dev, sizeof(*pool), GFP_KERNEL); in knav_pool_create()
756 dev_err(kdev->dev, "out of memory allocating pool\n"); in knav_pool_create()
760 for_each_region(kdev, reg_itr) { in knav_pool_create()
768 dev_err(kdev->dev, "region-id(%d) not found\n", region_id); in knav_pool_create()
775 dev_err(kdev->dev, in knav_pool_create()
783 pool->kdev = kdev; in knav_pool_create()
784 pool->dev = kdev->dev; in knav_pool_create()
789 dev_err(kdev->dev, "out of descs in region(%d) for pool(%s)\n", in knav_pool_create()
816 list_add_tail(&pool->list, &kdev->pools); in knav_pool_create()
819 dev_err(kdev->dev, "pool(%s) create failed: fragmented desc pool in region(%d)\n", in knav_pool_create()
833 devm_kfree(kdev->dev, pool); in knav_pool_create()
861 devm_kfree(kdev->dev, pool); in knav_pool_destroy()
962 static void knav_queue_setup_region(struct knav_device *kdev, in knav_queue_setup_region() argument
974 dev_warn(kdev->dev, "unused region %s\n", region->name); in knav_queue_setup_region()
984 dev_warn(kdev->dev, "too few descriptors in region %s\n", in knav_queue_setup_region()
994 dev_err(kdev->dev, "memory alloc failed for region %s\n", in knav_queue_setup_region()
1001 region->dma_start = dma_map_page(kdev->dev, page, 0, size, in knav_queue_setup_region()
1003 if (dma_mapping_error(kdev->dev, region->dma_start)) { in knav_queue_setup_region()
1004 dev_err(kdev->dev, "dma map failed for region %s\n", in knav_queue_setup_region()
1010 pool = devm_kzalloc(kdev->dev, sizeof(*pool), GFP_KERNEL); in knav_queue_setup_region()
1012 dev_err(kdev->dev, "out of memory allocating dummy pool\n"); in knav_queue_setup_region()
1019 dev_dbg(kdev->dev, in knav_queue_setup_region()
1028 for_each_qmgr(kdev, qmgr) { in knav_queue_setup_region()
1039 dma_unmap_page(kdev->dev, region->dma_start, size, in knav_queue_setup_region()
1058 static int knav_queue_setup_regions(struct knav_device *kdev, in knav_queue_setup_regions() argument
1061 struct device *dev = kdev->dev; in knav_queue_setup_regions()
1101 list_add_tail(&region->list, &kdev->regions); in knav_queue_setup_regions()
1103 if (list_empty(&kdev->regions)) { in knav_queue_setup_regions()
1109 for_each_region(kdev, region) in knav_queue_setup_regions()
1110 knav_queue_setup_region(kdev, region); in knav_queue_setup_regions()
1115 static int knav_get_link_ram(struct knav_device *kdev, in knav_get_link_ram() argument
1119 struct platform_device *pdev = to_platform_device(kdev->dev); in knav_get_link_ram()
1148 block->virt = dmam_alloc_coherent(kdev->dev, in knav_get_link_ram()
1152 dev_err(kdev->dev, "failed to alloc linkram\n"); in knav_get_link_ram()
1162 static int knav_queue_setup_link_ram(struct knav_device *kdev) in knav_queue_setup_link_ram() argument
1167 for_each_qmgr(kdev, qmgr) { in knav_queue_setup_link_ram()
1168 block = &kdev->link_rams[0]; in knav_queue_setup_link_ram()
1169 dev_dbg(kdev->dev, "linkram0: phys:%x, virt:%p, size:%x\n", in knav_queue_setup_link_ram()
1178 dev_dbg(kdev->dev, "linkram1: phys:%x, virt:%p, size:%x\n", in knav_queue_setup_link_ram()
1186 static int knav_setup_queue_range(struct knav_device *kdev, in knav_setup_queue_range() argument
1189 struct device *dev = kdev->dev; in knav_setup_queue_range()
1201 range->kdev = kdev; in knav_setup_queue_range()
1205 range->queue_base = temp[0] - kdev->base_id; in knav_setup_queue_range()
1238 ret = knav_init_acc_range(kdev, node, range); in knav_setup_queue_range()
1248 for_each_qmgr(kdev, qmgr) { in knav_setup_queue_range()
1261 list_add_tail(&range->list, &kdev->queue_ranges); in knav_setup_queue_range()
1269 kdev->num_queues_in_use += range->num_queues; in knav_setup_queue_range()
1273 static int knav_setup_queue_pools(struct knav_device *kdev, in knav_setup_queue_pools() argument
1281 ret = knav_setup_queue_range(kdev, range); in knav_setup_queue_pools()
1287 if (list_empty(&kdev->queue_ranges)) { in knav_setup_queue_pools()
1288 dev_err(kdev->dev, "no valid queue range found\n"); in knav_setup_queue_pools()
1294 static void knav_free_queue_range(struct knav_device *kdev, in knav_free_queue_range() argument
1300 devm_kfree(kdev->dev, range); in knav_free_queue_range()
1303 static void knav_free_queue_ranges(struct knav_device *kdev) in knav_free_queue_ranges() argument
1308 range = first_queue_range(kdev); in knav_free_queue_ranges()
1311 knav_free_queue_range(kdev, range); in knav_free_queue_ranges()
1315 static void knav_queue_free_regions(struct knav_device *kdev) in knav_queue_free_regions() argument
1322 region = first_region(kdev); in knav_queue_free_regions()
1332 devm_kfree(kdev->dev, region); in knav_queue_free_regions()
1336 static void __iomem *knav_queue_map_reg(struct knav_device *kdev, in knav_queue_map_reg() argument
1345 dev_err(kdev->dev, "Can't translate of node(%s) address for index(%d)\n", in knav_queue_map_reg()
1350 regs = devm_ioremap_resource(kdev->dev, &res); in knav_queue_map_reg()
1352 dev_err(kdev->dev, "Failed to map register base for index(%d) node(%s)\n", in knav_queue_map_reg()
1357 static int knav_queue_init_qmgrs(struct knav_device *kdev, in knav_queue_init_qmgrs() argument
1360 struct device *dev = kdev->dev; in knav_queue_init_qmgrs()
1388 knav_queue_map_reg(kdev, child, in knav_queue_init_qmgrs()
1391 knav_queue_map_reg(kdev, child, in knav_queue_init_qmgrs()
1394 knav_queue_map_reg(kdev, child, in knav_queue_init_qmgrs()
1397 knav_queue_map_reg(kdev, child, in knav_queue_init_qmgrs()
1400 knav_queue_map_reg(kdev, child, in knav_queue_init_qmgrs()
1403 knav_queue_map_reg(kdev, child, in knav_queue_init_qmgrs()
1426 list_add_tail(&qmgr->list, &kdev->qmgrs); in knav_queue_init_qmgrs()
1436 static int knav_queue_init_pdsps(struct knav_device *kdev, in knav_queue_init_pdsps() argument
1439 struct device *dev = kdev->dev; in knav_queue_init_pdsps()
1463 knav_queue_map_reg(kdev, child, in knav_queue_init_pdsps()
1466 knav_queue_map_reg(kdev, child, in knav_queue_init_pdsps()
1469 knav_queue_map_reg(kdev, child, in knav_queue_init_pdsps()
1472 knav_queue_map_reg(kdev, child, in knav_queue_init_pdsps()
1491 list_add_tail(&pdsp->list, &kdev->pdsps); in knav_queue_init_pdsps()
1499 static int knav_queue_stop_pdsp(struct knav_device *kdev, in knav_queue_stop_pdsp() argument
1510 dev_err(kdev->dev, "timed out on pdsp %s stop\n", pdsp->name); in knav_queue_stop_pdsp()
1516 static int knav_queue_load_pdsp(struct knav_device *kdev, in knav_queue_load_pdsp() argument
1523 ret = request_firmware(&fw, pdsp->firmware, kdev->dev); in knav_queue_load_pdsp()
1525 dev_err(kdev->dev, "failed to get firmware %s for pdsp %s\n", in knav_queue_load_pdsp()
1540 static int knav_queue_start_pdsp(struct knav_device *kdev, in knav_queue_start_pdsp() argument
1563 dev_err(kdev->dev, in knav_queue_start_pdsp()
1571 static void knav_queue_stop_pdsps(struct knav_device *kdev) in knav_queue_stop_pdsps() argument
1576 for_each_pdsp(kdev, pdsp) in knav_queue_stop_pdsps()
1577 knav_queue_stop_pdsp(kdev, pdsp); in knav_queue_stop_pdsps()
1580 static int knav_queue_start_pdsps(struct knav_device *kdev) in knav_queue_start_pdsps() argument
1585 knav_queue_stop_pdsps(kdev); in knav_queue_start_pdsps()
1587 for_each_pdsp(kdev, pdsp) { in knav_queue_start_pdsps()
1588 ret = knav_queue_load_pdsp(kdev, pdsp); in knav_queue_start_pdsps()
1593 for_each_pdsp(kdev, pdsp) { in knav_queue_start_pdsps()
1594 ret = knav_queue_start_pdsp(kdev, pdsp); in knav_queue_start_pdsps()
1604 for_each_qmgr(kdev, qmgr) { in knav_find_qmgr()
1612 static int knav_queue_init_queue(struct knav_device *kdev, in knav_queue_init_queue() argument
1623 inst->kdev = kdev; in knav_queue_init_queue()
1636 static int knav_queue_init_queues(struct knav_device *kdev) in knav_queue_init_queues() argument
1648 kdev->inst_shift = order_base_2(size); in knav_queue_init_queues()
1649 size = (1 << kdev->inst_shift) * kdev->num_queues_in_use; in knav_queue_init_queues()
1650 kdev->instances = devm_kzalloc(kdev->dev, size, GFP_KERNEL); in knav_queue_init_queues()
1651 if (!kdev->instances) in knav_queue_init_queues()
1654 for_each_queue_range(kdev, range) { in knav_queue_init_queues()
1660 ret = knav_queue_init_queue(kdev, range, in knav_queue_init_queues()
1661 knav_queue_idx_to_inst(kdev, idx), id); in knav_queue_init_queues()
1666 knav_queue_idx_to_inst(kdev, base_idx); in knav_queue_init_queues()
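
The knav_queue_init_queues() entries (source lines 1648-1650) show how the instance array behind the lookup macros is sized: inst_shift comes from order_base_2() applied to the per-instance size, and the allocation is that power-of-two stride times num_queues_in_use. The sketch below redoes only that arithmetic in user space; order_base_2() is a real kernel helper, the loop here is merely a stand-in for it, and the sizes are made-up example values.

/*
 * Sketch of the sizing arithmetic at source lines 1648-1650.  The loop
 * mimics order_base_2(): the smallest shift s such that (1 << s) >= n.
 */
#include <stdio.h>
#include <stddef.h>

static unsigned order_base_2_stub(size_t n)
{
	unsigned s = 0;

	while (((size_t)1 << s) < n)
		s++;
	return s;
}

int main(void)
{
	size_t inst_size = 200;                  /* pretend per-instance struct size */
	unsigned num_queues_in_use = 64;         /* pretend number of queues in use */
	unsigned inst_shift = order_base_2_stub(inst_size);
	size_t total = ((size_t)1 << inst_shift) * num_queues_in_use;

	printf("inst_shift=%u stride=%zu total=%zu bytes\n",
	       inst_shift, (size_t)1 << inst_shift, total);
	return 0;
}
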
1684 kdev = devm_kzalloc(dev, sizeof(struct knav_device), GFP_KERNEL); in knav_queue_probe()
1685 if (!kdev) { in knav_queue_probe()
1690 platform_set_drvdata(pdev, kdev); in knav_queue_probe()
1691 kdev->dev = dev; in knav_queue_probe()
1692 INIT_LIST_HEAD(&kdev->queue_ranges); in knav_queue_probe()
1693 INIT_LIST_HEAD(&kdev->qmgrs); in knav_queue_probe()
1694 INIT_LIST_HEAD(&kdev->pools); in knav_queue_probe()
1695 INIT_LIST_HEAD(&kdev->regions); in knav_queue_probe()
1696 INIT_LIST_HEAD(&kdev->pdsps); in knav_queue_probe()
1710 kdev->base_id = temp[0]; in knav_queue_probe()
1711 kdev->num_queues = temp[1]; in knav_queue_probe()
1720 ret = knav_queue_init_qmgrs(kdev, qmgrs); in knav_queue_probe()
1728 ret = knav_queue_init_pdsps(kdev, pdsps); in knav_queue_probe()
1732 ret = knav_queue_start_pdsps(kdev); in knav_queue_probe()
1745 ret = knav_setup_queue_pools(kdev, queue_pools); in knav_queue_probe()
1750 ret = knav_get_link_ram(kdev, "linkram0", &kdev->link_rams[0]); in knav_queue_probe()
1752 dev_err(kdev->dev, "could not setup linking ram\n"); in knav_queue_probe()
1756 ret = knav_get_link_ram(kdev, "linkram1", &kdev->link_rams[1]); in knav_queue_probe()
1764 ret = knav_queue_setup_link_ram(kdev); in knav_queue_probe()
1773 ret = knav_queue_setup_regions(kdev, regions); in knav_queue_probe()
1778 ret = knav_queue_init_queues(kdev); in knav_queue_probe()
1789 knav_queue_stop_pdsps(kdev); in knav_queue_probe()
1790 knav_queue_free_regions(kdev); in knav_queue_probe()
1791 knav_free_queue_ranges(kdev); in knav_queue_probe()