execlists 126 drivers/gpu/drm/i915/gt/intel_engine.h execlists_num_ports(const struct intel_engine_execlists * const execlists)
execlists 128 drivers/gpu/drm/i915/gt/intel_engine.h return execlists->port_mask + 1;
execlists 132 drivers/gpu/drm/i915/gt/intel_engine.h execlists_active(const struct intel_engine_execlists *execlists)
execlists 134 drivers/gpu/drm/i915/gt/intel_engine.h GEM_BUG_ON(execlists->active - execlists->inflight >
execlists 135 drivers/gpu/drm/i915/gt/intel_engine.h execlists_num_ports(execlists));
execlists 136 drivers/gpu/drm/i915/gt/intel_engine.h return READ_ONCE(*execlists->active);
execlists 140 drivers/gpu/drm/i915/gt/intel_engine.h execlists_active_lock_bh(struct intel_engine_execlists *execlists)
execlists 143 drivers/gpu/drm/i915/gt/intel_engine.h tasklet_lock(&execlists->tasklet);
execlists 147 drivers/gpu/drm/i915/gt/intel_engine.h execlists_active_unlock_bh(struct intel_engine_execlists *execlists)
execlists 149 drivers/gpu/drm/i915/gt/intel_engine.h tasklet_unlock(&execlists->tasklet);
execlists 154 drivers/gpu/drm/i915/gt/intel_engine.h execlists_unwind_incomplete_requests(struct intel_engine_execlists *execlists);
execlists 512 drivers/gpu/drm/i915/gt/intel_engine.h static inline bool inject_preempt_hang(struct intel_engine_execlists *execlists)
execlists 514 drivers/gpu/drm/i915/gt/intel_engine.h if (!execlists->preempt_hang.inject_hang)
execlists 517 drivers/gpu/drm/i915/gt/intel_engine.h complete(&execlists->preempt_hang.completion);
execlists 523 drivers/gpu/drm/i915/gt/intel_engine.h static inline bool inject_preempt_hang(struct intel_engine_execlists *execlists)
execlists 478 drivers/gpu/drm/i915/gt/intel_engine_cs.c struct intel_engine_execlists * const execlists = &engine->execlists;
execlists 480 drivers/gpu/drm/i915/gt/intel_engine_cs.c execlists->port_mask = 1;
execlists 481 drivers/gpu/drm/i915/gt/intel_engine_cs.c GEM_BUG_ON(!is_power_of_2(execlists_num_ports(execlists)));
execlists 482 drivers/gpu/drm/i915/gt/intel_engine_cs.c GEM_BUG_ON(execlists_num_ports(execlists) > EXECLIST_MAX_PORTS);
execlists 484 drivers/gpu/drm/i915/gt/intel_engine_cs.c memset(execlists->pending, 0, sizeof(execlists->pending));
execlists 485 drivers/gpu/drm/i915/gt/intel_engine_cs.c execlists->active =
execlists 486 drivers/gpu/drm/i915/gt/intel_engine_cs.c memset(execlists->inflight, 0, sizeof(execlists->inflight));
execlists 488 drivers/gpu/drm/i915/gt/intel_engine_cs.c execlists->queue_priority_hint = INT_MIN;
execlists 489 drivers/gpu/drm/i915/gt/intel_engine_cs.c execlists->queue = RB_ROOT_CACHED;
execlists 1051 drivers/gpu/drm/i915/gt/intel_engine_cs.c if (execlists_active(&engine->execlists)) {
execlists 1052 drivers/gpu/drm/i915/gt/intel_engine_cs.c struct tasklet_struct *t = &engine->execlists.tasklet;
execlists 1068 drivers/gpu/drm/i915/gt/intel_engine_cs.c if (execlists_active(&engine->execlists))
execlists 1073 drivers/gpu/drm/i915/gt/intel_engine_cs.c if (!RB_EMPTY_ROOT(&engine->execlists.queue.rb_root))
execlists 1200 drivers/gpu/drm/i915/gt/intel_engine_cs.c struct intel_engine_execlists * const execlists = &engine->execlists;
execlists 1253 drivers/gpu/drm/i915/gt/intel_engine_cs.c const u8 num_entries = execlists->csb_size;
execlists 1262 drivers/gpu/drm/i915/gt/intel_engine_cs.c read = execlists->csb_head;
execlists 1263 drivers/gpu/drm/i915/gt/intel_engine_cs.c write = READ_ONCE(*execlists->csb_write);
execlists 1268 drivers/gpu/drm/i915/gt/intel_engine_cs.c &engine->execlists.tasklet.state)),
execlists 1269 drivers/gpu/drm/i915/gt/intel_engine_cs.c enableddisabled(!atomic_read(&engine->execlists.tasklet.count)));
execlists 1282 drivers/gpu/drm/i915/gt/intel_engine_cs.c execlists_active_lock_bh(execlists);
execlists 1283 drivers/gpu/drm/i915/gt/intel_engine_cs.c for (port = execlists->active; (rq = *port); port++) {
execlists 1289 drivers/gpu/drm/i915/gt/intel_engine_cs.c (int)(port - execlists->active));
execlists 1299 drivers/gpu/drm/i915/gt/intel_engine_cs.c for (port = execlists->pending; (rq = *port); port++) {
execlists 1304 drivers/gpu/drm/i915/gt/intel_engine_cs.c (int)(port - execlists->pending),
execlists 1310 drivers/gpu/drm/i915/gt/intel_engine_cs.c execlists_active_unlock_bh(execlists);
execlists 1434 drivers/gpu/drm/i915/gt/intel_engine_cs.c struct intel_engine_execlists *execlists = &engine->execlists;
execlists 1441 drivers/gpu/drm/i915/gt/intel_engine_cs.c execlists_active_lock_bh(execlists);
execlists 1456 drivers/gpu/drm/i915/gt/intel_engine_cs.c for (port = execlists->active; (rq = *port); port++)
execlists 1459 drivers/gpu/drm/i915/gt/intel_engine_cs.c for (port = execlists->pending; (rq = *port); port++) {
execlists 1471 drivers/gpu/drm/i915/gt/intel_engine_cs.c execlists_active_unlock_bh(execlists);
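
Note: the intel_engine.h accessors indexed above reduce to simple arithmetic on two fields: ports come in power-of-two counts encoded as port_mask (count = mask + 1, line 128), and execlists_active() dereferences a cursor that may sit at most num_ports entries past the start of inflight[] (lines 134-136). A minimal user-space sketch of that contract follows; struct request, the model type, and the value of EXECLIST_MAX_PORTS are stand-ins, not the kernel's definitions.

    #include <assert.h>
    #include <stddef.h>
    #include <stdio.h>

    #define MAX_PORTS 2 /* assumed value of EXECLIST_MAX_PORTS */

    struct request; /* opaque stand-in for struct i915_request */

    struct execlists_model {
        unsigned int port_mask;                  /* num_ports - 1 */
        struct request *inflight[MAX_PORTS + 1]; /* NULL-terminated */
        struct request **active;                 /* cursor into inflight[] */
    };

    static unsigned int num_ports(const struct execlists_model *el)
    {
        return el->port_mask + 1; /* mirrors execlists_num_ports() */
    }

    static struct request *first_active(const struct execlists_model *el)
    {
        /* mirrors execlists_active(): bounds check, then deref the cursor */
        assert((size_t)(el->active - el->inflight) <= num_ports(el));
        return *el->active;
    }

    int main(void)
    {
        /* port_mask = 1 gives two ports, as set up at intel_engine_cs.c:480 */
        struct execlists_model el = { .port_mask = 1 };

        el.active = el.inflight; /* idle: cursor at start, array zeroed */
        printf("ports=%u active=%p\n", num_ports(&el),
               (void *)first_active(&el));
        return 0;
    }
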
execlists 149 drivers/gpu/drm/i915/gt/intel_engine_pm.c GEM_BUG_ON(engine->execlists.queue_priority_hint != INT_MIN);
execlists 159 drivers/gpu/drm/i915/gt/intel_engine_pm.c engine->execlists.no_priolist = false;
execlists 472 drivers/gpu/drm/i915/gt/intel_engine_types.h struct intel_engine_execlists execlists;
execlists 35 drivers/gpu/drm/i915/gt/intel_gt_irq.c tasklet_hi_schedule(&engine->execlists.tasklet);
execlists 303 drivers/gpu/drm/i915/gt/intel_lrc.c static int queue_prio(const struct intel_engine_execlists *execlists)
execlists 308 drivers/gpu/drm/i915/gt/intel_lrc.c rb = rb_first_cached(&execlists->queue);
execlists 342 drivers/gpu/drm/i915/gt/intel_lrc.c if (!i915_scheduler_need_preempt(engine->execlists.queue_priority_hint,
execlists 383 drivers/gpu/drm/i915/gt/intel_lrc.c return queue_prio(&engine->execlists) > last_prio;
execlists 507 drivers/gpu/drm/i915/gt/intel_lrc.c GEM_BUG_ON(RB_EMPTY_ROOT(&engine->execlists.queue.rb_root));
execlists 536 drivers/gpu/drm/i915/gt/intel_lrc.c execlists_unwind_incomplete_requests(struct intel_engine_execlists *execlists)
execlists 539 drivers/gpu/drm/i915/gt/intel_lrc.c container_of(execlists, typeof(*engine), execlists);
execlists 600 drivers/gpu/drm/i915/gt/intel_lrc.c tasklet_schedule(&ve->base.execlists.tasklet);
execlists 698 drivers/gpu/drm/i915/gt/intel_lrc.c static inline void write_desc(struct intel_engine_execlists *execlists, u64 desc, u32 port)
execlists 700 drivers/gpu/drm/i915/gt/intel_lrc.c if (execlists->ctrl_reg) {
execlists 701 drivers/gpu/drm/i915/gt/intel_lrc.c writel(lower_32_bits(desc), execlists->submit_reg + port * 2);
execlists 702 drivers/gpu/drm/i915/gt/intel_lrc.c writel(upper_32_bits(desc), execlists->submit_reg + port * 2 + 1);
execlists 704 drivers/gpu/drm/i915/gt/intel_lrc.c writel(upper_32_bits(desc), execlists->submit_reg);
execlists 705 drivers/gpu/drm/i915/gt/intel_lrc.c writel(lower_32_bits(desc), execlists->submit_reg);
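
Note: write_desc() (intel_lrc.c:698-705, listed above) has two flavors that the entries make visible: hardware with a submit-queue control register takes the 64-bit descriptor as lower-then-upper 32-bit writes at an offset derived from the port, while the legacy ELSP is a single register written upper-then-lower. A user-space sketch of just that ordering; mmio_write32() is a stand-in for writel(), and the register layout is modeled, not real.

    #include <stdint.h>
    #include <stdio.h>

    static void mmio_write32(uint32_t *reg, uint32_t val)
    {
        *(volatile uint32_t *)reg = val; /* stand-in for writel() */
    }

    static void write_desc_model(uint32_t *submit_reg, int has_ctrl_reg,
                                 uint64_t desc, uint32_t port)
    {
        if (has_ctrl_reg) {
            /* per-port submit queue: low dword then high dword */
            mmio_write32(submit_reg + port * 2, (uint32_t)desc);
            mmio_write32(submit_reg + port * 2 + 1, (uint32_t)(desc >> 32));
        } else {
            /* legacy ELSP: high dword then low dword, same register */
            mmio_write32(submit_reg, (uint32_t)(desc >> 32));
            mmio_write32(submit_reg, (uint32_t)desc);
        }
    }

    int main(void)
    {
        uint32_t fake_regs[8] = { 0 };

        write_desc_model(fake_regs, 1, 0x1122334455667788ull, 1);
        printf("port1: lo=%#x hi=%#x\n", (unsigned)fake_regs[2],
               (unsigned)fake_regs[3]);
        return 0;
    }
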
execlists 710 drivers/gpu/drm/i915/gt/intel_lrc.c trace_ports(const struct intel_engine_execlists *execlists,
execlists 715 drivers/gpu/drm/i915/gt/intel_lrc.c container_of(execlists, typeof(*engine), execlists);
execlists 729 drivers/gpu/drm/i915/gt/intel_lrc.c assert_pending_valid(const struct intel_engine_execlists *execlists,
execlists 735 drivers/gpu/drm/i915/gt/intel_lrc.c trace_ports(execlists, msg, execlists->pending);
execlists 737 drivers/gpu/drm/i915/gt/intel_lrc.c if (!execlists->pending[0])
execlists 740 drivers/gpu/drm/i915/gt/intel_lrc.c if (execlists->pending[execlists_num_ports(execlists)])
execlists 743 drivers/gpu/drm/i915/gt/intel_lrc.c for (port = execlists->pending; (rq = *port); port++) {
execlists 763 drivers/gpu/drm/i915/gt/intel_lrc.c struct intel_engine_execlists *execlists = &engine->execlists;
execlists 766 drivers/gpu/drm/i915/gt/intel_lrc.c GEM_BUG_ON(!assert_pending_valid(execlists, "submit"));
execlists 784 drivers/gpu/drm/i915/gt/intel_lrc.c for (n = execlists_num_ports(execlists); n--; ) {
execlists 785 drivers/gpu/drm/i915/gt/intel_lrc.c struct i915_request *rq = execlists->pending[n];
execlists 787 drivers/gpu/drm/i915/gt/intel_lrc.c write_desc(execlists,
execlists 793 drivers/gpu/drm/i915/gt/intel_lrc.c if (execlists->ctrl_reg)
execlists 794 drivers/gpu/drm/i915/gt/intel_lrc.c writel(EL_CTRL_LOAD, execlists->ctrl_reg);
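
Note: assert_pending_valid() and the submit loop above (intel_lrc.c:729-794) pin down the pending[] contract: a submission carries at least one request in pending[0], at most num_ports requests, and the slot at index num_ports must stay NULL so the array remains NULL-terminated. A small sketch of that invariant, with NUM_PORTS and struct request as illustrative stand-ins:

    #include <assert.h>
    #include <stddef.h>

    #define NUM_PORTS 2 /* port_mask + 1 */

    struct request { int dummy; };

    /* one extra slot so even a full submission keeps its terminator */
    static struct request *pending[NUM_PORTS + 1];

    static int pending_valid(void)
    {
        size_t n;

        if (!pending[0])
            return 0; /* nothing queued: intel_lrc.c:737 */
        if (pending[NUM_PORTS])
            return 0; /* terminator overwritten: intel_lrc.c:740 */
        for (n = 0; pending[n]; n++)
            ;
        return n <= NUM_PORTS;
    }

    int main(void)
    {
        struct request a, b;

        pending[0] = &a;
        pending[1] = &b; /* fills the last real port; pending[2] stays NULL */
        assert(pending_valid());
        return 0;
    }
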
execlists 926 drivers/gpu/drm/i915/gt/intel_lrc.c last_active(const struct intel_engine_execlists *execlists)
execlists 928 drivers/gpu/drm/i915/gt/intel_lrc.c struct i915_request * const *last = READ_ONCE(execlists->active);
execlists 1008 drivers/gpu/drm/i915/gt/intel_lrc.c engine->execlists.queue_priority_hint);
execlists 1023 drivers/gpu/drm/i915/gt/intel_lrc.c enable_timeslice(const struct intel_engine_execlists *execlists)
execlists 1025 drivers/gpu/drm/i915/gt/intel_lrc.c const struct i915_request *rq = *execlists->active;
execlists 1030 drivers/gpu/drm/i915/gt/intel_lrc.c return execlists->switch_priority_hint >= effective_prio(rq);
execlists 1033 drivers/gpu/drm/i915/gt/intel_lrc.c static void record_preemption(struct intel_engine_execlists *execlists)
execlists 1035 drivers/gpu/drm/i915/gt/intel_lrc.c (void)I915_SELFTEST_ONLY(execlists->preempt_hang.count++);
execlists 1040 drivers/gpu/drm/i915/gt/intel_lrc.c struct intel_engine_execlists * const execlists = &engine->execlists;
execlists 1041 drivers/gpu/drm/i915/gt/intel_lrc.c struct i915_request **port = execlists->pending;
execlists 1042 drivers/gpu/drm/i915/gt/intel_lrc.c struct i915_request ** const last_port = port + execlists->port_mask;
execlists 1069 drivers/gpu/drm/i915/gt/intel_lrc.c for (rb = rb_first_cached(&execlists->virtual); rb; ) {
execlists 1075 drivers/gpu/drm/i915/gt/intel_lrc.c rb_erase_cached(rb, &execlists->virtual);
execlists 1077 drivers/gpu/drm/i915/gt/intel_lrc.c rb = rb_first_cached(&execlists->virtual);
execlists 1097 drivers/gpu/drm/i915/gt/intel_lrc.c last = last_active(execlists);
execlists 1105 drivers/gpu/drm/i915/gt/intel_lrc.c execlists->queue_priority_hint);
execlists 1106 drivers/gpu/drm/i915/gt/intel_lrc.c record_preemption(execlists);
execlists 1126 drivers/gpu/drm/i915/gt/intel_lrc.c !timer_pending(&engine->execlists.timer)) {
execlists 1132 drivers/gpu/drm/i915/gt/intel_lrc.c execlists->queue_priority_hint);
execlists 1177 drivers/gpu/drm/i915/gt/intel_lrc.c rb_erase_cached(rb, &execlists->virtual);
execlists 1179 drivers/gpu/drm/i915/gt/intel_lrc.c rb = rb_first_cached(&execlists->virtual);
execlists 1187 drivers/gpu/drm/i915/gt/intel_lrc.c if (rq_prio(rq) >= queue_prio(execlists)) {
execlists 1209 drivers/gpu/drm/i915/gt/intel_lrc.c ve->base.execlists.queue_priority_hint = INT_MIN;
execlists 1210 drivers/gpu/drm/i915/gt/intel_lrc.c rb_erase_cached(rb, &execlists->virtual);
execlists 1259 drivers/gpu/drm/i915/gt/intel_lrc.c rb = rb_first_cached(&execlists->virtual);
execlists 1268 drivers/gpu/drm/i915/gt/intel_lrc.c while ((rb = rb_first_cached(&execlists->queue))) {
execlists 1320 drivers/gpu/drm/i915/gt/intel_lrc.c *port = execlists_schedule_in(last, port - execlists->pending);
execlists 1334 drivers/gpu/drm/i915/gt/intel_lrc.c rb_erase_cached(&p->node, &execlists->queue);
execlists 1355 drivers/gpu/drm/i915/gt/intel_lrc.c execlists->queue_priority_hint = queue_prio(execlists);
execlists 1357 drivers/gpu/drm/i915/gt/intel_lrc.c engine->name, execlists->queue_priority_hint,
execlists 1361 drivers/gpu/drm/i915/gt/intel_lrc.c *port = execlists_schedule_in(last, port - execlists->pending);
execlists 1363 drivers/gpu/drm/i915/gt/intel_lrc.c execlists->switch_priority_hint =
execlists 1364 drivers/gpu/drm/i915/gt/intel_lrc.c switch_prio(engine, *execlists->pending);
execlists 1372 drivers/gpu/drm/i915/gt/intel_lrc.c cancel_port_requests(struct intel_engine_execlists * const execlists)
execlists 1376 drivers/gpu/drm/i915/gt/intel_lrc.c for (port = execlists->pending; (rq = *port); port++)
execlists 1378 drivers/gpu/drm/i915/gt/intel_lrc.c memset(execlists->pending, 0, sizeof(execlists->pending));
execlists 1380 drivers/gpu/drm/i915/gt/intel_lrc.c for (port = execlists->active; (rq = *port); port++)
execlists 1382 drivers/gpu/drm/i915/gt/intel_lrc.c execlists->active =
execlists 1383 drivers/gpu/drm/i915/gt/intel_lrc.c memset(execlists->inflight, 0, sizeof(execlists->inflight));
execlists 1394 drivers/gpu/drm/i915/gt/intel_lrc.c reset_in_progress(const struct intel_engine_execlists *execlists)
execlists 1396 drivers/gpu/drm/i915/gt/intel_lrc.c return unlikely(!__tasklet_is_enabled(&execlists->tasklet));
execlists 1433 drivers/gpu/drm/i915/gt/intel_lrc.c gen12_csb_parse(const struct intel_engine_execlists *execlists, const u32 *csb)
execlists 1461 drivers/gpu/drm/i915/gt/intel_lrc.c if (*execlists->active) {
execlists 1470 drivers/gpu/drm/i915/gt/intel_lrc.c gen8_csb_parse(const struct intel_engine_execlists *execlists, const u32 *csb)
execlists 1480 drivers/gpu/drm/i915/gt/intel_lrc.c if (*execlists->active)
execlists 1488 drivers/gpu/drm/i915/gt/intel_lrc.c struct intel_engine_execlists * const execlists = &engine->execlists;
execlists 1489 drivers/gpu/drm/i915/gt/intel_lrc.c const u32 * const buf = execlists->csb_status;
execlists 1490 drivers/gpu/drm/i915/gt/intel_lrc.c const u8 num_entries = execlists->csb_size;
execlists 1505 drivers/gpu/drm/i915/gt/intel_lrc.c head = execlists->csb_head;
execlists 1506 drivers/gpu/drm/i915/gt/intel_lrc.c tail = READ_ONCE(*execlists->csb_write);
execlists 1550 drivers/gpu/drm/i915/gt/intel_lrc.c csb_step = gen12_csb_parse(execlists, buf + 2 * head);
execlists 1552 drivers/gpu/drm/i915/gt/intel_lrc.c csb_step = gen8_csb_parse(execlists, buf + 2 * head);
execlists 1556 drivers/gpu/drm/i915/gt/intel_lrc.c trace_ports(execlists, "preempted", execlists->active);
execlists 1558 drivers/gpu/drm/i915/gt/intel_lrc.c while (*execlists->active)
execlists 1559 drivers/gpu/drm/i915/gt/intel_lrc.c execlists_schedule_out(*execlists->active++);
execlists 1563 drivers/gpu/drm/i915/gt/intel_lrc.c GEM_BUG_ON(*execlists->active);
execlists 1564 drivers/gpu/drm/i915/gt/intel_lrc.c GEM_BUG_ON(!assert_pending_valid(execlists, "promote"));
execlists 1565 drivers/gpu/drm/i915/gt/intel_lrc.c execlists->active =
execlists 1566 drivers/gpu/drm/i915/gt/intel_lrc.c memcpy(execlists->inflight,
execlists 1567 drivers/gpu/drm/i915/gt/intel_lrc.c execlists->pending,
execlists 1568 drivers/gpu/drm/i915/gt/intel_lrc.c execlists_num_ports(execlists) *
execlists 1569 drivers/gpu/drm/i915/gt/intel_lrc.c sizeof(*execlists->pending));
execlists 1571 drivers/gpu/drm/i915/gt/intel_lrc.c if (enable_timeslice(execlists))
execlists 1572 drivers/gpu/drm/i915/gt/intel_lrc.c mod_timer(&execlists->timer, jiffies + 1);
execlists 1574 drivers/gpu/drm/i915/gt/intel_lrc.c if (!inject_preempt_hang(execlists))
execlists 1577 drivers/gpu/drm/i915/gt/intel_lrc.c WRITE_ONCE(execlists->pending[0], NULL);
execlists 1581 drivers/gpu/drm/i915/gt/intel_lrc.c trace_ports(execlists, "completed", execlists->active);
execlists 1589 drivers/gpu/drm/i915/gt/intel_lrc.c GEM_BUG_ON(!i915_request_completed(*execlists->active) &&
execlists 1590 drivers/gpu/drm/i915/gt/intel_lrc.c !reset_in_progress(execlists));
execlists 1591 drivers/gpu/drm/i915/gt/intel_lrc.c execlists_schedule_out(*execlists->active++);
execlists 1593 drivers/gpu/drm/i915/gt/intel_lrc.c GEM_BUG_ON(execlists->active - execlists->inflight >
execlists 1594 drivers/gpu/drm/i915/gt/intel_lrc.c execlists_num_ports(execlists));
execlists 1602 drivers/gpu/drm/i915/gt/intel_lrc.c execlists->csb_head = head;
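
Note: the "completed" branch of process_csb() above (intel_lrc.c:1581-1594) retires the head of inflight[] by advancing the active cursor one slot per event, then re-checks the invariant that the cursor never runs more than num_ports past the array start. A single-threaded sketch; schedule_out_model() is a hypothetical stand-in for execlists_schedule_out().

    #include <assert.h>
    #include <stdio.h>

    #define NUM_PORTS 2

    struct request { int completed; };

    static struct request *inflight[NUM_PORTS + 1]; /* NULL-terminated */
    static struct request **active = inflight;

    static void schedule_out_model(struct request *rq)
    {
        printf("retired %p\n", (void *)rq);
    }

    static void on_completed_event(void)
    {
        assert((*active)->completed);           /* GEM_BUG_ON at :1589 */
        schedule_out_model(*active++);          /* pop, as at :1591 */
        assert(active - inflight <= NUM_PORTS); /* invariant at :1593 */
    }

    int main(void)
    {
        struct request a = { .completed = 1 };

        inflight[0] = &a;
        on_completed_event();
        return 0;
    }
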
execlists 1621 drivers/gpu/drm/i915/gt/intel_lrc.c if (!engine->execlists.pending[0]) {
execlists 1638 drivers/gpu/drm/i915/gt/intel_lrc.c if (!READ_ONCE(engine->execlists.pending[0])) {
execlists 1648 drivers/gpu/drm/i915/gt/intel_lrc.c from_timer(engine, timer, execlists.timer);
execlists 1651 drivers/gpu/drm/i915/gt/intel_lrc.c tasklet_hi_schedule(&engine->execlists.tasklet);
execlists 1664 drivers/gpu/drm/i915/gt/intel_lrc.c struct intel_engine_execlists * const execlists = &engine->execlists;
execlists 1666 drivers/gpu/drm/i915/gt/intel_lrc.c if (reset_in_progress(execlists))
execlists 1669 drivers/gpu/drm/i915/gt/intel_lrc.c if (execlists->tasklet.func == execlists_submission_tasklet)
execlists 1672 drivers/gpu/drm/i915/gt/intel_lrc.c tasklet_hi_schedule(&execlists->tasklet);
execlists 1678 drivers/gpu/drm/i915/gt/intel_lrc.c struct intel_engine_execlists *execlists = &engine->execlists;
execlists 1680 drivers/gpu/drm/i915/gt/intel_lrc.c if (rq_prio(rq) <= execlists->queue_priority_hint)
execlists 1683 drivers/gpu/drm/i915/gt/intel_lrc.c execlists->queue_priority_hint = rq_prio(rq);
execlists 1697 drivers/gpu/drm/i915/gt/intel_lrc.c GEM_BUG_ON(RB_EMPTY_ROOT(&engine->execlists.queue.rb_root));
execlists 2375 drivers/gpu/drm/i915/gt/intel_lrc.c struct intel_engine_execlists * const execlists = &engine->execlists;
execlists 2379 drivers/gpu/drm/i915/gt/intel_lrc.c atomic_read(&execlists->tasklet.count));
execlists 2390 drivers/gpu/drm/i915/gt/intel_lrc.c __tasklet_disable_sync_once(&execlists->tasklet);
execlists 2391 drivers/gpu/drm/i915/gt/intel_lrc.c GEM_BUG_ON(!reset_in_progress(execlists));
execlists 2414 drivers/gpu/drm/i915/gt/intel_lrc.c struct intel_engine_execlists * const execlists = &engine->execlists;
execlists 2415 drivers/gpu/drm/i915/gt/intel_lrc.c const unsigned int reset_value = execlists->csb_size - 1;
execlists 2428 drivers/gpu/drm/i915/gt/intel_lrc.c execlists->csb_head = reset_value;
execlists 2429 drivers/gpu/drm/i915/gt/intel_lrc.c WRITE_ONCE(*execlists->csb_write, reset_value);
execlists 2432 drivers/gpu/drm/i915/gt/intel_lrc.c invalidate_csb_entries(&execlists->csb_status[0],
execlists 2433 drivers/gpu/drm/i915/gt/intel_lrc.c &execlists->csb_status[reset_value]);
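
Note: the process_csb() and reset_csb_pointers() entries above read as a classic single-producer ring: the driver keeps a software head (csb_head), the hardware advances a write pointer (*csb_write), and after a reset both are parked at csb_size - 1 (intel_lrc.c:2414-2429) so the next event the hardware posts lands at index 0. A user-space sketch of that walk; parse_event() is hypothetical (the real code dispatches to gen8_csb_parse() or gen12_csb_parse() on two dwords per entry), and the 12-entry size is an assumed Gen11 value.

    #include <stdint.h>
    #include <stdio.h>

    #define CSB_SIZE 12 /* assumed GEN11_CSB_ENTRIES; gen8 parts use fewer */

    static uint32_t csb[CSB_SIZE * 2]; /* two dwords per status event */
    static uint8_t csb_head;           /* software read pointer */
    static uint8_t csb_write;          /* stands in for *execlists->csb_write */

    static void parse_event(const uint32_t *entry)
    {
        printf("csb event %#x %#x\n", (unsigned)entry[0], (unsigned)entry[1]);
    }

    static void process_csb_model(void)
    {
        uint8_t head = csb_head;
        uint8_t tail = csb_write; /* READ_ONCE() in the kernel */

        while (head != tail) {
            if (++head == CSB_SIZE)
                head = 0; /* pre-increment walk wraps the ring */
            parse_event(&csb[2 * head]);
        }
        csb_head = head; /* remember how far we consumed, as at :1602 */
    }

    int main(void)
    {
        csb_head = CSB_SIZE - 1;  /* reset parking: next event lands at 0 */
        csb_write = CSB_SIZE - 1;

        csb[0] = 0x1;
        csb[1] = 0x2;
        csb_write = 0; /* hardware posts one event at index 0 */
        process_csb_model();
        return 0;
    }
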
execlists 2461 drivers/gpu/drm/i915/gt/intel_lrc.c struct intel_engine_execlists * const execlists = &engine->execlists;
execlists 2476 drivers/gpu/drm/i915/gt/intel_lrc.c rq = execlists_active(execlists);
execlists 2545 drivers/gpu/drm/i915/gt/intel_lrc.c cancel_port_requests(execlists);
execlists 2569 drivers/gpu/drm/i915/gt/intel_lrc.c struct intel_engine_execlists * const execlists = &engine->execlists;
execlists 2599 drivers/gpu/drm/i915/gt/intel_lrc.c while ((rb = rb_first_cached(&execlists->queue))) {
execlists 2608 drivers/gpu/drm/i915/gt/intel_lrc.c rb_erase_cached(&p->node, &execlists->queue);
execlists 2613 drivers/gpu/drm/i915/gt/intel_lrc.c while ((rb = rb_first_cached(&execlists->virtual))) {
execlists 2617 drivers/gpu/drm/i915/gt/intel_lrc.c rb_erase_cached(rb, &execlists->virtual);
execlists 2629 drivers/gpu/drm/i915/gt/intel_lrc.c ve->base.execlists.queue_priority_hint = INT_MIN;
execlists 2636 drivers/gpu/drm/i915/gt/intel_lrc.c execlists->queue_priority_hint = INT_MIN;
execlists 2637 drivers/gpu/drm/i915/gt/intel_lrc.c execlists->queue = RB_ROOT_CACHED;
execlists 2639 drivers/gpu/drm/i915/gt/intel_lrc.c GEM_BUG_ON(__tasklet_is_enabled(&execlists->tasklet));
execlists 2640 drivers/gpu/drm/i915/gt/intel_lrc.c execlists->tasklet.func = nop_submission_tasklet;
execlists 2647 drivers/gpu/drm/i915/gt/intel_lrc.c struct intel_engine_execlists * const execlists = &engine->execlists;
execlists 2654 drivers/gpu/drm/i915/gt/intel_lrc.c GEM_BUG_ON(!reset_in_progress(execlists));
execlists 2655 drivers/gpu/drm/i915/gt/intel_lrc.c if (!RB_EMPTY_ROOT(&execlists->queue.rb_root))
execlists 2656 drivers/gpu/drm/i915/gt/intel_lrc.c execlists->tasklet.func(execlists->tasklet.data);
execlists 2658 drivers/gpu/drm/i915/gt/intel_lrc.c if (__tasklet_enable(&execlists->tasklet))
execlists 2660 drivers/gpu/drm/i915/gt/intel_lrc.c tasklet_hi_schedule(&execlists->tasklet);
execlists 2662 drivers/gpu/drm/i915/gt/intel_lrc.c atomic_read(&execlists->tasklet.count));
execlists 2988 drivers/gpu/drm/i915/gt/intel_lrc.c del_timer(&engine->execlists.timer);
execlists 2996 drivers/gpu/drm/i915/gt/intel_lrc.c engine->execlists.tasklet.func = execlists_submission_tasklet;
execlists 3096 drivers/gpu/drm/i915/gt/intel_lrc.c tasklet_init(&engine->execlists.tasklet,
execlists 3098 drivers/gpu/drm/i915/gt/intel_lrc.c timer_setup(&engine->execlists.timer, execlists_submission_timer, 0);
execlists 3111 drivers/gpu/drm/i915/gt/intel_lrc.c struct intel_engine_execlists * const execlists = &engine->execlists;
execlists 3130 drivers/gpu/drm/i915/gt/intel_lrc.c execlists->submit_reg = uncore->regs +
execlists 3132 drivers/gpu/drm/i915/gt/intel_lrc.c execlists->ctrl_reg = uncore->regs +
execlists 3135 drivers/gpu/drm/i915/gt/intel_lrc.c execlists->submit_reg = uncore->regs +
execlists 3139 drivers/gpu/drm/i915/gt/intel_lrc.c execlists->csb_status =
execlists 3142 drivers/gpu/drm/i915/gt/intel_lrc.c execlists->csb_write =
execlists 3146 drivers/gpu/drm/i915/gt/intel_lrc.c execlists->csb_size = GEN8_CSB_ENTRIES;
execlists 3148 drivers/gpu/drm/i915/gt/intel_lrc.c execlists->csb_size = GEN11_CSB_ENTRIES;
execlists 3415 drivers/gpu/drm/i915/gt/intel_lrc.c return &ve->base.execlists.default_priolist.requests[0];
execlists 3439 drivers/gpu/drm/i915/gt/intel_lrc.c rb_erase_cached(node, &sibling->execlists.virtual);
execlists 3443 drivers/gpu/drm/i915/gt/intel_lrc.c GEM_BUG_ON(__tasklet_is_scheduled(&ve->base.execlists.tasklet));
execlists 3545 drivers/gpu/drm/i915/gt/intel_lrc.c mask, ve->base.execlists.queue_priority_hint);
execlists 3553 drivers/gpu/drm/i915/gt/intel_lrc.c const int prio = ve->base.execlists.queue_priority_hint;
execlists 3574 drivers/gpu/drm/i915/gt/intel_lrc.c &sibling->execlists.virtual);
execlists 3588 drivers/gpu/drm/i915/gt/intel_lrc.c first = rb_first_cached(&sibling->execlists.virtual) ==
execlists 3593 drivers/gpu/drm/i915/gt/intel_lrc.c rb_erase_cached(&node->rb, &sibling->execlists.virtual);
execlists 3598 drivers/gpu/drm/i915/gt/intel_lrc.c parent = &sibling->execlists.virtual.rb_root.rb_node;
execlists 3614 drivers/gpu/drm/i915/gt/intel_lrc.c &sibling->execlists.virtual,
execlists 3620 drivers/gpu/drm/i915/gt/intel_lrc.c if (first && prio > sibling->execlists.queue_priority_hint) {
execlists 3621 drivers/gpu/drm/i915/gt/intel_lrc.c sibling->execlists.queue_priority_hint = prio;
execlists 3622 drivers/gpu/drm/i915/gt/intel_lrc.c tasklet_hi_schedule(&sibling->execlists.tasklet);
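
Note: the virtual-engine entries above (intel_lrc.c:3574-3622) show the submit decision: a virtual request's node is (re)inserted into each sibling's execlists.virtual tree, and a sibling's tasklet is kicked only when the node landed first in that tree and its priority beats the sibling's queue_priority_hint, which is then raised to swallow duplicate kicks. A minimal sketch of just that gate; kick_tasklet() stands in for tasklet_hi_schedule().

    #include <limits.h>
    #include <stdio.h>

    struct sibling_model {
        int queue_priority_hint; /* INT_MIN when nothing is queued */
    };

    static void kick_tasklet(struct sibling_model *s)
    {
        printf("kicked sibling %p\n", (void *)s);
    }

    static void maybe_kick(struct sibling_model *s, int first, int prio)
    {
        /* mirrors the test at intel_lrc.c:3620 */
        if (first && prio > s->queue_priority_hint) {
            s->queue_priority_hint = prio;
            kick_tasklet(s);
        }
    }

    int main(void)
    {
        struct sibling_model s = { .queue_priority_hint = INT_MIN };

        maybe_kick(&s, 1, 0); /* first insert: kicks and raises the hint */
        maybe_kick(&s, 1, 0); /* same priority again: suppressed */
        return 0;
    }
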
execlists 3655 drivers/gpu/drm/i915/gt/intel_lrc.c ve->base.execlists.queue_priority_hint = INT_MIN;
execlists 3658 drivers/gpu/drm/i915/gt/intel_lrc.c ve->base.execlists.queue_priority_hint = rq_prio(rq);
execlists 3664 drivers/gpu/drm/i915/gt/intel_lrc.c tasklet_schedule(&ve->base.execlists.tasklet);
execlists 3763 drivers/gpu/drm/i915/gt/intel_lrc.c ve->base.execlists.queue_priority_hint = INT_MIN;
execlists 3764 drivers/gpu/drm/i915/gt/intel_lrc.c tasklet_init(&ve->base.execlists.tasklet,
execlists 3788 drivers/gpu/drm/i915/gt/intel_lrc.c if (sibling->execlists.tasklet.func !=
execlists 3921 drivers/gpu/drm/i915/gt/intel_lrc.c const struct intel_engine_execlists *execlists = &engine->execlists;
execlists 3948 drivers/gpu/drm/i915/gt/intel_lrc.c if (execlists->queue_priority_hint != INT_MIN)
execlists 3950 drivers/gpu/drm/i915/gt/intel_lrc.c execlists->queue_priority_hint);
execlists 3951 drivers/gpu/drm/i915/gt/intel_lrc.c for (rb = rb_first_cached(&execlists->queue); rb; rb = rb_next(rb)) {
execlists 3973 drivers/gpu/drm/i915/gt/intel_lrc.c for (rb = rb_first_cached(&execlists->virtual); rb; rb = rb_next(rb)) {
execlists 1614 drivers/gpu/drm/i915/gt/selftest_hangcheck.c struct tasklet_struct * const t = &engine->execlists.tasklet;
execlists 781 drivers/gpu/drm/i915/gt/selftest_lrc.c engine->execlists.preempt_hang.count = 0;
execlists 828 drivers/gpu/drm/i915/gt/selftest_lrc.c if (engine->execlists.preempt_hang.count) {
execlists 830 drivers/gpu/drm/i915/gt/selftest_lrc.c engine->execlists.preempt_hang.count);
execlists 900 drivers/gpu/drm/i915/gt/selftest_lrc.c engine->execlists.preempt_hang.count = 0;
execlists 917 drivers/gpu/drm/i915/gt/selftest_lrc.c mod_timer(&engine->execlists.timer, jiffies + HZ);
execlists 942 drivers/gpu/drm/i915/gt/selftest_lrc.c if (engine->execlists.preempt_hang.count) {
execlists 945 drivers/gpu/drm/i915/gt/selftest_lrc.c engine->execlists.preempt_hang.count,
execlists 1066 drivers/gpu/drm/i915/gt/selftest_lrc.c engine->execlists.preempt_hang.count = 0;
execlists 1111 drivers/gpu/drm/i915/gt/selftest_lrc.c if (engine->execlists.preempt_hang.count) {
execlists 1114 drivers/gpu/drm/i915/gt/selftest_lrc.c engine->execlists.preempt_hang.count,
execlists 1357 drivers/gpu/drm/i915/gt/selftest_lrc.c init_completion(&engine->execlists.preempt_hang.completion);
execlists 1358 drivers/gpu/drm/i915/gt/selftest_lrc.c engine->execlists.preempt_hang.inject_hang = true;
execlists 1362 drivers/gpu/drm/i915/gt/selftest_lrc.c if (!wait_for_completion_timeout(&engine->execlists.preempt_hang.completion,
execlists 1375 drivers/gpu/drm/i915/gt/selftest_lrc.c engine->execlists.preempt_hang.inject_hang = false;
execlists 129 drivers/gpu/drm/i915/gt/selftest_reset.c tasklet_disable_nosync(&engine->execlists.tasklet);
execlists 148 drivers/gpu/drm/i915/gt/selftest_reset.c tasklet_enable(&engine->execlists.tasklet);
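
Note: the selftest entries above (selftest_lrc.c:1357-1375) pair with inject_preempt_hang() (intel_engine.h:512-517): the test arms inject_hang and blocks on a completion; the submission path, seeing the flag, fires the completion and reports true so the caller skips the hardware submit, leaving a "hung" preemption for the reset path to recover. A single-threaded sketch with the completion modeled as a plain flag; all names here are stand-ins.

    #include <assert.h>
    #include <stdbool.h>

    struct preempt_hang_model {
        bool inject_hang; /* armed by the selftest */
        bool completion;  /* stands in for struct completion */
    };

    /* submission side: mirrors inject_preempt_hang() */
    static bool inject_preempt_hang_model(struct preempt_hang_model *ph)
    {
        if (!ph->inject_hang)
            return false;
        ph->completion = true; /* complete(&...preempt_hang.completion) */
        return true;           /* caller suppresses the hardware submit */
    }

    int main(void)
    {
        struct preempt_hang_model ph = { 0 };

        ph.inject_hang = true;                  /* selftest arms the hang */
        assert(inject_preempt_hang_model(&ph)); /* submission path fires */
        assert(ph.completion);                  /* the test's wait succeeds */
        ph.inject_hang = false;                 /* disarm afterwards */
        return 0;
    }
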
execlists 539 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c struct intel_engine_execlists * const execlists = &engine->execlists;
execlists 540 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c struct i915_request **first = execlists->inflight;
execlists 541 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c struct i915_request ** const last_port = first + execlists->port_mask;
execlists 562 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c while ((rb = rb_first_cached(&execlists->queue))) {
execlists 573 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c port - execlists->inflight);
execlists 583 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c rb_erase_cached(&p->node, &execlists->queue);
execlists 587 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c execlists->queue_priority_hint =
execlists 590 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c *port = schedule_in(last, port - execlists->inflight);
execlists 594 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c execlists->active = execlists->inflight;
execlists 600 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c struct intel_engine_execlists * const execlists = &engine->execlists;
execlists 606 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c for (port = execlists->inflight; (rq = *port); port++) {
execlists 612 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c if (port != execlists->inflight) {
execlists 613 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c int idx = port - execlists->inflight;
execlists 614 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c int rem = ARRAY_SIZE(execlists->inflight) - idx;
execlists 615 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c memmove(execlists->inflight, port, rem * sizeof(*port));
execlists 625 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c struct intel_engine_execlists * const execlists = &engine->execlists;
execlists 638 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c __tasklet_disable_sync_once(&execlists->tasklet);
execlists 642 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c cancel_port_requests(struct intel_engine_execlists * const execlists)
execlists 648 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c for (port = execlists->active; (rq = *port); port++)
execlists 650 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c execlists->active =
execlists 651 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c memset(execlists->inflight, 0, sizeof(execlists->inflight));
execlists 656 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c struct intel_engine_execlists * const execlists = &engine->execlists;
execlists 662 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c cancel_port_requests(execlists);
execlists 665 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c rq = execlists_unwind_incomplete_requests(execlists);
execlists 681 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c struct intel_engine_execlists * const execlists = &engine->execlists;
execlists 705 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c cancel_port_requests(execlists);
execlists 716 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c while ((rb = rb_first_cached(&execlists->queue))) {
execlists 727 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c rb_erase_cached(&p->node, &execlists->queue);
execlists 733 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c execlists->queue_priority_hint = INT_MIN;
execlists 734 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c execlists->queue = RB_ROOT_CACHED;
execlists 741 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c struct intel_engine_execlists * const execlists = &engine->execlists;
execlists 743 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c if (__tasklet_enable(&execlists->tasklet))
execlists 745 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c tasklet_hi_schedule(&execlists->tasklet);
execlists 748 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c atomic_read(&execlists->tasklet.count));
execlists 1092 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c engine->execlists.tasklet.func = guc_submission_tasklet;
execlists 1135 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c BUILD_BUG_ON(ARRAY_SIZE(engine->execlists.inflight) *
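
Note: the GuC tasklet entries above (intel_guc_submission.c:606-615) show how completed work is trimmed without the execlists backend's pending[]/promote step: finished requests are skipped at the front of inflight[] and the still-running tail is memmove'd down, so the array always begins with the oldest active request and stays NULL-terminated. A sketch under those assumptions:

    #include <assert.h>
    #include <stddef.h>
    #include <string.h>

    #define NUM_PORTS 2

    struct request { int completed; };

    static struct request *inflight[NUM_PORTS + 1]; /* NULL-terminated */

    static void pop_completed(void)
    {
        struct request **port = inflight;

        while (*port && (*port)->completed)
            port++; /* retire from the front */

        if (port != inflight) {
            size_t idx = port - inflight;
            size_t rem = (NUM_PORTS + 1) - idx; /* ARRAY_SIZE - idx */

            memmove(inflight, port, rem * sizeof(*port));
        }
    }

    int main(void)
    {
        struct request done = { .completed = 1 }, running = { 0 };

        inflight[0] = &done;
        inflight[1] = &running;
        pop_completed();
        assert(inflight[0] == &running && !inflight[1]);
        return 0;
    }
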
execlists 1239 drivers/gpu/drm/i915/i915_gpu_error.c const struct intel_engine_execlists * const execlists = &engine->execlists;
execlists 1240 drivers/gpu/drm/i915/i915_gpu_error.c struct i915_request * const *port = execlists->active;
execlists 43 drivers/gpu/drm/i915/i915_scheduler.c static void assert_priolists(struct intel_engine_execlists * const execlists)
execlists 51 drivers/gpu/drm/i915/i915_scheduler.c GEM_BUG_ON(rb_first_cached(&execlists->queue) !=
execlists 52 drivers/gpu/drm/i915/i915_scheduler.c rb_first(&execlists->queue.rb_root));
execlists 55 drivers/gpu/drm/i915/i915_scheduler.c for (rb = rb_first_cached(&execlists->queue); rb; rb = rb_next(rb)) {
execlists 74 drivers/gpu/drm/i915/i915_scheduler.c struct intel_engine_execlists * const execlists = &engine->execlists;
execlists 81 drivers/gpu/drm/i915/i915_scheduler.c assert_priolists(execlists);
execlists 86 drivers/gpu/drm/i915/i915_scheduler.c if (unlikely(execlists->no_priolist))
execlists 92 drivers/gpu/drm/i915/i915_scheduler.c parent = &execlists->queue.rb_root.rb_node;
execlists 107 drivers/gpu/drm/i915/i915_scheduler.c p = &execlists->default_priolist;
execlists 122 drivers/gpu/drm/i915/i915_scheduler.c execlists->no_priolist = true;
execlists 131 drivers/gpu/drm/i915/i915_scheduler.c rb_insert_color_cached(&p->node, &execlists->queue, first);
execlists 202 drivers/gpu/drm/i915/i915_scheduler.c if (prio <= engine->execlists.queue_priority_hint)
execlists 208 drivers/gpu/drm/i915/i915_scheduler.c inflight = execlists_active(&engine->execlists);
execlists 222 drivers/gpu/drm/i915/i915_scheduler.c engine->execlists.queue_priority_hint = prio;
execlists 224 drivers/gpu/drm/i915/i915_scheduler.c tasklet_hi_schedule(&engine->execlists.tasklet);
execlists 34 drivers/gpu/drm/i915/selftests/i915_live_selftests.h selftest(execlists, intel_execlists_live_selftests)
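
Note: the i915_scheduler.c entries above sketch the priolist lookup: each priority level owns a node in execlists->queue (a cached rb-tree whose leftmost node assert_priolists() checks at lines 51-52), a new level normally allocates its node, and on allocation failure the embedded default_priolist is pressed into service while no_priolist is latched so later lookups coalesce onto it (lines 86-131). A user-space sketch of that fallback, using a sorted singly linked list in place of the rb-tree; PRIORITY_NORMAL and the list layout are illustrative stand-ins.

    #include <stdbool.h>
    #include <stdlib.h>

    #define PRIORITY_NORMAL 0 /* stand-in for the driver's default level */

    struct priolist {
        int priority;
        struct priolist *next;
    };

    struct sched_model {
        struct priolist default_priolist; /* embedded, never allocated */
        struct priolist *queue;           /* sorted, highest priority first */
        bool no_priolist;
        bool fail_alloc;                  /* test knob to force the fallback */
    };

    static struct priolist *lookup_priolist(struct sched_model *s, int prio)
    {
        struct priolist **link, *p;

        if (s->no_priolist)
            prio = PRIORITY_NORMAL; /* degraded: everything coalesces */

        for (link = &s->queue; (p = *link); link = &p->next) {
            if (p->priority == prio)
                return p; /* existing bucket */
            if (p->priority < prio)
                break; /* insertion point found */
        }

        if (prio == PRIORITY_NORMAL) {
            p = &s->default_priolist; /* i915_scheduler.c:107 */
        } else {
            p = s->fail_alloc ? NULL : malloc(sizeof(*p));
            if (!p) {
                s->no_priolist = true; /* i915_scheduler.c:122 */
                return lookup_priolist(s, PRIORITY_NORMAL);
            }
        }

        p->priority = prio;
        p->next = *link;
        *link = p;
        return p;
    }

    int main(void)
    {
        struct sched_model s = { .fail_alloc = true };

        /* allocation "fails", so prio 5 lands on the default list: exit 0 */
        return lookup_priolist(&s, 5) != &s.default_priolist;
    }
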