1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24 #ifndef __AMDGPU_SDMA_H__
25 #define __AMDGPU_SDMA_H__
26
27
28 #define AMDGPU_MAX_SDMA_INSTANCES 8
29
/*
 * Per-instance SDMA interrupt source identifiers.
 *
 * One enumerator per possible SDMA engine instance — the count matches
 * AMDGPU_MAX_SDMA_INSTANCES (8).  AMDGPU_SDMA_IRQ_LAST is an
 * end-of-list/count sentinel, not a real interrupt source.
 */
enum amdgpu_sdma_irq {
	AMDGPU_SDMA_IRQ_INSTANCE0 = 0,
	AMDGPU_SDMA_IRQ_INSTANCE1,
	AMDGPU_SDMA_IRQ_INSTANCE2,
	AMDGPU_SDMA_IRQ_INSTANCE3,
	AMDGPU_SDMA_IRQ_INSTANCE4,
	AMDGPU_SDMA_IRQ_INSTANCE5,
	AMDGPU_SDMA_IRQ_INSTANCE6,
	AMDGPU_SDMA_IRQ_INSTANCE7,
	AMDGPU_SDMA_IRQ_LAST
};
41
/*
 * State for a single SDMA engine instance.
 */
struct amdgpu_sdma_instance {
	/* SDMA firmware image and versions parsed from it */
	const struct firmware *fw;
	uint32_t fw_version;
	uint32_t feature_version;

	struct amdgpu_ring ring;	/* main SDMA submission ring */
	struct amdgpu_ring page;	/* page-queue ring (used when the
					 * engine exposes a page queue —
					 * see amdgpu_sdma.has_page_queue) */
	bool burst_nop;			/* engine supports burst NOP packets
					 * — NOTE(review): inferred from the
					 * name; confirm against the IP code */
};
52
/*
 * Device-wide SDMA state: all engine instances plus shared IRQ sources.
 */
struct amdgpu_sdma {
	/* per-engine state; only the first num_instances entries are valid */
	struct amdgpu_sdma_instance instance[AMDGPU_MAX_SDMA_INSTANCES];
	struct amdgpu_irq_src trap_irq;		/* command-completion trap IRQ */
	struct amdgpu_irq_src illegal_inst_irq;	/* illegal-instruction IRQ */
	struct amdgpu_irq_src ecc_irq;		/* ECC error IRQ */
	int num_instances;			/* engines present on this ASIC */
	/* SRBM soft-reset bitmask — presumably latched during GPU reset
	 * handling; verify against the IP-specific soft_reset code */
	uint32_t srbm_soft_reset;
	bool has_page_queue;			/* engines expose a page queue
						 * (instance[].page is usable) */
	struct ras_common_if *ras_if;		/* RAS context, paired with
						 * ecc_irq above */
};
63
64
65
66
67
/*
 * Engine-provided hooks for GPU buffer copy and fill, plus the limits
 * callers need to split work into per-command chunks.
 */
struct amdgpu_buffer_funcs {
	/* maximum byte count a single copy command can transfer */
	uint32_t copy_max_bytes;

	/* number of ring dwords one copy command occupies */
	unsigned copy_num_dw;

	/* emit one copy command into @ib
	 * @src_offset: GPU address of the source
	 * @dst_offset: GPU address of the destination
	 * @byte_count: bytes to transfer (<= copy_max_bytes)
	 */
	void (*emit_copy_buffer)(struct amdgpu_ib *ib,
				 uint64_t src_offset,
				 uint64_t dst_offset,
				 uint32_t byte_count);

	/* maximum byte count a single fill command can cover */
	uint32_t fill_max_bytes;

	/* number of ring dwords one fill command occupies */
	unsigned fill_num_dw;

	/* emit one fill command into @ib
	 * @src_data:   value written to every dword of the destination
	 * @dst_offset: GPU address of the destination
	 * @byte_count: bytes to fill (<= fill_max_bytes)
	 */
	void (*emit_fill_buffer)(struct amdgpu_ib *ib,
				 uint32_t src_data,
				 uint64_t dst_offset,
				 uint32_t byte_count);
};
99
/* Dispatch a buffer-copy command through the device's buffer_funcs table. */
#define amdgpu_emit_copy_buffer(adev, ib, s, d, b) (adev)->mman.buffer_funcs->emit_copy_buffer((ib), (s), (d), (b))
/* Dispatch a buffer-fill command through the device's buffer_funcs table. */
#define amdgpu_emit_fill_buffer(adev, ib, s, d, b) (adev)->mman.buffer_funcs->emit_fill_buffer((ib), (s), (d), (b))
102
/* Map a ring back to the SDMA instance that owns it (NULL if not an SDMA
 * ring — presumably; confirm against the implementation). */
struct amdgpu_sdma_instance *
amdgpu_sdma_get_instance_from_ring(struct amdgpu_ring *ring);
/* Store the owning instance's index in @index; returns 0 on success or a
 * negative errno (kernel convention — verify against the .c file). */
int amdgpu_sdma_get_index_from_ring(struct amdgpu_ring *ring, uint32_t *index);
/* GPU (MC) address of the CSA used for SDMA preemption for @vmid. */
uint64_t amdgpu_sdma_get_csa_mc_addr(struct amdgpu_ring *ring, unsigned vmid);
107 #endif