This source file includes following definitions.
- ishtp_cl_alloc_dma_buf
- ishtp_cl_free_dma_buf
- ishtp_cl_get_dma_send_buf
- ishtp_cl_release_dma_acked_mem
1
2
3
4
5
6
7
8 #include <linux/slab.h>
9 #include <linux/sched.h>
10 #include <linux/wait.h>
11 #include <linux/delay.h>
12 #include <linux/dma-mapping.h>
13 #include "ishtp-dev.h"
14 #include "client.h"
15
16
17
18
19
20
21
22
23
24 void ishtp_cl_alloc_dma_buf(struct ishtp_device *dev)
25 {
26 dma_addr_t h;
27
28 if (dev->ishtp_host_dma_tx_buf)
29 return;
30
31 dev->ishtp_host_dma_tx_buf_size = 1024*1024;
32 dev->ishtp_host_dma_rx_buf_size = 1024*1024;
33
34
35 dev->ishtp_host_dma_tx_buf = dma_alloc_coherent(dev->devc,
36 dev->ishtp_host_dma_tx_buf_size,
37 &h, GFP_KERNEL);
38 if (dev->ishtp_host_dma_tx_buf)
39 dev->ishtp_host_dma_tx_buf_phys = h;
40
41 dev->ishtp_dma_num_slots = dev->ishtp_host_dma_tx_buf_size /
42 DMA_SLOT_SIZE;
43
44 dev->ishtp_dma_tx_map = kcalloc(dev->ishtp_dma_num_slots,
45 sizeof(uint8_t),
46 GFP_KERNEL);
47 spin_lock_init(&dev->ishtp_dma_tx_lock);
48
49
50 dev->ishtp_host_dma_rx_buf = dma_alloc_coherent(dev->devc,
51 dev->ishtp_host_dma_rx_buf_size,
52 &h, GFP_KERNEL);
53
54 if (dev->ishtp_host_dma_rx_buf)
55 dev->ishtp_host_dma_rx_buf_phys = h;
56 }
57
58
59
60
61
62
63
64
65
66 void ishtp_cl_free_dma_buf(struct ishtp_device *dev)
67 {
68 dma_addr_t h;
69
70 if (dev->ishtp_host_dma_tx_buf) {
71 h = dev->ishtp_host_dma_tx_buf_phys;
72 dma_free_coherent(dev->devc, dev->ishtp_host_dma_tx_buf_size,
73 dev->ishtp_host_dma_tx_buf, h);
74 }
75
76 if (dev->ishtp_host_dma_rx_buf) {
77 h = dev->ishtp_host_dma_rx_buf_phys;
78 dma_free_coherent(dev->devc, dev->ishtp_host_dma_rx_buf_size,
79 dev->ishtp_host_dma_rx_buf, h);
80 }
81
82 kfree(dev->ishtp_dma_tx_map);
83 dev->ishtp_host_dma_tx_buf = NULL;
84 dev->ishtp_host_dma_rx_buf = NULL;
85 dev->ishtp_dma_tx_map = NULL;
86 }
87
88
89
90
91
92
93
94
95
96
97
98 void *ishtp_cl_get_dma_send_buf(struct ishtp_device *dev,
99 uint32_t size)
100 {
101 unsigned long flags;
102 int i, j, free;
103
104 int required_slots = (size / DMA_SLOT_SIZE)
105 + 1 * (size % DMA_SLOT_SIZE != 0);
106
107 spin_lock_irqsave(&dev->ishtp_dma_tx_lock, flags);
108 for (i = 0; i <= (dev->ishtp_dma_num_slots - required_slots); i++) {
109 free = 1;
110 for (j = 0; j < required_slots; j++)
111 if (dev->ishtp_dma_tx_map[i+j]) {
112 free = 0;
113 i += j;
114 break;
115 }
116 if (free) {
117
118 for (j = 0; j < required_slots; j++)
119 dev->ishtp_dma_tx_map[i+j] = 1;
120 spin_unlock_irqrestore(&dev->ishtp_dma_tx_lock, flags);
121 return (i * DMA_SLOT_SIZE) +
122 (unsigned char *)dev->ishtp_host_dma_tx_buf;
123 }
124 }
125 spin_unlock_irqrestore(&dev->ishtp_dma_tx_lock, flags);
126 dev_err(dev->devc, "No free DMA buffer to send msg\n");
127 return NULL;
128 }
129
130
131
132
133
134
135
136
137
138
/*
 * ishtp_cl_release_dma_acked_mem() - Release Tx slots covered by an ack
 * @dev: ishtp device
 * @msg_addr: address of the acked message inside the Tx DMA buffer
 * @size: acked message size in bytes
 *
 * Marks as free the DMA_SLOT_SIZE slots that carried the acked message.
 * Rejects the ack (with an error message, no state change beyond slots
 * already cleared) if @msg_addr is not slot-aligned, or if any slot in
 * the acked range is out of bounds or was not marked busy.
 */
void ishtp_cl_release_dma_acked_mem(struct ishtp_device *dev,
				void *msg_addr,
				uint8_t size)
{
	unsigned long flags;
	/* number of slots the message occupied: size rounded up */
	int acked_slots = (size / DMA_SLOT_SIZE)
		+ 1 * (size % DMA_SLOT_SIZE != 0);
	int i, j;

	/*
	 * The ack must point at a slot boundary. (void* arithmetic is a
	 * GNU extension — standard kernel usage.)
	 */
	if ((msg_addr - dev->ishtp_host_dma_tx_buf) % DMA_SLOT_SIZE) {
		dev_err(dev->devc, "Bad DMA Tx ack address\n");
		return;
	}

	/* index of the first slot of the acked message */
	i = (msg_addr - dev->ishtp_host_dma_tx_buf) / DMA_SLOT_SIZE;
	spin_lock_irqsave(&dev->ishtp_dma_tx_lock, flags);
	for (j = 0; j < acked_slots; j++) {
		if ((i + j) >= dev->ishtp_dma_num_slots ||
			!dev->ishtp_dma_tx_map[i+j]) {
			/* slot out of range or already free: bogus ack */
			spin_unlock_irqrestore(&dev->ishtp_dma_tx_lock, flags);
			dev_err(dev->devc, "Bad DMA Tx ack address\n");
			return;
		}
		dev->ishtp_dma_tx_map[i+j] = 0;
	}
	spin_unlock_irqrestore(&dev->ishtp_dma_tx_lock, flags);
}