This source file includes the following definitions; a short usage sketch follows the list.
- safexcel_init_ring_descriptors
- safexcel_select_ring
- safexcel_ring_next_wptr
- safexcel_ring_next_rptr
- safexcel_ring_curr_rptr
- safexcel_ring_first_rdr_index
- safexcel_ring_rdr_rdesc_index
- safexcel_ring_rollback_wptr
- safexcel_add_cdesc
- safexcel_add_rdesc
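The functions above form the ring-management API used by the rest of the driver. As a rough illustration of how they fit together, the sketch below shows a hypothetical caller reserving one command and one result descriptor for a single-segment request and rolling the command ring back if the result descriptor cannot be reserved. The helper name example_enqueue_one and its arguments are assumptions for illustration only; everything it calls is defined in this file or in safexcel.h.

/* Hypothetical example, not part of this file. */
static int example_enqueue_one(struct safexcel_crypto_priv *priv, int ring,
			       dma_addr_t src, dma_addr_t dst, u32 len,
			       dma_addr_t ctxt)
{
	struct safexcel_command_desc *cdesc;
	struct safexcel_result_desc *rdesc;

	/* Single segment: it is both the first and the last of the request. */
	cdesc = safexcel_add_cdesc(priv, ring, true, true, src, len, len, ctxt);
	if (IS_ERR(cdesc))
		return PTR_ERR(cdesc);

	/* One result descriptor to receive the output data. */
	rdesc = safexcel_add_rdesc(priv, ring, true, true, dst, len);
	if (IS_ERR(rdesc)) {
		/* Give back the command slot reserved above. */
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
		return PTR_ERR(rdesc);
	}

	return 0;
}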
#include <linux/dma-mapping.h>
#include <linux/spinlock.h>

#include "safexcel.h"

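/*
 * Allocate DMA-coherent, device-managed backing memory for one ring pair
 * (command ring CDR and result ring RDR) and reset the software read/write
 * pointers.
 */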
int safexcel_init_ring_descriptors(struct safexcel_crypto_priv *priv,
				   struct safexcel_desc_ring *cdr,
				   struct safexcel_desc_ring *rdr)
{
	cdr->offset = sizeof(u32) * priv->config.cd_offset;
	cdr->base = dmam_alloc_coherent(priv->dev,
					cdr->offset * EIP197_DEFAULT_RING_SIZE,
					&cdr->base_dma, GFP_KERNEL);
	if (!cdr->base)
		return -ENOMEM;
	cdr->write = cdr->base;
	cdr->base_end = cdr->base + cdr->offset * (EIP197_DEFAULT_RING_SIZE - 1);
	cdr->read = cdr->base;

	rdr->offset = sizeof(u32) * priv->config.rd_offset;
	rdr->base = dmam_alloc_coherent(priv->dev,
					rdr->offset * EIP197_DEFAULT_RING_SIZE,
					&rdr->base_dma, GFP_KERNEL);
	if (!rdr->base)
		return -ENOMEM;
	rdr->write = rdr->base;
	rdr->base_end = rdr->base + rdr->offset * (EIP197_DEFAULT_RING_SIZE - 1);
	rdr->read = rdr->base;

	return 0;
}

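/* Pick a ring in round-robin fashion across all configured rings. */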
inline int safexcel_select_ring(struct safexcel_crypto_priv *priv)
{
	return (atomic_inc_return(&priv->ring_used) % priv->config.rings);
}

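/*
 * Reserve the next free slot: return the current write pointer and advance
 * it, wrapping at the end of the ring. Returns ERR_PTR(-ENOMEM) when the
 * ring is full.
 */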
static void *safexcel_ring_next_wptr(struct safexcel_crypto_priv *priv,
				     struct safexcel_desc_ring *ring)
{
	void *ptr = ring->write;

	if ((ring->write == ring->read - ring->offset) ||
	    (ring->read == ring->base && ring->write == ring->base_end))
		return ERR_PTR(-ENOMEM);

	if (ring->write == ring->base_end)
		ring->write = ring->base;
	else
		ring->write += ring->offset;

	return ptr;
}

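/*
 * Consume the next pending slot: return the current read pointer and advance
 * it, wrapping at the end of the ring. Returns ERR_PTR(-ENOENT) when the
 * ring is empty.
 */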
void *safexcel_ring_next_rptr(struct safexcel_crypto_priv *priv,
			      struct safexcel_desc_ring *ring)
{
	void *ptr = ring->read;

	if (ring->write == ring->read)
		return ERR_PTR(-ENOENT);

	if (ring->read == ring->base_end)
		ring->read = ring->base;
	else
		ring->read += ring->offset;

	return ptr;
}

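/* Return the result ring's current read pointer without advancing it. */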
inline void *safexcel_ring_curr_rptr(struct safexcel_crypto_priv *priv,
				     int ring)
{
	struct safexcel_desc_ring *rdr = &priv->ring[ring].rdr;

	return rdr->read;
}

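/* Convert the result ring's current read pointer into a descriptor index. */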
inline int safexcel_ring_first_rdr_index(struct safexcel_crypto_priv *priv,
					 int ring)
{
	struct safexcel_desc_ring *rdr = &priv->ring[ring].rdr;

	return (rdr->read - rdr->base) / rdr->offset;
}

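/* Convert a result descriptor pointer into its index within the result ring. */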
inline int safexcel_ring_rdr_rdesc_index(struct safexcel_crypto_priv *priv,
					 int ring,
					 struct safexcel_result_desc *rdesc)
{
	struct safexcel_desc_ring *rdr = &priv->ring[ring].rdr;

	return ((void *)rdesc - rdr->base) / rdr->offset;
}

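/*
 * Undo the last write pointer advance (e.g. when setting up the matching
 * descriptor failed); a no-op when the ring is empty.
 */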
void safexcel_ring_rollback_wptr(struct safexcel_crypto_priv *priv,
				 struct safexcel_desc_ring *ring)
{
	if (ring->write == ring->read)
		return;

	if (ring->write == ring->base)
		ring->write = ring->base_end;
	else
		ring->write -= ring->offset;
}

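/*
 * Reserve and fill a command descriptor. For the first segment of a request
 * with a context pointer, also program the control data (packet length,
 * options, context address) and clear the token area with no-op tokens.
 */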
struct safexcel_command_desc *safexcel_add_cdesc(struct safexcel_crypto_priv *priv,
						 int ring_id,
						 bool first, bool last,
						 dma_addr_t data, u32 data_len,
						 u32 full_data_len,
						 dma_addr_t context)
{
	struct safexcel_command_desc *cdesc;
	int i;

	cdesc = safexcel_ring_next_wptr(priv, &priv->ring[ring_id].cdr);
	if (IS_ERR(cdesc))
		return cdesc;

	memset(cdesc, 0, sizeof(struct safexcel_command_desc));

	cdesc->first_seg = first;
	cdesc->last_seg = last;
	cdesc->particle_size = data_len;
	cdesc->data_lo = lower_32_bits(data);
	cdesc->data_hi = upper_32_bits(data);

	if (first && context) {
		struct safexcel_token *token =
			(struct safexcel_token *)cdesc->control_data.token;

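		/*
		 * Never program a packet length of 0; fall back to 1 when
		 * full_data_len is zero.
		 */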
		cdesc->control_data.packet_length = full_data_len ?: 1;
		cdesc->control_data.options = EIP197_OPTION_MAGIC_VALUE |
					      EIP197_OPTION_64BIT_CTX |
					      EIP197_OPTION_CTX_CTRL_IN_CMD;
		cdesc->control_data.context_lo =
			(lower_32_bits(context) & GENMASK(31, 2)) >> 2;
		cdesc->control_data.context_hi = upper_32_bits(context);

		if (priv->version == EIP197B_MRVL ||
		    priv->version == EIP197D_MRVL)
			cdesc->control_data.options |= EIP197_OPTION_RC_AUTO;

		cdesc->control_data.refresh = 2;

		for (i = 0; i < EIP197_MAX_TOKENS; i++)
			eip197_noop_token(&token[i]);
	}

	return cdesc;
}

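/* Reserve and fill a result descriptor for one output data segment. */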
struct safexcel_result_desc *safexcel_add_rdesc(struct safexcel_crypto_priv *priv,
						int ring_id,
						bool first, bool last,
						dma_addr_t data, u32 len)
{
	struct safexcel_result_desc *rdesc;

	rdesc = safexcel_ring_next_wptr(priv, &priv->ring[ring_id].rdr);
	if (IS_ERR(rdesc))
		return rdesc;

	memset(rdesc, 0, sizeof(struct safexcel_result_desc));

	rdesc->first_seg = first;
	rdesc->last_seg = last;
	rdesc->particle_size = len;
	rdesc->data_lo = lower_32_bits(data);
	rdesc->data_hi = upper_32_bits(data);

	return rdesc;
}