This source file includes following definitions.
- cryp_wait_until_done
- cryp_check
- cryp_activity
- cryp_flush_inoutfifo
- cryp_set_configuration
- cryp_configure_protection
- cryp_is_logic_busy
- cryp_configure_for_dma
- cryp_configure_key_values
- cryp_configure_init_vector
- cryp_save_device_context
- cryp_restore_device_context
1
2
3
4
5
6
7
8
9
10
11 #include <linux/errno.h>
12 #include <linux/kernel.h>
13 #include <linux/types.h>
14
15 #include "cryp_p.h"
16 #include "cryp.h"
17
18
19
20
/**
 * cryp_wait_until_done - busy-wait until the CRYP logic is idle.
 * @device_data: device data with the register base to poll.
 *
 * Spins (with cpu_relax()) while the status register reports busy.
 */
void cryp_wait_until_done(struct cryp_device_data *device_data)
{
	for (;;) {
		if (!cryp_is_logic_busy(device_data))
			break;
		cpu_relax();
	}
}
26
27
28
29
30
31 int cryp_check(struct cryp_device_data *device_data)
32 {
33 int peripheralid2 = 0;
34
35 if (NULL == device_data)
36 return -EINVAL;
37
38 peripheralid2 = readl_relaxed(&device_data->base->periphId2);
39
40 if (peripheralid2 != CRYP_PERIPHERAL_ID2_DB8500)
41 return -EPERM;
42
43
44 if ((CRYP_PERIPHERAL_ID0 ==
45 readl_relaxed(&device_data->base->periphId0))
46 && (CRYP_PERIPHERAL_ID1 ==
47 readl_relaxed(&device_data->base->periphId1))
48 && (CRYP_PERIPHERAL_ID3 ==
49 readl_relaxed(&device_data->base->periphId3))
50 && (CRYP_PCELL_ID0 ==
51 readl_relaxed(&device_data->base->pcellId0))
52 && (CRYP_PCELL_ID1 ==
53 readl_relaxed(&device_data->base->pcellId1))
54 && (CRYP_PCELL_ID2 ==
55 readl_relaxed(&device_data->base->pcellId2))
56 && (CRYP_PCELL_ID3 ==
57 readl_relaxed(&device_data->base->pcellId3))) {
58 return 0;
59 }
60
61 return -EPERM;
62 }
63
64
65
66
67
68
/**
 * cryp_activity - enable or disable the CRYP logic.
 * @device_data: device data with the register base.
 * @cryp_crypen: CRYP_CRYPEN_ENABLE or CRYP_CRYPEN_DISABLE.
 *
 * Writes the CRYPEN bitfield in the control register; all other CR
 * bits are left untouched.
 */
void cryp_activity(struct cryp_device_data *device_data,
		   enum cryp_crypen cryp_crypen)
{
	CRYP_PUT_BITS(&device_data->base->cr,
		      cryp_crypen,
		      CRYP_CR_CRYPEN_POS,
		      CRYP_CR_CRYPEN_MASK);
}
77
78
79
80
81
/**
 * cryp_flush_inoutfifo - flush both the input and output FIFOs.
 * @device_data: device data with the register base.
 *
 * The logic is disabled before flushing and the function then spins
 * until the status register reads back exactly "input FIFO ready",
 * i.e. both FIFOs are drained and the block is idle.
 */
void cryp_flush_inoutfifo(struct cryp_device_data *device_data)
{
	/*
	 * Disable the hardware and wait for it to go idle before
	 * flushing, so the flush is not racing an in-flight operation.
	 */
	cryp_activity(device_data, CRYP_CRYPEN_DISABLE);
	cryp_wait_until_done(device_data);

	/* Trigger the FIFO flush. */
	CRYP_SET_BITS(&device_data->base->cr, CRYP_CR_FFLUSH_MASK);

	/*
	 * Wait until SR contains only the INFIFO_READY flag — note this
	 * is a full-register equality test, not a bit test, so any other
	 * status bit (e.g. busy) keeps us polling.
	 */
	while (readl_relaxed(&device_data->base->sr) !=
	       CRYP_SR_INFIFO_READY_MASK)
		cpu_relax();
}
103
104
105
106
107
108
109
/**
 * cryp_set_configuration - build the control-register value for a job.
 * @device_data: device data with the register base.
 * @cryp_config: requested key size, algorithm mode and direction.
 * @control_register: in/out; requested bits are OR'ed into this value.
 *
 * For AES ECB/CBC decryption the hardware first needs a key schedule
 * run: the key is loaded with the key-schedule-enable (KSE) bit set
 * while configured for AES-ECB *encryption*, and we wait for that
 * preparation to finish before OR-ing the caller's real mode/direction
 * into @control_register.
 *
 * Returns 0 on success, -EINVAL on NULL arguments.
 */
int cryp_set_configuration(struct cryp_device_data *device_data,
			   struct cryp_config *cryp_config,
			   u32 *control_register)
{
	u32 cr_for_kse;

	if (NULL == device_data || NULL == cryp_config)
		return -EINVAL;

	*control_register |= (cryp_config->keysize << CRYP_CR_KEYSIZE_POS);

	/* Prepare the decryption key schedule for AES ECB/CBC decrypt. */
	if ((CRYP_ALGORITHM_DECRYPT == cryp_config->algodir) &&
	    ((CRYP_ALGO_AES_ECB == cryp_config->algomode) ||
	     (CRYP_ALGO_AES_CBC == cryp_config->algomode))) {
		cr_for_kse = *control_register;
		/*
		 * Key preparation runs with direction forced to ENCRYPT,
		 * mode forced to AES-ECB, the block enabled, and KSE set;
		 * this write kicks off the schedule computation.
		 */
		cr_for_kse |= ((CRYP_ALGORITHM_ENCRYPT << CRYP_CR_ALGODIR_POS) |
			       (CRYP_ALGO_AES_ECB << CRYP_CR_ALGOMODE_POS) |
			       (CRYP_CRYPEN_ENABLE << CRYP_CR_CRYPEN_POS) |
			       (KSE_ENABLED << CRYP_CR_KSE_POS));

		writel_relaxed(cr_for_kse, &device_data->base->cr);
		cryp_wait_until_done(device_data);
	}

	/* Now OR in the mode/direction the caller actually asked for. */
	*control_register |=
	    ((cryp_config->algomode << CRYP_CR_ALGOMODE_POS) |
	     (cryp_config->algodir << CRYP_CR_ALGODIR_POS));

	return 0;
}
157
158
159
160
161
162
163
164 int cryp_configure_protection(struct cryp_device_data *device_data,
165 struct cryp_protection_config *p_protect_config)
166 {
167 if (NULL == p_protect_config)
168 return -EINVAL;
169
170 CRYP_WRITE_BIT(&device_data->base->cr,
171 (u32) p_protect_config->secure_access,
172 CRYP_CR_SECURE_MASK);
173 CRYP_PUT_BITS(&device_data->base->cr,
174 p_protect_config->privilege_access,
175 CRYP_CR_PRLG_POS,
176 CRYP_CR_PRLG_MASK);
177
178 return 0;
179 }
180
181
182
183
184
/**
 * cryp_is_logic_busy - test the BUSY flag in the status register.
 * @device_data: device data with the register base.
 *
 * Returns non-zero while the CRYP logic is busy, 0 when idle.
 */
int cryp_is_logic_busy(struct cryp_device_data *device_data)
{
	return CRYP_TEST_BITS(&device_data->base->sr,
			      CRYP_SR_BUSY_MASK);
}
190
191
192
193
194
195
/**
 * cryp_configure_for_dma - enable DMA request lines.
 * @device_data: device data with the register base.
 * @dma_req: DMA request mask to set (in/out enable bits).
 *
 * Sets (OR-style, does not clear) the requested bits in the DMA
 * control register.
 */
void cryp_configure_for_dma(struct cryp_device_data *device_data,
			    enum cryp_dma_req_type dma_req)
{
	CRYP_SET_BITS(&device_data->base->dmacr,
		      (u32) dma_req);
}
202
203
204
205
206
207
208
209 int cryp_configure_key_values(struct cryp_device_data *device_data,
210 enum cryp_key_reg_index key_reg_index,
211 struct cryp_key_value key_value)
212 {
213 while (cryp_is_logic_busy(device_data))
214 cpu_relax();
215
216 switch (key_reg_index) {
217 case CRYP_KEY_REG_1:
218 writel_relaxed(key_value.key_value_left,
219 &device_data->base->key_1_l);
220 writel_relaxed(key_value.key_value_right,
221 &device_data->base->key_1_r);
222 break;
223 case CRYP_KEY_REG_2:
224 writel_relaxed(key_value.key_value_left,
225 &device_data->base->key_2_l);
226 writel_relaxed(key_value.key_value_right,
227 &device_data->base->key_2_r);
228 break;
229 case CRYP_KEY_REG_3:
230 writel_relaxed(key_value.key_value_left,
231 &device_data->base->key_3_l);
232 writel_relaxed(key_value.key_value_right,
233 &device_data->base->key_3_r);
234 break;
235 case CRYP_KEY_REG_4:
236 writel_relaxed(key_value.key_value_left,
237 &device_data->base->key_4_l);
238 writel_relaxed(key_value.key_value_right,
239 &device_data->base->key_4_r);
240 break;
241 default:
242 return -EINVAL;
243 }
244
245 return 0;
246 }
247
248
249
250
251
252
253
254 int cryp_configure_init_vector(struct cryp_device_data *device_data,
255 enum cryp_init_vector_index
256 init_vector_index,
257 struct cryp_init_vector_value
258 init_vector_value)
259 {
260 while (cryp_is_logic_busy(device_data))
261 cpu_relax();
262
263 switch (init_vector_index) {
264 case CRYP_INIT_VECTOR_INDEX_0:
265 writel_relaxed(init_vector_value.init_value_left,
266 &device_data->base->init_vect_0_l);
267 writel_relaxed(init_vector_value.init_value_right,
268 &device_data->base->init_vect_0_r);
269 break;
270 case CRYP_INIT_VECTOR_INDEX_1:
271 writel_relaxed(init_vector_value.init_value_left,
272 &device_data->base->init_vect_1_l);
273 writel_relaxed(init_vector_value.init_value_right,
274 &device_data->base->init_vect_1_r);
275 break;
276 default:
277 return -EINVAL;
278 }
279
280 return 0;
281 }
282
283
284
285
286
287
288
/**
 * cryp_save_device_context - snapshot hardware state for a context switch.
 * @device_data: device data with the register base and current context.
 * @ctx: storage for the saved register values.
 * @cryp_mode: transfer mode; CRYP_MODE_DMA needs DMA requests disabled.
 *
 * Disables the logic, waits for it to drain, then copies the control
 * register, key registers (as many as the configured key size uses),
 * pending input data and — for CBC modes — the IV registers into @ctx.
 */
void cryp_save_device_context(struct cryp_device_data *device_data,
			      struct cryp_device_context *ctx,
			      int cryp_mode)
{
	enum cryp_algo_mode algomode;
	struct cryp_register __iomem *src_reg = device_data->base;
	struct cryp_config *config =
		(struct cryp_config *)device_data->current_ctx;

	/*
	 * Stop the hardware and wait for it to finish before reading
	 * back registers, so the snapshot is consistent.
	 */
	cryp_activity(device_data, CRYP_CRYPEN_DISABLE);
	cryp_wait_until_done(device_data);

	if (cryp_mode == CRYP_MODE_DMA)
		cryp_configure_for_dma(device_data, CRYP_DMA_DISABLE_BOTH);

	/* If the input FIFO is not empty, save the pending word. */
	if (CRYP_TEST_BITS(&src_reg->sr, CRYP_SR_IFEM_MASK) == 0)
		ctx->din = readl_relaxed(&src_reg->din);

	ctx->cr = readl_relaxed(&src_reg->cr) & CRYP_CR_CONTEXT_SAVE_MASK;

	/*
	 * Save key registers. Intentional fall-through: larger key
	 * sizes also use the lower-numbered key registers.
	 */
	switch (config->keysize) {
	case CRYP_KEY_SIZE_256:
		ctx->key_4_l = readl_relaxed(&src_reg->key_4_l);
		ctx->key_4_r = readl_relaxed(&src_reg->key_4_r);
		/* Fall through */

	case CRYP_KEY_SIZE_192:
		ctx->key_3_l = readl_relaxed(&src_reg->key_3_l);
		ctx->key_3_r = readl_relaxed(&src_reg->key_3_r);
		/* Fall through */

	case CRYP_KEY_SIZE_128:
		ctx->key_2_l = readl_relaxed(&src_reg->key_2_l);
		ctx->key_2_r = readl_relaxed(&src_reg->key_2_r);
		/* Fall through */

	default:
		ctx->key_1_l = readl_relaxed(&src_reg->key_1_l);
		ctx->key_1_r = readl_relaxed(&src_reg->key_1_r);
	}

	/* Save the IV only for CBC modes, which carry chaining state. */
	algomode = ((ctx->cr & CRYP_CR_ALGOMODE_MASK) >> CRYP_CR_ALGOMODE_POS);
	if (algomode == CRYP_ALGO_TDES_CBC ||
	    algomode == CRYP_ALGO_DES_CBC ||
	    algomode == CRYP_ALGO_AES_CBC) {
		ctx->init_vect_0_l = readl_relaxed(&src_reg->init_vect_0_l);
		ctx->init_vect_0_r = readl_relaxed(&src_reg->init_vect_0_r);
		ctx->init_vect_1_l = readl_relaxed(&src_reg->init_vect_1_l);
		ctx->init_vect_1_r = readl_relaxed(&src_reg->init_vect_1_r);
	}
}
345
346
347
348
349
350
351
352 void cryp_restore_device_context(struct cryp_device_data *device_data,
353 struct cryp_device_context *ctx)
354 {
355 struct cryp_register __iomem *reg = device_data->base;
356 struct cryp_config *config =
357 (struct cryp_config *)device_data->current_ctx;
358
359
360
361
362
363 switch (config->keysize) {
364 case CRYP_KEY_SIZE_256:
365 writel_relaxed(ctx->key_4_l, ®->key_4_l);
366 writel_relaxed(ctx->key_4_r, ®->key_4_r);
367
368
369 case CRYP_KEY_SIZE_192:
370 writel_relaxed(ctx->key_3_l, ®->key_3_l);
371 writel_relaxed(ctx->key_3_r, ®->key_3_r);
372
373
374 case CRYP_KEY_SIZE_128:
375 writel_relaxed(ctx->key_2_l, ®->key_2_l);
376 writel_relaxed(ctx->key_2_r, ®->key_2_r);
377
378
379 default:
380 writel_relaxed(ctx->key_1_l, ®->key_1_l);
381 writel_relaxed(ctx->key_1_r, ®->key_1_r);
382 }
383
384
385 if (config->algomode == CRYP_ALGO_TDES_CBC ||
386 config->algomode == CRYP_ALGO_DES_CBC ||
387 config->algomode == CRYP_ALGO_AES_CBC) {
388 writel_relaxed(ctx->init_vect_0_l, ®->init_vect_0_l);
389 writel_relaxed(ctx->init_vect_0_r, ®->init_vect_0_r);
390 writel_relaxed(ctx->init_vect_1_l, ®->init_vect_1_l);
391 writel_relaxed(ctx->init_vect_1_r, ®->init_vect_1_r);
392 }
393 }