This source file includes the following definitions (a usage sketch of the registered "crc32" shash follows the list):
- stm32_crc32_cra_init
- stm32_crc32c_cra_init
- stm32_crc_setkey
- stm32_crc_init
- stm32_crc_update
- stm32_crc_final
- stm32_crc_finup
- stm32_crc_digest
- stm32_crc_probe
- stm32_crc_remove
- stm32_crc_runtime_suspend
- stm32_crc_runtime_resume
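
A usage sketch (not part of this source file): a kernel consumer can exercise the "crc32" shash registered below through the generic crypto API. The helper name example_crc32() is hypothetical; the 4-byte digest is stored little-endian, matching the put_unaligned_le32() call in stm32_crc_final().

#include <crypto/hash.h>
#include <linux/err.h>

/* Hypothetical consumer: one-shot CRC32 of 'buf' via the shash API. */
static int example_crc32(const u8 *buf, unsigned int len, u8 out[4])
{
	struct crypto_shash *tfm;
	int ret;

	/* Resolves to the highest-priority "crc32" provider. */
	tfm = crypto_alloc_shash("crc32", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	{
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		/* init + update + final in a single call */
		ret = crypto_shash_digest(desc, buf, len, out);
	}

	crypto_free_shash(tfm);
	return ret;
}

When this driver is bound, the allocation normally picks it over the generic software implementation because of its higher cra_priority (200).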
/*
 * STMicroelectronics STM32 CRC32 hardware driver
 * Author: Fabien Dessenne <fabien.dessenne@st.com>
 */

#include <linux/bitrev.h>
#include <linux/clk.h>
#include <linux/crc32poly.h>
#include <linux/module.h>
#include <linux/mod_devicetable.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>

#include <crypto/internal/hash.h>

#include <asm/unaligned.h>

#define DRIVER_NAME		"stm32-crc32"
#define CHKSUM_DIGEST_SIZE	4
#define CHKSUM_BLOCK_SIZE	1

/* Registers */
#define CRC_DR			0x00000000
#define CRC_CR			0x00000008
#define CRC_INIT		0x00000010
#define CRC_POL			0x00000014

/* Register bits */
#define CRC_CR_RESET		BIT(0)
#define CRC_CR_REVERSE		(BIT(7) | BIT(6) | BIT(5))
#define CRC_INIT_DEFAULT	0xFFFFFFFF

#define CRC_AUTOSUSPEND_DELAY	50

struct stm32_crc {
	struct list_head list;
	struct device *dev;
	void __iomem *regs;
	struct clk *clk;
	u8 pending_data[sizeof(u32)];
	size_t nb_pending_bytes;
};

struct stm32_crc_list {
	struct list_head dev_list;
	spinlock_t lock; /* protect dev_list */
};

static struct stm32_crc_list crc_list = {
	.dev_list = LIST_HEAD_INIT(crc_list.dev_list),
	.lock = __SPIN_LOCK_UNLOCKED(crc_list.lock),
};

struct stm32_crc_ctx {
	u32 key;
	u32 poly;
};

struct stm32_crc_desc_ctx {
	u32 partial; /* intermediate CRC value read back from CRC_DR */
	struct stm32_crc *crc;
};

static int stm32_crc32_cra_init(struct crypto_tfm *tfm)
{
	struct stm32_crc_ctx *mctx = crypto_tfm_ctx(tfm);

	mctx->key = CRC_INIT_DEFAULT;
	mctx->poly = CRC32_POLY_LE;
	return 0;
}

static int stm32_crc32c_cra_init(struct crypto_tfm *tfm)
{
	struct stm32_crc_ctx *mctx = crypto_tfm_ctx(tfm);

	mctx->key = CRC_INIT_DEFAULT;
	mctx->poly = CRC32C_POLY_LE;
	return 0;
}

static int stm32_crc_setkey(struct crypto_shash *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct stm32_crc_ctx *mctx = crypto_shash_ctx(tfm);

	if (keylen != sizeof(u32)) {
		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	mctx->key = get_unaligned_le32(key);
	return 0;
}

static int stm32_crc_init(struct shash_desc *desc)
{
	struct stm32_crc_desc_ctx *ctx = shash_desc_ctx(desc);
	struct stm32_crc_ctx *mctx = crypto_shash_ctx(desc->tfm);
	struct stm32_crc *crc;

	/* Pick the first registered CRC device */
	spin_lock_bh(&crc_list.lock);
	list_for_each_entry(crc, &crc_list.dev_list, list) {
		ctx->crc = crc;
		break;
	}
	spin_unlock_bh(&crc_list.lock);

	pm_runtime_get_sync(ctx->crc->dev);

	/* Reset, set key, poly and configure in bit reverse mode */
	writel_relaxed(bitrev32(mctx->key), ctx->crc->regs + CRC_INIT);
	writel_relaxed(bitrev32(mctx->poly), ctx->crc->regs + CRC_POL);
	writel_relaxed(CRC_CR_RESET | CRC_CR_REVERSE, ctx->crc->regs + CRC_CR);

	/* Store partial result */
	ctx->partial = readl_relaxed(ctx->crc->regs + CRC_DR);
	ctx->crc->nb_pending_bytes = 0;

	pm_runtime_mark_last_busy(ctx->crc->dev);
	pm_runtime_put_autosuspend(ctx->crc->dev);

	return 0;
}

static int stm32_crc_update(struct shash_desc *desc, const u8 *d8,
			    unsigned int length)
{
	struct stm32_crc_desc_ctx *ctx = shash_desc_ctx(desc);
	struct stm32_crc *crc = ctx->crc;
	u32 *d32;
	unsigned int i;

	pm_runtime_get_sync(crc->dev);

	if (unlikely(crc->nb_pending_bytes)) {
		while (crc->nb_pending_bytes != sizeof(u32) && length) {
			/* Fill in pending data */
			crc->pending_data[crc->nb_pending_bytes++] = *(d8++);
			length--;
		}

		if (crc->nb_pending_bytes == sizeof(u32)) {
			/* Process completed pending data */
			writel_relaxed(*(u32 *)crc->pending_data,
				       crc->regs + CRC_DR);
			crc->nb_pending_bytes = 0;
		}
	}

	d32 = (u32 *)d8;
	for (i = 0; i < length >> 2; i++)
		/* Process 32-bit aligned data */
		writel_relaxed(*(d32++), crc->regs + CRC_DR);

	/* Store partial result */
	ctx->partial = readl_relaxed(crc->regs + CRC_DR);

	pm_runtime_mark_last_busy(crc->dev);
	pm_runtime_put_autosuspend(crc->dev);

	/* Check for pending data (non 32-bit) */
	length &= 3;
	if (likely(!length))
		return 0;

	if ((crc->nb_pending_bytes + length) >= sizeof(u32)) {
		/* Shall not happen */
		dev_err(crc->dev, "Pending data overflow\n");
		return -EINVAL;
	}

	d8 = (const u8 *)d32;
	for (i = 0; i < length; i++)
		/* Store pending data */
		crc->pending_data[crc->nb_pending_bytes++] = *(d8++);

	return 0;
}

static int stm32_crc_final(struct shash_desc *desc, u8 *out)
{
	struct stm32_crc_desc_ctx *ctx = shash_desc_ctx(desc);
	struct stm32_crc_ctx *mctx = crypto_shash_ctx(desc->tfm);

	/* Send computed CRC (CRC32C is presented inverted) */
	put_unaligned_le32(mctx->poly == CRC32C_POLY_LE ?
			   ~ctx->partial : ctx->partial, out);

	return 0;
}

static int stm32_crc_finup(struct shash_desc *desc, const u8 *data,
			   unsigned int length, u8 *out)
{
	return stm32_crc_update(desc, data, length) ?:
	       stm32_crc_final(desc, out);
}

static int stm32_crc_digest(struct shash_desc *desc, const u8 *data,
			    unsigned int length, u8 *out)
{
	return stm32_crc_init(desc) ?: stm32_crc_finup(desc, data, length, out);
}

static struct shash_alg algs[] = {
	/* CRC-32 */
	{
		.setkey = stm32_crc_setkey,
		.init = stm32_crc_init,
		.update = stm32_crc_update,
		.final = stm32_crc_final,
		.finup = stm32_crc_finup,
		.digest = stm32_crc_digest,
		.descsize = sizeof(struct stm32_crc_desc_ctx),
		.digestsize = CHKSUM_DIGEST_SIZE,
		.base = {
			.cra_name = "crc32",
			.cra_driver_name = DRIVER_NAME,
			.cra_priority = 200,
			.cra_flags = CRYPTO_ALG_OPTIONAL_KEY,
			.cra_blocksize = CHKSUM_BLOCK_SIZE,
			.cra_alignmask = 3,
			.cra_ctxsize = sizeof(struct stm32_crc_ctx),
			.cra_module = THIS_MODULE,
			.cra_init = stm32_crc32_cra_init,
		}
	},
	/* CRC-32 (Castagnoli) */
	{
		.setkey = stm32_crc_setkey,
		.init = stm32_crc_init,
		.update = stm32_crc_update,
		.final = stm32_crc_final,
		.finup = stm32_crc_finup,
		.digest = stm32_crc_digest,
		.descsize = sizeof(struct stm32_crc_desc_ctx),
		.digestsize = CHKSUM_DIGEST_SIZE,
		.base = {
			.cra_name = "crc32c",
			.cra_driver_name = DRIVER_NAME,
			.cra_priority = 200,
			.cra_flags = CRYPTO_ALG_OPTIONAL_KEY,
			.cra_blocksize = CHKSUM_BLOCK_SIZE,
			.cra_alignmask = 3,
			.cra_ctxsize = sizeof(struct stm32_crc_ctx),
			.cra_module = THIS_MODULE,
			.cra_init = stm32_crc32c_cra_init,
		}
	}
};

static int stm32_crc_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct stm32_crc *crc;
	int ret;

	crc = devm_kzalloc(dev, sizeof(*crc), GFP_KERNEL);
	if (!crc)
		return -ENOMEM;

	crc->dev = dev;

	crc->regs = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(crc->regs)) {
		dev_err(dev, "Cannot map CRC IO\n");
		return PTR_ERR(crc->regs);
	}

	crc->clk = devm_clk_get(dev, NULL);
	if (IS_ERR(crc->clk)) {
		dev_err(dev, "Could not get clock\n");
		return PTR_ERR(crc->clk);
	}

	ret = clk_prepare_enable(crc->clk);
	if (ret) {
		dev_err(crc->dev, "Failed to enable clock\n");
		return ret;
	}

	pm_runtime_set_autosuspend_delay(dev, CRC_AUTOSUSPEND_DELAY);
	pm_runtime_use_autosuspend(dev);

	pm_runtime_get_noresume(dev);
	pm_runtime_set_active(dev);
	pm_runtime_enable(dev);

	platform_set_drvdata(pdev, crc);

	spin_lock(&crc_list.lock);
	list_add(&crc->list, &crc_list.dev_list);
	spin_unlock(&crc_list.lock);

	ret = crypto_register_shashes(algs, ARRAY_SIZE(algs));
	if (ret) {
		dev_err(dev, "Failed to register\n");
		clk_disable_unprepare(crc->clk);
		return ret;
	}

	dev_info(dev, "Initialized\n");

	pm_runtime_put_sync(dev);

	return 0;
}

static int stm32_crc_remove(struct platform_device *pdev)
{
	struct stm32_crc *crc = platform_get_drvdata(pdev);
	int ret = pm_runtime_get_sync(crc->dev);

	if (ret < 0)
		return ret;

	spin_lock(&crc_list.lock);
	list_del(&crc->list);
	spin_unlock(&crc_list.lock);

	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));

	pm_runtime_disable(crc->dev);
	pm_runtime_put_noidle(crc->dev);

	clk_disable_unprepare(crc->clk);

	return 0;
}

#ifdef CONFIG_PM
static int stm32_crc_runtime_suspend(struct device *dev)
{
	struct stm32_crc *crc = dev_get_drvdata(dev);

	clk_disable_unprepare(crc->clk);

	return 0;
}

static int stm32_crc_runtime_resume(struct device *dev)
{
	struct stm32_crc *crc = dev_get_drvdata(dev);
	int ret;

	ret = clk_prepare_enable(crc->clk);
	if (ret) {
		dev_err(crc->dev, "Failed to prepare_enable clock\n");
		return ret;
	}

	return 0;
}
#endif

static const struct dev_pm_ops stm32_crc_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
				pm_runtime_force_resume)
	SET_RUNTIME_PM_OPS(stm32_crc_runtime_suspend,
			   stm32_crc_runtime_resume, NULL)
};

static const struct of_device_id stm32_dt_ids[] = {
	{ .compatible = "st,stm32f7-crc", },
	{},
};
MODULE_DEVICE_TABLE(of, stm32_dt_ids);

static struct platform_driver stm32_crc_driver = {
	.probe = stm32_crc_probe,
	.remove = stm32_crc_remove,
	.driver = {
		.name = DRIVER_NAME,
		.pm = &stm32_crc_pm_ops,
		.of_match_table = stm32_dt_ids,
	},
};

module_platform_driver(stm32_crc_driver);

MODULE_AUTHOR("Fabien Dessenne <fabien.dessenne@st.com>");
MODULE_DESCRIPTION("STMicroelectronics STM32 CRC32 hardware driver");
MODULE_LICENSE("GPL");