Lines Matching refs:cpg (all references to the driver-global state pointer cpg in the Marvell CESA driver, drivers/crypto/mv_cesa.c; each hit shows the source line, the matching code, and the enclosing function)
108 static struct crypto_priv *cpg; variable
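Every field dereferenced through cpg in the hits below can be folded into one state structure. The following reconstruction uses only members that actually appear in this listing; exact types, member order, and anything else in the real struct crypto_priv are assumptions.

    /* Engine state behind the cpg singleton, reconstructed from the
     * references in this listing; types and ordering are guesses. */
    struct crypto_priv {
        void __iomem *reg;            /* MMIO registers (SEC_ACCEL_*) */
        void __iomem *sram;           /* on-chip SRAM staging window */
        spinlock_t lock;              /* protects queue */
        struct crypto_queue queue;    /* pending async requests */
        struct task_struct *queue_th; /* queue_manag() kthread */
        struct crypto_async_request *cur_req; /* request being run */
        struct req_progress p;        /* per-request chunking state */
        enum engine_state eng_st;     /* ENGINE_IDLE/BUSY/W_DEQUEUE */
        struct timer_list completion_timer; /* completion watchdog */
        int max_req_size;             /* SRAM-limited chunk size */
        int has_sha1;                 /* hardware SHA1 support */
        int has_hmac_sha1;            /* hardware HMAC-SHA1 support */
    };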
153 int active = readl(cpg->reg + SEC_ACCEL_CMD) & SEC_CMD_EN_SEC_ACCL0; in mv_completion_timer_callback()
159 del_timer(&cpg->completion_timer); in mv_completion_timer_callback()
160 writel(SEC_CMD_DISABLE_SEC, cpg->reg + SEC_ACCEL_CMD); in mv_completion_timer_callback()
161 while(readl(cpg->reg + SEC_ACCEL_CMD) & SEC_CMD_DISABLE_SEC) in mv_completion_timer_callback()
163 cpg->eng_st = ENGINE_W_DEQUEUE; in mv_completion_timer_callback()
164 wake_up_process(cpg->queue_th); in mv_completion_timer_callback()
169 setup_timer(&cpg->completion_timer, &mv_completion_timer_callback, 0); in mv_setup_timer()
170 mod_timer(&cpg->completion_timer, in mv_setup_timer()
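The mv_completion_timer_callback() and mv_setup_timer() hits describe a completion watchdog: mv_setup_timer() arms a one-shot timer before the engine is kicked, and the callback fires only if the done interrupt never arrives. A sketch under that reading; the MV_CESA printk prefix, the error message, and the 500 ms MV_CESA_EXPIRE value are assumptions, and the pre-4.15 setup_timer() API is kept to match the listing:

    #define MV_CESA        "MV-CESA:" /* assumed printk prefix */
    #define MV_CESA_EXPIRE 500        /* msec, assumed watchdog interval */

    static void mv_completion_timer_callback(unsigned long unused)
    {
        int active = readl(cpg->reg + SEC_ACCEL_CMD) & SEC_CMD_EN_SEC_ACCL0;

        printk(KERN_ERR MV_CESA "completion timer expired (CESA %sactive)\n",
               active ? "" : "in");

        del_timer(&cpg->completion_timer);
        /* force the accelerator off and spin until it acknowledges */
        writel(SEC_CMD_DISABLE_SEC, cpg->reg + SEC_ACCEL_CMD);
        while (readl(cpg->reg + SEC_ACCEL_CMD) & SEC_CMD_DISABLE_SEC)
            printk(KERN_INFO "%s: waiting for engine finishing\n", __func__);
        /* hand the stalled request back to the queue thread */
        cpg->eng_st = ENGINE_W_DEQUEUE;
        wake_up_process(cpg->queue_th);
    }

    static void mv_setup_timer(void)
    {
        setup_timer(&cpg->completion_timer, &mv_completion_timer_callback, 0);
        mod_timer(&cpg->completion_timer,
                  jiffies + msecs_to_jiffies(MV_CESA_EXPIRE));
    }

On the normal path, crypto_int() further down disarms this timer with del_timer() before the request is dequeued.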
250 struct req_progress *p = &cpg->p; in setup_data_in()
252 min(p->hw_nbytes - p->hw_processed_bytes, cpg->max_req_size); in setup_data_in()
253 copy_src_to_buf(p, cpg->sram + SRAM_DATA_IN_START + p->crypt_len, in setup_data_in()
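The setup_data_in() hits show the chunking step that lets requests exceed the SRAM window: clamp the unprocessed byte count to max_req_size, then copy only the part not already staged. A sketch; the final update of p->crypt_len is an assumption needed to make the bookkeeping close:

    static void setup_data_in(void)
    {
        struct req_progress *p = &cpg->p;
        int data_in_sram =
            min(p->hw_nbytes - p->hw_processed_bytes, cpg->max_req_size);

        /* top up the SRAM input window past what is already staged */
        copy_src_to_buf(p, cpg->sram + SRAM_DATA_IN_START + p->crypt_len,
                        data_in_sram - p->crypt_len);
        p->crypt_len = data_in_sram; /* assumed: record staged length */
    }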
260 struct ablkcipher_request *req = ablkcipher_request_cast(cpg->cur_req); in mv_process_current_q()
275 memcpy(cpg->sram + SRAM_DATA_IV, req->info, 16); in mv_process_current_q()
280 memcpy(cpg->sram + SRAM_DATA_KEY_P, ctx->aes_dec_key, in mv_process_current_q()
284 memcpy(cpg->sram + SRAM_DATA_KEY_P, ctx->aes_enc_key, in mv_process_current_q()
304 op.enc_len = cpg->p.crypt_len; in mv_process_current_q()
305 memcpy(cpg->sram + SRAM_CONFIG, &op, in mv_process_current_q()
310 writel(SEC_CMD_EN_SEC_ACCL0, cpg->reg + SEC_ACCEL_CMD); in mv_process_current_q()
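Read together, the mv_process_current_q() hits stage one cipher chunk: the IV and the direction-appropriate AES key go into fixed SRAM slots, a sec_accel_config descriptor is written at SRAM_CONFIG, and the accelerator is started. A condensed sketch; the mv_ctx/mv_req_ctx names, the CBC/decrypt tests, and the descriptor fields beyond enc_len are assumptions:

    static void mv_process_current_q(int first_block)
    {
        struct ablkcipher_request *req = ablkcipher_request_cast(cpg->cur_req);
        struct mv_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
        struct mv_req_ctx *req_ctx = ablkcipher_request_ctx(req);
        struct sec_accel_config op = { .config = CFG_OP_CRYPT_ONLY };

        if (req_ctx->op == COP_AES_CBC && first_block)
            memcpy(cpg->sram + SRAM_DATA_IV, req->info, 16);

        if (req_ctx->decrypt)
            memcpy(cpg->sram + SRAM_DATA_KEY_P, ctx->aes_dec_key,
                   AES_KEY_LEN);
        else
            memcpy(cpg->sram + SRAM_DATA_KEY_P, ctx->aes_enc_key,
                   AES_KEY_LEN);

        setup_data_in();
        op.enc_len = cpg->p.crypt_len;
        memcpy(cpg->sram + SRAM_CONFIG, &op,
               sizeof(struct sec_accel_config));

        mv_setup_timer();
        writel(SEC_CMD_EN_SEC_ACCL0, cpg->reg + SEC_ACCEL_CMD); /* kick */
    }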
315 struct ablkcipher_request *req = ablkcipher_request_cast(cpg->cur_req); in mv_crypto_algo_completion()
318 sg_miter_stop(&cpg->p.src_sg_it); in mv_crypto_algo_completion()
319 sg_miter_stop(&cpg->p.dst_sg_it); in mv_crypto_algo_completion()
324 memcpy(req->info, cpg->sram + SRAM_DATA_IV_BUF, 16); in mv_crypto_algo_completion()
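The cipher completion side is short: stop both scatterlist iterators, and for CBC copy the last ciphertext block out of SRAM_DATA_IV_BUF so chained requests see the updated IV. A sketch, with the mode test assumed:

    static void mv_crypto_algo_completion(void)
    {
        struct ablkcipher_request *req = ablkcipher_request_cast(cpg->cur_req);
        struct mv_req_ctx *req_ctx = ablkcipher_request_ctx(req);

        sg_miter_stop(&cpg->p.src_sg_it);
        sg_miter_stop(&cpg->p.dst_sg_it);

        if (req_ctx->op == COP_AES_CBC) /* assumed mode test */
            memcpy(req->info, cpg->sram + SRAM_DATA_IV_BUF, 16);
    }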
329 struct ahash_request *req = ahash_request_cast(cpg->cur_req); in mv_process_hash_current()
332 struct req_progress *p = &cpg->p; in mv_process_hash_current()
343 memcpy(cpg->sram + SRAM_HMAC_IV_IN, in mv_process_hash_current()
378 writel(req_ctx->state[0], cpg->reg + DIGEST_INITIAL_VAL_A); in mv_process_hash_current()
379 writel(req_ctx->state[1], cpg->reg + DIGEST_INITIAL_VAL_B); in mv_process_hash_current()
380 writel(req_ctx->state[2], cpg->reg + DIGEST_INITIAL_VAL_C); in mv_process_hash_current()
381 writel(req_ctx->state[3], cpg->reg + DIGEST_INITIAL_VAL_D); in mv_process_hash_current()
382 writel(req_ctx->state[4], cpg->reg + DIGEST_INITIAL_VAL_E); in mv_process_hash_current()
386 memcpy(cpg->sram + SRAM_CONFIG, &op, sizeof(struct sec_accel_config)); in mv_process_hash_current()
390 writel(SEC_CMD_EN_SEC_ACCL0, cpg->reg + SEC_ACCEL_CMD); in mv_process_hash_current()
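The hash path mirrors the cipher path: for HMAC a precomputed inner IV is staged at SRAM_HMAC_IV_IN, a continued hash reloads its saved midstate into the five DIGEST_INITIAL_VAL registers, then a descriptor is written and the engine started. A condensed sketch; the context type names, the continuation test, and everything in the descriptor are assumptions:

    static void mv_process_hash_current(int first_block)
    {
        struct ahash_request *req = ahash_request_cast(cpg->cur_req);
        const struct mv_tfm_hash_ctx *tfm_ctx = crypto_tfm_ctx(req->base.tfm);
        struct mv_req_hash_ctx *req_ctx = ahash_request_ctx(req);
        struct req_progress *p = &cpg->p;
        struct sec_accel_config op = { .config = CFG_OP_MAC_ONLY };

        if (req_ctx->hmac) /* assumed: HMAC variants stage the inner IV */
            memcpy(cpg->sram + SRAM_HMAC_IV_IN,
                   tfm_ctx->ivs, sizeof(tfm_ctx->ivs));

        setup_data_in();

        if (!first_block) { /* assumed: reload midstate on continuation */
            writel(req_ctx->state[0], cpg->reg + DIGEST_INITIAL_VAL_A);
            writel(req_ctx->state[1], cpg->reg + DIGEST_INITIAL_VAL_B);
            writel(req_ctx->state[2], cpg->reg + DIGEST_INITIAL_VAL_C);
            writel(req_ctx->state[3], cpg->reg + DIGEST_INITIAL_VAL_D);
            writel(req_ctx->state[4], cpg->reg + DIGEST_INITIAL_VAL_E);
        }

        memcpy(cpg->sram + SRAM_CONFIG, &op, sizeof(struct sec_accel_config));

        mv_setup_timer();
        writel(SEC_CMD_EN_SEC_ACCL0, cpg->reg + SEC_ACCEL_CMD);
    }

The mv_save_digest_state() hits just below are the exact mirror: after a chunk completes, the same five registers are read back into ctx->state[] so the next chunk can resume where this one left off.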
433 ctx->state[0] = readl(cpg->reg + DIGEST_INITIAL_VAL_A); in mv_save_digest_state()
434 ctx->state[1] = readl(cpg->reg + DIGEST_INITIAL_VAL_B); in mv_save_digest_state()
435 ctx->state[2] = readl(cpg->reg + DIGEST_INITIAL_VAL_C); in mv_save_digest_state()
436 ctx->state[3] = readl(cpg->reg + DIGEST_INITIAL_VAL_D); in mv_save_digest_state()
437 ctx->state[4] = readl(cpg->reg + DIGEST_INITIAL_VAL_E); in mv_save_digest_state()
442 struct ahash_request *req = ahash_request_cast(cpg->cur_req); in mv_hash_algo_completion()
446 copy_src_to_buf(&cpg->p, ctx->buffer, ctx->extra_bytes); in mv_hash_algo_completion()
447 sg_miter_stop(&cpg->p.src_sg_it); in mv_hash_algo_completion()
451 memcpy(req->result, cpg->sram + SRAM_DIGEST_BUF, in mv_hash_algo_completion()
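On hash completion, any sub-block tail is saved into ctx->buffer for the next update, the source iterator is stopped, and either the finished digest is copied out of SRAM_DIGEST_BUF or the midstate is parked via mv_save_digest_state(). A sketch; the last_chunk/extra_bytes plumbing is assumed:

    static void mv_hash_algo_completion(void)
    {
        struct ahash_request *req = ahash_request_cast(cpg->cur_req);
        struct mv_req_hash_ctx *ctx = ahash_request_ctx(req);

        if (ctx->extra_bytes) /* stash the sub-block tail for later */
            copy_src_to_buf(&cpg->p, ctx->buffer, ctx->extra_bytes);
        sg_miter_stop(&cpg->p.src_sg_it);

        if (ctx->last_chunk) /* assumed: final chunk yields the digest */
            memcpy(req->result, cpg->sram + SRAM_DIGEST_BUF,
                   crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));
        else
            mv_save_digest_state(ctx);
    }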
465 struct crypto_async_request *req = cpg->cur_req; in dequeue_complete_req()
468 cpg->p.hw_processed_bytes += cpg->p.crypt_len; in dequeue_complete_req()
469 if (cpg->p.copy_back) { in dequeue_complete_req()
470 int need_copy_len = cpg->p.crypt_len; in dequeue_complete_req()
475 if (!cpg->p.sg_dst_left) { in dequeue_complete_req()
476 ret = sg_miter_next(&cpg->p.dst_sg_it); in dequeue_complete_req()
478 cpg->p.sg_dst_left = cpg->p.dst_sg_it.length; in dequeue_complete_req()
479 cpg->p.dst_start = 0; in dequeue_complete_req()
482 buf = cpg->p.dst_sg_it.addr; in dequeue_complete_req()
483 buf += cpg->p.dst_start; in dequeue_complete_req()
485 dst_copy = min(need_copy_len, cpg->p.sg_dst_left); in dequeue_complete_req()
488 cpg->sram + SRAM_DATA_OUT_START + sram_offset, in dequeue_complete_req()
491 cpg->p.sg_dst_left -= dst_copy; in dequeue_complete_req()
493 cpg->p.dst_start += dst_copy; in dequeue_complete_req()
497 cpg->p.crypt_len = 0; in dequeue_complete_req()
499 BUG_ON(cpg->eng_st != ENGINE_W_DEQUEUE); in dequeue_complete_req()
500 if (cpg->p.hw_processed_bytes < cpg->p.hw_nbytes) { in dequeue_complete_req()
502 cpg->eng_st = ENGINE_BUSY; in dequeue_complete_req()
503 cpg->p.process(0); in dequeue_complete_req()
505 cpg->p.complete(); in dequeue_complete_req()
506 cpg->eng_st = ENGINE_IDLE; in dequeue_complete_req()
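dequeue_complete_req() is the post-chunk bookkeeping: advance hw_processed_bytes, drain SRAM_DATA_OUT_START into the destination scatterlist via the sg_miter cursor, then either launch the next chunk (ENGINE_BUSY, p->process) or finish the request (p->complete, ENGINE_IDLE). The sketch below fills in the loop-termination arithmetic and the final async completion call, which are assumptions:

    static void dequeue_complete_req(void)
    {
        struct crypto_async_request *req = cpg->cur_req;
        void *buf;
        int ret;

        cpg->p.hw_processed_bytes += cpg->p.crypt_len;
        if (cpg->p.copy_back) {
            int need_copy_len = cpg->p.crypt_len;
            int sram_offset = 0;
            do {
                int dst_copy;

                if (!cpg->p.sg_dst_left) {
                    /* current dst segment drained, advance the cursor */
                    ret = sg_miter_next(&cpg->p.dst_sg_it);
                    BUG_ON(!ret);
                    cpg->p.sg_dst_left = cpg->p.dst_sg_it.length;
                    cpg->p.dst_start = 0;
                }

                buf = cpg->p.dst_sg_it.addr;
                buf += cpg->p.dst_start;

                dst_copy = min(need_copy_len, cpg->p.sg_dst_left);
                memcpy(buf,
                       cpg->sram + SRAM_DATA_OUT_START + sram_offset,
                       dst_copy);
                sram_offset += dst_copy;
                cpg->p.sg_dst_left -= dst_copy;
                need_copy_len -= dst_copy;
                cpg->p.dst_start += dst_copy;
            } while (need_copy_len > 0);
        }

        cpg->p.crypt_len = 0;

        BUG_ON(cpg->eng_st != ENGINE_W_DEQUEUE);
        if (cpg->p.hw_processed_bytes < cpg->p.hw_nbytes) {
            cpg->eng_st = ENGINE_BUSY;
            cpg->p.process(0);      /* next chunk of the same request */
        } else {
            cpg->p.complete();      /* algorithm-specific completion */
            cpg->eng_st = ENGINE_IDLE;
            req->complete(req, 0);  /* assumed: notify the requester */
        }
    }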
532 struct req_progress *p = &cpg->p; in mv_start_new_crypt_req()
535 cpg->cur_req = &req->base; in mv_start_new_crypt_req()
553 struct req_progress *p = &cpg->p; in mv_start_new_hash_req()
556 cpg->cur_req = &req->base; in mv_start_new_hash_req()
577 memcpy(cpg->sram + SRAM_DATA_IN_START, ctx->buffer, in mv_start_new_hash_req()
591 cpg->eng_st = ENGINE_IDLE; in mv_start_new_hash_req()
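Both mv_start_new_crypt_req() and mv_start_new_hash_req() follow the same pattern: record the request in cpg->cur_req, reset the req_progress state, point p->process/p->complete at the right handlers, start the scatterlist iterators, and run the first chunk. A sketch of the cipher variant; the count_sgs() helper and the exact set of progress fields initialized are assumptions:

    static void mv_start_new_crypt_req(struct ablkcipher_request *req)
    {
        struct req_progress *p = &cpg->p;
        int num_sgs;

        cpg->cur_req = &req->base;
        memset(p, 0, sizeof(struct req_progress));
        p->hw_nbytes = req->nbytes;
        p->complete = mv_crypto_algo_completion;
        p->process = mv_process_current_q;
        p->copy_back = 1;

        num_sgs = count_sgs(req->src, req->nbytes); /* assumed helper */
        sg_miter_start(&p->src_sg_it, req->src, num_sgs, SG_MITER_FROM_SG);
        num_sgs = count_sgs(req->dst, req->nbytes);
        sg_miter_start(&p->dst_sg_it, req->dst, num_sgs, SG_MITER_TO_SG);

        mv_process_current_q(1); /* stage and launch the first chunk */
    }

As the hash-side hit above shows, mv_start_new_hash_req() additionally pre-copies ctx->buffer (bytes left over from a previous update) into SRAM_DATA_IN_START before its first chunk.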
600 cpg->eng_st = ENGINE_IDLE; in queue_manag()
607 if (cpg->eng_st == ENGINE_W_DEQUEUE) in queue_manag()
610 spin_lock_irq(&cpg->lock); in queue_manag()
611 if (cpg->eng_st == ENGINE_IDLE) { in queue_manag()
612 backlog = crypto_get_backlog(&cpg->queue); in queue_manag()
613 async_req = crypto_dequeue_request(&cpg->queue); in queue_manag()
615 BUG_ON(cpg->eng_st != ENGINE_IDLE); in queue_manag()
616 cpg->eng_st = ENGINE_BUSY; in queue_manag()
619 spin_unlock_irq(&cpg->lock); in queue_manag()
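queue_manag() is the engine's only scheduler: a kthread that alternates between post-processing a finished chunk (ENGINE_W_DEQUEUE) and dispatching the next queued request under cpg->lock. A condensed sketch; the backlog notification and the hash-vs-cipher dispatch test are assumptions beyond what the hits show:

    static int queue_manag(void *data)
    {
        cpg->eng_st = ENGINE_IDLE;
        do {
            struct crypto_async_request *async_req = NULL;
            struct crypto_async_request *backlog = NULL;

            __set_current_state(TASK_INTERRUPTIBLE);

            if (cpg->eng_st == ENGINE_W_DEQUEUE)
                dequeue_complete_req(); /* finish the previous chunk */

            spin_lock_irq(&cpg->lock);
            if (cpg->eng_st == ENGINE_IDLE) {
                backlog = crypto_get_backlog(&cpg->queue);
                async_req = crypto_dequeue_request(&cpg->queue);
                if (async_req) {
                    BUG_ON(cpg->eng_st != ENGINE_IDLE);
                    cpg->eng_st = ENGINE_BUSY;
                }
            }
            spin_unlock_irq(&cpg->lock);

            if (backlog)
                backlog->complete(backlog, -EINPROGRESS);

            if (async_req) {
                /* assumed dispatch: hash requests vs cipher requests */
                if (crypto_tfm_alg_type(async_req->tfm) ==
                    CRYPTO_ALG_TYPE_AHASH)
                    mv_start_new_hash_req(ahash_request_cast(async_req));
                else
                    mv_start_new_crypt_req(
                        ablkcipher_request_cast(async_req));
            }

            schedule(); /* sleep until an irq or an enqueue wakes us */
        } while (!kthread_should_stop());
        return 0;
    }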
651 spin_lock_irqsave(&cpg->lock, flags); in mv_handle_req()
652 ret = crypto_enqueue_request(&cpg->queue, req); in mv_handle_req()
653 spin_unlock_irqrestore(&cpg->lock, flags); in mv_handle_req()
654 wake_up_process(cpg->queue_th); in mv_handle_req()
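The submission path is almost fully visible in the hits: take the lock with irqsave (callers may run in any context), enqueue, drop the lock, and poke the kthread. Only the signature and return plumbing are assumed:

    static int mv_handle_req(struct crypto_async_request *req)
    {
        unsigned long flags;
        int ret;

        spin_lock_irqsave(&cpg->lock, flags);
        ret = crypto_enqueue_request(&cpg->queue, req);
        spin_unlock_irqrestore(&cpg->lock, flags);
        wake_up_process(cpg->queue_th); /* let queue_manag() dispatch */
        return ret;
    }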
914 val = readl(cpg->reg + SEC_ACCEL_INT_STATUS); in crypto_int()
918 if (!del_timer(&cpg->completion_timer)) { in crypto_int()
923 writel(val, cpg->reg + FPGA_INT_STATUS); in crypto_int()
924 writel(val, cpg->reg + SEC_ACCEL_INT_STATUS); in crypto_int()
925 BUG_ON(cpg->eng_st != ENGINE_BUSY); in crypto_int()
926 cpg->eng_st = ENGINE_W_DEQUEUE; in crypto_int()
927 wake_up_process(cpg->queue_th); in crypto_int()
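The interrupt handler pairs with the watchdog above: disarm the timer (warning if it had already fired), acknowledge the done bit in both the FPGA and accelerator status registers, and hand the copy-back work to the kthread. A sketch; the SEC_INT_ACCEL0_DONE filter and the return values are assumptions consistent with the mask programmed in mv_probe():

    static irqreturn_t crypto_int(int irq, void *priv)
    {
        u32 val;

        val = readl(cpg->reg + SEC_ACCEL_INT_STATUS);
        if (!(val & SEC_INT_ACCEL0_DONE))
            return IRQ_NONE; /* not ours */

        if (!del_timer(&cpg->completion_timer))
            printk(KERN_ERR MV_CESA
                   "got an interrupt but no pending timer?\n");

        val &= SEC_INT_ACCEL0_DONE;
        writel(val, cpg->reg + FPGA_INT_STATUS);
        writel(val, cpg->reg + SEC_ACCEL_INT_STATUS); /* ack */
        BUG_ON(cpg->eng_st != ENGINE_BUSY);
        cpg->eng_st = ENGINE_W_DEQUEUE;
        wake_up_process(cpg->queue_th); /* defer copy-back to the kthread */
        return IRQ_HANDLED;
    }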
1067 if (cpg) { in mv_probe()
1105 cpg = cp; in mv_probe()
1124 writel(0, cpg->reg + SEC_ACCEL_INT_STATUS); in mv_probe()
1125 writel(SEC_INT_ACCEL0_DONE, cpg->reg + SEC_ACCEL_INT_MASK); in mv_probe()
1126 writel(SEC_CFG_STOP_DIG_ERR, cpg->reg + SEC_ACCEL_CFG); in mv_probe()
1127 writel(SRAM_CONFIG, cpg->reg + SEC_ACCEL_DESC_P0); in mv_probe()
1145 cpg->has_sha1 = 1; in mv_probe()
1151 cpg->has_hmac_sha1 = 1; in mv_probe()
1170 cpg = NULL; in mv_probe()
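The probe hits show cpg's lifecycle: reject a second device (the driver is a singleton), publish the fully built private struct, program the interrupt mask and descriptor pointer, record which hash engines exist, and reset cpg to NULL on the error path, just as mv_remove() does on teardown. The register sequence is pulled out as a helper purely for illustration; the helper name and comments are assumptions, since in the driver these writes sit inline in mv_probe():

    static void mv_init_engine_regs(void) /* hypothetical helper */
    {
        writel(0, cpg->reg + SEC_ACCEL_INT_STATUS);  /* ack stale irqs */
        writel(SEC_INT_ACCEL0_DONE, cpg->reg + SEC_ACCEL_INT_MASK);
        writel(SEC_CFG_STOP_DIG_ERR, cpg->reg + SEC_ACCEL_CFG);
        /* descriptor 0 points at the config block staged in SRAM */
        writel(SRAM_CONFIG, cpg->reg + SEC_ACCEL_DESC_P0);
    }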
1194 cpg = NULL; in mv_remove()