Lines matching references to hw (i40e AdminQ code)

33 static void i40e_resume_aq(struct i40e_hw *hw);
51 static void i40e_adminq_init_regs(struct i40e_hw *hw) in i40e_adminq_init_regs() argument
54 if (i40e_is_vf(hw)) { in i40e_adminq_init_regs()
55 hw->aq.asq.tail = I40E_VF_ATQT1; in i40e_adminq_init_regs()
56 hw->aq.asq.head = I40E_VF_ATQH1; in i40e_adminq_init_regs()
57 hw->aq.asq.len = I40E_VF_ATQLEN1; in i40e_adminq_init_regs()
58 hw->aq.asq.bal = I40E_VF_ATQBAL1; in i40e_adminq_init_regs()
59 hw->aq.asq.bah = I40E_VF_ATQBAH1; in i40e_adminq_init_regs()
60 hw->aq.arq.tail = I40E_VF_ARQT1; in i40e_adminq_init_regs()
61 hw->aq.arq.head = I40E_VF_ARQH1; in i40e_adminq_init_regs()
62 hw->aq.arq.len = I40E_VF_ARQLEN1; in i40e_adminq_init_regs()
63 hw->aq.arq.bal = I40E_VF_ARQBAL1; in i40e_adminq_init_regs()
64 hw->aq.arq.bah = I40E_VF_ARQBAH1; in i40e_adminq_init_regs()
66 hw->aq.asq.tail = I40E_PF_ATQT; in i40e_adminq_init_regs()
67 hw->aq.asq.head = I40E_PF_ATQH; in i40e_adminq_init_regs()
68 hw->aq.asq.len = I40E_PF_ATQLEN; in i40e_adminq_init_regs()
69 hw->aq.asq.bal = I40E_PF_ATQBAL; in i40e_adminq_init_regs()
70 hw->aq.asq.bah = I40E_PF_ATQBAH; in i40e_adminq_init_regs()
71 hw->aq.arq.tail = I40E_PF_ARQT; in i40e_adminq_init_regs()
72 hw->aq.arq.head = I40E_PF_ARQH; in i40e_adminq_init_regs()
73 hw->aq.arq.len = I40E_PF_ARQLEN; in i40e_adminq_init_regs()
74 hw->aq.arq.bal = I40E_PF_ARQBAL; in i40e_adminq_init_regs()
75 hw->aq.arq.bah = I40E_PF_ARQBAH; in i40e_adminq_init_regs()
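
Taken together, these i40e_adminq_init_regs() hits show the function only records which register offsets the rest of the AdminQ code should use, branching on i40e_is_vf(hw); the else keyword itself sits on an elided line. A minimal sketch of that branch, repeating just a couple of assignments from each side:

/* Sketch: pick VF or PF AdminQ register offsets once, so later
 * wr32()/rd32() calls can go through hw->aq.asq.* / hw->aq.arq.*
 * without caring which function type they run on.
 */
static void i40e_adminq_init_regs(struct i40e_hw *hw)
{
        if (i40e_is_vf(hw)) {
                hw->aq.asq.tail = I40E_VF_ATQT1;   /* VF mailbox registers */
                hw->aq.asq.head = I40E_VF_ATQH1;
                /* ...len/bal/bah and the arq.* offsets follow the same pattern */
        } else {
                hw->aq.asq.tail = I40E_PF_ATQT;    /* PF registers */
                hw->aq.asq.head = I40E_PF_ATQH;
                /* ...remaining asq/arq offsets assigned analogously */
        }
}
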
83 static i40e_status i40e_alloc_adminq_asq_ring(struct i40e_hw *hw) in i40e_alloc_adminq_asq_ring() argument
87 ret_code = i40e_allocate_dma_mem(hw, &hw->aq.asq.desc_buf, in i40e_alloc_adminq_asq_ring()
89 (hw->aq.num_asq_entries * in i40e_alloc_adminq_asq_ring()
95 ret_code = i40e_allocate_virt_mem(hw, &hw->aq.asq.cmd_buf, in i40e_alloc_adminq_asq_ring()
96 (hw->aq.num_asq_entries * in i40e_alloc_adminq_asq_ring()
99 i40e_free_dma_mem(hw, &hw->aq.asq.desc_buf); in i40e_alloc_adminq_asq_ring()
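
The i40e_alloc_adminq_asq_ring() hits above show a two-step allocation with a rollback: a DMA region for the descriptor ring, a host allocation for the per-command bookkeeping, and the DMA region freed again if the second step fails. A sketch of that shape, assuming the memory-type tag, element types, and alignment constant on the elided lines match the driver headers:

/* Sketch of the allocate-then-unwind pattern for the send-queue ring. */
static i40e_status i40e_alloc_adminq_asq_ring(struct i40e_hw *hw)
{
        i40e_status ret_code;

        /* descriptor ring must be DMA-able; sized by num_asq_entries */
        ret_code = i40e_allocate_dma_mem(hw, &hw->aq.asq.desc_buf,
                                         i40e_mem_atq_ring,           /* assumed tag */
                                         hw->aq.num_asq_entries *
                                                sizeof(struct i40e_aq_desc),
                                         I40E_ADMINQ_DESC_ALIGNMENT); /* assumed constant */
        if (ret_code)
                return ret_code;

        /* per-command details only need host memory */
        ret_code = i40e_allocate_virt_mem(hw, &hw->aq.asq.cmd_buf,
                                          hw->aq.num_asq_entries *
                                                sizeof(struct i40e_asq_cmd_details));
        if (ret_code)
                /* roll back the DMA allocation on partial failure */
                i40e_free_dma_mem(hw, &hw->aq.asq.desc_buf);

        return ret_code;
}
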
110 static i40e_status i40e_alloc_adminq_arq_ring(struct i40e_hw *hw) in i40e_alloc_adminq_arq_ring() argument
114 ret_code = i40e_allocate_dma_mem(hw, &hw->aq.arq.desc_buf, in i40e_alloc_adminq_arq_ring()
116 (hw->aq.num_arq_entries * in i40e_alloc_adminq_arq_ring()
130 static void i40e_free_adminq_asq(struct i40e_hw *hw) in i40e_free_adminq_asq() argument
132 i40e_free_dma_mem(hw, &hw->aq.asq.desc_buf); in i40e_free_adminq_asq()
142 static void i40e_free_adminq_arq(struct i40e_hw *hw) in i40e_free_adminq_arq() argument
144 i40e_free_dma_mem(hw, &hw->aq.arq.desc_buf); in i40e_free_adminq_arq()
151 static i40e_status i40e_alloc_arq_bufs(struct i40e_hw *hw) in i40e_alloc_arq_bufs() argument
163 ret_code = i40e_allocate_virt_mem(hw, &hw->aq.arq.dma_head, in i40e_alloc_arq_bufs()
164 (hw->aq.num_arq_entries * sizeof(struct i40e_dma_mem))); in i40e_alloc_arq_bufs()
167 hw->aq.arq.r.arq_bi = (struct i40e_dma_mem *)hw->aq.arq.dma_head.va; in i40e_alloc_arq_bufs()
170 for (i = 0; i < hw->aq.num_arq_entries; i++) { in i40e_alloc_arq_bufs()
171 bi = &hw->aq.arq.r.arq_bi[i]; in i40e_alloc_arq_bufs()
172 ret_code = i40e_allocate_dma_mem(hw, bi, in i40e_alloc_arq_bufs()
174 hw->aq.arq_buf_size, in i40e_alloc_arq_bufs()
180 desc = I40E_ADMINQ_DESC(hw->aq.arq, i); in i40e_alloc_arq_bufs()
183 if (hw->aq.arq_buf_size > I40E_AQ_LARGE_BUF) in i40e_alloc_arq_bufs()
208 i40e_free_dma_mem(hw, &hw->aq.arq.r.arq_bi[i]); in i40e_alloc_arq_bufs()
209 i40e_free_virt_mem(hw, &hw->aq.arq.dma_head); in i40e_alloc_arq_bufs()
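
i40e_alloc_arq_bufs() goes one step further than the ring allocators: besides the tracking array it allocates one DMA buffer per receive descriptor and pre-arms every descriptor to point at its buffer, unwinding whatever it managed to allocate if any step fails. A condensed sketch; the memory-type tag and the descriptor field names are recalled from the AQ descriptor layout and should be treated as assumptions:

/* Sketch: one array of i40e_dma_mem trackers, one DMA buffer per receive
 * descriptor, and every descriptor pre-armed so firmware can post events
 * immediately.
 */
static i40e_status i40e_alloc_arq_bufs(struct i40e_hw *hw)
{
        struct i40e_aq_desc *desc;
        struct i40e_dma_mem *bi;
        i40e_status ret_code;
        int i;

        /* tracking array for the per-entry buffers (host memory only) */
        ret_code = i40e_allocate_virt_mem(hw, &hw->aq.arq.dma_head,
                        hw->aq.num_arq_entries * sizeof(struct i40e_dma_mem));
        if (ret_code)
                return ret_code;
        hw->aq.arq.r.arq_bi = (struct i40e_dma_mem *)hw->aq.arq.dma_head.va;

        for (i = 0; i < hw->aq.num_arq_entries; i++) {
                bi = &hw->aq.arq.r.arq_bi[i];
                ret_code = i40e_allocate_dma_mem(hw, bi,
                                                 i40e_mem_arq_buf,   /* assumed tag */
                                                 hw->aq.arq_buf_size,
                                                 I40E_ADMINQ_DESC_ALIGNMENT);
                if (ret_code)
                        goto unwind;

                /* pre-fill the descriptor: buffer flag, large-buffer flag if
                 * needed, length, and the buffer's DMA address split into
                 * high/low halves
                 */
                desc = I40E_ADMINQ_DESC(hw->aq.arq, i);
                desc->flags = cpu_to_le16(I40E_AQ_FLAG_BUF);
                if (hw->aq.arq_buf_size > I40E_AQ_LARGE_BUF)
                        desc->flags |= cpu_to_le16(I40E_AQ_FLAG_LB);
                desc->datalen = cpu_to_le16((u16)bi->size);
                desc->params.external.addr_high = cpu_to_le32(upper_32_bits(bi->pa));
                desc->params.external.addr_low  = cpu_to_le32(lower_32_bits(bi->pa));
        }
        return 0;

unwind:
        /* free only what was successfully allocated, newest first */
        while (--i >= 0)
                i40e_free_dma_mem(hw, &hw->aq.arq.r.arq_bi[i]);
        i40e_free_virt_mem(hw, &hw->aq.arq.dma_head);
        return ret_code;
}
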
218 static i40e_status i40e_alloc_asq_bufs(struct i40e_hw *hw) in i40e_alloc_asq_bufs() argument
225 ret_code = i40e_allocate_virt_mem(hw, &hw->aq.asq.dma_head, in i40e_alloc_asq_bufs()
226 (hw->aq.num_asq_entries * sizeof(struct i40e_dma_mem))); in i40e_alloc_asq_bufs()
229 hw->aq.asq.r.asq_bi = (struct i40e_dma_mem *)hw->aq.asq.dma_head.va; in i40e_alloc_asq_bufs()
232 for (i = 0; i < hw->aq.num_asq_entries; i++) { in i40e_alloc_asq_bufs()
233 bi = &hw->aq.asq.r.asq_bi[i]; in i40e_alloc_asq_bufs()
234 ret_code = i40e_allocate_dma_mem(hw, bi, in i40e_alloc_asq_bufs()
236 hw->aq.asq_buf_size, in i40e_alloc_asq_bufs()
248 i40e_free_dma_mem(hw, &hw->aq.asq.r.asq_bi[i]); in i40e_alloc_asq_bufs()
249 i40e_free_virt_mem(hw, &hw->aq.asq.dma_head); in i40e_alloc_asq_bufs()
258 static void i40e_free_arq_bufs(struct i40e_hw *hw) in i40e_free_arq_bufs() argument
263 for (i = 0; i < hw->aq.num_arq_entries; i++) in i40e_free_arq_bufs()
264 i40e_free_dma_mem(hw, &hw->aq.arq.r.arq_bi[i]); in i40e_free_arq_bufs()
267 i40e_free_dma_mem(hw, &hw->aq.arq.desc_buf); in i40e_free_arq_bufs()
270 i40e_free_virt_mem(hw, &hw->aq.arq.dma_head); in i40e_free_arq_bufs()
277 static void i40e_free_asq_bufs(struct i40e_hw *hw) in i40e_free_asq_bufs() argument
282 for (i = 0; i < hw->aq.num_asq_entries; i++) in i40e_free_asq_bufs()
283 if (hw->aq.asq.r.asq_bi[i].pa) in i40e_free_asq_bufs()
284 i40e_free_dma_mem(hw, &hw->aq.asq.r.asq_bi[i]); in i40e_free_asq_bufs()
287 i40e_free_virt_mem(hw, &hw->aq.asq.cmd_buf); in i40e_free_asq_bufs()
290 i40e_free_dma_mem(hw, &hw->aq.asq.desc_buf); in i40e_free_asq_bufs()
293 i40e_free_virt_mem(hw, &hw->aq.asq.dma_head); in i40e_free_asq_bufs()
302 static i40e_status i40e_config_asq_regs(struct i40e_hw *hw) in i40e_config_asq_regs() argument
308 wr32(hw, hw->aq.asq.head, 0); in i40e_config_asq_regs()
309 wr32(hw, hw->aq.asq.tail, 0); in i40e_config_asq_regs()
312 wr32(hw, hw->aq.asq.len, (hw->aq.num_asq_entries | in i40e_config_asq_regs()
314 wr32(hw, hw->aq.asq.bal, lower_32_bits(hw->aq.asq.desc_buf.pa)); in i40e_config_asq_regs()
315 wr32(hw, hw->aq.asq.bah, upper_32_bits(hw->aq.asq.desc_buf.pa)); in i40e_config_asq_regs()
318 reg = rd32(hw, hw->aq.asq.bal); in i40e_config_asq_regs()
319 if (reg != lower_32_bits(hw->aq.asq.desc_buf.pa)) in i40e_config_asq_regs()
331 static i40e_status i40e_config_arq_regs(struct i40e_hw *hw) in i40e_config_arq_regs() argument
337 wr32(hw, hw->aq.arq.head, 0); in i40e_config_arq_regs()
338 wr32(hw, hw->aq.arq.tail, 0); in i40e_config_arq_regs()
341 wr32(hw, hw->aq.arq.len, (hw->aq.num_arq_entries | in i40e_config_arq_regs()
343 wr32(hw, hw->aq.arq.bal, lower_32_bits(hw->aq.arq.desc_buf.pa)); in i40e_config_arq_regs()
344 wr32(hw, hw->aq.arq.bah, upper_32_bits(hw->aq.arq.desc_buf.pa)); in i40e_config_arq_regs()
347 wr32(hw, hw->aq.arq.tail, hw->aq.num_arq_entries - 1); in i40e_config_arq_regs()
350 reg = rd32(hw, hw->aq.arq.bal); in i40e_config_arq_regs()
351 if (reg != lower_32_bits(hw->aq.arq.desc_buf.pa)) in i40e_config_arq_regs()
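
The two i40e_config_*_regs() blocks program the same registers they later zero on shutdown: head and tail are cleared, the LEN register is loaded with the entry count plus an enable bit (on the elided half of that line), and the descriptor ring's DMA address is split across BAL/BAH. The receive queue additionally bumps its tail to num_arq_entries - 1 so every pre-armed buffer is immediately owned by firmware, and both functions read BAL back as a cheap check that the writes landed. A sketch of the ARQ variant, with the mask and error-code names recalled from the driver headers (assumptions):

/* Sketch: program the receive AdminQ registers and verify one write. */
static i40e_status i40e_config_arq_regs(struct i40e_hw *hw)
{
        i40e_status ret_code = 0;
        u32 reg;

        /* clear head and tail before (re)enabling the queue */
        wr32(hw, hw->aq.arq.head, 0);
        wr32(hw, hw->aq.arq.tail, 0);

        /* entry count plus the queue-enable bit in the LEN register */
        wr32(hw, hw->aq.arq.len, hw->aq.num_arq_entries |
                                 I40E_PF_ARQLEN_ARQENABLE_MASK);   /* assumed name */
        wr32(hw, hw->aq.arq.bal, lower_32_bits(hw->aq.arq.desc_buf.pa));
        wr32(hw, hw->aq.arq.bah, upper_32_bits(hw->aq.arq.desc_buf.pa));

        /* hand all pre-armed receive buffers to firmware in one go */
        wr32(hw, hw->aq.arq.tail, hw->aq.num_arq_entries - 1);

        /* read one register back to confirm the configuration stuck */
        reg = rd32(hw, hw->aq.arq.bal);
        if (reg != lower_32_bits(hw->aq.arq.desc_buf.pa))
                ret_code = I40E_ERR_ADMIN_QUEUE_ERROR;             /* assumed name */

        return ret_code;
}

The ASQ variant is the same minus the tail bump, since the send queue starts out empty.
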
370 static i40e_status i40e_init_asq(struct i40e_hw *hw) in i40e_init_asq() argument
374 if (hw->aq.asq.count > 0) { in i40e_init_asq()
381 if ((hw->aq.num_asq_entries == 0) || in i40e_init_asq()
382 (hw->aq.asq_buf_size == 0)) { in i40e_init_asq()
387 hw->aq.asq.next_to_use = 0; in i40e_init_asq()
388 hw->aq.asq.next_to_clean = 0; in i40e_init_asq()
389 hw->aq.asq.count = hw->aq.num_asq_entries; in i40e_init_asq()
392 ret_code = i40e_alloc_adminq_asq_ring(hw); in i40e_init_asq()
397 ret_code = i40e_alloc_asq_bufs(hw); in i40e_init_asq()
402 ret_code = i40e_config_asq_regs(hw); in i40e_init_asq()
410 i40e_free_adminq_asq(hw); in i40e_init_asq()
429 static i40e_status i40e_init_arq(struct i40e_hw *hw) in i40e_init_arq() argument
433 if (hw->aq.arq.count > 0) { in i40e_init_arq()
440 if ((hw->aq.num_arq_entries == 0) || in i40e_init_arq()
441 (hw->aq.arq_buf_size == 0)) { in i40e_init_arq()
446 hw->aq.arq.next_to_use = 0; in i40e_init_arq()
447 hw->aq.arq.next_to_clean = 0; in i40e_init_arq()
448 hw->aq.arq.count = hw->aq.num_arq_entries; in i40e_init_arq()
451 ret_code = i40e_alloc_adminq_arq_ring(hw); in i40e_init_arq()
456 ret_code = i40e_alloc_arq_bufs(hw); in i40e_init_arq()
461 ret_code = i40e_config_arq_regs(hw); in i40e_init_arq()
469 i40e_free_adminq_arq(hw); in i40e_init_arq()
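
i40e_init_asq() and i40e_init_arq() are mirror images: refuse to run if count is already non-zero (the queue is live), check that the caller filled in num_*_entries and *_buf_size, reset the ring indices, then chain ring allocation, buffer allocation, and register setup, freeing the ring again if a later step fails. A sketch of the send-queue variant, with the error-code names recalled from the driver (assumptions):

/* Sketch of the shared init sequence (ASQ shown; the ARQ is symmetric). */
static i40e_status i40e_init_asq(struct i40e_hw *hw)
{
        i40e_status ret_code;

        if (hw->aq.asq.count > 0)               /* already initialized */
                return I40E_ERR_NOT_READY;      /* assumed error name */

        if (hw->aq.num_asq_entries == 0 ||      /* caller must size the queue */
            hw->aq.asq_buf_size == 0)
                return I40E_ERR_CONFIG;         /* assumed error name */

        hw->aq.asq.next_to_use = 0;
        hw->aq.asq.next_to_clean = 0;
        hw->aq.asq.count = hw->aq.num_asq_entries;   /* set before the allocations,
                                                      * as in the listing above */

        ret_code = i40e_alloc_adminq_asq_ring(hw);   /* descriptor ring */
        if (ret_code)
                return ret_code;

        ret_code = i40e_alloc_asq_bufs(hw);          /* per-entry buffers */
        if (ret_code)
                goto free_ring;

        ret_code = i40e_config_asq_regs(hw);         /* tell the hardware */
        if (ret_code)
                goto free_ring;

        return 0;

free_ring:
        i40e_free_adminq_asq(hw);
        return ret_code;
}
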
481 static i40e_status i40e_shutdown_asq(struct i40e_hw *hw) in i40e_shutdown_asq() argument
485 if (hw->aq.asq.count == 0) in i40e_shutdown_asq()
489 wr32(hw, hw->aq.asq.head, 0); in i40e_shutdown_asq()
490 wr32(hw, hw->aq.asq.tail, 0); in i40e_shutdown_asq()
491 wr32(hw, hw->aq.asq.len, 0); in i40e_shutdown_asq()
492 wr32(hw, hw->aq.asq.bal, 0); in i40e_shutdown_asq()
493 wr32(hw, hw->aq.asq.bah, 0); in i40e_shutdown_asq()
496 mutex_lock(&hw->aq.asq_mutex); in i40e_shutdown_asq()
498 hw->aq.asq.count = 0; /* to indicate uninitialized queue */ in i40e_shutdown_asq()
501 i40e_free_asq_bufs(hw); in i40e_shutdown_asq()
503 mutex_unlock(&hw->aq.asq_mutex); in i40e_shutdown_asq()
514 static i40e_status i40e_shutdown_arq(struct i40e_hw *hw) in i40e_shutdown_arq() argument
518 if (hw->aq.arq.count == 0) in i40e_shutdown_arq()
522 wr32(hw, hw->aq.arq.head, 0); in i40e_shutdown_arq()
523 wr32(hw, hw->aq.arq.tail, 0); in i40e_shutdown_arq()
524 wr32(hw, hw->aq.arq.len, 0); in i40e_shutdown_arq()
525 wr32(hw, hw->aq.arq.bal, 0); in i40e_shutdown_arq()
526 wr32(hw, hw->aq.arq.bah, 0); in i40e_shutdown_arq()
529 mutex_lock(&hw->aq.arq_mutex); in i40e_shutdown_arq()
531 hw->aq.arq.count = 0; /* to indicate uninitialized queue */ in i40e_shutdown_arq()
534 i40e_free_arq_bufs(hw); in i40e_shutdown_arq()
536 mutex_unlock(&hw->aq.arq_mutex); in i40e_shutdown_arq()
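
The shutdown pair inverts that: with the queue known to be initialized, zeroing the LEN/BAL/BAH/head/tail registers disables it in hardware, and only then is the mutex taken, count cleared to mark the queue uninitialized, and the memory freed. A sketch of the ASQ side (the ARQ side is identical apart from the field names):

/* Sketch of the shutdown pattern (ASQ shown; ARQ mirrors it). */
static i40e_status i40e_shutdown_asq(struct i40e_hw *hw)
{
        if (hw->aq.asq.count == 0)              /* never initialized */
                return I40E_ERR_NOT_READY;      /* assumed error name */

        /* writing 0 to the LEN register disables the queue; clearing the
         * base/head/tail registers leaves nothing dangling for firmware
         */
        wr32(hw, hw->aq.asq.head, 0);
        wr32(hw, hw->aq.asq.tail, 0);
        wr32(hw, hw->aq.asq.len, 0);
        wr32(hw, hw->aq.asq.bal, 0);
        wr32(hw, hw->aq.asq.bah, 0);

        mutex_lock(&hw->aq.asq_mutex);          /* exclude in-flight senders */
        hw->aq.asq.count = 0;                   /* mark queue uninitialized */
        i40e_free_asq_bufs(hw);                 /* release descriptors + buffers */
        mutex_unlock(&hw->aq.asq_mutex);

        return 0;
}
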
552 i40e_status i40e_init_adminq(struct i40e_hw *hw) in i40e_init_adminq() argument
559 if ((hw->aq.num_arq_entries == 0) || in i40e_init_adminq()
560 (hw->aq.num_asq_entries == 0) || in i40e_init_adminq()
561 (hw->aq.arq_buf_size == 0) || in i40e_init_adminq()
562 (hw->aq.asq_buf_size == 0)) { in i40e_init_adminq()
568 mutex_init(&hw->aq.asq_mutex); in i40e_init_adminq()
569 mutex_init(&hw->aq.arq_mutex); in i40e_init_adminq()
572 i40e_adminq_init_regs(hw); in i40e_init_adminq()
575 hw->aq.asq_cmd_timeout = I40E_ASQ_CMD_TIMEOUT; in i40e_init_adminq()
578 ret_code = i40e_init_asq(hw); in i40e_init_adminq()
583 ret_code = i40e_init_arq(hw); in i40e_init_adminq()
592 ret_code = i40e_aq_get_firmware_version(hw, in i40e_init_adminq()
593 &hw->aq.fw_maj_ver, in i40e_init_adminq()
594 &hw->aq.fw_min_ver, in i40e_init_adminq()
595 &hw->aq.fw_build, in i40e_init_adminq()
596 &hw->aq.api_maj_ver, in i40e_init_adminq()
597 &hw->aq.api_min_ver, in i40e_init_adminq()
603 i40e_resume_aq(hw); in i40e_init_adminq()
609 i40e_read_nvm_word(hw, I40E_SR_NVM_DEV_STARTER_VERSION, in i40e_init_adminq()
610 &hw->nvm.version); in i40e_init_adminq()
611 i40e_read_nvm_word(hw, I40E_SR_NVM_EETRACK_LO, &eetrack_lo); in i40e_init_adminq()
612 i40e_read_nvm_word(hw, I40E_SR_NVM_EETRACK_HI, &eetrack_hi); in i40e_init_adminq()
613 hw->nvm.eetrack = (eetrack_hi << 16) | eetrack_lo; in i40e_init_adminq()
615 if (hw->aq.api_maj_ver > I40E_FW_API_VERSION_MAJOR) { in i40e_init_adminq()
621 i40e_aq_release_resource(hw, I40E_NVM_RESOURCE_ID, 0, NULL); in i40e_init_adminq()
622 hw->aq.nvm_release_on_done = false; in i40e_init_adminq()
623 hw->nvmupd_state = I40E_NVMUPD_STATE_INIT; in i40e_init_adminq()
625 ret_code = i40e_aq_set_hmc_resource_profile(hw, in i40e_init_adminq()
635 i40e_shutdown_arq(hw); in i40e_init_adminq()
637 i40e_shutdown_asq(hw); in i40e_init_adminq()
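
The i40e_init_adminq() references trace the whole bring-up order: validate the caller-supplied queue sizes, create both mutexes, resolve the register offsets, set the command timeout, initialize the send and then the receive queue, query the firmware and API version (retrying via i40e_resume_aq() if the firmware is not yet responsive), read the NVM version and EETRACK words, reject firmware whose API major version is newer than the driver supports, then drop any stale NVM resource lock and reset the NVM-update state. A condensed, annotated sketch; the retry loop is paraphrased, the i40e_aq_set_hmc_resource_profile() call is omitted, and the error-code names are recalled from the driver (assumptions):

/* Condensed sketch of the AdminQ bring-up order. */
i40e_status i40e_init_adminq(struct i40e_hw *hw)
{
        u16 eetrack_lo, eetrack_hi;
        i40e_status ret_code;

        /* the caller (PF/VF init) must have sized both queues already */
        if (hw->aq.num_arq_entries == 0 || hw->aq.num_asq_entries == 0 ||
            hw->aq.arq_buf_size == 0 || hw->aq.asq_buf_size == 0)
                return I40E_ERR_CONFIG;                 /* assumed name */

        mutex_init(&hw->aq.asq_mutex);
        mutex_init(&hw->aq.arq_mutex);

        i40e_adminq_init_regs(hw);                      /* VF vs PF offsets */
        hw->aq.asq_cmd_timeout = I40E_ASQ_CMD_TIMEOUT;  /* send-poll budget */

        ret_code = i40e_init_asq(hw);
        if (ret_code)
                return ret_code;
        ret_code = i40e_init_arq(hw);
        if (ret_code)
                goto free_asq;

        /* first real command: ask firmware for its version; on timeout the
         * driver nudges the queues with i40e_resume_aq() and retries a few
         * times (retry loop paraphrased away here)
         */
        ret_code = i40e_aq_get_firmware_version(hw,
                                                &hw->aq.fw_maj_ver,
                                                &hw->aq.fw_min_ver,
                                                &hw->aq.fw_build,
                                                &hw->aq.api_maj_ver,
                                                &hw->aq.api_min_ver, NULL);
        if (ret_code)
                goto free_arq;

        /* NVM image version and EETRACK id, read straight from the NVM */
        i40e_read_nvm_word(hw, I40E_SR_NVM_DEV_STARTER_VERSION, &hw->nvm.version);
        i40e_read_nvm_word(hw, I40E_SR_NVM_EETRACK_LO, &eetrack_lo);
        i40e_read_nvm_word(hw, I40E_SR_NVM_EETRACK_HI, &eetrack_hi);
        hw->nvm.eetrack = (eetrack_hi << 16) | eetrack_lo;

        if (hw->aq.api_maj_ver > I40E_FW_API_VERSION_MAJOR) {
                ret_code = I40E_ERR_FIRMWARE_API_VERSION;   /* assumed name */
                goto free_arq;
        }

        /* drop any NVM lock left over from a previous incarnation and reset
         * the NVM-update state machine (the HMC resource profile call that
         * appears in the listing is omitted from this sketch)
         */
        i40e_aq_release_resource(hw, I40E_NVM_RESOURCE_ID, 0, NULL);
        hw->aq.nvm_release_on_done = false;
        hw->nvmupd_state = I40E_NVMUPD_STATE_INIT;

        return 0;

free_arq:
        i40e_shutdown_arq(hw);
free_asq:
        i40e_shutdown_asq(hw);
        return ret_code;
}
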
648 i40e_status i40e_shutdown_adminq(struct i40e_hw *hw) in i40e_shutdown_adminq() argument
652 if (i40e_check_asq_alive(hw)) in i40e_shutdown_adminq()
653 i40e_aq_queue_shutdown(hw, true); in i40e_shutdown_adminq()
655 i40e_shutdown_asq(hw); in i40e_shutdown_adminq()
656 i40e_shutdown_arq(hw); in i40e_shutdown_adminq()
669 static u16 i40e_clean_asq(struct i40e_hw *hw) in i40e_clean_asq() argument
671 struct i40e_adminq_ring *asq = &(hw->aq.asq); in i40e_clean_asq()
679 while (rd32(hw, hw->aq.asq.head) != ntc) { in i40e_clean_asq()
680 i40e_debug(hw, I40E_DEBUG_AQ_MESSAGE, in i40e_clean_asq()
682 rd32(hw, hw->aq.asq.head)); in i40e_clean_asq()
688 cb_func(hw, &desc_cb); in i40e_clean_asq()
711 static bool i40e_asq_done(struct i40e_hw *hw) in i40e_asq_done() argument
716 return rd32(hw, hw->aq.asq.head) == hw->aq.asq.next_to_use; in i40e_asq_done()
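
Completion detection is register-driven: firmware advances the ASQ head register as it consumes descriptors, so i40e_clean_asq() walks from next_to_clean up to the hardware head, firing any per-command callback recorded in the details array, while i40e_asq_done() simply checks whether head has caught up with next_to_use. A sketch of both, with the callback typedef and the free-slot helper recalled from the driver headers (assumptions):

/* Sketch: reclaim descriptors the firmware has already processed. */
static u16 i40e_clean_asq(struct i40e_hw *hw)
{
        struct i40e_adminq_ring *asq = &hw->aq.asq;
        struct i40e_asq_cmd_details *details;
        struct i40e_aq_desc desc_cb;
        struct i40e_aq_desc *desc;
        u16 ntc = asq->next_to_clean;

        /* firmware bumps the head register once it has executed a command */
        while (rd32(hw, hw->aq.asq.head) != ntc) {
                desc = I40E_ADMINQ_DESC(*asq, ntc);
                details = I40E_ADMINQ_DETAILS(*asq, ntc);

                if (details->callback) {
                        /* hand the caller a copy of the completed descriptor */
                        I40E_ADMINQ_CALLBACK cb_func =           /* assumed typedef */
                                (I40E_ADMINQ_CALLBACK)details->callback;
                        desc_cb = *desc;
                        cb_func(hw, &desc_cb);
                }
                memset(desc, 0, sizeof(*desc));
                memset(details, 0, sizeof(*details));
                if (++ntc == asq->count)
                        ntc = 0;
        }

        asq->next_to_clean = ntc;
        return I40E_DESC_UNUSED(asq);           /* free slots left (assumed helper) */
}

/* Sketch: a command is done once firmware's head pointer reaches the slot
 * just past the last descriptor we posted.
 */
static bool i40e_asq_done(struct i40e_hw *hw)
{
        return rd32(hw, hw->aq.asq.head) == hw->aq.asq.next_to_use;
}
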
731 i40e_status i40e_asq_send_command(struct i40e_hw *hw, in i40e_asq_send_command() argument
745 val = rd32(hw, hw->aq.asq.head); in i40e_asq_send_command()
746 if (val >= hw->aq.num_asq_entries) { in i40e_asq_send_command()
747 i40e_debug(hw, I40E_DEBUG_AQ_MESSAGE, in i40e_asq_send_command()
753 if (hw->aq.asq.count == 0) { in i40e_asq_send_command()
754 i40e_debug(hw, I40E_DEBUG_AQ_MESSAGE, in i40e_asq_send_command()
760 details = I40E_ADMINQ_DETAILS(hw->aq.asq, hw->aq.asq.next_to_use); in i40e_asq_send_command()
782 mutex_lock(&hw->aq.asq_mutex); in i40e_asq_send_command()
784 if (buff_size > hw->aq.asq_buf_size) { in i40e_asq_send_command()
785 i40e_debug(hw, in i40e_asq_send_command()
794 i40e_debug(hw, in i40e_asq_send_command()
808 if (i40e_clean_asq(hw) == 0) { in i40e_asq_send_command()
809 i40e_debug(hw, in i40e_asq_send_command()
817 desc_on_ring = I40E_ADMINQ_DESC(hw->aq.asq, hw->aq.asq.next_to_use); in i40e_asq_send_command()
824 dma_buff = &(hw->aq.asq.r.asq_bi[hw->aq.asq.next_to_use]); in i40e_asq_send_command()
839 i40e_debug(hw, I40E_DEBUG_AQ_MESSAGE, "AQTX: desc and buffer:\n"); in i40e_asq_send_command()
840 i40e_debug_aq(hw, I40E_DEBUG_AQ_COMMAND, (void *)desc_on_ring, in i40e_asq_send_command()
842 (hw->aq.asq.next_to_use)++; in i40e_asq_send_command()
843 if (hw->aq.asq.next_to_use == hw->aq.asq.count) in i40e_asq_send_command()
844 hw->aq.asq.next_to_use = 0; in i40e_asq_send_command()
846 wr32(hw, hw->aq.asq.tail, hw->aq.asq.next_to_use); in i40e_asq_send_command()
858 if (i40e_asq_done(hw)) in i40e_asq_send_command()
862 } while (total_delay < hw->aq.asq_cmd_timeout); in i40e_asq_send_command()
866 if (i40e_asq_done(hw)) { in i40e_asq_send_command()
872 i40e_debug(hw, in i40e_asq_send_command()
885 hw->aq.asq_last_status = (enum i40e_admin_queue_err)retval; in i40e_asq_send_command()
888 i40e_debug(hw, I40E_DEBUG_AQ_MESSAGE, in i40e_asq_send_command()
890 i40e_debug_aq(hw, I40E_DEBUG_AQ_COMMAND, (void *)desc, buff, buff_size); in i40e_asq_send_command()
895 i40e_debug(hw, in i40e_asq_send_command()
902 mutex_unlock(&hw->aq.asq_mutex); in i40e_asq_send_command()
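
i40e_asq_send_command() ties the send side together: sanity-check the head register and queue state, take asq_mutex, reclaim completed slots with i40e_clean_asq() (a zero return means the ring is full), copy the caller's descriptor onto the ring, attach the caller's buffer to the slot's DMA buffer for indirect commands, advance next_to_use and write the tail register, then poll i40e_asq_done() until the command completes or asq_cmd_timeout polls have elapsed, finally copying the descriptor and buffer back and recording the firmware return code in asq_last_status. A heavily condensed sketch; cmd_details/async/postpone handling and the debug output are omitted, and several error names are recalled from the driver (assumptions):

/* Condensed sketch of the synchronous send path. */
i40e_status i40e_asq_send_command(struct i40e_hw *hw,
                                  struct i40e_aq_desc *desc,
                                  void *buff, u16 buff_size,
                                  struct i40e_asq_cmd_details *cmd_details)
{
        struct i40e_dma_mem *dma_buff = NULL;
        struct i40e_aq_desc *desc_on_ring;
        i40e_status status = 0;
        u32 total_delay = 0;
        u16 retval;

        /* cmd_details (callbacks, async/postpone flags) handling omitted here */

        /* the queue must be alive and the hardware head must be sane */
        if (rd32(hw, hw->aq.asq.head) >= hw->aq.num_asq_entries ||
            hw->aq.asq.count == 0)
                return I40E_ERR_QUEUE_EMPTY;            /* assumed name */

        mutex_lock(&hw->aq.asq_mutex);

        if (buff_size > hw->aq.asq_buf_size) {          /* buffer won't fit */
                status = I40E_ERR_INVALID_SIZE;         /* assumed name */
                goto unlock;
        }

        if (i40e_clean_asq(hw) == 0) {                  /* no free descriptors */
                status = I40E_ERR_ADMIN_QUEUE_FULL;     /* assumed name */
                goto unlock;
        }

        /* place the caller's descriptor on the ring */
        desc_on_ring = I40E_ADMINQ_DESC(hw->aq.asq, hw->aq.asq.next_to_use);
        *desc_on_ring = *desc;

        if (buff) {                                     /* indirect command */
                dma_buff = &hw->aq.asq.r.asq_bi[hw->aq.asq.next_to_use];
                memcpy(dma_buff->va, buff, buff_size);
                desc_on_ring->datalen = cpu_to_le16(buff_size);
                desc_on_ring->params.external.addr_high =
                        cpu_to_le32(upper_32_bits(dma_buff->pa));
                desc_on_ring->params.external.addr_low =
                        cpu_to_le32(lower_32_bits(dma_buff->pa));
        }

        /* hand the slot to firmware by bumping next_to_use and the tail reg */
        if (++hw->aq.asq.next_to_use == hw->aq.asq.count)
                hw->aq.asq.next_to_use = 0;
        wr32(hw, hw->aq.asq.tail, hw->aq.asq.next_to_use);

        /* poll the head register (via i40e_asq_done) until completion or
         * until asq_cmd_timeout polls of roughly 1 ms each have passed
         */
        do {
                if (i40e_asq_done(hw))
                        break;
                usleep_range(1000, 2000);
        } while (++total_delay < hw->aq.asq_cmd_timeout);

        if (i40e_asq_done(hw)) {
                *desc = *desc_on_ring;                  /* write-back to caller */
                if (buff)
                        memcpy(buff, dma_buff->va, buff_size);
                retval = le16_to_cpu(desc->retval) & 0xff;
                hw->aq.asq_last_status = (enum i40e_admin_queue_err)retval;
                if (retval)
                        status = I40E_ERR_ADMIN_QUEUE_ERROR;    /* assumed name */
        } else {
                status = I40E_ERR_ADMIN_QUEUE_TIMEOUT;          /* assumed name */
        }

unlock:
        mutex_unlock(&hw->aq.asq_mutex);
        return status;
}
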
933 i40e_status i40e_clean_arq_element(struct i40e_hw *hw, in i40e_clean_arq_element() argument
938 u16 ntc = hw->aq.arq.next_to_clean; in i40e_clean_arq_element()
947 mutex_lock(&hw->aq.arq_mutex); in i40e_clean_arq_element()
950 ntu = (rd32(hw, hw->aq.arq.head) & I40E_PF_ARQH_ARQH_MASK); in i40e_clean_arq_element()
958 desc = I40E_ADMINQ_DESC(hw->aq.arq, ntc); in i40e_clean_arq_element()
964 hw->aq.arq_last_status = in i40e_clean_arq_element()
966 i40e_debug(hw, in i40e_clean_arq_element()
969 hw->aq.arq_last_status); in i40e_clean_arq_element()
976 memcpy(e->msg_buf, hw->aq.arq.r.arq_bi[desc_idx].va, in i40e_clean_arq_element()
979 i40e_debug(hw, I40E_DEBUG_AQ_MESSAGE, "AQRX: desc and buffer:\n"); in i40e_clean_arq_element()
980 i40e_debug_aq(hw, I40E_DEBUG_AQ_COMMAND, (void *)desc, e->msg_buf, in i40e_clean_arq_element()
981 hw->aq.arq_buf_size); in i40e_clean_arq_element()
987 bi = &hw->aq.arq.r.arq_bi[ntc]; in i40e_clean_arq_element()
991 if (hw->aq.arq_buf_size > I40E_AQ_LARGE_BUF) in i40e_clean_arq_element()
998 wr32(hw, hw->aq.arq.tail, ntc); in i40e_clean_arq_element()
1001 if (ntc == hw->aq.num_arq_entries) in i40e_clean_arq_element()
1003 hw->aq.arq.next_to_clean = ntc; in i40e_clean_arq_element()
1004 hw->aq.arq.next_to_use = ntu; in i40e_clean_arq_element()
1009 *pending = (ntc > ntu ? hw->aq.arq.count : 0) + (ntu - ntc); in i40e_clean_arq_element()
1010 mutex_unlock(&hw->aq.arq_mutex); in i40e_clean_arq_element()
1013 if (hw->aq.nvm_release_on_done) { in i40e_clean_arq_element()
1014 i40e_release_nvm(hw); in i40e_clean_arq_element()
1015 hw->aq.nvm_release_on_done = false; in i40e_clean_arq_element()
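
On the receive side, i40e_clean_arq_element() pulls one firmware event off the ARQ: under arq_mutex it compares next_to_clean against the hardware head (masked with I40E_PF_ARQH_ARQH_MASK), copies the descriptor and message into the caller's i40e_arq_event_info, records any error code in arq_last_status, re-arms the descriptor with its original DMA buffer, hands the slot back to firmware through the tail register, and reports how many events remain via *pending. A condensed sketch; debug output and some guard conditions on elided lines are omitted, and the error-code names are recalled from the driver (assumptions):

/* Condensed sketch of the receive-event path. */
i40e_status i40e_clean_arq_element(struct i40e_hw *hw,
                                   struct i40e_arq_event_info *e,
                                   u16 *pending)
{
        u16 ntc = hw->aq.arq.next_to_clean;
        i40e_status ret_code = 0;
        struct i40e_aq_desc *desc;
        struct i40e_dma_mem *bi;
        u16 ntu;

        mutex_lock(&hw->aq.arq_mutex);

        /* firmware's write position, masked down to the ring index */
        ntu = rd32(hw, hw->aq.arq.head) & I40E_PF_ARQH_ARQH_MASK;
        if (ntu == ntc) {
                ret_code = I40E_ERR_ADMIN_QUEUE_NO_WORK;   /* assumed name */
                goto out;
        }

        /* copy the event descriptor and as much of the message as fits */
        desc = I40E_ADMINQ_DESC(hw->aq.arq, ntc);
        if (le16_to_cpu(desc->flags) & I40E_AQ_FLAG_ERR)
                hw->aq.arq_last_status =
                        (enum i40e_admin_queue_err)le16_to_cpu(desc->retval);
        e->desc = *desc;
        e->msg_len = min(le16_to_cpu(desc->datalen), e->buf_len);
        if (e->msg_buf && e->msg_len)
                memcpy(e->msg_buf, hw->aq.arq.r.arq_bi[ntc].va, e->msg_len);

        /* re-arm the descriptor with its original buffer and give the slot
         * back to firmware by writing the tail register
         */
        bi = &hw->aq.arq.r.arq_bi[ntc];
        memset(desc, 0, sizeof(*desc));
        desc->flags = cpu_to_le16(I40E_AQ_FLAG_BUF);
        if (hw->aq.arq_buf_size > I40E_AQ_LARGE_BUF)
                desc->flags |= cpu_to_le16(I40E_AQ_FLAG_LB);
        desc->datalen = cpu_to_le16((u16)bi->size);
        desc->params.external.addr_high = cpu_to_le32(upper_32_bits(bi->pa));
        desc->params.external.addr_low  = cpu_to_le32(lower_32_bits(bi->pa));
        wr32(hw, hw->aq.arq.tail, ntc);

        if (++ntc == hw->aq.num_arq_entries)
                ntc = 0;
        hw->aq.arq.next_to_clean = ntc;
        hw->aq.arq.next_to_use = ntu;

out:
        /* how many events are still queued between ntc and ntu */
        if (pending)
                *pending = (ntc > ntu ? hw->aq.arq.count : 0) + (ntu - ntc);
        mutex_unlock(&hw->aq.arq_mutex);

        /* NVM-update commands may have deferred releasing the NVM resource
         * until their completion event arrives
         */
        if (hw->aq.nvm_release_on_done) {
                i40e_release_nvm(hw);
                hw->aq.nvm_release_on_done = false;
        }

        return ret_code;
}
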
1022 static void i40e_resume_aq(struct i40e_hw *hw) in i40e_resume_aq() argument
1025 hw->aq.asq.next_to_use = 0; in i40e_resume_aq()
1026 hw->aq.asq.next_to_clean = 0; in i40e_resume_aq()
1028 i40e_config_asq_regs(hw); in i40e_resume_aq()
1030 hw->aq.arq.next_to_use = 0; in i40e_resume_aq()
1031 hw->aq.arq.next_to_clean = 0; in i40e_resume_aq()
1033 i40e_config_arq_regs(hw); in i40e_resume_aq()