Lines matching refs: hw (uses of the hw handle across the i40e admin queue code: register setup, ring and buffer allocation, queue init/shutdown, command submission, and event handling)
33 static void i40e_resume_aq(struct i40e_hw *hw);
51 static void i40e_adminq_init_regs(struct i40e_hw *hw) in i40e_adminq_init_regs() argument
54 if (i40e_is_vf(hw)) { in i40e_adminq_init_regs()
55 hw->aq.asq.tail = I40E_VF_ATQT1; in i40e_adminq_init_regs()
56 hw->aq.asq.head = I40E_VF_ATQH1; in i40e_adminq_init_regs()
57 hw->aq.asq.len = I40E_VF_ATQLEN1; in i40e_adminq_init_regs()
58 hw->aq.asq.bal = I40E_VF_ATQBAL1; in i40e_adminq_init_regs()
59 hw->aq.asq.bah = I40E_VF_ATQBAH1; in i40e_adminq_init_regs()
60 hw->aq.arq.tail = I40E_VF_ARQT1; in i40e_adminq_init_regs()
61 hw->aq.arq.head = I40E_VF_ARQH1; in i40e_adminq_init_regs()
62 hw->aq.arq.len = I40E_VF_ARQLEN1; in i40e_adminq_init_regs()
63 hw->aq.arq.bal = I40E_VF_ARQBAL1; in i40e_adminq_init_regs()
64 hw->aq.arq.bah = I40E_VF_ARQBAH1; in i40e_adminq_init_regs()
66 hw->aq.asq.tail = I40E_PF_ATQT; in i40e_adminq_init_regs()
67 hw->aq.asq.head = I40E_PF_ATQH; in i40e_adminq_init_regs()
68 hw->aq.asq.len = I40E_PF_ATQLEN; in i40e_adminq_init_regs()
69 hw->aq.asq.bal = I40E_PF_ATQBAL; in i40e_adminq_init_regs()
70 hw->aq.asq.bah = I40E_PF_ATQBAH; in i40e_adminq_init_regs()
71 hw->aq.arq.tail = I40E_PF_ARQT; in i40e_adminq_init_regs()
72 hw->aq.arq.head = I40E_PF_ARQH; in i40e_adminq_init_regs()
73 hw->aq.arq.len = I40E_PF_ARQLEN; in i40e_adminq_init_regs()
74 hw->aq.arq.bal = I40E_PF_ARQBAL; in i40e_adminq_init_regs()
75 hw->aq.arq.bah = I40E_PF_ARQBAH; in i40e_adminq_init_regs()
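The block above only records register offsets; the branch structure is hidden because the else line does not mention hw. A minimal sketch of the shape of i40e_adminq_init_regs(), assuming the I40E_VF_*/I40E_PF_* register defines and i40e_is_vf() from the driver headers (only the send-queue half is spelled out, the ARQ offsets follow the same pattern):

    static void i40e_adminq_init_regs(struct i40e_hw *hw)
    {
        if (i40e_is_vf(hw)) {
            /* VF: admin queue registers live in the VF register map */
            hw->aq.asq.tail = I40E_VF_ATQT1;
            hw->aq.asq.head = I40E_VF_ATQH1;
            hw->aq.asq.len  = I40E_VF_ATQLEN1;
            hw->aq.asq.bal  = I40E_VF_ATQBAL1;
            hw->aq.asq.bah  = I40E_VF_ATQBAH1;
            /* ... ARQ offsets set the same way ... */
        } else {
            /* PF: use the physical-function register map */
            hw->aq.asq.tail = I40E_PF_ATQT;
            hw->aq.asq.head = I40E_PF_ATQH;
            hw->aq.asq.len  = I40E_PF_ATQLEN;
            hw->aq.asq.bal  = I40E_PF_ATQBAL;
            hw->aq.asq.bah  = I40E_PF_ATQBAH;
            /* ... ARQ offsets set the same way ... */
        }
    }

Later wr32()/rd32() calls go through these cached offsets, so the rest of the admin queue code is identical for PF and VF.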
83 static i40e_status i40e_alloc_adminq_asq_ring(struct i40e_hw *hw) in i40e_alloc_adminq_asq_ring() argument
87 ret_code = i40e_allocate_dma_mem(hw, &hw->aq.asq.desc_buf, in i40e_alloc_adminq_asq_ring()
89 (hw->aq.num_asq_entries * in i40e_alloc_adminq_asq_ring()
95 ret_code = i40e_allocate_virt_mem(hw, &hw->aq.asq.cmd_buf, in i40e_alloc_adminq_asq_ring()
96 (hw->aq.num_asq_entries * in i40e_alloc_adminq_asq_ring()
99 i40e_free_dma_mem(hw, &hw->aq.asq.desc_buf); in i40e_alloc_adminq_asq_ring()
110 static i40e_status i40e_alloc_adminq_arq_ring(struct i40e_hw *hw) in i40e_alloc_adminq_arq_ring() argument
114 ret_code = i40e_allocate_dma_mem(hw, &hw->aq.arq.desc_buf, in i40e_alloc_adminq_arq_ring()
116 (hw->aq.num_arq_entries * in i40e_alloc_adminq_arq_ring()
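Both ring allocators follow the same pattern: one DMA allocation sized entries * sizeof(descriptor) for the ring itself, plus (for the ASQ only) a host-memory array of per-command details, with the DMA area freed again if the second allocation fails. A sketch of the ASQ variant; the memory-type tag, the alignment argument, and the details element type are truncated in the listing, so i40e_mem_atq_ring, I40E_ADMINQ_DESC_ALIGNMENT, and struct i40e_asq_cmd_details are assumptions here:

    static i40e_status i40e_alloc_adminq_asq_ring(struct i40e_hw *hw)
    {
        i40e_status ret_code;

        /* descriptor ring: one DMA-coherent block covering all entries */
        ret_code = i40e_allocate_dma_mem(hw, &hw->aq.asq.desc_buf,
                                         i40e_mem_atq_ring,           /* assumed tag */
                                         hw->aq.num_asq_entries *
                                         sizeof(struct i40e_aq_desc),
                                         I40E_ADMINQ_DESC_ALIGNMENT); /* assumed */
        if (ret_code)
            return ret_code;

        /* per-command bookkeeping lives in ordinary host memory */
        ret_code = i40e_allocate_virt_mem(hw, &hw->aq.asq.cmd_buf,
                                          hw->aq.num_asq_entries *
                                          sizeof(struct i40e_asq_cmd_details)); /* assumed type */
        if (ret_code)
            i40e_free_dma_mem(hw, &hw->aq.asq.desc_buf); /* unwind the ring */

        return ret_code;
    }

The ARQ variant only needs the DMA descriptor ring, which is why its allocator above is shorter.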
130 static void i40e_free_adminq_asq(struct i40e_hw *hw) in i40e_free_adminq_asq() argument
132 i40e_free_dma_mem(hw, &hw->aq.asq.desc_buf); in i40e_free_adminq_asq()
142 static void i40e_free_adminq_arq(struct i40e_hw *hw) in i40e_free_adminq_arq() argument
144 i40e_free_dma_mem(hw, &hw->aq.arq.desc_buf); in i40e_free_adminq_arq()
151 static i40e_status i40e_alloc_arq_bufs(struct i40e_hw *hw) in i40e_alloc_arq_bufs() argument
163 ret_code = i40e_allocate_virt_mem(hw, &hw->aq.arq.dma_head, in i40e_alloc_arq_bufs()
164 (hw->aq.num_arq_entries * sizeof(struct i40e_dma_mem))); in i40e_alloc_arq_bufs()
167 hw->aq.arq.r.arq_bi = (struct i40e_dma_mem *)hw->aq.arq.dma_head.va; in i40e_alloc_arq_bufs()
170 for (i = 0; i < hw->aq.num_arq_entries; i++) { in i40e_alloc_arq_bufs()
171 bi = &hw->aq.arq.r.arq_bi[i]; in i40e_alloc_arq_bufs()
172 ret_code = i40e_allocate_dma_mem(hw, bi, in i40e_alloc_arq_bufs()
174 hw->aq.arq_buf_size, in i40e_alloc_arq_bufs()
180 desc = I40E_ADMINQ_DESC(hw->aq.arq, i); in i40e_alloc_arq_bufs()
183 if (hw->aq.arq_buf_size > I40E_AQ_LARGE_BUF) in i40e_alloc_arq_bufs()
208 i40e_free_dma_mem(hw, &hw->aq.arq.r.arq_bi[i]); in i40e_alloc_arq_bufs()
209 i40e_free_virt_mem(hw, &hw->aq.arq.dma_head); in i40e_alloc_arq_bufs()
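The receive-queue buffers are allocated one per descriptor, and each descriptor is pre-filled so firmware can start writing events immediately; if any allocation fails, the loop unwinds what it already allocated. A condensed sketch of the per-entry loop in i40e_alloc_arq_bufs(); descriptor field and flag names beyond what the listing shows (I40E_AQ_FLAG_BUF, I40E_AQ_FLAG_LB, datalen, addr_high/addr_low) are assumptions based on the admin queue descriptor layout, and i is a signed int so the unwind loop can count down past zero:

    for (i = 0; i < hw->aq.num_arq_entries; i++) {
        bi = &hw->aq.arq.r.arq_bi[i];
        ret_code = i40e_allocate_dma_mem(hw, bi, i40e_mem_arq_buf,   /* assumed tag */
                                         hw->aq.arq_buf_size,
                                         I40E_ADMINQ_DESC_ALIGNMENT); /* assumed */
        if (ret_code)
            goto unwind_alloc_arq_bufs;

        /* pre-fill the descriptor so firmware can post an event into it */
        desc = I40E_ADMINQ_DESC(hw->aq.arq, i);
        desc->flags = cpu_to_le16(I40E_AQ_FLAG_BUF);
        if (hw->aq.arq_buf_size > I40E_AQ_LARGE_BUF)
            desc->flags |= cpu_to_le16(I40E_AQ_FLAG_LB);
        desc->datalen = cpu_to_le16((u16)bi->size);
        desc->params.external.addr_high = cpu_to_le32(upper_32_bits(bi->pa));
        desc->params.external.addr_low  = cpu_to_le32(lower_32_bits(bi->pa));
    }
    return ret_code;

    unwind_alloc_arq_bufs:
        /* free everything allocated so far, newest first, then the tracking array */
        for (i--; i >= 0; i--)
            i40e_free_dma_mem(hw, &hw->aq.arq.r.arq_bi[i]);
        i40e_free_virt_mem(hw, &hw->aq.arq.dma_head);

The ASQ allocator below only reserves the tracking array up front; send buffers are attached per command in i40e_asq_send_command().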
218 static i40e_status i40e_alloc_asq_bufs(struct i40e_hw *hw) in i40e_alloc_asq_bufs() argument
225 ret_code = i40e_allocate_virt_mem(hw, &hw->aq.asq.dma_head, in i40e_alloc_asq_bufs()
226 (hw->aq.num_asq_entries * sizeof(struct i40e_dma_mem))); in i40e_alloc_asq_bufs()
229 hw->aq.asq.r.asq_bi = (struct i40e_dma_mem *)hw->aq.asq.dma_head.va; in i40e_alloc_asq_bufs()
232 for (i = 0; i < hw->aq.num_asq_entries; i++) { in i40e_alloc_asq_bufs()
233 bi = &hw->aq.asq.r.asq_bi[i]; in i40e_alloc_asq_bufs()
234 ret_code = i40e_allocate_dma_mem(hw, bi, in i40e_alloc_asq_bufs()
236 hw->aq.asq_buf_size, in i40e_alloc_asq_bufs()
248 i40e_free_dma_mem(hw, &hw->aq.asq.r.asq_bi[i]); in i40e_alloc_asq_bufs()
249 i40e_free_virt_mem(hw, &hw->aq.asq.dma_head); in i40e_alloc_asq_bufs()
258 static void i40e_free_arq_bufs(struct i40e_hw *hw) in i40e_free_arq_bufs() argument
263 for (i = 0; i < hw->aq.num_arq_entries; i++) in i40e_free_arq_bufs()
264 i40e_free_dma_mem(hw, &hw->aq.arq.r.arq_bi[i]); in i40e_free_arq_bufs()
267 i40e_free_dma_mem(hw, &hw->aq.arq.desc_buf); in i40e_free_arq_bufs()
270 i40e_free_virt_mem(hw, &hw->aq.arq.dma_head); in i40e_free_arq_bufs()
277 static void i40e_free_asq_bufs(struct i40e_hw *hw) in i40e_free_asq_bufs() argument
282 for (i = 0; i < hw->aq.num_asq_entries; i++) in i40e_free_asq_bufs()
283 if (hw->aq.asq.r.asq_bi[i].pa) in i40e_free_asq_bufs()
284 i40e_free_dma_mem(hw, &hw->aq.asq.r.asq_bi[i]); in i40e_free_asq_bufs()
287 i40e_free_virt_mem(hw, &hw->aq.asq.cmd_buf); in i40e_free_asq_bufs()
290 i40e_free_dma_mem(hw, &hw->aq.asq.desc_buf); in i40e_free_asq_bufs()
293 i40e_free_virt_mem(hw, &hw->aq.asq.dma_head); in i40e_free_asq_bufs()
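Teardown mirrors allocation: the per-entry DMA buffers go first, then the command-details array (ASQ only), then the descriptor ring, and finally the host array that tracked the buffers. A minimal sketch of the ASQ side, assembled from the lines above:

    static void i40e_free_asq_bufs(struct i40e_hw *hw)
    {
        int i;

        /* only free slots that actually hold a buffer; unused ones have pa == 0 */
        for (i = 0; i < hw->aq.num_asq_entries; i++)
            if (hw->aq.asq.r.asq_bi[i].pa)
                i40e_free_dma_mem(hw, &hw->aq.asq.r.asq_bi[i]);

        i40e_free_virt_mem(hw, &hw->aq.asq.cmd_buf);   /* per-command details */
        i40e_free_dma_mem(hw, &hw->aq.asq.desc_buf);   /* descriptor ring */
        i40e_free_virt_mem(hw, &hw->aq.asq.dma_head);  /* buffer tracking array */
    }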
302 static i40e_status i40e_config_asq_regs(struct i40e_hw *hw) in i40e_config_asq_regs() argument
308 wr32(hw, hw->aq.asq.head, 0); in i40e_config_asq_regs()
309 wr32(hw, hw->aq.asq.tail, 0); in i40e_config_asq_regs()
312 wr32(hw, hw->aq.asq.len, (hw->aq.num_asq_entries | in i40e_config_asq_regs()
314 wr32(hw, hw->aq.asq.bal, lower_32_bits(hw->aq.asq.desc_buf.pa)); in i40e_config_asq_regs()
315 wr32(hw, hw->aq.asq.bah, upper_32_bits(hw->aq.asq.desc_buf.pa)); in i40e_config_asq_regs()
318 reg = rd32(hw, hw->aq.asq.bal); in i40e_config_asq_regs()
319 if (reg != lower_32_bits(hw->aq.asq.desc_buf.pa)) in i40e_config_asq_regs()
331 static i40e_status i40e_config_arq_regs(struct i40e_hw *hw) in i40e_config_arq_regs() argument
337 wr32(hw, hw->aq.arq.head, 0); in i40e_config_arq_regs()
338 wr32(hw, hw->aq.arq.tail, 0); in i40e_config_arq_regs()
341 wr32(hw, hw->aq.arq.len, (hw->aq.num_arq_entries | in i40e_config_arq_regs()
343 wr32(hw, hw->aq.arq.bal, lower_32_bits(hw->aq.arq.desc_buf.pa)); in i40e_config_arq_regs()
344 wr32(hw, hw->aq.arq.bah, upper_32_bits(hw->aq.arq.desc_buf.pa)); in i40e_config_arq_regs()
347 wr32(hw, hw->aq.arq.tail, hw->aq.num_arq_entries - 1); in i40e_config_arq_regs()
350 reg = rd32(hw, hw->aq.arq.bal); in i40e_config_arq_regs()
351 if (reg != lower_32_bits(hw->aq.arq.desc_buf.pa)) in i40e_config_arq_regs()
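Register programming is the same for both queues: clear head and tail, write the entry count plus an enable bit into the LEN register, program the ring's DMA base address across BAL/BAH, and read BAL back to confirm the device accepted it. The ARQ additionally bumps its tail to num_arq_entries - 1 so every pre-allocated buffer is owned by firmware. A sketch of the ARQ variant; the enable-bit mask and the error code are truncated in the listing, so I40E_PF_ARQLEN_ARQENABLE_MASK and I40E_ERR_ADMIN_QUEUE_ERROR are assumed here:

    static i40e_status i40e_config_arq_regs(struct i40e_hw *hw)
    {
        i40e_status ret_code = 0;
        u32 reg;

        /* clear head and tail so the ring starts at entry 0 */
        wr32(hw, hw->aq.arq.head, 0);
        wr32(hw, hw->aq.arq.tail, 0);

        /* LEN carries the entry count plus an enable bit (mask name assumed) */
        wr32(hw, hw->aq.arq.len, hw->aq.num_arq_entries |
                                 I40E_PF_ARQLEN_ARQENABLE_MASK);
        wr32(hw, hw->aq.arq.bal, lower_32_bits(hw->aq.arq.desc_buf.pa));
        wr32(hw, hw->aq.arq.bah, upper_32_bits(hw->aq.arq.desc_buf.pa));

        /* hand all pre-allocated receive buffers to firmware */
        wr32(hw, hw->aq.arq.tail, hw->aq.num_arq_entries - 1);

        /* read one register back to confirm the write took effect */
        reg = rd32(hw, hw->aq.arq.bal);
        if (reg != lower_32_bits(hw->aq.arq.desc_buf.pa))
            ret_code = I40E_ERR_ADMIN_QUEUE_ERROR;  /* assumed error code */

        return ret_code;
    }

The ASQ variant is identical except that it leaves its tail at 0, since the driver (not firmware) produces onto the send queue.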
370 static i40e_status i40e_init_asq(struct i40e_hw *hw) in i40e_init_asq() argument
374 if (hw->aq.asq.count > 0) { in i40e_init_asq()
381 if ((hw->aq.num_asq_entries == 0) || in i40e_init_asq()
382 (hw->aq.asq_buf_size == 0)) { in i40e_init_asq()
387 hw->aq.asq.next_to_use = 0; in i40e_init_asq()
388 hw->aq.asq.next_to_clean = 0; in i40e_init_asq()
391 ret_code = i40e_alloc_adminq_asq_ring(hw); in i40e_init_asq()
396 ret_code = i40e_alloc_asq_bufs(hw); in i40e_init_asq()
401 ret_code = i40e_config_asq_regs(hw); in i40e_init_asq()
406 hw->aq.asq.count = hw->aq.num_asq_entries; in i40e_init_asq()
410 i40e_free_adminq_asq(hw); in i40e_init_asq()
429 static i40e_status i40e_init_arq(struct i40e_hw *hw) in i40e_init_arq() argument
433 if (hw->aq.arq.count > 0) { in i40e_init_arq()
440 if ((hw->aq.num_arq_entries == 0) || in i40e_init_arq()
441 (hw->aq.arq_buf_size == 0)) { in i40e_init_arq()
446 hw->aq.arq.next_to_use = 0; in i40e_init_arq()
447 hw->aq.arq.next_to_clean = 0; in i40e_init_arq()
450 ret_code = i40e_alloc_adminq_arq_ring(hw); in i40e_init_arq()
455 ret_code = i40e_alloc_arq_bufs(hw); in i40e_init_arq()
460 ret_code = i40e_config_arq_regs(hw); in i40e_init_arq()
465 hw->aq.arq.count = hw->aq.num_arq_entries; in i40e_init_arq()
469 i40e_free_adminq_arq(hw); in i40e_init_arq()
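i40e_init_asq() and i40e_init_arq() are symmetric: refuse to run on an already-initialized queue, validate that the configured entry count and buffer size are non-zero, reset the software ring indices, then allocate the ring, allocate the buffers, program the registers, and only at the end publish the queue by setting count. A condensed sketch of the ARQ flow; the error-code names are truncated in the listing and assumed here, and the original's goto-based unwind is collapsed into straight-line checks:

    static i40e_status i40e_init_arq(struct i40e_hw *hw)
    {
        i40e_status ret_code = 0;

        if (hw->aq.arq.count > 0)
            return I40E_ERR_NOT_READY;     /* already initialized; assumed code */

        if (hw->aq.num_arq_entries == 0 || hw->aq.arq_buf_size == 0)
            return I40E_ERR_CONFIG;        /* caller forgot to configure; assumed code */

        hw->aq.arq.next_to_use = 0;
        hw->aq.arq.next_to_clean = 0;

        ret_code = i40e_alloc_adminq_arq_ring(hw);    /* descriptor ring */
        if (ret_code)
            return ret_code;

        ret_code = i40e_alloc_arq_bufs(hw);           /* per-entry event buffers */
        if (!ret_code)
            ret_code = i40e_config_arq_regs(hw);      /* program head/tail/len/base */

        if (ret_code) {
            i40e_free_adminq_arq(hw);                 /* unwind the ring */
            return ret_code;
        }

        /* success: a non-zero count marks the queue usable */
        hw->aq.arq.count = hw->aq.num_arq_entries;
        return ret_code;
    }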
481 static i40e_status i40e_shutdown_asq(struct i40e_hw *hw) in i40e_shutdown_asq() argument
485 mutex_lock(&hw->aq.asq_mutex); in i40e_shutdown_asq()
487 if (hw->aq.asq.count == 0) { in i40e_shutdown_asq()
493 wr32(hw, hw->aq.asq.head, 0); in i40e_shutdown_asq()
494 wr32(hw, hw->aq.asq.tail, 0); in i40e_shutdown_asq()
495 wr32(hw, hw->aq.asq.len, 0); in i40e_shutdown_asq()
496 wr32(hw, hw->aq.asq.bal, 0); in i40e_shutdown_asq()
497 wr32(hw, hw->aq.asq.bah, 0); in i40e_shutdown_asq()
499 hw->aq.asq.count = 0; /* to indicate uninitialized queue */ in i40e_shutdown_asq()
502 i40e_free_asq_bufs(hw); in i40e_shutdown_asq()
505 mutex_unlock(&hw->aq.asq_mutex); in i40e_shutdown_asq()
515 static i40e_status i40e_shutdown_arq(struct i40e_hw *hw) in i40e_shutdown_arq() argument
519 mutex_lock(&hw->aq.arq_mutex); in i40e_shutdown_arq()
521 if (hw->aq.arq.count == 0) { in i40e_shutdown_arq()
527 wr32(hw, hw->aq.arq.head, 0); in i40e_shutdown_arq()
528 wr32(hw, hw->aq.arq.tail, 0); in i40e_shutdown_arq()
529 wr32(hw, hw->aq.arq.len, 0); in i40e_shutdown_arq()
530 wr32(hw, hw->aq.arq.bal, 0); in i40e_shutdown_arq()
531 wr32(hw, hw->aq.arq.bah, 0); in i40e_shutdown_arq()
533 hw->aq.arq.count = 0; /* to indicate uninitialized queue */ in i40e_shutdown_arq()
536 i40e_free_arq_bufs(hw); in i40e_shutdown_arq()
539 mutex_unlock(&hw->aq.arq_mutex); in i40e_shutdown_arq()
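Shutdown is the reverse: take the queue mutex, bail out if the queue was never brought up, disable the queue by zeroing all five registers, clear count so later calls see it as uninitialized, and free the buffers before dropping the lock. A sketch of the ARQ side, with only the early-exit error code assumed:

    static i40e_status i40e_shutdown_arq(struct i40e_hw *hw)
    {
        i40e_status ret_code = 0;

        mutex_lock(&hw->aq.arq_mutex);

        if (hw->aq.arq.count == 0) {
            ret_code = I40E_ERR_NOT_READY;   /* never initialized; assumed code */
            goto shutdown_arq_out;
        }

        /* disable the queue in hardware */
        wr32(hw, hw->aq.arq.head, 0);
        wr32(hw, hw->aq.arq.tail, 0);
        wr32(hw, hw->aq.arq.len, 0);
        wr32(hw, hw->aq.arq.bal, 0);
        wr32(hw, hw->aq.arq.bah, 0);

        hw->aq.arq.count = 0;     /* mark the queue uninitialized */

        i40e_free_arq_bufs(hw);   /* buffers, ring, and tracking array */

    shutdown_arq_out:
        mutex_unlock(&hw->aq.arq_mutex);
        return ret_code;
    }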
554 i40e_status i40e_init_adminq(struct i40e_hw *hw) in i40e_init_adminq() argument
562 if ((hw->aq.num_arq_entries == 0) || in i40e_init_adminq()
563 (hw->aq.num_asq_entries == 0) || in i40e_init_adminq()
564 (hw->aq.arq_buf_size == 0) || in i40e_init_adminq()
565 (hw->aq.asq_buf_size == 0)) { in i40e_init_adminq()
571 i40e_adminq_init_regs(hw); in i40e_init_adminq()
574 hw->aq.asq_cmd_timeout = I40E_ASQ_CMD_TIMEOUT; in i40e_init_adminq()
577 ret_code = i40e_init_asq(hw); in i40e_init_adminq()
582 ret_code = i40e_init_arq(hw); in i40e_init_adminq()
591 ret_code = i40e_aq_get_firmware_version(hw, in i40e_init_adminq()
592 &hw->aq.fw_maj_ver, in i40e_init_adminq()
593 &hw->aq.fw_min_ver, in i40e_init_adminq()
594 &hw->aq.fw_build, in i40e_init_adminq()
595 &hw->aq.api_maj_ver, in i40e_init_adminq()
596 &hw->aq.api_min_ver, in i40e_init_adminq()
602 i40e_resume_aq(hw); in i40e_init_adminq()
608 i40e_read_nvm_word(hw, I40E_SR_NVM_DEV_STARTER_VERSION, in i40e_init_adminq()
609 &hw->nvm.version); in i40e_init_adminq()
610 i40e_read_nvm_word(hw, I40E_SR_NVM_EETRACK_LO, &eetrack_lo); in i40e_init_adminq()
611 i40e_read_nvm_word(hw, I40E_SR_NVM_EETRACK_HI, &eetrack_hi); in i40e_init_adminq()
612 hw->nvm.eetrack = (eetrack_hi << 16) | eetrack_lo; in i40e_init_adminq()
613 i40e_read_nvm_word(hw, I40E_SR_BOOT_CONFIG_PTR, &cfg_ptr); in i40e_init_adminq()
614 i40e_read_nvm_word(hw, (cfg_ptr + I40E_NVM_OEM_VER_OFF), in i40e_init_adminq()
616 i40e_read_nvm_word(hw, (cfg_ptr + (I40E_NVM_OEM_VER_OFF + 1)), in i40e_init_adminq()
618 hw->nvm.oem_ver = ((u32)oem_hi << 16) | oem_lo; in i40e_init_adminq()
620 if (hw->aq.api_maj_ver > I40E_FW_API_VERSION_MAJOR) { in i40e_init_adminq()
626 i40e_aq_release_resource(hw, I40E_NVM_RESOURCE_ID, 0, NULL); in i40e_init_adminq()
627 hw->aq.nvm_release_on_done = false; in i40e_init_adminq()
628 hw->nvmupd_state = I40E_NVMUPD_STATE_INIT; in i40e_init_adminq()
630 ret_code = i40e_aq_set_hmc_resource_profile(hw, in i40e_init_adminq()
640 i40e_shutdown_arq(hw); in i40e_init_adminq()
642 i40e_shutdown_asq(hw); in i40e_init_adminq()
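Putting it together, i40e_init_adminq() validates the caller-supplied sizes, caches the register offsets, sets the command timeout, brings up the ASQ and then the ARQ, and uses the freshly working queues and the NVM to learn firmware, API, and NVM versions; on any failure the queues are torn down in reverse order (ARQ, then ASQ), as the last two lines above show. A condensed sketch of the version/NVM bookkeeping; eetrack_lo/hi, oem_lo/hi and cfg_ptr are u16 locals, the final NULL argument, the goto label, and the error-code names follow the original's conventions but are assumptions where the listing truncates them:

    /* fragment of i40e_init_adminq(), after both queues are up */
    ret_code = i40e_aq_get_firmware_version(hw,
                                            &hw->aq.fw_maj_ver,
                                            &hw->aq.fw_min_ver,
                                            &hw->aq.fw_build,
                                            &hw->aq.api_maj_ver,
                                            &hw->aq.api_min_ver,
                                            NULL);            /* no cmd details; assumed */
    if (ret_code)
        goto init_adminq_free_arq;      /* unwind: shutdown ARQ, then ASQ */

    /* NVM "map" version plus a 32-bit EETRACK id built from two shadow-RAM words */
    i40e_read_nvm_word(hw, I40E_SR_NVM_DEV_STARTER_VERSION, &hw->nvm.version);
    i40e_read_nvm_word(hw, I40E_SR_NVM_EETRACK_LO, &eetrack_lo);
    i40e_read_nvm_word(hw, I40E_SR_NVM_EETRACK_HI, &eetrack_hi);
    hw->nvm.eetrack = (eetrack_hi << 16) | eetrack_lo;

    /* OEM version lives behind a pointer word in the boot config section */
    i40e_read_nvm_word(hw, I40E_SR_BOOT_CONFIG_PTR, &cfg_ptr);
    i40e_read_nvm_word(hw, cfg_ptr + I40E_NVM_OEM_VER_OFF, &oem_hi);
    i40e_read_nvm_word(hw, cfg_ptr + I40E_NVM_OEM_VER_OFF + 1, &oem_lo);
    hw->nvm.oem_ver = ((u32)oem_hi << 16) | oem_lo;

    /* a newer-than-expected AQ API major version is a hard error */
    if (hw->aq.api_maj_ver > I40E_FW_API_VERSION_MAJOR) {
        ret_code = I40E_ERR_FIRMWARE_API_VERSION;   /* assumed error code */
        goto init_adminq_free_arq;
    }

    /* pre-release any held NVM resource and reset the NVM-update state
     * machine before declaring the admin queue ready
     */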
653 i40e_status i40e_shutdown_adminq(struct i40e_hw *hw) in i40e_shutdown_adminq() argument
657 if (i40e_check_asq_alive(hw)) in i40e_shutdown_adminq()
658 i40e_aq_queue_shutdown(hw, true); in i40e_shutdown_adminq()
660 i40e_shutdown_asq(hw); in i40e_shutdown_adminq()
661 i40e_shutdown_arq(hw); in i40e_shutdown_adminq()
663 if (hw->nvm_buff.va) in i40e_shutdown_adminq()
664 i40e_free_virt_mem(hw, &hw->nvm_buff); in i40e_shutdown_adminq()
675 static u16 i40e_clean_asq(struct i40e_hw *hw) in i40e_clean_asq() argument
677 struct i40e_adminq_ring *asq = &(hw->aq.asq); in i40e_clean_asq()
685 while (rd32(hw, hw->aq.asq.head) != ntc) { in i40e_clean_asq()
686 i40e_debug(hw, I40E_DEBUG_AQ_MESSAGE, in i40e_clean_asq()
687 "ntc %d head %d.\n", ntc, rd32(hw, hw->aq.asq.head)); in i40e_clean_asq()
693 cb_func(hw, &desc_cb); in i40e_clean_asq()
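i40e_clean_asq() reclaims send-queue slots the firmware has finished with: it walks from next_to_clean toward the hardware head pointer, invokes any completion callback stashed in the per-command details, scrubs each slot, and returns how many descriptors are now free. A minimal sketch; the listing only shows the loop condition and the cb_func call, so the details/callback field, the I40E_ADMINQ_CALLBACK type, and the I40E_DESC_UNUSED macro are assumptions:

    static u16 i40e_clean_asq(struct i40e_hw *hw)
    {
        struct i40e_adminq_ring *asq = &hw->aq.asq;
        struct i40e_asq_cmd_details *details;
        struct i40e_aq_desc *desc;
        u16 ntc = asq->next_to_clean;

        /* everything between next_to_clean and the HW head is complete */
        while (rd32(hw, hw->aq.asq.head) != ntc) {
            desc = I40E_ADMINQ_DESC(*asq, ntc);
            details = I40E_ADMINQ_DETAILS(*asq, ntc);

            if (details->callback) {
                /* hand a copy of the completed descriptor to the caller's callback */
                struct i40e_aq_desc desc_cb = *desc;

                ((I40E_ADMINQ_CALLBACK)details->callback)(hw, &desc_cb);
            }

            /* scrub the slot before it is reused */
            memset(desc, 0, sizeof(*desc));
            memset(details, 0, sizeof(*details));

            if (++ntc == asq->count)
                ntc = 0;
        }

        asq->next_to_clean = ntc;
        return I40E_DESC_UNUSED(asq);   /* free-slot count; macro name assumed */
    }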
716 static bool i40e_asq_done(struct i40e_hw *hw) in i40e_asq_done() argument
721 return rd32(hw, hw->aq.asq.head) == hw->aq.asq.next_to_use; in i40e_asq_done()
736 i40e_status i40e_asq_send_command(struct i40e_hw *hw, in i40e_asq_send_command() argument
750 mutex_lock(&hw->aq.asq_mutex); in i40e_asq_send_command()
752 if (hw->aq.asq.count == 0) { in i40e_asq_send_command()
753 i40e_debug(hw, I40E_DEBUG_AQ_MESSAGE, in i40e_asq_send_command()
759 hw->aq.asq_last_status = I40E_AQ_RC_OK; in i40e_asq_send_command()
761 val = rd32(hw, hw->aq.asq.head); in i40e_asq_send_command()
762 if (val >= hw->aq.num_asq_entries) { in i40e_asq_send_command()
763 i40e_debug(hw, I40E_DEBUG_AQ_MESSAGE, in i40e_asq_send_command()
769 details = I40E_ADMINQ_DETAILS(hw->aq.asq, hw->aq.asq.next_to_use); in i40e_asq_send_command()
791 if (buff_size > hw->aq.asq_buf_size) { in i40e_asq_send_command()
792 i40e_debug(hw, in i40e_asq_send_command()
801 i40e_debug(hw, in i40e_asq_send_command()
815 if (i40e_clean_asq(hw) == 0) { in i40e_asq_send_command()
816 i40e_debug(hw, in i40e_asq_send_command()
824 desc_on_ring = I40E_ADMINQ_DESC(hw->aq.asq, hw->aq.asq.next_to_use); in i40e_asq_send_command()
831 dma_buff = &(hw->aq.asq.r.asq_bi[hw->aq.asq.next_to_use]); in i40e_asq_send_command()
846 i40e_debug(hw, I40E_DEBUG_AQ_MESSAGE, "AQTX: desc and buffer:\n"); in i40e_asq_send_command()
847 i40e_debug_aq(hw, I40E_DEBUG_AQ_COMMAND, (void *)desc_on_ring, in i40e_asq_send_command()
849 (hw->aq.asq.next_to_use)++; in i40e_asq_send_command()
850 if (hw->aq.asq.next_to_use == hw->aq.asq.count) in i40e_asq_send_command()
851 hw->aq.asq.next_to_use = 0; in i40e_asq_send_command()
853 wr32(hw, hw->aq.asq.tail, hw->aq.asq.next_to_use); in i40e_asq_send_command()
865 if (i40e_asq_done(hw)) in i40e_asq_send_command()
869 } while (total_delay < hw->aq.asq_cmd_timeout); in i40e_asq_send_command()
873 if (i40e_asq_done(hw)) { in i40e_asq_send_command()
879 i40e_debug(hw, in i40e_asq_send_command()
892 hw->aq.asq_last_status = (enum i40e_admin_queue_err)retval; in i40e_asq_send_command()
895 i40e_debug(hw, I40E_DEBUG_AQ_MESSAGE, in i40e_asq_send_command()
897 i40e_debug_aq(hw, I40E_DEBUG_AQ_COMMAND, (void *)desc, buff, buff_size); in i40e_asq_send_command()
906 i40e_debug(hw, in i40e_asq_send_command()
913 mutex_unlock(&hw->aq.asq_mutex); in i40e_asq_send_command()
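i40e_asq_send_command() is the submit path the references above trace: take asq_mutex, sanity-check the queue and the head register, make sure a slot is free via i40e_clean_asq(), copy the caller's descriptor onto the ring (attaching the slot's DMA buffer for indirect commands), advance next_to_use and write it to the tail register, then poll i40e_asq_done() until the firmware's head catches up or asq_cmd_timeout expires, finally copying the descriptor, buffer, and return value back. A condensed sketch of the core submit-and-poll sequence; the delay granularity, timeout error code, and buffer wiring details are assumptions where the listing truncates them:

    /* fragment of the submit path, inside the asq_mutex */
    desc_on_ring = I40E_ADMINQ_DESC(hw->aq.asq, hw->aq.asq.next_to_use);
    *desc_on_ring = *desc;                             /* copy caller's descriptor */

    if (buff) {
        /* indirect command: stage the caller's data in this slot's bounce
         * buffer and point the descriptor at it
         */
        dma_buff = &hw->aq.asq.r.asq_bi[hw->aq.asq.next_to_use];
        memcpy(dma_buff->va, buff, buff_size);
        desc_on_ring->datalen = cpu_to_le16(buff_size);
        desc_on_ring->params.external.addr_high =
            cpu_to_le32(upper_32_bits(dma_buff->pa));
        desc_on_ring->params.external.addr_low =
            cpu_to_le32(lower_32_bits(dma_buff->pa));
    }

    /* bump the producer index and tell hardware about it */
    if (++hw->aq.asq.next_to_use == hw->aq.asq.count)
        hw->aq.asq.next_to_use = 0;
    wr32(hw, hw->aq.asq.tail, hw->aq.asq.next_to_use);

    /* poll the head register until the command completes or times out */
    total_delay = 0;
    do {
        if (i40e_asq_done(hw))
            break;
        usleep_range(1000, 2000);                      /* assumed delay step */
        total_delay++;
    } while (total_delay < hw->aq.asq_cmd_timeout);

    if (i40e_asq_done(hw)) {
        *desc = *desc_on_ring;                         /* copy back descriptor + retval */
        if (buff)
            memcpy(buff, dma_buff->va, buff_size);
        retval = le16_to_cpu(desc->retval);
        hw->aq.asq_last_status = (enum i40e_admin_queue_err)retval;
    } else {
        status = I40E_ERR_ADMIN_QUEUE_TIMEOUT;         /* assumed error code */
    }

i40e_asq_done() itself is the one-liner shown earlier: the command is complete once the hardware head register has advanced to next_to_use.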
943 i40e_status i40e_clean_arq_element(struct i40e_hw *hw, in i40e_clean_arq_element() argument
948 u16 ntc = hw->aq.arq.next_to_clean; in i40e_clean_arq_element()
957 mutex_lock(&hw->aq.arq_mutex); in i40e_clean_arq_element()
959 if (hw->aq.arq.count == 0) { in i40e_clean_arq_element()
960 i40e_debug(hw, I40E_DEBUG_AQ_MESSAGE, in i40e_clean_arq_element()
967 ntu = (rd32(hw, hw->aq.arq.head) & I40E_PF_ARQH_ARQH_MASK); in i40e_clean_arq_element()
975 desc = I40E_ADMINQ_DESC(hw->aq.arq, ntc); in i40e_clean_arq_element()
981 hw->aq.arq_last_status = in i40e_clean_arq_element()
983 i40e_debug(hw, in i40e_clean_arq_element()
986 hw->aq.arq_last_status); in i40e_clean_arq_element()
993 memcpy(e->msg_buf, hw->aq.arq.r.arq_bi[desc_idx].va, in i40e_clean_arq_element()
996 i40e_debug(hw, I40E_DEBUG_AQ_MESSAGE, "AQRX: desc and buffer:\n"); in i40e_clean_arq_element()
997 i40e_debug_aq(hw, I40E_DEBUG_AQ_COMMAND, (void *)desc, e->msg_buf, in i40e_clean_arq_element()
998 hw->aq.arq_buf_size); in i40e_clean_arq_element()
1004 bi = &hw->aq.arq.r.arq_bi[ntc]; in i40e_clean_arq_element()
1008 if (hw->aq.arq_buf_size > I40E_AQ_LARGE_BUF) in i40e_clean_arq_element()
1015 wr32(hw, hw->aq.arq.tail, ntc); in i40e_clean_arq_element()
1018 if (ntc == hw->aq.num_arq_entries) in i40e_clean_arq_element()
1020 hw->aq.arq.next_to_clean = ntc; in i40e_clean_arq_element()
1021 hw->aq.arq.next_to_use = ntu; in i40e_clean_arq_element()
1026 *pending = (ntc > ntu ? hw->aq.arq.count : 0) + (ntu - ntc); in i40e_clean_arq_element()
1029 mutex_unlock(&hw->aq.arq_mutex); in i40e_clean_arq_element()
1032 if (hw->aq.nvm_release_on_done) { in i40e_clean_arq_element()
1033 i40e_release_nvm(hw); in i40e_clean_arq_element()
1034 hw->aq.nvm_release_on_done = false; in i40e_clean_arq_element()
1037 switch (hw->nvmupd_state) { in i40e_clean_arq_element()
1039 hw->nvmupd_state = I40E_NVMUPD_STATE_INIT; in i40e_clean_arq_element()
1043 hw->nvmupd_state = I40E_NVMUPD_STATE_WRITING; in i40e_clean_arq_element()
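i40e_clean_arq_element() is the receive-side consumer: under arq_mutex it compares next_to_clean (ntc) with the hardware head (ntu); if they differ there is an event, so it copies the descriptor and message buffer out to the caller, records any error in arq_last_status, re-arms the descriptor with the same DMA buffer, writes ntc to the tail register to return that buffer to firmware, and reports how many events are still pending. The NVM-update bookkeeping (releasing the NVM resource, advancing nvmupd_state) runs after the mutex is dropped. A condensed sketch of the recycle-and-account step; the descriptor field writes are assumed to mirror the allocation path:

    /* re-arm the just-consumed slot with its original DMA buffer and
     * hand it back to firmware (fragment; ntc still points at the slot)
     */
    bi = &hw->aq.arq.r.arq_bi[ntc];
    memset(desc, 0, sizeof(*desc));

    desc->flags = cpu_to_le16(I40E_AQ_FLAG_BUF);
    if (hw->aq.arq_buf_size > I40E_AQ_LARGE_BUF)
        desc->flags |= cpu_to_le16(I40E_AQ_FLAG_LB);
    desc->datalen = cpu_to_le16((u16)bi->size);
    desc->params.external.addr_high = cpu_to_le32(upper_32_bits(bi->pa));
    desc->params.external.addr_low  = cpu_to_le32(lower_32_bits(bi->pa));

    /* writing ntc to the tail register gives the buffer back to firmware */
    wr32(hw, hw->aq.arq.tail, ntc);

    /* advance and wrap the software consumer index */
    if (++ntc == hw->aq.num_arq_entries)
        ntc = 0;
    hw->aq.arq.next_to_clean = ntc;
    hw->aq.arq.next_to_use = ntu;

    /* events still waiting: distance from ntc to ntu around the ring */
    if (pending)
        *pending = (ntc > ntu ? hw->aq.arq.count : 0) + (ntu - ntc);

i40e_resume_aq(), listed last, simply resets both software indices to zero and reprograms the queue registers so a live admin queue can be restarted without reallocating anything.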
1054 static void i40e_resume_aq(struct i40e_hw *hw) in i40e_resume_aq() argument
1057 hw->aq.asq.next_to_use = 0; in i40e_resume_aq()
1058 hw->aq.asq.next_to_clean = 0; in i40e_resume_aq()
1060 i40e_config_asq_regs(hw); in i40e_resume_aq()
1062 hw->aq.arq.next_to_use = 0; in i40e_resume_aq()
1063 hw->aq.arq.next_to_clean = 0; in i40e_resume_aq()
1065 i40e_config_arq_regs(hw); in i40e_resume_aq()