Searched refs:oct (Results 1 – 23 of 23) sorted by relevance

/linux-4.4.14/drivers/net/ethernet/cavium/liquidio/
cn66xx_device.c
43 int lio_cn6xxx_soft_reset(struct octeon_device *oct) in lio_cn6xxx_soft_reset() argument
45 octeon_write_csr64(oct, CN6XXX_WIN_WR_MASK_REG, 0xFF); in lio_cn6xxx_soft_reset()
47 dev_dbg(&oct->pci_dev->dev, "BIST enabled for soft reset\n"); in lio_cn6xxx_soft_reset()
49 lio_pci_writeq(oct, 1, CN6XXX_CIU_SOFT_BIST); in lio_cn6xxx_soft_reset()
50 octeon_write_csr64(oct, CN6XXX_SLI_SCRATCH1, 0x1234ULL); in lio_cn6xxx_soft_reset()
52 lio_pci_readq(oct, CN6XXX_CIU_SOFT_RST); in lio_cn6xxx_soft_reset()
53 lio_pci_writeq(oct, 1, CN6XXX_CIU_SOFT_RST); in lio_cn6xxx_soft_reset()
61 if (octeon_read_csr64(oct, CN6XXX_SLI_SCRATCH1) == 0x1234ULL) { in lio_cn6xxx_soft_reset()
62 dev_err(&oct->pci_dev->dev, "Soft reset failed\n"); in lio_cn6xxx_soft_reset()
66 dev_dbg(&oct->pci_dev->dev, "Reset completed\n"); in lio_cn6xxx_soft_reset()
[all …]
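
Read together, the cn66xx_device.c hits above outline the CN66XX soft-reset sequence: write the window write-mask, enable BIST, plant a scratch value, pulse the reset, then check whether the scratch value survived. Below is a minimal sketch assembled only from those fragments; the elided lines between them (delays, SLI error masking) are not reproduced, and the liquidio driver headers are assumed to be included.

/* Sketch of the CN66XX soft-reset flow as visible in the hits above.
 * Assumes the liquidio driver headers (octeon_device.h, register
 * definitions) are available; elided steps of the real function are
 * omitted.
 */
static int example_cn6xxx_soft_reset(struct octeon_device *oct)
{
	octeon_write_csr64(oct, CN6XXX_WIN_WR_MASK_REG, 0xFF);
	dev_dbg(&oct->pci_dev->dev, "BIST enabled for soft reset\n");

	lio_pci_writeq(oct, 1, CN6XXX_CIU_SOFT_BIST);

	/* Scratch value used to detect whether the reset really happened. */
	octeon_write_csr64(oct, CN6XXX_SLI_SCRATCH1, 0x1234ULL);

	lio_pci_readq(oct, CN6XXX_CIU_SOFT_RST);
	lio_pci_writeq(oct, 1, CN6XXX_CIU_SOFT_RST);

	/* If the scratch register still holds the value, the reset failed. */
	if (octeon_read_csr64(oct, CN6XXX_SLI_SCRATCH1) == 0x1234ULL) {
		dev_err(&oct->pci_dev->dev, "Soft reset failed\n");
		return 1;
	}

	dev_dbg(&oct->pci_dev->dev, "Reset completed\n");
	return 0;
}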
cn68xx_device.c
45 static void lio_cn68xx_set_dpi_regs(struct octeon_device *oct) in lio_cn68xx_set_dpi_regs() argument
50 lio_pci_writeq(oct, CN6XXX_DPI_DMA_CTL_MASK, CN6XXX_DPI_DMA_CONTROL); in lio_cn68xx_set_dpi_regs()
51 dev_dbg(&oct->pci_dev->dev, "DPI_DMA_CONTROL: 0x%016llx\n", in lio_cn68xx_set_dpi_regs()
52 lio_pci_readq(oct, CN6XXX_DPI_DMA_CONTROL)); in lio_cn68xx_set_dpi_regs()
59 lio_pci_writeq(oct, 0, CN6XXX_DPI_DMA_ENG_ENB(i)); in lio_cn68xx_set_dpi_regs()
60 lio_pci_writeq(oct, fifo_sizes[i], CN6XXX_DPI_DMA_ENG_BUF(i)); in lio_cn68xx_set_dpi_regs()
61 dev_dbg(&oct->pci_dev->dev, "DPI_ENG_BUF%d: 0x%016llx\n", i, in lio_cn68xx_set_dpi_regs()
62 lio_pci_readq(oct, CN6XXX_DPI_DMA_ENG_BUF(i))); in lio_cn68xx_set_dpi_regs()
69 lio_pci_writeq(oct, 1, CN6XXX_DPI_CTL); in lio_cn68xx_set_dpi_regs()
70 dev_dbg(&oct->pci_dev->dev, "DPI_CTL: 0x%016llx\n", in lio_cn68xx_set_dpi_regs()
[all …]
octeon_device.c
483 static void *__retrieve_octeon_config_info(struct octeon_device *oct, in __retrieve_octeon_config_info() argument
486 u32 oct_id = oct->octeon_id; in __retrieve_octeon_config_info()
491 if (oct->chip_id == OCTEON_CN66XX) { in __retrieve_octeon_config_info()
493 } else if ((oct->chip_id == OCTEON_CN68XX) && in __retrieve_octeon_config_info()
496 } else if ((oct->chip_id == OCTEON_CN68XX) && in __retrieve_octeon_config_info()
507 static int __verify_octeon_config_info(struct octeon_device *oct, void *conf) in __verify_octeon_config_info() argument
509 switch (oct->chip_id) { in __verify_octeon_config_info()
512 return lio_validate_cn6xxx_config_info(oct, conf); in __verify_octeon_config_info()
521 void *oct_get_config_info(struct octeon_device *oct, u16 card_type) in oct_get_config_info() argument
525 conf = __retrieve_octeon_config_info(oct, card_type); in oct_get_config_info()
[all …]
octeon_console.c
51 static u64 cvmx_bootmem_phy_named_block_find(struct octeon_device *oct,
168 #define CVMX_BOOTMEM_DESC_GET_FIELD(oct, field) \ argument
169 __cvmx_bootmem_desc_get(oct, oct->bootmem_desc_addr, \
187 #define CVMX_BOOTMEM_NAMED_GET_FIELD(oct, addr, field) \ argument
188 __cvmx_bootmem_desc_get(oct, addr, \
205 static inline u64 __cvmx_bootmem_desc_get(struct octeon_device *oct, in __cvmx_bootmem_desc_get() argument
213 return octeon_read_device_mem32(oct, base); in __cvmx_bootmem_desc_get()
215 return octeon_read_device_mem64(oct, base); in __cvmx_bootmem_desc_get()
231 static void CVMX_BOOTMEM_NAMED_GET_NAME(struct octeon_device *oct, in CVMX_BOOTMEM_NAMED_GET_NAME() argument
237 octeon_pci_read_core_mem(oct, addr, str, len); in CVMX_BOOTMEM_NAMED_GET_NAME()
[all …]
request_manager.c
54 static void __check_db_timeout(struct octeon_device *oct, unsigned long iq_no);
58 static inline int IQ_INSTR_MODE_64B(struct octeon_device *oct, int iq_no) in IQ_INSTR_MODE_64B() argument
61 (struct octeon_instr_queue *)oct->instr_queue[iq_no]; in IQ_INSTR_MODE_64B()
65 #define IQ_INSTR_MODE_32B(oct, iq_no) (!IQ_INSTR_MODE_64B(oct, iq_no)) argument
71 int octeon_init_instr_queue(struct octeon_device *oct, in octeon_init_instr_queue() argument
79 if (OCTEON_CN6XXX(oct)) in octeon_init_instr_queue()
80 conf = &(CFG_GET_IQ_CFG(CHIP_FIELD(oct, cn6xxx, conf))); in octeon_init_instr_queue()
83 dev_err(&oct->pci_dev->dev, "Unsupported Chip %x\n", in octeon_init_instr_queue()
84 oct->chip_id); in octeon_init_instr_queue()
89 dev_err(&oct->pci_dev->dev, in octeon_init_instr_queue()
[all …]
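
The request_manager.c hits show the chip-dispatch idiom used throughout the driver: per-chip configuration lives behind oct->chip, and OCTEON_CN6XXX()/CHIP_FIELD() (defined in octeon_device.h, further down in these results) select the right view of it. A short sketch of that pattern, using only the macros and calls visible in the hits; the type of the conf pointer is not visible in the excerpt and is an assumption.

/* Sketch of the per-chip config lookup seen in octeon_init_instr_queue().
 * CFG_GET_IQ_CFG and CHIP_FIELD are used exactly as they appear in the
 * hits; struct octeon_iq_config is assumed, and everything after the
 * lookup is omitted.
 */
static struct octeon_iq_config *example_get_iq_conf(struct octeon_device *oct)
{
	struct octeon_iq_config *conf = NULL;

	if (OCTEON_CN6XXX(oct))
		conf = &(CFG_GET_IQ_CFG(CHIP_FIELD(oct, cn6xxx, conf)));

	if (!conf)
		dev_err(&oct->pci_dev->dev, "Unsupported Chip %x\n",
			oct->chip_id);

	return conf;
}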
octeon_mem_ops.c
48 octeon_toggle_bar1_swapmode(struct octeon_device *oct __attribute__((unused)), in octeon_toggle_bar1_swapmode() argument
54 mask = oct->fn_list.bar1_idx_read(oct, idx); in octeon_toggle_bar1_swapmode()
56 oct->fn_list.bar1_idx_write(oct, idx, mask); in octeon_toggle_bar1_swapmode()
61 octeon_pci_fastwrite(struct octeon_device *oct, u8 __iomem *mapped_addr, in octeon_pci_fastwrite() argument
69 octeon_toggle_bar1_swapmode(oct, MEMOPS_IDX); in octeon_pci_fastwrite()
78 octeon_toggle_bar1_swapmode(oct, MEMOPS_IDX); in octeon_pci_fastwrite()
85 octeon_pci_fastread(struct octeon_device *oct, u8 __iomem *mapped_addr, in octeon_pci_fastread() argument
93 octeon_toggle_bar1_swapmode(oct, MEMOPS_IDX); in octeon_pci_fastread()
102 octeon_toggle_bar1_swapmode(oct, MEMOPS_IDX); in octeon_pci_fastread()
111 __octeon_pci_rw_core_mem(struct octeon_device *oct, u64 addr, in __octeon_pci_rw_core_mem() argument
[all …]
cn66xx_device.h
73 int lio_cn6xxx_soft_reset(struct octeon_device *oct);
74 void lio_cn6xxx_enable_error_reporting(struct octeon_device *oct);
75 void lio_cn6xxx_setup_pcie_mps(struct octeon_device *oct,
77 void lio_cn6xxx_setup_pcie_mrrs(struct octeon_device *oct,
79 void lio_cn6xxx_setup_global_input_regs(struct octeon_device *oct);
80 void lio_cn6xxx_setup_global_output_regs(struct octeon_device *oct);
81 void lio_cn6xxx_setup_iq_regs(struct octeon_device *oct, u32 iq_no);
82 void lio_cn6xxx_setup_oq_regs(struct octeon_device *oct, u32 oq_no);
83 void lio_cn6xxx_enable_io_queues(struct octeon_device *oct);
84 void lio_cn6xxx_disable_io_queues(struct octeon_device *oct);
[all …]
octeon_droq.c
107 u32 octeon_droq_check_hw_for_pkts(struct octeon_device *oct, in octeon_droq_check_hw_for_pkts() argument
150 octeon_droq_destroy_ring_buffers(struct octeon_device *oct, in octeon_droq_destroy_ring_buffers() argument
158 lio_unmap_ring_info(oct->pci_dev, in octeon_droq_destroy_ring_buffers()
162 lio_unmap_ring(oct->pci_dev, in octeon_droq_destroy_ring_buffers()
176 octeon_droq_setup_ring_buffers(struct octeon_device *oct, in octeon_droq_setup_ring_buffers() argument
184 buf = recv_buffer_alloc(oct, droq->q_no, droq->buffer_size); in octeon_droq_setup_ring_buffers()
187 dev_err(&oct->pci_dev->dev, "%s buffer alloc failed\n", in octeon_droq_setup_ring_buffers()
200 lio_map_ring(oct->pci_dev, in octeon_droq_setup_ring_buffers()
212 int octeon_delete_droq(struct octeon_device *oct, u32 q_no) in octeon_delete_droq() argument
214 struct octeon_droq *droq = oct->droq[q_no]; in octeon_delete_droq()
[all …]
lio_main.c
218 struct octeon_device *oct = (struct octeon_device *)pdev; in octeon_droq_bh() local
220 (struct octeon_device_priv *)oct->priv; in octeon_droq_bh()
224 if (!(oct->io_qmask.oq & (1UL << q_no))) in octeon_droq_bh()
226 reschedule |= octeon_droq_process_packets(oct, oct->droq[q_no], in octeon_droq_bh()
234 static int lio_wait_for_oq_pkts(struct octeon_device *oct) in lio_wait_for_oq_pkts() argument
237 (struct octeon_device_priv *)oct->priv; in lio_wait_for_oq_pkts()
245 if (!(oct->io_qmask.oq & (1UL << i))) in lio_wait_for_oq_pkts()
247 pkt_cnt += octeon_droq_check_hw_for_pkts(oct, in lio_wait_for_oq_pkts()
248 oct->droq[i]); in lio_wait_for_oq_pkts()
336 static void force_io_queues_off(struct octeon_device *oct) in force_io_queues_off() argument
[all …]
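
Both octeon_droq_bh() and lio_wait_for_oq_pkts() above walk the output queues and skip any queue whose bit is clear in oct->io_qmask.oq before touching oct->droq[]. A minimal sketch of that guard; the loop bound is passed in as a parameter because the driver's own queue-count constant is not part of the excerpt.

/* Count packets pending on all active output queues. The max_oqs
 * parameter stands in for the driver's own queue-count constant,
 * which is not visible in the search results.
 */
static int example_count_oq_pkts(struct octeon_device *oct, u32 max_oqs)
{
	int pkt_cnt = 0;
	u32 i;

	for (i = 0; i < max_oqs; i++) {
		/* Skip queues that were never brought up. */
		if (!(oct->io_qmask.oq & (1UL << i)))
			continue;
		pkt_cnt += octeon_droq_check_hw_for_pkts(oct, oct->droq[i]);
	}

	return pkt_cnt;
}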
octeon_main.h
71 static inline void octeon_unmap_pci_barx(struct octeon_device *oct, int baridx) in octeon_unmap_pci_barx() argument
73 dev_dbg(&oct->pci_dev->dev, "Freeing PCI mapped regions for Bar%d\n", in octeon_unmap_pci_barx()
76 if (oct->mmio[baridx].done) in octeon_unmap_pci_barx()
77 iounmap(oct->mmio[baridx].hw_addr); in octeon_unmap_pci_barx()
79 if (oct->mmio[baridx].start) in octeon_unmap_pci_barx()
80 pci_release_region(oct->pci_dev, baridx * 2); in octeon_unmap_pci_barx()
89 static inline int octeon_map_pci_barx(struct octeon_device *oct, in octeon_map_pci_barx() argument
94 if (pci_request_region(oct->pci_dev, baridx * 2, DRV_NAME)) { in octeon_map_pci_barx()
95 dev_err(&oct->pci_dev->dev, "pci_request_region failed for bar %d\n", in octeon_map_pci_barx()
100 oct->mmio[baridx].start = pci_resource_start(oct->pci_dev, baridx * 2); in octeon_map_pci_barx()
[all …]
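
The octeon_main.h hits pair pci_request_region()/pci_resource_start() on the map side with iounmap()/pci_release_region() on the unmap side, always addressing PCI region baridx * 2. The unmap half is nearly complete in the excerpt; here is a sketch of it, with the elided Bar%d format argument assumed to be baridx.

/* Release a mapped BAR. Mirrors octeon_unmap_pci_barx() as shown in the
 * hits; the dev_dbg format argument is assumed to be the bar index.
 */
static inline void example_unmap_pci_barx(struct octeon_device *oct, int baridx)
{
	dev_dbg(&oct->pci_dev->dev, "Freeing PCI mapped regions for Bar%d\n",
		baridx);

	if (oct->mmio[baridx].done)
		iounmap(oct->mmio[baridx].hw_addr);

	if (oct->mmio[baridx].start)
		pci_release_region(oct->pci_dev, baridx * 2);
}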
octeon_device.h
393 #define OCTEON_CN6XXX(oct) ((oct->chip_id == OCTEON_CN66XX) || \ argument
394 (oct->chip_id == OCTEON_CN68XX))
395 #define CHIP_FIELD(oct, TYPE, field) \ argument
396 (((struct octeon_ ## TYPE *)(oct->chip))->field)
470 static inline u16 OCTEON_MAJOR_REV(struct octeon_device *oct) in OCTEON_MAJOR_REV() argument
472 u16 rev = (oct->rev_id & 0xC) >> 2; in OCTEON_MAJOR_REV()
477 static inline u16 OCTEON_MINOR_REV(struct octeon_device *oct) in OCTEON_MINOR_REV() argument
479 return oct->rev_id & 0x3; in OCTEON_MINOR_REV()
492 u64 lio_pci_readq(struct octeon_device *oct, u64 addr);
504 void lio_pci_writeq(struct octeon_device *oct, u64 val, u64 addr);
[all …]
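
octeon_device.h carries the small helpers the other files lean on: OCTEON_CN6XXX() folds the two supported chip IDs into one test, CHIP_FIELD() casts oct->chip to the per-chip struct, and the revision helpers unpack oct->rev_id (bits 3:2 major, bits 1:0 minor). A tiny usage sketch built only from those definitions; the elided body of OCTEON_MAJOR_REV() is not reproduced, only called.

/* Log the chip family check and silicon revision using the helpers above. */
static void example_log_chip_rev(struct octeon_device *oct)
{
	dev_dbg(&oct->pci_dev->dev, "CN6XXX family: %s, rev %u.%u\n",
		OCTEON_CN6XXX(oct) ? "yes" : "no",
		OCTEON_MAJOR_REV(oct), OCTEON_MINOR_REV(oct));
}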
response_manager.c
47 int octeon_setup_response_list(struct octeon_device *oct) in octeon_setup_response_list() argument
53 INIT_LIST_HEAD(&oct->response_list[i].head); in octeon_setup_response_list()
54 spin_lock_init(&oct->response_list[i].lock); in octeon_setup_response_list()
55 atomic_set(&oct->response_list[i].pending_req_count, 0); in octeon_setup_response_list()
58 oct->dma_comp_wq.wq = create_workqueue("dma-comp"); in octeon_setup_response_list()
59 if (!oct->dma_comp_wq.wq) { in octeon_setup_response_list()
60 dev_err(&oct->pci_dev->dev, "failed to create wq thread\n"); in octeon_setup_response_list()
64 cwq = &oct->dma_comp_wq; in octeon_setup_response_list()
66 cwq->wk.ctxptr = oct; in octeon_setup_response_list()
72 void octeon_delete_response_list(struct octeon_device *oct) in octeon_delete_response_list() argument
[all …]
octeon_nic.c
46 octeon_alloc_soft_command_resp(struct octeon_device *oct, in octeon_alloc_soft_command_resp() argument
56 octeon_alloc_soft_command(oct, 0, rdatasize, 0); in octeon_alloc_soft_command_resp()
75 rdp->pcie_port = oct->pcie_port; in octeon_alloc_soft_command_resp()
86 int octnet_send_nic_data_pkt(struct octeon_device *oct, in octnet_send_nic_data_pkt() argument
94 return octeon_send_command(oct, ndata->q_no, ring_doorbell, &ndata->cmd, in octnet_send_nic_data_pkt()
99 static void octnet_link_ctrl_callback(struct octeon_device *oct, in octnet_link_ctrl_callback() argument
117 octeon_free_soft_command(oct, sc); in octnet_link_ctrl_callback()
121 *octnic_alloc_ctrl_pkt_sc(struct octeon_device *oct, in octnic_alloc_ctrl_pkt_sc() argument
136 octeon_alloc_soft_command(oct, datasize, rdatasize, in octnic_alloc_ctrl_pkt_sc()
155 octeon_prepare_soft_command(oct, sc, OPCODE_NIC, OPCODE_NIC_CMD, in octnic_alloc_ctrl_pkt_sc()
[all …]
lio_ethtool.c
110 struct octeon_device *oct = lio->oct_dev; in lio_get_settings() local
127 dev_err(&oct->pci_dev->dev, "Unknown link interface reported\n"); in lio_get_settings()
145 struct octeon_device *oct; in lio_get_drvinfo() local
148 oct = lio->oct_dev; in lio_get_drvinfo()
153 strncpy(drvinfo->fw_version, oct->fw_info.liquidio_firmware_version, in lio_get_drvinfo()
155 strncpy(drvinfo->bus_info, pci_name(oct->pci_dev), 32); in lio_get_drvinfo()
163 struct octeon_device *oct = lio->oct_dev; in lio_ethtool_get_channels() local
166 if (OCTEON_CN6XXX(oct)) { in lio_ethtool_get_channels()
167 struct octeon_config *conf6x = CHIP_FIELD(oct, cn6xxx, conf); in lio_ethtool_get_channels()
223 struct octeon_device *oct = lio->oct_dev; in octnet_gpio_access() local
[all …]
octeon_iq.h
259 int octeon_setup_sc_buffer_pool(struct octeon_device *oct);
260 int octeon_free_sc_buffer_pool(struct octeon_device *oct);
262 octeon_alloc_soft_command(struct octeon_device *oct,
265 void octeon_free_soft_command(struct octeon_device *oct,
293 int lio_wait_for_instr_fetch(struct octeon_device *oct);
296 octeon_register_reqtype_free_fn(struct octeon_device *oct, int reqtype,
300 lio_process_iq_request_list(struct octeon_device *oct,
303 int octeon_send_command(struct octeon_device *oct, u32 iq_no,
307 void octeon_prepare_soft_command(struct octeon_device *oct,
313 int octeon_send_soft_command(struct octeon_device *oct,
[all …]
octeon_network.h
135 *recv_buffer_alloc(struct octeon_device *oct __attribute__((unused)), in recv_buffer_alloc() argument
157 #define lio_dma_alloc(oct, size, dma_addr) \ argument
158 dma_alloc_coherent(&oct->pci_dev->dev, size, dma_addr, GFP_KERNEL)
159 #define lio_dma_free(oct, size, virt_addr, dma_addr) \ argument
160 dma_free_coherent(&oct->pci_dev->dev, size, virt_addr, dma_addr)
168 struct octeon_device *oct = droq->oct_dev; in lio_map_ring_info() local
170 dma_addr = dma_map_single(&oct->pci_dev->dev, &droq->info_list[i], in lio_map_ring_info()
173 BUG_ON(dma_mapping_error(&oct->pci_dev->dev, dma_addr)); in lio_map_ring_info()
208 static inline void *octeon_fast_packet_alloc(struct octeon_device *oct, in octeon_fast_packet_alloc() argument
212 return recv_buffer_alloc(oct, q_no, size); in octeon_fast_packet_alloc()
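
octeon_network.h wraps dma_alloc_coherent()/dma_free_coherent() on the device's PCI dev as lio_dma_alloc()/lio_dma_free(). A minimal usage sketch assuming nothing beyond those two macros; the buffer size and its use are illustrative.

/* Allocate and release a small coherent DMA buffer through the wrappers.
 * The 1 KB size here is purely illustrative.
 */
static int example_dma_roundtrip(struct octeon_device *oct)
{
	dma_addr_t dma_addr;
	void *virt;

	virt = lio_dma_alloc(oct, 1024, &dma_addr);
	if (!virt)
		return -ENOMEM;

	/* ... hand dma_addr to the hardware ... */

	lio_dma_free(oct, 1024, virt, dma_addr);
	return 0;
}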
octeon_mem_ops.h
39 u64 octeon_read_device_mem64(struct octeon_device *oct, u64 core_addr);
47 u32 octeon_read_device_mem32(struct octeon_device *oct, u64 core_addr);
55 octeon_write_device_mem32(struct octeon_device *oct,
62 octeon_pci_read_core_mem(struct octeon_device *oct,
70 octeon_pci_write_core_mem(struct octeon_device *oct,
octeon_droq.h
366 octeon_register_droq_ops(struct octeon_device *oct,
378 int octeon_unregister_droq_ops(struct octeon_device *oct, u32 q_no);
392 int octeon_register_dispatch_fn(struct octeon_device *oct,
407 int octeon_unregister_dispatch_fn(struct octeon_device *oct,
413 u32 octeon_droq_check_hw_for_pkts(struct octeon_device *oct,
416 int octeon_create_droq(struct octeon_device *oct, u32 q_no,
419 int octeon_droq_process_packets(struct octeon_device *oct,
423 int octeon_process_droq_poll_cmd(struct octeon_device *oct, u32 q_no,
octeon_nic.h
120 static inline int octnet_iq_is_full(struct octeon_device *oct, u32 q_no) in octnet_iq_is_full() argument
122 return ((u32)atomic_read(&oct->instr_queue[q_no]->instr_pending) in octnet_iq_is_full()
123 >= (oct->instr_queue[q_no]->max_count - 2)); in octnet_iq_is_full()
200 octeon_alloc_soft_command_resp(struct octeon_device *oct,
211 int octnet_send_nic_data_pkt(struct octeon_device *oct,
223 octnet_send_nic_ctrl_pkt(struct octeon_device *oct,
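
octnet_iq_is_full() above compares the queue's instr_pending counter against max_count - 2, so callers can back off before posting another command. A sketch of that check at a transmit-style call site; the busy handling is illustrative.

/* Back off when the instruction queue has no room for another command. */
static int example_try_post(struct octeon_device *oct, u32 q_no)
{
	if (octnet_iq_is_full(oct, q_no))
		return -EBUSY;	/* caller retries or stops the queue */

	/* ... build and post the command on queue q_no ... */
	return 0;
}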
cn68xx_device.h
30 int lio_setup_cn68xx_octeon_device(struct octeon_device *oct);
31 int lio_is_210nv(struct octeon_device *oct);
/linux-4.4.14/arch/mips/mm/
Makefile
25 obj-$(CONFIG_CPU_CAVIUM_OCTEON) += c-octeon.o cex-oct.o tlb-r4k.o
/linux-4.4.14/Documentation/input/
walkera0701.txt
78 24 bin+oct values + 1 bin value = 24*4+1 bits = 97 bits
/linux-4.4.14/arch/x86/crypto/sha-mb/
sha1_x8_avx2.S
58 ## code to compute oct SHA1 using SSE-256