cell_size          62 arch/arm/boot/compressed/atags_to_fdt.c 	uint32_t cell_size = 1;
cell_size          66 arch/arm/boot/compressed/atags_to_fdt.c 		cell_size = fdt32_to_cpu(*size_len);
cell_size          67 arch/arm/boot/compressed/atags_to_fdt.c 	return cell_size;
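
The three atags_to_fdt.c hits above all belong to one small helper that reads "#size-cells" from the FDT root node, defaulting to one cell when the property is absent. Reassembled from those lines (a sketch; surrounding context is approximate):

	static uint32_t get_cell_size(const void *fdt)
	{
		int len;
		uint32_t cell_size = 1;
		const __be32 *size_len = fdt_getprop(fdt, 0, "#size-cells", &len);

		if (size_len)
			cell_size = fdt32_to_cpu(*size_len);
		return cell_size;
	}
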
cell_size          32 drivers/net/ethernet/huawei/hinic/hinic_hw_api_cmd.c #define API_CMD_CELL_SIZE(cell_size)            \
cell_size          33 drivers/net/ethernet/huawei/hinic/hinic_hw_api_cmd.c 		(((cell_size) >= API_CMD_CELL_SIZE_MIN) ? \
cell_size          34 drivers/net/ethernet/huawei/hinic/hinic_hw_api_cmd.c 		 (1 << (fls(cell_size - 1))) : API_CMD_CELL_SIZE_MIN)
cell_size         450 drivers/net/ethernet/huawei/hinic/hinic_hw_api_cmd.c 	u16 cell_size;
cell_size         455 drivers/net/ethernet/huawei/hinic/hinic_hw_api_cmd.c 	cell_size = API_CMD_CELL_SIZE_VAL(chain->cell_size);
cell_size         467 drivers/net/ethernet/huawei/hinic/hinic_hw_api_cmd.c 		HINIC_API_CMD_CHAIN_CTRL_SET(cell_size, CELL_SIZE);
cell_size         657 drivers/net/ethernet/huawei/hinic/hinic_hw_api_cmd.c 	node = dma_alloc_coherent(&pdev->dev, chain->cell_size, &node_paddr,
cell_size         697 drivers/net/ethernet/huawei/hinic/hinic_hw_api_cmd.c 	dma_free_coherent(&pdev->dev, chain->cell_size, node, node_paddr);
cell_size         720 drivers/net/ethernet/huawei/hinic/hinic_hw_api_cmd.c 	node_size = chain->cell_size;
cell_size         803 drivers/net/ethernet/huawei/hinic/hinic_hw_api_cmd.c 	chain->cell_size = attr->cell_size;
cell_size         922 drivers/net/ethernet/huawei/hinic/hinic_hw_api_cmd.c 	attr.cell_size  = API_CMD_CELL_SIZE(hw_cell_sz);
cell_size         126 drivers/net/ethernet/huawei/hinic/hinic_hw_api_cmd.h 	u16                             cell_size;
cell_size         173 drivers/net/ethernet/huawei/hinic/hinic_hw_api_cmd.h 	u16                             cell_size;
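
The hinic_hw_api_cmd.c macro at lines 32-34 rounds a requested cell size up to the next power of two and clamps anything below API_CMD_CELL_SIZE_MIN to that minimum; the result is later stored in chain->cell_size and programmed into the chain control register. A standalone illustration of the same rounding (the helper name and sample values are hypothetical):

	#include <linux/bitops.h>	/* fls() */

	static u16 round_cell_size(u16 requested, u16 min)
	{
		/* at or above the minimum: round up to the next power of two
		 * via 1 << fls(n - 1); below it: clamp to the minimum size */
		return (requested >= min) ? (1 << fls(requested - 1)) : min;
	}

	/* e.g. round_cell_size(40, 32) == 64, round_cell_size(16, 32) == 32 */
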
cell_size         102 drivers/net/ethernet/mellanox/mlxsw/spectrum_buffers.c 	u32 cell_size;
cell_size         125 drivers/net/ethernet/mellanox/mlxsw/spectrum_buffers.c 	return mlxsw_sp->sb->cell_size * cells;
cell_size         130 drivers/net/ethernet/mellanox/mlxsw/spectrum_buffers.c 	return DIV_ROUND_UP(bytes, mlxsw_sp->sb->cell_size);
cell_size         916 drivers/net/ethernet/mellanox/mlxsw/spectrum_buffers.c 	mlxsw_sp->sb->cell_size = MLXSW_CORE_RES_GET(mlxsw_sp->core, CELL_SIZE);
cell_size         923 drivers/net/ethernet/mellanox/mlxsw/spectrum_buffers.c 						mlxsw_sp->sb->cell_size;
cell_size         999 drivers/net/ethernet/mellanox/mlxsw/spectrum_buffers.c 	pool_info->cell_size = mlxsw_sp->sb->cell_size;
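
In spectrum_buffers.c, cell_size is the switch's internal buffer cell size, read from the CELL_SIZE device resource at init (line 916) and used to convert between bytes and cells in both directions (lines 125 and 130). The two conversions restated as standalone helpers (a sketch, with the cell size passed explicitly):

	static u32 cells_to_bytes(u32 cell_size, u32 cells)
	{
		return cell_size * cells;
	}

	static u32 bytes_to_cells(u32 cell_size, u32 bytes)
	{
		/* round up: a partial cell still occupies a whole cell */
		return DIV_ROUND_UP(bytes, cell_size);
	}
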
cell_size          51 drivers/net/ethernet/netronome/nfp/nfp_shared_buf.c 	pool_info->cell_size = unit_size;
cell_size        1221 drivers/net/ethernet/ni/nixge.c 	size_t cell_size;
cell_size        1228 drivers/net/ethernet/ni/nixge.c 	mac = nvmem_cell_read(cell, &cell_size);
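
The nixge.c hits use cell_size only as the output length of nvmem_cell_read(), which returns a kmalloc'd copy of the cell contents (here a MAC address). The usual pattern looks roughly like this (a sketch; the cell name and error handling are illustrative):

	struct nvmem_cell *cell;
	size_t cell_size;
	u8 *mac;

	cell = nvmem_cell_get(dev, "address");
	if (IS_ERR(cell))
		return ERR_CAST(cell);

	mac = nvmem_cell_read(cell, &cell_size);	/* caller must kfree(); length in cell_size */
	nvmem_cell_put(cell);
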
cell_size          52 drivers/of/fdt.c 	int cell_size;
cell_size          68 drivers/of/fdt.c 	cell_size = sizeof(uint32_t)*(nr_address_cells + nr_size_cells);
cell_size          73 drivers/of/fdt.c 		if (len > limit*cell_size) {
cell_size          74 drivers/of/fdt.c 			len = limit*cell_size;
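
In drivers/of/fdt.c, cell_size is the size in bytes of one (address, size) pair in the memory node's "reg" property, so the property can be truncated to at most limit entries. For example, with #address-cells = 2 and #size-cells = 2, cell_size = 4 * (2 + 2) = 16 bytes per entry, and a limit of 8 banks caps len at 128 bytes (values chosen for illustration).
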
cell_size          96 include/net/devlink.h 	u32 cell_size;
cell_size         619 include/uapi/linux/pkt_sched.h 	__u32	cell_size;
cell_size         962 net/core/devlink.c 			pool_info.cell_size))
cell_size          98 net/sched/sch_netem.c 	u32 cell_size;
cell_size         349 net/sched/sch_netem.c 	if (q->cell_size) {
cell_size         352 net/sched/sch_netem.c 		if (len > cells * q->cell_size)	/* extra cell needed for remainder */
cell_size         354 net/sched/sch_netem.c 		len = cells * (q->cell_size + q->cell_overhead);
cell_size         855 net/sched/sch_netem.c 	q->cell_size = r->cell_size;
cell_size         857 net/sched/sch_netem.c 	if (q->cell_size)
cell_size         858 net/sched/sch_netem.c 		q->cell_size_reciprocal = reciprocal_value(q->cell_size);
cell_size        1184 net/sched/sch_netem.c 	rate.cell_size = q->cell_size;
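
The sch_netem.c hits implement netem's cell model: cell_size comes from the tc_netem_rate netlink attribute (hence the matching __u32 field in pkt_sched.h), a reciprocal is precomputed at configure time (line 858), and the packet length is rounded up to whole cells with per-cell overhead added when computing transmit time. Reassembled from the lines at 349-354 (a sketch; the surrounding function is omitted):

	if (q->cell_size) {
		u32 cells = reciprocal_divide(len, q->cell_size_reciprocal);

		if (len > cells * q->cell_size)	/* extra cell needed for remainder */
			cells++;
		len = cells * (q->cell_size + q->cell_overhead);
	}
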
cell_size         618 tools/include/uapi/linux/pkt_sched.h 	__u32	cell_size;