seg_count          72 drivers/char/agp/compat_ioctl.c 	if ((unsigned) ureserve.seg_count >= ~0U/sizeof(struct agp_segment32))
seg_count          76 drivers/char/agp/compat_ioctl.c 	kreserve.seg_count = ureserve.seg_count;
seg_count          80 drivers/char/agp/compat_ioctl.c 	if (kreserve.seg_count == 0) {
seg_count          98 drivers/char/agp/compat_ioctl.c 		if (ureserve.seg_count >= 16384)
seg_count         101 drivers/char/agp/compat_ioctl.c 		usegment = kmalloc_array(ureserve.seg_count,
seg_count         107 drivers/char/agp/compat_ioctl.c 		ksegment = kmalloc_array(kreserve.seg_count,
seg_count         116 drivers/char/agp/compat_ioctl.c 				   sizeof(*usegment) * ureserve.seg_count)) {
seg_count         122 drivers/char/agp/compat_ioctl.c 		for (seg = 0; seg < ureserve.seg_count; seg++) {
seg_count          66 drivers/char/agp/compat_ioctl.h 	compat_size_t seg_count;	/* number of segments           */
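Taken together, the compat_ioctl.c hits show the 32-bit reserve path: a compat reservation struct whose seg_count is a compat_size_t (compat_ioctl.h line 66) is copied in, and seg_count is checked against multiplication overflow and a hard cap before anything is allocated from it (lines 72 and 98). A minimal sketch of just those guards, with cut-down stand-in structs in place of the real 32-bit layouts:

	#include <linux/compat.h>
	#include <linux/errno.h>

	/* Cut-down stand-ins for the real 32-bit layouts; illustrative only. */
	struct agp_segment32_sketch {
		compat_off_t  pg_start;
		compat_size_t pg_count;
		compat_int_t  prot;
	};

	struct agp_region32_sketch {
		compat_pid_t  pid;
		compat_size_t seg_count;
	};

	static int check_compat_reserve(const struct agp_region32_sketch *ureserve)
	{
		/* seg_count * sizeof(segment) must not wrap a 32-bit size */
		if ((unsigned int)ureserve->seg_count >=
		    ~0U / sizeof(struct agp_segment32_sketch))
			return -EFAULT;

		/* a huge but non-wrapping count is still refused outright */
		if (ureserve->seg_count >= 16384)
			return -EINVAL;

		return 0;
	}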
seg_count         170 drivers/char/agp/frontend.c 	seg = kzalloc((sizeof(struct agp_segment_priv) * region->seg_count), GFP_KERNEL);
seg_count         178 drivers/char/agp/frontend.c 	for (i = 0; i < region->seg_count; i++) {
seg_count         192 drivers/char/agp/frontend.c 	agp_add_seg_to_client(client, ret_seg, region->seg_count);
seg_count         809 drivers/char/agp/frontend.c 	if ((unsigned) reserve.seg_count >= ~0U/sizeof(struct agp_segment))
seg_count         814 drivers/char/agp/frontend.c 	if (reserve.seg_count == 0) {
seg_count         830 drivers/char/agp/frontend.c 		if (reserve.seg_count >= 16384)
seg_count         833 drivers/char/agp/frontend.c 		segment = kmalloc((sizeof(struct agp_segment) * reserve.seg_count),
seg_count         840 drivers/char/agp/frontend.c 				   sizeof(struct agp_segment) * reserve.seg_count)) {
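The frontend.c reserve handler mirrors the compat path without the 32-bit conversion: it rejects wrapping or oversized seg_count values (lines 809 and 830), handles seg_count == 0 as a special case (line 814), and only then allocates and copies seg_count agp_segment entries from user space (lines 833-840); the segment-creation code then allocates a matching array of agp_segment_priv and hands it to the client via agp_add_seg_to_client() (lines 170-192). A sketch of the copy-in step under those checks, using kmalloc_array() instead of the open-coded multiply purely for illustration:

	#include <linux/agpgart.h>
	#include <linux/err.h>
	#include <linux/slab.h>
	#include <linux/uaccess.h>

	static struct agp_segment *copy_in_segments(const struct agp_region *reserve,
						    void __user *seg_list)
	{
		struct agp_segment *segment;

		/* same wrap and size guards as lines 809 and 830 above */
		if ((unsigned int)reserve->seg_count >= ~0U / sizeof(struct agp_segment))
			return ERR_PTR(-EFAULT);
		if (reserve->seg_count >= 16384)
			return ERR_PTR(-EINVAL);

		segment = kmalloc_array(reserve->seg_count, sizeof(*segment),
					GFP_KERNEL);
		if (!segment)
			return ERR_PTR(-ENOMEM);

		if (copy_from_user(segment, seg_list,
				   sizeof(*segment) * reserve->seg_count)) {
			kfree(segment);
			return ERR_PTR(-EFAULT);
		}
		return segment;
	}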
seg_count         677 drivers/gpu/drm/drm_bufs.c 	if (entry->seg_count) {
seg_count         678 drivers/gpu/drm/drm_bufs.c 		for (i = 0; i < entry->seg_count; i++) {
seg_count         685 drivers/gpu/drm/drm_bufs.c 		entry->seg_count = 0;
seg_count         854 drivers/gpu/drm/drm_bufs.c 	dma->seg_count += entry->seg_count;
seg_count         985 drivers/gpu/drm/drm_bufs.c 			entry->seg_count = count;
seg_count         992 drivers/gpu/drm/drm_bufs.c 		entry->seglist[entry->seg_count++] = dmah;
seg_count        1022 drivers/gpu/drm/drm_bufs.c 				entry->seg_count = count;
seg_count        1062 drivers/gpu/drm/drm_bufs.c 	dma->seg_count += entry->seg_count;
seg_count        1063 drivers/gpu/drm/drm_bufs.c 	dma->page_count += entry->seg_count << page_order;
seg_count        1064 drivers/gpu/drm/drm_bufs.c 	dma->byte_count += PAGE_SIZE * (entry->seg_count << page_order);
seg_count        1217 drivers/gpu/drm/drm_bufs.c 	dma->seg_count += entry->seg_count;
seg_count          95 drivers/gpu/drm/drm_dma.c 		if (dma->bufs[i].seg_count) {
seg_count         100 drivers/gpu/drm/drm_dma.c 				  dma->bufs[i].seg_count);
seg_count         101 drivers/gpu/drm/drm_dma.c 			for (j = 0; j < dma->bufs[i].seg_count; j++) {
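In the legacy DRM buffer code, seg_count counts the DMA handles stored in entry->seglist (one dmah per slot, line 992), and each segment spans 1 << page_order pages; that is why lines 1062-1064 add seg_count, seg_count << page_order, and PAGE_SIZE * (seg_count << page_order) to the per-device totals, and why teardown loops over seg_count before zeroing it (lines 677-685, 95-101). A hypothetical helper showing just that accounting arithmetic:

	#include <linux/mm.h>	/* PAGE_SIZE */

	/* Hypothetical running totals standing in for struct drm_device_dma. */
	struct seg_totals_sketch {
		int	      seg_count;
		int	      page_count;
		unsigned long byte_count;
	};

	/*
	 * Fold one buffer entry's segments into the running totals, mirroring
	 * the arithmetic at drm_bufs.c lines 1062-1064: every segment covers
	 * (1 << page_order) pages of PAGE_SIZE bytes each.
	 */
	static void account_entry(struct seg_totals_sketch *dma,
				  int entry_seg_count, int page_order)
	{
		dma->seg_count  += entry_seg_count;
		dma->page_count += entry_seg_count << page_order;
		dma->byte_count += PAGE_SIZE * (entry_seg_count << page_order);
	}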
seg_count        1024 drivers/infiniband/core/mad.c 		seg->num = ++send_buf->seg_count;
seg_count        1186 drivers/infiniband/core/mad.c 	if (mad_send_wr->send_buf.seg_count)
seg_count         588 drivers/infiniband/core/mad_rmpp.c 		paylen = (mad_send_wr->send_buf.seg_count *
seg_count         593 drivers/infiniband/core/mad_rmpp.c 	if (mad_send_wr->seg_num == mad_send_wr->send_buf.seg_count) {
seg_count         619 drivers/infiniband/core/mad_rmpp.c 	if ((mad_send_wr->last_ack == mad_send_wr->send_buf.seg_count) ||
seg_count         688 drivers/infiniband/core/mad_rmpp.c 	if ((mad_send_wr->last_ack == mad_send_wr->send_buf.seg_count) &&
seg_count         695 drivers/infiniband/core/mad_rmpp.c 	if ((mad_send_wr->last_ack == mad_send_wr->send_buf.seg_count) ||
seg_count         699 drivers/infiniband/core/mad_rmpp.c 	if (seg_num > mad_send_wr->send_buf.seg_count ||
seg_count         715 drivers/infiniband/core/mad_rmpp.c 	if (mad_send_wr->last_ack == mad_send_wr->send_buf.seg_count) {
seg_count         737 drivers/infiniband/core/mad_rmpp.c 		   mad_send_wr->seg_num < mad_send_wr->send_buf.seg_count) {
seg_count         930 drivers/infiniband/core/mad_rmpp.c 	if (mad_send_wr->last_ack == mad_send_wr->send_buf.seg_count) {
seg_count         937 drivers/infiniband/core/mad_rmpp.c 	    mad_send_wr->seg_num == mad_send_wr->send_buf.seg_count)
seg_count         958 drivers/infiniband/core/mad_rmpp.c 	if (mad_send_wr->last_ack == mad_send_wr->send_buf.seg_count)
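For RMPP sends in the MAD layer, seg_count is the total number of RMPP segments built for a send buffer: each segment is given a 1-based number as it is queued (mad.c line 1024), line 588 derives the advertised payload length from seg_count whole segment sizes, and the many comparisons against seg_num and last_ack decide when the last segment has been sent and acknowledged. A hedged sketch of that arithmetic, with parameter names assumed for illustration (the tail-padding subtraction is part of the sketch, not quoted from line 588):

	/*
	 * Illustrative only: how many segments a payload of data_len bytes
	 * needs at seg_size bytes per segment, and why "seg_count * seg_size
	 * minus the padding in the last segment" recovers the payload length.
	 */
	static void rmpp_sizes_sketch(int data_len, int seg_size,
				      int *seg_count, int *pad, int *first_paylen)
	{
		*seg_count = (data_len + seg_size - 1) / seg_size; /* round up */
		*pad = *seg_count * seg_size - data_len;	   /* slack in last seg */
		*first_paylen = *seg_count * seg_size - *pad;	   /* == data_len */
	}

	/* the transfer is finished once the last segment is both sent and acked */
	static int rmpp_send_done(int seg_num, int last_ack, int seg_count)
	{
		return seg_num == seg_count && last_ack == seg_count;
	}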
seg_count         169 drivers/memstick/core/mspro_block.c 	unsigned int          seg_count;
seg_count         612 drivers/memstick/core/mspro_block.c 			if (msb->current_seg == msb->seg_count) {
seg_count         702 drivers/memstick/core/mspro_block.c 		msb->seg_count = blk_rq_map_sg(msb->block_req->q,
seg_count         706 drivers/memstick/core/mspro_block.c 		if (!msb->seg_count) {
seg_count         988 drivers/memstick/core/mspro_block.c 	msb->seg_count = 1;
seg_count        1089 drivers/memstick/core/mspro_block.c 		msb->seg_count = 1;
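In mspro_block, seg_count is the number of scatter/gather entries produced by blk_rq_map_sg() for the current block request (line 702), or forced to 1 for single-buffer transfers (lines 988 and 1089); current_seg then walks the entries until it reaches seg_count (line 612). A rough sketch of the mapping step, with the sg table passed in explicitly since the driver's field names are not shown above:

	#include <linux/blkdev.h>
	#include <linux/errno.h>
	#include <linux/scatterlist.h>

	/* Map the current request's data into sg_table, as at line 702 above. */
	static int map_current_request(struct request *req,
				       struct scatterlist *sg_table,
				       unsigned int *seg_count)
	{
		*seg_count = blk_rq_map_sg(req->q, req, sg_table);
		if (!*seg_count)
			return -EIO;	/* nothing mapped: caller fails the request */
		return 0;
	}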
seg_count         276 drivers/net/ethernet/intel/ice/ice_flex_pipe.c 	for (i = 0; i < le32_to_cpu(pkg_hdr->seg_count); i++) {
seg_count         540 drivers/net/ethernet/intel/ice/ice_flex_pipe.c 	u32 seg_count;
seg_count         553 drivers/net/ethernet/intel/ice/ice_flex_pipe.c 	seg_count = le32_to_cpu(pkg->seg_count);
seg_count         554 drivers/net/ethernet/intel/ice/ice_flex_pipe.c 	if (seg_count < 1)
seg_count         558 drivers/net/ethernet/intel/ice/ice_flex_pipe.c 	if (len < sizeof(*pkg) + ((seg_count - 1) * sizeof(pkg->seg_offset)))
seg_count         562 drivers/net/ethernet/intel/ice/ice_flex_pipe.c 	for (i = 0; i < seg_count; i++) {
seg_count          21 drivers/net/ethernet/intel/ice/ice_flex_type.h 	__le32 seg_count;
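The ice driver stores seg_count in its package header as a little-endian value (ice_flex_type.h line 21), always converting with le32_to_cpu() before use (lines 276 and 553), and refuses to walk the segment offset table unless the count is at least 1 and the buffer is long enough to hold that many offsets; the header already reserves room for one offset, hence the seg_count - 1 term on line 558. A sketch of that length check against a cut-down header layout (the real struct has more fields):

	#include <linux/types.h>
	#include <asm/byteorder.h>

	/* Cut-down stand-in for the package header; illustrative only. */
	struct pkg_hdr_sketch {
		__le32 seg_count;
		__le32 seg_offset[1];	/* declared with room for one offset */
	};

	static bool pkg_hdr_len_ok(const struct pkg_hdr_sketch *pkg, u32 len)
	{
		u32 seg_count = le32_to_cpu(pkg->seg_count);

		if (seg_count < 1)
			return false;

		/* one seg_offset is inside sizeof(*pkg); the rest must fit in len */
		if (len < sizeof(*pkg) +
			  (seg_count - 1) * sizeof(pkg->seg_offset[0]))
			return false;

		return true;
	}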
seg_count        1939 drivers/net/ethernet/qlogic/qla3xxx.c 	if (tx_cb->seg_count == 0) {
seg_count        1951 drivers/net/ethernet/qlogic/qla3xxx.c 	tx_cb->seg_count--;
seg_count        1952 drivers/net/ethernet/qlogic/qla3xxx.c 	if (tx_cb->seg_count) {
seg_count        1953 drivers/net/ethernet/qlogic/qla3xxx.c 		for (i = 1; i < tx_cb->seg_count; i++) {
seg_count        2319 drivers/net/ethernet/qlogic/qla3xxx.c 	seg_cnt = tx_cb->seg_count;
seg_count        2475 drivers/net/ethernet/qlogic/qla3xxx.c 	tx_cb->seg_count = ql_get_seg_count(qdev,
seg_count        2477 drivers/net/ethernet/qlogic/qla3xxx.c 	if (tx_cb->seg_count == -1) {
seg_count        3650 drivers/net/ethernet/qlogic/qla3xxx.c 				for (j = 1; j < tx_cb->seg_count; j++) {
seg_count        1039 drivers/net/ethernet/qlogic/qla3xxx.h 	int seg_count;
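For qla3xxx transmits, seg_count is the number of DMA mappings held by a tx control block: ql_get_seg_count() computes it when the skb is mapped and signals failure with -1 (checked at line 2477), a value of 0 at completion time is treated as a bug (line 1939), and both the completion and teardown paths unmap the remaining slots starting at index 1 after handling slot 0 (lines 1953 and 3650), consistent with slot 0 covering the skb's linear data and the rest covering page fragments. A rough sketch of that unmap pattern under those assumptions, with a hypothetical per-mapping record in place of the driver's own map array:

	#include <linux/dma-mapping.h>

	/* Hypothetical per-mapping record; the driver keeps its own array. */
	struct tx_map_sketch {
		dma_addr_t addr;
		size_t	   len;
	};

	static void unmap_tx_cb(struct device *dev, struct tx_map_sketch *map,
				int seg_count)
	{
		int i;

		if (seg_count == 0)
			return;		/* nothing was mapped for this tx_cb */

		/* slot 0 is assumed to be the skb's linear area ... */
		dma_unmap_single(dev, map[0].addr, map[0].len, DMA_TO_DEVICE);

		/* ... the remaining slots are assumed to be page fragments */
		for (i = 1; i < seg_count; i++)
			dma_unmap_page(dev, map[i].addr, map[i].len, DMA_TO_DEVICE);
	}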
seg_count          93 include/drm/drm_legacy.h 	int seg_count;
seg_count         109 include/drm/drm_legacy.h 	int seg_count;
seg_count          67 include/linux/agpgart.h 	size_t seg_count;	/* number of segments           */
seg_count         497 include/rdma/ib_mad.h 	int			seg_count;
seg_count          88 include/uapi/linux/agpgart.h 	__kernel_size_t seg_count;	/* number of segments   */