Lines Matching refs:count

200 fibctx->count = 0; in open_getadapter_fib()
299 fibctx->count--; in next_getadapter_fib()
356 fibctx->count--; in aac_close_fib_context()
508 if(copy_from_user(&fibsize, &user_srb->count,sizeof(u32))){ in aac_send_raw_srb()
560 if (user_srbcmd->sg.count > ARRAY_SIZE(sg_list)) { in aac_send_raw_srb()
562 le32_to_cpu(srbcmd->sg.count))); in aac_send_raw_srb()
567 ((user_srbcmd->sg.count & 0xff) * sizeof(struct sgentry)); in aac_send_raw_srb()
568 actual_fibsize64 = actual_fibsize + (user_srbcmd->sg.count & 0xff) * in aac_send_raw_srb()
576 actual_fibsize, actual_fibsize64, user_srbcmd->sg.count, in aac_send_raw_srb()
582 if ((data_dir == DMA_NONE) && user_srbcmd->sg.count) { in aac_send_raw_srb()
597 for (i = 0; i < upsg->count; i++) { in aac_send_raw_srb()
600 if (upsg->sg[i].count > in aac_send_raw_srb()
609 p = kmalloc(upsg->sg[i].count,GFP_KERNEL|__GFP_DMA); in aac_send_raw_srb()
612 upsg->sg[i].count,i,upsg->count)); in aac_send_raw_srb()
623 if(copy_from_user(p,sg_user[i],upsg->sg[i].count)){ in aac_send_raw_srb()
629 addr = pci_map_single(dev->pdev, p, upsg->sg[i].count, data_dir); in aac_send_raw_srb()
633 byte_count += upsg->sg[i].count; in aac_send_raw_srb()
634 psg->sg[i].count = cpu_to_le32(upsg->sg[i].count); in aac_send_raw_srb()
649 for (i = 0; i < usg->count; i++) { in aac_send_raw_srb()
652 if (usg->sg[i].count > in aac_send_raw_srb()
662 p = kmalloc(usg->sg[i].count,GFP_KERNEL|__GFP_DMA); in aac_send_raw_srb()
665 usg->sg[i].count,i,usg->count)); in aac_send_raw_srb()
675 if(copy_from_user(p,sg_user[i],upsg->sg[i].count)){ in aac_send_raw_srb()
682 addr = pci_map_single(dev->pdev, p, usg->sg[i].count, data_dir); in aac_send_raw_srb()
686 byte_count += usg->sg[i].count; in aac_send_raw_srb()
687 psg->sg[i].count = cpu_to_le32(usg->sg[i].count); in aac_send_raw_srb()
691 srbcmd->count = cpu_to_le32(byte_count); in aac_send_raw_srb()
692 if (user_srbcmd->sg.count) in aac_send_raw_srb()
693 psg->count = cpu_to_le32(sg_indx+1); in aac_send_raw_srb()
695 psg->count = 0; in aac_send_raw_srb()
703 for (i = 0; i < upsg->count; i++) { in aac_send_raw_srb()
706 if (usg->sg[i].count > in aac_send_raw_srb()
715 p = kmalloc(usg->sg[i].count,GFP_KERNEL|__GFP_DMA); in aac_send_raw_srb()
718 usg->sg[i].count,i,usg->count)); in aac_send_raw_srb()
729 if(copy_from_user(p,sg_user[i],usg->sg[i].count)){ in aac_send_raw_srb()
735 addr = pci_map_single(dev->pdev, p, usg->sg[i].count, data_dir); in aac_send_raw_srb()
738 byte_count += usg->sg[i].count; in aac_send_raw_srb()
739 psg->sg[i].count = cpu_to_le32(usg->sg[i].count); in aac_send_raw_srb()
742 for (i = 0; i < upsg->count; i++) { in aac_send_raw_srb()
745 if (upsg->sg[i].count > in aac_send_raw_srb()
753 p = kmalloc(upsg->sg[i].count, GFP_KERNEL); in aac_send_raw_srb()
756 upsg->sg[i].count, i, upsg->count)); in aac_send_raw_srb()
766 upsg->sg[i].count)) { in aac_send_raw_srb()
773 upsg->sg[i].count, data_dir); in aac_send_raw_srb()
776 byte_count += upsg->sg[i].count; in aac_send_raw_srb()
777 psg->sg[i].count = cpu_to_le32(upsg->sg[i].count); in aac_send_raw_srb()
780 srbcmd->count = cpu_to_le32(byte_count); in aac_send_raw_srb()
781 if (user_srbcmd->sg.count) in aac_send_raw_srb()
782 psg->count = cpu_to_le32(sg_indx+1); in aac_send_raw_srb()
784 psg->count = 0; in aac_send_raw_srb()
802 ? ((struct sgmap64*)&srbcmd->sg)->sg[i].count in aac_send_raw_srb()
803 : srbcmd->sg.sg[i].count); in aac_send_raw_srb()
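
The aac_send_raw_srb() references above all trace one pattern: the user-supplied sg.count is bounds-checked against ARRAY_SIZE(sg_list), each sg[i].count is checked against an upper transfer limit, a kernel buffer of that size is allocated and filled with copy_from_user(), the buffer is DMA-mapped with pci_map_single(), and the per-entry and total byte counts are written back into the firmware SRB as little-endian values. The fragment below is a simplified, hypothetical sketch of that per-entry loop for the 32-bit sgmap case only; it is not the driver's exact code, and MAX_XFER_LEN is a placeholder for the limit the listing truncates.

/*
 * Sketch of the per-entry sg[i].count handling referenced above.
 * Assumes upsg (user sgmap), psg (SRB sgmap), sg_user[], sg_list[],
 * byte_count, data_dir, and the cleanup label exist as in the driver.
 * MAX_XFER_LEN is a placeholder, not a driver identifier.
 */
for (i = 0; i < upsg->count; i++) {
	dma_addr_t addr;
	void *p;

	if (upsg->sg[i].count > MAX_XFER_LEN) {	/* reject oversized entry */
		rcode = -EINVAL;
		goto cleanup;
	}
	p = kmalloc(upsg->sg[i].count, GFP_KERNEL);
	if (!p) {
		rcode = -ENOMEM;
		goto cleanup;
	}
	sg_user[i] = (void __user *)(uintptr_t)upsg->sg[i].addr;
	sg_list[i] = p;				/* kept for later kfree() */

	if (copy_from_user(p, sg_user[i], upsg->sg[i].count)) {
		rcode = -EFAULT;
		goto cleanup;
	}
	addr = pci_map_single(dev->pdev, p, upsg->sg[i].count, data_dir);

	psg->sg[i].addr = cpu_to_le32(addr);
	psg->sg[i].count = cpu_to_le32(upsg->sg[i].count);
	byte_count += upsg->sg[i].count;
}
srbcmd->count = cpu_to_le32(byte_count);	/* total transfer length */
psg->count = cpu_to_le32(upsg->count ? i : 0);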