Lines Matching refs:srb
335 struct ScsiReqBlk srb; member
344 static void data_out_phase0(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb,
346 static void data_in_phase0(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb,
348 static void command_phase0(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb,
350 static void status_phase0(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb,
352 static void msgout_phase0(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb,
354 static void msgin_phase0(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb,
356 static void data_out_phase1(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb,
358 static void data_in_phase1(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb,
360 static void command_phase1(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb,
362 static void status_phase1(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb,
364 static void msgout_phase1(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb,
366 static void msgin_phase1(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb,
368 static void nop0(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb,
370 static void nop1(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb,
374 struct ScsiReqBlk *srb);
377 struct ScsiReqBlk *srb, u16 io_dir);
381 struct ScsiReqBlk *srb);
383 struct ScsiReqBlk *srb);
385 struct ScsiReqBlk *srb);
389 static void pci_unmap_srb(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb);
391 struct ScsiReqBlk *srb);
393 struct ScsiReqBlk *srb);
395 struct ScsiReqBlk *srb);
736 static void free_tag(struct DeviceCtlBlk *dcb, struct ScsiReqBlk *srb) in free_tag() argument
738 if (srb->tag_number < 255) { in free_tag()
739 dcb->tag_mask &= ~(1 << srb->tag_number); /* free tag mask */ in free_tag()
740 srb->tag_number = 255; in free_tag()
760 struct ScsiReqBlk *srb = NULL; in srb_get_free() local
763 srb = list_entry(head->next, struct ScsiReqBlk, list); in srb_get_free()
765 dprintkdbg(DBG_0, "srb_get_free: srb=%p\n", srb); in srb_get_free()
767 return srb; in srb_get_free()
771 static void srb_free_insert(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb) in srb_free_insert() argument
773 dprintkdbg(DBG_0, "srb_free_insert: srb=%p\n", srb); in srb_free_insert()
774 list_add_tail(&srb->list, &acb->srb_free_list); in srb_free_insert()
779 struct ScsiReqBlk *srb) in srb_waiting_insert() argument
782 srb->cmd, dcb->target_id, dcb->target_lun, srb); in srb_waiting_insert()
783 list_add(&srb->list, &dcb->srb_waiting_list); in srb_waiting_insert()
788 struct ScsiReqBlk *srb) in srb_waiting_append() argument
791 srb->cmd, dcb->target_id, dcb->target_lun, srb); in srb_waiting_append()
792 list_add_tail(&srb->list, &dcb->srb_waiting_list); in srb_waiting_append()
796 static void srb_going_append(struct DeviceCtlBlk *dcb, struct ScsiReqBlk *srb) in srb_going_append() argument
799 srb->cmd, dcb->target_id, dcb->target_lun, srb); in srb_going_append()
800 list_add_tail(&srb->list, &dcb->srb_going_list); in srb_going_append()
804 static void srb_going_remove(struct DeviceCtlBlk *dcb, struct ScsiReqBlk *srb) in srb_going_remove() argument
809 srb->cmd, dcb->target_id, dcb->target_lun, srb); in srb_going_remove()
812 if (i == srb) { in srb_going_remove()
813 list_del(&srb->list); in srb_going_remove()
820 struct ScsiReqBlk *srb) in srb_waiting_remove() argument
825 srb->cmd, dcb->target_id, dcb->target_lun, srb); in srb_waiting_remove()
828 if (i == srb) { in srb_waiting_remove()
829 list_del(&srb->list); in srb_waiting_remove()
836 struct ScsiReqBlk *srb) in srb_going_to_waiting_move() argument
840 srb->cmd, dcb->target_id, dcb->target_lun, srb); in srb_going_to_waiting_move()
841 list_move(&srb->list, &dcb->srb_waiting_list); in srb_going_to_waiting_move()
846 struct ScsiReqBlk *srb) in srb_waiting_to_going_move() argument
850 srb->cmd, dcb->target_id, dcb->target_lun, srb); in srb_waiting_to_going_move()
851 list_move(&srb->list, &dcb->srb_going_list); in srb_waiting_to_going_move()
878 struct ScsiReqBlk *srb; in waiting_process_next() local
924 srb = list_entry(waiting_list_head->next, in waiting_process_next()
928 if (!start_scsi(acb, pos, srb)) in waiting_process_next()
929 srb_waiting_to_going_move(pos, srb); in waiting_process_next()
959 static void send_srb(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb) in send_srb() argument
961 struct DeviceCtlBlk *dcb = srb->dcb; in send_srb()
966 srb_waiting_append(dcb, srb); in send_srb()
971 if (!start_scsi(acb, dcb, srb)) in send_srb()
972 srb_going_append(dcb, srb); in send_srb()
974 srb_waiting_insert(dcb, srb); in send_srb()
981 struct ScsiReqBlk *srb) in build_srb() argument
988 srb->dcb = dcb; in build_srb()
989 srb->cmd = cmd; in build_srb()
990 srb->sg_count = 0; in build_srb()
991 srb->total_xfer_length = 0; in build_srb()
992 srb->sg_bus_addr = 0; in build_srb()
993 srb->sg_index = 0; in build_srb()
994 srb->adapter_status = 0; in build_srb()
995 srb->target_status = 0; in build_srb()
996 srb->msg_count = 0; in build_srb()
997 srb->status = 0; in build_srb()
998 srb->flag = 0; in build_srb()
999 srb->state = 0; in build_srb()
1000 srb->retry_count = 0; in build_srb()
1001 srb->tag_number = TAG_NONE; in build_srb()
1002 srb->scsi_phase = PH_BUS_FREE; /* initial phase */ in build_srb()
1003 srb->end_message = 0; in build_srb()
1012 srb->segment_x[0].address); in build_srb()
1017 struct SGentry *sgp = srb->segment_x; in build_srb()
1019 srb->sg_count = nseg; in build_srb()
1024 srb->sg_count); in build_srb()
1026 scsi_for_each_sg(cmd, sg, srb->sg_count, i) { in build_srb()
1031 srb->total_xfer_length += seglen; in build_srb()
1033 sgp += srb->sg_count - 1; in build_srb()
1039 if (srb->total_xfer_length > reqlen) { in build_srb()
1040 sgp->length -= (srb->total_xfer_length - reqlen); in build_srb()
1041 srb->total_xfer_length = reqlen; in build_srb()
1046 srb->total_xfer_length % 2) { in build_srb()
1047 srb->total_xfer_length++; in build_srb()
1051 srb->sg_bus_addr = pci_map_single(dcb->acb->dev, in build_srb()
1052 srb->segment_x, in build_srb()
1057 srb->segment_x, srb->sg_bus_addr, SEGMENTX_LEN); in build_srb()
1060 srb->request_length = srb->total_xfer_length; in build_srb()
1086 struct ScsiReqBlk *srb; in dc395x_queue_command_lck() local
1122 srb = srb_get_free(acb); in dc395x_queue_command_lck()
1123 if (!srb) in dc395x_queue_command_lck()
1133 build_srb(cmd, dcb, srb); in dc395x_queue_command_lck()
1137 srb_waiting_append(dcb, srb); in dc395x_queue_command_lck()
1141 send_srb(acb, srb); in dc395x_queue_command_lck()
1192 struct DeviceCtlBlk *dcb, struct ScsiReqBlk *srb) in dump_register_info() argument
1199 if (!srb && dcb) in dump_register_info()
1200 srb = dcb->active_srb; in dump_register_info()
1201 if (srb) { in dump_register_info()
1202 if (!srb->cmd) in dump_register_info()
1204 srb, srb->cmd); in dump_register_info()
1208 srb, srb->cmd, in dump_register_info()
1209 srb->cmd->cmnd[0], srb->cmd->device->id, in dump_register_info()
1210 (u8)srb->cmd->device->lun); in dump_register_info()
1212 srb->segment_x, srb->sg_count, srb->sg_index, in dump_register_info()
1213 srb->total_xfer_length); in dump_register_info()
1215 srb->state, srb->status, srb->scsi_phase, in dump_register_info()
1370 struct ScsiReqBlk *srb; in dc395x_eh_abort() local
1380 srb = find_cmd(cmd, &dcb->srb_waiting_list); in dc395x_eh_abort()
1381 if (srb) { in dc395x_eh_abort()
1382 srb_waiting_remove(dcb, srb); in dc395x_eh_abort()
1383 pci_unmap_srb_sense(acb, srb); in dc395x_eh_abort()
1384 pci_unmap_srb(acb, srb); in dc395x_eh_abort()
1385 free_tag(dcb, srb); in dc395x_eh_abort()
1386 srb_free_insert(acb, srb); in dc395x_eh_abort()
1391 srb = find_cmd(cmd, &dcb->srb_going_list); in dc395x_eh_abort()
1392 if (srb) { in dc395x_eh_abort()
1404 struct ScsiReqBlk *srb) in build_sdtr() argument
1406 u8 *ptr = srb->msgout_buf + srb->msg_count; in build_sdtr()
1407 if (srb->msg_count > 1) { in build_sdtr()
1410 srb->msg_count, srb->msgout_buf[0], in build_sdtr()
1411 srb->msgout_buf[1]); in build_sdtr()
1425 srb->msg_count += 5; in build_sdtr()
1426 srb->state |= SRB_DO_SYNC_NEGO; in build_sdtr()
1432 struct ScsiReqBlk *srb) in build_wdtr() argument
1436 u8 *ptr = srb->msgout_buf + srb->msg_count; in build_wdtr()
1437 if (srb->msg_count > 1) { in build_wdtr()
1440 srb->msg_count, srb->msgout_buf[0], in build_wdtr()
1441 srb->msgout_buf[1]); in build_wdtr()
1448 srb->msg_count += 4; in build_wdtr()
1449 srb->state |= SRB_DO_WIDE_NEGO; in build_wdtr()
1478 struct ScsiReqBlk *srb;
1485 srb = acb->active_dcb->active_srb;
1493 struct ScsiReqBlk* srb) in start_scsi() argument
1499 dcb->target_id, dcb->target_lun, srb); in start_scsi()
1501 srb->tag_number = TAG_NONE; /* acb->tag_max_num: had error read in eeprom */ in start_scsi()
1527 srb->cmd, in start_scsi()
1533 dprintkdbg(DBG_KG, "start_scsi: (0x%p) Failed (busy)\n", srb->cmd); in start_scsi()
1549 srb->scsi_phase = PH_BUS_FREE; /* initial phase */ in start_scsi()
1554 if (srb->flag & AUTO_REQSENSE) in start_scsi()
1557 if (((srb->cmd->cmnd[0] == INQUIRY) in start_scsi()
1558 || (srb->cmd->cmnd[0] == REQUEST_SENSE) in start_scsi()
1559 || (srb->flag & AUTO_REQSENSE)) in start_scsi()
1565 srb->msgout_buf[0] = identify_message; in start_scsi()
1566 srb->msg_count = 1; in start_scsi()
1568 srb->state = SRB_MSGOUT; in start_scsi()
1572 build_wdtr(acb, dcb, srb); in start_scsi()
1578 build_sdtr(acb, dcb, srb); in start_scsi()
1583 build_wdtr(acb, dcb, srb); in start_scsi()
1586 srb->msg_count = 0; in start_scsi()
1592 srb->state = SRB_START_; in start_scsi()
1607 srb->cmd, srb->cmd->device->id, in start_scsi()
1608 (u8)srb->cmd->device->lun); in start_scsi()
1609 srb->state = SRB_READY; in start_scsi()
1618 srb->tag_number = tag_number; in start_scsi()
1620 srb->state = SRB_START_; in start_scsi()
1626 srb->cmd, srb->cmd->device->id, (u8)srb->cmd->device->lun, in start_scsi()
1627 srb->cmd->cmnd[0], srb->tag_number); in start_scsi()
1628 if (srb->flag & AUTO_REQSENSE) { in start_scsi()
1636 ptr = (u8 *)srb->cmd->cmnd; in start_scsi()
1637 for (i = 0; i < srb->cmd->cmd_len; i++) in start_scsi()
1650 srb->cmd, dcb->target_id, dcb->target_lun); in start_scsi()
1651 srb->state = SRB_READY; in start_scsi()
1652 free_tag(dcb, srb); in start_scsi()
1653 srb->msg_count = 0; in start_scsi()
1661 srb->scsi_phase = PH_BUS_FREE; /* initial phase */ in start_scsi()
1662 dcb->active_srb = srb; in start_scsi()
1677 srb->state |= SRB_MSGOUT
1682 struct ScsiReqBlk *srb) in enable_msgout_abort() argument
1684 srb->msgout_buf[0] = ABORT; in enable_msgout_abort()
1685 srb->msg_count = 1; in enable_msgout_abort()
1687 srb->state &= ~SRB_MSGIN; in enable_msgout_abort()
1688 srb->state |= SRB_MSGOUT; in enable_msgout_abort()
1703 struct ScsiReqBlk *srb; in dc395x_handle_interrupt() local
1750 srb = dcb->active_srb; in dc395x_handle_interrupt()
1753 enable_msgout_abort(acb, srb); in dc395x_handle_interrupt()
1757 phase = (u16)srb->scsi_phase; in dc395x_handle_interrupt()
1773 dc395x_statev(acb, srb, &scsi_status); in dc395x_handle_interrupt()
1780 srb->scsi_phase = scsi_status & PHASEMASK; in dc395x_handle_interrupt()
1796 dc395x_statev(acb, srb, &scsi_status); in dc395x_handle_interrupt()
1842 static void msgout_phase0(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb, in msgout_phase0() argument
1845 dprintkdbg(DBG_0, "msgout_phase0: (0x%p)\n", srb->cmd); in msgout_phase0()
1846 if (srb->state & (SRB_UNEXPECT_RESEL + SRB_ABORT_SENT)) in msgout_phase0()
1850 srb->state &= ~SRB_MSGOUT; in msgout_phase0()
1854 static void msgout_phase1(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb, in msgout_phase1() argument
1859 dprintkdbg(DBG_0, "msgout_phase1: (0x%p)\n", srb->cmd); in msgout_phase1()
1862 if (!(srb->state & SRB_MSGOUT)) { in msgout_phase1()
1863 srb->state |= SRB_MSGOUT; in msgout_phase1()
1866 srb->cmd); /* So what ? */ in msgout_phase1()
1868 if (!srb->msg_count) { in msgout_phase1()
1870 srb->cmd); in msgout_phase1()
1876 ptr = (u8 *)srb->msgout_buf; in msgout_phase1()
1877 for (i = 0; i < srb->msg_count; i++) in msgout_phase1()
1879 srb->msg_count = 0; in msgout_phase1()
1880 if (srb->msgout_buf[0] == MSG_ABORT) in msgout_phase1()
1881 srb->state = SRB_ABORT_SENT; in msgout_phase1()
1887 static void command_phase0(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb, in command_phase0() argument
1890 dprintkdbg(DBG_0, "command_phase0: (0x%p)\n", srb->cmd); in command_phase0()
1895 static void command_phase1(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb, in command_phase1() argument
1901 dprintkdbg(DBG_0, "command_phase1: (0x%p)\n", srb->cmd); in command_phase1()
1905 if (!(srb->flag & AUTO_REQSENSE)) { in command_phase1()
1906 ptr = (u8 *)srb->cmd->cmnd; in command_phase1()
1907 for (i = 0; i < srb->cmd->cmd_len; i++) { in command_phase1()
1921 srb->state |= SRB_COMMAND; in command_phase1()
1933 static void sg_verify_length(struct ScsiReqBlk *srb) in sg_verify_length() argument
1937 unsigned idx = srb->sg_index; in sg_verify_length()
1938 struct SGentry *psge = srb->segment_x + idx; in sg_verify_length()
1939 for (; idx < srb->sg_count; psge++, idx++) in sg_verify_length()
1941 if (len != srb->total_xfer_length) in sg_verify_length()
1944 srb->total_xfer_length, len); in sg_verify_length()
1953 static void sg_update_list(struct ScsiReqBlk *srb, u32 left) in sg_update_list() argument
1956 u32 xferred = srb->total_xfer_length - left; /* bytes transferred */ in sg_update_list()
1957 struct SGentry *psge = srb->segment_x + srb->sg_index; in sg_update_list()
1961 xferred, srb->total_xfer_length, left); in sg_update_list()
1967 sg_verify_length(srb); in sg_update_list()
1968 srb->total_xfer_length = left; /* update remaining count */ in sg_update_list()
1969 for (idx = srb->sg_index; idx < srb->sg_count; idx++) { in sg_update_list()
1977 srb->sg_index = idx; in sg_update_list()
1978 pci_dma_sync_single_for_device(srb->dcb-> in sg_update_list()
1980 srb->sg_bus_addr, in sg_update_list()
1987 sg_verify_length(srb); in sg_update_list()
1997 static void sg_subtract_one(struct ScsiReqBlk *srb) in sg_subtract_one() argument
1999 sg_update_list(srb, srb->total_xfer_length - 1); in sg_subtract_one()
2012 struct ScsiReqBlk *srb) in cleanup_after_transfer() argument
2037 static void data_out_phase0(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb, in data_out_phase0() argument
2040 struct DeviceCtlBlk *dcb = srb->dcb; in data_out_phase0()
2044 srb->cmd, srb->cmd->device->id, (u8)srb->cmd->device->lun); in data_out_phase0()
2065 srb->total_xfer_length); in data_out_phase0()
2068 if (!(srb->state & SRB_XFERPAD)) { in data_out_phase0()
2070 srb->status |= PARITY_ERROR; in data_out_phase0()
2109 if (srb->total_xfer_length > DC395x_LASTPIO) in data_out_phase0()
2117 && scsi_bufflen(srb->cmd) % 2) { in data_out_phase0()
2134 srb->total_xfer_length = 0; in data_out_phase0()
2142 srb->total_xfer_length - d_left_counter; in data_out_phase0()
2145 sg_update_list(srb, d_left_counter); in data_out_phase0()
2147 if ((srb->segment_x[srb->sg_index].length == in data_out_phase0()
2148 diff && scsi_sg_count(srb->cmd)) in data_out_phase0()
2155 srb->total_xfer_length - diff; in data_out_phase0()
2156 sg_update_list(srb, d_left_counter); in data_out_phase0()
2165 cleanup_after_transfer(acb, srb); in data_out_phase0()
2170 static void data_out_phase1(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb, in data_out_phase1() argument
2174 srb->cmd, srb->cmd->device->id, (u8)srb->cmd->device->lun); in data_out_phase1()
2177 data_io_transfer(acb, srb, XFERDATAOUT); in data_out_phase1()
2180 static void data_in_phase0(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb, in data_in_phase0() argument
2186 srb->cmd, srb->cmd->device->id, (u8)srb->cmd->device->lun); in data_in_phase0()
2201 if (!(srb->state & SRB_XFERPAD)) { in data_in_phase0()
2207 "Parity Error\n", srb->cmd); in data_in_phase0()
2208 srb->status |= PARITY_ERROR; in data_in_phase0()
2244 << ((srb->dcb->sync_period & WIDE_SYNC) ? 1 : in data_in_phase0()
2251 (srb->dcb->sync_period & WIDE_SYNC) ? "words" : "bytes", in data_in_phase0()
2256 srb->total_xfer_length, d_left_counter); in data_in_phase0()
2260 && srb->total_xfer_length <= DC395x_LASTPIO) { in data_in_phase0()
2261 size_t left_io = srb->total_xfer_length; in data_in_phase0()
2268 (srb->dcb->sync_period & WIDE_SYNC) ? in data_in_phase0()
2270 srb->total_xfer_length); in data_in_phase0()
2271 if (srb->dcb->sync_period & WIDE_SYNC) in data_in_phase0()
2278 size_t offset = srb->request_length - left_io; in data_in_phase0()
2283 base = scsi_kmap_atomic_sg(scsi_sglist(srb->cmd), in data_in_phase0()
2284 srb->sg_count, &offset, &len); in data_in_phase0()
2298 sg_subtract_one(srb); in data_in_phase0()
2312 if (fc == 0x40 && (srb->dcb->sync_period & WIDE_SYNC)) { in data_in_phase0()
2314 if (srb->total_xfer_length > 0) { in data_in_phase0()
2318 srb->total_xfer_length--; in data_in_phase0()
2349 if (srb->dcb->sync_period & WIDE_SYNC) in data_in_phase0()
2371 srb->total_xfer_length = 0; in data_in_phase0()
2373 srb->total_xfer_length = d_left_counter; in data_in_phase0()
2383 sg_update_list(srb, d_left_counter); in data_in_phase0()
2388 cleanup_after_transfer(acb, srb); in data_in_phase0()
2393 static void data_in_phase1(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb, in data_in_phase1() argument
2397 srb->cmd, srb->cmd->device->id, (u8)srb->cmd->device->lun); in data_in_phase1()
2398 data_io_transfer(acb, srb, XFERDATAIN); in data_in_phase1()
2403 struct ScsiReqBlk *srb, u16 io_dir) in data_io_transfer() argument
2405 struct DeviceCtlBlk *dcb = srb->dcb; in data_io_transfer()
2409 srb->cmd, srb->cmd->device->id, (u8)srb->cmd->device->lun, in data_io_transfer()
2411 srb->total_xfer_length, srb->sg_index, srb->sg_count); in data_io_transfer()
2412 if (srb == acb->tmp_srb) in data_io_transfer()
2414 if (srb->sg_index >= srb->sg_count) { in data_io_transfer()
2419 if (srb->total_xfer_length > DC395x_LASTPIO) { in data_io_transfer()
2428 dump_register_info(acb, dcb, srb); in data_io_transfer()
2436 srb->state |= SRB_DATA_XFER; in data_io_transfer()
2438 if (scsi_sg_count(srb->cmd)) { /* with S/G */ in data_io_transfer()
2441 srb->sg_bus_addr + in data_io_transfer()
2443 srb->sg_index); in data_io_transfer()
2446 ((u32)(srb->sg_count - in data_io_transfer()
2447 srb->sg_index) << 3)); in data_io_transfer()
2451 srb->segment_x[0].address); in data_io_transfer()
2453 srb->segment_x[0].length); in data_io_transfer()
2457 srb->total_xfer_length); in data_io_transfer()
2471 else if (srb->total_xfer_length > 0) { /* The last four bytes: Do PIO */ in data_io_transfer()
2476 srb->state |= SRB_DATA_XFER; in data_io_transfer()
2479 srb->total_xfer_length); in data_io_transfer()
2485 int ln = srb->total_xfer_length; in data_io_transfer()
2486 size_t left_io = srb->total_xfer_length; in data_io_transfer()
2488 if (srb->dcb->sync_period & WIDE_SYNC) in data_io_transfer()
2496 size_t offset = srb->request_length - left_io; in data_io_transfer()
2500 base = scsi_kmap_atomic_sg(scsi_sglist(srb->cmd), in data_io_transfer()
2501 srb->sg_count, &offset, &len); in data_io_transfer()
2512 sg_subtract_one(srb); in data_io_transfer()
2518 if (srb->dcb->sync_period & WIDE_SYNC) { in data_io_transfer()
2536 if (srb->sg_count) { in data_io_transfer()
2537 srb->adapter_status = H_OVER_UNDER_RUN; in data_io_transfer()
2538 srb->status |= OVER_RUN; in data_io_transfer()
2569 srb->state |= SRB_XFERPAD; in data_io_transfer()
2578 static void status_phase0(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb, in status_phase0() argument
2582 srb->cmd, srb->cmd->device->id, (u8)srb->cmd->device->lun); in status_phase0()
2583 srb->target_status = DC395x_read8(acb, TRM_S1040_SCSI_FIFO); in status_phase0()
2584 srb->end_message = DC395x_read8(acb, TRM_S1040_SCSI_FIFO); /* get message */ in status_phase0()
2585 srb->state = SRB_COMPLETED; in status_phase0()
2592 static void status_phase1(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb, in status_phase1() argument
2596 srb->cmd, srb->cmd->device->id, (u8)srb->cmd->device->lun); in status_phase1()
2597 srb->state = SRB_STATUS; in status_phase1()
2619 struct ScsiReqBlk *srb) in msgin_reject() argument
2621 srb->msgout_buf[0] = MESSAGE_REJECT; in msgin_reject()
2622 srb->msg_count = 1; in msgin_reject()
2624 srb->state &= ~SRB_MSGIN; in msgin_reject()
2625 srb->state |= SRB_MSGOUT; in msgin_reject()
2627 srb->msgin_buf[0], in msgin_reject()
2628 srb->dcb->target_id, srb->dcb->target_lun); in msgin_reject()
2635 struct ScsiReqBlk *srb = NULL; in msgin_qtag() local
2638 srb->cmd, tag, srb); in msgin_qtag()
2649 srb = i; in msgin_qtag()
2653 if (!srb) in msgin_qtag()
2657 srb->cmd, srb->dcb->target_id, srb->dcb->target_lun); in msgin_qtag()
2660 enable_msgout_abort(acb, srb); in msgin_qtag()
2663 if (!(srb->state & SRB_DISCONNECT)) in msgin_qtag()
2666 memcpy(srb->msgin_buf, dcb->active_srb->msgin_buf, acb->msg_len); in msgin_qtag()
2667 srb->state |= dcb->active_srb->state; in msgin_qtag()
2668 srb->state |= SRB_DATA_XFER; in msgin_qtag()
2669 dcb->active_srb = srb; in msgin_qtag()
2671 return srb; in msgin_qtag()
2674 srb = acb->tmp_srb; in msgin_qtag()
2675 srb->state = SRB_UNEXPECT_RESEL; in msgin_qtag()
2676 dcb->active_srb = srb; in msgin_qtag()
2677 srb->msgout_buf[0] = MSG_ABORT_TAG; in msgin_qtag()
2678 srb->msg_count = 1; in msgin_qtag()
2681 return srb; in msgin_qtag()
2696 static void msgin_set_async(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb) in msgin_set_async() argument
2698 struct DeviceCtlBlk *dcb = srb->dcb; in msgin_set_async()
2707 srb->state &= ~SRB_DO_SYNC_NEGO; in msgin_set_async()
2711 build_wdtr(acb, dcb, srb); in msgin_set_async()
2719 static void msgin_set_sync(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb) in msgin_set_sync() argument
2721 struct DeviceCtlBlk *dcb = srb->dcb; in msgin_set_sync()
2726 dcb->target_id, srb->msgin_buf[3] << 2, in msgin_set_sync()
2727 (250 / srb->msgin_buf[3]), in msgin_set_sync()
2728 ((250 % srb->msgin_buf[3]) * 10) / srb->msgin_buf[3], in msgin_set_sync()
2729 srb->msgin_buf[4]); in msgin_set_sync()
2731 if (srb->msgin_buf[4] > 15) in msgin_set_sync()
2732 srb->msgin_buf[4] = 15; in msgin_set_sync()
2736 dcb->sync_offset = srb->msgin_buf[4]; in msgin_set_sync()
2737 if (srb->msgin_buf[4] > dcb->sync_offset) in msgin_set_sync()
2738 srb->msgin_buf[4] = dcb->sync_offset; in msgin_set_sync()
2740 dcb->sync_offset = srb->msgin_buf[4]; in msgin_set_sync()
2742 while (bval < 7 && (srb->msgin_buf[3] > clock_period[bval] in msgin_set_sync()
2746 if (srb->msgin_buf[3] < clock_period[bval]) in msgin_set_sync()
2750 srb->msgin_buf[3] = clock_period[bval]; in msgin_set_sync()
2753 dcb->min_nego_period = srb->msgin_buf[3]; in msgin_set_sync()
2768 if (!(srb->state & SRB_DO_SYNC_NEGO)) { in msgin_set_sync()
2771 srb->msgin_buf[3] << 2, srb->msgin_buf[4]); in msgin_set_sync()
2773 memcpy(srb->msgout_buf, srb->msgin_buf, 5); in msgin_set_sync()
2774 srb->msg_count = 5; in msgin_set_sync()
2780 build_wdtr(acb, dcb, srb); in msgin_set_sync()
2785 srb->state &= ~SRB_DO_SYNC_NEGO; in msgin_set_sync()
2793 struct ScsiReqBlk *srb) in msgin_set_nowide() argument
2795 struct DeviceCtlBlk *dcb = srb->dcb; in msgin_set_nowide()
2801 srb->state &= ~SRB_DO_WIDE_NEGO; in msgin_set_nowide()
2805 build_sdtr(acb, dcb, srb); in msgin_set_nowide()
2811 static void msgin_set_wide(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb) in msgin_set_wide() argument
2813 struct DeviceCtlBlk *dcb = srb->dcb; in msgin_set_wide()
2818 if (srb->msgin_buf[3] > wide) in msgin_set_wide()
2819 srb->msgin_buf[3] = wide; in msgin_set_wide()
2821 if (!(srb->state & SRB_DO_WIDE_NEGO)) { in msgin_set_wide()
2825 memcpy(srb->msgout_buf, srb->msgin_buf, 4); in msgin_set_wide()
2826 srb->msg_count = 4; in msgin_set_wide()
2827 srb->state |= SRB_DO_WIDE_NEGO; in msgin_set_wide()
2832 if (srb->msgin_buf[3] > 0) in msgin_set_wide()
2836 srb->state &= ~SRB_DO_WIDE_NEGO; in msgin_set_wide()
2840 (8 << srb->msgin_buf[3]), dcb->target_id); in msgin_set_wide()
2844 build_sdtr(acb, dcb, srb); in msgin_set_wide()
2863 static void msgin_phase0(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb, in msgin_phase0() argument
2867 dprintkdbg(DBG_0, "msgin_phase0: (0x%p)\n", srb->cmd); in msgin_phase0()
2869 srb->msgin_buf[acb->msg_len++] = DC395x_read8(acb, TRM_S1040_SCSI_FIFO); in msgin_phase0()
2870 if (msgin_completed(srb->msgin_buf, acb->msg_len)) { in msgin_phase0()
2872 switch (srb->msgin_buf[0]) { in msgin_phase0()
2874 srb->state = SRB_DISCONNECT; in msgin_phase0()
2880 srb = in msgin_phase0()
2882 srb->msgin_buf[1]); in msgin_phase0()
2889 if (srb->state & SRB_DO_SYNC_NEGO) { in msgin_phase0()
2890 msgin_set_async(acb, srb); in msgin_phase0()
2894 if (srb->state & SRB_DO_WIDE_NEGO) { in msgin_phase0()
2895 msgin_set_nowide(acb, srb); in msgin_phase0()
2898 enable_msgout_abort(acb, srb); in msgin_phase0()
2904 if (srb->msgin_buf[1] == 3 in msgin_phase0()
2905 && srb->msgin_buf[2] == EXTENDED_SDTR) { in msgin_phase0()
2906 msgin_set_sync(acb, srb); in msgin_phase0()
2910 if (srb->msgin_buf[1] == 2 in msgin_phase0()
2911 && srb->msgin_buf[2] == EXTENDED_WDTR in msgin_phase0()
2912 && srb->msgin_buf[3] <= 2) { /* sanity check ... */ in msgin_phase0()
2913 msgin_set_wide(acb, srb); in msgin_phase0()
2916 msgin_reject(acb, srb); in msgin_phase0()
2935 srb->cmd, srb->total_xfer_length); in msgin_phase0()
2945 srb->cmd, dcb->target_id, in msgin_phase0()
2948 enable_msgout_abort(acb, srb); in msgin_phase0()
2953 if (srb->msgin_buf[0] & IDENTIFY_BASE) { in msgin_phase0()
2955 srb->msg_count = 1; in msgin_phase0()
2956 srb->msgout_buf[0] = dcb->identify_msg; in msgin_phase0()
2958 srb->state |= SRB_MSGOUT; in msgin_phase0()
2961 msgin_reject(acb, srb); in msgin_phase0()
2965 srb->state &= ~SRB_MSGIN; in msgin_phase0()
2974 static void msgin_phase1(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb, in msgin_phase1() argument
2977 dprintkdbg(DBG_0, "msgin_phase1: (0x%p)\n", srb->cmd); in msgin_phase1()
2980 if (!(srb->state & SRB_MSGIN)) { in msgin_phase1()
2981 srb->state &= ~SRB_DISCONNECT; in msgin_phase1()
2982 srb->state |= SRB_MSGIN; in msgin_phase1()
2990 static void nop0(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb, in nop0() argument
2996 static void nop1(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb, in nop1() argument
3028 struct ScsiReqBlk *srb; in disconnect() local
3041 srb = dcb->active_srb; in disconnect()
3043 dprintkdbg(DBG_0, "disconnect: (0x%p)\n", srb->cmd); in disconnect()
3045 srb->scsi_phase = PH_BUS_FREE; /* initial phase */ in disconnect()
3048 if (srb->state & SRB_UNEXPECT_RESEL) { in disconnect()
3052 srb->state = 0; in disconnect()
3054 } else if (srb->state & SRB_ABORT_SENT) { in disconnect()
3058 doing_srb_done(acb, DID_ABORT, srb->cmd, 1); in disconnect()
3061 if ((srb->state & (SRB_START_ + SRB_MSGOUT)) in disconnect()
3062 || !(srb-> in disconnect()
3069 if (srb->state != SRB_START_ in disconnect()
3070 && srb->state != SRB_MSGOUT) { in disconnect()
3071 srb->state = SRB_READY; in disconnect()
3074 srb->cmd); in disconnect()
3075 srb->target_status = SCSI_STAT_SEL_TIMEOUT; in disconnect()
3080 "<%02i-%i> SelTO\n", srb->cmd, in disconnect()
3082 if (srb->retry_count++ > DC395x_MAX_RETRIES in disconnect()
3084 srb->target_status = in disconnect()
3088 free_tag(dcb, srb); in disconnect()
3089 srb_going_to_waiting_move(dcb, srb); in disconnect()
3092 srb->cmd); in disconnect()
3095 } else if (srb->state & SRB_DISCONNECT) { in disconnect()
3107 } else if (srb->state & SRB_COMPLETED) { in disconnect()
3112 free_tag(dcb, srb); in disconnect()
3114 srb->state = SRB_FREE; in disconnect()
3115 srb_done(acb, dcb, srb); in disconnect()
3124 struct ScsiReqBlk *srb = NULL; in reselect() local
3135 srb = dcb->active_srb; in reselect()
3136 if (!srb) { in reselect()
3146 srb->cmd, dcb->target_id, in reselect()
3152 srb->state = SRB_READY; in reselect()
3153 free_tag(dcb, srb); in reselect()
3154 srb_going_to_waiting_move(dcb, srb); in reselect()
3181 srb = acb->tmp_srb; in reselect()
3182 dcb->active_srb = srb; in reselect()
3185 srb = dcb->active_srb; in reselect()
3186 if (!srb || !(srb->state & SRB_DISCONNECT)) { in reselect()
3193 srb = acb->tmp_srb; in reselect()
3194 srb->state = SRB_UNEXPECT_RESEL; in reselect()
3195 dcb->active_srb = srb; in reselect()
3196 enable_msgout_abort(acb, srb); in reselect()
3200 enable_msgout_abort(acb, srb); in reselect()
3202 srb->state = SRB_DATA_XFER; in reselect()
3206 srb->scsi_phase = PH_BUS_FREE; /* initial phase */ in reselect()
3268 static void pci_unmap_srb(struct AdapterCtlBlk *acb, struct ScsiReqBlk *srb) in pci_unmap_srb() argument
3270 struct scsi_cmnd *cmd = srb->cmd; in pci_unmap_srb()
3276 srb->sg_bus_addr, SEGMENTX_LEN); in pci_unmap_srb()
3277 pci_unmap_single(acb->dev, srb->sg_bus_addr, in pci_unmap_srb()
3290 struct ScsiReqBlk *srb) in pci_unmap_srb_sense() argument
3292 if (!(srb->flag & AUTO_REQSENSE)) in pci_unmap_srb_sense()
3296 srb->segment_x[0].address); in pci_unmap_srb_sense()
3297 pci_unmap_single(acb->dev, srb->segment_x[0].address, in pci_unmap_srb_sense()
3298 srb->segment_x[0].length, PCI_DMA_FROMDEVICE); in pci_unmap_srb_sense()
3300 srb->total_xfer_length = srb->xferred; in pci_unmap_srb_sense()
3301 srb->segment_x[0].address = in pci_unmap_srb_sense()
3302 srb->segment_x[DC395x_MAX_SG_LISTENTRY - 1].address; in pci_unmap_srb_sense()
3303 srb->segment_x[0].length = in pci_unmap_srb_sense()
3304 srb->segment_x[DC395x_MAX_SG_LISTENTRY - 1].length; in pci_unmap_srb_sense()
3313 struct ScsiReqBlk *srb) in srb_done() argument
3316 struct scsi_cmnd *cmd = srb->cmd; in srb_done()
3320 dprintkdbg(DBG_1, "srb_done: (0x%p) <%02i-%i>\n", srb->cmd, in srb_done()
3321 srb->cmd->device->id, (u8)srb->cmd->device->lun); in srb_done()
3323 srb, scsi_sg_count(cmd), srb->sg_index, srb->sg_count, in srb_done()
3325 status = srb->target_status; in srb_done()
3326 if (srb->flag & AUTO_REQSENSE) { in srb_done()
3328 pci_unmap_srb_sense(acb, srb); in srb_done()
3332 srb->flag &= ~AUTO_REQSENSE; in srb_done()
3333 srb->adapter_status = 0; in srb_done()
3334 srb->target_status = CHECK_CONDITION << 1; in srb_done()
3387 if (srb->total_xfer_length in srb_done()
3388 && srb->total_xfer_length >= cmd->underflow) in srb_done()
3391 srb->end_message, CHECK_CONDITION); in srb_done()
3396 srb->end_message, CHECK_CONDITION); in srb_done()
3407 request_sense(acb, dcb, srb); in srb_done()
3416 free_tag(dcb, srb); in srb_done()
3417 srb_going_to_waiting_move(dcb, srb); in srb_done()
3419 srb->adapter_status = 0; in srb_done()
3420 srb->target_status = 0; in srb_done()
3423 srb->adapter_status = H_SEL_TIMEOUT; in srb_done()
3424 srb->target_status = 0; in srb_done()
3427 srb->adapter_status = 0; in srb_done()
3429 SET_RES_MSG(cmd->result, srb->end_message); in srb_done()
3437 status = srb->adapter_status; in srb_done()
3439 srb->target_status = 0; in srb_done()
3441 SET_RES_MSG(cmd->result, srb->end_message); in srb_done()
3442 } else if (srb->status & PARITY_ERROR) { in srb_done()
3444 SET_RES_MSG(cmd->result, srb->end_message); in srb_done()
3447 srb->adapter_status = 0; in srb_done()
3448 srb->target_status = 0; in srb_done()
3493 scsi_set_resid(cmd, srb->total_xfer_length); in srb_done()
3495 cmd->SCp.this_residual = srb->total_xfer_length; in srb_done()
3498 if (srb->total_xfer_length) in srb_done()
3502 cmd->cmnd[0], srb->total_xfer_length); in srb_done()
3505 srb_going_remove(dcb, srb); in srb_done()
3507 if (srb == acb->tmp_srb) in srb_done()
3512 srb_free_insert(acb, srb); in srb_done()
3514 pci_unmap_srb(acb, srb); in srb_done()
3529 struct ScsiReqBlk *srb; in doing_srb_done() local
3533 list_for_each_entry_safe(srb, tmp, &dcb->srb_going_list, list) { in doing_srb_done()
3537 p = srb->cmd; in doing_srb_done()
3542 srb_going_remove(dcb, srb); in doing_srb_done()
3543 free_tag(dcb, srb); in doing_srb_done()
3544 srb_free_insert(acb, srb); in doing_srb_done()
3546 pci_unmap_srb_sense(acb, srb); in doing_srb_done()
3547 pci_unmap_srb(acb, srb); in doing_srb_done()
3565 list_for_each_entry_safe(srb, tmp, &dcb->srb_waiting_list, list) { in doing_srb_done()
3567 p = srb->cmd; in doing_srb_done()
3572 srb_waiting_remove(dcb, srb); in doing_srb_done()
3573 srb_free_insert(acb, srb); in doing_srb_done()
3575 pci_unmap_srb_sense(acb, srb); in doing_srb_done()
3576 pci_unmap_srb(acb, srb); in doing_srb_done()
3676 struct ScsiReqBlk *srb) in request_sense() argument
3678 struct scsi_cmnd *cmd = srb->cmd; in request_sense()
3682 srb->flag |= AUTO_REQSENSE; in request_sense()
3683 srb->adapter_status = 0; in request_sense()
3684 srb->target_status = 0; in request_sense()
3690 srb->segment_x[DC395x_MAX_SG_LISTENTRY - 1].address = in request_sense()
3691 srb->segment_x[0].address; in request_sense()
3692 srb->segment_x[DC395x_MAX_SG_LISTENTRY - 1].length = in request_sense()
3693 srb->segment_x[0].length; in request_sense()
3694 srb->xferred = srb->total_xfer_length; in request_sense()
3696 srb->total_xfer_length = SCSI_SENSE_BUFFERSIZE; in request_sense()
3697 srb->segment_x[0].length = SCSI_SENSE_BUFFERSIZE; in request_sense()
3699 srb->segment_x[0].address = in request_sense()
3703 cmd->sense_buffer, srb->segment_x[0].address, in request_sense()
3705 srb->sg_count = 1; in request_sense()
3706 srb->sg_index = 0; in request_sense()
3708 if (start_scsi(acb, dcb, srb)) { /* Should only happen, if sb. else grabs the bus */ in request_sense()
3711 srb->cmd, dcb->target_id, dcb->target_lun); in request_sense()
3712 srb_going_to_waiting_move(dcb, srb); in request_sense()
4291 acb->srb.segment_x = in adapter_sg_tables_alloc()
4368 acb->tmp_srb = &acb->srb; in adapter_init_params()
4698 struct ScsiReqBlk *srb; in dc395x_show_info() local
4703 list_for_each_entry(srb, &dcb->srb_waiting_list, list) in dc395x_show_info()
4704 seq_printf(m, " %p", srb->cmd); in dc395x_show_info()
4709 list_for_each_entry(srb, &dcb->srb_going_list, list) in dc395x_show_info()
4710 seq_printf(m, " %p", srb->cmd); in dc395x_show_info()
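Taken together, the matches above revolve around three request queues: acb->srb_free_list, dcb->srb_waiting_list and dcb->srb_going_list. srb_get_free()/srb_free_insert() take a ScsiReqBlk from and return it to the free pool, srb_waiting_append()/srb_waiting_insert() queue new commands, srb_waiting_to_going_move() and srb_going_to_waiting_move() shuttle a block between the two per-device lists with list_move(), and srb_done() eventually hands it back to the free list. The following is a minimal userspace sketch of that lifecycle only: the list_* helpers are simplified stand-ins for the kernel's <linux/list.h> primitives, and struct req_blk is a hypothetical, stripped-down substitute for struct ScsiReqBlk, not the driver's real layout.

```c
/*
 * Userspace sketch of the srb queue lifecycle referenced above.
 * The list_* helpers are minimal stand-ins mirroring <linux/list.h>;
 * struct req_blk is a hypothetical stand-in for struct ScsiReqBlk.
 */
#include <stddef.h>
#include <stdio.h>

struct list_head { struct list_head *next, *prev; };

static void INIT_LIST_HEAD(struct list_head *h) { h->next = h->prev = h; }
static int list_empty(const struct list_head *h) { return h->next == h; }

static void __list_add(struct list_head *n,
		       struct list_head *prev, struct list_head *next)
{
	next->prev = n;
	n->next = next;
	n->prev = prev;
	prev->next = n;
}
static void list_add(struct list_head *n, struct list_head *h)      { __list_add(n, h, h->next); }
static void list_add_tail(struct list_head *n, struct list_head *h) { __list_add(n, h->prev, h); }
static void list_del(struct list_head *e)
{
	e->prev->next = e->next;
	e->next->prev = e->prev;
}
/* list_move(): unlink from the current list, re-link at the head of another */
static void list_move(struct list_head *e, struct list_head *h) { list_del(e); list_add(e, h); }
#define list_entry(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

/* Hypothetical, stripped-down request block: only an id and the linkage. */
struct req_blk {
	int id;
	struct list_head list;
};

/* Free pool plus per-device waiting/going queues, mirroring the
 * acb->srb_free_list / dcb->srb_waiting_list / dcb->srb_going_list trio. */
static struct list_head free_list, waiting_list, going_list;

static struct req_blk *req_get_free(void)	/* cf. srb_get_free() */
{
	struct req_blk *rb = NULL;

	if (!list_empty(&free_list)) {
		rb = list_entry(free_list.next, struct req_blk, list);
		list_del(&rb->list);
	}
	return rb;
}

int main(void)
{
	struct req_blk pool[2] = { { .id = 0 }, { .id = 1 } };
	struct req_blk *rb;
	int i;

	INIT_LIST_HEAD(&free_list);
	INIT_LIST_HEAD(&waiting_list);
	INIT_LIST_HEAD(&going_list);
	for (i = 0; i < 2; i++)				/* cf. srb_free_insert()            */
		list_add_tail(&pool[i].list, &free_list);

	rb = req_get_free();				/* queuecommand grabs a block       */
	list_add_tail(&rb->list, &waiting_list);	/* cf. srb_waiting_append()         */
	list_move(&rb->list, &going_list);		/* cf. srb_waiting_to_going_move()  */
	list_del(&rb->list);				/* completion: cf. srb_going_remove() */
	list_add_tail(&rb->list, &free_list);		/* cf. srb_done() -> srb_free_insert() */

	printf("req %d recycled; going list empty=%d, free list empty=%d\n",
	       rb->id, list_empty(&going_list), list_empty(&free_list));
	return 0;
}
```

In the driver itself, whether a waiting block actually becomes "going" depends on start_scsi() accepting it (see the waiting_process_next() and send_srb() entries above); the sketch skips that decision and all locking.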