Lines matching refs: device

187 static const struct device *tgt_dev(const struct sbp2_target *tgt)  in tgt_dev()
189 return &tgt->unit->device; in tgt_dev()
192 static const struct device *lu_dev(const struct sbp2_logical_unit *lu) in lu_dev()
194 return &lu->tgt->unit->device; in lu_dev()
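
These accessors exist so the rest of the driver can hand a plain struct device pointer to the dev_*() logging helpers. A minimal sketch of that use, assuming the driver's own sbp2_logical_unit type and the lu_dev() helper from the lines above (the message text is illustrative):

#include <linux/device.h>

static void report_login_failure(const struct sbp2_logical_unit *lu, int error)
{
	/* lu_dev() resolves to &lu->tgt->unit->device, per the lines above. */
	dev_err(lu_dev(lu), "login attempt failed: %d\n", error);
}
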
504 struct fw_device *device = target_parent_device(lu->tgt); in sbp2_send_orb() local
519 fw_send_request(device->card, &orb->t, TCODE_WRITE_BLOCK_REQUEST, in sbp2_send_orb()
520 node_id, generation, device->max_speed, offset, in sbp2_send_orb()
526 struct fw_device *device = target_parent_device(lu->tgt); in sbp2_cancel_orbs() local
538 if (fw_cancel_transaction(device->card, &orb->t) == 0) in sbp2_cancel_orbs()
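
The ORB submission path posts an asynchronous block write and, on timeout, cancels any transaction that is still pending. A hedged sketch of that fw_send_request() pattern, assuming a hypothetical demo_orb wrapper; the ORB layout, payload and completion message are illustrative, only the call pattern mirrors the listing:

#include <linux/firewire.h>
#include <linux/firewire-constants.h>
#include <linux/printk.h>
#include <linux/slab.h>

struct demo_orb {
	struct fw_transaction t;
	__be32 pointer[2];	/* hypothetical payload written to the target */
};

static void demo_orb_complete(struct fw_card *card, int rcode,
			      void *payload, size_t length, void *data)
{
	struct demo_orb *orb = data;

	if (rcode != RCODE_COMPLETE)
		pr_warn("ORB write failed, rcode %d\n", rcode);
	kfree(orb);	/* payload must stay valid until this callback runs */
}

static void demo_send_orb(struct fw_device *device, int node_id,
			  int generation, unsigned long long offset)
{
	struct demo_orb *orb = kzalloc(sizeof(*orb), GFP_ATOMIC);

	if (!orb)
		return;
	fw_send_request(device->card, &orb->t, TCODE_WRITE_BLOCK_REQUEST,
			node_id, generation, device->max_speed, offset,
			orb->pointer, sizeof(orb->pointer),
			demo_orb_complete, orb);
}
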
564 struct fw_device *device = target_parent_device(lu->tgt); in sbp2_send_management_orb() local
569 if (function == SBP2_LOGOUT_REQUEST && fw_device_is_shutdown(device)) in sbp2_send_management_orb()
578 dma_map_single(device->card->device, &orb->response, in sbp2_send_management_orb()
580 if (dma_mapping_error(device->card->device, orb->response_bus)) in sbp2_send_management_orb()
612 dma_map_single(device->card->device, &orb->request, in sbp2_send_management_orb()
614 if (dma_mapping_error(device->card->device, orb->base.request_bus)) in sbp2_send_management_orb()
645 dma_unmap_single(device->card->device, orb->base.request_bus, in sbp2_send_management_orb()
648 dma_unmap_single(device->card->device, orb->response_bus, in sbp2_send_management_orb()
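
Each management ORB maps its request and response buffers for streaming DMA against the card's DMA device and unmaps them once the transaction has finished. A minimal sketch of that map/check/unmap pattern; the function names and the single response buffer are illustrative:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/firewire.h>

static int demo_map_response(struct fw_device *device, void *response,
			     size_t size, dma_addr_t *bus)
{
	*bus = dma_map_single(device->card->device, response, size,
			      DMA_FROM_DEVICE);
	if (dma_mapping_error(device->card->device, *bus))
		return -ENOMEM;
	return 0;
}

static void demo_unmap_response(struct fw_device *device, dma_addr_t bus,
				size_t size)
{
	dma_unmap_single(device->card->device, bus, size, DMA_FROM_DEVICE);
}
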
660 struct fw_device *device = target_parent_device(lu->tgt); in sbp2_agent_reset() local
663 fw_run_transaction(device->card, TCODE_WRITE_QUADLET_REQUEST, in sbp2_agent_reset()
664 lu->tgt->node_id, lu->generation, device->max_speed, in sbp2_agent_reset()
677 struct fw_device *device = target_parent_device(lu->tgt); in sbp2_agent_reset_no_wait() local
685 fw_send_request(device->card, t, TCODE_WRITE_QUADLET_REQUEST, in sbp2_agent_reset_no_wait()
686 lu->tgt->node_id, lu->generation, device->max_speed, in sbp2_agent_reset_no_wait()
798 struct fw_device *device = target_parent_device(lu->tgt); in sbp2_set_busy_timeout() local
801 fw_run_transaction(device->card, TCODE_WRITE_QUADLET_REQUEST, in sbp2_set_busy_timeout()
802 lu->tgt->node_id, lu->generation, device->max_speed, in sbp2_set_busy_timeout()
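
Agent reset and the busy-timeout register are both simple quadlet writes performed synchronously with fw_run_transaction(). A sketch of that call pattern, assuming a caller that already holds the node ID, generation and target register offset (the helper name and register argument are hypothetical):

#include <linux/firewire.h>
#include <linux/firewire-constants.h>

static int demo_write_register(struct fw_device *device, int node_id,
			       int generation, unsigned long long reg)
{
	__be32 d = 0;

	/* Blocks until the request completes or fails on the current bus generation. */
	return fw_run_transaction(device->card, TCODE_WRITE_QUADLET_REQUEST,
				  node_id, generation, device->max_speed,
				  reg, &d, sizeof(d));
}
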
813 struct fw_device *device = target_parent_device(tgt); in sbp2_login() local
819 if (fw_device_is_shutdown(device)) in sbp2_login()
822 generation = device->generation; in sbp2_login()
824 node_id = device->node_id; in sbp2_login()
825 local_node_id = device->card->node_id; in sbp2_login()
829 sbp2_send_management_orb(lu, device->node_id, generation, in sbp2_login()
891 if (generation != device->card->generation) { in sbp2_login()
906 generation = device->generation; in sbp2_login()
909 sbp2_send_management_orb(lu, device->node_id, generation, in sbp2_login()
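
The login path samples the bus generation before the node IDs so that a node ID is never paired with a newer generation after a bus reset. A sketch of that ordering using the field names from the listing; the FireWire core is expected to pair this read barrier with a write barrier when it publishes updated values:

#include <asm/barrier.h>
#include <linux/firewire.h>

static void demo_snapshot_ids(struct fw_device *device, int *generation,
			      int *node_id, int *local_node_id)
{
	*generation = device->generation;
	smp_rmb();	/* node IDs must not be older than the generation */
	*node_id = device->node_id;
	*local_node_id = device->card->node_id;
}
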
923 struct fw_device *device = target_parent_device(tgt); in sbp2_reconnect() local
926 if (fw_device_is_shutdown(device)) in sbp2_reconnect()
929 generation = device->generation; in sbp2_reconnect()
931 node_id = device->node_id; in sbp2_reconnect()
932 local_node_id = device->card->node_id; in sbp2_reconnect()
945 if (generation == device->card->generation || in sbp2_reconnect()
1137 struct fw_device *device = fw_parent_device(unit); in sbp2_probe() local
1144 if (device->is_local) in sbp2_probe()
1147 if (dma_get_max_seg_size(device->card->device) > SBP2_MAX_SEG_SIZE) in sbp2_probe()
1148 WARN_ON(dma_set_max_seg_size(device->card->device, in sbp2_probe()
1156 dev_set_drvdata(&unit->device, tgt); in sbp2_probe()
1160 tgt->guid = (u64)device->config_rom[3] << 32 | device->config_rom[4]; in sbp2_probe()
1162 if (fw_device_enable_phys_dma(device) < 0) in sbp2_probe()
1167 if (scsi_add_host_with_dma(shost, &unit->device, in sbp2_probe()
1168 device->card->device) < 0) in sbp2_probe()
1172 tgt->directory_id = ((unit->directory - device->config_rom) * 4 in sbp2_probe()
1191 tgt->max_payload = min3(device->max_speed + 7, 10U, in sbp2_probe()
1192 device->card->max_receive - 1); in sbp2_probe()
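
Probe time clamps the DMA segment size of the card's DMA device and derives the maximum asynchronous payload code from the link speed and the controller's receive limit. A sketch of those two calculations, with DEMO_MAX_SEG_SIZE standing in for the driver's SBP2_MAX_SEG_SIZE:

#include <linux/bug.h>
#include <linux/dma-mapping.h>
#include <linux/firewire.h>
#include <linux/minmax.h>

#define DEMO_MAX_SEG_SIZE	0xfffc	/* stand-in for SBP2_MAX_SEG_SIZE */

static unsigned int demo_probe_limits(struct fw_device *device)
{
	if (dma_get_max_seg_size(device->card->device) > DEMO_MAX_SEG_SIZE)
		WARN_ON(dma_set_max_seg_size(device->card->device,
					     DEMO_MAX_SEG_SIZE));

	/* Payload is 2^(code + 2) bytes: S100 gives 512, capped at 4096 and max_receive. */
	return min3(device->max_speed + 7, 10U, device->card->max_receive - 1);
}
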
1211 struct sbp2_target *tgt = dev_get_drvdata(&unit->device); in sbp2_update()
1229 struct fw_device *device = fw_parent_device(unit); in sbp2_remove() local
1230 struct sbp2_target *tgt = dev_get_drvdata(&unit->device); in sbp2_remove()
1253 generation = device->generation; in sbp2_remove()
1255 node_id = device->node_id; in sbp2_remove()
1265 dev_notice(&unit->device, "released target %d:0:0\n", shost->host_no); in sbp2_remove()
1295 static void sbp2_unmap_scatterlist(struct device *card_device, in sbp2_unmap_scatterlist()
1356 struct fw_device *device = target_parent_device(base_orb->lu->tgt); in complete_command_orb() local
1390 dma_unmap_single(device->card->device, orb->base.request_bus, in complete_command_orb()
1392 sbp2_unmap_scatterlist(device->card->device, orb); in complete_command_orb()
1399 struct fw_device *device, struct sbp2_logical_unit *lu) in sbp2_map_scatterlist() argument
1431 dma_map_single(device->card->device, orb->page_table, in sbp2_map_scatterlist()
1433 if (dma_mapping_error(device->card->device, orb->page_table_bus)) in sbp2_map_scatterlist()
1461 struct sbp2_logical_unit *lu = cmd->device->hostdata; in sbp2_scsi_queuecommand()
1462 struct fw_device *device = target_parent_device(lu->tgt); in sbp2_scsi_queuecommand() local
1477 COMMAND_ORB_SPEED(device->max_speed) | in sbp2_scsi_queuecommand()
1483 generation = device->generation; in sbp2_scsi_queuecommand()
1486 if (scsi_sg_count(cmd) && sbp2_map_scatterlist(orb, device, lu) < 0) in sbp2_scsi_queuecommand()
1493 dma_map_single(device->card->device, &orb->request, in sbp2_scsi_queuecommand()
1495 if (dma_mapping_error(device->card->device, orb->base.request_bus)) { in sbp2_scsi_queuecommand()
1496 sbp2_unmap_scatterlist(device->card->device, orb); in sbp2_scsi_queuecommand()
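
In the queuecommand path the scatterlist is mapped first and the request descriptor second, so a failure to map the descriptor has to unwind the scatterlist mapping before the command is failed. A sketch of that unwind ordering with hypothetical types and helper names:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/firewire.h>

struct demo_cmd_orb {
	__be32 request[8];		/* hypothetical ORB body */
	__be32 page_table[16];		/* hypothetical SBP-2 page table */
	dma_addr_t request_bus;
	dma_addr_t page_table_bus;	/* set by the earlier scatterlist mapping */
};

/* Stand-in for the driver's scatterlist teardown helper. */
static void demo_unmap_scatterlist(struct device *card_device,
				   struct demo_cmd_orb *orb)
{
	dma_unmap_single(card_device, orb->page_table_bus,
			 sizeof(orb->page_table), DMA_TO_DEVICE);
}

static int demo_map_request(struct fw_device *device, struct demo_cmd_orb *orb)
{
	orb->request_bus =
		dma_map_single(device->card->device, orb->request,
			       sizeof(orb->request), DMA_TO_DEVICE);
	if (dma_mapping_error(device->card->device, orb->request_bus)) {
		/* Undo the scatterlist mapping before failing the command. */
		demo_unmap_scatterlist(device->card->device, orb);
		return -ENOMEM;
	}
	return 0;
}
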
1564 struct sbp2_logical_unit *lu = cmd->device->hostdata; in sbp2_scsi_abort()
1580 static ssize_t sbp2_sysfs_ieee1394_id_show(struct device *dev, in sbp2_sysfs_ieee1394_id_show()
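
The listing ends at the driver's sysfs attribute; the struct device handed to such a show callback is the SCSI device, whose hostdata points back at the logical unit. A hedged sketch of that shape; the attribute name and output are placeholders, and the real attribute formats the target's identifiers (GUID, directory ID, LUN):

#include <linux/device.h>
#include <linux/sysfs.h>
#include <scsi/scsi_device.h>

static ssize_t demo_ieee1394_id_show(struct device *dev,
				     struct device_attribute *attr, char *buf)
{
	struct scsi_device *sdev = to_scsi_device(dev);

	if (!sdev->hostdata)
		return -ENODEV;
	/* Placeholder output; not the driver's actual identifier format. */
	return sysfs_emit(buf, "placeholder\n");
}
static DEVICE_ATTR(demo_ieee1394_id, 0444, demo_ieee1394_id_show, NULL);
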