curr_sde 516 drivers/infiniband/hw/hfi1/sdma.c struct sdma_engine *curr_sde = &sde->dd->per_sdma[index];
curr_sde 520 drivers/infiniband/hw/hfi1/sdma.c if (curr_sde == sde)
curr_sde 527 drivers/infiniband/hw/hfi1/sdma.c spin_lock_irqsave(&curr_sde->tail_lock, flags);
curr_sde 528 drivers/infiniband/hw/hfi1/sdma.c write_seqlock(&curr_sde->head_lock);
curr_sde 531 drivers/infiniband/hw/hfi1/sdma.c if (curr_sde->state.current_state != sdma_state_s99_running) {
curr_sde 532 drivers/infiniband/hw/hfi1/sdma.c write_sequnlock(&curr_sde->head_lock);
curr_sde 533 drivers/infiniband/hw/hfi1/sdma.c spin_unlock_irqrestore(&curr_sde->tail_lock, flags);
curr_sde 537 drivers/infiniband/hw/hfi1/sdma.c if ((curr_sde->descq_head != curr_sde->descq_tail) &&
curr_sde 538 drivers/infiniband/hw/hfi1/sdma.c (curr_sde->descq_head ==
curr_sde 539 drivers/infiniband/hw/hfi1/sdma.c curr_sde->progress_check_head))
curr_sde 540 drivers/infiniband/hw/hfi1/sdma.c __sdma_process_event(curr_sde,
curr_sde 542 drivers/infiniband/hw/hfi1/sdma.c write_sequnlock(&curr_sde->head_lock);
curr_sde 543 drivers/infiniband/hw/hfi1/sdma.c spin_unlock_irqrestore(&curr_sde->tail_lock, flags);
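Read together, these hits trace one locking pattern: take each engine's tail_lock with IRQs disabled, take the head_lock seqlock for write, skip engines that are not in sdma_state_s99_running, and flag an engine whose descriptor queue has pending work (head != tail) but whose head has not advanced past progress_check_head. The sketch below is a hedged reconstruction of the surrounding loop, assuming the hits come from a progress/error check that walks dd->per_sdma and skips the engine that triggered it. The function name, the num_sdma loop bound, and the event passed to __sdma_process_event do not contain curr_sde and therefore are not in the listing; they are assumptions here, not confirmed by the hits.

    /*
     * Hedged sketch of the loop the hits above appear to belong to.
     * Names marked "assumed" are not visible in the listing.
     */
    static void sdma_err_progress_check(struct sdma_engine *sde)   /* name assumed */
    {
            unsigned int index;

            for (index = 0; index < sde->dd->num_sdma; index++) {  /* bound assumed */
                    struct sdma_engine *curr_sde = &sde->dd->per_sdma[index];
                    unsigned long flags;

                    /* skip the engine that triggered the check (hit at 520) */
                    if (curr_sde == sde)
                            continue;

                    /* tail_lock (IRQ-safe spinlock), then head_lock (seqlock) */
                    spin_lock_irqsave(&curr_sde->tail_lock, flags);
                    write_seqlock(&curr_sde->head_lock);

                    /* only a running engine is expected to make progress */
                    if (curr_sde->state.current_state != sdma_state_s99_running) {
                            write_sequnlock(&curr_sde->head_lock);
                            spin_unlock_irqrestore(&curr_sde->tail_lock, flags);
                            continue;
                    }

                    /*
                     * Work is queued (head != tail) but the head has not moved
                     * since the last check: report the stall. The event argument
                     * is cut off in the hit at 540; e90_sw_halted is an assumption.
                     */
                    if ((curr_sde->descq_head != curr_sde->descq_tail) &&
                        (curr_sde->descq_head == curr_sde->progress_check_head))
                            __sdma_process_event(curr_sde,
                                                 sdma_event_e90_sw_halted);

                    write_sequnlock(&curr_sde->head_lock);
                    spin_unlock_irqrestore(&curr_sde->tail_lock, flags);
            }
    }

The two-lock order (tail_lock outside, head_lock inside) matches the acquire order at 527-528 and the release order at 532-533 and 542-543, which is why both early-exit paths in the sketch unlock in the same sequence.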