hw_blk 26 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c void dpu_hw_blk_init(struct dpu_hw_blk *hw_blk, u32 type, int id,
hw_blk 29 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c INIT_LIST_HEAD(&hw_blk->list);
hw_blk 30 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c hw_blk->type = type;
hw_blk 31 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c hw_blk->id = id;
hw_blk 32 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c atomic_set(&hw_blk->refcount, 0);
hw_blk 35 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c hw_blk->ops = *ops;
hw_blk 38 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c list_add(&hw_blk->list, &dpu_hw_blk_list);
hw_blk 47 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c void dpu_hw_blk_destroy(struct dpu_hw_blk *hw_blk)
hw_blk 49 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c if (!hw_blk) {
hw_blk 54 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c if (atomic_read(&hw_blk->refcount))
hw_blk 55 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c pr_err("hw_blk:%d.%d invalid refcount\n", hw_blk->type,
hw_blk 56 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c hw_blk->id);
hw_blk 59 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c list_del(&hw_blk->list);
hw_blk 70 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c struct dpu_hw_blk *dpu_hw_blk_get(struct dpu_hw_blk *hw_blk, u32 type, int id)
hw_blk 75 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c if (!hw_blk) {
hw_blk 84 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c hw_blk = curr;
hw_blk 90 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c if (!hw_blk) {
hw_blk 95 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c refcount = atomic_inc_return(&hw_blk->refcount);
hw_blk 97 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c if (refcount == 1 && hw_blk->ops.start) {
hw_blk 98 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c rc = hw_blk->ops.start(hw_blk);
hw_blk 105 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c pr_debug("hw_blk:%d.%d refcount:%d\n", hw_blk->type,
hw_blk 106 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c hw_blk->id, refcount);
hw_blk 107 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c return hw_blk;
hw_blk 110 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c dpu_hw_blk_put(hw_blk);
hw_blk 119 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c void dpu_hw_blk_put(struct dpu_hw_blk *hw_blk)
hw_blk 121 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c if (!hw_blk) {
hw_blk 126 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c pr_debug("hw_blk:%d.%d refcount:%d\n", hw_blk->type, hw_blk->id,
hw_blk 127 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c atomic_read(&hw_blk->refcount));
hw_blk 129 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c if (!atomic_read(&hw_blk->refcount)) {
hw_blk 130 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c pr_err("hw_blk:%d.%d invalid put\n", hw_blk->type, hw_blk->id);
hw_blk 134 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c if (atomic_dec_return(&hw_blk->refcount))
hw_blk 137 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c if (hw_blk->ops.stop)
hw_blk 138 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.c hw_blk->ops.stop(hw_blk);
hw_blk 39 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.h void dpu_hw_blk_init(struct dpu_hw_blk *hw_blk, u32 type, int id,
hw_blk 41 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.h void dpu_hw_blk_destroy(struct dpu_hw_blk *hw_blk);
hw_blk 43 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.h struct dpu_hw_blk *dpu_hw_blk_get(struct dpu_hw_blk *hw_blk, u32 type, int id);
hw_blk 44 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_blk.h void dpu_hw_blk_put(struct dpu_hw_blk *hw_blk);
hw_blk 1586 drivers/net/ethernet/hisilicon/hns3/hns3pf/hclge_err.c static const struct hclge_hw_blk hw_blk[] = {
hw_blk 1620 drivers/net/ethernet/hisilicon/hns3/hns3pf/hclge_err.c const struct hclge_hw_blk *module = hw_blk;
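Taken together, the dpu_hw_blk.c hits above outline a small refcounted block lifecycle: dpu_hw_blk_init() puts a block on the global dpu_hw_blk_list with its refcount at zero, dpu_hw_blk_get() bumps the refcount and calls ops.start() on the first reference, dpu_hw_blk_put() drops the refcount and calls ops.stop() when it reaches zero, and dpu_hw_blk_destroy() unlinks the block, complaining if references remain. Below is a minimal caller-side sketch, not code from the tree: it assumes the ops table is a struct dpu_hw_blk_ops with start/stop callbacks (inferred from hw_blk->ops = *ops at line 35 and the ops.start/ops.stop calls), and the my_blk_* names and the type value are made up for illustration.

/* Hypothetical callbacks; only their existence as ops.start/ops.stop is
 * implied by the listing above. */
static int my_blk_start(struct dpu_hw_blk *hw_blk)
{
	/* runs once, when the refcount goes 0 -> 1 in dpu_hw_blk_get() */
	return 0;
}

static void my_blk_stop(struct dpu_hw_blk *hw_blk)
{
	/* runs once, when the refcount goes 1 -> 0 in dpu_hw_blk_put() */
}

static struct dpu_hw_blk_ops my_blk_ops = {
	.start = my_blk_start,
	.stop  = my_blk_stop,
};

static struct dpu_hw_blk my_blk;

static void my_blk_example(void)
{
	struct dpu_hw_blk *blk;

	/* register: refcount starts at 0, block is added to dpu_hw_blk_list;
	 * the type value 0 here is a placeholder, not a real dpu_hw_blk type */
	dpu_hw_blk_init(&my_blk, 0, 0, &my_blk_ops);

	/* first get: refcount becomes 1, ops.start() is invoked */
	blk = dpu_hw_blk_get(&my_blk, 0, 0);
	if (!blk)
		return;

	/* ... use the block ... */

	/* last put: refcount returns to 0, ops.stop() is invoked */
	dpu_hw_blk_put(blk);

	/* unlink from dpu_hw_blk_list; pr_err()s if the refcount is non-zero */
	dpu_hw_blk_destroy(&my_blk);
}

The hclge_err.c hits at the end are unrelated to the DPU code: there hw_blk is a static const table of struct hclge_hw_blk entries that the driver walks via a local pointer (module = hw_blk) when handling hardware errors.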