Lines Matching refs:gl

723 struct gfs2_glock *gl; in gfs2_clear_rgrpd() local
727 gl = rgd->rd_gl; in gfs2_clear_rgrpd()
731 if (gl) { in gfs2_clear_rgrpd()
732 spin_lock(&gl->gl_lockref.lock); in gfs2_clear_rgrpd()
733 gl->gl_object = NULL; in gfs2_clear_rgrpd()
734 spin_unlock(&gl->gl_lockref.lock); in gfs2_clear_rgrpd()
735 gfs2_glock_add_to_lru(gl); in gfs2_clear_rgrpd()
736 gfs2_glock_put(gl); in gfs2_clear_rgrpd()
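The gfs2_clear_rgrpd() hits above show the teardown pattern: the rgrpd's back-pointer on its glock is cleared under gl_lockref.lock before the glock is pushed to the LRU and the reference is dropped, so nothing can reach a stale rgrpd through gl_object. A minimal sketch of just that pattern (the helper name detach_rgrpd_glock is invented for illustration; the surrounding cleanup of the rgrpd itself is omitted):

    static void detach_rgrpd_glock(struct gfs2_rgrpd *rgd)
    {
        struct gfs2_glock *gl = rgd->rd_gl;

        if (gl) {
            spin_lock(&gl->gl_lockref.lock);
            gl->gl_object = NULL;       /* rgd no longer reachable through the glock */
            spin_unlock(&gl->gl_lockref.lock);
            gfs2_glock_add_to_lru(gl);  /* make the glock a reclaim candidate */
            gfs2_glock_put(gl);         /* drop the reference the rgrpd held */
        }
    }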
1033 struct gfs2_glock *gl = ip->i_gl; in gfs2_rindex_update() local
1040 if (!gfs2_glock_is_locked_by_me(gl)) { in gfs2_rindex_update()
1041 error = gfs2_glock_nq_init(gl, LM_ST_SHARED, 0, &ri_gh); in gfs2_rindex_update()
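The gfs2_rindex_update() hits show conditional locking: the rindex inode's glock (ip->i_gl) is taken in shared mode only when the caller does not already hold it. A sketch of that shape, assuming ri_gh is a local struct gfs2_holder and that gfs2_glock_dq_uninit() releases it again; the rindex re-read in the middle is elided:

    struct gfs2_holder ri_gh;
    struct gfs2_glock *gl = ip->i_gl;
    int unlock_required = 0;
    int error = 0;

    if (!gfs2_glock_is_locked_by_me(gl)) {
        /* Take a shared hold only when this task does not already own the glock. */
        error = gfs2_glock_nq_init(gl, LM_ST_SHARED, 0, &ri_gh);
        if (error)
            return error;
        unlock_required = 1;
    }
    /* ... re-read the resource group index under the shared hold ... */
    if (unlock_required)
        gfs2_glock_dq_uninit(&ri_gh);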
1150 struct gfs2_glock *gl = rgd->rd_gl; in gfs2_rgrp_bh_get() local
1161 error = gfs2_meta_read(gl, rgd->rd_addr + x, 0, &bi->bi_bh); in gfs2_rgrp_bh_get()
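gfs2_rgrp_bh_get() reads the resource group's bitmap buffers through the rgrp glock, one block at a time starting at rd_addr. A sketch of that loop; the function name, the use of rd_length as the bitmap count, and the simplified error path are assumptions, only the gfs2_meta_read() call is taken from the listing:

    static int rgrp_read_bitmap_buffers(struct gfs2_rgrpd *rgd)
    {
        struct gfs2_glock *gl = rgd->rd_gl;
        unsigned int x;
        int error;

        for (x = 0; x < rgd->rd_length; x++) {
            struct gfs2_bitmap *bi = rgd->rd_bits + x;

            /* Bitmap block x of this resource group sits at rd_addr + x. */
            error = gfs2_meta_read(gl, rgd->rd_addr + x, 0, &bi->bi_bh);
            if (error)
                return error;   /* the real code also releases buffers already read */
        }
        return 0;
    }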
1782 struct gfs2_glock *gl; in try_rgrp_unlink() local
1807 error = gfs2_glock_get(sdp, block, &gfs2_inode_glops, CREATE, &gl); in try_rgrp_unlink()
1818 ip = gl->gl_object; in try_rgrp_unlink()
1820 if (ip || queue_work(gfs2_delete_workqueue, &gl->gl_delete) == 0) in try_rgrp_unlink()
1821 gfs2_glock_put(gl); in try_rgrp_unlink()
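try_rgrp_unlink() looks up (or creates) the inode glock for an unlinked block and tries to queue its delete work. The reference returned by gfs2_glock_get() has to go somewhere: a fragment sketching that ownership handoff, with the surrounding loop and error handling simplified:

    struct gfs2_glock *gl;
    struct gfs2_inode *ip;
    int error;

    error = gfs2_glock_get(sdp, block, &gfs2_inode_glops, CREATE, &gl);
    if (error)
        return;             /* simplified error path for this sketch */

    ip = gl->gl_object;     /* is an inode already attached to this glock? */

    /* queue_work() returns false when gl_delete is already pending, so in that
     * case (or when an inode exists) the extra reference is dropped here;
     * otherwise the queued delete work is expected to drop it when it runs. */
    if (ip || queue_work(gfs2_delete_workqueue, &gl->gl_delete) == 0)
        gfs2_glock_put(gl);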
1863 const struct gfs2_glock *gl = rgd->rd_gl; in gfs2_rgrp_congested() local
1864 const struct gfs2_sbd *sdp = gl->gl_name.ln_sbd; in gfs2_rgrp_congested()
1886 gl->gl_stats.stats[GFS2_LKS_SRTTVARB]; in gfs2_rgrp_congested()
1889 l_srttb = gl->gl_stats.stats[GFS2_LKS_SRTTB]; in gfs2_rgrp_congested()
1890 l_dcount = gl->gl_stats.stats[GFS2_LKS_DCOUNT]; in gfs2_rgrp_congested()
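gfs2_rgrp_congested() reads this glock's DLM lock statistics (smoothed round-trip time, its variance, and the request count) and weighs them against filesystem-wide figures reachable through gl->gl_name.ln_sbd. The sketch below is a deliberately simplified version of that idea, not the real test: rgrp_looks_congested() and fs_avg_srtt are invented here, and the threshold comparison only approximates what the kernel actually computes from per-CPU stats:

    static bool rgrp_looks_congested(const struct gfs2_rgrpd *rgd, u64 fs_avg_srtt)
    {
        const struct gfs2_glock *gl = rgd->rd_gl;
        u64 l_srttb  = gl->gl_stats.stats[GFS2_LKS_SRTTB];     /* smoothed RTT (blocking) */
        u64 l_var    = gl->gl_stats.stats[GFS2_LKS_SRTTVARB];  /* RTT variance (blocking) */
        u64 l_dcount = gl->gl_stats.stats[GFS2_LKS_DCOUNT];    /* DLM request count */

        if (!l_dcount)      /* no samples yet, nothing to compare */
            return false;

        /* Congested if this glock's RTT sits above the fs-wide average by
         * more than its own variance allows (simplified threshold). */
        return l_srttb > fs_avg_srtt + l_var;
    }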
1956 struct gfs2_glock *gl = rgd->rd_gl; in fast_to_acquire() local
1958 if (gl->gl_state != LM_ST_UNLOCKED && list_empty(&gl->gl_holders) && in fast_to_acquire()
1959 !test_bit(GLF_DEMOTE_IN_PROGRESS, &gl->gl_flags) && in fast_to_acquire()
1960 !test_bit(GLF_DEMOTE, &gl->gl_flags)) in fast_to_acquire()
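fast_to_acquire() is a cheapness heuristic used when choosing a resource group: a rgrp glock counts as fast if this node already holds it in some state, no holders are queued on it, and no demote back to the DLM is pending or in progress. A sketch limited to the condition visible above (the wrapper name and bool return type are cosmetic):

    static inline bool rgrp_glock_fast_to_acquire(const struct gfs2_rgrpd *rgd)
    {
        const struct gfs2_glock *gl = rgd->rd_gl;

        return gl->gl_state != LM_ST_UNLOCKED &&    /* already held on this node */
               list_empty(&gl->gl_holders) &&       /* nobody queued on it */
               !test_bit(GLF_DEMOTE_IN_PROGRESS, &gl->gl_flags) &&
               !test_bit(GLF_DEMOTE, &gl->gl_flags);    /* no pending demote */
    }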
2226 void gfs2_rgrp_dump(struct seq_file *seq, const struct gfs2_glock *gl) in gfs2_rgrp_dump() argument
2228 struct gfs2_rgrpd *rgd = gl->gl_object; in gfs2_rgrp_dump()
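gfs2_rgrp_dump() is the rgrp half of the glock dump machinery: it recovers the rgrpd through the glock's gl_object back-pointer and prints a one-line summary. A sketch of that shape, assuming a non-NULL seq_file; the fields and format string (rd_flags, rd_free, rd_data) are illustrative rather than the real output:

    void gfs2_rgrp_dump(struct seq_file *seq, const struct gfs2_glock *gl)
    {
        struct gfs2_rgrpd *rgd = gl->gl_object;

        if (rgd == NULL)    /* glock already detached from its rgrpd */
            return;
        seq_printf(seq, " R: n:%llu f:%02x b:%u/%u\n",
                   (unsigned long long)rgd->rd_addr, rgd->rd_flags,
                   rgd->rd_free, rgd->rd_data);
    }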