
Searched refs:gl_sbd (Results 1 – 11 of 11) sorted by relevance

/linux-4.1.27/fs/gfs2/
glops.c
35 fs_err(gl->gl_sbd, "AIL buffer %p: blocknr %llu state 0x%08lx mapping %p page state 0x%lx\n", in gfs2_ail_error()
38 fs_err(gl->gl_sbd, "AIL glock %u:%llu mapping %p\n", in gfs2_ail_error()
41 gfs2_lm_withdraw(gl->gl_sbd, "AIL error\n"); in gfs2_ail_error()
55 struct gfs2_sbd *sdp = gl->gl_sbd; in __gfs2_ail_flush()
83 struct gfs2_sbd *sdp = gl->gl_sbd; in gfs2_ail_empty_gl()
112 struct gfs2_sbd *sdp = gl->gl_sbd; in gfs2_ail_flush()
142 struct gfs2_sbd *sdp = gl->gl_sbd; in rgrp_go_sync()
176 struct gfs2_sbd *sdp = gl->gl_sbd; in rgrp_go_inval()
213 gfs2_log_flush(gl->gl_sbd, gl, NORMAL_FLUSH); in inode_go_sync()
247 gfs2_assert_withdraw(gl->gl_sbd, !atomic_read(&gl->gl_ail_count)); in inode_go_inval()
[all …]
trace_gfs2.h
107 __entry->dev = gl->gl_sbd->sd_vfs->s_dev;
143 __entry->dev = gl->gl_sbd->sd_vfs->s_dev;
177 __entry->dev = gl->gl_sbd->sd_vfs->s_dev;
212 __entry->dev = gh->gh_gl->gl_sbd->sd_vfs->s_dev;
242 __entry->dev = gh->gh_gl->gl_sbd->sd_vfs->s_dev;
281 __entry->dev = gl->gl_sbd->sd_vfs->s_dev;
336 __entry->dev = bd->bd_gl->gl_sbd->sd_vfs->s_dev;
452 __entry->dev = ip->i_gl->gl_sbd->sd_vfs->s_dev;
492 __entry->dev = rgd->rd_gl->gl_sbd->sd_vfs->s_dev;
meta_io.c
117 struct gfs2_sbd *sdp = gl->gl_sbd; in gfs2_getbuf()
203 struct gfs2_sbd *sdp = gl->gl_sbd; in gfs2_meta_read()
365 struct gfs2_sbd *sdp = gl->gl_sbd; in gfs2_meta_ra()
glock.c
122 struct gfs2_sbd *sdp = gl->gl_sbd; in gfs2_glock_free()
195 struct gfs2_sbd *sdp = gl->gl_sbd; in gfs2_glock_put()
232 if (gl->gl_sbd != sdp) in search_bucket()
509 struct gfs2_sbd *sdp = gl->gl_sbd; in do_xmote()
631 struct gfs2_sbd *sdp = gl->gl_sbd; in delete_work_func()
742 gl->gl_sbd = sdp; in gfs2_glock_get()
931 struct gfs2_sbd *sdp = gl->gl_sbd; in add_to_queue()
1009 struct gfs2_sbd *sdp = gl->gl_sbd; in gfs2_glock_nq()
1315 struct lm_lockstruct *ls = &gl->gl_sbd->sd_lockstruct; in gfs2_glock_complete()
1473 if ((gl->gl_sbd == sdp) && lockref_get_not_dead(&gl->gl_lockref)) in examine_bucket()
[all …]
quota.c
122 sdp = qd->qd_gl->gl_sbd; in gfs2_qd_dispose()
305 struct gfs2_sbd *sdp = qd->qd_gl->gl_sbd; in qd_hold()
370 struct gfs2_sbd *sdp = qd->qd_gl->gl_sbd; in bh_get()
417 struct gfs2_sbd *sdp = qd->qd_gl->gl_sbd; in bh_put()
489 gfs2_assert_warn(qd->qd_gl->gl_sbd, in qd_unlock()
617 struct gfs2_sbd *sdp = qd->qd_gl->gl_sbd; in do_qc()
793 struct gfs2_sbd *sdp = (*qda)->qd_gl->gl_sbd; in do_sync()
884 gfs2_log_flush(ip->i_gl->gl_sbd, ip->i_gl, NORMAL_FLUSH); in do_sync()
916 struct gfs2_sbd *sdp = qd->qd_gl->gl_sbd; in do_glock()
999 struct gfs2_sbd *sdp = qd->qd_gl->gl_sbd; in need_sync()
[all …]
trans.c
161 struct gfs2_sbd *sdp = gl->gl_sbd; in gfs2_trans_add_data()
227 struct gfs2_sbd *sdp = gl->gl_sbd; in gfs2_trans_add_meta()
meta_io.h
47 return (((struct gfs2_glock *)mapping) - 1)->gl_sbd; in gfs2_mapping2sbd()
lock_dlm.c
83 lks = this_cpu_ptr(gl->gl_sbd->sd_lkstats); in gfs2_update_reply_times()
111 lks = this_cpu_ptr(gl->gl_sbd->sd_lkstats); in gfs2_update_request_times()
256 struct lm_lockstruct *ls = &gl->gl_sbd->sd_lockstruct; in gdlm_lock()
284 struct gfs2_sbd *sdp = gl->gl_sbd; in gdlm_put_lock()
322 struct lm_lockstruct *ls = &gl->gl_sbd->sd_lockstruct; in gdlm_cancel()
lops.c
73 struct gfs2_sbd *sdp = gl->gl_sbd; in maybe_release_space()
588 struct gfs2_sbd *sdp = gl->gl_sbd; in gfs2_meta_sync()
598 gfs2_io_error(gl->gl_sbd); in gfs2_meta_sync()
incore.h
329 struct gfs2_sbd *gl_sbd; member
836 const struct gfs2_sbd *sdp = gl->gl_sbd; in gfs2_sbstats_inc()
rgrp.c
1850 const struct gfs2_sbd *sdp = gl->gl_sbd; in gfs2_rgrp_congested()
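
As the incore.h hit shows, gl_sbd is a member of the glock pointing back to its superblock descriptor (struct gfs2_sbd), and almost every other hit follows the same pattern of dereferencing it at the top of a function so that code holding only a glock can reach per-mount state (logging, stats, withdraw handling). The sketch below is a minimal stand-alone illustration of that back-pointer pattern; the struct bodies are simplified stand-ins, not the real fs/gfs2/incore.h definitions.

/*
 * Minimal illustration of the gl_sbd back-pointer pattern seen in the
 * hits above.  The struct layouts are simplified stand-ins, not the
 * real GFS2 definitions.
 */
#include <stdio.h>

struct gfs2_sbd {                 /* per-mount superblock data (simplified) */
	const char *sd_fsname;
};

struct gfs2_glock {               /* cluster-wide lock (simplified) */
	struct gfs2_sbd *gl_sbd;  /* back-pointer to the owning superblock */
};

/* Typical usage pattern from the search results above:
 * struct gfs2_sbd *sdp = gl->gl_sbd; */
static void report_glock(const struct gfs2_glock *gl)
{
	struct gfs2_sbd *sdp = gl->gl_sbd;

	printf("glock belongs to filesystem %s\n", sdp->sd_fsname);
}

int main(void)
{
	struct gfs2_sbd sbd = { .sd_fsname = "example_fs" };
	struct gfs2_glock gl = { .gl_sbd = &sbd };

	report_glock(&gl);
	return 0;
}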