Lines matching refs:gl_name
78 .key_offset = offsetof(struct gfs2_glock, gl_name),
121 wait_queue_head_t *wq = glock_waitqueue(&gl->gl_name); in wake_up_glock()
124 __wake_up(wq, TASK_NORMAL, 1, &gl->gl_name); in wake_up_glock()
155 struct gfs2_sbd *sdp = gl->gl_name.ln_sbd; in glock_blocked_by_withdraw()
162 gl->gl_name.ln_number == sdp->sd_jdesc->jd_no_addr) in glock_blocked_by_withdraw()
176 struct gfs2_sbd *sdp = gl->gl_name.ln_sbd; in gfs2_glock_free()
184 struct gfs2_sbd *sdp = gl->gl_name.ln_sbd; in gfs2_glock_free_later()
248 struct gfs2_sbd *sdp = gl->gl_name.ln_sbd; in gfs2_glock_queue_work()
264 struct gfs2_sbd *sdp = gl->gl_name.ln_sbd; in __gfs2_glock_put()
386 struct gfs2_sbd *sdp = gh->gh_gl->gl_name.ln_sbd; in gfs2_holder_wake()
568 struct gfs2_sbd *sdp = gl->gl_name.ln_sbd; in gfs2_set_demote()
639 fs_err(gl->gl_name.ln_sbd, "wanted %u got %u\n", in finish_xmote()
669 struct gfs2_sbd *sdp = gl->gl_name.ln_sbd; in is_system_glock()
691 struct gfs2_sbd *sdp = gl->gl_name.ln_sbd; in do_xmote()
901 if (gfs2_assert_warn(gl->gl_name.ln_sbd, prev_object == NULL)) { in glock_set_object()
903 gl->gl_name.ln_type, in glock_set_object()
904 (unsigned long long)gl->gl_name.ln_number); in glock_set_object()
922 if (gfs2_assert_warn(gl->gl_name.ln_sbd, prev_object == object)) { in glock_clear_object()
924 gl->gl_name.ln_type, in glock_clear_object()
925 (unsigned long long)gl->gl_name.ln_number); in glock_clear_object()
1008 struct gfs2_sbd *sdp = gl->gl_name.ln_sbd; in gfs2_queue_try_to_evict()
1018 struct gfs2_sbd *sdp = gl->gl_name.ln_sbd; in gfs2_queue_verify_evict()
1030 struct gfs2_sbd *sdp = gl->gl_name.ln_sbd; in delete_work_func()
1032 u64 no_addr = gl->gl_name.ln_number; in delete_work_func()
1094 if (gl->gl_name.ln_type == LM_TYPE_INODE) { in glock_work_func()
1220 gl->gl_name = name; in gfs2_glock_get()
1237 if (gl->gl_name.ln_type == LM_TYPE_IOPEN) in gfs2_glock_get()
1390 struct gfs2_sbd *sdp = ghs[0].gh_gl->gl_name.ln_sbd; in gfs2_glock_async_wait()
1505 struct gfs2_sbd *sdp = gl->gl_name.ln_sbd; in add_to_queue()
1563 gh2->gh_gl->gl_name.ln_type, gh2->gh_state); in add_to_queue()
1567 gh->gh_gl->gl_name.ln_type, gh->gh_state); in add_to_queue()
1676 gl->gl_name.ln_type == LM_TYPE_INODE) in __gfs2_glock_dq()
1690 struct gfs2_sbd *sdp = gl->gl_name.ln_sbd; in gfs2_glock_dq()
1704 gl->gl_name.ln_sbd->sd_lockstruct.ls_ops->lm_cancel(gl); in gfs2_glock_dq()
1791 const struct lm_lockname *a = &gh_a->gh_gl->gl_name; in glock_compare()
1792 const struct lm_lockname *b = &gh_b->gh_gl->gl_name; in glock_compare()
1892 gl->gl_name.ln_type == LM_TYPE_INODE) { in gfs2_glock_cb()
1949 struct lm_lockstruct *ls = &gl->gl_name.ln_sbd->sd_lockstruct; in gfs2_glock_complete()
1976 if (gla->gl_name.ln_number > glb->gl_name.ln_number) in glock_cmp()
1978 if (gla->gl_name.ln_number < glb->gl_name.ln_number) in glock_cmp()
1986 struct gfs2_sbd *sdp = gl->gl_name.ln_sbd; in can_free_glock()
2108 if (gl->gl_name.ln_sbd == sdp) in glock_hash_walk()
2128 if (gl->gl_name.ln_type == LM_TYPE_IOPEN) { in flush_delete_work()
2129 struct gfs2_sbd *sdp = gl->gl_name.ln_sbd; in flush_delete_work()
2404 struct gfs2_sbd *sdp = gl->gl_name.ln_sbd; in gfs2_dump_glock()
2423 gl->gl_name.ln_type, in gfs2_dump_glock()
2424 (unsigned long long)gl->gl_name.ln_number, in gfs2_dump_glock()
2444 gl->gl_name.ln_type, in gfs2_glstats_seq_show()
2445 (unsigned long long)gl->gl_name.ln_number, in gfs2_glstats_seq_show()
2561 if (gl->gl_name.ln_sbd != gi->sdp) in gfs2_glock_iter_next()
2837 struct lm_lockname gl_name = { .ln_type = LM_TYPE_RESERVED }; in gfs2_glockfd_seq_show_flock() local
2844 gl_name = fl_gh->gh_gl->gl_name; in gfs2_glockfd_seq_show_flock()
2847 if (gl_name.ln_type != LM_TYPE_RESERVED) { in gfs2_glockfd_seq_show_flock()
2849 i->tgid, i->fd, gl_name.ln_type, in gfs2_glockfd_seq_show_flock()
2850 (unsigned long long)gl_name.ln_number); in gfs2_glockfd_seq_show_flock()
2864 i->tgid, i->fd, gl->gl_name.ln_type, in gfs2_glockfd_seq_show()
2865 (unsigned long long)gl->gl_name.ln_number); in gfs2_glockfd_seq_show()
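
Every hit above reaches the glock's identity through its gl_name member, a struct lm_lockname embedded at the start of the hash key (see the .key_offset reference at line 78). As a quick orientation, the following is a minimal sketch of that relationship, reconstructed only from the fields visible in this listing (ln_sbd, ln_number, ln_type); the real definitions live in fs/gfs2/incore.h and carry more members, so field order and exact types here are assumptions.

/*
 * Minimal sketch, not the kernel's actual definitions: the shape of
 * lm_lockname as implied by the references above.
 */
struct gfs2_sbd;                        /* per-superblock data, opaque here */

struct lm_lockname {
	struct gfs2_sbd *ln_sbd;        /* filesystem the lock belongs to */
	unsigned long long ln_number;   /* lock number, e.g. inode block address */
	unsigned int ln_type;           /* LM_TYPE_INODE, LM_TYPE_IOPEN, ... */
};

struct gfs2_glock {
	struct lm_lockname gl_name;     /* glock identity; used as the rhashtable key */
	/* ... other glock state elided ... */
};

/*
 * Hypothetical helper, named only for illustration: the access pattern
 * "gl->gl_name.ln_sbd" that recurs throughout the listing.
 */
static inline struct gfs2_sbd *glock_sbd(const struct gfs2_glock *gl)
{
	return gl->gl_name.ln_sbd;
}

Most of the references above follow exactly this pattern: they either resolve the owning superblock via gl_name.ln_sbd, or format diagnostics from gl_name.ln_type and gl_name.ln_number.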