Lines Matching full:du

109 struct vmw_display_unit *du =  in crc_generate_worker()  local
111 struct drm_crtc *crtc = &du->crtc; in crc_generate_worker()
118 spin_lock_irq(&du->vkms.crc_state_lock); in crc_generate_worker()
119 crc_pending = du->vkms.crc_pending; in crc_generate_worker()
120 spin_unlock_irq(&du->vkms.crc_state_lock); in crc_generate_worker()
129 spin_lock_irq(&du->vkms.crc_state_lock); in crc_generate_worker()
130 surf = vmw_surface_reference(du->vkms.surface); in crc_generate_worker()
131 spin_unlock_irq(&du->vkms.crc_state_lock); in crc_generate_worker()
145 spin_lock_irq(&du->vkms.crc_state_lock); in crc_generate_worker()
146 frame_start = du->vkms.frame_start; in crc_generate_worker()
147 frame_end = du->vkms.frame_end; in crc_generate_worker()
148 du->vkms.frame_start = 0; in crc_generate_worker()
149 du->vkms.frame_end = 0; in crc_generate_worker()
150 du->vkms.crc_pending = false; in crc_generate_worker()
151 spin_unlock_irq(&du->vkms.crc_state_lock); in crc_generate_worker()
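The crc_generate_worker() matches above all follow one pattern: the worker recovers its display unit via container_of(), holds vkms.crc_state_lock only long enough to snapshot (and reset) the pending-frame bookkeeping and take a surface reference, and does the actual CRC work outside the lock. A minimal kernel-style sketch of that handoff, with made-up demo_* type and field names standing in for the real vmwgfx ones:

#include <linux/kernel.h>
#include <linux/spinlock.h>
#include <linux/workqueue.h>

/* Hypothetical stand-in for the vkms bookkeeping embedded in the display unit. */
struct demo_du {
	spinlock_t crc_state_lock;	/* protects the fields below */
	bool crc_pending;
	u64 frame_start;
	u64 frame_end;
	struct work_struct crc_work;
};

static void demo_crc_worker(struct work_struct *work)
{
	struct demo_du *du = container_of(work, struct demo_du, crc_work);
	u64 frame_start, frame_end;
	bool crc_pending;

	/* Snapshot and clear the shared state while holding the lock... */
	spin_lock_irq(&du->crc_state_lock);
	crc_pending = du->crc_pending;
	frame_start = du->frame_start;
	frame_end = du->frame_end;
	du->frame_start = 0;
	du->frame_end = 0;
	du->crc_pending = false;
	spin_unlock_irq(&du->crc_state_lock);

	if (!crc_pending)
		return;

	/* ...and do the expensive part (CRC computation and reporting) outside
	 * of it, so the vblank timer path is never blocked on this worker. */
	pr_info("would compute CRC for frames %llu..%llu\n",
		frame_start, frame_end);
}

In the real worker the same lock also guards the surface pointer: vmw_surface_reference() is taken under vkms.crc_state_lock (line 130) before the CRC is computed.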
163 struct vmw_display_unit *du = container_of(timer, struct vmw_display_unit, vkms.timer); in vmw_vkms_vblank_simulate() local
164 struct drm_crtc *crtc = &du->crtc; in vmw_vkms_vblank_simulate()
170 ret_overrun = hrtimer_forward_now(&du->vkms.timer, in vmw_vkms_vblank_simulate()
171 du->vkms.period_ns); in vmw_vkms_vblank_simulate()
181 has_surface = du->vkms.surface != NULL; in vmw_vkms_vblank_simulate()
184 if (du->vkms.crc_enabled && has_surface) { in vmw_vkms_vblank_simulate()
187 spin_lock(&du->vkms.crc_state_lock); in vmw_vkms_vblank_simulate()
188 if (!du->vkms.crc_pending) in vmw_vkms_vblank_simulate()
189 du->vkms.frame_start = frame; in vmw_vkms_vblank_simulate()
193 du->vkms.frame_start, frame); in vmw_vkms_vblank_simulate()
194 du->vkms.frame_end = frame; in vmw_vkms_vblank_simulate()
195 du->vkms.crc_pending = true; in vmw_vkms_vblank_simulate()
196 spin_unlock(&du->vkms.crc_state_lock); in vmw_vkms_vblank_simulate()
198 ret = queue_work(vmw->crc_workq, &du->vkms.crc_generator_work); in vmw_vkms_vblank_simulate()
251 struct vmw_display_unit *du = vmw_crtc_to_du(crtc); in vmw_vkms_get_vblank_timestamp() local
262 *vblank_time = READ_ONCE(du->vkms.timer.node.expires); in vmw_vkms_get_vblank_timestamp()
274 *vblank_time -= du->vkms.period_ns; in vmw_vkms_get_vblank_timestamp()
286 struct vmw_display_unit *du = vmw_crtc_to_du(crtc); in vmw_vkms_enable_vblank() local
293 hrtimer_init(&du->vkms.timer, CLOCK_MONOTONIC, HRTIMER_MODE_REL); in vmw_vkms_enable_vblank()
294 du->vkms.timer.function = &vmw_vkms_vblank_simulate; in vmw_vkms_enable_vblank()
295 du->vkms.period_ns = ktime_set(0, vblank->framedur_ns); in vmw_vkms_enable_vblank()
296 hrtimer_start(&du->vkms.timer, du->vkms.period_ns, HRTIMER_MODE_REL); in vmw_vkms_enable_vblank()
304 struct vmw_display_unit *du = vmw_crtc_to_du(crtc); in vmw_vkms_disable_vblank() local
310 hrtimer_cancel(&du->vkms.timer); in vmw_vkms_disable_vblank()
311 du->vkms.surface = NULL; in vmw_vkms_disable_vblank()
312 du->vkms.period_ns = ktime_set(0, 0); in vmw_vkms_disable_vblank()
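Taken together, the vmw_vkms_enable_vblank(), vmw_vkms_vblank_simulate() and vmw_vkms_disable_vblank() matches show the standard hrtimer recipe for a simulated vblank: arm a relative timer with the frame period, re-arm it from the callback with hrtimer_forward_now(), and cancel it when vblanks are disabled. A stripped-down sketch of just that timer lifecycle; demo_du and the demo_vblank_* names are placeholders, and the real callback additionally handles the CRC frame bookkeeping and queues the CRC worker:

#include <linux/hrtimer.h>
#include <linux/ktime.h>
#include <linux/kernel.h>

struct demo_du {
	struct hrtimer timer;
	ktime_t period_ns;
};

static enum hrtimer_restart demo_vblank_simulate(struct hrtimer *timer)
{
	struct demo_du *du = container_of(timer, struct demo_du, timer);
	u64 overrun;

	/* Push the expiry forward by one frame period; the return value is
	 * the number of periods that elapsed since the last expiry. */
	overrun = hrtimer_forward_now(&du->timer, du->period_ns);
	if (overrun > 1)
		pr_warn("missed %llu vblank period(s)\n", overrun - 1);

	/* The real callback would typically signal DRM core (e.g. via
	 * drm_crtc_handle_vblank()) and queue the CRC worker here. */
	return HRTIMER_RESTART;
}

static void demo_vblank_enable(struct demo_du *du, u32 framedur_ns)
{
	hrtimer_init(&du->timer, CLOCK_MONOTONIC, HRTIMER_MODE_REL);
	du->timer.function = demo_vblank_simulate;
	du->period_ns = ktime_set(0, framedur_ns);
	hrtimer_start(&du->timer, du->period_ns, HRTIMER_MODE_REL);
}

static void demo_vblank_disable(struct demo_du *du)
{
	/* hrtimer_cancel() waits for a running callback to finish, so the
	 * timer is guaranteed quiescent afterwards. */
	hrtimer_cancel(&du->timer);
	du->period_ns = ktime_set(0, 0);
}

Keeping period_ns on the display unit is also what lets vmw_vkms_get_vblank_timestamp() report a vblank time by subtracting one period from the timer's next expiry (lines 262 and 274 above).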
324 struct vmw_display_unit *du = vmw_crtc_to_du(crtc); in vmw_vkms_crtc_init() local
326 atomic_set(&du->vkms.atomic_lock, VMW_VKMS_LOCK_UNLOCKED); in vmw_vkms_crtc_init()
327 spin_lock_init(&du->vkms.crc_state_lock); in vmw_vkms_crtc_init()
329 INIT_WORK(&du->vkms.crc_generator_work, crc_generate_worker); in vmw_vkms_crtc_init()
330 du->vkms.surface = NULL; in vmw_vkms_crtc_init()
336 struct vmw_display_unit *du = vmw_crtc_to_du(crtc); in vmw_vkms_crtc_cleanup() local
338 if (du->vkms.surface) in vmw_vkms_crtc_cleanup()
339 vmw_surface_unreference(&du->vkms.surface); in vmw_vkms_crtc_cleanup()
340 WARN_ON(work_pending(&du->vkms.crc_generator_work)); in vmw_vkms_crtc_cleanup()
341 hrtimer_cancel(&du->vkms.timer); in vmw_vkms_crtc_cleanup()
470 struct vmw_display_unit *du = vmw_crtc_to_du(crtc); in vmw_vkms_set_crc_source() local
483 prev_enabled = du->vkms.crc_enabled; in vmw_vkms_set_crc_source()
484 du->vkms.crc_enabled = enabled; in vmw_vkms_set_crc_source()
498 struct vmw_display_unit *du = vmw_crtc_to_du(crtc); in vmw_vkms_set_crc_surface() local
501 if (vmw->vkms_enabled && du->vkms.surface != surf) { in vmw_vkms_set_crc_surface()
502 WARN_ON(atomic_read(&du->vkms.atomic_lock) != VMW_VKMS_LOCK_MODESET); in vmw_vkms_set_crc_surface()
503 if (du->vkms.surface) in vmw_vkms_set_crc_surface()
504 vmw_surface_unreference(&du->vkms.surface); in vmw_vkms_set_crc_surface()
506 du->vkms.surface = vmw_surface_reference(surf); in vmw_vkms_set_crc_surface()
512 * @du: The vmw_display_unit from which to grab the vblank timings
519 vmw_vkms_lock_max_wait_ns(struct vmw_display_unit *du) in vmw_vkms_lock_max_wait_ns() argument
521 s64 nsecs = ktime_to_ns(du->vkms.period_ns); in vmw_vkms_lock_max_wait_ns()
547 struct vmw_display_unit *du = vmw_crtc_to_du(crtc); in vmw_vkms_modeset_lock() local
549 const u64 MAX_NSECS_DELAY = vmw_vkms_lock_max_wait_ns(du); in vmw_vkms_modeset_lock()
554 ret = atomic_cmpxchg(&du->vkms.atomic_lock, in vmw_vkms_modeset_lock()
565 total_delay, ret, atomic_read(&du->vkms.atomic_lock)); in vmw_vkms_modeset_lock()
581 struct vmw_display_unit *du = vmw_crtc_to_du(crtc); in vmw_vkms_modeset_lock_relaxed() local
583 const u64 MAX_NSECS_DELAY = vmw_vkms_lock_max_wait_ns(du); in vmw_vkms_modeset_lock_relaxed()
588 ret = atomic_cmpxchg(&du->vkms.atomic_lock, in vmw_vkms_modeset_lock_relaxed()
618 struct vmw_display_unit *du = vmw_crtc_to_du(crtc); in vmw_vkms_vblank_trylock() local
621 ret = atomic_cmpxchg(&du->vkms.atomic_lock, in vmw_vkms_vblank_trylock()
631 struct vmw_display_unit *du = vmw_crtc_to_du(crtc); in vmw_vkms_unlock() local
634 atomic_set(&du->vkms.atomic_lock, VMW_VKMS_LOCK_UNLOCKED); in vmw_vkms_unlock()
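The final group of matches (vmw_vkms_modeset_lock(), vmw_vkms_modeset_lock_relaxed(), vmw_vkms_vblank_trylock() and vmw_vkms_unlock()) treats vkms.atomic_lock, a plain atomic_t, as a tiny hand-rolled lock: the modeset path retries atomic_cmpxchg() until it wins the slot, with a wait budget derived from the vblank period (vmw_vkms_lock_max_wait_ns()), the vblank timer only ever trylocks so its callback can never stall, and unlock is a single atomic_set() back to VMW_VKMS_LOCK_UNLOCKED. A condensed sketch of that scheme with illustrative DEMO_LOCK_* states and demo_* helpers; the real driver's delay/timeout handling and its "relaxed" variant are omitted:

#include <linux/atomic.h>
#include <linux/delay.h>
#include <linux/types.h>

#define DEMO_LOCK_UNLOCKED	0
#define DEMO_LOCK_MODESET	1
#define DEMO_LOCK_VBLANK	2

/* Modeset side: retry (with a short delay) until UNLOCKED -> MODESET
 * succeeds, giving up after the caller's wait budget. */
static bool demo_modeset_lock(atomic_t *lock, u64 max_wait_ns)
{
	u64 waited_ns = 0;

	while (atomic_cmpxchg(lock, DEMO_LOCK_UNLOCKED,
			      DEMO_LOCK_MODESET) != DEMO_LOCK_UNLOCKED) {
		if (waited_ns >= max_wait_ns)
			return false;
		ndelay(1000);
		waited_ns += 1000;
	}
	return true;
}

/* Vblank timer side: a single attempt, never a wait, so the hrtimer
 * callback cannot block behind a modeset. */
static bool demo_vblank_trylock(atomic_t *lock)
{
	return atomic_cmpxchg(lock, DEMO_LOCK_UNLOCKED,
			      DEMO_LOCK_VBLANK) == DEMO_LOCK_UNLOCKED;
}

static void demo_unlock(atomic_t *lock)
{
	atomic_set(lock, DEMO_LOCK_UNLOCKED);
}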