
Searched refs: new_dbuf_state (Results 1 – 2 of 2) sorted by relevance

/linux-6.12.1/drivers/gpu/drm/i915/display/
skl_watermark.c
651 struct intel_dbuf_state *new_dbuf_state = in skl_crtc_allocate_ddb() local
662 if (new_dbuf_state->weight[pipe] == 0) { in skl_crtc_allocate_ddb()
663 skl_ddb_entry_init(&new_dbuf_state->ddb[pipe], 0, 0); in skl_crtc_allocate_ddb()
667 dbuf_slice_mask = new_dbuf_state->slices[pipe]; in skl_crtc_allocate_ddb()
673 intel_crtc_dbuf_weights(new_dbuf_state, pipe, in skl_crtc_allocate_ddb()
679 skl_ddb_entry_init(&new_dbuf_state->ddb[pipe], in skl_crtc_allocate_ddb()
684 if (old_dbuf_state->slices[pipe] == new_dbuf_state->slices[pipe] && in skl_crtc_allocate_ddb()
686 &new_dbuf_state->ddb[pipe])) in skl_crtc_allocate_ddb()
689 ret = intel_atomic_lock_global_state(&new_dbuf_state->base); in skl_crtc_allocate_ddb()
701 crtc_state->wm.skl.ddb.start = mbus_offset + new_dbuf_state->ddb[pipe].start; in skl_crtc_allocate_ddb()
[all …]
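
The skl_watermark.c matches trace the per-pipe DDB allocation in skl_crtc_allocate_ddb(): a pipe with no weight gets an empty DDB entry, an active pipe's entry is derived from its slice mask and weight, and the global dbuf state is only locked when that pipe's allocation actually changes. The kernel-style sketch below condenses that flow; it is not the verbatim source, the range arithmetic is elided, and intel_atomic_get_old_dbuf_state() / skl_ddb_entry_equal() are assumed from context rather than taken from the matched lines.

/* Condensed sketch of the flow suggested by the matches above; assumes the usual i915 display headers. */
static int skl_crtc_allocate_ddb_sketch(struct intel_atomic_state *state,
                                        struct intel_crtc *crtc)
{
        struct intel_dbuf_state *old_dbuf_state =
                intel_atomic_get_old_dbuf_state(state);  /* assumed counterpart accessor */
        struct intel_dbuf_state *new_dbuf_state =
                intel_atomic_get_new_dbuf_state(state);  /* line 651 */
        enum pipe pipe = crtc->pipe;
        int ret;

        /* Lines 662-663: a pipe with no weight gets an empty DDB entry. */
        if (new_dbuf_state->weight[pipe] == 0) {
                skl_ddb_entry_init(&new_dbuf_state->ddb[pipe], 0, 0);
                return 0;
        }

        /*
         * Lines 667-679: derive the pipe's DDB range from its slice mask
         * (new_dbuf_state->slices[pipe]) and relative weight via
         * intel_crtc_dbuf_weights(); the arithmetic is elided here.
         */

        /* Lines 684-686: allocation unchanged, no need to lock the global state. */
        if (old_dbuf_state->slices[pipe] == new_dbuf_state->slices[pipe] &&
            skl_ddb_entry_equal(&old_dbuf_state->ddb[pipe],  /* assumed helper */
                                &new_dbuf_state->ddb[pipe]))
                return 0;

        /* Line 689: the allocation changed, so serialize with other CRTCs. */
        ret = intel_atomic_lock_global_state(&new_dbuf_state->base);
        if (ret)
                return ret;

        /*
         * Line 701 (not reproduced): the resulting range, offset by the
         * MBUS offset, is copied into crtc_state->wm.skl.ddb.
         */
        return 0;
}
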
intel_pmdemand.c
264 const struct intel_dbuf_state *new_dbuf_state, *old_dbuf_state; in intel_pmdemand_needs_update() local
274 new_dbuf_state = intel_atomic_get_new_dbuf_state(state); in intel_pmdemand_needs_update()
276 if (new_dbuf_state && in intel_pmdemand_needs_update()
277 (new_dbuf_state->active_pipes != in intel_pmdemand_needs_update()
279 new_dbuf_state->enabled_slices != in intel_pmdemand_needs_update()
305 const struct intel_dbuf_state *new_dbuf_state; in intel_pmdemand_atomic_check() local
326 new_dbuf_state = intel_atomic_get_dbuf_state(state); in intel_pmdemand_atomic_check()
327 if (IS_ERR(new_dbuf_state)) in intel_pmdemand_atomic_check()
328 return PTR_ERR(new_dbuf_state); in intel_pmdemand_atomic_check()
331 min_t(u8, hweight8(new_dbuf_state->active_pipes), 3); in intel_pmdemand_atomic_check()
[all …]
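
The intel_pmdemand.c matches consume the same state read-only: intel_pmdemand_needs_update() compares the new and old dbuf states' active_pipes and enabled_slices, and intel_pmdemand_atomic_check() acquires the dbuf state (which can return an ERR_PTR) and derives a parameter from the active-pipe count. A minimal sketch of both patterns, assuming intel_atomic_get_old_dbuf_state() as the old-state accessor and using hypothetical wrapper names:

/* Kernel-style sketch; assumes the usual i915 display headers. */
static bool pmdemand_dbuf_changed_sketch(struct intel_atomic_state *state)
{
        const struct intel_dbuf_state *new_dbuf_state =
                intel_atomic_get_new_dbuf_state(state);  /* line 274 */
        const struct intel_dbuf_state *old_dbuf_state =
                intel_atomic_get_old_dbuf_state(state);  /* assumed accessor */

        /* Lines 276-279: an update is needed when pipes or slices change (old-state operands assumed). */
        return new_dbuf_state && old_dbuf_state &&
               (new_dbuf_state->active_pipes != old_dbuf_state->active_pipes ||
                new_dbuf_state->enabled_slices != old_dbuf_state->enabled_slices);
}

/* Hypothetical helper mirroring lines 326-331 of intel_pmdemand_atomic_check(). */
static int pmdemand_active_pipes_sketch(struct intel_atomic_state *state, u8 *out)
{
        const struct intel_dbuf_state *new_dbuf_state =
                intel_atomic_get_dbuf_state(state);  /* line 326 */

        if (IS_ERR(new_dbuf_state))
                return PTR_ERR(new_dbuf_state);  /* lines 327-328 */

        /* Line 331: the active-pipe count feeds the pmdemand parameters, capped at 3. */
        *out = min_t(u8, hweight8(new_dbuf_state->active_pipes), 3);
        return 0;
}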