Lines Matching +full:full +full:- +full:frame
82 (0x13830 - 0x7030) >> 2,
129 spin_lock_irqsave(&adev->audio_endpt_idx_lock, flags); in dce_v8_0_audio_endpt_rreg()
132 spin_unlock_irqrestore(&adev->audio_endpt_idx_lock, flags); in dce_v8_0_audio_endpt_rreg()
142 spin_lock_irqsave(&adev->audio_endpt_idx_lock, flags); in dce_v8_0_audio_endpt_wreg()
145 spin_unlock_irqrestore(&adev->audio_endpt_idx_lock, flags); in dce_v8_0_audio_endpt_wreg()
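The lock/unlock lines matched above belong to the indexed audio-endpoint register helpers. A minimal sketch of the read side, assuming the usual AZALIA index/data register pair (the register names are not in the matches):

	static u32 dce_v8_0_audio_endpt_rreg(struct amdgpu_device *adev,
					     u32 block_offset, u32 reg)
	{
		unsigned long flags;
		u32 r;

		/* the index/data pair is shared, so serialize access to it */
		spin_lock_irqsave(&adev->audio_endpt_idx_lock, flags);
		WREG32(mmAZALIA_F0_CODEC_ENDPOINT_INDEX + block_offset, reg);
		r = RREG32(mmAZALIA_F0_CODEC_ENDPOINT_DATA + block_offset);
		spin_unlock_irqrestore(&adev->audio_endpt_idx_lock, flags);

		return r;
	}

The write helper at lines 142-145 mirrors this, writing the DATA register instead of reading it.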
150 if (crtc >= adev->mode_info.num_crtc) in dce_v8_0_vblank_get_counter()
161 for (i = 0; i < adev->mode_info.num_crtc; i++) in dce_v8_0_pageflip_interrupt_init()
162 amdgpu_irq_get(adev, &adev->pageflip_irq, i); in dce_v8_0_pageflip_interrupt_init()
170 for (i = 0; i < adev->mode_info.num_crtc; i++) in dce_v8_0_pageflip_interrupt_fini()
171 amdgpu_irq_put(adev, &adev->pageflip_irq, i); in dce_v8_0_pageflip_interrupt_fini()
175 * dce_v8_0_page_flip - pageflip callback.
188 struct amdgpu_crtc *amdgpu_crtc = adev->mode_info.crtcs[crtc_id]; in dce_v8_0_page_flip()
189 struct drm_framebuffer *fb = amdgpu_crtc->base.primary->fb; in dce_v8_0_page_flip()
192 WREG32(mmGRPH_FLIP_CONTROL + amdgpu_crtc->crtc_offset, async ? in dce_v8_0_page_flip()
195 WREG32(mmGRPH_PITCH + amdgpu_crtc->crtc_offset, in dce_v8_0_page_flip()
196 fb->pitches[0] / fb->format->cpp[0]); in dce_v8_0_page_flip()
198 WREG32(mmGRPH_PRIMARY_SURFACE_ADDRESS_HIGH + amdgpu_crtc->crtc_offset, in dce_v8_0_page_flip()
201 WREG32(mmGRPH_PRIMARY_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset, in dce_v8_0_page_flip()
204 RREG32(mmGRPH_PRIMARY_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset); in dce_v8_0_page_flip()
210 if ((crtc < 0) || (crtc >= adev->mode_info.num_crtc)) in dce_v8_0_crtc_get_scanoutpos()
211 return -EINVAL; in dce_v8_0_crtc_get_scanoutpos()
220 * dce_v8_0_hpd_sense - hpd sense callback.
233 if (hpd >= adev->mode_info.num_hpd) in dce_v8_0_hpd_sense()
244 * dce_v8_0_hpd_set_polarity - hpd set polarity callback.
257 if (hpd >= adev->mode_info.num_hpd) in dce_v8_0_hpd_set_polarity()
273 if (hpd >= adev->mode_info.num_hpd) { in dce_v8_0_hpd_int_ack()
284 * dce_v8_0_hpd_init - hpd setup callback.
302 if (amdgpu_connector->hpd.hpd >= adev->mode_info.num_hpd) in dce_v8_0_hpd_init()
305 tmp = RREG32(mmDC_HPD1_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd]); in dce_v8_0_hpd_init()
307 WREG32(mmDC_HPD1_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd], tmp); in dce_v8_0_hpd_init()
309 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP || in dce_v8_0_hpd_init()
310 connector->connector_type == DRM_MODE_CONNECTOR_LVDS) { in dce_v8_0_hpd_init()
316 tmp = RREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd]); in dce_v8_0_hpd_init()
318 WREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd], tmp); in dce_v8_0_hpd_init()
322 dce_v8_0_hpd_int_ack(adev, amdgpu_connector->hpd.hpd); in dce_v8_0_hpd_init()
323 dce_v8_0_hpd_set_polarity(adev, amdgpu_connector->hpd.hpd); in dce_v8_0_hpd_init()
324 amdgpu_irq_get(adev, &adev->hpd_irq, amdgpu_connector->hpd.hpd); in dce_v8_0_hpd_init()
330 * dce_v8_0_hpd_fini - hpd tear down callback.
348 if (amdgpu_connector->hpd.hpd >= adev->mode_info.num_hpd) in dce_v8_0_hpd_fini()
351 tmp = RREG32(mmDC_HPD1_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd]); in dce_v8_0_hpd_fini()
353 WREG32(mmDC_HPD1_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd], tmp); in dce_v8_0_hpd_fini()
355 amdgpu_irq_put(adev, &adev->hpd_irq, amdgpu_connector->hpd.hpd); in dce_v8_0_hpd_fini()
371 for (i = 0; i < adev->mode_info.num_crtc; i++) { in dce_v8_0_is_display_hung()
379 for (i = 0; i < adev->mode_info.num_crtc; i++) { in dce_v8_0_is_display_hung()
420 switch (adev->asic_type) { in dce_v8_0_get_num_crtc()
464 struct drm_device *dev = encoder->dev; in dce_v8_0_program_fmt()
467 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc); in dce_v8_0_program_fmt()
476 dither = amdgpu_connector->dither; in dce_v8_0_program_fmt()
480 if (amdgpu_encoder->devices & ATOM_DEVICE_LCD_SUPPORT) in dce_v8_0_program_fmt()
484 if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) || in dce_v8_0_program_fmt()
485 (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2)) in dce_v8_0_program_fmt()
532 WREG32(mmFMT_BIT_DEPTH_CONTROL + amdgpu_crtc->crtc_offset, tmp); in dce_v8_0_program_fmt()
538 * dce_v8_0_line_buffer_adjust - Set up the line buffer
554 u32 pipe_offset = amdgpu_crtc->crtc_id * 0x8; in dce_v8_0_line_buffer_adjust()
563 if (amdgpu_crtc->base.enabled && mode) { in dce_v8_0_line_buffer_adjust()
564 if (mode->crtc_hdisplay < 1920) { in dce_v8_0_line_buffer_adjust()
567 } else if (mode->crtc_hdisplay < 2560) { in dce_v8_0_line_buffer_adjust()
570 } else if (mode->crtc_hdisplay < 4096) { in dce_v8_0_line_buffer_adjust()
572 buffer_alloc = (adev->flags & AMD_IS_APU) ? 2 : 4; in dce_v8_0_line_buffer_adjust()
576 buffer_alloc = (adev->flags & AMD_IS_APU) ? 2 : 4; in dce_v8_0_line_buffer_adjust()
583 WREG32(mmLB_MEMORY_CTRL + amdgpu_crtc->crtc_offset, in dce_v8_0_line_buffer_adjust()
589 for (i = 0; i < adev->usec_timeout; i++) { in dce_v8_0_line_buffer_adjust()
596 if (amdgpu_crtc->base.enabled && mode) { in dce_v8_0_line_buffer_adjust()
613 * cik_get_number_of_dram_channels - get the number of dram channels
665 * dce_v8_0_dram_bandwidth - get the dram bandwidth
680 a.full = dfixed_const(1000); in dce_v8_0_dram_bandwidth()
681 yclk.full = dfixed_const(wm->yclk); in dce_v8_0_dram_bandwidth()
682 yclk.full = dfixed_div(yclk, a); in dce_v8_0_dram_bandwidth()
683 dram_channels.full = dfixed_const(wm->dram_channels * 4); in dce_v8_0_dram_bandwidth()
684 a.full = dfixed_const(10); in dce_v8_0_dram_bandwidth()
685 dram_efficiency.full = dfixed_const(7); in dce_v8_0_dram_bandwidth()
686 dram_efficiency.full = dfixed_div(dram_efficiency, a); in dce_v8_0_dram_bandwidth()
687 bandwidth.full = dfixed_mul(dram_channels, yclk); in dce_v8_0_dram_bandwidth()
688 bandwidth.full = dfixed_mul(bandwidth, dram_efficiency); in dce_v8_0_dram_bandwidth()
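The dfixed_* helpers above implement 20.12 fixed-point arithmetic; stripped of that plumbing, the matched lines reduce to a single product. A plain-integer sketch (the unit naming is mine, not the driver's):

	/* DRAM bandwidth ~= (yclk / 1000) * dram_channels * 4 bytes * 70% efficiency */
	u32 dram_bw = (wm->yclk / 1000) * wm->dram_channels * 4 * 7 / 10;

The 7/10 factor is the dram_efficiency constant built at lines 684-686; the _for_display variant that follows swaps it for a 3/10 display allocation share.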
694 * dce_v8_0_dram_bandwidth_for_display - get the dram bandwidth for display
709 a.full = dfixed_const(1000); in dce_v8_0_dram_bandwidth_for_display()
710 yclk.full = dfixed_const(wm->yclk); in dce_v8_0_dram_bandwidth_for_display()
711 yclk.full = dfixed_div(yclk, a); in dce_v8_0_dram_bandwidth_for_display()
712 dram_channels.full = dfixed_const(wm->dram_channels * 4); in dce_v8_0_dram_bandwidth_for_display()
713 a.full = dfixed_const(10); in dce_v8_0_dram_bandwidth_for_display()
714 disp_dram_allocation.full = dfixed_const(3); /* XXX worst case value 0.3 */ in dce_v8_0_dram_bandwidth_for_display()
715 disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a); in dce_v8_0_dram_bandwidth_for_display()
716 bandwidth.full = dfixed_mul(dram_channels, yclk); in dce_v8_0_dram_bandwidth_for_display()
717 bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation); in dce_v8_0_dram_bandwidth_for_display()
723 * dce_v8_0_data_return_bandwidth - get the data return bandwidth
738 a.full = dfixed_const(1000); in dce_v8_0_data_return_bandwidth()
739 sclk.full = dfixed_const(wm->sclk); in dce_v8_0_data_return_bandwidth()
740 sclk.full = dfixed_div(sclk, a); in dce_v8_0_data_return_bandwidth()
741 a.full = dfixed_const(10); in dce_v8_0_data_return_bandwidth()
742 return_efficiency.full = dfixed_const(8); in dce_v8_0_data_return_bandwidth()
743 return_efficiency.full = dfixed_div(return_efficiency, a); in dce_v8_0_data_return_bandwidth()
744 a.full = dfixed_const(32); in dce_v8_0_data_return_bandwidth()
745 bandwidth.full = dfixed_mul(a, sclk); in dce_v8_0_data_return_bandwidth()
746 bandwidth.full = dfixed_mul(bandwidth, return_efficiency); in dce_v8_0_data_return_bandwidth()
752 * dce_v8_0_dmif_request_bandwidth - get the dmif bandwidth
767 a.full = dfixed_const(1000); in dce_v8_0_dmif_request_bandwidth()
768 disp_clk.full = dfixed_const(wm->disp_clk); in dce_v8_0_dmif_request_bandwidth()
769 disp_clk.full = dfixed_div(disp_clk, a); in dce_v8_0_dmif_request_bandwidth()
770 a.full = dfixed_const(32); in dce_v8_0_dmif_request_bandwidth()
771 b.full = dfixed_mul(a, disp_clk); in dce_v8_0_dmif_request_bandwidth()
773 a.full = dfixed_const(10); in dce_v8_0_dmif_request_bandwidth()
774 disp_clk_request_efficiency.full = dfixed_const(8); in dce_v8_0_dmif_request_bandwidth()
775 disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a); in dce_v8_0_dmif_request_bandwidth()
777 bandwidth.full = dfixed_mul(b, disp_clk_request_efficiency); in dce_v8_0_dmif_request_bandwidth()
783 * dce_v8_0_available_bandwidth - get the min available bandwidth
802 * dce_v8_0_average_bandwidth - get the average available bandwidth
822 a.full = dfixed_const(1000); in dce_v8_0_average_bandwidth()
823 line_time.full = dfixed_const(wm->active_time + wm->blank_time); in dce_v8_0_average_bandwidth()
824 line_time.full = dfixed_div(line_time, a); in dce_v8_0_average_bandwidth()
825 bpp.full = dfixed_const(wm->bytes_per_pixel); in dce_v8_0_average_bandwidth()
826 src_width.full = dfixed_const(wm->src_width); in dce_v8_0_average_bandwidth()
827 bandwidth.full = dfixed_mul(src_width, bpp); in dce_v8_0_average_bandwidth()
828 bandwidth.full = dfixed_mul(bandwidth, wm->vsc); in dce_v8_0_average_bandwidth()
829 bandwidth.full = dfixed_div(bandwidth, line_time); in dce_v8_0_average_bandwidth()
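In the same spirit, the average-bandwidth helper reduces to the bytes scanned out per line divided by the line period (active plus blank time, scaled by the 1000 divisor at line 824). A rough sketch, with vscale standing in for the fixed-point wm->vsc factor kept in 20.12 form above:

	/* average bandwidth ~= src_width * bytes_per_pixel * vscale / line_time */
	u32 line_time = (wm->active_time + wm->blank_time) / 1000;
	u32 avg_bw    = wm->src_width * wm->bytes_per_pixel * vscale / line_time;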
835 * dce_v8_0_latency_watermark - get the latency watermark
850 u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */ in dce_v8_0_latency_watermark()
851 u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) + in dce_v8_0_latency_watermark()
852 (wm->num_heads * cursor_line_pair_return_time); in dce_v8_0_latency_watermark()
858 if (wm->num_heads == 0) in dce_v8_0_latency_watermark()
861 a.full = dfixed_const(2); in dce_v8_0_latency_watermark()
862 b.full = dfixed_const(1); in dce_v8_0_latency_watermark()
863 if ((wm->vsc.full > a.full) || in dce_v8_0_latency_watermark()
864 ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) || in dce_v8_0_latency_watermark()
865 (wm->vtaps >= 5) || in dce_v8_0_latency_watermark()
866 ((wm->vsc.full >= a.full) && wm->interlaced)) in dce_v8_0_latency_watermark()
871 a.full = dfixed_const(available_bandwidth); in dce_v8_0_latency_watermark()
872 b.full = dfixed_const(wm->num_heads); in dce_v8_0_latency_watermark()
873 a.full = dfixed_div(a, b); in dce_v8_0_latency_watermark()
874 tmp = div_u64((u64) dmif_size * (u64) wm->disp_clk, mc_latency + 512); in dce_v8_0_latency_watermark()
877 lb_fill_bw = min(tmp, wm->disp_clk * wm->bytes_per_pixel / 1000); in dce_v8_0_latency_watermark()
879 a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel); in dce_v8_0_latency_watermark()
880 b.full = dfixed_const(1000); in dce_v8_0_latency_watermark()
881 c.full = dfixed_const(lb_fill_bw); in dce_v8_0_latency_watermark()
882 b.full = dfixed_div(c, b); in dce_v8_0_latency_watermark()
883 a.full = dfixed_div(a, b); in dce_v8_0_latency_watermark()
886 if (line_fill_time < wm->active_time) in dce_v8_0_latency_watermark()
889 return latency + (line_fill_time - wm->active_time); in dce_v8_0_latency_watermark()
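Pulling the matched pieces of dce_v8_0_latency_watermark() together: lb_fill_bw (line 877) caps how fast the line buffer can be refilled, a line-fill time is derived from it (lines 879-883), and the returned watermark is a base latency term plus any shortfall of that fill time against the active time (line 889). A sketch of that tail end; how the base latency value itself is composed is not visible in the matches and is assumed here:

	/* sketch: time needed to refill one destination line at lb_fill_bw */
	line_fill_time = max_src_lines_per_dst_line * wm->src_width *
			 wm->bytes_per_pixel * 1000 / lb_fill_bw;
	watermark = (line_fill_time < wm->active_time) ?
			latency : latency + (line_fill_time - wm->active_time);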
894 * dce_v8_0_average_bandwidth_vs_dram_bandwidth_for_display - check
907 (dce_v8_0_dram_bandwidth_for_display(wm) / wm->num_heads)) in dce_v8_0_average_bandwidth_vs_dram_bandwidth_for_display()
914 * dce_v8_0_average_bandwidth_vs_available_bandwidth - check
927 (dce_v8_0_available_bandwidth(wm) / wm->num_heads)) in dce_v8_0_average_bandwidth_vs_available_bandwidth()
934 * dce_v8_0_check_latency_hiding - check latency hiding
944 u32 lb_partitions = wm->lb_size / wm->src_width; in dce_v8_0_check_latency_hiding()
945 u32 line_time = wm->active_time + wm->blank_time; in dce_v8_0_check_latency_hiding()
950 a.full = dfixed_const(1); in dce_v8_0_check_latency_hiding()
951 if (wm->vsc.full > a.full) in dce_v8_0_check_latency_hiding()
954 if (lb_partitions <= (wm->vtaps + 1)) in dce_v8_0_check_latency_hiding()
960 latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time); in dce_v8_0_check_latency_hiding()
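Reading dce_v8_0_check_latency_hiding() as a whole: the line buffer buys the pipe one or two lines of slack (only one when vertical scaling is active or the partitions are consumed by the vtaps), and the check passes if the latency watermark fits inside that budget. A sketch; the 1-versus-2 line choice is inferred, only its inputs appear in the matches:

	/* sketch of the latency-hiding budget check */
	lines = (vsc_gt_one || lb_partitions <= wm->vtaps + 1) ? 1 : 2;
	latency_hiding = lines * line_time + wm->blank_time;
	ok = dce_v8_0_latency_watermark(wm) <= latency_hiding;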
969 * dce_v8_0_program_watermarks - program display watermarks
983 struct drm_display_mode *mode = &amdgpu_crtc->base.mode; in dce_v8_0_program_watermarks()
990 if (amdgpu_crtc->base.enabled && num_heads && mode) { in dce_v8_0_program_watermarks()
991 active_time = (u32) div_u64((u64)mode->crtc_hdisplay * 1000000, in dce_v8_0_program_watermarks()
992 (u32)mode->clock); in dce_v8_0_program_watermarks()
993 line_time = (u32) div_u64((u64)mode->crtc_htotal * 1000000, in dce_v8_0_program_watermarks()
994 (u32)mode->clock); in dce_v8_0_program_watermarks()
998 if (adev->pm.dpm_enabled) { in dce_v8_0_program_watermarks()
1004 wm_high.yclk = adev->pm.current_mclk * 10; in dce_v8_0_program_watermarks()
1005 wm_high.sclk = adev->pm.current_sclk * 10; in dce_v8_0_program_watermarks()
1008 wm_high.disp_clk = mode->clock; in dce_v8_0_program_watermarks()
1009 wm_high.src_width = mode->crtc_hdisplay; in dce_v8_0_program_watermarks()
1011 wm_high.blank_time = line_time - wm_high.active_time; in dce_v8_0_program_watermarks()
1013 if (mode->flags & DRM_MODE_FLAG_INTERLACE) in dce_v8_0_program_watermarks()
1015 wm_high.vsc = amdgpu_crtc->vsc; in dce_v8_0_program_watermarks()
1017 if (amdgpu_crtc->rmx_type != RMX_OFF) in dce_v8_0_program_watermarks()
1032 (adev->mode_info.disp_priority == 2)) { in dce_v8_0_program_watermarks()
1037 if (adev->pm.dpm_enabled) { in dce_v8_0_program_watermarks()
1043 wm_low.yclk = adev->pm.current_mclk * 10; in dce_v8_0_program_watermarks()
1044 wm_low.sclk = adev->pm.current_sclk * 10; in dce_v8_0_program_watermarks()
1047 wm_low.disp_clk = mode->clock; in dce_v8_0_program_watermarks()
1048 wm_low.src_width = mode->crtc_hdisplay; in dce_v8_0_program_watermarks()
1050 wm_low.blank_time = line_time - wm_low.active_time; in dce_v8_0_program_watermarks()
1052 if (mode->flags & DRM_MODE_FLAG_INTERLACE) in dce_v8_0_program_watermarks()
1054 wm_low.vsc = amdgpu_crtc->vsc; in dce_v8_0_program_watermarks()
1056 if (amdgpu_crtc->rmx_type != RMX_OFF) in dce_v8_0_program_watermarks()
1071 (adev->mode_info.disp_priority == 2)) { in dce_v8_0_program_watermarks()
1074 lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode->crtc_hdisplay); in dce_v8_0_program_watermarks()
1078 wm_mask = RREG32(mmDPG_WATERMARK_MASK_CONTROL + amdgpu_crtc->crtc_offset); in dce_v8_0_program_watermarks()
1082 WREG32(mmDPG_WATERMARK_MASK_CONTROL + amdgpu_crtc->crtc_offset, tmp); in dce_v8_0_program_watermarks()
1083 WREG32(mmDPG_PIPE_URGENCY_CONTROL + amdgpu_crtc->crtc_offset, in dce_v8_0_program_watermarks()
1087 tmp = RREG32(mmDPG_WATERMARK_MASK_CONTROL + amdgpu_crtc->crtc_offset); in dce_v8_0_program_watermarks()
1090 WREG32(mmDPG_WATERMARK_MASK_CONTROL + amdgpu_crtc->crtc_offset, tmp); in dce_v8_0_program_watermarks()
1091 WREG32(mmDPG_PIPE_URGENCY_CONTROL + amdgpu_crtc->crtc_offset, in dce_v8_0_program_watermarks()
1095 WREG32(mmDPG_WATERMARK_MASK_CONTROL + amdgpu_crtc->crtc_offset, wm_mask); in dce_v8_0_program_watermarks()
1098 amdgpu_crtc->line_time = line_time; in dce_v8_0_program_watermarks()
1099 amdgpu_crtc->wm_high = latency_watermark_a; in dce_v8_0_program_watermarks()
1100 amdgpu_crtc->wm_low = latency_watermark_b; in dce_v8_0_program_watermarks()
1102 amdgpu_crtc->lb_vblank_lead_lines = lb_vblank_lead_lines; in dce_v8_0_program_watermarks()
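For a sense of scale, the active_time/line_time computation matched at lines 991-994 produces nanoseconds per scanline. A worked example with hypothetical 1080p timings (148,500 kHz pixel clock, htotal of 2200):

	/* hypothetical mode, numbers for illustration only */
	active_time = 1920 * 1000000 / 148500;	/* ~12929 ns of active pixels */
	line_time   = 2200 * 1000000 / 148500;	/* ~14814 ns per full scanline */
	blank_time  = line_time - active_time;	/* ~1885 ns, as used at lines 1011/1050 */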
1106 * dce_v8_0_bandwidth_update - program display watermarks
1121 for (i = 0; i < adev->mode_info.num_crtc; i++) { in dce_v8_0_bandwidth_update()
1122 if (adev->mode_info.crtcs[i]->base.enabled) in dce_v8_0_bandwidth_update()
1125 for (i = 0; i < adev->mode_info.num_crtc; i++) { in dce_v8_0_bandwidth_update()
1126 mode = &adev->mode_info.crtcs[i]->base.mode; in dce_v8_0_bandwidth_update()
1127 lb_size = dce_v8_0_line_buffer_adjust(adev, adev->mode_info.crtcs[i], mode); in dce_v8_0_bandwidth_update()
1128 dce_v8_0_program_watermarks(adev, adev->mode_info.crtcs[i], in dce_v8_0_bandwidth_update()
1138 for (i = 0; i < adev->mode_info.audio.num_pins; i++) { in dce_v8_0_audio_get_connected_pins()
1139 offset = adev->mode_info.audio.pin[i].offset; in dce_v8_0_audio_get_connected_pins()
1145 adev->mode_info.audio.pin[i].connected = false; in dce_v8_0_audio_get_connected_pins()
1147 adev->mode_info.audio.pin[i].connected = true; in dce_v8_0_audio_get_connected_pins()
1157 for (i = 0; i < adev->mode_info.audio.num_pins; i++) { in dce_v8_0_audio_get_pin()
1158 if (adev->mode_info.audio.pin[i].connected) in dce_v8_0_audio_get_pin()
1159 return &adev->mode_info.audio.pin[i]; in dce_v8_0_audio_get_pin()
1167 struct amdgpu_device *adev = drm_to_adev(encoder->dev); in dce_v8_0_afmt_audio_select_pin()
1169 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; in dce_v8_0_afmt_audio_select_pin()
1172 if (!dig || !dig->afmt || !dig->afmt->pin) in dce_v8_0_afmt_audio_select_pin()
1175 offset = dig->afmt->offset; in dce_v8_0_afmt_audio_select_pin()
1178 (dig->afmt->pin->id << AFMT_AUDIO_SRC_CONTROL__AFMT_AUDIO_SRC_SELECT__SHIFT)); in dce_v8_0_afmt_audio_select_pin()
1184 struct drm_device *dev = encoder->dev; in dce_v8_0_audio_write_latency_fields()
1187 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; in dce_v8_0_audio_write_latency_fields()
1193 if (!dig || !dig->afmt || !dig->afmt->pin) in dce_v8_0_audio_write_latency_fields()
1196 offset = dig->afmt->pin->offset; in dce_v8_0_audio_write_latency_fields()
1200 if (connector->encoder == encoder) { in dce_v8_0_audio_write_latency_fields()
1212 if (mode->flags & DRM_MODE_FLAG_INTERLACE) { in dce_v8_0_audio_write_latency_fields()
1213 if (connector->latency_present[1]) in dce_v8_0_audio_write_latency_fields()
1215 (connector->video_latency[1] << in dce_v8_0_audio_write_latency_fields()
1217 (connector->audio_latency[1] << in dce_v8_0_audio_write_latency_fields()
1226 if (connector->latency_present[0]) in dce_v8_0_audio_write_latency_fields()
1228 (connector->video_latency[0] << in dce_v8_0_audio_write_latency_fields()
1230 (connector->audio_latency[0] << in dce_v8_0_audio_write_latency_fields()
1245 struct drm_device *dev = encoder->dev; in dce_v8_0_audio_write_speaker_allocation()
1248 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; in dce_v8_0_audio_write_speaker_allocation()
1256 if (!dig || !dig->afmt || !dig->afmt->pin) in dce_v8_0_audio_write_speaker_allocation()
1259 offset = dig->afmt->pin->offset; in dce_v8_0_audio_write_speaker_allocation()
1263 if (connector->encoder == encoder) { in dce_v8_0_audio_write_speaker_allocation()
1275 sad_count = drm_edid_to_speaker_allocation(amdgpu_connector->edid, &sadb); in dce_v8_0_audio_write_speaker_allocation()
1298 struct drm_device *dev = encoder->dev; in dce_v8_0_audio_write_sad_regs()
1301 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; in dce_v8_0_audio_write_sad_regs()
1324 if (!dig || !dig->afmt || !dig->afmt->pin) in dce_v8_0_audio_write_sad_regs()
1327 offset = dig->afmt->pin->offset; in dce_v8_0_audio_write_sad_regs()
1331 if (connector->encoder == encoder) { in dce_v8_0_audio_write_sad_regs()
1343 sad_count = drm_edid_to_sad(amdgpu_connector->edid, &sads); in dce_v8_0_audio_write_sad_regs()
1353 int max_channels = -1; in dce_v8_0_audio_write_sad_regs()
1359 if (sad->format == eld_reg_to_type[i][1]) { in dce_v8_0_audio_write_sad_regs()
1360 if (sad->channels > max_channels) { in dce_v8_0_audio_write_sad_regs()
1361 value = (sad->channels << in dce_v8_0_audio_write_sad_regs()
1363 (sad->byte2 << in dce_v8_0_audio_write_sad_regs()
1365 (sad->freq << in dce_v8_0_audio_write_sad_regs()
1367 max_channels = sad->channels; in dce_v8_0_audio_write_sad_regs()
1370 if (sad->format == HDMI_AUDIO_CODING_TYPE_PCM) in dce_v8_0_audio_write_sad_regs()
1371 stereo_freqs |= sad->freq; in dce_v8_0_audio_write_sad_regs()
1393 WREG32_AUDIO_ENDPT(pin->offset, ixAZALIA_F0_CODEC_PIN_CONTROL_HOT_PLUG_CONTROL, in dce_v8_0_audio_enable()
1398 (0x1780 - 0x1780),
1399 (0x1786 - 0x1780),
1400 (0x178c - 0x1780),
1401 (0x1792 - 0x1780),
1402 (0x1798 - 0x1780),
1403 (0x179d - 0x1780),
1404 (0x17a4 - 0x1780),
1414 adev->mode_info.audio.enabled = true; in dce_v8_0_audio_init()
1416 if (adev->asic_type == CHIP_KAVERI) /* KV: 4 streams, 7 endpoints */ in dce_v8_0_audio_init()
1417 adev->mode_info.audio.num_pins = 7; in dce_v8_0_audio_init()
1418 else if ((adev->asic_type == CHIP_KABINI) || in dce_v8_0_audio_init()
1419 (adev->asic_type == CHIP_MULLINS)) /* KB/ML: 2 streams, 3 endpoints */ in dce_v8_0_audio_init()
1420 adev->mode_info.audio.num_pins = 3; in dce_v8_0_audio_init()
1421 else if ((adev->asic_type == CHIP_BONAIRE) || in dce_v8_0_audio_init()
1422 (adev->asic_type == CHIP_HAWAII))/* BN/HW: 6 streams, 7 endpoints */ in dce_v8_0_audio_init()
1423 adev->mode_info.audio.num_pins = 7; in dce_v8_0_audio_init()
1425 adev->mode_info.audio.num_pins = 3; in dce_v8_0_audio_init()
1427 for (i = 0; i < adev->mode_info.audio.num_pins; i++) { in dce_v8_0_audio_init()
1428 adev->mode_info.audio.pin[i].channels = -1; in dce_v8_0_audio_init()
1429 adev->mode_info.audio.pin[i].rate = -1; in dce_v8_0_audio_init()
1430 adev->mode_info.audio.pin[i].bits_per_sample = -1; in dce_v8_0_audio_init()
1431 adev->mode_info.audio.pin[i].status_bits = 0; in dce_v8_0_audio_init()
1432 adev->mode_info.audio.pin[i].category_code = 0; in dce_v8_0_audio_init()
1433 adev->mode_info.audio.pin[i].connected = false; in dce_v8_0_audio_init()
1434 adev->mode_info.audio.pin[i].offset = pin_offsets[i]; in dce_v8_0_audio_init()
1435 adev->mode_info.audio.pin[i].id = i; in dce_v8_0_audio_init()
1438 dce_v8_0_audio_enable(adev, &adev->mode_info.audio.pin[i], false); in dce_v8_0_audio_init()
1451 if (!adev->mode_info.audio.enabled) in dce_v8_0_audio_fini()
1454 for (i = 0; i < adev->mode_info.audio.num_pins; i++) in dce_v8_0_audio_fini()
1455 dce_v8_0_audio_enable(adev, &adev->mode_info.audio.pin[i], false); in dce_v8_0_audio_fini()
1457 adev->mode_info.audio.enabled = false; in dce_v8_0_audio_fini()
1465 struct drm_device *dev = encoder->dev; in dce_v8_0_afmt_update_ACR()
1469 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; in dce_v8_0_afmt_update_ACR()
1470 uint32_t offset = dig->afmt->offset; in dce_v8_0_afmt_update_ACR()
1483 * build an HDMI Video Info Frame
1488 struct drm_device *dev = encoder->dev; in dce_v8_0_afmt_update_avi_infoframe()
1491 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; in dce_v8_0_afmt_update_avi_infoframe()
1492 uint32_t offset = dig->afmt->offset; in dce_v8_0_afmt_update_avi_infoframe()
1493 uint8_t *frame = buffer + 3; in dce_v8_0_afmt_update_avi_infoframe() local
1497 frame[0x0] | (frame[0x1] << 8) | (frame[0x2] << 16) | (frame[0x3] << 24)); in dce_v8_0_afmt_update_avi_infoframe()
1499 frame[0x4] | (frame[0x5] << 8) | (frame[0x6] << 16) | (frame[0x7] << 24)); in dce_v8_0_afmt_update_avi_infoframe()
1501 frame[0x8] | (frame[0x9] << 8) | (frame[0xA] << 16) | (frame[0xB] << 24)); in dce_v8_0_afmt_update_avi_infoframe()
1503 frame[0xC] | (frame[0xD] << 8) | (header[1] << 24)); in dce_v8_0_afmt_update_avi_infoframe()
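The expressions matched above pack the AVI infoframe into the AFMT registers: buffer holds the fully packed infoframe (3-byte header, then checksum and payload), so frame = buffer + 3 points at the checksum byte; the bytes are folded little-endian into 32-bit words, with the version byte header[1] placed in the top byte of the last word. A minimal sketch of the first and last writes (the AFMT_AVI_INFO register names are my assumption and do not appear in the matches):

	/* sketch: little-endian packing of infoframe bytes into 32-bit words */
	WREG32(mmAFMT_AVI_INFO0 + offset,
	       frame[0x0] | (frame[0x1] << 8) | (frame[0x2] << 16) | (frame[0x3] << 24));
	/* ... AFMT_AVI_INFO1/2 carry frame[0x4..0xB] the same way ... */
	WREG32(mmAFMT_AVI_INFO3 + offset,
	       frame[0xC] | (frame[0xD] << 8) | (header[1] << 24));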
1508 struct drm_device *dev = encoder->dev; in dce_v8_0_audio_set_dto()
1511 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; in dce_v8_0_audio_set_dto()
1512 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc); in dce_v8_0_audio_set_dto()
1516 if (!dig || !dig->afmt) in dce_v8_0_audio_set_dto()
1524 …WREG32(mmDCCG_AUDIO_DTO_SOURCE, (amdgpu_crtc->crtc_id << DCCG_AUDIO_DTO_SOURCE__DCCG_AUDIO_DTO0_SO… in dce_v8_0_audio_set_dto()
1535 struct drm_device *dev = encoder->dev; in dce_v8_0_afmt_setmode()
1538 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; in dce_v8_0_afmt_setmode()
1541 struct hdmi_avi_infoframe frame; in dce_v8_0_afmt_setmode() local
1546 if (!dig || !dig->afmt) in dce_v8_0_afmt_setmode()
1550 if (!dig->afmt->enabled) in dce_v8_0_afmt_setmode()
1553 offset = dig->afmt->offset; in dce_v8_0_afmt_setmode()
1556 if (encoder->crtc) { in dce_v8_0_afmt_setmode()
1557 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc); in dce_v8_0_afmt_setmode()
1558 bpc = amdgpu_crtc->bpc; in dce_v8_0_afmt_setmode()
1562 dig->afmt->pin = dce_v8_0_audio_get_pin(adev); in dce_v8_0_afmt_setmode()
1563 dce_v8_0_audio_enable(adev, dig->afmt->pin, false); in dce_v8_0_afmt_setmode()
1565 dce_v8_0_audio_set_dto(encoder, mode->clock); in dce_v8_0_afmt_setmode()
1583 connector->name, bpc); in dce_v8_0_afmt_setmode()
1589 connector->name); in dce_v8_0_afmt_setmode()
1595 connector->name); in dce_v8_0_afmt_setmode()
1604 HDMI_VBI_PACKET_CONTROL__HDMI_GC_CONT_MASK); /* send general control packets every frame */ in dce_v8_0_afmt_setmode()
1635 dce_v8_0_afmt_update_ACR(encoder, mode->clock); in dce_v8_0_afmt_setmode()
1661 err = drm_hdmi_avi_infoframe_from_display_mode(&frame, connector, mode); in dce_v8_0_afmt_setmode()
1667 err = hdmi_avi_infoframe_pack(&frame, buffer, sizeof(buffer)); in dce_v8_0_afmt_setmode()
1692 dce_v8_0_audio_enable(adev, dig->afmt->pin, true); in dce_v8_0_afmt_setmode()
1697 struct drm_device *dev = encoder->dev; in dce_v8_0_afmt_enable()
1700 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; in dce_v8_0_afmt_enable()
1702 if (!dig || !dig->afmt) in dce_v8_0_afmt_enable()
1706 if (enable && dig->afmt->enabled) in dce_v8_0_afmt_enable()
1708 if (!enable && !dig->afmt->enabled) in dce_v8_0_afmt_enable()
1711 if (!enable && dig->afmt->pin) { in dce_v8_0_afmt_enable()
1712 dce_v8_0_audio_enable(adev, dig->afmt->pin, false); in dce_v8_0_afmt_enable()
1713 dig->afmt->pin = NULL; in dce_v8_0_afmt_enable()
1716 dig->afmt->enabled = enable; in dce_v8_0_afmt_enable()
1719 enable ? "En" : "Dis", dig->afmt->offset, amdgpu_encoder->encoder_id); in dce_v8_0_afmt_enable()
1726 for (i = 0; i < adev->mode_info.num_dig; i++) in dce_v8_0_afmt_init()
1727 adev->mode_info.afmt[i] = NULL; in dce_v8_0_afmt_init()
1730 for (i = 0; i < adev->mode_info.num_dig; i++) { in dce_v8_0_afmt_init()
1731 adev->mode_info.afmt[i] = kzalloc(sizeof(struct amdgpu_afmt), GFP_KERNEL); in dce_v8_0_afmt_init()
1732 if (adev->mode_info.afmt[i]) { in dce_v8_0_afmt_init()
1733 adev->mode_info.afmt[i]->offset = dig_offsets[i]; in dce_v8_0_afmt_init()
1734 adev->mode_info.afmt[i]->id = i; in dce_v8_0_afmt_init()
1738 kfree(adev->mode_info.afmt[j]); in dce_v8_0_afmt_init()
1739 adev->mode_info.afmt[j] = NULL; in dce_v8_0_afmt_init()
1741 return -ENOMEM; in dce_v8_0_afmt_init()
1751 for (i = 0; i < adev->mode_info.num_dig; i++) { in dce_v8_0_afmt_fini()
1752 kfree(adev->mode_info.afmt[i]); in dce_v8_0_afmt_fini()
1753 adev->mode_info.afmt[i] = NULL; in dce_v8_0_afmt_fini()
1769 struct drm_device *dev = crtc->dev; in dce_v8_0_vga_enable()
1773 vga_control = RREG32(vga_control_regs[amdgpu_crtc->crtc_id]) & ~1; in dce_v8_0_vga_enable()
1775 WREG32(vga_control_regs[amdgpu_crtc->crtc_id], vga_control | 1); in dce_v8_0_vga_enable()
1777 WREG32(vga_control_regs[amdgpu_crtc->crtc_id], vga_control); in dce_v8_0_vga_enable()
1783 struct drm_device *dev = crtc->dev; in dce_v8_0_grph_enable()
1787 WREG32(mmGRPH_ENABLE + amdgpu_crtc->crtc_offset, 1); in dce_v8_0_grph_enable()
1789 WREG32(mmGRPH_ENABLE + amdgpu_crtc->crtc_offset, 0); in dce_v8_0_grph_enable()
1797 struct drm_device *dev = crtc->dev; in dce_v8_0_crtc_do_set_base()
1811 if (!atomic && !crtc->primary->fb) { in dce_v8_0_crtc_do_set_base()
1819 target_fb = crtc->primary->fb; in dce_v8_0_crtc_do_set_base()
1824 obj = target_fb->obj[0]; in dce_v8_0_crtc_do_set_base()
1831 abo->flags |= AMDGPU_GEM_CREATE_VRAM_CONTIGUOUS; in dce_v8_0_crtc_do_set_base()
1835 return -EINVAL; in dce_v8_0_crtc_do_set_base()
1845 switch (target_fb->format->format) { in dce_v8_0_crtc_do_set_base()
1896 /* Greater than 8 bpc fb needs to bypass hw-lut to retain precision */ in dce_v8_0_crtc_do_set_base()
1906 /* Greater than 8 bpc fb needs to bypass hw-lut to retain precision */ in dce_v8_0_crtc_do_set_base()
1921 &target_fb->format->format); in dce_v8_0_crtc_do_set_base()
1922 return -EINVAL; in dce_v8_0_crtc_do_set_base()
1952 WREG32(mmGRPH_FLIP_CONTROL + amdgpu_crtc->crtc_offset, 0); in dce_v8_0_crtc_do_set_base()
1954 WREG32(mmGRPH_PRIMARY_SURFACE_ADDRESS_HIGH + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_do_set_base()
1956 WREG32(mmGRPH_SECONDARY_SURFACE_ADDRESS_HIGH + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_do_set_base()
1958 WREG32(mmGRPH_PRIMARY_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_do_set_base()
1960 WREG32(mmGRPH_SECONDARY_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_do_set_base()
1962 WREG32(mmGRPH_CONTROL + amdgpu_crtc->crtc_offset, fb_format); in dce_v8_0_crtc_do_set_base()
1963 WREG32(mmGRPH_SWAP_CNTL + amdgpu_crtc->crtc_offset, fb_swap); in dce_v8_0_crtc_do_set_base()
1968 * retain the full precision throughout the pipeline. in dce_v8_0_crtc_do_set_base()
1970 WREG32_P(mmGRPH_LUT_10BIT_BYPASS_CONTROL + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_do_set_base()
1977 WREG32(mmGRPH_SURFACE_OFFSET_X + amdgpu_crtc->crtc_offset, 0); in dce_v8_0_crtc_do_set_base()
1978 WREG32(mmGRPH_SURFACE_OFFSET_Y + amdgpu_crtc->crtc_offset, 0); in dce_v8_0_crtc_do_set_base()
1979 WREG32(mmGRPH_X_START + amdgpu_crtc->crtc_offset, 0); in dce_v8_0_crtc_do_set_base()
1980 WREG32(mmGRPH_Y_START + amdgpu_crtc->crtc_offset, 0); in dce_v8_0_crtc_do_set_base()
1981 WREG32(mmGRPH_X_END + amdgpu_crtc->crtc_offset, target_fb->width); in dce_v8_0_crtc_do_set_base()
1982 WREG32(mmGRPH_Y_END + amdgpu_crtc->crtc_offset, target_fb->height); in dce_v8_0_crtc_do_set_base()
1984 fb_pitch_pixels = target_fb->pitches[0] / target_fb->format->cpp[0]; in dce_v8_0_crtc_do_set_base()
1985 WREG32(mmGRPH_PITCH + amdgpu_crtc->crtc_offset, fb_pitch_pixels); in dce_v8_0_crtc_do_set_base()
1989 WREG32(mmLB_DESKTOP_HEIGHT + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_do_set_base()
1990 target_fb->height); in dce_v8_0_crtc_do_set_base()
1994 WREG32(mmVIEWPORT_START + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_do_set_base()
1996 viewport_w = crtc->mode.hdisplay; in dce_v8_0_crtc_do_set_base()
1997 viewport_h = (crtc->mode.vdisplay + 1) & ~1; in dce_v8_0_crtc_do_set_base()
1998 WREG32(mmVIEWPORT_SIZE + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_do_set_base()
2002 WREG32(mmMASTER_UPDATE_MODE + amdgpu_crtc->crtc_offset, 0); in dce_v8_0_crtc_do_set_base()
2004 if (!atomic && fb && fb != crtc->primary->fb) { in dce_v8_0_crtc_do_set_base()
2005 abo = gem_to_amdgpu_bo(fb->obj[0]); in dce_v8_0_crtc_do_set_base()
2022 struct drm_device *dev = crtc->dev; in dce_v8_0_set_interleave()
2026 if (mode->flags & DRM_MODE_FLAG_INTERLACE) in dce_v8_0_set_interleave()
2027 WREG32(mmLB_DATA_FORMAT + amdgpu_crtc->crtc_offset, in dce_v8_0_set_interleave()
2030 WREG32(mmLB_DATA_FORMAT + amdgpu_crtc->crtc_offset, 0); in dce_v8_0_set_interleave()
2036 struct drm_device *dev = crtc->dev; in dce_v8_0_crtc_load_lut()
2041 DRM_DEBUG_KMS("%d\n", amdgpu_crtc->crtc_id); in dce_v8_0_crtc_load_lut()
2043 WREG32(mmINPUT_CSC_CONTROL + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_load_lut()
2046 WREG32(mmPRESCALE_GRPH_CONTROL + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_load_lut()
2048 WREG32(mmPRESCALE_OVL_CONTROL + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_load_lut()
2050 WREG32(mmINPUT_GAMMA_CONTROL + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_load_lut()
2054 WREG32(mmDC_LUT_CONTROL + amdgpu_crtc->crtc_offset, 0); in dce_v8_0_crtc_load_lut()
2056 WREG32(mmDC_LUT_BLACK_OFFSET_BLUE + amdgpu_crtc->crtc_offset, 0); in dce_v8_0_crtc_load_lut()
2057 WREG32(mmDC_LUT_BLACK_OFFSET_GREEN + amdgpu_crtc->crtc_offset, 0); in dce_v8_0_crtc_load_lut()
2058 WREG32(mmDC_LUT_BLACK_OFFSET_RED + amdgpu_crtc->crtc_offset, 0); in dce_v8_0_crtc_load_lut()
2060 WREG32(mmDC_LUT_WHITE_OFFSET_BLUE + amdgpu_crtc->crtc_offset, 0xffff); in dce_v8_0_crtc_load_lut()
2061 WREG32(mmDC_LUT_WHITE_OFFSET_GREEN + amdgpu_crtc->crtc_offset, 0xffff); in dce_v8_0_crtc_load_lut()
2062 WREG32(mmDC_LUT_WHITE_OFFSET_RED + amdgpu_crtc->crtc_offset, 0xffff); in dce_v8_0_crtc_load_lut()
2064 WREG32(mmDC_LUT_RW_MODE + amdgpu_crtc->crtc_offset, 0); in dce_v8_0_crtc_load_lut()
2065 WREG32(mmDC_LUT_WRITE_EN_MASK + amdgpu_crtc->crtc_offset, 0x00000007); in dce_v8_0_crtc_load_lut()
2067 WREG32(mmDC_LUT_RW_INDEX + amdgpu_crtc->crtc_offset, 0); in dce_v8_0_crtc_load_lut()
2068 r = crtc->gamma_store; in dce_v8_0_crtc_load_lut()
2069 g = r + crtc->gamma_size; in dce_v8_0_crtc_load_lut()
2070 b = g + crtc->gamma_size; in dce_v8_0_crtc_load_lut()
2072 WREG32(mmDC_LUT_30_COLOR + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_load_lut()
2078 WREG32(mmDEGAMMA_CONTROL + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_load_lut()
2082 WREG32(mmGAMUT_REMAP_CONTROL + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_load_lut()
2085 WREG32(mmREGAMMA_CONTROL + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_load_lut()
2088 WREG32(mmOUTPUT_CSC_CONTROL + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_load_lut()
2092 WREG32(0x1a50 + amdgpu_crtc->crtc_offset, 0); in dce_v8_0_crtc_load_lut()
2096 WREG32(mmALPHA_CONTROL + amdgpu_crtc->crtc_offset, in dce_v8_0_crtc_load_lut()
2103 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; in dce_v8_0_pick_dig_encoder()
2105 switch (amdgpu_encoder->encoder_id) { in dce_v8_0_pick_dig_encoder()
2107 if (dig->linkb) in dce_v8_0_pick_dig_encoder()
2112 if (dig->linkb) in dce_v8_0_pick_dig_encoder()
2117 if (dig->linkb) in dce_v8_0_pick_dig_encoder()
2124 DRM_ERROR("invalid encoder_id: 0x%x\n", amdgpu_encoder->encoder_id); in dce_v8_0_pick_dig_encoder()
2130 * dce_v8_0_pick_pll - Allocate a PPLL for use by the crtc.
2135 * a single PPLL can be used for all DP crtcs/encoders. For non-DP
2146 * - PPLL1, PPLL2 are available for all UNIPHY (both DP and non-DP)
2148 * - PPLL0, PPLL1, PPLL2 are available for all UNIPHY (both DP and non-DP) and DAC
2154 struct drm_device *dev = crtc->dev; in dce_v8_0_pick_pll()
2159 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(amdgpu_crtc->encoder))) { in dce_v8_0_pick_pll()
2160 if (adev->clock.dp_extclk) in dce_v8_0_pick_pll()
2176 if ((adev->asic_type == CHIP_KABINI) || in dce_v8_0_pick_pll()
2177 (adev->asic_type == CHIP_MULLINS)) { in dce_v8_0_pick_pll()
2203 struct amdgpu_device *adev = drm_to_adev(crtc->dev); in dce_v8_0_lock_cursor()
2207 cur_lock = RREG32(mmCUR_UPDATE + amdgpu_crtc->crtc_offset); in dce_v8_0_lock_cursor()
2212 WREG32(mmCUR_UPDATE + amdgpu_crtc->crtc_offset, cur_lock); in dce_v8_0_lock_cursor()
2218 struct amdgpu_device *adev = drm_to_adev(crtc->dev); in dce_v8_0_hide_cursor()
2220 WREG32(mmCUR_CONTROL + amdgpu_crtc->crtc_offset, in dce_v8_0_hide_cursor()
2228 struct amdgpu_device *adev = drm_to_adev(crtc->dev); in dce_v8_0_show_cursor()
2230 WREG32(mmCUR_SURFACE_ADDRESS_HIGH + amdgpu_crtc->crtc_offset, in dce_v8_0_show_cursor()
2231 upper_32_bits(amdgpu_crtc->cursor_addr)); in dce_v8_0_show_cursor()
2232 WREG32(mmCUR_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset, in dce_v8_0_show_cursor()
2233 lower_32_bits(amdgpu_crtc->cursor_addr)); in dce_v8_0_show_cursor()
2235 WREG32(mmCUR_CONTROL + amdgpu_crtc->crtc_offset, in dce_v8_0_show_cursor()
2245 struct amdgpu_device *adev = drm_to_adev(crtc->dev); in dce_v8_0_cursor_move_locked()
2248 amdgpu_crtc->cursor_x = x; in dce_v8_0_cursor_move_locked()
2249 amdgpu_crtc->cursor_y = y; in dce_v8_0_cursor_move_locked()
2252 x += crtc->x; in dce_v8_0_cursor_move_locked()
2253 y += crtc->y; in dce_v8_0_cursor_move_locked()
2254 DRM_DEBUG("x %d y %d c->x %d c->y %d\n", x, y, crtc->x, crtc->y); in dce_v8_0_cursor_move_locked()
2257 xorigin = min(-x, amdgpu_crtc->max_cursor_width - 1); in dce_v8_0_cursor_move_locked()
2261 yorigin = min(-y, amdgpu_crtc->max_cursor_height - 1); in dce_v8_0_cursor_move_locked()
2265 WREG32(mmCUR_POSITION + amdgpu_crtc->crtc_offset, (x << 16) | y); in dce_v8_0_cursor_move_locked()
2266 WREG32(mmCUR_HOT_SPOT + amdgpu_crtc->crtc_offset, (xorigin << 16) | yorigin); in dce_v8_0_cursor_move_locked()
2267 WREG32(mmCUR_SIZE + amdgpu_crtc->crtc_offset, in dce_v8_0_cursor_move_locked()
2268 ((amdgpu_crtc->cursor_width - 1) << 16) | (amdgpu_crtc->cursor_height - 1)); in dce_v8_0_cursor_move_locked()
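The min() expressions matched at lines 2257/2261 handle a cursor hanging off the top or left edge of the screen: the negative coordinate is clamped to zero and the overhang is folded into the hot-spot origin, which the writes at 2265-2268 then program along with the position and size. A sketch of that clamping, assuming the usual pattern around the matched lines:

	/* fold negative coordinates into the cursor hot spot */
	if (x < 0) {
		xorigin = min(-x, amdgpu_crtc->max_cursor_width - 1);
		x = 0;
	}
	if (y < 0) {
		yorigin = min(-y, amdgpu_crtc->max_cursor_height - 1);
		y = 0;
	}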
2305 if ((width > amdgpu_crtc->max_cursor_width) || in dce_v8_0_crtc_cursor_set2()
2306 (height > amdgpu_crtc->max_cursor_height)) { in dce_v8_0_crtc_cursor_set2()
2308 return -EINVAL; in dce_v8_0_crtc_cursor_set2()
2313 DRM_ERROR("Cannot find cursor object %x for crtc %d\n", handle, amdgpu_crtc->crtc_id); in dce_v8_0_crtc_cursor_set2()
2314 return -ENOENT; in dce_v8_0_crtc_cursor_set2()
2324 aobj->flags |= AMDGPU_GEM_CREATE_VRAM_CONTIGUOUS; in dce_v8_0_crtc_cursor_set2()
2332 amdgpu_crtc->cursor_addr = amdgpu_bo_gpu_offset(aobj); in dce_v8_0_crtc_cursor_set2()
2336 if (width != amdgpu_crtc->cursor_width || in dce_v8_0_crtc_cursor_set2()
2337 height != amdgpu_crtc->cursor_height || in dce_v8_0_crtc_cursor_set2()
2338 hot_x != amdgpu_crtc->cursor_hot_x || in dce_v8_0_crtc_cursor_set2()
2339 hot_y != amdgpu_crtc->cursor_hot_y) { in dce_v8_0_crtc_cursor_set2()
2342 x = amdgpu_crtc->cursor_x + amdgpu_crtc->cursor_hot_x - hot_x; in dce_v8_0_crtc_cursor_set2()
2343 y = amdgpu_crtc->cursor_y + amdgpu_crtc->cursor_hot_y - hot_y; in dce_v8_0_crtc_cursor_set2()
2347 amdgpu_crtc->cursor_width = width; in dce_v8_0_crtc_cursor_set2()
2348 amdgpu_crtc->cursor_height = height; in dce_v8_0_crtc_cursor_set2()
2349 amdgpu_crtc->cursor_hot_x = hot_x; in dce_v8_0_crtc_cursor_set2()
2350 amdgpu_crtc->cursor_hot_y = hot_y; in dce_v8_0_crtc_cursor_set2()
2357 if (amdgpu_crtc->cursor_bo) { in dce_v8_0_crtc_cursor_set2()
2358 struct amdgpu_bo *aobj = gem_to_amdgpu_bo(amdgpu_crtc->cursor_bo); in dce_v8_0_crtc_cursor_set2()
2364 drm_gem_object_put(amdgpu_crtc->cursor_bo); in dce_v8_0_crtc_cursor_set2()
2367 amdgpu_crtc->cursor_bo = obj; in dce_v8_0_crtc_cursor_set2()
2375 if (amdgpu_crtc->cursor_bo) { in dce_v8_0_cursor_reset()
2378 dce_v8_0_cursor_move_locked(crtc, amdgpu_crtc->cursor_x, in dce_v8_0_cursor_reset()
2379 amdgpu_crtc->cursor_y); in dce_v8_0_cursor_reset()
2419 struct drm_device *dev = crtc->dev; in dce_v8_0_crtc_dpms()
2426 amdgpu_crtc->enabled = true; in dce_v8_0_crtc_dpms()
2433 amdgpu_crtc->crtc_id); in dce_v8_0_crtc_dpms()
2434 amdgpu_irq_update(adev, &adev->crtc_irq, type); in dce_v8_0_crtc_dpms()
2435 amdgpu_irq_update(adev, &adev->pageflip_irq, type); in dce_v8_0_crtc_dpms()
2443 if (amdgpu_crtc->enabled) { in dce_v8_0_crtc_dpms()
2449 amdgpu_crtc->enabled = false; in dce_v8_0_crtc_dpms()
2473 struct drm_device *dev = crtc->dev; in dce_v8_0_crtc_disable()
2479 if (crtc->primary->fb) { in dce_v8_0_crtc_disable()
2483 abo = gem_to_amdgpu_bo(crtc->primary->fb->obj[0]); in dce_v8_0_crtc_disable()
2497 for (i = 0; i < adev->mode_info.num_crtc; i++) { in dce_v8_0_crtc_disable()
2498 if (adev->mode_info.crtcs[i] && in dce_v8_0_crtc_disable()
2499 adev->mode_info.crtcs[i]->enabled && in dce_v8_0_crtc_disable()
2500 i != amdgpu_crtc->crtc_id && in dce_v8_0_crtc_disable()
2501 amdgpu_crtc->pll_id == adev->mode_info.crtcs[i]->pll_id) { in dce_v8_0_crtc_disable()
2509 switch (amdgpu_crtc->pll_id) { in dce_v8_0_crtc_disable()
2513 amdgpu_atombios_crtc_program_pll(crtc, amdgpu_crtc->crtc_id, amdgpu_crtc->pll_id, in dce_v8_0_crtc_disable()
2518 if ((adev->asic_type == CHIP_KAVERI) || in dce_v8_0_crtc_disable()
2519 (adev->asic_type == CHIP_BONAIRE) || in dce_v8_0_crtc_disable()
2520 (adev->asic_type == CHIP_HAWAII)) in dce_v8_0_crtc_disable()
2521 amdgpu_atombios_crtc_program_pll(crtc, amdgpu_crtc->crtc_id, amdgpu_crtc->pll_id, in dce_v8_0_crtc_disable()
2528 amdgpu_crtc->pll_id = ATOM_PPLL_INVALID; in dce_v8_0_crtc_disable()
2529 amdgpu_crtc->adjusted_clock = 0; in dce_v8_0_crtc_disable()
2530 amdgpu_crtc->encoder = NULL; in dce_v8_0_crtc_disable()
2531 amdgpu_crtc->connector = NULL; in dce_v8_0_crtc_disable()
2541 if (!amdgpu_crtc->adjusted_clock) in dce_v8_0_crtc_mode_set()
2542 return -EINVAL; in dce_v8_0_crtc_mode_set()
2551 amdgpu_crtc->hw_mode = *adjusted_mode; in dce_v8_0_crtc_mode_set()
2561 struct drm_device *dev = crtc->dev; in dce_v8_0_crtc_mode_fixup()
2565 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { in dce_v8_0_crtc_mode_fixup()
2566 if (encoder->crtc == crtc) { in dce_v8_0_crtc_mode_fixup()
2567 amdgpu_crtc->encoder = encoder; in dce_v8_0_crtc_mode_fixup()
2568 amdgpu_crtc->connector = amdgpu_get_connector_for_encoder(encoder); in dce_v8_0_crtc_mode_fixup()
2572 if ((amdgpu_crtc->encoder == NULL) || (amdgpu_crtc->connector == NULL)) { in dce_v8_0_crtc_mode_fixup()
2573 amdgpu_crtc->encoder = NULL; in dce_v8_0_crtc_mode_fixup()
2574 amdgpu_crtc->connector = NULL; in dce_v8_0_crtc_mode_fixup()
2582 amdgpu_crtc->pll_id = dce_v8_0_pick_pll(crtc); in dce_v8_0_crtc_mode_fixup()
2583 /* if we can't get a PPLL for a non-DP encoder, fail */ in dce_v8_0_crtc_mode_fixup()
2584 if ((amdgpu_crtc->pll_id == ATOM_PPLL_INVALID) && in dce_v8_0_crtc_mode_fixup()
2585 !ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(amdgpu_crtc->encoder))) in dce_v8_0_crtc_mode_fixup()
2623 return -ENOMEM; in dce_v8_0_crtc_init()
2625 drm_crtc_init(adev_to_drm(adev), &amdgpu_crtc->base, &dce_v8_0_crtc_funcs); in dce_v8_0_crtc_init()
2627 drm_mode_crtc_set_gamma_size(&amdgpu_crtc->base, 256); in dce_v8_0_crtc_init()
2628 amdgpu_crtc->crtc_id = index; in dce_v8_0_crtc_init()
2629 adev->mode_info.crtcs[index] = amdgpu_crtc; in dce_v8_0_crtc_init()
2631 amdgpu_crtc->max_cursor_width = CIK_CURSOR_WIDTH; in dce_v8_0_crtc_init()
2632 amdgpu_crtc->max_cursor_height = CIK_CURSOR_HEIGHT; in dce_v8_0_crtc_init()
2633 adev_to_drm(adev)->mode_config.cursor_width = amdgpu_crtc->max_cursor_width; in dce_v8_0_crtc_init()
2634 adev_to_drm(adev)->mode_config.cursor_height = amdgpu_crtc->max_cursor_height; in dce_v8_0_crtc_init()
2636 amdgpu_crtc->crtc_offset = crtc_offsets[amdgpu_crtc->crtc_id]; in dce_v8_0_crtc_init()
2638 amdgpu_crtc->pll_id = ATOM_PPLL_INVALID; in dce_v8_0_crtc_init()
2639 amdgpu_crtc->adjusted_clock = 0; in dce_v8_0_crtc_init()
2640 amdgpu_crtc->encoder = NULL; in dce_v8_0_crtc_init()
2641 amdgpu_crtc->connector = NULL; in dce_v8_0_crtc_init()
2642 drm_crtc_helper_add(&amdgpu_crtc->base, &dce_v8_0_crtc_helper_funcs); in dce_v8_0_crtc_init()
2651 adev->audio_endpt_rreg = &dce_v8_0_audio_endpt_rreg; in dce_v8_0_early_init()
2652 adev->audio_endpt_wreg = &dce_v8_0_audio_endpt_wreg; in dce_v8_0_early_init()
2656 adev->mode_info.num_crtc = dce_v8_0_get_num_crtc(adev); in dce_v8_0_early_init()
2658 switch (adev->asic_type) { in dce_v8_0_early_init()
2661 adev->mode_info.num_hpd = 6; in dce_v8_0_early_init()
2662 adev->mode_info.num_dig = 6; in dce_v8_0_early_init()
2665 adev->mode_info.num_hpd = 6; in dce_v8_0_early_init()
2666 adev->mode_info.num_dig = 7; in dce_v8_0_early_init()
2670 adev->mode_info.num_hpd = 6; in dce_v8_0_early_init()
2671 adev->mode_info.num_dig = 6; /* ? */ in dce_v8_0_early_init()
2675 return -EINVAL; in dce_v8_0_early_init()
2688 for (i = 0; i < adev->mode_info.num_crtc; i++) { in dce_v8_0_sw_init()
2689 r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, i + 1, &adev->crtc_irq); in dce_v8_0_sw_init()
2695 r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, i, &adev->pageflip_irq); in dce_v8_0_sw_init()
2701 r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, 42, &adev->hpd_irq); in dce_v8_0_sw_init()
2705 adev_to_drm(adev)->mode_config.funcs = &amdgpu_mode_funcs; in dce_v8_0_sw_init()
2707 adev_to_drm(adev)->mode_config.async_page_flip = true; in dce_v8_0_sw_init()
2709 adev_to_drm(adev)->mode_config.max_width = 16384; in dce_v8_0_sw_init()
2710 adev_to_drm(adev)->mode_config.max_height = 16384; in dce_v8_0_sw_init()
2712 adev_to_drm(adev)->mode_config.preferred_depth = 24; in dce_v8_0_sw_init()
2713 if (adev->asic_type == CHIP_HAWAII) in dce_v8_0_sw_init()
2715 adev_to_drm(adev)->mode_config.prefer_shadow = 0; in dce_v8_0_sw_init()
2717 adev_to_drm(adev)->mode_config.prefer_shadow = 1; in dce_v8_0_sw_init()
2719 adev_to_drm(adev)->mode_config.fb_modifiers_not_supported = true; in dce_v8_0_sw_init()
2725 adev_to_drm(adev)->mode_config.max_width = 16384; in dce_v8_0_sw_init()
2726 adev_to_drm(adev)->mode_config.max_height = 16384; in dce_v8_0_sw_init()
2729 for (i = 0; i < adev->mode_info.num_crtc; i++) { in dce_v8_0_sw_init()
2738 return -EINVAL; in dce_v8_0_sw_init()
2749 /* Disable vblank IRQs aggressively for power-saving */ in dce_v8_0_sw_init()
2751 adev_to_drm(adev)->vblank_disable_immediate = true; in dce_v8_0_sw_init()
2753 r = drm_vblank_init(adev_to_drm(adev), adev->mode_info.num_crtc); in dce_v8_0_sw_init()
2757 /* Pre-DCE11 */ in dce_v8_0_sw_init()
2758 INIT_DELAYED_WORK(&adev->hotplug_work, in dce_v8_0_sw_init()
2763 adev->mode_info.mode_config_initialized = true; in dce_v8_0_sw_init()
2771 drm_edid_free(adev->mode_info.bios_hardcoded_edid); in dce_v8_0_sw_fini()
2780 adev->mode_info.mode_config_initialized = false; in dce_v8_0_sw_fini()
2794 amdgpu_atombios_crtc_set_disp_eng_pll(adev, adev->clock.default_dispclk); in dce_v8_0_hw_init()
2799 for (i = 0; i < adev->mode_info.audio.num_pins; i++) { in dce_v8_0_hw_init()
2800 dce_v8_0_audio_enable(adev, &adev->mode_info.audio.pin[i], false); in dce_v8_0_hw_init()
2815 for (i = 0; i < adev->mode_info.audio.num_pins; i++) { in dce_v8_0_hw_fini()
2816 dce_v8_0_audio_enable(adev, &adev->mode_info.audio.pin[i], false); in dce_v8_0_hw_fini()
2821 flush_delayed_work(&adev->hotplug_work); in dce_v8_0_hw_fini()
2835 adev->mode_info.bl_level = in dce_v8_0_suspend()
2847 adev->mode_info.bl_level); in dce_v8_0_resume()
2852 if (adev->mode_info.bl_encoder) { in dce_v8_0_resume()
2854 adev->mode_info.bl_encoder); in dce_v8_0_resume()
2855 amdgpu_display_backlight_set_level(adev, adev->mode_info.bl_encoder, in dce_v8_0_resume()
2885 dev_info(adev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp); in dce_v8_0_soft_reset()
2907 if (crtc >= adev->mode_info.num_crtc) { in dce_v8_0_set_crtc_vblank_interrupt_state()
2958 if (crtc >= adev->mode_info.num_crtc) { in dce_v8_0_set_crtc_vline_interrupt_state()
3010 if (type >= adev->mode_info.num_hpd) { in dce_v8_0_set_hpd_interrupt_state()
3085 unsigned crtc = entry->src_id - 1; in dce_v8_0_crtc_irq()
3090 switch (entry->src_data[0]) { in dce_v8_0_crtc_irq()
3111 DRM_DEBUG("Unhandled interrupt: %d %d\n", entry->src_id, entry->src_data[0]); in dce_v8_0_crtc_irq()
3125 if (type >= adev->mode_info.num_crtc) { in dce_v8_0_set_pageflip_interrupt_state()
3127 return -EINVAL; in dce_v8_0_set_pageflip_interrupt_state()
3150 crtc_id = (entry->src_id - 8) >> 1; in dce_v8_0_pageflip_irq()
3151 amdgpu_crtc = adev->mode_info.crtcs[crtc_id]; in dce_v8_0_pageflip_irq()
3153 if (crtc_id >= adev->mode_info.num_crtc) { in dce_v8_0_pageflip_irq()
3155 return -EINVAL; in dce_v8_0_pageflip_irq()
3167 spin_lock_irqsave(&adev_to_drm(adev)->event_lock, flags); in dce_v8_0_pageflip_irq()
3168 works = amdgpu_crtc->pflip_works; in dce_v8_0_pageflip_irq()
3169 if (amdgpu_crtc->pflip_status != AMDGPU_FLIP_SUBMITTED) { in dce_v8_0_pageflip_irq()
3170 DRM_DEBUG_DRIVER("amdgpu_crtc->pflip_status = %d != " in dce_v8_0_pageflip_irq()
3172 amdgpu_crtc->pflip_status, in dce_v8_0_pageflip_irq()
3174 spin_unlock_irqrestore(&adev_to_drm(adev)->event_lock, flags); in dce_v8_0_pageflip_irq()
3179 amdgpu_crtc->pflip_status = AMDGPU_FLIP_NONE; in dce_v8_0_pageflip_irq()
3180 amdgpu_crtc->pflip_works = NULL; in dce_v8_0_pageflip_irq()
3183 if (works->event) in dce_v8_0_pageflip_irq()
3184 drm_crtc_send_vblank_event(&amdgpu_crtc->base, works->event); in dce_v8_0_pageflip_irq()
3186 spin_unlock_irqrestore(&adev_to_drm(adev)->event_lock, flags); in dce_v8_0_pageflip_irq()
3188 drm_crtc_vblank_put(&amdgpu_crtc->base); in dce_v8_0_pageflip_irq()
3189 schedule_work(&works->unpin_work); in dce_v8_0_pageflip_irq()
3201 if (entry->src_data[0] >= adev->mode_info.num_hpd) { in dce_v8_0_hpd_irq()
3202 DRM_DEBUG("Unhandled interrupt: %d %d\n", entry->src_id, entry->src_data[0]); in dce_v8_0_hpd_irq()
3206 hpd = entry->src_data[0]; in dce_v8_0_hpd_irq()
3212 schedule_delayed_work(&adev->hotplug_work, 0); in dce_v8_0_hpd_irq()
3258 amdgpu_encoder->pixel_clock = adjusted_mode->clock; in dce_v8_0_encoder_mode_set()
3264 dce_v8_0_set_interleave(encoder->crtc, mode); in dce_v8_0_encoder_mode_set()
3274 struct amdgpu_device *adev = drm_to_adev(encoder->dev); in dce_v8_0_encoder_prepare()
3278 if ((amdgpu_encoder->active_device & in dce_v8_0_encoder_prepare()
3282 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; in dce_v8_0_encoder_prepare()
3284 dig->dig_encoder = dce_v8_0_pick_dig_encoder(encoder); in dce_v8_0_encoder_prepare()
3285 if (amdgpu_encoder->active_device & ATOM_DEVICE_DFP_SUPPORT) in dce_v8_0_encoder_prepare()
3286 dig->afmt = adev->mode_info.afmt[dig->dig_encoder]; in dce_v8_0_encoder_prepare()
3296 if (amdgpu_connector->router.cd_valid) in dce_v8_0_encoder_prepare()
3300 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) in dce_v8_0_encoder_prepare()
3313 struct drm_device *dev = encoder->dev; in dce_v8_0_encoder_commit()
3331 dig = amdgpu_encoder->enc_priv; in dce_v8_0_encoder_disable()
3332 dig->dig_encoder = -1; in dce_v8_0_encoder_disable()
3334 amdgpu_encoder->active_device = 0; in dce_v8_0_encoder_disable()
3398 if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) in dce_v8_0_encoder_destroy()
3400 kfree(amdgpu_encoder->enc_priv); in dce_v8_0_encoder_destroy()
3419 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { in dce_v8_0_encoder_add()
3421 if (amdgpu_encoder->encoder_enum == encoder_enum) { in dce_v8_0_encoder_add()
3422 amdgpu_encoder->devices |= supported_device; in dce_v8_0_encoder_add()
3433 encoder = &amdgpu_encoder->base; in dce_v8_0_encoder_add()
3434 switch (adev->mode_info.num_crtc) { in dce_v8_0_encoder_add()
3436 encoder->possible_crtcs = 0x1; in dce_v8_0_encoder_add()
3440 encoder->possible_crtcs = 0x3; in dce_v8_0_encoder_add()
3443 encoder->possible_crtcs = 0xf; in dce_v8_0_encoder_add()
3446 encoder->possible_crtcs = 0x3f; in dce_v8_0_encoder_add()
3450 amdgpu_encoder->enc_priv = NULL; in dce_v8_0_encoder_add()
3452 amdgpu_encoder->encoder_enum = encoder_enum; in dce_v8_0_encoder_add()
3453 amdgpu_encoder->encoder_id = (encoder_enum & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT; in dce_v8_0_encoder_add()
3454 amdgpu_encoder->devices = supported_device; in dce_v8_0_encoder_add()
3455 amdgpu_encoder->rmx_type = RMX_OFF; in dce_v8_0_encoder_add()
3456 amdgpu_encoder->underscan_type = UNDERSCAN_OFF; in dce_v8_0_encoder_add()
3457 amdgpu_encoder->is_ext_encoder = false; in dce_v8_0_encoder_add()
3458 amdgpu_encoder->caps = caps; in dce_v8_0_encoder_add()
3460 switch (amdgpu_encoder->encoder_id) { in dce_v8_0_encoder_add()
3472 if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) { in dce_v8_0_encoder_add()
3473 amdgpu_encoder->rmx_type = RMX_FULL; in dce_v8_0_encoder_add()
3476 amdgpu_encoder->enc_priv = amdgpu_atombios_encoder_get_lcd_info(amdgpu_encoder); in dce_v8_0_encoder_add()
3477 } else if (amdgpu_encoder->devices & (ATOM_DEVICE_CRT_SUPPORT)) { in dce_v8_0_encoder_add()
3480 amdgpu_encoder->enc_priv = amdgpu_atombios_encoder_get_dig_info(amdgpu_encoder); in dce_v8_0_encoder_add()
3484 amdgpu_encoder->enc_priv = amdgpu_atombios_encoder_get_dig_info(amdgpu_encoder); in dce_v8_0_encoder_add()
3498 amdgpu_encoder->is_ext_encoder = true; in dce_v8_0_encoder_add()
3499 if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) in dce_v8_0_encoder_add()
3502 else if (amdgpu_encoder->devices & (ATOM_DEVICE_CRT_SUPPORT)) in dce_v8_0_encoder_add()
3529 adev->mode_info.funcs = &dce_v8_0_display_funcs; in dce_v8_0_set_display_funcs()
3549 if (adev->mode_info.num_crtc > 0) in dce_v8_0_set_irq_funcs()
3550 adev->crtc_irq.num_types = AMDGPU_CRTC_IRQ_VLINE1 + adev->mode_info.num_crtc; in dce_v8_0_set_irq_funcs()
3552 adev->crtc_irq.num_types = 0; in dce_v8_0_set_irq_funcs()
3553 adev->crtc_irq.funcs = &dce_v8_0_crtc_irq_funcs; in dce_v8_0_set_irq_funcs()
3555 adev->pageflip_irq.num_types = adev->mode_info.num_crtc; in dce_v8_0_set_irq_funcs()
3556 adev->pageflip_irq.funcs = &dce_v8_0_pageflip_irq_funcs; in dce_v8_0_set_irq_funcs()
3558 adev->hpd_irq.num_types = adev->mode_info.num_hpd; in dce_v8_0_set_irq_funcs()
3559 adev->hpd_irq.funcs = &dce_v8_0_hpd_irq_funcs; in dce_v8_0_set_irq_funcs()