Lines matching: intr (grouped by containing function)

In dpu_core_irq_get_entry():
209:	static inline struct dpu_hw_intr_entry *dpu_core_irq_get_entry(struct dpu_hw_intr *intr,
212:		return &intr->irq_tbl[irq_idx - 1];
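
The helper above turns an IRQ index into its callback-table entry; the "irq_idx - 1" offset suggests a 1-based index with 0 reserved as "no IRQ". A minimal sketch of the whole helper, assuming that convention and an irq_tbl[] sized to the number of IRQs:

	static inline struct dpu_hw_intr_entry *
	dpu_core_irq_get_entry(struct dpu_hw_intr *intr, unsigned int irq_idx)
	{
		/* irq_idx is 1-based, so table entry 0 backs irq_idx 1 */
		return &intr->irq_tbl[irq_idx - 1];
	}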
In dpu_core_irq():
243:	struct dpu_hw_intr *intr = dpu_kms->hw_intr;
251:	if (!intr)
254:	spin_lock_irqsave(&intr->irq_lock, irq_flags);
256:		if (!test_bit(reg_idx, &intr->irq_mask))
260:		irq_status = DPU_REG_READ(&intr->hw, intr->intr_set[reg_idx].status_off);
263:		enable_mask = DPU_REG_READ(&intr->hw, intr->intr_set[reg_idx].en_off);
267:			DPU_REG_WRITE(&intr->hw, intr->intr_set[reg_idx].clr_off,
277:		 * Search through matching intr status.
296:	spin_unlock_irqrestore(&intr->irq_lock, irq_flags);
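
Taken together, these matches outline the top-level dispatch loop: under irq_lock, every register block flagged in irq_mask has its status read, masked down to the currently enabled bits, and acknowledged before the set bits are handed to their callbacks. A condensed sketch of that loop; the MDP_INTR_MAX bound and the per-bit dispatch step are assumptions filled in around the lines shown:

	for (reg_idx = 0; reg_idx < MDP_INTR_MAX; reg_idx++) {
		if (!test_bit(reg_idx, &intr->irq_mask))
			continue;

		/* Read raw status, keep only bits that are currently enabled */
		irq_status = DPU_REG_READ(&intr->hw, intr->intr_set[reg_idx].status_off);
		enable_mask = DPU_REG_READ(&intr->hw, intr->intr_set[reg_idx].en_off);
		irq_status &= enable_mask;

		/* Acknowledge everything about to be handled */
		if (irq_status)
			DPU_REG_WRITE(&intr->hw, intr->intr_set[reg_idx].clr_off,
				      irq_status);

		/* Search through matching intr status, lowest bit first */
		while ((bit = ffs(irq_status))) {
			irq_status &= ~BIT(bit - 1);
			/* dispatch (reg_idx, bit - 1) to its registered callback */
		}
	}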
In dpu_hw_intr_enable_irq_locked():
301:	static int dpu_hw_intr_enable_irq_locked(struct dpu_hw_intr *intr,
309:		if (!intr)
323:		assert_spin_locked(&intr->irq_lock);
326:		reg = &intr->intr_set[reg_idx];
332:		cache_irq_mask = intr->cache_irq_mask[reg_idx];
340:		DPU_REG_WRITE(&intr->hw, reg->clr_off, DPU_IRQ_MASK(irq_idx));
342:		DPU_REG_WRITE(&intr->hw, reg->en_off, cache_irq_mask);
347:		intr->cache_irq_mask[reg_idx] = cache_irq_mask;
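
The enable path works on a cached copy of the enable mask, so the hardware register is only rewritten when the bit actually changes; note that the status bit is cleared (clr_off) before the interrupt is unmasked (en_off), preventing a stale event from firing the moment it is enabled. A sketch of that sequence, with DPU_IRQ_REG() assumed as the register-block selector to match DPU_IRQ_MASK() from the lines above:

	assert_spin_locked(&intr->irq_lock);

	reg_idx = DPU_IRQ_REG(irq_idx);
	reg = &intr->intr_set[reg_idx];
	cache_irq_mask = intr->cache_irq_mask[reg_idx];

	if (!(cache_irq_mask & DPU_IRQ_MASK(irq_idx))) {
		cache_irq_mask |= DPU_IRQ_MASK(irq_idx);

		/* Clear any stale status, then unmask the interrupt */
		DPU_REG_WRITE(&intr->hw, reg->clr_off, DPU_IRQ_MASK(irq_idx));
		DPU_REG_WRITE(&intr->hw, reg->en_off, cache_irq_mask);

		intr->cache_irq_mask[reg_idx] = cache_irq_mask;
	}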
In dpu_hw_intr_disable_irq_locked():
357:	static int dpu_hw_intr_disable_irq_locked(struct dpu_hw_intr *intr,
365:		if (!intr)
379:		assert_spin_locked(&intr->irq_lock);
382:		reg = &intr->intr_set[reg_idx];
384:		cache_irq_mask = intr->cache_irq_mask[reg_idx];
392:		DPU_REG_WRITE(&intr->hw, reg->en_off, cache_irq_mask);
394:		DPU_REG_WRITE(&intr->hw, reg->clr_off, DPU_IRQ_MASK(irq_idx));
399:		intr->cache_irq_mask[reg_idx] = cache_irq_mask;
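
Disabling mirrors the enable path with the two register writes reversed: en_off is written first to mask the source, and clr_off second to drop any status it left behind. A sketch of the tail of the locked disable, under the same helper assumptions as above:

	cache_irq_mask = intr->cache_irq_mask[reg_idx];

	if (cache_irq_mask & DPU_IRQ_MASK(irq_idx)) {
		cache_irq_mask &= ~DPU_IRQ_MASK(irq_idx);

		/* Mask the interrupt first, then clear whatever it left pending */
		DPU_REG_WRITE(&intr->hw, reg->en_off, cache_irq_mask);
		DPU_REG_WRITE(&intr->hw, reg->clr_off, DPU_IRQ_MASK(irq_idx));

		intr->cache_irq_mask[reg_idx] = cache_irq_mask;
	}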
In dpu_clear_irqs():
411:	struct dpu_hw_intr *intr = dpu_kms->hw_intr;
414:	if (!intr)
418:		if (test_bit(i, &intr->irq_mask))
419:			DPU_REG_WRITE(&intr->hw,
420:				      intr->intr_set[i].clr_off, 0xffffffff);
In dpu_disable_all_irqs():
429:	struct dpu_hw_intr *intr = dpu_kms->hw_intr;
432:	if (!intr)
436:		if (test_bit(i, &intr->irq_mask))
437:			DPU_REG_WRITE(&intr->hw,
438:				      intr->intr_set[i].en_off, 0x00000000);
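
Both bulk helpers share one shape: walk every register block flagged in irq_mask and blanket-write a single register, all-ones to clr_off to acknowledge everything, or zero to en_off to mask everything. A sketch of the two loops, with the MDP_INTR_MAX bound assumed as before:

	/* dpu_clear_irqs(): acknowledge every pending bit */
	for (i = 0; i < MDP_INTR_MAX; i++)
		if (test_bit(i, &intr->irq_mask))
			DPU_REG_WRITE(&intr->hw,
				      intr->intr_set[i].clr_off, 0xffffffff);

	/* dpu_disable_all_irqs(): mask every source */
	for (i = 0; i < MDP_INTR_MAX; i++)
		if (test_bit(i, &intr->irq_mask))
			DPU_REG_WRITE(&intr->hw,
				      intr->intr_set[i].en_off, 0x00000000);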
In dpu_core_irq_read():
448:	struct dpu_hw_intr *intr = dpu_kms->hw_intr;
453:	if (!intr)
461:	spin_lock_irqsave(&intr->irq_lock, irq_flags);
464:	intr_status = DPU_REG_READ(&intr->hw,
465:				   intr->intr_set[reg_idx].status_off) &
468:	DPU_REG_WRITE(&intr->hw, intr->intr_set[reg_idx].clr_off,
474:	spin_unlock_irqrestore(&intr->irq_lock, irq_flags);
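
This reader samples a single interrupt's status bit and acknowledges it, all under irq_lock so it cannot race the dispatch loop above. A sketch of the locked region; masking the read with DPU_IRQ_MASK() and gating the write on a "clear" flag are assumptions consistent with the lines shown:

	spin_lock_irqsave(&intr->irq_lock, irq_flags);

	/* Isolate this IRQ's bit within its register block's status */
	intr_status = DPU_REG_READ(&intr->hw,
				   intr->intr_set[reg_idx].status_off) &
		      DPU_IRQ_MASK(irq_idx);

	/* Acknowledge it so the next read starts clean */
	if (intr_status && clear)
		DPU_REG_WRITE(&intr->hw, intr->intr_set[reg_idx].clr_off,
			      intr_status);

	spin_unlock_irqrestore(&intr->irq_lock, irq_flags);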
In dpu_hw_intr_init():
483:	struct dpu_hw_intr *intr;
489:	intr = drmm_kzalloc(dev, sizeof(*intr), GFP_KERNEL);
490:	if (!intr)
494:		intr->intr_set = dpu_intr_set_7xxx;
496:		intr->intr_set = dpu_intr_set_legacy;
498:	intr->hw.blk_addr = addr + m->mdp[0].base;
500:	intr->irq_mask = BIT(MDP_SSPP_TOP0_INTR) |
509:		intr->irq_mask |= BIT(MDP_INTFn_INTR(intf->id));
512:			intr->irq_mask |= BIT(DPU_IRQ_REG(intf->intr_tear_rd_ptr));
515:	spin_lock_init(&intr->irq_lock);
517:	return intr;
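
Initialization ties the pieces together: a DRM-managed allocation, a register-map table chosen by hardware generation, the MMIO block address, and an irq_mask seeded with the top-level block plus one bit per interface (and the tear-ready register block where the interface has intr_tear_rd_ptr). A sketch of that flow; the generation test (is_7xxx here), the catalog fields m->intf_count/m->intf[], and the extra top-level mask bits elided by the matches are all assumptions:

	intr = drmm_kzalloc(dev, sizeof(*intr), GFP_KERNEL);
	if (!intr)
		return ERR_PTR(-ENOMEM);

	/* is_7xxx: hypothetical hardware-generation check from the catalog */
	intr->intr_set = is_7xxx ? dpu_intr_set_7xxx : dpu_intr_set_legacy;

	intr->hw.blk_addr = addr + m->mdp[0].base;

	/* Top-level block; further bits are OR'd in but elided in the matches */
	intr->irq_mask = BIT(MDP_SSPP_TOP0_INTR);

	for (i = 0; i < m->intf_count; i++) {
		const struct dpu_intf_cfg *intf = &m->intf[i];

		intr->irq_mask |= BIT(MDP_INTFn_INTR(intf->id));

		if (intf->intr_tear_rd_ptr)
			intr->irq_mask |= BIT(DPU_IRQ_REG(intf->intr_tear_rd_ptr));
	}

	spin_lock_init(&intr->irq_lock);

	return intr;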