Lines Matching +full:bcm2835 +full:- +full:pm (all matches below are from drivers/gpu/drm/vc4/vc4_v3d.c)

1 // SPDX-License-Identifier: GPL-2.0-only
99 struct drm_debugfs_entry *entry = m->private; in vc4_v3d_debugfs_ident()
100 struct drm_device *dev = entry->dev; in vc4_v3d_debugfs_ident()
130 if (WARN_ON_ONCE(vc4->is_vc5)) in vc4_v3d_pm_get()
131 return -ENODEV; in vc4_v3d_pm_get()
133 mutex_lock(&vc4->power_lock); in vc4_v3d_pm_get()
134 if (vc4->power_refcount++ == 0) { in vc4_v3d_pm_get()
135 int ret = pm_runtime_get_sync(&vc4->v3d->pdev->dev); in vc4_v3d_pm_get()
138 vc4->power_refcount--; in vc4_v3d_pm_get()
139 mutex_unlock(&vc4->power_lock); in vc4_v3d_pm_get()
143 mutex_unlock(&vc4->power_lock); in vc4_v3d_pm_get()
151 if (WARN_ON_ONCE(vc4->is_vc5)) in vc4_v3d_pm_put()
154 mutex_lock(&vc4->power_lock); in vc4_v3d_pm_put()
155 if (--vc4->power_refcount == 0) { in vc4_v3d_pm_put()
156 pm_runtime_mark_last_busy(&vc4->v3d->pdev->dev); in vc4_v3d_pm_put()
157 pm_runtime_put_autosuspend(&vc4->v3d->pdev->dev); in vc4_v3d_pm_put()
159 mutex_unlock(&vc4->power_lock); in vc4_v3d_pm_put()
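
The matches at lines 130-159 above come from vc4_v3d_pm_get()/vc4_v3d_pm_put(), which wrap runtime PM in a driver-private refcount so that only the first get and the last put reach the PM core. Below is a minimal sketch of that pattern; struct my_gpu and the my_gpu_power_get()/my_gpu_power_put() names are hypothetical stand-ins, not part of the driver.

#include <linux/device.h>
#include <linux/mutex.h>
#include <linux/pm_runtime.h>

struct my_gpu {
	struct device *dev;		/* device whose runtime PM we control */
	struct mutex power_lock;	/* serializes refcount and PM calls */
	int power_refcount;
};

/* First caller powers the block up; later callers only bump the count. */
static int my_gpu_power_get(struct my_gpu *gpu)
{
	int ret = 0;

	mutex_lock(&gpu->power_lock);
	if (gpu->power_refcount++ == 0) {
		ret = pm_runtime_get_sync(gpu->dev);
		if (ret < 0)
			gpu->power_refcount--;	/* roll back our count on failure */
	}
	mutex_unlock(&gpu->power_lock);

	return ret < 0 ? ret : 0;
}

/* Last caller marks the device busy and lets autosuspend power it down. */
static void my_gpu_power_put(struct my_gpu *gpu)
{
	mutex_lock(&gpu->power_lock);
	if (--gpu->power_refcount == 0) {
		pm_runtime_mark_last_busy(gpu->dev);
		pm_runtime_put_autosuspend(gpu->dev);
	}
	mutex_unlock(&gpu->power_lock);
}

As at lines 138-139, only the driver-side refcount is rolled back when pm_runtime_get_sync() fails.
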
175 struct drm_device *dev = &vc4->base; in vc4_v3d_get_bin_slot()
181 if (WARN_ON_ONCE(vc4->is_vc5)) in vc4_v3d_get_bin_slot()
182 return -ENODEV; in vc4_v3d_get_bin_slot()
185 spin_lock_irqsave(&vc4->job_lock, irqflags); in vc4_v3d_get_bin_slot()
186 slot = ffs(~vc4->bin_alloc_used); in vc4_v3d_get_bin_slot()
188 /* Switch from ffs() bit index to a 0-based index. */ in vc4_v3d_get_bin_slot()
189 slot--; in vc4_v3d_get_bin_slot()
190 vc4->bin_alloc_used |= BIT(slot); in vc4_v3d_get_bin_slot()
191 spin_unlock_irqrestore(&vc4->job_lock, irqflags); in vc4_v3d_get_bin_slot()
200 seqno = exec->seqno; in vc4_v3d_get_bin_slot()
201 spin_unlock_irqrestore(&vc4->job_lock, irqflags); in vc4_v3d_get_bin_slot()
212 return -ENOMEM; in vc4_v3d_get_bin_slot()
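
vc4_v3d_get_bin_slot() (lines 175-212) hands out binner memory in fixed-size slots by treating bin_alloc_used as a bitmap and scanning it with ffs() under the job spinlock. A sketch of just that bitmap step, assuming a 32-slot pool and hypothetical slot_pool names:

#include <linux/bitops.h>
#include <linux/spinlock.h>
#include <linux/types.h>

struct slot_pool {
	spinlock_t lock;
	u32 used;	/* one bit per slot, 1 = in use */
};

/* Returns a free slot index, or -1 if every bit is already set. */
static int slot_pool_get(struct slot_pool *pool)
{
	unsigned long flags;
	int slot;

	spin_lock_irqsave(&pool->lock, flags);
	slot = ffs(~pool->used);	/* 1-based index of first clear bit, 0 if none */
	if (slot) {
		slot--;			/* switch to a 0-based index */
		pool->used |= BIT(slot);
	} else {
		slot = -1;
	}
	spin_unlock_irqrestore(&pool->lock, flags);

	return slot;
}

When every bit is set, the driver does not give up immediately: lines 200-201 show it sampling an in-flight job's seqno under the same lock so it can wait for a slot to be retired and retry, with the -ENOMEM at line 212 as the fallback.
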
216 * bin_bo_alloc() - allocates the memory that will be used for
233 * real-world applications run into allocation failures from the
239 struct vc4_v3d *v3d = vc4->v3d; in bin_bo_alloc()
245 return -ENODEV; in bin_bo_alloc()
256 struct vc4_bo *bo = vc4_bo_create(&vc4->base, size, true, in bin_bo_alloc()
262 dev_err(&v3d->pdev->dev, in bin_bo_alloc()
271 if ((bo->base.dma_addr & 0xf0000000) == in bin_bo_alloc()
272 ((bo->base.dma_addr + bo->base.base.size - 1) & 0xf0000000)) { in bin_bo_alloc()
273 vc4->bin_bo = bo; in bin_bo_alloc()
285 * for a total of 320kb for our worst-case. in bin_bo_alloc()
291 vc4->bin_alloc_size = 512 * 1024; in bin_bo_alloc()
292 vc4->bin_alloc_used = 0; in bin_bo_alloc()
293 vc4->bin_alloc_overflow = 0; in bin_bo_alloc()
294 WARN_ON_ONCE(sizeof(vc4->bin_alloc_used) * 8 != in bin_bo_alloc()
295 bo->base.base.size / vc4->bin_alloc_size); in bin_bo_alloc()
297 kref_init(&vc4->bin_bo_kref); in bin_bo_alloc()
299 /* Enable the out-of-memory interrupt to set our in bin_bo_alloc()
300 * newly-allocated binner BO, potentially from an in bin_bo_alloc()
301 * already-pending-but-masked interrupt. in bin_bo_alloc()
309 list_add(&bo->unref_head, &list); in bin_bo_alloc()
317 list_del(&bo->unref_head); in bin_bo_alloc()
318 drm_gem_object_put(&bo->base.base); in bin_bo_alloc()
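
bin_bo_alloc() (lines 216-318) allocates candidate BOs in a loop, parks the rejects on a local list, and keeps the first one whose start and end bus addresses share the same upper 4 bits, i.e. a buffer that does not straddle a 256 MB boundary; the rejects are then released (lines 317-318). A small helper expressing just that address check, with hypothetical names (the mask comes from lines 271-272):

#include <linux/types.h>

/* True if the whole buffer sits inside one 256 MB window, i.e. the top
 * 4 bits of the first and last bus address match -- the condition the
 * allocation loop above requires before keeping a candidate BO.
 */
static bool fits_one_binner_window(dma_addr_t dma_addr, size_t size)
{
	return (dma_addr & 0xf0000000) ==
	       ((dma_addr + size - 1) & 0xf0000000);
}

The chosen BO is then carved into 512 KiB slots (line 291), and the WARN at lines 294-295 checks that the slot count equals the number of bits in the bin_alloc_used bitmap.
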
328 if (WARN_ON_ONCE(vc4->is_vc5)) in vc4_v3d_bin_bo_get()
329 return -ENODEV; in vc4_v3d_bin_bo_get()
331 mutex_lock(&vc4->bin_bo_lock); in vc4_v3d_bin_bo_get()
336 if (vc4->bin_bo) in vc4_v3d_bin_bo_get()
337 kref_get(&vc4->bin_bo_kref); in vc4_v3d_bin_bo_get()
345 mutex_unlock(&vc4->bin_bo_lock); in vc4_v3d_bin_bo_get()
354 if (WARN_ON_ONCE(!vc4->bin_bo)) in bin_bo_release()
357 drm_gem_object_put(&vc4->bin_bo->base.base); in bin_bo_release()
358 vc4->bin_bo = NULL; in bin_bo_release()
363 if (WARN_ON_ONCE(vc4->is_vc5)) in vc4_v3d_bin_bo_put()
366 mutex_lock(&vc4->bin_bo_lock); in vc4_v3d_bin_bo_put()
367 kref_put(&vc4->bin_bo_kref, bin_bo_release); in vc4_v3d_bin_bo_put()
368 mutex_unlock(&vc4->bin_bo_lock); in vc4_v3d_bin_bo_put()
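
Lines 328-368 show the binner BO's lifetime managed with a kref: getters take a reference under bin_bo_lock (the BO may not exist yet, or may be reallocated), and the final put runs bin_bo_release(), which drops the GEM reference and clears the pointer. A generic sketch of the kref get/put-with-release pattern, using a hypothetical bin_buffer object:

#include <linux/kernel.h>
#include <linux/kref.h>
#include <linux/slab.h>

struct bin_buffer {
	struct kref refcount;
	/* ...backing storage would live here... */
};

/* Called by kref_put() when the last reference goes away. */
static void bin_buffer_release(struct kref *ref)
{
	struct bin_buffer *buf = container_of(ref, struct bin_buffer, refcount);

	/* The driver's release drops its GEM reference and NULLs the pointer
	 * (lines 357-358); here we simply free the object.
	 */
	kfree(buf);
}

static void bin_buffer_get(struct bin_buffer *buf)
{
	kref_get(&buf->refcount);
}

static void bin_buffer_put(struct bin_buffer *buf)
{
	kref_put(&buf->refcount, bin_buffer_release);
}
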
375 struct vc4_dev *vc4 = v3d->vc4; in vc4_v3d_runtime_suspend()
377 vc4_irq_disable(&vc4->base); in vc4_v3d_runtime_suspend()
379 clk_disable_unprepare(v3d->clk); in vc4_v3d_runtime_suspend()
387 struct vc4_dev *vc4 = v3d->vc4; in vc4_v3d_runtime_resume()
390 ret = clk_prepare_enable(v3d->clk); in vc4_v3d_runtime_resume()
394 vc4_v3d_init_hw(&vc4->base); in vc4_v3d_runtime_resume()
396 vc4_irq_enable(&vc4->base); in vc4_v3d_runtime_resume()
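
The runtime PM callbacks at lines 375-396 bracket the V3D clock: suspend masks the interrupt and gates the clock, resume ungates it, re-initializes the hardware, and unmasks the interrupt. A stripped-down sketch with a hypothetical my_block context; the IRQ and hardware-init steps are left as comments, and the callbacks would be wired up via SET_RUNTIME_PM_OPS, as in the registration sketch at the end of this listing.

#include <linux/clk.h>
#include <linux/device.h>

struct my_block {
	struct clk *clk;	/* gates the block, like v3d->clk above */
};

static int my_block_runtime_suspend(struct device *dev)
{
	struct my_block *blk = dev_get_drvdata(dev);	/* set at probe time */

	/* Mask the block's interrupt here, then gate the clock. */
	clk_disable_unprepare(blk->clk);

	return 0;
}

static int my_block_runtime_resume(struct device *dev)
{
	struct my_block *blk = dev_get_drvdata(dev);
	int ret;

	ret = clk_prepare_enable(blk->clk);
	if (ret)
		return ret;

	/* Re-program hardware state lost while powered off, then unmask IRQs. */
	return 0;
}
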
404 struct drm_device *drm = minor->dev; in vc4_v3d_debugfs_init()
406 struct vc4_v3d *v3d = vc4->v3d; in vc4_v3d_debugfs_init()
408 if (!vc4->v3d) in vc4_v3d_debugfs_init()
409 return -ENODEV; in vc4_v3d_debugfs_init()
413 vc4_debugfs_add_regset32(drm, "v3d_regs", &v3d->regset); in vc4_v3d_debugfs_init()
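
Lines 99-100 and 404-413 are the debugfs side: a show callback receives the drm_debugfs_entry through m->private, and the init hook registers files and a register set on the DRM minor. A sketch of that callback shape using the generic drm_debugfs_add_file() helper; my_ident_show and my_debugfs_init are illustrative, and the vc4-specific regset helper is not reproduced here.

#include <drm/drm_debugfs.h>
#include <drm/drm_device.h>
#include <drm/drm_drv.h>
#include <linux/seq_file.h>

/* Show callback in the style of vc4_v3d_debugfs_ident(): the entry created
 * by drm_debugfs_add_file() arrives via m->private (cf. lines 99-100).
 */
static int my_ident_show(struct seq_file *m, void *unused)
{
	struct drm_debugfs_entry *entry = m->private;
	struct drm_device *dev = entry->dev;

	seq_printf(m, "driver: %s\n", dev->driver->name);

	return 0;
}

/* A simplified init hook that registers the file on the DRM device. */
static void my_debugfs_init(struct drm_device *drm)
{
	drm_debugfs_add_file(drm, "ident", my_ident_show, NULL);
}
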
426 v3d = devm_kzalloc(&pdev->dev, sizeof(*v3d), GFP_KERNEL); in vc4_v3d_bind()
428 return -ENOMEM; in vc4_v3d_bind()
432 v3d->pdev = pdev; in vc4_v3d_bind()
434 v3d->regs = vc4_ioremap_regs(pdev, 0); in vc4_v3d_bind()
435 if (IS_ERR(v3d->regs)) in vc4_v3d_bind()
436 return PTR_ERR(v3d->regs); in vc4_v3d_bind()
437 v3d->regset.base = v3d->regs; in vc4_v3d_bind()
438 v3d->regset.regs = v3d_regs; in vc4_v3d_bind()
439 v3d->regset.nregs = ARRAY_SIZE(v3d_regs); in vc4_v3d_bind()
441 vc4->v3d = v3d; in vc4_v3d_bind()
442 v3d->vc4 = vc4; in vc4_v3d_bind()
444 v3d->clk = devm_clk_get_optional(dev, NULL); in vc4_v3d_bind()
445 if (IS_ERR(v3d->clk)) in vc4_v3d_bind()
446 return dev_err_probe(dev, PTR_ERR(v3d->clk), "Failed to get V3D clock\n"); in vc4_v3d_bind()
451 vc4->irq = ret; in vc4_v3d_bind()
464 ret = -EINVAL; in vc4_v3d_bind()
474 ret = vc4_irq_install(drm, vc4->irq); in vc4_v3d_bind()
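
Within vc4_v3d_bind() (lines 426-474), the clock lookup at lines 444-446 uses devm_clk_get_optional(), so a missing clock yields NULL rather than an error, and dev_err_probe(), which logs the failure and handles -EPROBE_DEFER quietly. A sketch of just that lookup, with a hypothetical my_hw context:

#include <linux/clk.h>
#include <linux/device.h>
#include <linux/err.h>

struct my_hw {
	struct clk *clk;
};

static int my_hw_get_clock(struct device *dev, struct my_hw *hw)
{
	/* Optional clock: NULL if the DT does not provide one. */
	hw->clk = devm_clk_get_optional(dev, NULL);
	if (IS_ERR(hw->clk))
		return dev_err_probe(dev, PTR_ERR(hw->clk),
				     "Failed to get clock\n");

	return 0;
}
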
506 vc4->v3d = NULL; in vc4_v3d_unbind()
520 return component_add(&pdev->dev, &vc4_v3d_ops); in vc4_v3d_dev_probe()
525 component_del(&pdev->dev, &vc4_v3d_ops); in vc4_v3d_dev_remove()
529 { .compatible = "brcm,bcm2835-v3d" },
530 { .compatible = "brcm,cygnus-v3d" },
531 { .compatible = "brcm,vc4-v3d" },
541 .pm = &vc4_v3d_pm_ops,
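
Lines 506-541 are the driver glue: a component-based probe/remove (lines 520 and 525), an OF match table with the three supported compatibles (lines 529-531), and the .pm hookup at line 541. Below is a self-contained sketch of how such a platform driver ties those pieces together; everything named my_v3d_* is hypothetical, only the compatible strings are taken from the listing, and the runtime PM callbacks are reduced to stubs (see the suspend/resume sketch earlier for their bodies).

#include <linux/mod_devicetable.h>
#include <linux/of.h>
#include <linux/platform_device.h>
#include <linux/pm.h>

/* Stub runtime PM callbacks; a real driver gates clocks and IRQs here. */
static int my_v3d_runtime_suspend(struct device *dev)
{
	return 0;
}

static int my_v3d_runtime_resume(struct device *dev)
{
	return 0;
}

static const struct dev_pm_ops my_v3d_pm_ops = {
	SET_RUNTIME_PM_OPS(my_v3d_runtime_suspend, my_v3d_runtime_resume, NULL)
};

/* One entry per supported SoC, terminated by an empty sentinel. */
static const struct of_device_id my_v3d_dt_match[] = {
	{ .compatible = "brcm,bcm2835-v3d" },
	{ .compatible = "brcm,cygnus-v3d" },
	{ .compatible = "brcm,vc4-v3d" },
	{}
};

static int my_v3d_probe(struct platform_device *pdev)
{
	/* The real driver registers a component here, as at line 520. */
	return 0;
}

static struct platform_driver my_v3d_driver = {
	.probe = my_v3d_probe,
	.driver = {
		.name = "my_v3d",
		.of_match_table = my_v3d_dt_match,
		.pm = &my_v3d_pm_ops,	/* the hookup shown at line 541 */
	},
};
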