Lines matching full:area

Each hit below shows the line number within the searched file, the matching source line, and the enclosing function; the "argument" tag marks a hit inside a parameter list. The function names evidently belong to the ALSA DMA buffer allocator, sound/core/memalloc.c.

28 	int (*mmap)(struct snd_dma_buffer *dmab, struct vm_area_struct *area);
49 * snd_dma_alloc_dir_pages - allocate the buffer area according to the given
79 dmab->area = __snd_dma_alloc_pages(dmab, size); in snd_dma_alloc_dir_pages()
80 if (!dmab->area) in snd_dma_alloc_dir_pages()
88 * snd_dma_alloc_pages_fallback - allocate the buffer area according to the given type with fallback
115 if (! dmab->area) in snd_dma_alloc_pages_fallback()
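The two allocators above differ in retry policy: per its kernel-doc line, the fallback variant retries with reduced sizes instead of failing outright, and line 115's NULL check guards the final result. A minimal driver-side sketch of the call; the device pointer dev and the 64 KiB request are illustrative, not from the listing:

	struct snd_dma_buffer buf;
	int err;

	/* request 64 KiB; on -ENOMEM the fallback variant retries with
	 * progressively smaller sizes before giving up */
	err = snd_dma_alloc_pages_fallback(SNDRV_DMA_TYPE_DEV, dev,
					   64 * 1024, &buf);
	if (err < 0)
		return err;

	/* buf.area = kernel virtual address, buf.addr = DMA address,
	 * buf.bytes = size actually granted (possibly less than requested) */

	snd_dma_free_pages(&buf);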
187 * @area: VM area information
192 struct vm_area_struct *area) in snd_dma_buffer_mmap() argument
200 return ops->mmap(dmab, area); in snd_dma_buffer_mmap()
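Only lines containing "area" appear above, so the dispatcher body around line 192 is mostly elided. A reconstruction assuming the usual structure of this file; the snd_dma_get_ops() lookup and the -ENOENT returns are not visible in the hits:

int snd_dma_buffer_mmap(struct snd_dma_buffer *dmab,
			struct vm_area_struct *area)
{
	const struct snd_malloc_ops *ops;

	if (!dmab)
		return -ENOENT;
	/* resolve the per-type ops table; its mmap member is the
	 * callback declared at line 28 */
	ops = snd_dma_get_ops(dmab);
	if (ops && ops->mmap)
		return ops->mmap(dmab, area);
	return -ENOENT;
}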
258 return virt_to_page(dmab->area + offset); in snd_sgbuf_get_page()
333 do_free_pages(dmab->area, dmab->bytes, false); in snd_dma_continuous_free()
337 struct vm_area_struct *area) in snd_dma_continuous_mmap() argument
339 return remap_pfn_range(area, area->vm_start, in snd_dma_continuous_mmap()
341 area->vm_end - area->vm_start, in snd_dma_continuous_mmap()
342 area->vm_page_prot); in snd_dma_continuous_mmap()
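Line 340, the pfn argument, contains no "area" and is therefore missing above. Filling the gap consistently with remap_pfn_range()'s signature; the dmab->addr >> PAGE_SHIFT expression is an assumption:

static int snd_dma_continuous_mmap(struct snd_dma_buffer *dmab,
				   struct vm_area_struct *area)
{
	/* physically contiguous buffer: one linear pfn remap covers
	 * the whole VMA */
	return remap_pfn_range(area, area->vm_start,
			       dmab->addr >> PAGE_SHIFT,	/* elided line 340, assumed */
			       area->vm_end - area->vm_start,
			       area->vm_page_prot);
}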
361 vfree(dmab->area); in snd_dma_vmalloc_free()
365 struct vm_area_struct *area) in snd_dma_vmalloc_mmap() argument
367 return remap_vmalloc_range(area, dmab->area, 0); in snd_dma_vmalloc_mmap()
371 page_to_phys(vmalloc_to_page((dmab)->area + (offset)))
382 return vmalloc_to_page(dmab->area + offset); in snd_dma_vmalloc_get_page()
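The vmalloc hits are already complete statements; assembled into their handlers (signatures follow the mmap member at line 28 and the get_page pattern at line 258):

static int snd_dma_vmalloc_mmap(struct snd_dma_buffer *dmab,
				struct vm_area_struct *area)
{
	/* remap_vmalloc_range() validates that the VMA fits inside the
	 * vmalloc'ed buffer, so no size arithmetic is needed here */
	return remap_vmalloc_range(area, dmab->area, 0);
}

static struct page *snd_dma_vmalloc_get_page(struct snd_dma_buffer *dmab,
					     size_t offset)
{
	/* vmalloc memory is virtually contiguous but physically
	 * scattered; each offset resolves through the page tables,
	 * matching the physical-address macro at line 371 */
	return vmalloc_to_page(dmab->area + offset);
}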
449 if (pool && dmab->area) in snd_dma_iram_free()
450 gen_pool_free(pool, (unsigned long)dmab->area, dmab->bytes); in snd_dma_iram_free()
454 struct vm_area_struct *area) in snd_dma_iram_mmap() argument
456 area->vm_page_prot = pgprot_writecombine(area->vm_page_prot); in snd_dma_iram_mmap()
457 return remap_pfn_range(area, area->vm_start, in snd_dma_iram_mmap()
459 area->vm_end - area->vm_start, in snd_dma_iram_mmap()
460 area->vm_page_prot); in snd_dma_iram_mmap()
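As with the continuous type, the pfn argument (line 458) is elided; a reconstruction under the same assumption:

static int snd_dma_iram_mmap(struct snd_dma_buffer *dmab,
			     struct vm_area_struct *area)
{
	/* on-chip IRAM is exported write-combined to user space */
	area->vm_page_prot = pgprot_writecombine(area->vm_page_prot);
	return remap_pfn_range(area, area->vm_start,
			       dmab->addr >> PAGE_SHIFT,	/* elided line 458, assumed */
			       area->vm_end - area->vm_start,
			       area->vm_page_prot);
}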
480 dma_free_coherent(dmab->dev.dev, dmab->bytes, dmab->area, dmab->addr); in snd_dma_dev_free()
484 struct vm_area_struct *area) in snd_dma_dev_mmap() argument
486 return dma_mmap_coherent(dmab->dev.dev, area, in snd_dma_dev_mmap()
487 dmab->area, dmab->addr, dmab->bytes); in snd_dma_dev_mmap()
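The coherent-device handler is fully visible in the hits; put back together, it is a thin wrapper over the DMA API:

static int snd_dma_dev_mmap(struct snd_dma_buffer *dmab,
			    struct vm_area_struct *area)
{
	/* dma_mmap_coherent() picks the platform-correct pgprot and
	 * remapping primitive for memory from dma_alloc_coherent() */
	return dma_mmap_coherent(dmab->dev.dev, area,
				 dmab->area, dmab->addr, dmab->bytes);
}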
509 do_free_pages(dmab->area, size, true); in snd_dma_wc_alloc()
519 do_free_pages(dmab->area, dmab->bytes, true); in snd_dma_wc_free()
523 struct vm_area_struct *area) in snd_dma_wc_mmap() argument
525 area->vm_page_prot = pgprot_writecombine(area->vm_page_prot); in snd_dma_wc_mmap()
526 return dma_mmap_coherent(dmab->dev.dev, area, in snd_dma_wc_mmap()
527 dmab->area, dmab->addr, dmab->bytes); in snd_dma_wc_mmap()
537 dma_free_wc(dmab->dev.dev, dmab->bytes, dmab->area, dmab->addr); in snd_dma_wc_free()
541 struct vm_area_struct *area) in snd_dma_wc_mmap() argument
543 return dma_mmap_wc(dmab->dev.dev, area, in snd_dma_wc_mmap()
544 dmab->area, dmab->addr, dmab->bytes); in snd_dma_wc_mmap()
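Two same-named snd_dma_wc_mmap() definitions appear (lines 523 and 541), so they must be mutually exclusive build variants, presumably separated by a config #ifdef (CONFIG_SND_DMA_SGBUF would be a plausible guard; the listing does not show it). Side by side:

/* variant at line 523: pages allocated manually (see do_free_pages at
 * lines 509/519), write-combine applied via the page protection */
static int snd_dma_wc_mmap(struct snd_dma_buffer *dmab,
			   struct vm_area_struct *area)
{
	area->vm_page_prot = pgprot_writecombine(area->vm_page_prot);
	return dma_mmap_coherent(dmab->dev.dev, area,
				 dmab->area, dmab->addr, dmab->bytes);
}

/* variant at line 541: memory from dma_alloc_wc() (freed at line 537),
 * so the matching dma_mmap_wc() helper handles the caching attributes */
static int snd_dma_wc_mmap(struct snd_dma_buffer *dmab,
			   struct vm_area_struct *area)
{
	return dma_mmap_wc(dmab->dev.dev, area,
			   dmab->area, dmab->addr, dmab->bytes);
}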
582 dma_vunmap_noncontiguous(dmab->dev.dev, dmab->area); in snd_dma_noncontig_free()
588 struct vm_area_struct *area) in snd_dma_noncontig_mmap() argument
590 return dma_mmap_noncontiguous(dmab->dev.dev, area, in snd_dma_noncontig_mmap()
600 invalidate_kernel_vmap_range(dmab->area, dmab->bytes); in snd_dma_noncontig_sync()
606 flush_kernel_vmap_range(dmab->area, dmab->bytes); in snd_dma_noncontig_sync()
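For the non-contiguous type, the tail of the mmap call (line 591) and most of the sync handler are elided. A sketch assuming the buffer's sg_table lives in private_data and the sync callback takes the usual snd_dma_sync_mode; the direction guards and dma_sync_sgtable_*() calls are reconstructions, not shown in the hits:

static int snd_dma_noncontig_mmap(struct snd_dma_buffer *dmab,
				  struct vm_area_struct *area)
{
	return dma_mmap_noncontiguous(dmab->dev.dev, area,
				      dmab->bytes, dmab->private_data);	/* assumed tail */
}

static void snd_dma_noncontig_sync(struct snd_dma_buffer *dmab,
				   enum snd_dma_sync_mode mode)
{
	if (mode == SNDRV_DMA_SYNC_CPU) {
		if (dmab->dev.dir == DMA_TO_DEVICE)
			return;
		dma_sync_sgtable_for_cpu(dmab->dev.dev, dmab->private_data,
					 dmab->dev.dir);
		/* line 600: invalidate stale CPU-side aliases of the
		 * vmap'ed buffer before reading what the device wrote */
		invalidate_kernel_vmap_range(dmab->area, dmab->bytes);
	} else {
		if (dmab->dev.dir == DMA_FROM_DEVICE)
			return;
		/* line 606: push CPU writes through the vmap alias before
		 * the device reads */
		flush_kernel_vmap_range(dmab->area, dmab->bytes);
		dma_sync_sgtable_for_device(dmab->dev.dev, dmab->private_data,
					    dmab->dev.dir);
	}
}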
785 vunmap(dmab->area); in snd_dma_sg_fallback_free()
792 struct vm_area_struct *area) in snd_dma_sg_fallback_mmap() argument
797 area->vm_page_prot = pgprot_writecombine(area->vm_page_prot); in snd_dma_sg_fallback_mmap()
798 return vm_map_pages(area, sgbuf->pages, sgbuf->count); in snd_dma_sg_fallback_mmap()
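Lines 793 to 796 are elided here. A simplified sketch: the sgbuf local pulled from private_data (and its struct name) is an assumption, and the real code most likely applies pgprot_writecombine() only for the write-combined buffer type rather than unconditionally:

static int snd_dma_sg_fallback_mmap(struct snd_dma_buffer *dmab,
				    struct vm_area_struct *area)
{
	struct snd_dma_sg_fallback *sgbuf = dmab->private_data;	/* assumed */

	/* line 797; probably gated on the buffer type in the elided lines */
	area->vm_page_prot = pgprot_writecombine(area->vm_page_prot);
	/* vm_map_pages() inserts the discontiguous page array into the
	 * VMA page by page; cf. the vunmap() of the kernel-side mapping
	 * at line 785 */
	return vm_map_pages(area, sgbuf->pages, sgbuf->count);
}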
847 dma_free_noncoherent(dmab->dev.dev, dmab->bytes, dmab->area, in snd_dma_noncoherent_free()
852 struct vm_area_struct *area) in snd_dma_noncoherent_mmap() argument
854 area->vm_page_prot = vm_get_page_prot(area->vm_flags); in snd_dma_noncoherent_mmap()
855 return dma_mmap_pages(dmab->dev.dev, area, in snd_dma_noncoherent_mmap()
856 area->vm_end - area->vm_start, in snd_dma_noncoherent_mmap()
857 virt_to_page(dmab->area)); in snd_dma_noncoherent_mmap()
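The non-coherent handler is fully visible apart from brace lines; reassembled:

static int snd_dma_noncoherent_mmap(struct snd_dma_buffer *dmab,
				    struct vm_area_struct *area)
{
	/* non-coherent memory stays cacheable and relies on explicit
	 * sync calls, so map with the default protection for the
	 * VMA's flags */
	area->vm_page_prot = vm_get_page_prot(area->vm_flags);
	return dma_mmap_pages(dmab->dev.dev, area,
			      area->vm_end - area->vm_start,
			      virt_to_page(dmab->area));
}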