Lines matching references to dma
144 return &mem->dma; in videobuf_to_dma()
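
These hits appear to come from the V4L2 videobuf scatter-gather backend (videobuf-dma-sg.c). The first one is the accessor that hands back the videobuf_dmabuf embedded in a buffer's private data; a rough reconstruction, assuming the videobuf_dma_sg_memory wrapper that file keeps in buf->priv:

    struct videobuf_dmabuf *videobuf_to_dma(struct videobuf_buffer *buf)
    {
        /* buf->priv is assumed to hold the videobuf_dma_sg_memory wrapper */
        struct videobuf_dma_sg_memory *mem = buf->priv;

        BUG_ON(!mem);
        MAGIC_CHECK(mem->magic, MAGIC_SG_MEM);

        return &mem->dma;
    }
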
148 static void videobuf_dma_init(struct videobuf_dmabuf *dma) in videobuf_dma_init() argument
150 memset(dma, 0, sizeof(*dma)); in videobuf_dma_init()
151 dma->magic = MAGIC_DMABUF; in videobuf_dma_init()
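
The two fragments above look like the whole of videobuf_dma_init(): it zeroes the descriptor and tags it so later MAGIC_CHECK() calls can catch misuse.

    static void videobuf_dma_init(struct videobuf_dmabuf *dma)
    {
        memset(dma, 0, sizeof(*dma));   /* start from a clean descriptor */
        dma->magic = MAGIC_DMABUF;      /* sanity tag checked by MAGIC_CHECK() */
    }
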
154 static int videobuf_dma_init_user_locked(struct videobuf_dmabuf *dma, in videobuf_dma_init_user_locked() argument
161 dma->direction = direction; in videobuf_dma_init_user_locked()
162 switch (dma->direction) { in videobuf_dma_init_user_locked()
175 dma->offset = data & ~PAGE_MASK; in videobuf_dma_init_user_locked()
176 dma->size = size; in videobuf_dma_init_user_locked()
177 dma->nr_pages = last-first+1; in videobuf_dma_init_user_locked()
178 dma->pages = kmalloc_array(dma->nr_pages, sizeof(struct page *), in videobuf_dma_init_user_locked()
180 if (NULL == dma->pages) in videobuf_dma_init_user_locked()
187 data, size, dma->nr_pages); in videobuf_dma_init_user_locked()
189 err = get_user_pages_longterm(data & PAGE_MASK, dma->nr_pages, in videobuf_dma_init_user_locked()
190 flags, dma->pages, NULL); in videobuf_dma_init_user_locked()
192 if (err != dma->nr_pages) { in videobuf_dma_init_user_locked()
193 dma->nr_pages = (err >= 0) ? err : 0; in videobuf_dma_init_user_locked()
195 dma->nr_pages); in videobuf_dma_init_user_locked()
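
Taken together, these fragments sketch the USERPTR pinning path: compute the page span of the user range, allocate an array of page pointers, then pin the pages with get_user_pages_longterm(). A condensed sketch under those assumptions (the FOLL_* flag handling and the error returns are inferred, and the debug printouts are omitted):

    static int videobuf_dma_init_user_locked(struct videobuf_dmabuf *dma,
                                             int direction, unsigned long data,
                                             unsigned long size)
    {
        unsigned long first, last;
        unsigned int flags = FOLL_FORCE;    /* assumed gup flags */
        int err;

        dma->direction = direction;
        switch (dma->direction) {
        case DMA_FROM_DEVICE:
            flags |= FOLL_WRITE;            /* the device writes into these pages */
            break;
        case DMA_TO_DEVICE:
            break;
        default:
            BUG();
        }

        first = (data & PAGE_MASK) >> PAGE_SHIFT;
        last  = ((data + size - 1) & PAGE_MASK) >> PAGE_SHIFT;
        dma->offset   = data & ~PAGE_MASK;  /* byte offset into the first page */
        dma->size     = size;
        dma->nr_pages = last - first + 1;

        dma->pages = kmalloc_array(dma->nr_pages, sizeof(struct page *),
                                   GFP_KERNEL);
        if (NULL == dma->pages)
            return -ENOMEM;

        /* pin the user pages for long-term DMA; the caller holds mmap_sem */
        err = get_user_pages_longterm(data & PAGE_MASK, dma->nr_pages,
                                      flags, dma->pages, NULL);
        if (err != dma->nr_pages) {
            dma->nr_pages = (err >= 0) ? err : 0;   /* remember what got pinned */
            return err < 0 ? err : -EINVAL;
        }
        return 0;
    }
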
201 static int videobuf_dma_init_user(struct videobuf_dmabuf *dma, int direction, in videobuf_dma_init_user() argument
207 ret = videobuf_dma_init_user_locked(dma, direction, data, size); in videobuf_dma_init_user()
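
videobuf_dma_init_user() appears to be a thin wrapper that takes the mm lock around the _locked variant; on kernels of this vintage that would be mmap_sem (an assumption):

    static int videobuf_dma_init_user(struct videobuf_dmabuf *dma, int direction,
                                      unsigned long data, unsigned long size)
    {
        int ret;

        down_read(&current->mm->mmap_sem);  /* assumed: pre-5.8 mmap_sem naming */
        ret = videobuf_dma_init_user_locked(dma, direction, data, size);
        up_read(&current->mm->mmap_sem);

        return ret;
    }
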
213 static int videobuf_dma_init_kernel(struct videobuf_dmabuf *dma, int direction, in videobuf_dma_init_kernel() argument
220 dma->direction = direction; in videobuf_dma_init_kernel()
221 dma->vaddr_pages = kcalloc(nr_pages, sizeof(*dma->vaddr_pages), in videobuf_dma_init_kernel()
223 if (!dma->vaddr_pages) in videobuf_dma_init_kernel()
226 dma->dma_addr = kcalloc(nr_pages, sizeof(*dma->dma_addr), GFP_KERNEL); in videobuf_dma_init_kernel()
227 if (!dma->dma_addr) { in videobuf_dma_init_kernel()
228 kfree(dma->vaddr_pages); in videobuf_dma_init_kernel()
234 addr = dma_alloc_coherent(dma->dev, PAGE_SIZE, in videobuf_dma_init_kernel()
235 &(dma->dma_addr[i]), GFP_KERNEL); in videobuf_dma_init_kernel()
239 dma->vaddr_pages[i] = virt_to_page(addr); in videobuf_dma_init_kernel()
241 dma->vaddr = vmap(dma->vaddr_pages, nr_pages, VM_MAP | VM_IOREMAP, in videobuf_dma_init_kernel()
243 if (NULL == dma->vaddr) { in videobuf_dma_init_kernel()
249 dma->vaddr, nr_pages << PAGE_SHIFT); in videobuf_dma_init_kernel()
251 memset(dma->vaddr, 0, nr_pages << PAGE_SHIFT); in videobuf_dma_init_kernel()
252 dma->nr_pages = nr_pages; in videobuf_dma_init_kernel()
260 addr = page_address(dma->vaddr_pages[i]); in videobuf_dma_init_kernel()
261 dma_free_coherent(dma->dev, PAGE_SIZE, addr, dma->dma_addr[i]); in videobuf_dma_init_kernel()
263 kfree(dma->dma_addr); in videobuf_dma_init_kernel()
264 dma->dma_addr = NULL; in videobuf_dma_init_kernel()
265 kfree(dma->vaddr_pages); in videobuf_dma_init_kernel()
266 dma->vaddr_pages = NULL; in videobuf_dma_init_kernel()
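
The kernel-memory variant allocates one coherent DMA page at a time, collects the corresponding struct page pointers, and then vmap()s them into a single contiguous kernel mapping. Roughly, with the error unwinding the fragments hint at (debug printouts omitted):

    static int videobuf_dma_init_kernel(struct videobuf_dmabuf *dma, int direction,
                                        unsigned long nr_pages)
    {
        int i;

        dma->direction = direction;
        dma->vaddr_pages = kcalloc(nr_pages, sizeof(*dma->vaddr_pages),
                                   GFP_KERNEL);
        if (!dma->vaddr_pages)
            return -ENOMEM;

        dma->dma_addr = kcalloc(nr_pages, sizeof(*dma->dma_addr), GFP_KERNEL);
        if (!dma->dma_addr) {
            kfree(dma->vaddr_pages);
            return -ENOMEM;
        }

        for (i = 0; i < nr_pages; i++) {
            void *addr;

            /* one coherent page per slot; dma->dma_addr[i] gets the bus address */
            addr = dma_alloc_coherent(dma->dev, PAGE_SIZE,
                                      &(dma->dma_addr[i]), GFP_KERNEL);
            if (addr == NULL)
                goto out_free_pages;

            dma->vaddr_pages[i] = virt_to_page(addr);
        }

        /* stitch the pages into one virtually contiguous kernel mapping */
        dma->vaddr = vmap(dma->vaddr_pages, nr_pages, VM_MAP | VM_IOREMAP,
                          PAGE_KERNEL);
        if (NULL == dma->vaddr)
            goto out_free_pages;

        memset(dma->vaddr, 0, nr_pages << PAGE_SHIFT);
        dma->nr_pages = nr_pages;
        return 0;

    out_free_pages:
        while (i > 0) {
            void *addr;

            i--;
            addr = page_address(dma->vaddr_pages[i]);
            dma_free_coherent(dma->dev, PAGE_SIZE, addr, dma->dma_addr[i]);
        }
        kfree(dma->dma_addr);
        dma->dma_addr = NULL;
        kfree(dma->vaddr_pages);
        dma->vaddr_pages = NULL;
        return -ENOMEM;
    }
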
272 static int videobuf_dma_init_overlay(struct videobuf_dmabuf *dma, int direction, in videobuf_dma_init_overlay() argument
277 dma->direction = direction; in videobuf_dma_init_overlay()
282 dma->bus_addr = addr; in videobuf_dma_init_overlay()
283 dma->nr_pages = nr_pages; in videobuf_dma_init_overlay()
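
The overlay variant does no allocation at all: the framebuffer already exists at a known bus address, so the descriptor just records that address and the page count. A minimal sketch (the sanity check is an assumption):

    static int videobuf_dma_init_overlay(struct videobuf_dmabuf *dma, int direction,
                                         dma_addr_t addr, int nr_pages)
    {
        dma->direction = direction;
        BUG_ON(0 == addr);           /* assumed: a zero bus address is a bug */
        dma->bus_addr = addr;        /* bus address of the overlay framebuffer */
        dma->nr_pages = nr_pages;
        return 0;
    }
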
288 static int videobuf_dma_map(struct device *dev, struct videobuf_dmabuf *dma) in videobuf_dma_map() argument
290 MAGIC_CHECK(dma->magic, MAGIC_DMABUF); in videobuf_dma_map()
291 BUG_ON(0 == dma->nr_pages); in videobuf_dma_map()
293 if (dma->pages) { in videobuf_dma_map()
294 dma->sglist = videobuf_pages_to_sg(dma->pages, dma->nr_pages, in videobuf_dma_map()
295 dma->offset, dma->size); in videobuf_dma_map()
297 if (dma->vaddr) { in videobuf_dma_map()
298 dma->sglist = videobuf_vmalloc_to_sg(dma->vaddr, in videobuf_dma_map()
299 dma->nr_pages); in videobuf_dma_map()
301 if (dma->bus_addr) { in videobuf_dma_map()
302 dma->sglist = vmalloc(sizeof(*dma->sglist)); in videobuf_dma_map()
303 if (NULL != dma->sglist) { in videobuf_dma_map()
304 dma->sglen = 1; in videobuf_dma_map()
305 sg_dma_address(&dma->sglist[0]) = dma->bus_addr in videobuf_dma_map()
307 dma->sglist[0].offset = dma->bus_addr & ~PAGE_MASK; in videobuf_dma_map()
308 sg_dma_len(&dma->sglist[0]) = dma->nr_pages * PAGE_SIZE; in videobuf_dma_map()
311 if (NULL == dma->sglist) { in videobuf_dma_map()
315 if (!dma->bus_addr) { in videobuf_dma_map()
316 dma->sglen = dma_map_sg(dev, dma->sglist, in videobuf_dma_map()
317 dma->nr_pages, dma->direction); in videobuf_dma_map()
318 if (0 == dma->sglen) { in videobuf_dma_map()
321 vfree(dma->sglist); in videobuf_dma_map()
322 dma->sglist = NULL; in videobuf_dma_map()
323 dma->sglen = 0; in videobuf_dma_map()
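
videobuf_dma_map() builds a scatterlist from whichever backing store the descriptor carries (pinned user pages, a vmap()ed kernel buffer, or a raw overlay bus address) and, except in the overlay case, hands it to dma_map_sg(). A condensed sketch along the lines of the fragments (videobuf_pages_to_sg() and videobuf_vmalloc_to_sg() are the file's own helpers):

    static int videobuf_dma_map(struct device *dev, struct videobuf_dmabuf *dma)
    {
        MAGIC_CHECK(dma->magic, MAGIC_DMABUF);
        BUG_ON(0 == dma->nr_pages);

        if (dma->pages)                     /* pinned user pages */
            dma->sglist = videobuf_pages_to_sg(dma->pages, dma->nr_pages,
                                               dma->offset, dma->size);
        if (dma->vaddr)                     /* vmap()ed kernel buffer */
            dma->sglist = videobuf_vmalloc_to_sg(dma->vaddr, dma->nr_pages);
        if (dma->bus_addr) {                /* overlay: one hand-built entry */
            dma->sglist = vmalloc(sizeof(*dma->sglist));
            if (NULL != dma->sglist) {
                dma->sglen = 1;
                sg_dma_address(&dma->sglist[0]) = dma->bus_addr & PAGE_MASK;
                dma->sglist[0].offset = dma->bus_addr & ~PAGE_MASK;
                sg_dma_len(&dma->sglist[0]) = dma->nr_pages * PAGE_SIZE;
            }
        }
        if (NULL == dma->sglist)
            return -ENOMEM;

        if (!dma->bus_addr) {               /* overlay is already bus-addressed */
            dma->sglen = dma_map_sg(dev, dma->sglist,
                                    dma->nr_pages, dma->direction);
            if (0 == dma->sglen) {
                vfree(dma->sglist);
                dma->sglist = NULL;
                dma->sglen = 0;
                return -ENOMEM;
            }
        }
        return 0;
    }
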
331 int videobuf_dma_unmap(struct device *dev, struct videobuf_dmabuf *dma) in videobuf_dma_unmap() argument
333 MAGIC_CHECK(dma->magic, MAGIC_DMABUF); in videobuf_dma_unmap()
335 if (!dma->sglen) in videobuf_dma_unmap()
338 dma_unmap_sg(dev, dma->sglist, dma->nr_pages, dma->direction); in videobuf_dma_unmap()
340 vfree(dma->sglist); in videobuf_dma_unmap()
341 dma->sglist = NULL; in videobuf_dma_unmap()
342 dma->sglen = 0; in videobuf_dma_unmap()
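
Unmapping is the mirror image: undo the dma_map_sg() and drop the scatterlist, leaving the pages and addresses themselves for videobuf_dma_free().

    int videobuf_dma_unmap(struct device *dev, struct videobuf_dmabuf *dma)
    {
        MAGIC_CHECK(dma->magic, MAGIC_DMABUF);

        if (!dma->sglen)
            return 0;               /* never mapped, or already unmapped */

        dma_unmap_sg(dev, dma->sglist, dma->nr_pages, dma->direction);

        vfree(dma->sglist);
        dma->sglist = NULL;
        dma->sglen = 0;

        return 0;
    }
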
348 int videobuf_dma_free(struct videobuf_dmabuf *dma) in videobuf_dma_free() argument
351 MAGIC_CHECK(dma->magic, MAGIC_DMABUF); in videobuf_dma_free()
352 BUG_ON(dma->sglen); in videobuf_dma_free()
354 if (dma->pages) { in videobuf_dma_free()
355 for (i = 0; i < dma->nr_pages; i++) { in videobuf_dma_free()
356 if (dma->direction == DMA_FROM_DEVICE) in videobuf_dma_free()
357 set_page_dirty_lock(dma->pages[i]); in videobuf_dma_free()
358 put_page(dma->pages[i]); in videobuf_dma_free()
360 kfree(dma->pages); in videobuf_dma_free()
361 dma->pages = NULL; in videobuf_dma_free()
364 if (dma->dma_addr) { in videobuf_dma_free()
365 for (i = 0; i < dma->nr_pages; i++) { in videobuf_dma_free()
368 addr = page_address(dma->vaddr_pages[i]); in videobuf_dma_free()
369 dma_free_coherent(dma->dev, PAGE_SIZE, addr, in videobuf_dma_free()
370 dma->dma_addr[i]); in videobuf_dma_free()
372 kfree(dma->dma_addr); in videobuf_dma_free()
373 dma->dma_addr = NULL; in videobuf_dma_free()
374 kfree(dma->vaddr_pages); in videobuf_dma_free()
375 dma->vaddr_pages = NULL; in videobuf_dma_free()
376 vunmap(dma->vaddr); in videobuf_dma_free()
377 dma->vaddr = NULL; in videobuf_dma_free()
380 if (dma->bus_addr) in videobuf_dma_free()
381 dma->bus_addr = 0; in videobuf_dma_free()
382 dma->direction = DMA_NONE; in videobuf_dma_free()
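
videobuf_dma_free() releases whichever backing store was set up: pinned user pages are dirtied (for DMA_FROM_DEVICE) and released, coherent kernel pages are freed and the vmap() torn down, and an overlay bus address is simply cleared. A sketch following the fragments:

    int videobuf_dma_free(struct videobuf_dmabuf *dma)
    {
        int i;

        MAGIC_CHECK(dma->magic, MAGIC_DMABUF);
        BUG_ON(dma->sglen);              /* must be unmapped first */

        if (dma->pages) {                /* USERPTR: unpin the user pages */
            for (i = 0; i < dma->nr_pages; i++) {
                if (dma->direction == DMA_FROM_DEVICE)
                    set_page_dirty_lock(dma->pages[i]);
                put_page(dma->pages[i]);
            }
            kfree(dma->pages);
            dma->pages = NULL;
        }

        if (dma->dma_addr) {             /* kernel buffer: free coherent pages */
            for (i = 0; i < dma->nr_pages; i++) {
                void *addr;

                addr = page_address(dma->vaddr_pages[i]);
                dma_free_coherent(dma->dev, PAGE_SIZE, addr,
                                  dma->dma_addr[i]);
            }
            kfree(dma->dma_addr);
            dma->dma_addr = NULL;
            kfree(dma->vaddr_pages);
            dma->vaddr_pages = NULL;
            vunmap(dma->vaddr);
            dma->vaddr = NULL;
        }

        if (dma->bus_addr)               /* overlay: nothing was allocated */
            dma->bus_addr = 0;
        dma->direction = DMA_NONE;

        return 0;
    }
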
486 videobuf_dma_init(&mem->dma); in __videobuf_alloc_vb()
502 return mem->dma.vaddr; in __videobuf_to_vaddr()
516 if (!mem->dma.dev) in __videobuf_iolock()
517 mem->dma.dev = q->dev; in __videobuf_iolock()
519 WARN_ON(mem->dma.dev != q->dev); in __videobuf_iolock()
527 err = videobuf_dma_init_kernel(&mem->dma, in __videobuf_iolock()
534 err = videobuf_dma_init_user(&mem->dma, in __videobuf_iolock()
545 err = videobuf_dma_init_user_locked(&mem->dma, in __videobuf_iolock()
563 err = videobuf_dma_init_overlay(&mem->dma, DMA_FROM_DEVICE, in __videobuf_iolock()
571 err = videobuf_dma_map(q->dev, &mem->dma); in __videobuf_iolock()
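
__videobuf_iolock() is where a queued buffer is bound to one of the three init paths above and then mapped. The dispatch is presumably driven by the buffer's v4l2 memory type; a heavily condensed sketch, with the exact MMAP/USERPTR/OVERLAY case handling inferred rather than taken from the listing:

    static int __videobuf_iolock(struct videobuf_queue *q,
                                 struct videobuf_buffer *vb,
                                 struct v4l2_framebuffer *fbuf)
    {
        struct videobuf_dma_sg_memory *mem = vb->priv;   /* assumed wrapper */
        unsigned long pages;
        dma_addr_t bus;
        int err;

        if (!mem->dma.dev)
            mem->dma.dev = q->dev;       /* remember the DMA device */
        else
            WARN_ON(mem->dma.dev != q->dev);

        switch (vb->memory) {
        case V4L2_MEMORY_MMAP:
        case V4L2_MEMORY_USERPTR:
            if (0 == vb->baddr) {
                /* no userspace address: fall back to a kernel bounce buffer */
                pages = PAGE_ALIGN(vb->size) >> PAGE_SHIFT;
                err = videobuf_dma_init_kernel(&mem->dma, DMA_FROM_DEVICE,
                                               pages);
            } else if (vb->memory == V4L2_MEMORY_USERPTR) {
                /* DMA straight into the user buffer */
                err = videobuf_dma_init_user(&mem->dma, DMA_FROM_DEVICE,
                                             vb->baddr, vb->bsize);
            } else {
                /* MMAP path: caller already holds mmap_sem (assumption) */
                err = videobuf_dma_init_user_locked(&mem->dma, DMA_FROM_DEVICE,
                                                    vb->baddr, vb->bsize);
            }
            if (err)
                return err;
            break;
        case V4L2_MEMORY_OVERLAY:
            if (NULL == fbuf)
                return -EINVAL;
            bus = (dma_addr_t)(unsigned long)fbuf->base + vb->boff;
            pages = PAGE_ALIGN(vb->size) >> PAGE_SHIFT;
            err = videobuf_dma_init_overlay(&mem->dma, DMA_FROM_DEVICE,
                                            bus, pages);
            if (err)
                return err;
            break;
        default:
            BUG();
        }

        return videobuf_dma_map(q->dev, &mem->dma);
    }
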
582 BUG_ON(!mem || !mem->dma.sglen); in __videobuf_sync()
585 MAGIC_CHECK(mem->dma.magic, MAGIC_DMABUF); in __videobuf_sync()
587 dma_sync_sg_for_cpu(q->dev, mem->dma.sglist, in __videobuf_sync()
588 mem->dma.nr_pages, mem->dma.direction); in __videobuf_sync()
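
Finally, __videobuf_sync() hands ownership of the mapped scatterlist back to the CPU before the captured data is consumed; a short sketch (the wrapper struct and signature are assumptions):

    static int __videobuf_sync(struct videobuf_queue *q,
                               struct videobuf_buffer *buf)
    {
        struct videobuf_dma_sg_memory *mem = buf->priv;   /* assumed wrapper */

        BUG_ON(!mem || !mem->dma.sglen);
        MAGIC_CHECK(mem->dma.magic, MAGIC_DMABUF);

        /* make device writes visible to the CPU before userspace reads them */
        dma_sync_sg_for_cpu(q->dev, mem->dma.sglist,
                            mem->dma.nr_pages, mem->dma.direction);

        return 0;
    }
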