DMA API functions that take a struct device *dev argument:
void *dma_alloc_coherent(struct device *dev, size_t size,
                         dma_addr_t *dma_handle, gfp_t flag)

void dma_free_coherent(struct device *dev, size_t size,
                       void *cpu_addr, dma_addr_t dma_handle)
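A minimal sketch of how a driver typically pairs these two calls; the names
my_alloc_ring()/my_free_ring() and RING_BYTES are illustrative, not part of
the DMA API:

	#include <linux/dma-mapping.h>
	#include <linux/gfp.h>

	#define RING_BYTES 4096	/* made-up size of the shared ring */

	/* Allocate a buffer both the CPU and the device can see; *ring_dma
	 * receives the address the device must be programmed with. */
	static void *my_alloc_ring(struct device *dev, dma_addr_t *ring_dma)
	{
		return dma_alloc_coherent(dev, RING_BYTES, ring_dma, GFP_KERNEL);
	}

	static void my_free_ring(struct device *dev, void *ring, dma_addr_t ring_dma)
	{
		dma_free_coherent(dev, RING_BYTES, ring, ring_dma);
	}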
dma_addr_t dma_map_single(struct device *dev, void *ptr, size_t size,
                          enum dma_data_direction direction)

void dma_unmap_single(struct device *dev, dma_addr_t dma_addr, size_t size,
                      enum dma_data_direction direction)
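A sketch of the usual streaming-mapping pattern around a single transfer;
my_start_tx() and its arguments are illustrative, and the two-argument
dma_mapping_error() check is the form found in current kernels:

	#include <linux/dma-mapping.h>

	/* "buf" must be kmalloc()/page-backed memory, not vmalloc() or stack. */
	static int my_start_tx(struct device *dev, void *buf, size_t len)
	{
		dma_addr_t dma_addr;

		dma_addr = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
		if (dma_mapping_error(dev, dma_addr))
			return -ENOMEM;

		/* ... point the hardware at dma_addr and start the transfer ... */

		/* only after the device has finished with the buffer: */
		dma_unmap_single(dev, dma_addr, len, DMA_TO_DEVICE);
		return 0;
	}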
int dma_map_sg(struct device *dev, struct scatterlist *sglist, int nents,
               enum dma_data_direction direction)

dma_addr_t dma_map_page(struct device *dev, struct page *page, unsigned long offset,
                        size_t size, enum dma_data_direction direction)

void dma_unmap_page(struct device *dev, dma_addr_t dma_address, size_t size,
                    enum dma_data_direction direction)

void dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nhwentries,
                  enum dma_data_direction direction)
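A sketch of mapping a scatter/gather request; my_map_request() and the
descriptor-ring comment are illustrative. The device is programmed with the
count returned by dma_map_sg(), while dma_unmap_sg() must be passed the
original nents:

	#include <linux/dma-mapping.h>
	#include <linux/scatterlist.h>
	#include <linux/printk.h>

	static int my_map_request(struct device *dev, struct scatterlist *sglist,
				  int nents)
	{
		struct scatterlist *sg;
		int i, count;

		count = dma_map_sg(dev, sglist, nents, DMA_FROM_DEVICE);
		if (!count)
			return -ENOMEM;

		for_each_sg(sglist, sg, count, i) {
			dma_addr_t addr = sg_dma_address(sg);
			unsigned int len = sg_dma_len(sg);

			/* ... write addr/len into the device's descriptor ring ... */
			pr_debug("segment %d: %pad + %u\n", i, &addr, len);
		}

		/* after the transfer: unmap with the original nents, not count */
		dma_unmap_sg(dev, sglist, nents, DMA_FROM_DEVICE);
		return 0;
	}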
void dma_sync_single_for_cpu(struct device *dev, dma_addr_t dma_handle, size_t size,
                             enum dma_data_direction direction)

void dma_sync_single_for_device(struct device *dev, dma_addr_t dma_handle, size_t size,
                                enum dma_data_direction direction)

void dma_sync_single_range_for_cpu(struct device *dev, dma_addr_t dma_handle,
                                   unsigned long offset, size_t size,
                                   enum dma_data_direction direction)

void dma_sync_single_range_for_device(struct device *dev, dma_addr_t dma_handle,
                                      unsigned long offset, size_t size,
                                      enum dma_data_direction direction)

void dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, int nelems,
                         enum dma_data_direction direction)

void dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg, int nelems,
                            enum dma_data_direction direction)
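A sketch of handing a long-lived streaming mapping back and forth between the
CPU and the device; my_check_rx_buffer() and its arguments are illustrative:

	#include <linux/dma-mapping.h>

	/* "dma_handle"/"cpu_buf" refer to a buffer that stays mapped with
	 * dma_map_single(..., DMA_FROM_DEVICE) across many transfers. */
	static void my_check_rx_buffer(struct device *dev, dma_addr_t dma_handle,
				       void *cpu_buf, size_t len)
	{
		/* give ownership to the CPU before looking at the data */
		dma_sync_single_for_cpu(dev, dma_handle, len, DMA_FROM_DEVICE);

		/* ... inspect or copy the contents of cpu_buf ... */

		/* hand the buffer back before the device writes to it again */
		dma_sync_single_for_device(dev, dma_handle, len, DMA_FROM_DEVICE);
	}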
int dma_supported(struct device *dev, u64 mask)
int dma_set_mask(struct device *dev, u64 mask)
{
	if (!dev->dma_mask || !dma_supported(dev, mask))
		return -EIO;

	*dev->dma_mask = mask;

	return 0;
}
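A sketch of the usual probe-time pattern built on dma_set_mask(): try 64-bit
addressing first and fall back to 32-bit. my_setup_dma_mask() is an
illustrative name, and DMA_BIT_MASK() is assumed to be available as in
current kernels:

	#include <linux/device.h>
	#include <linux/dma-mapping.h>

	static int my_setup_dma_mask(struct device *dev)
	{
		if (!dma_set_mask(dev, DMA_BIT_MASK(64)))
			return 0;	/* device can address 64 bits */
		if (!dma_set_mask(dev, DMA_BIT_MASK(32)))
			return 0;	/* fall back to 32-bit DMA */

		dev_warn(dev, "no usable DMA addressing\n");
		return -EIO;
	}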
void dma_cache_sync(struct device *dev, void *vaddr, size_t size,
                    enum dma_data_direction direction)
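A sketch of flushing CPU writes to non-coherent DMA memory before the device
reads them; it assumes vaddr came from dma_alloc_noncoherent(), the allocator
dma_cache_sync() was paired with in kernels of this vintage, and
my_publish_descriptors() is an illustrative name:

	#include <linux/dma-mapping.h>

	static void my_publish_descriptors(struct device *dev, void *vaddr, size_t len)
	{
		/* make the CPU's writes visible to the device */
		dma_cache_sync(dev, vaddr, len, DMA_TO_DEVICE);

		/* ... ring the device's doorbell ... */
	}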
int dma_declare_coherent_memory(struct device *dev, dma_addr_t bus_addr,
                                dma_addr_t device_addr, size_t size, int flags)

void dma_release_declared_memory(struct device *dev)

void *dma_mark_declared_memory_occupied(struct device *dev,
                                        dma_addr_t device_addr, size_t size)
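A sketch of dedicating a device-local memory window to coherent allocations;
the MY_MEM_* constants are made up, and the DMA_MEMORY_MAP flag plus the
nonzero-on-success return convention follow the older kernels this listing
appears to come from:

	#include <linux/dma-mapping.h>

	#define MY_MEM_BUS_ADDR	0xf0000000UL	/* made-up CPU/bus address of the window */
	#define MY_MEM_DEV_ADDR	0x00000000UL	/* made-up address as seen by the device */
	#define MY_MEM_SIZE	0x10000		/* made-up 64 KiB window */

	static int my_claim_onboard_ram(struct device *dev)
	{
		if (!dma_declare_coherent_memory(dev, MY_MEM_BUS_ADDR, MY_MEM_DEV_ADDR,
						 MY_MEM_SIZE, DMA_MEMORY_MAP))
			return -ENXIO;

		/* dma_alloc_coherent() on this device now allocates from the window */
		return 0;
	}

	static void my_release_onboard_ram(struct device *dev)
	{
		dma_release_declared_memory(dev);
	}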