{
	return etnaviv_iommu_get_cmdbuf_va(buf->gpu, buf);
}
+
+dma_addr_t etnaviv_cmdbuf_get_pa(struct etnaviv_cmdbuf *buf)
+{
+	return buf->paddr;
+}
};
u32 etnaviv_cmdbuf_get_va(struct etnaviv_cmdbuf *buf);
+dma_addr_t etnaviv_cmdbuf_get_pa(struct etnaviv_cmdbuf *buf);
#endif /* __ETNAVIV_CMDBUF_H__ */
	u32 i;
	seq_printf(m, "virt %p - phys 0x%llx - free 0x%08x\n",
-			buf->vaddr, (u64)buf->paddr, size - buf->user_size);
+			buf->vaddr, (u64)etnaviv_cmdbuf_get_pa(buf),
+			size - buf->user_size);
	for (i = 0; i < size / 4; i++) {
		if (i && !(i % 4))
	prefetch = etnaviv_buffer_config_mmuv2(gpu,
				(u32)etnaviv_domain->mtlb_dma,
				(u32)etnaviv_domain->bad_page_dma);
-	etnaviv_gpu_start_fe(gpu, gpu->buffer->paddr, prefetch);
+	etnaviv_gpu_start_fe(gpu, (u32)etnaviv_cmdbuf_get_pa(gpu->buffer),
+			     prefetch);
	etnaviv_gpu_wait_idle(gpu, 100);
	gpu_write(gpu, VIVS_MMUv2_CONTROL, VIVS_MMUv2_CONTROL_ENABLE);
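
For orientation, here is a minimal sketch of the struct etnaviv_cmdbuf fields the new accessor pair wraps, inferred only from the hunks above; the full definition in etnaviv_cmdbuf.h carries more bookkeeping, so treat the field set and layout as assumptions rather than the real struct:

/* Sketch only: fields inferred from the usages above, not the full struct. */
struct etnaviv_cmdbuf {
	struct etnaviv_gpu *gpu;	/* owner, consulted by etnaviv_cmdbuf_get_va() */
	void *vaddr;			/* CPU mapping dumped by the debugfs code above */
	dma_addr_t paddr;		/* DMA address returned by etnaviv_cmdbuf_get_pa() */
	u32 user_size;			/* portion of the buffer already filled */
	/* ... */
};

The split matches how the two addresses are used above: the MMUv2 restore path starts the FE from etnaviv_cmdbuf_get_pa() before VIVS_MMUv2_CONTROL_ENABLE is written, while etnaviv_cmdbuf_get_va() resolves the buffer through etnaviv_iommu_get_cmdbuf_va() for use once translation is in place.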