drm/i915: explicitly store base gem object in dma_buf->priv
Makes it more obviously correct what tricks we play by reusing the drm prime release helper.

Reviewed-by: Chris Wilson <chris@chris-wilson.co.uk>
Signed-off-by: Daniel Vetter <daniel.vetter@ffwll.ch>
commit 608806a549
parent f214266c0d
--- a/drivers/gpu/drm/i915/i915_gem_dmabuf.c
+++ b/drivers/gpu/drm/i915/i915_gem_dmabuf.c
@@ -27,10 +27,15 @@
 #include "i915_drv.h"
 #include <linux/dma-buf.h>
 
+static struct drm_i915_gem_object *dma_buf_to_obj(struct dma_buf *buf)
+{
+	return to_intel_bo(buf->priv);
+}
+
 static struct sg_table *i915_gem_map_dma_buf(struct dma_buf_attachment *attachment,
 					     enum dma_data_direction dir)
 {
-	struct drm_i915_gem_object *obj = attachment->dmabuf->priv;
+	struct drm_i915_gem_object *obj = dma_buf_to_obj(attachment->dmabuf);
 	struct sg_table *st;
 	struct scatterlist *src, *dst;
 	int ret, i;
@@ -85,7 +90,7 @@ static void i915_gem_unmap_dma_buf(struct dma_buf_attachment *attachment,
 				   struct sg_table *sg,
 				   enum dma_data_direction dir)
 {
-	struct drm_i915_gem_object *obj = attachment->dmabuf->priv;
+	struct drm_i915_gem_object *obj = dma_buf_to_obj(attachment->dmabuf);
 
 	mutex_lock(&obj->base.dev->struct_mutex);
 
@@ -111,7 +116,7 @@ static void i915_gem_dmabuf_release(struct dma_buf *dma_buf)
 
 static void *i915_gem_dmabuf_vmap(struct dma_buf *dma_buf)
 {
-	struct drm_i915_gem_object *obj = dma_buf->priv;
+	struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf);
 	struct drm_device *dev = obj->base.dev;
 	struct sg_page_iter sg_iter;
 	struct page **pages;
@@ -159,7 +164,7 @@ static void *i915_gem_dmabuf_vmap(struct dma_buf *dma_buf)
 
 static void i915_gem_dmabuf_vunmap(struct dma_buf *dma_buf, void *vaddr)
 {
-	struct drm_i915_gem_object *obj = dma_buf->priv;
+	struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf);
 	struct drm_device *dev = obj->base.dev;
 	int ret;
 
@@ -202,7 +207,7 @@ static int i915_gem_dmabuf_mmap(struct dma_buf *dma_buf, struct vm_area_struct *
 
 static int i915_gem_begin_cpu_access(struct dma_buf *dma_buf, size_t start, size_t length, enum dma_data_direction direction)
 {
-	struct drm_i915_gem_object *obj = dma_buf->priv;
+	struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf);
 	struct drm_device *dev = obj->base.dev;
 	int ret;
 	bool write = (direction == DMA_BIDIRECTIONAL || direction == DMA_TO_DEVICE);
@@ -233,9 +238,7 @@ static const struct dma_buf_ops i915_dmabuf_ops = {
 struct dma_buf *i915_gem_prime_export(struct drm_device *dev,
 				      struct drm_gem_object *gem_obj, int flags)
 {
-	struct drm_i915_gem_object *obj = to_intel_bo(gem_obj);
-
-	return dma_buf_export(obj, &i915_dmabuf_ops, obj->base.size, flags);
+	return dma_buf_export(gem_obj, &i915_dmabuf_ops, gem_obj->size, flags);
 }
 
 static int i915_gem_object_get_pages_dmabuf(struct drm_i915_gem_object *obj)
@@ -272,7 +275,7 @@ struct drm_gem_object *i915_gem_prime_import(struct drm_device *dev,
 
 	/* is this one of own objects? */
 	if (dma_buf->ops == &i915_dmabuf_ops) {
-		obj = dma_buf->priv;
+		obj = dma_buf_to_obj(dma_buf);
 		/* is it from our device? */
 		if (obj->base.dev == dev) {
 			/*
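Side note on the casting pattern the new dma_buf_to_obj() helper relies on: to_intel_bo() is a container_of()-style conversion from the embedded base struct drm_gem_object back to the containing struct drm_i915_gem_object, so storing the base object in dma_buf->priv and converting on use loses nothing. Below is a minimal, self-contained userspace sketch of that round trip; the struct and function names (fake_gem_object, fake_i915_gem_object, priv_to_obj) are made-up stand-ins for the real driver types, not kernel code.

#include <stddef.h>
#include <stdio.h>

/* Stand-ins for the kernel types; names here are illustrative only. */
struct fake_gem_object {
	size_t size;
};

struct fake_i915_gem_object {
	struct fake_gem_object base;	/* embedded base object */
	int cache_level;
};

/* container_of(): recover the containing struct from a pointer to a member. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

/* Rough analogue of dma_buf_to_obj()/to_intel_bo(): priv holds the base
 * object, the helper converts back to the driver-specific wrapper. */
static struct fake_i915_gem_object *priv_to_obj(void *priv)
{
	return container_of((struct fake_gem_object *)priv,
			    struct fake_i915_gem_object, base);
}

int main(void)
{
	struct fake_i915_gem_object obj = {
		.base = { .size = 4096 },
		.cache_level = 1,
	};

	/* Export side: store the *base* gem object, as the patch now does. */
	void *priv = &obj.base;

	/* dma_buf ops side: convert back explicitly instead of relying on
	 * the base happening to sit at the start of the wrapper struct. */
	struct fake_i915_gem_object *back = priv_to_obj(priv);

	printf("size=%zu cache_level=%d same=%d\n",
	       back->base.size, back->cache_level, back == &obj);
	return 0;
}

The practical payoff is the one the commit message mentions: once priv is guaranteed to hold the base gem object, a release helper that only knows about struct drm_gem_object (the shared drm prime release path) can use dma_buf->priv directly, while the driver-specific ops recover their wrapper through the explicit helper instead of an implicit pointer-identity trick.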