extern struct drm_gem_object *
drm_gem_object_lookup(struct drm_file *file, u32 handle);
+__attribute__((nonnull))
+static inline struct drm_i915_gem_object *
+i915_gem_object_get(struct drm_i915_gem_object *obj)
+{
+ drm_gem_object_reference(&obj->base);
+ return obj;
+}
+
+__deprecated
+extern void drm_gem_object_reference(struct drm_gem_object *);
+
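The two additions above work together: i915_gem_object_get() is compiled against the original declaration of drm_gem_object_reference(), and only then is the latter re-declared with __deprecated (the kernel's wrapper for __attribute__((deprecated))). Every direct call site that follows, including code added later, draws a compile-time warning, while the sanctioned wrapper stays silent. A minimal userspace sketch of the same trick, with hypothetical names (legacy_ref/new_get stand in for the drm/i915 pair):

#include <stdio.h>

void legacy_ref(int *p);			/* old API, declaration only */

/* The blessed wrapper binds to legacy_ref() before it is deprecated,
 * so its own call does not warn. */
static inline int *new_get(int *p)
{
	legacy_ref(p);
	return p;
}

/* From this point on, every direct use of legacy_ref() warns. */
__attribute__((deprecated))
void legacy_ref(int *p);

void legacy_ref(int *p)				/* definition */
{
	(*p)++;
}

int main(void)
{
	int refcount = 0;
	new_get(&refcount);			/* clean, no warning */
	/* legacy_ref(&refcount); */		/* would warn: 'legacy_ref' is deprecated */
	printf("refcount = %d\n", refcount);
	return 0;
}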
static inline bool
i915_gem_object_has_struct_page(const struct drm_i915_gem_object *obj)
{
struct i915_vma *vma, *next;
int ret;
- drm_gem_object_reference(&obj->base);
+ i915_gem_object_get(obj);
list_for_each_entry_safe(vma, next, &obj->vma_list, obj_link)
if (i915_vma_unbind(vma))
break;
/* Add a reference if we're newly entering the active list. */
if (obj->active == 0)
- drm_gem_object_reference(&obj->base);
+ i915_gem_object_get(obj);
obj->active |= intel_engine_flag(engine);
list_move_tail(&obj->engine_list[engine->id], &engine->active_list);
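In the hunk above the reference is tied to a state transition rather than to a caller: obj->active is a bitmask of engines, and the active state as a whole owns exactly one reference, taken when the mask goes from zero to non-zero (and dropped on the retire path when it returns to zero, which this diff does not show). A compact sketch of the idiom, with hypothetical names:

struct obj {
	unsigned int active;	/* bitmask of engines using the object */
	unsigned int refcount;
};

static void obj_get(struct obj *o)
{
	o->refcount++;
}

static void obj_put(struct obj *o)
{
	o->refcount--;
}

static void mark_active(struct obj *o, unsigned int engine_bit)
{
	if (o->active == 0)	/* first engine: active state takes its reference */
		obj_get(o);
	o->active |= engine_bit;
}

static void mark_idle(struct obj *o, unsigned int engine_bit)
{
	o->active &= ~engine_bit;
	if (o->active == 0)	/* last engine gone: release that reference */
		obj_put(o);
}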
* Importing dmabuf exported from our own gem increases
* refcount on gem itself instead of f_count of dmabuf.
*/
- drm_gem_object_reference(&obj->base);
- return &obj->base;
+ return &i915_gem_object_get(obj)->base;
}
}
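Because i915_gem_object_get() returns its argument, the old two-line take-a-reference-then-return sequence collapses into a single expression, as in the dmabuf hunk above. A small sketch of why the getter's return type is the whole point (names hypothetical):

struct base { unsigned int refcount; };
struct wrapper { struct base base; };

static inline struct wrapper *wrapper_get(struct wrapper *w)
{
	w->base.refcount++;	/* stands in for drm_gem_object_reference() */
	return w;		/* returning w is what enables chaining */
}

static struct base *export_base(struct wrapper *w)
{
	/* before: take_ref(&w->base); return &w->base;
	 * after: one expression, reference and result visibly coupled */
	return &wrapper_get(w)->base;
}

The typed argument also buys a compile-time check: callers must now pass a struct drm_i915_gem_object *, where the old call accepted any struct drm_gem_object *.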
exec_list);
if (drm_mm_scan_remove_block(&vma->node)) {
list_move(&vma->exec_list, &eviction_list);
- drm_gem_object_reference(&vma->obj->base);
+ i915_gem_object_get(vma->obj);
continue;
}
list_del_init(&vma->exec_list);
goto err;
}
- drm_gem_object_reference(&obj->base);
+ i915_gem_object_get(obj);
list_add_tail(&obj->obj_exec_link, &objects);
}
spin_unlock(&file->table_lock);
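The lookup loop above pins each object while file->table_lock is still held; once the lock is dropped the objects cannot disappear under us, because each one holds a reference taken inside the critical section. A userspace sketch of the pin-under-lock idiom using pthreads (hypothetical names):

#include <pthread.h>

struct object {
	unsigned int refcount;
	struct object *next;
};

static pthread_mutex_t table_lock = PTHREAD_MUTEX_INITIALIZER;

static void collect_objects(struct object *head)
{
	struct object *obj;

	pthread_mutex_lock(&table_lock);
	for (obj = head; obj; obj = obj->next)
		obj->refcount++;	/* the i915_gem_object_get() step */
	pthread_mutex_unlock(&table_lock);

	/* Safe to use the objects here: the table can change, but every
	 * object we saw is pinned by the reference taken under the lock. */
}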
vma = i915_gem_obj_to_ggtt(shadow_batch_obj);
vma->exec_entry = shadow_exec_entry;
vma->exec_entry->flags = __EXEC_OBJECT_HAS_PIN;
- drm_gem_object_reference(&shadow_batch_obj->base);
+ i915_gem_object_get(shadow_batch_obj);
list_add_tail(&vma->exec_list, &eb->vmas);
shadow_batch_obj->base.pending_read_domains = I915_GEM_DOMAIN_COMMAND;
if (!can_release_pages(obj))
continue;
- drm_gem_object_reference(&obj->base);
+ i915_gem_object_get(obj);
/* For the unbound phase, this should be a no-op! */
list_for_each_entry_safe(vma, v,
obj->userptr.work = &work->work;
obj->userptr.workers++;
- work->obj = obj;
- drm_gem_object_reference(&obj->base);
+ work->obj = i915_gem_object_get(obj);
work->task = current;
get_task_struct(work->task);
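Here the reference is handed off to deferred work: work->obj owns it until the worker runs and drops it, which is why folding the get into the assignment (work->obj = i915_gem_object_get(obj)) reads better than a detached reference line; the same shape appears again below with work->pending_flip_obj. A sketch of the ownership hand-off, with hypothetical names:

struct object { unsigned int refcount; };

struct work_item {
	struct object *obj;	/* owns one reference until the work completes */
};

static inline struct object *object_get(struct object *o)
{
	o->refcount++;
	return o;
}

static void queue_object_work(struct work_item *work, struct object *obj)
{
	/* Pin and record in one statement, so the pointer store and the
	 * reference it owns cannot drift apart in later refactoring. */
	work->obj = object_get(obj);
}

static void object_work_fn(struct work_item *work)
{
	/* ... operate on work->obj ... */
	work->obj->refcount--;	/* drop the queueing side's reference */
	work->obj = NULL;
}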
/* Reference the objects for the scheduled work. */
drm_framebuffer_reference(work->old_fb);
- drm_gem_object_reference(&obj->base);
crtc->primary->fb = fb;
update_state_fb(crtc->primary);
intel_fbc_pre_update(intel_crtc, intel_crtc->config,
to_intel_plane_state(primary->state));
- work->pending_flip_obj = obj;
+ work->pending_flip_obj = i915_gem_object_get(obj);
ret = i915_mutex_lock_interruptible(dev);
if (ret)