From 93a9ca37250681ec8fd2dd61b1db9abbd3f00455 Mon Sep 17 00:00:00 2001
From: Maxime Ripard <maxime@cerno.tech>
Date: Fri, 17 Feb 2023 13:29:27 +0100
Subject: [PATCH 0586/1085] drm/vc4: Make v3d paths unavailable on any
 generation newer than vc4

The V3D IP has been separate since BCM2711, so let's make sure we issue
a WARN if we're running not only on BCM2711, but also on anything newer.
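
To make the pattern concrete, here is a representative sketch (the hunks
below touch many call sites, and the exact return value differs per
function): every guard goes from an exact match on VC4_GEN_5 to a
relational check, so it keeps firing on any generation added after
VC4_GEN_4.

	/* Before: only the BCM2711 (VC4_GEN_5) generation tripped the WARN. */
	if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
		return -ENODEV;

	/* After: BCM2711 and anything newer trip the WARN and bail out. */
	if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
		return -ENODEV;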

Signed-off-by: Maxime Ripard <maxime@cerno.tech>
---
 drivers/gpu/drm/vc4/vc4_bo.c               | 28 ++++++++++++++--------------
 drivers/gpu/drm/vc4/vc4_crtc.c             |  4 ++--
 drivers/gpu/drm/vc4/vc4_drv.c              |  8 ++++----
 drivers/gpu/drm/vc4/vc4_gem.c              | 24 ++++++++++++------------
 drivers/gpu/drm/vc4/vc4_irq.c              | 10 +++++-----
 drivers/gpu/drm/vc4/vc4_kms.c              |  2 +-
 drivers/gpu/drm/vc4/vc4_perfmon.c          | 20 ++++++++++----------
 drivers/gpu/drm/vc4/vc4_render_cl.c        |  2 +-
 drivers/gpu/drm/vc4/vc4_v3d.c              | 10 +++++-----
 drivers/gpu/drm/vc4/vc4_validate.c         |  8 ++++----
 drivers/gpu/drm/vc4/vc4_validate_shaders.c |  2 +-
 11 files changed, 59 insertions(+), 59 deletions(-)

--- a/drivers/gpu/drm/vc4/vc4_bo.c
+++ b/drivers/gpu/drm/vc4/vc4_bo.c
@@ -251,7 +251,7 @@ void vc4_bo_add_to_purgeable_pool(struct
struct vc4_dev *vc4 = to_vc4_dev(bo->base.base.dev);
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
mutex_lock(&vc4->purgeable.lock);
@@ -265,7 +265,7 @@ static void vc4_bo_remove_from_purgeable
struct vc4_dev *vc4 = to_vc4_dev(bo->base.base.dev);
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
/* list_del_init() is used here because the caller might release
@@ -396,7 +396,7 @@ struct drm_gem_object *vc4_create_object
struct vc4_dev *vc4 = to_vc4_dev(dev);
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
return ERR_PTR(-ENODEV);
bo = kzalloc(sizeof(*bo), GFP_KERNEL);
@@ -427,7 +427,7 @@ struct vc4_bo *vc4_bo_create(struct drm_
struct drm_gem_dma_object *dma_obj;
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
return ERR_PTR(-ENODEV);
@@ -496,7 +496,7 @@ int vc4_bo_dumb_create(struct drm_file *
struct vc4_bo *bo = NULL;
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
ret = vc4_dumb_fixup_args(args);
@@ -622,7 +622,7 @@ int vc4_bo_inc_usecnt(struct vc4_bo *bo)
struct vc4_dev *vc4 = to_vc4_dev(bo->base.base.dev);
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
/* Fast path: if the BO is already retained by someone, no need to
@@ -661,7 +661,7 @@ void vc4_bo_dec_usecnt(struct vc4_bo *bo
struct vc4_dev *vc4 = to_vc4_dev(bo->base.base.dev);
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
/* Fast path: if the BO is still retained by someone, no need to test
@@ -783,7 +783,7 @@ int vc4_create_bo_ioctl(struct drm_devic
struct vc4_bo *bo = NULL;
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
ret = vc4_grab_bin_bo(vc4, vc4file);
@@ -813,7 +813,7 @@ int vc4_mmap_bo_ioctl(struct drm_device
struct drm_vc4_mmap_bo *args = data;
struct drm_gem_object *gem_obj;
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
gem_obj = drm_gem_object_lookup(file_priv, args->handle);
@@ -839,7 +839,7 @@ vc4_create_shader_bo_ioctl(struct drm_de
struct vc4_bo *bo = NULL;
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
@@ -918,7 +918,7 @@ int vc4_set_tiling_ioctl(struct drm_devi
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
if (args->flags != 0)
@@ -964,7 +964,7 @@ int vc4_get_tiling_ioctl(struct drm_devi
struct drm_gem_object *gem_obj;
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
if (args->flags != 0 || args->modifier != 0)
@@ -1007,7 +1007,7 @@ int vc4_bo_cache_init(struct drm_device
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
/* Create the initial set of BO labels that the kernel will
@@ -1071,7 +1071,7 @@ int vc4_label_bo_ioctl(struct drm_device
struct drm_gem_object *gem_obj;
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
--- a/drivers/gpu/drm/vc4/vc4_crtc.c
+++ b/drivers/gpu/drm/vc4/vc4_crtc.c
@@ -1024,7 +1024,7 @@ static int vc4_async_page_flip(struct dr
struct vc4_bo *bo = to_vc4_bo(&dma_bo->base);
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
@@ -1067,7 +1067,7 @@ int vc4_page_flip(struct drm_crtc *crtc,
struct drm_device *dev = crtc->dev;
struct vc4_dev *vc4 = to_vc4_dev(dev);
- if (vc4->gen == VC4_GEN_5)
+ if (vc4->gen > VC4_GEN_4)
return vc5_async_page_flip(crtc, fb, event, flags);
return vc4_async_page_flip(crtc, fb, event, flags);
--- a/drivers/gpu/drm/vc4/vc4_drv.c
+++ b/drivers/gpu/drm/vc4/vc4_drv.c
@@ -98,7 +98,7 @@ static int vc4_get_param_ioctl(struct dr
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
@@ -147,7 +147,7 @@ static int vc4_open(struct drm_device *d
struct vc4_dev *vc4 = to_vc4_dev(dev);
struct vc4_file *vc4file;
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
vc4file = kzalloc(sizeof(*vc4file), GFP_KERNEL);
@@ -165,7 +165,7 @@ static void vc4_close(struct drm_device
struct vc4_dev *vc4 = to_vc4_dev(dev);
struct vc4_file *vc4file = file->driver_priv;
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
if (vc4file->bin_bo_used)
@@ -314,7 +314,7 @@ static int vc4_drm_bind(struct device *d
- if (gen == VC4_GEN_5)
+ if (gen > VC4_GEN_4)
driver = &vc5_drm_driver;
driver = &vc4_drm_driver;
--- a/drivers/gpu/drm/vc4/vc4_gem.c
+++ b/drivers/gpu/drm/vc4/vc4_gem.c
@@ -76,7 +76,7 @@ vc4_get_hang_state_ioctl(struct drm_devi
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
@@ -389,7 +389,7 @@ vc4_wait_for_seqno(struct drm_device *de
unsigned long timeout_expire;
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
if (vc4->finished_seqno >= seqno)
@@ -474,7 +474,7 @@ vc4_submit_next_bin_job(struct drm_devic
struct vc4_dev *vc4 = to_vc4_dev(dev);
struct vc4_exec_info *exec;
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
@@ -522,7 +522,7 @@ vc4_submit_next_render_job(struct drm_de
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
/* A previous RCL may have written to one of our textures, and
@@ -543,7 +543,7 @@ vc4_move_job_to_render(struct drm_device
struct vc4_dev *vc4 = to_vc4_dev(dev);
bool was_empty = list_empty(&vc4->render_job_list);
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
list_move_tail(&exec->head, &vc4->render_job_list);
@@ -970,7 +970,7 @@ vc4_job_handle_completed(struct vc4_dev
unsigned long irqflags;
struct vc4_seqno_cb *cb, *cb_temp;
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
spin_lock_irqsave(&vc4->job_lock, irqflags);
@@ -1009,7 +1009,7 @@ int vc4_queue_seqno_cb(struct drm_device
struct vc4_dev *vc4 = to_vc4_dev(dev);
unsigned long irqflags;
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
@@ -1065,7 +1065,7 @@ vc4_wait_seqno_ioctl(struct drm_device *
struct vc4_dev *vc4 = to_vc4_dev(dev);
struct drm_vc4_wait_seqno *args = data;
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
return vc4_wait_for_seqno_ioctl_helper(dev, args->seqno,
@@ -1082,7 +1082,7 @@ vc4_wait_bo_ioctl(struct drm_device *dev
struct drm_gem_object *gem_obj;
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
@@ -1131,7 +1131,7 @@ vc4_submit_cl_ioctl(struct drm_device *d
args->shader_rec_size,
args->bo_handle_count);
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
@@ -1268,7 +1268,7 @@ int vc4_gem_init(struct drm_device *dev)
struct vc4_dev *vc4 = to_vc4_dev(dev);
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
vc4->dma_fence_context = dma_fence_context_alloc(1);
@@ -1327,7 +1327,7 @@ int vc4_gem_madvise_ioctl(struct drm_dev
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
switch (args->madv) {
--- a/drivers/gpu/drm/vc4/vc4_irq.c
+++ b/drivers/gpu/drm/vc4/vc4_irq.c
@@ -263,7 +263,7 @@ vc4_irq_enable(struct drm_device *dev)
struct vc4_dev *vc4 = to_vc4_dev(dev);
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
@@ -280,7 +280,7 @@ vc4_irq_disable(struct drm_device *dev)
struct vc4_dev *vc4 = to_vc4_dev(dev);
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
@@ -303,7 +303,7 @@ int vc4_irq_install(struct drm_device *d
struct vc4_dev *vc4 = to_vc4_dev(dev);
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
if (irq == IRQ_NOTCONNECTED)
@@ -324,7 +324,7 @@ void vc4_irq_uninstall(struct drm_device
struct vc4_dev *vc4 = to_vc4_dev(dev);
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
vc4_irq_disable(dev);
@@ -337,7 +337,7 @@ void vc4_irq_reset(struct drm_device *de
struct vc4_dev *vc4 = to_vc4_dev(dev);
unsigned long irqflags;
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
/* Acknowledge any stale IRQs. */
--- a/drivers/gpu/drm/vc4/vc4_kms.c
+++ b/drivers/gpu/drm/vc4/vc4_kms.c
@@ -476,7 +476,7 @@ static struct drm_framebuffer *vc4_fb_cr
struct vc4_dev *vc4 = to_vc4_dev(dev);
struct drm_mode_fb_cmd2 mode_cmd_local;
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
return ERR_PTR(-ENODEV);
/* If the user didn't specify a modifier, use the
--- a/drivers/gpu/drm/vc4/vc4_perfmon.c
+++ b/drivers/gpu/drm/vc4/vc4_perfmon.c
@@ -23,7 +23,7 @@ void vc4_perfmon_get(struct vc4_perfmon
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
refcount_inc(&perfmon->refcnt);
@@ -37,7 +37,7 @@ void vc4_perfmon_put(struct vc4_perfmon
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
if (refcount_dec_and_test(&perfmon->refcnt))
@@ -49,7 +49,7 @@ void vc4_perfmon_start(struct vc4_dev *v
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
if (WARN_ON_ONCE(!perfmon || vc4->active_perfmon))
@@ -69,7 +69,7 @@ void vc4_perfmon_stop(struct vc4_dev *vc
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
if (WARN_ON_ONCE(!vc4->active_perfmon ||
@@ -90,7 +90,7 @@ struct vc4_perfmon *vc4_perfmon_find(str
struct vc4_dev *vc4 = vc4file->dev;
struct vc4_perfmon *perfmon;
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
mutex_lock(&vc4file->perfmon.lock);
@@ -105,7 +105,7 @@ void vc4_perfmon_open_file(struct vc4_fi
struct vc4_dev *vc4 = vc4file->dev;
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
mutex_init(&vc4file->perfmon.lock);
@@ -126,7 +126,7 @@ void vc4_perfmon_close_file(struct vc4_f
struct vc4_dev *vc4 = vc4file->dev;
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
mutex_lock(&vc4file->perfmon.lock);
@@ -146,7 +146,7 @@ int vc4_perfmon_create_ioctl(struct drm_
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
@@ -200,7 +200,7 @@ int vc4_perfmon_destroy_ioctl(struct drm
struct drm_vc4_perfmon_destroy *req = data;
struct vc4_perfmon *perfmon;
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
@@ -228,7 +228,7 @@ int vc4_perfmon_get_values_ioctl(struct
struct vc4_perfmon *perfmon;
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
--- a/drivers/gpu/drm/vc4/vc4_render_cl.c
+++ b/drivers/gpu/drm/vc4/vc4_render_cl.c
@@ -599,7 +599,7 @@ int vc4_get_rcl(struct drm_device *dev,
bool has_bin = args->bin_cl_size != 0;
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
if (args->min_x_tile > args->max_x_tile ||
--- a/drivers/gpu/drm/vc4/vc4_v3d.c
+++ b/drivers/gpu/drm/vc4/vc4_v3d.c
@@ -127,7 +127,7 @@ static int vc4_v3d_debugfs_ident(struct
vc4_v3d_pm_get(struct vc4_dev *vc4)
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
mutex_lock(&vc4->power_lock);
@@ -148,7 +148,7 @@ vc4_v3d_pm_get(struct vc4_dev *vc4)
vc4_v3d_pm_put(struct vc4_dev *vc4)
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
mutex_lock(&vc4->power_lock);
@@ -178,7 +178,7 @@ int vc4_v3d_get_bin_slot(struct vc4_dev
struct vc4_exec_info *exec;
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
@@ -325,7 +325,7 @@ int vc4_v3d_bin_bo_get(struct vc4_dev *v
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
mutex_lock(&vc4->bin_bo_lock);
@@ -360,7 +360,7 @@ static void bin_bo_release(struct kref *
void vc4_v3d_bin_bo_put(struct vc4_dev *vc4)
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
mutex_lock(&vc4->bin_bo_lock);
--- a/drivers/gpu/drm/vc4/vc4_validate.c
+++ b/drivers/gpu/drm/vc4/vc4_validate.c
@@ -109,7 +109,7 @@ vc4_use_bo(struct vc4_exec_info *exec, u
struct drm_gem_dma_object *obj;
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
if (hindex >= exec->bo_count) {
@@ -169,7 +169,7 @@ vc4_check_tex_size(struct vc4_exec_info
uint32_t utile_w = utile_width(cpp);
uint32_t utile_h = utile_height(cpp);
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
/* The shaded vertex format stores signed 12.4 fixed point
@@ -495,7 +495,7 @@ vc4_validate_bin_cl(struct drm_device *d
uint32_t dst_offset = 0;
uint32_t src_offset = 0;
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
while (src_offset < len) {
@@ -942,7 +942,7 @@ vc4_validate_shader_recs(struct drm_devi
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
for (i = 0; i < exec->shader_state_count; i++) {
--- a/drivers/gpu/drm/vc4/vc4_validate_shaders.c
+++ b/drivers/gpu/drm/vc4/vc4_validate_shaders.c
@@ -786,7 +786,7 @@ vc4_validate_shader(struct drm_gem_dma_o
struct vc4_validated_shader_info *validated_shader = NULL;
struct vc4_shader_validation_state validation_state;
- if (WARN_ON_ONCE(vc4->gen == VC4_GEN_5))
+ if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
memset(&validation_state, 0, sizeof(validation_state));