1 From 9556f5777a8961dec3259f4c09c267ca5c77c90a Mon Sep 17 00:00:00 2001
2 From: Dave Stevenson <dave.stevenson@raspberrypi.com>
3 Date: Fri, 15 Oct 2021 17:57:27 +0100
4 Subject: [PATCH] media/bcm2835-unicam: Add support for configuration
7 Adds Media Controller API support for more complex pipelines.
8 libcamera is about to switch to using this mechanism for configuring
11 This can be enabled by either a module parameter, or device tree.
13 Various functions have been moved to group video-centric and
14 mc-centric functions together.
16 Based on a similar conversion done to ti-vpe.
18 Signed-off-by: Dave Stevenson <dave.stevenson@raspberrypi.com>
20 media: bcm2835-unicam: Fixup for 5.18 and new get_mbus_config struct
22 The number of active CSI2 data lanes has moved within the struct
23 v4l2_mbus_config used by the get_mbus_config API call.
24 Update the driver to match the changes in mainline.
26 Signed-off-by: Dave Stevenson <dave.stevenson@raspberrypi.com>
28 .../media/platform/bcm2835/bcm2835-unicam.c | 2111 ++++++++++-------
29 1 file changed, 1306 insertions(+), 805 deletions(-)
31 --- a/drivers/media/platform/bcm2835/bcm2835-unicam.c
32 +++ b/drivers/media/platform/bcm2835/bcm2835-unicam.c
33 @@ -83,6 +83,10 @@ static int debug;
34 module_param(debug, int, 0644);
35 MODULE_PARM_DESC(debug, "Debug level 0-3");
37 +static int media_controller;
38 +module_param(media_controller, int, 0644);
39 +MODULE_PARM_DESC(media_controller, "Use media controller API");
41 #define unicam_dbg(level, dev, fmt, arg...) \
42 v4l2_dbg(level, debug, &(dev)->v4l2_dev, fmt, ##arg)
43 #define unicam_info(dev, fmt, arg...) \
44 @@ -119,7 +123,7 @@ MODULE_PARM_DESC(debug, "Debug level 0-3
47 /* Default size of the embedded buffer */
48 -#define UNICAM_EMBEDDED_SIZE 8192
49 +#define UNICAM_EMBEDDED_SIZE 16384
52 * Size of the dummy buffer. Can be any size really, but the DMA
53 @@ -133,6 +137,22 @@ enum pad_types {
57 +#define MASK_CS_DEFAULT BIT(V4L2_COLORSPACE_DEFAULT)
58 +#define MASK_CS_SMPTE170M BIT(V4L2_COLORSPACE_SMPTE170M)
59 +#define MASK_CS_SMPTE240M BIT(V4L2_COLORSPACE_SMPTE240M)
60 +#define MASK_CS_REC709 BIT(V4L2_COLORSPACE_REC709)
61 +#define MASK_CS_BT878 BIT(V4L2_COLORSPACE_BT878)
62 +#define MASK_CS_470_M BIT(V4L2_COLORSPACE_470_SYSTEM_M)
63 +#define MASK_CS_470_BG BIT(V4L2_COLORSPACE_470_SYSTEM_BG)
64 +#define MASK_CS_JPEG BIT(V4L2_COLORSPACE_JPEG)
65 +#define MASK_CS_SRGB BIT(V4L2_COLORSPACE_SRGB)
66 +#define MASK_CS_OPRGB BIT(V4L2_COLORSPACE_OPRGB)
67 +#define MASK_CS_BT2020 BIT(V4L2_COLORSPACE_BT2020)
68 +#define MASK_CS_RAW BIT(V4L2_COLORSPACE_RAW)
69 +#define MASK_CS_DCI_P3 BIT(V4L2_COLORSPACE_DCI_P3)
71 +#define MAX_COLORSPACE 32
74 * struct unicam_fmt - Unicam media bus format information
75 * @pixelformat: V4L2 pixel format FCC identifier. 0 if n/a.
76 @@ -141,8 +161,14 @@ enum pad_types {
77 * @code: V4L2 media bus format code.
78 * @depth: Bits per pixel as delivered from the source.
79 * @csi_dt: CSI data type.
80 + * @valid_colorspaces: Bitmask of valid colorspaces so that the Media Controller
81 + * centric try_fmt can validate the colorspace and pass
83 * @check_variants: Flag to denote that there are multiple mediabus formats
84 * still in the list that could match this V4L2 format.
85 + * @mc_skip: Media Controller shouldn't list this format via ENUM_FMT as it is
86 + * a duplicate of an earlier format.
87 + * @metadata_fmt: This format only applies to the metadata pad.
91 @@ -150,7 +176,10 @@ struct unicam_fmt {
96 + u32 valid_colorspaces;
97 + u8 check_variants:1;
102 static const struct unicam_fmt formats[] = {
103 @@ -161,173 +190,216 @@ static const struct unicam_fmt formats[]
107 + .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
110 .fourcc = V4L2_PIX_FMT_UYVY,
111 .code = MEDIA_BUS_FMT_UYVY8_2X8,
115 + .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
118 .fourcc = V4L2_PIX_FMT_YVYU,
119 .code = MEDIA_BUS_FMT_YVYU8_2X8,
123 + .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
126 .fourcc = V4L2_PIX_FMT_VYUY,
127 .code = MEDIA_BUS_FMT_VYUY8_2X8,
131 + .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
134 .fourcc = V4L2_PIX_FMT_YUYV,
135 .code = MEDIA_BUS_FMT_YUYV8_1X16,
139 + .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
142 .fourcc = V4L2_PIX_FMT_UYVY,
143 .code = MEDIA_BUS_FMT_UYVY8_1X16,
147 + .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
150 .fourcc = V4L2_PIX_FMT_YVYU,
151 .code = MEDIA_BUS_FMT_YVYU8_1X16,
155 + .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
158 .fourcc = V4L2_PIX_FMT_VYUY,
159 .code = MEDIA_BUS_FMT_VYUY8_1X16,
163 + .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
167 .fourcc = V4L2_PIX_FMT_RGB565, /* gggbbbbb rrrrrggg */
168 .code = MEDIA_BUS_FMT_RGB565_2X8_LE,
171 + .valid_colorspaces = MASK_CS_SRGB,
173 .fourcc = V4L2_PIX_FMT_RGB565X, /* rrrrrggg gggbbbbb */
174 .code = MEDIA_BUS_FMT_RGB565_2X8_BE,
178 + .valid_colorspaces = MASK_CS_SRGB,
180 .fourcc = V4L2_PIX_FMT_RGB555, /* gggbbbbb arrrrrgg */
181 .code = MEDIA_BUS_FMT_RGB555_2X8_PADHI_LE,
184 + .valid_colorspaces = MASK_CS_SRGB,
186 .fourcc = V4L2_PIX_FMT_RGB555X, /* arrrrrgg gggbbbbb */
187 .code = MEDIA_BUS_FMT_RGB555_2X8_PADHI_BE,
190 + .valid_colorspaces = MASK_CS_SRGB,
192 .fourcc = V4L2_PIX_FMT_RGB24, /* rgb */
193 .code = MEDIA_BUS_FMT_RGB888_1X24,
196 + .valid_colorspaces = MASK_CS_SRGB,
198 .fourcc = V4L2_PIX_FMT_BGR24, /* bgr */
199 .code = MEDIA_BUS_FMT_BGR888_1X24,
202 + .valid_colorspaces = MASK_CS_SRGB,
204 .fourcc = V4L2_PIX_FMT_RGB32, /* argb */
205 .code = MEDIA_BUS_FMT_ARGB8888_1X32,
208 + .valid_colorspaces = MASK_CS_SRGB,
211 .fourcc = V4L2_PIX_FMT_SBGGR8,
212 .code = MEDIA_BUS_FMT_SBGGR8_1X8,
215 + .valid_colorspaces = MASK_CS_RAW,
217 .fourcc = V4L2_PIX_FMT_SGBRG8,
218 .code = MEDIA_BUS_FMT_SGBRG8_1X8,
221 + .valid_colorspaces = MASK_CS_RAW,
223 .fourcc = V4L2_PIX_FMT_SGRBG8,
224 .code = MEDIA_BUS_FMT_SGRBG8_1X8,
227 + .valid_colorspaces = MASK_CS_RAW,
229 .fourcc = V4L2_PIX_FMT_SRGGB8,
230 .code = MEDIA_BUS_FMT_SRGGB8_1X8,
233 + .valid_colorspaces = MASK_CS_RAW,
235 .fourcc = V4L2_PIX_FMT_SBGGR10P,
236 .repacked_fourcc = V4L2_PIX_FMT_SBGGR10,
237 .code = MEDIA_BUS_FMT_SBGGR10_1X10,
240 + .valid_colorspaces = MASK_CS_RAW,
242 .fourcc = V4L2_PIX_FMT_SGBRG10P,
243 .repacked_fourcc = V4L2_PIX_FMT_SGBRG10,
244 .code = MEDIA_BUS_FMT_SGBRG10_1X10,
247 + .valid_colorspaces = MASK_CS_RAW,
249 .fourcc = V4L2_PIX_FMT_SGRBG10P,
250 .repacked_fourcc = V4L2_PIX_FMT_SGRBG10,
251 .code = MEDIA_BUS_FMT_SGRBG10_1X10,
254 + .valid_colorspaces = MASK_CS_RAW,
256 .fourcc = V4L2_PIX_FMT_SRGGB10P,
257 .repacked_fourcc = V4L2_PIX_FMT_SRGGB10,
258 .code = MEDIA_BUS_FMT_SRGGB10_1X10,
261 + .valid_colorspaces = MASK_CS_RAW,
263 .fourcc = V4L2_PIX_FMT_SBGGR12P,
264 .repacked_fourcc = V4L2_PIX_FMT_SBGGR12,
265 .code = MEDIA_BUS_FMT_SBGGR12_1X12,
268 + .valid_colorspaces = MASK_CS_RAW,
270 .fourcc = V4L2_PIX_FMT_SGBRG12P,
271 .repacked_fourcc = V4L2_PIX_FMT_SGBRG12,
272 .code = MEDIA_BUS_FMT_SGBRG12_1X12,
275 + .valid_colorspaces = MASK_CS_RAW,
277 .fourcc = V4L2_PIX_FMT_SGRBG12P,
278 .repacked_fourcc = V4L2_PIX_FMT_SGRBG12,
279 .code = MEDIA_BUS_FMT_SGRBG12_1X12,
282 + .valid_colorspaces = MASK_CS_RAW,
284 .fourcc = V4L2_PIX_FMT_SRGGB12P,
285 .repacked_fourcc = V4L2_PIX_FMT_SRGGB12,
286 .code = MEDIA_BUS_FMT_SRGGB12_1X12,
289 + .valid_colorspaces = MASK_CS_RAW,
291 .fourcc = V4L2_PIX_FMT_SBGGR14P,
292 .repacked_fourcc = V4L2_PIX_FMT_SBGGR14,
293 .code = MEDIA_BUS_FMT_SBGGR14_1X14,
296 + .valid_colorspaces = MASK_CS_RAW,
298 .fourcc = V4L2_PIX_FMT_SGBRG14P,
299 .repacked_fourcc = V4L2_PIX_FMT_SGBRG14,
300 .code = MEDIA_BUS_FMT_SGBRG14_1X14,
303 + .valid_colorspaces = MASK_CS_RAW,
305 .fourcc = V4L2_PIX_FMT_SGRBG14P,
306 .repacked_fourcc = V4L2_PIX_FMT_SGRBG14,
307 .code = MEDIA_BUS_FMT_SGRBG14_1X14,
310 + .valid_colorspaces = MASK_CS_RAW,
312 .fourcc = V4L2_PIX_FMT_SRGGB14P,
313 .repacked_fourcc = V4L2_PIX_FMT_SRGGB14,
314 .code = MEDIA_BUS_FMT_SRGGB14_1X14,
317 + .valid_colorspaces = MASK_CS_RAW,
320 * 16 bit Bayer formats could be supported, but there is no CSI2
321 @@ -340,30 +412,35 @@ static const struct unicam_fmt formats[]
322 .code = MEDIA_BUS_FMT_Y8_1X8,
325 + .valid_colorspaces = MASK_CS_RAW,
327 .fourcc = V4L2_PIX_FMT_Y10P,
328 .repacked_fourcc = V4L2_PIX_FMT_Y10,
329 .code = MEDIA_BUS_FMT_Y10_1X10,
332 + .valid_colorspaces = MASK_CS_RAW,
334 .fourcc = V4L2_PIX_FMT_Y12P,
335 .repacked_fourcc = V4L2_PIX_FMT_Y12,
336 .code = MEDIA_BUS_FMT_Y12_1X12,
339 + .valid_colorspaces = MASK_CS_RAW,
341 .fourcc = V4L2_PIX_FMT_Y14P,
342 .repacked_fourcc = V4L2_PIX_FMT_Y14,
343 .code = MEDIA_BUS_FMT_Y14_1X14,
346 + .valid_colorspaces = MASK_CS_RAW,
348 /* Embedded data format */
350 .fourcc = V4L2_META_FMT_SENSOR_DATA,
351 .code = MEDIA_BUS_FMT_SENSOR_DATA,
357 @@ -408,6 +485,7 @@ struct unicam_node {
358 struct unicam_device *dev;
359 struct media_pad pad;
360 unsigned int embedded_lines;
361 + struct media_pipeline pipe;
363 * Dummy buffer intended to be used by unicam
364 * if we have no other queued buffers to swap to.
365 @@ -459,6 +537,8 @@ struct unicam_device {
367 struct unicam_node node[MAX_NODES];
368 struct v4l2_ctrl_handler ctrl_handler;
373 static inline struct unicam_device *
374 @@ -908,6 +988,7 @@ static irqreturn_t unicam_isr(int irq, v
378 +/* V4L2 Common IOCTLs */
379 static int unicam_querycap(struct file *file, void *priv,
380 struct v4l2_capability *cap)
382 @@ -925,6 +1006,38 @@ static int unicam_querycap(struct file *
386 +static int unicam_log_status(struct file *file, void *fh)
388 + struct unicam_node *node = video_drvdata(file);
389 + struct unicam_device *dev = node->dev;
392 + /* status for sub devices */
393 + v4l2_device_call_all(&dev->v4l2_dev, 0, core, log_status);
395 + unicam_info(dev, "-----Receiver status-----\n");
396 + unicam_info(dev, "V4L2 width/height: %ux%u\n",
397 + node->v_fmt.fmt.pix.width, node->v_fmt.fmt.pix.height);
398 + unicam_info(dev, "Mediabus format: %08x\n", node->fmt->code);
399 + unicam_info(dev, "V4L2 format: %08x\n",
400 + node->v_fmt.fmt.pix.pixelformat);
401 + reg = reg_read(dev, UNICAM_IPIPE);
402 + unicam_info(dev, "Unpacking/packing: %u / %u\n",
403 + get_field(reg, UNICAM_PUM_MASK),
404 + get_field(reg, UNICAM_PPM_MASK));
405 + unicam_info(dev, "----Live data----\n");
406 + unicam_info(dev, "Programmed stride: %4u\n",
407 + reg_read(dev, UNICAM_IBLS));
408 + unicam_info(dev, "Detected resolution: %ux%u\n",
409 + reg_read(dev, UNICAM_IHSTA),
410 + reg_read(dev, UNICAM_IVSTA));
411 + unicam_info(dev, "Write pointer: %08x\n",
412 + reg_read(dev, UNICAM_IBWP));
417 +/* V4L2 Video Centric IOCTLs */
418 static int unicam_enum_fmt_vid_cap(struct file *file, void *priv,
419 struct v4l2_fmtdesc *f)
421 @@ -1269,6 +1382,727 @@ static int unicam_g_fmt_meta_cap(struct
425 +static int unicam_enum_input(struct file *file, void *priv,
426 + struct v4l2_input *inp)
428 + struct unicam_node *node = video_drvdata(file);
429 + struct unicam_device *dev = node->dev;
432 + if (inp->index != 0)
435 + inp->type = V4L2_INPUT_TYPE_CAMERA;
436 + if (v4l2_subdev_has_op(dev->sensor, video, s_dv_timings)) {
437 + inp->capabilities = V4L2_IN_CAP_DV_TIMINGS;
439 + } else if (v4l2_subdev_has_op(dev->sensor, video, s_std)) {
440 + inp->capabilities = V4L2_IN_CAP_STD;
441 + if (v4l2_subdev_call(dev->sensor, video, g_tvnorms, &inp->std) < 0)
442 + inp->std = V4L2_STD_ALL;
444 + inp->capabilities = 0;
448 + if (v4l2_subdev_has_op(dev->sensor, video, g_input_status)) {
449 + ret = v4l2_subdev_call(dev->sensor, video, g_input_status,
455 + snprintf(inp->name, sizeof(inp->name), "Camera 0");
459 +static int unicam_g_input(struct file *file, void *priv, unsigned int *i)
466 +static int unicam_s_input(struct file *file, void *priv, unsigned int i)
469 + * FIXME: Ideally we would like to be able to query the source
470 + * subdevice for information over the input connectors it supports,
471 + * and map that through in to a call to video_ops->s_routing.
472 + * There is no infrastructure support for defining that within
473 + * devicetree at present. Until that is implemented we can't
474 + * map a user physical connector number to s_routing input number.
482 +static int unicam_querystd(struct file *file, void *priv,
485 + struct unicam_node *node = video_drvdata(file);
486 + struct unicam_device *dev = node->dev;
488 + return v4l2_subdev_call(dev->sensor, video, querystd, std);
491 +static int unicam_g_std(struct file *file, void *priv, v4l2_std_id *std)
493 + struct unicam_node *node = video_drvdata(file);
494 + struct unicam_device *dev = node->dev;
496 + return v4l2_subdev_call(dev->sensor, video, g_std, std);
499 +static int unicam_s_std(struct file *file, void *priv, v4l2_std_id std)
501 + struct unicam_node *node = video_drvdata(file);
502 + struct unicam_device *dev = node->dev;
504 + v4l2_std_id current_std;
506 + ret = v4l2_subdev_call(dev->sensor, video, g_std, &current_std);
510 + if (std == current_std)
513 + if (vb2_is_busy(&node->buffer_queue))
516 + ret = v4l2_subdev_call(dev->sensor, video, s_std, std);
518 + /* Force recomputation of bytesperline */
519 + node->v_fmt.fmt.pix.bytesperline = 0;
521 + unicam_reset_format(node);
526 +static int unicam_s_edid(struct file *file, void *priv, struct v4l2_edid *edid)
528 + struct unicam_node *node = video_drvdata(file);
529 + struct unicam_device *dev = node->dev;
531 + return v4l2_subdev_call(dev->sensor, pad, set_edid, edid);
534 +static int unicam_g_edid(struct file *file, void *priv, struct v4l2_edid *edid)
536 + struct unicam_node *node = video_drvdata(file);
537 + struct unicam_device *dev = node->dev;
539 + return v4l2_subdev_call(dev->sensor, pad, get_edid, edid);
542 +static int unicam_s_selection(struct file *file, void *priv,
543 + struct v4l2_selection *sel)
545 + struct unicam_node *node = video_drvdata(file);
546 + struct unicam_device *dev = node->dev;
547 + struct v4l2_subdev_selection sdsel = {
548 + .which = V4L2_SUBDEV_FORMAT_ACTIVE,
549 + .target = sel->target,
550 + .flags = sel->flags,
554 + if (sel->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
557 + return v4l2_subdev_call(dev->sensor, pad, set_selection, NULL, &sdsel);
560 +static int unicam_g_selection(struct file *file, void *priv,
561 + struct v4l2_selection *sel)
563 + struct unicam_node *node = video_drvdata(file);
564 + struct unicam_device *dev = node->dev;
565 + struct v4l2_subdev_selection sdsel = {
566 + .which = V4L2_SUBDEV_FORMAT_ACTIVE,
567 + .target = sel->target,
571 + if (sel->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
574 + ret = v4l2_subdev_call(dev->sensor, pad, get_selection, NULL, &sdsel);
581 +static int unicam_enum_framesizes(struct file *file, void *priv,
582 + struct v4l2_frmsizeenum *fsize)
584 + struct unicam_node *node = video_drvdata(file);
585 + struct unicam_device *dev = node->dev;
586 + const struct unicam_fmt *fmt;
587 + struct v4l2_subdev_frame_size_enum fse;
590 + /* check for valid format */
591 + fmt = find_format_by_pix(dev, fsize->pixel_format);
593 + unicam_dbg(3, dev, "Invalid pixel code: %x\n",
594 + fsize->pixel_format);
597 + fse.code = fmt->code;
599 + fse.which = V4L2_SUBDEV_FORMAT_ACTIVE;
600 + fse.index = fsize->index;
601 + fse.pad = node->src_pad_id;
603 + ret = v4l2_subdev_call(dev->sensor, pad, enum_frame_size, NULL, &fse);
607 + unicam_dbg(1, dev, "%s: index: %d code: %x W:[%d,%d] H:[%d,%d]\n",
608 + __func__, fse.index, fse.code, fse.min_width, fse.max_width,
609 + fse.min_height, fse.max_height);
611 + fsize->type = V4L2_FRMSIZE_TYPE_DISCRETE;
612 + fsize->discrete.width = fse.max_width;
613 + fsize->discrete.height = fse.max_height;
618 +static int unicam_enum_frameintervals(struct file *file, void *priv,
619 + struct v4l2_frmivalenum *fival)
621 + struct unicam_node *node = video_drvdata(file);
622 + struct unicam_device *dev = node->dev;
623 + const struct unicam_fmt *fmt;
624 + struct v4l2_subdev_frame_interval_enum fie = {
625 + .index = fival->index,
626 + .pad = node->src_pad_id,
627 + .width = fival->width,
628 + .height = fival->height,
629 + .which = V4L2_SUBDEV_FORMAT_ACTIVE,
633 + fmt = find_format_by_pix(dev, fival->pixel_format);
637 + fie.code = fmt->code;
638 + ret = v4l2_subdev_call(dev->sensor, pad, enum_frame_interval,
643 + fival->type = V4L2_FRMIVAL_TYPE_DISCRETE;
644 + fival->discrete = fie.interval;
649 +static int unicam_g_parm(struct file *file, void *fh, struct v4l2_streamparm *a)
651 + struct unicam_node *node = video_drvdata(file);
652 + struct unicam_device *dev = node->dev;
654 + return v4l2_g_parm_cap(video_devdata(file), dev->sensor, a);
657 +static int unicam_s_parm(struct file *file, void *fh, struct v4l2_streamparm *a)
659 + struct unicam_node *node = video_drvdata(file);
660 + struct unicam_device *dev = node->dev;
662 + return v4l2_s_parm_cap(video_devdata(file), dev->sensor, a);
665 +static int unicam_g_dv_timings(struct file *file, void *priv,
666 + struct v4l2_dv_timings *timings)
668 + struct unicam_node *node = video_drvdata(file);
669 + struct unicam_device *dev = node->dev;
671 + return v4l2_subdev_call(dev->sensor, video, g_dv_timings, timings);
674 +static int unicam_s_dv_timings(struct file *file, void *priv,
675 + struct v4l2_dv_timings *timings)
677 + struct unicam_node *node = video_drvdata(file);
678 + struct unicam_device *dev = node->dev;
679 + struct v4l2_dv_timings current_timings;
682 + ret = v4l2_subdev_call(dev->sensor, video, g_dv_timings,
688 + if (v4l2_match_dv_timings(timings, &current_timings, 0, false))
691 + if (vb2_is_busy(&node->buffer_queue))
694 + ret = v4l2_subdev_call(dev->sensor, video, s_dv_timings, timings);
696 + /* Force recomputation of bytesperline */
697 + node->v_fmt.fmt.pix.bytesperline = 0;
699 + unicam_reset_format(node);
704 +static int unicam_query_dv_timings(struct file *file, void *priv,
705 + struct v4l2_dv_timings *timings)
707 + struct unicam_node *node = video_drvdata(file);
708 + struct unicam_device *dev = node->dev;
710 + return v4l2_subdev_call(dev->sensor, video, query_dv_timings, timings);
713 +static int unicam_enum_dv_timings(struct file *file, void *priv,
714 + struct v4l2_enum_dv_timings *timings)
716 + struct unicam_node *node = video_drvdata(file);
717 + struct unicam_device *dev = node->dev;
720 + timings->pad = node->src_pad_id;
721 + ret = v4l2_subdev_call(dev->sensor, pad, enum_dv_timings, timings);
722 + timings->pad = node->pad_id;
727 +static int unicam_dv_timings_cap(struct file *file, void *priv,
728 + struct v4l2_dv_timings_cap *cap)
730 + struct unicam_node *node = video_drvdata(file);
731 + struct unicam_device *dev = node->dev;
734 + cap->pad = node->src_pad_id;
735 + ret = v4l2_subdev_call(dev->sensor, pad, dv_timings_cap, cap);
736 + cap->pad = node->pad_id;
741 +static int unicam_subscribe_event(struct v4l2_fh *fh,
742 + const struct v4l2_event_subscription *sub)
744 + switch (sub->type) {
745 + case V4L2_EVENT_FRAME_SYNC:
746 + return v4l2_event_subscribe(fh, sub, 2, NULL);
747 + case V4L2_EVENT_SOURCE_CHANGE:
748 + return v4l2_event_subscribe(fh, sub, 4, NULL);
751 + return v4l2_ctrl_subscribe_event(fh, sub);
754 +static void unicam_notify(struct v4l2_subdev *sd,
755 + unsigned int notification, void *arg)
757 + struct unicam_device *dev = to_unicam_device(sd->v4l2_dev);
759 + switch (notification) {
760 + case V4L2_DEVICE_NOTIFY_EVENT:
761 + v4l2_event_queue(&dev->node[IMAGE_PAD].video_dev, arg);
768 +/* unicam capture ioctl operations */
769 +static const struct v4l2_ioctl_ops unicam_ioctl_ops = {
770 + .vidioc_querycap = unicam_querycap,
771 + .vidioc_enum_fmt_vid_cap = unicam_enum_fmt_vid_cap,
772 + .vidioc_g_fmt_vid_cap = unicam_g_fmt_vid_cap,
773 + .vidioc_s_fmt_vid_cap = unicam_s_fmt_vid_cap,
774 + .vidioc_try_fmt_vid_cap = unicam_try_fmt_vid_cap,
776 + .vidioc_enum_fmt_meta_cap = unicam_enum_fmt_meta_cap,
777 + .vidioc_g_fmt_meta_cap = unicam_g_fmt_meta_cap,
778 + .vidioc_s_fmt_meta_cap = unicam_g_fmt_meta_cap,
779 + .vidioc_try_fmt_meta_cap = unicam_g_fmt_meta_cap,
781 + .vidioc_enum_input = unicam_enum_input,
782 + .vidioc_g_input = unicam_g_input,
783 + .vidioc_s_input = unicam_s_input,
785 + .vidioc_querystd = unicam_querystd,
786 + .vidioc_s_std = unicam_s_std,
787 + .vidioc_g_std = unicam_g_std,
789 + .vidioc_g_edid = unicam_g_edid,
790 + .vidioc_s_edid = unicam_s_edid,
792 + .vidioc_enum_framesizes = unicam_enum_framesizes,
793 + .vidioc_enum_frameintervals = unicam_enum_frameintervals,
795 + .vidioc_g_selection = unicam_g_selection,
796 + .vidioc_s_selection = unicam_s_selection,
798 + .vidioc_g_parm = unicam_g_parm,
799 + .vidioc_s_parm = unicam_s_parm,
801 + .vidioc_s_dv_timings = unicam_s_dv_timings,
802 + .vidioc_g_dv_timings = unicam_g_dv_timings,
803 + .vidioc_query_dv_timings = unicam_query_dv_timings,
804 + .vidioc_enum_dv_timings = unicam_enum_dv_timings,
805 + .vidioc_dv_timings_cap = unicam_dv_timings_cap,
807 + .vidioc_reqbufs = vb2_ioctl_reqbufs,
808 + .vidioc_create_bufs = vb2_ioctl_create_bufs,
809 + .vidioc_prepare_buf = vb2_ioctl_prepare_buf,
810 + .vidioc_querybuf = vb2_ioctl_querybuf,
811 + .vidioc_qbuf = vb2_ioctl_qbuf,
812 + .vidioc_dqbuf = vb2_ioctl_dqbuf,
813 + .vidioc_expbuf = vb2_ioctl_expbuf,
814 + .vidioc_streamon = vb2_ioctl_streamon,
815 + .vidioc_streamoff = vb2_ioctl_streamoff,
817 + .vidioc_log_status = unicam_log_status,
818 + .vidioc_subscribe_event = unicam_subscribe_event,
819 + .vidioc_unsubscribe_event = v4l2_event_unsubscribe,
822 +/* V4L2 Media Controller Centric IOCTLs */
824 +static int unicam_mc_enum_fmt_vid_cap(struct file *file, void *priv,
825 + struct v4l2_fmtdesc *f)
829 + for (i = 0, j = 0; i < ARRAY_SIZE(formats); i++) {
830 + if (f->mbus_code && formats[i].code != f->mbus_code)
832 + if (formats[i].mc_skip || formats[i].metadata_fmt)
835 + if (formats[i].fourcc) {
836 + if (j == f->index) {
837 + f->pixelformat = formats[i].fourcc;
838 + f->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
843 + if (formats[i].repacked_fourcc) {
844 + if (j == f->index) {
845 + f->pixelformat = formats[i].repacked_fourcc;
846 + f->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
856 +static int unicam_mc_g_fmt_vid_cap(struct file *file, void *priv,
857 + struct v4l2_format *f)
859 + struct unicam_node *node = video_drvdata(file);
861 + if (node->pad_id != IMAGE_PAD)
869 +static void unicam_mc_try_fmt(struct unicam_node *node, struct v4l2_format *f,
870 + const struct unicam_fmt **ret_fmt)
872 + struct v4l2_pix_format *v4l2_format = &f->fmt.pix;
873 + struct unicam_device *dev = node->dev;
874 + const struct unicam_fmt *fmt;
878 + * Default to the first format if the requested pixel format code isn't
881 + fmt = find_format_by_pix(dev, v4l2_format->pixelformat);
884 + v4l2_format->pixelformat = fmt->fourcc;
887 + unicam_calc_format_size_bpl(dev, fmt, f);
889 + if (v4l2_format->field == V4L2_FIELD_ANY)
890 + v4l2_format->field = V4L2_FIELD_NONE;
895 + if (v4l2_format->colorspace >= MAX_COLORSPACE ||
896 + !(fmt->valid_colorspaces & (1 << v4l2_format->colorspace))) {
897 + v4l2_format->colorspace = __ffs(fmt->valid_colorspaces);
899 + v4l2_format->xfer_func =
900 + V4L2_MAP_XFER_FUNC_DEFAULT(v4l2_format->colorspace);
901 + v4l2_format->ycbcr_enc =
902 + V4L2_MAP_YCBCR_ENC_DEFAULT(v4l2_format->colorspace);
903 + is_rgb = v4l2_format->colorspace == V4L2_COLORSPACE_SRGB;
904 + v4l2_format->quantization =
905 + V4L2_MAP_QUANTIZATION_DEFAULT(is_rgb,
906 + v4l2_format->colorspace,
907 + v4l2_format->ycbcr_enc);
910 + unicam_dbg(3, dev, "%s: %08x %ux%u (bytesperline %u sizeimage %u)\n",
911 + __func__, v4l2_format->pixelformat,
912 + v4l2_format->width, v4l2_format->height,
913 + v4l2_format->bytesperline, v4l2_format->sizeimage);
916 +static int unicam_mc_try_fmt_vid_cap(struct file *file, void *priv,
917 + struct v4l2_format *f)
919 + struct unicam_node *node = video_drvdata(file);
921 + unicam_mc_try_fmt(node, f, NULL);
925 +static int unicam_mc_s_fmt_vid_cap(struct file *file, void *priv,
926 + struct v4l2_format *f)
928 + struct unicam_node *node = video_drvdata(file);
929 + struct unicam_device *dev = node->dev;
930 + const struct unicam_fmt *fmt;
932 + if (vb2_is_busy(&node->buffer_queue)) {
933 + unicam_dbg(3, dev, "%s device busy\n", __func__);
937 + unicam_mc_try_fmt(node, f, &fmt);
945 +static int unicam_mc_enum_framesizes(struct file *file, void *fh,
946 + struct v4l2_frmsizeenum *fsize)
948 + struct unicam_node *node = video_drvdata(file);
949 + struct unicam_device *dev = node->dev;
951 + if (fsize->index > 0)
954 + if (!find_format_by_pix(dev, fsize->pixel_format)) {
955 + unicam_dbg(3, dev, "Invalid pixel format 0x%08x\n",
956 + fsize->pixel_format);
960 + fsize->type = V4L2_FRMSIZE_TYPE_STEPWISE;
961 + fsize->stepwise.min_width = MIN_WIDTH;
962 + fsize->stepwise.max_width = MAX_WIDTH;
963 + fsize->stepwise.step_width = 1;
964 + fsize->stepwise.min_height = MIN_HEIGHT;
965 + fsize->stepwise.max_height = MAX_HEIGHT;
966 + fsize->stepwise.step_height = 1;
971 +static int unicam_mc_enum_fmt_meta_cap(struct file *file, void *priv,
972 + struct v4l2_fmtdesc *f)
976 + for (i = 0, j = 0; i < ARRAY_SIZE(formats); i++) {
977 + if (f->mbus_code && formats[i].code != f->mbus_code)
979 + if (!formats[i].metadata_fmt)
982 + if (formats[i].fourcc) {
983 + if (j == f->index) {
984 + f->pixelformat = formats[i].fourcc;
985 + f->type = V4L2_BUF_TYPE_META_CAPTURE;
995 +static int unicam_mc_g_fmt_meta_cap(struct file *file, void *priv,
996 + struct v4l2_format *f)
998 + struct unicam_node *node = video_drvdata(file);
1000 + if (node->pad_id != METADATA_PAD)
1008 +static int unicam_mc_try_fmt_meta_cap(struct file *file, void *priv,
1009 + struct v4l2_format *f)
1011 + struct unicam_node *node = video_drvdata(file);
1013 + if (node->pad_id != METADATA_PAD)
1016 + f->fmt.meta.dataformat = V4L2_META_FMT_SENSOR_DATA;
1021 +static int unicam_mc_s_fmt_meta_cap(struct file *file, void *priv,
1022 + struct v4l2_format *f)
1024 + struct unicam_node *node = video_drvdata(file);
1026 + if (node->pad_id != METADATA_PAD)
1029 + unicam_mc_try_fmt_meta_cap(file, priv, f);
1036 +static const struct v4l2_ioctl_ops unicam_mc_ioctl_ops = {
1037 + .vidioc_querycap = unicam_querycap,
1038 + .vidioc_enum_fmt_vid_cap = unicam_mc_enum_fmt_vid_cap,
1039 + .vidioc_g_fmt_vid_cap = unicam_mc_g_fmt_vid_cap,
1040 + .vidioc_try_fmt_vid_cap = unicam_mc_try_fmt_vid_cap,
1041 + .vidioc_s_fmt_vid_cap = unicam_mc_s_fmt_vid_cap,
1043 + .vidioc_enum_fmt_meta_cap = unicam_mc_enum_fmt_meta_cap,
1044 + .vidioc_g_fmt_meta_cap = unicam_mc_g_fmt_meta_cap,
1045 + .vidioc_try_fmt_meta_cap = unicam_mc_try_fmt_meta_cap,
1046 + .vidioc_s_fmt_meta_cap = unicam_mc_s_fmt_meta_cap,
1048 + .vidioc_enum_framesizes = unicam_mc_enum_framesizes,
1049 + .vidioc_reqbufs = vb2_ioctl_reqbufs,
1050 + .vidioc_create_bufs = vb2_ioctl_create_bufs,
1051 + .vidioc_prepare_buf = vb2_ioctl_prepare_buf,
1052 + .vidioc_querybuf = vb2_ioctl_querybuf,
1053 + .vidioc_qbuf = vb2_ioctl_qbuf,
1054 + .vidioc_dqbuf = vb2_ioctl_dqbuf,
1055 + .vidioc_expbuf = vb2_ioctl_expbuf,
1056 + .vidioc_streamon = vb2_ioctl_streamon,
1057 + .vidioc_streamoff = vb2_ioctl_streamoff,
1059 + .vidioc_log_status = unicam_log_status,
1060 + .vidioc_subscribe_event = unicam_subscribe_event,
1061 + .vidioc_unsubscribe_event = v4l2_event_unsubscribe,
1065 +unicam_mc_subdev_link_validate_get_format(struct media_pad *pad,
1066 + struct v4l2_subdev_format *fmt)
1068 + if (is_media_entity_v4l2_subdev(pad->entity)) {
1069 + struct v4l2_subdev *sd =
1070 + media_entity_to_v4l2_subdev(pad->entity);
1072 + fmt->which = V4L2_SUBDEV_FORMAT_ACTIVE;
1073 + fmt->pad = pad->index;
1074 + return v4l2_subdev_call(sd, pad, get_fmt, NULL, fmt);
1080 +static int unicam_mc_video_link_validate(struct media_link *link)
1082 + struct video_device *vd = container_of(link->sink->entity,
1083 + struct video_device, entity);
1084 + struct unicam_node *node = container_of(vd, struct unicam_node,
1086 + struct unicam_device *unicam = node->dev;
1087 + struct v4l2_subdev_format source_fmt;
1090 + if (!media_entity_remote_source_pad_unique(link->sink->entity)) {
1091 + unicam_dbg(1, unicam,
1092 + "video node %s pad not connected\n", vd->name);
1096 + ret = unicam_mc_subdev_link_validate_get_format(link->source,
1101 + if (node->pad_id == IMAGE_PAD) {
1102 + struct v4l2_pix_format *pix_fmt = &node->v_fmt.fmt.pix;
1103 + const struct unicam_fmt *fmt;
1105 + if (source_fmt.format.width != pix_fmt->width ||
1106 + source_fmt.format.height != pix_fmt->height) {
1107 + unicam_err(unicam,
1108 + "Wrong width or height %ux%u (remote pad set to %ux%u)\n",
1109 + pix_fmt->width, pix_fmt->height,
1110 + source_fmt.format.width,
1111 + source_fmt.format.height);
1115 + fmt = find_format_by_code(source_fmt.format.code);
1117 + if (!fmt || (fmt->fourcc != pix_fmt->pixelformat &&
1118 + fmt->repacked_fourcc != pix_fmt->pixelformat))
1121 + struct v4l2_meta_format *meta_fmt = &node->v_fmt.fmt.meta;
1123 + if (source_fmt.format.width != meta_fmt->buffersize ||
1124 + source_fmt.format.height != 1 ||
1125 + source_fmt.format.code != MEDIA_BUS_FMT_SENSOR_DATA) {
1126 + unicam_err(unicam,
1127 + "Wrong metadata width/height/code %ux%u %08x (remote pad set to %ux%u %08x)\n",
1128 + meta_fmt->buffersize, 1,
1129 + MEDIA_BUS_FMT_SENSOR_DATA,
1130 + source_fmt.format.width,
1131 + source_fmt.format.height,
1132 + source_fmt.format.code);
1140 +static const struct media_entity_operations unicam_mc_entity_ops = {
1141 + .link_validate = unicam_mc_video_link_validate,
1144 +/* videobuf2 Operations */
1146 static int unicam_queue_setup(struct vb2_queue *vq,
1147 unsigned int *nbuffers,
1148 unsigned int *nplanes,
1149 @@ -1495,7 +2329,7 @@ static void unicam_start_rx(struct unica
1151 set_field(&val, 1, UNICAM_CLE);
1152 set_field(&val, 1, UNICAM_CLLPE);
1153 - if (dev->bus_flags & V4L2_MBUS_CSI2_CONTINUOUS_CLOCK) {
1154 + if (!(dev->bus_flags & V4L2_MBUS_CSI2_NONCONTINUOUS_CLOCK)) {
1155 set_field(&val, 1, UNICAM_CLTRE);
1156 set_field(&val, 1, UNICAM_CLHSE);
1158 @@ -1517,7 +2351,7 @@ static void unicam_start_rx(struct unica
1160 set_field(&val, 1, UNICAM_DLE);
1161 set_field(&val, 1, UNICAM_DLLPE);
1162 - if (dev->bus_flags & V4L2_MBUS_CSI2_CONTINUOUS_CLOCK) {
1163 + if (!(dev->bus_flags & V4L2_MBUS_CSI2_NONCONTINUOUS_CLOCK)) {
1164 set_field(&val, 1, UNICAM_DLTRE);
1165 set_field(&val, 1, UNICAM_DLHSE);
1167 @@ -1666,6 +2500,12 @@ static int unicam_start_streaming(struct
1171 + ret = media_pipeline_start(&node->video_dev.entity, &node->pipe);
1173 + unicam_err(dev, "Failed to start media pipeline: %d\n", ret);
1177 dev->active_data_lanes = dev->max_data_lanes;
1179 if (dev->bus_type == V4L2_MBUS_CSI2_DPHY) {
1180 @@ -1675,12 +2515,10 @@ static int unicam_start_streaming(struct
1182 if (ret < 0 && ret != -ENOIOCTLCMD) {
1183 unicam_dbg(3, dev, "g_mbus_config failed\n");
1185 + goto error_pipeline;
1188 - dev->active_data_lanes =
1189 - (mbus_config.flags & V4L2_MBUS_CSI2_LANE_MASK) >>
1190 - __ffs(V4L2_MBUS_CSI2_LANE_MASK);
1191 + dev->active_data_lanes = mbus_config.bus.mipi_csi2.num_data_lanes;
1192 if (!dev->active_data_lanes)
1193 dev->active_data_lanes = dev->max_data_lanes;
1194 if (dev->active_data_lanes > dev->max_data_lanes) {
1195 @@ -1688,7 +2526,7 @@ static int unicam_start_streaming(struct
1196 dev->active_data_lanes,
1197 dev->max_data_lanes);
1200 + goto error_pipeline;
1204 @@ -1698,13 +2536,13 @@ static int unicam_start_streaming(struct
1205 ret = clk_set_min_rate(dev->vpu_clock, MIN_VPU_CLOCK_RATE);
1207 unicam_err(dev, "failed to set up VPU clock\n");
1209 + goto error_pipeline;
1212 ret = clk_prepare_enable(dev->vpu_clock);
1214 unicam_err(dev, "Failed to enable VPU clock: %d\n", ret);
1216 + goto error_pipeline;
1219 ret = clk_set_rate(dev->clock, 100 * 1000 * 1000);
1220 @@ -1755,6 +2593,8 @@ err_vpu_clock:
1221 if (clk_set_min_rate(dev->vpu_clock, 0))
1222 unicam_err(dev, "failed to reset the VPU clock\n");
1223 clk_disable_unprepare(dev->vpu_clock);
1225 + media_pipeline_stop(&node->video_dev.entity);
1227 unicam_runtime_put(dev);
1229 @@ -1782,6 +2622,8 @@ static void unicam_stop_streaming(struct
1231 unicam_disable(dev);
1233 + media_pipeline_stop(&node->video_dev.entity);
1235 if (dev->clocks_enabled) {
1236 if (clk_set_min_rate(dev->vpu_clock, 0))
1237 unicam_err(dev, "failed to reset the min VPU clock\n");
1238 @@ -1806,379 +2648,6 @@ static void unicam_stop_streaming(struct
1239 unicam_return_buffers(node, VB2_BUF_STATE_ERROR);
1242 -static int unicam_enum_input(struct file *file, void *priv,
1243 - struct v4l2_input *inp)
1245 - struct unicam_node *node = video_drvdata(file);
1246 - struct unicam_device *dev = node->dev;
1249 - if (inp->index != 0)
1252 - inp->type = V4L2_INPUT_TYPE_CAMERA;
1253 - if (v4l2_subdev_has_op(dev->sensor, video, s_dv_timings)) {
1254 - inp->capabilities = V4L2_IN_CAP_DV_TIMINGS;
1256 - } else if (v4l2_subdev_has_op(dev->sensor, video, s_std)) {
1257 - inp->capabilities = V4L2_IN_CAP_STD;
1258 - if (v4l2_subdev_call(dev->sensor, video, g_tvnorms, &inp->std) < 0)
1259 - inp->std = V4L2_STD_ALL;
1261 - inp->capabilities = 0;
1265 - if (v4l2_subdev_has_op(dev->sensor, video, g_input_status)) {
1266 - ret = v4l2_subdev_call(dev->sensor, video, g_input_status,
1272 - snprintf(inp->name, sizeof(inp->name), "Camera 0");
1276 -static int unicam_g_input(struct file *file, void *priv, unsigned int *i)
1283 -static int unicam_s_input(struct file *file, void *priv, unsigned int i)
1286 - * FIXME: Ideally we would like to be able to query the source
1287 - * subdevice for information over the input connectors it supports,
1288 - * and map that through in to a call to video_ops->s_routing.
1289 - * There is no infrastructure support for defining that within
1290 - * devicetree at present. Until that is implemented we can't
1291 - * map a user physical connector number to s_routing input number.
1299 -static int unicam_querystd(struct file *file, void *priv,
1302 - struct unicam_node *node = video_drvdata(file);
1303 - struct unicam_device *dev = node->dev;
1305 - return v4l2_subdev_call(dev->sensor, video, querystd, std);
1308 -static int unicam_g_std(struct file *file, void *priv, v4l2_std_id *std)
1310 - struct unicam_node *node = video_drvdata(file);
1311 - struct unicam_device *dev = node->dev;
1313 - return v4l2_subdev_call(dev->sensor, video, g_std, std);
1316 -static int unicam_s_std(struct file *file, void *priv, v4l2_std_id std)
1318 - struct unicam_node *node = video_drvdata(file);
1319 - struct unicam_device *dev = node->dev;
1321 - v4l2_std_id current_std;
1323 - ret = v4l2_subdev_call(dev->sensor, video, g_std, &current_std);
1327 - if (std == current_std)
1330 - if (vb2_is_busy(&node->buffer_queue))
1333 - ret = v4l2_subdev_call(dev->sensor, video, s_std, std);
1335 - /* Force recomputation of bytesperline */
1336 - node->v_fmt.fmt.pix.bytesperline = 0;
1338 - unicam_reset_format(node);
1343 -static int unicam_s_edid(struct file *file, void *priv, struct v4l2_edid *edid)
1345 - struct unicam_node *node = video_drvdata(file);
1346 - struct unicam_device *dev = node->dev;
1348 - return v4l2_subdev_call(dev->sensor, pad, set_edid, edid);
1351 -static int unicam_g_edid(struct file *file, void *priv, struct v4l2_edid *edid)
1353 - struct unicam_node *node = video_drvdata(file);
1354 - struct unicam_device *dev = node->dev;
1356 - return v4l2_subdev_call(dev->sensor, pad, get_edid, edid);
1359 -static int unicam_s_selection(struct file *file, void *priv,
1360 - struct v4l2_selection *sel)
1362 - struct unicam_node *node = video_drvdata(file);
1363 - struct unicam_device *dev = node->dev;
1364 - struct v4l2_subdev_selection sdsel = {
1365 - .which = V4L2_SUBDEV_FORMAT_ACTIVE,
1366 - .target = sel->target,
1367 - .flags = sel->flags,
1371 - if (sel->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
1374 - return v4l2_subdev_call(dev->sensor, pad, set_selection, NULL, &sdsel);
1377 -static int unicam_g_selection(struct file *file, void *priv,
1378 - struct v4l2_selection *sel)
1380 - struct unicam_node *node = video_drvdata(file);
1381 - struct unicam_device *dev = node->dev;
1382 - struct v4l2_subdev_selection sdsel = {
1383 - .which = V4L2_SUBDEV_FORMAT_ACTIVE,
1384 - .target = sel->target,
1388 - if (sel->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
1391 - ret = v4l2_subdev_call(dev->sensor, pad, get_selection, NULL, &sdsel);
1398 -static int unicam_enum_framesizes(struct file *file, void *priv,
1399 - struct v4l2_frmsizeenum *fsize)
1401 - struct unicam_node *node = video_drvdata(file);
1402 - struct unicam_device *dev = node->dev;
1403 - const struct unicam_fmt *fmt;
1404 - struct v4l2_subdev_frame_size_enum fse;
1407 - /* check for valid format */
1408 - fmt = find_format_by_pix(dev, fsize->pixel_format);
1410 - unicam_dbg(3, dev, "Invalid pixel code: %x\n",
1411 - fsize->pixel_format);
1414 - fse.code = fmt->code;
1416 - fse.which = V4L2_SUBDEV_FORMAT_ACTIVE;
1417 - fse.index = fsize->index;
1418 - fse.pad = node->src_pad_id;
1420 - ret = v4l2_subdev_call(dev->sensor, pad, enum_frame_size, NULL, &fse);
1424 - unicam_dbg(1, dev, "%s: index: %d code: %x W:[%d,%d] H:[%d,%d]\n",
1425 - __func__, fse.index, fse.code, fse.min_width, fse.max_width,
1426 - fse.min_height, fse.max_height);
1428 - fsize->type = V4L2_FRMSIZE_TYPE_DISCRETE;
1429 - fsize->discrete.width = fse.max_width;
1430 - fsize->discrete.height = fse.max_height;
1435 -static int unicam_enum_frameintervals(struct file *file, void *priv,
1436 - struct v4l2_frmivalenum *fival)
1438 - struct unicam_node *node = video_drvdata(file);
1439 - struct unicam_device *dev = node->dev;
1440 - const struct unicam_fmt *fmt;
1441 - struct v4l2_subdev_frame_interval_enum fie = {
1442 - .index = fival->index,
1443 - .pad = node->src_pad_id,
1444 - .width = fival->width,
1445 - .height = fival->height,
1446 - .which = V4L2_SUBDEV_FORMAT_ACTIVE,
1450 - fmt = find_format_by_pix(dev, fival->pixel_format);
1454 - fie.code = fmt->code;
1455 - ret = v4l2_subdev_call(dev->sensor, pad, enum_frame_interval,
1460 - fival->type = V4L2_FRMIVAL_TYPE_DISCRETE;
1461 - fival->discrete = fie.interval;
1466 -static int unicam_g_parm(struct file *file, void *fh, struct v4l2_streamparm *a)
1468 - struct unicam_node *node = video_drvdata(file);
1469 - struct unicam_device *dev = node->dev;
1471 - return v4l2_g_parm_cap(video_devdata(file), dev->sensor, a);
1474 -static int unicam_s_parm(struct file *file, void *fh, struct v4l2_streamparm *a)
1476 - struct unicam_node *node = video_drvdata(file);
1477 - struct unicam_device *dev = node->dev;
1479 - return v4l2_s_parm_cap(video_devdata(file), dev->sensor, a);
1482 -static int unicam_g_dv_timings(struct file *file, void *priv,
1483 - struct v4l2_dv_timings *timings)
1485 - struct unicam_node *node = video_drvdata(file);
1486 - struct unicam_device *dev = node->dev;
1488 - return v4l2_subdev_call(dev->sensor, video, g_dv_timings, timings);
1491 -static int unicam_s_dv_timings(struct file *file, void *priv,
1492 - struct v4l2_dv_timings *timings)
1494 - struct unicam_node *node = video_drvdata(file);
1495 - struct unicam_device *dev = node->dev;
1496 - struct v4l2_dv_timings current_timings;
1499 - ret = v4l2_subdev_call(dev->sensor, video, g_dv_timings,
1500 - &current_timings);
1505 - if (v4l2_match_dv_timings(timings, &current_timings, 0, false))
1508 - if (vb2_is_busy(&node->buffer_queue))
1511 - ret = v4l2_subdev_call(dev->sensor, video, s_dv_timings, timings);
1513 - /* Force recomputation of bytesperline */
1514 - node->v_fmt.fmt.pix.bytesperline = 0;
1516 - unicam_reset_format(node);
1521 -static int unicam_query_dv_timings(struct file *file, void *priv,
1522 - struct v4l2_dv_timings *timings)
1524 - struct unicam_node *node = video_drvdata(file);
1525 - struct unicam_device *dev = node->dev;
1527 - return v4l2_subdev_call(dev->sensor, video, query_dv_timings, timings);
1530 -static int unicam_enum_dv_timings(struct file *file, void *priv,
1531 - struct v4l2_enum_dv_timings *timings)
1533 - struct unicam_node *node = video_drvdata(file);
1534 - struct unicam_device *dev = node->dev;
1537 - timings->pad = node->src_pad_id;
1538 - ret = v4l2_subdev_call(dev->sensor, pad, enum_dv_timings, timings);
1539 - timings->pad = node->pad_id;
1544 -static int unicam_dv_timings_cap(struct file *file, void *priv,
1545 - struct v4l2_dv_timings_cap *cap)
1547 - struct unicam_node *node = video_drvdata(file);
1548 - struct unicam_device *dev = node->dev;
1551 - cap->pad = node->src_pad_id;
1552 - ret = v4l2_subdev_call(dev->sensor, pad, dv_timings_cap, cap);
1553 - cap->pad = node->pad_id;
1558 -static int unicam_subscribe_event(struct v4l2_fh *fh,
1559 - const struct v4l2_event_subscription *sub)
1561 - switch (sub->type) {
1562 - case V4L2_EVENT_FRAME_SYNC:
1563 - return v4l2_event_subscribe(fh, sub, 2, NULL);
1564 - case V4L2_EVENT_SOURCE_CHANGE:
1565 - return v4l2_event_subscribe(fh, sub, 4, NULL);
1568 - return v4l2_ctrl_subscribe_event(fh, sub);
1571 -static int unicam_log_status(struct file *file, void *fh)
1573 - struct unicam_node *node = video_drvdata(file);
1574 - struct unicam_device *dev = node->dev;
1577 - /* status for sub devices */
1578 - v4l2_device_call_all(&dev->v4l2_dev, 0, core, log_status);
1580 - unicam_info(dev, "-----Receiver status-----\n");
1581 - unicam_info(dev, "V4L2 width/height: %ux%u\n",
1582 - node->v_fmt.fmt.pix.width, node->v_fmt.fmt.pix.height);
1583 - unicam_info(dev, "Mediabus format: %08x\n", node->fmt->code);
1584 - unicam_info(dev, "V4L2 format: %08x\n",
1585 - node->v_fmt.fmt.pix.pixelformat);
1586 - reg = reg_read(dev, UNICAM_IPIPE);
1587 - unicam_info(dev, "Unpacking/packing: %u / %u\n",
1588 - get_field(reg, UNICAM_PUM_MASK),
1589 - get_field(reg, UNICAM_PPM_MASK));
1590 - unicam_info(dev, "----Live data----\n");
1591 - unicam_info(dev, "Programmed stride: %4u\n",
1592 - reg_read(dev, UNICAM_IBLS));
1593 - unicam_info(dev, "Detected resolution: %ux%u\n",
1594 - reg_read(dev, UNICAM_IHSTA),
1595 - reg_read(dev, UNICAM_IVSTA));
1596 - unicam_info(dev, "Write pointer: %08x\n",
1597 - reg_read(dev, UNICAM_IBWP));
1602 -static void unicam_notify(struct v4l2_subdev *sd,
1603 - unsigned int notification, void *arg)
1605 - struct unicam_device *dev = to_unicam_device(sd->v4l2_dev);
1607 - switch (notification) {
1608 - case V4L2_DEVICE_NOTIFY_EVENT:
1609 - v4l2_event_queue(&dev->node[IMAGE_PAD].video_dev, arg);
1616 static const struct vb2_ops unicam_video_qops = {
1617 .wait_prepare = vb2_ops_wait_prepare,
1618 @@ -2261,60 +2730,6 @@ static const struct v4l2_file_operations
1619 .mmap = vb2_fop_mmap,
1622 -/* unicam capture ioctl operations */
1623 -static const struct v4l2_ioctl_ops unicam_ioctl_ops = {
1624 - .vidioc_querycap = unicam_querycap,
1625 - .vidioc_enum_fmt_vid_cap = unicam_enum_fmt_vid_cap,
1626 - .vidioc_g_fmt_vid_cap = unicam_g_fmt_vid_cap,
1627 - .vidioc_s_fmt_vid_cap = unicam_s_fmt_vid_cap,
1628 - .vidioc_try_fmt_vid_cap = unicam_try_fmt_vid_cap,
1630 - .vidioc_enum_fmt_meta_cap = unicam_enum_fmt_meta_cap,
1631 - .vidioc_g_fmt_meta_cap = unicam_g_fmt_meta_cap,
1632 - .vidioc_s_fmt_meta_cap = unicam_g_fmt_meta_cap,
1633 - .vidioc_try_fmt_meta_cap = unicam_g_fmt_meta_cap,
1635 - .vidioc_enum_input = unicam_enum_input,
1636 - .vidioc_g_input = unicam_g_input,
1637 - .vidioc_s_input = unicam_s_input,
1639 - .vidioc_querystd = unicam_querystd,
1640 - .vidioc_s_std = unicam_s_std,
1641 - .vidioc_g_std = unicam_g_std,
1643 - .vidioc_g_edid = unicam_g_edid,
1644 - .vidioc_s_edid = unicam_s_edid,
1646 - .vidioc_enum_framesizes = unicam_enum_framesizes,
1647 - .vidioc_enum_frameintervals = unicam_enum_frameintervals,
1649 - .vidioc_g_selection = unicam_g_selection,
1650 - .vidioc_s_selection = unicam_s_selection,
1652 - .vidioc_g_parm = unicam_g_parm,
1653 - .vidioc_s_parm = unicam_s_parm,
1655 - .vidioc_s_dv_timings = unicam_s_dv_timings,
1656 - .vidioc_g_dv_timings = unicam_g_dv_timings,
1657 - .vidioc_query_dv_timings = unicam_query_dv_timings,
1658 - .vidioc_enum_dv_timings = unicam_enum_dv_timings,
1659 - .vidioc_dv_timings_cap = unicam_dv_timings_cap,
1661 - .vidioc_reqbufs = vb2_ioctl_reqbufs,
1662 - .vidioc_create_bufs = vb2_ioctl_create_bufs,
1663 - .vidioc_prepare_buf = vb2_ioctl_prepare_buf,
1664 - .vidioc_querybuf = vb2_ioctl_querybuf,
1665 - .vidioc_qbuf = vb2_ioctl_qbuf,
1666 - .vidioc_dqbuf = vb2_ioctl_dqbuf,
1667 - .vidioc_expbuf = vb2_ioctl_expbuf,
1668 - .vidioc_streamon = vb2_ioctl_streamon,
1669 - .vidioc_streamoff = vb2_ioctl_streamoff,
1671 - .vidioc_log_status = unicam_log_status,
1672 - .vidioc_subscribe_event = unicam_subscribe_event,
1673 - .vidioc_unsubscribe_event = v4l2_event_unsubscribe,
1677 unicam_async_bound(struct v4l2_async_notifier *notifier,
1678 struct v4l2_subdev *subdev,
1679 @@ -2365,11 +2780,11 @@ static void unicam_node_release(struct v
1680 unicam_put(node->dev);
1683 -static int register_node(struct unicam_device *unicam, struct unicam_node *node,
1684 - enum v4l2_buf_type type, int pad_id)
1685 +static int unicam_set_default_format(struct unicam_device *unicam,
1686 + struct unicam_node *node,
1688 + const struct unicam_fmt **ret_fmt)
1690 - struct video_device *vdev;
1691 - struct vb2_queue *q;
1692 struct v4l2_mbus_framefmt mbus_fmt = {0};
1693 const struct unicam_fmt *fmt;
1695 @@ -2414,15 +2829,69 @@ static int register_node(struct unicam_d
1696 node->v_fmt.fmt.meta.dataformat = fmt->fourcc;
1704 +static void unicam_mc_set_default_format(struct unicam_node *node, int pad_id)
1706 + if (pad_id == IMAGE_PAD) {
1707 + struct v4l2_pix_format *pix_fmt = &node->v_fmt.fmt.pix;
1709 + pix_fmt->width = 640;
1710 + pix_fmt->height = 480;
1711 + pix_fmt->field = V4L2_FIELD_NONE;
1712 + pix_fmt->colorspace = V4L2_COLORSPACE_SRGB;
1713 + pix_fmt->ycbcr_enc = V4L2_YCBCR_ENC_601;
1714 + pix_fmt->quantization = V4L2_QUANTIZATION_LIM_RANGE;
1715 + pix_fmt->xfer_func = V4L2_XFER_FUNC_SRGB;
1716 + pix_fmt->pixelformat = formats[0].fourcc;
1717 + unicam_calc_format_size_bpl(node->dev, &formats[0],
1719 + node->v_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
1721 + node->fmt = &formats[0];
1723 + const struct unicam_fmt *fmt;
1725 + /* Fix this node format as embedded data. */
1726 + fmt = find_format_by_code(MEDIA_BUS_FMT_SENSOR_DATA);
1727 + node->v_fmt.fmt.meta.dataformat = fmt->fourcc;
1730 + node->v_fmt.fmt.meta.buffersize = UNICAM_EMBEDDED_SIZE;
1731 + node->embedded_lines = 1;
1732 + node->v_fmt.type = V4L2_BUF_TYPE_META_CAPTURE;
1736 +static int register_node(struct unicam_device *unicam, struct unicam_node *node,
1737 + enum v4l2_buf_type type, int pad_id)
1739 + struct video_device *vdev;
1740 + struct vb2_queue *q;
1744 node->pad_id = pad_id;
1747 - /* Read current subdev format */
1749 - unicam_reset_format(node);
1750 + if (!unicam->mc_api) {
1751 + const struct unicam_fmt *fmt;
1753 - if (v4l2_subdev_has_op(unicam->sensor, video, s_std)) {
1754 + ret = unicam_set_default_format(unicam, node, pad_id, &fmt);
1758 + /* Read current subdev format */
1760 + unicam_reset_format(node);
1762 + unicam_mc_set_default_format(node, pad_id);
1765 + if (!unicam->mc_api &&
1766 + v4l2_subdev_has_op(unicam->sensor, video, s_std)) {
1767 v4l2_std_id tvnorms;
1769 if (WARN_ON(!v4l2_subdev_has_op(unicam->sensor, video,
1770 @@ -2445,12 +2914,15 @@ static int register_node(struct unicam_d
1772 vdev = &node->video_dev;
1773 if (pad_id == IMAGE_PAD) {
1774 - /* Add controls from the subdevice */
1775 - ret = v4l2_ctrl_add_handler(&unicam->ctrl_handler,
1776 - unicam->sensor->ctrl_handler, NULL,
1780 + if (!unicam->mc_api) {
1781 + /* Add controls from the subdevice */
1782 + ret = v4l2_ctrl_add_handler(&unicam->ctrl_handler,
1783 + unicam->sensor->ctrl_handler,
1791 * If the sensor subdevice has any controls, associate the node
1792 @@ -2482,7 +2954,8 @@ static int register_node(struct unicam_d
1794 vdev->release = unicam_node_release;
1795 vdev->fops = &unicam_fops;
1796 - vdev->ioctl_ops = &unicam_ioctl_ops;
1797 + vdev->ioctl_ops = unicam->mc_api ? &unicam_mc_ioctl_ops :
1798 + &unicam_ioctl_ops;
1799 vdev->v4l2_dev = &unicam->v4l2_dev;
1800 vdev->vfl_dir = VFL_DIR_RX;
1802 @@ -2490,6 +2963,10 @@ static int register_node(struct unicam_d
1803 vdev->device_caps = (pad_id == IMAGE_PAD) ?
1804 V4L2_CAP_VIDEO_CAPTURE : V4L2_CAP_META_CAPTURE;
1805 vdev->device_caps |= V4L2_CAP_READWRITE | V4L2_CAP_STREAMING;
1806 + if (unicam->mc_api) {
1807 + vdev->device_caps |= V4L2_CAP_IO_MC;
1808 + vdev->entity.ops = &unicam_mc_entity_ops;
1811 /* Define the device names */
1812 snprintf(vdev->name, sizeof(vdev->name), "%s-%s", UNICAM_MODULE_NAME,
1813 @@ -2509,48 +2986,61 @@ static int register_node(struct unicam_d
1814 unicam_err(unicam, "Unable to allocate dummy buffer.\n");
1818 - if (pad_id == METADATA_PAD ||
1819 - !v4l2_subdev_has_op(unicam->sensor, video, s_std)) {
1820 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_S_STD);
1821 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_G_STD);
1822 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_ENUMSTD);
1824 - if (pad_id == METADATA_PAD ||
1825 - !v4l2_subdev_has_op(unicam->sensor, video, querystd))
1826 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_QUERYSTD);
1827 - if (pad_id == METADATA_PAD ||
1828 - !v4l2_subdev_has_op(unicam->sensor, video, s_dv_timings)) {
1829 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_S_EDID);
1830 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_G_EDID);
1831 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_DV_TIMINGS_CAP);
1832 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_G_DV_TIMINGS);
1833 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_S_DV_TIMINGS);
1834 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_ENUM_DV_TIMINGS);
1835 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_QUERY_DV_TIMINGS);
1837 - if (pad_id == METADATA_PAD ||
1838 - !v4l2_subdev_has_op(unicam->sensor, pad, enum_frame_interval))
1839 - v4l2_disable_ioctl(&node->video_dev,
1840 - VIDIOC_ENUM_FRAMEINTERVALS);
1841 - if (pad_id == METADATA_PAD ||
1842 - !v4l2_subdev_has_op(unicam->sensor, video, g_frame_interval))
1843 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_G_PARM);
1844 - if (pad_id == METADATA_PAD ||
1845 - !v4l2_subdev_has_op(unicam->sensor, video, s_frame_interval))
1846 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_S_PARM);
1848 - if (pad_id == METADATA_PAD ||
1849 - !v4l2_subdev_has_op(unicam->sensor, pad, enum_frame_size))
1850 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_ENUM_FRAMESIZES);
1852 - if (node->pad_id == METADATA_PAD ||
1853 - !v4l2_subdev_has_op(unicam->sensor, pad, set_selection))
1854 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_S_SELECTION);
1856 - if (node->pad_id == METADATA_PAD ||
1857 - !v4l2_subdev_has_op(unicam->sensor, pad, get_selection))
1858 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_G_SELECTION);
1859 + if (!unicam->mc_api) {
1860 + if (pad_id == METADATA_PAD ||
1861 + !v4l2_subdev_has_op(unicam->sensor, video, s_std)) {
1862 + v4l2_disable_ioctl(&node->video_dev, VIDIOC_S_STD);
1863 + v4l2_disable_ioctl(&node->video_dev, VIDIOC_G_STD);
1864 + v4l2_disable_ioctl(&node->video_dev, VIDIOC_ENUMSTD);
1866 + if (pad_id == METADATA_PAD ||
1867 + !v4l2_subdev_has_op(unicam->sensor, video, querystd))
1868 + v4l2_disable_ioctl(&node->video_dev, VIDIOC_QUERYSTD);
1869 + if (pad_id == METADATA_PAD ||
1870 + !v4l2_subdev_has_op(unicam->sensor, video, s_dv_timings)) {
1871 + v4l2_disable_ioctl(&node->video_dev, VIDIOC_S_EDID);
1872 + v4l2_disable_ioctl(&node->video_dev, VIDIOC_G_EDID);
1873 + v4l2_disable_ioctl(&node->video_dev,
1874 + VIDIOC_DV_TIMINGS_CAP);
1875 + v4l2_disable_ioctl(&node->video_dev,
1876 + VIDIOC_G_DV_TIMINGS);
1877 + v4l2_disable_ioctl(&node->video_dev,
1878 + VIDIOC_S_DV_TIMINGS);
1879 + v4l2_disable_ioctl(&node->video_dev,
1880 + VIDIOC_ENUM_DV_TIMINGS);
1881 + v4l2_disable_ioctl(&node->video_dev,
1882 + VIDIOC_QUERY_DV_TIMINGS);
1884 + if (pad_id == METADATA_PAD ||
1885 + !v4l2_subdev_has_op(unicam->sensor, pad,
1886 + enum_frame_interval))
1887 + v4l2_disable_ioctl(&node->video_dev,
1888 + VIDIOC_ENUM_FRAMEINTERVALS);
1889 + if (pad_id == METADATA_PAD ||
1890 + !v4l2_subdev_has_op(unicam->sensor, video,
1891 + g_frame_interval))
1892 + v4l2_disable_ioctl(&node->video_dev, VIDIOC_G_PARM);
1893 + if (pad_id == METADATA_PAD ||
1894 + !v4l2_subdev_has_op(unicam->sensor, video,
1895 + s_frame_interval))
1896 + v4l2_disable_ioctl(&node->video_dev, VIDIOC_S_PARM);
1898 + if (pad_id == METADATA_PAD ||
1899 + !v4l2_subdev_has_op(unicam->sensor, pad,
1901 + v4l2_disable_ioctl(&node->video_dev,
1902 + VIDIOC_ENUM_FRAMESIZES);
1904 + if (node->pad_id == METADATA_PAD ||
1905 + !v4l2_subdev_has_op(unicam->sensor, pad, set_selection))
1906 + v4l2_disable_ioctl(&node->video_dev,
1907 + VIDIOC_S_SELECTION);
1909 + if (node->pad_id == METADATA_PAD ||
1910 + !v4l2_subdev_has_op(unicam->sensor, pad, get_selection))
1911 + v4l2_disable_ioctl(&node->video_dev,
1912 + VIDIOC_G_SELECTION);
1915 ret = video_register_device(vdev, VFL_TYPE_VIDEO, -1);
1917 @@ -2619,7 +3109,7 @@ static int unicam_async_complete(struct
1918 if (unicam->sensor->entity.pads[i].flags & MEDIA_PAD_FL_SOURCE) {
1919 if (source_pads < MAX_NODES) {
1920 unicam->node[source_pads].src_pad_id = i;
1921 - unicam_err(unicam, "source pad %u is index %u\n",
1922 + unicam_dbg(3, unicam, "source pad %u is index %u\n",
1926 @@ -2648,7 +3138,10 @@ static int unicam_async_complete(struct
1930 - ret = v4l2_device_register_ro_subdev_nodes(&unicam->v4l2_dev);
1931 + if (unicam->mc_api)
1932 + ret = v4l2_device_register_subdev_nodes(&unicam->v4l2_dev);
1934 + ret = v4l2_device_register_ro_subdev_nodes(&unicam->v4l2_dev);
1936 unicam_err(unicam, "Unable to register subdev nodes.\n");
1938 @@ -2808,6 +3301,14 @@ static int unicam_probe(struct platform_
1939 kref_init(&unicam->kref);
1940 unicam->pdev = pdev;
1943 + * Adopt the current setting of the module parameter, and check if
1944 + * device tree requests it.
1946 + unicam->mc_api = media_controller;
1947 + if (of_property_read_bool(pdev->dev.of_node, "brcm,media-controller"))
1948 + unicam->mc_api = true;
1950 unicam->base = devm_platform_ioremap_resource(pdev, 0);
1951 if (IS_ERR(unicam->base)) {
1952 unicam_err(unicam, "Failed to get main io block\n");