author | Álvaro Fernández Rojas <noltari@gmail.com> | 2020-05-27 15:12:04 +0200 |
---|---|---|
committer | Álvaro Fernández Rojas <noltari@gmail.com> | 2020-05-28 10:36:27 +0200 |
commit | 584d4bf1d3c2265a810e1494eb5c8ef0a72ee934 (patch) | |
tree | 20b3364aee884d3f65f246c16ee7f8bd4a465ea2 /target/linux/bcm27xx/patches-5.4/950-0652-media-bcm2835-unicam-Add-support-for-mulitple-device.patch | |
parent | 9e467a764b4e30a04dd0431ea277f6acd26babe0 (diff) | |
bcm27xx: update patches from RPi foundation
bcm2708: boot tested on RPi B+ v1.2
bcm2709: boot tested on RPi 3B v1.2
bcm2710: boot tested on RPi 3B v1.2
bcm2711: boot tested on RPi 4B v1.1 4G
Signed-off-by: Álvaro Fernández Rojas <noltari@gmail.com>
Diffstat (limited to 'target/linux/bcm27xx/patches-5.4/950-0652-media-bcm2835-unicam-Add-support-for-mulitple-device.patch')
-rw-r--r-- | target/linux/bcm27xx/patches-5.4/950-0652-media-bcm2835-unicam-Add-support-for-mulitple-device.patch | 1084 |
1 file changed, 1084 insertions, 0 deletions
diff --git a/target/linux/bcm27xx/patches-5.4/950-0652-media-bcm2835-unicam-Add-support-for-mulitple-device.patch b/target/linux/bcm27xx/patches-5.4/950-0652-media-bcm2835-unicam-Add-support-for-mulitple-device.patch new file mode 100644 index 0000000000..315feff5d3 --- /dev/null +++ b/target/linux/bcm27xx/patches-5.4/950-0652-media-bcm2835-unicam-Add-support-for-mulitple-device.patch @@ -0,0 +1,1084 @@ +From b466d74b45466b417e364c85c7fce71e9fc3fc7c Mon Sep 17 00:00:00 2001 +From: Naushir Patuck <naush@raspberrypi.com> +Date: Tue, 7 Apr 2020 10:42:14 +0100 +Subject: [PATCH] media: bcm2835-unicam: Add support for mulitple + device nodes. + +Move device node specific state out of the device state structure and +into a new node structure. This separation will be needed for future +changes where we will add an embedded data node to the driver to work +alongside the existing image data node. + +Currently only use a single image node, so this commit does not add +any functional changes. + +Signed-off-by: Naushir Patuck <naush@raspberrypi.com> +--- + .../media/platform/bcm2835/bcm2835-unicam.c | 484 ++++++++++-------- + 1 file changed, 283 insertions(+), 201 deletions(-) + +--- a/drivers/media/platform/bcm2835/bcm2835-unicam.c ++++ b/drivers/media/platform/bcm2835/bcm2835-unicam.c +@@ -109,7 +109,8 @@ MODULE_PARM_DESC(debug, "Debug level 0-3 + /* Define a nominal minimum image size */ + #define MIN_WIDTH 16 + #define MIN_HEIGHT 16 +- ++/* Maximum number of simulataneous streams Uncaim can handle. */ ++#define MAX_NODES 2 + /* + * struct unicam_fmt - Unicam media bus format information + * @pixelformat: V4L2 pixel format FCC identifier. 0 if n/a. +@@ -346,11 +347,37 @@ struct unicam_cfg { + + #define MAX_POSSIBLE_PIX_FMTS (ARRAY_SIZE(formats)) + +-struct unicam_device { +- /* V4l2 specific parameters */ ++struct unicam_node { ++ bool registered; ++ unsigned int pad_id; ++ /* Pointer pointing to current v4l2_buffer */ ++ struct unicam_buffer *cur_frm; ++ /* Pointer pointing to next v4l2_buffer */ ++ struct unicam_buffer *next_frm; ++ /* video capture */ ++ const struct unicam_fmt *fmt; ++ /* Used to store current pixel format */ ++ struct v4l2_format v_fmt; ++ /* Used to store current mbus frame format */ ++ struct v4l2_mbus_framefmt m_fmt; ++ /* Buffer queue used in video-buf */ ++ struct vb2_queue buffer_queue; ++ /* Queue of filled frames */ ++ struct unicam_dmaqueue dma_queue; ++ /* IRQ lock for DMA queue */ ++ spinlock_t dma_queue_lock; ++ /* lock used to access this structure */ ++ struct mutex lock; + /* Identifies video device for this channel */ + struct video_device video_dev; ++ /* Pointer to the parent handle */ ++ struct unicam_device *dev; ++ struct media_pad pad; + struct v4l2_ctrl_handler ctrl_handler; ++}; ++ ++struct unicam_device { ++ /* V4l2 specific parameters */ + + struct v4l2_fwnode_endpoint endpoint; + +@@ -363,7 +390,6 @@ struct unicam_device { + /* V4l2 device */ + struct v4l2_device v4l2_dev; + struct media_device mdev; +- struct media_pad pad; + + /* parent device */ + struct platform_device *pdev; +@@ -378,18 +404,6 @@ struct unicam_device { + /* current input at the sub device */ + int current_input; + +- /* Pointer pointing to current v4l2_buffer */ +- struct unicam_buffer *cur_frm; +- /* Pointer pointing to next v4l2_buffer */ +- struct unicam_buffer *next_frm; +- +- /* video capture */ +- const struct unicam_fmt *fmt; +- /* Used to store current pixel format */ +- struct v4l2_format v_fmt; +- /* Used to store current mbus frame format */ +- struct 
v4l2_mbus_framefmt m_fmt; +- + unsigned int virtual_channel; + enum v4l2_mbus_type bus_type; + /* +@@ -401,20 +415,10 @@ struct unicam_device { + unsigned int active_data_lanes; + + struct v4l2_rect crop; +- +- /* Currently selected input on subdev */ +- int input; +- +- /* Buffer queue used in video-buf */ +- struct vb2_queue buffer_queue; +- /* Queue of filled frames */ +- struct unicam_dmaqueue dma_queue; +- /* IRQ lock for DMA queue */ +- spinlock_t dma_queue_lock; +- /* lock used to access this structure */ +- struct mutex lock; + /* Flag to denote that we are processing buffers */ + int streaming; ++ ++ struct unicam_node node[MAX_NODES]; + }; + + /* Hardware access */ +@@ -526,10 +530,11 @@ static inline unsigned int bytes_per_lin + } + + static int __subdev_get_format(struct unicam_device *dev, +- struct v4l2_mbus_framefmt *fmt) ++ struct v4l2_mbus_framefmt *fmt, int pad_id) + { + struct v4l2_subdev_format sd_fmt = { + .which = V4L2_SUBDEV_FORMAT_ACTIVE, ++ .pad = pad_id + }; + int ret; + +@@ -598,29 +603,30 @@ static int unicam_calc_format_size_bpl(s + return 0; + } + +-static int unicam_reset_format(struct unicam_device *dev) ++static int unicam_reset_format(struct unicam_node *node) + { ++ struct unicam_device *dev = node->dev; + struct v4l2_mbus_framefmt mbus_fmt; + int ret; + +- ret = __subdev_get_format(dev, &mbus_fmt); ++ ret = __subdev_get_format(dev, &mbus_fmt, node->pad_id); + if (ret) { + unicam_err(dev, "Failed to get_format - ret %d\n", ret); + return ret; + } + +- if (mbus_fmt.code != dev->fmt->code) { ++ if (mbus_fmt.code != dev->node[0].fmt->code) { + unicam_err(dev, "code mismatch - fmt->code %08x, mbus_fmt.code %08x\n", +- dev->fmt->code, mbus_fmt.code); ++ dev->node[0].fmt->code, mbus_fmt.code); + return ret; + } + +- v4l2_fill_pix_format(&dev->v_fmt.fmt.pix, &mbus_fmt); +- dev->v_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; ++ v4l2_fill_pix_format(&dev->node[0].v_fmt.fmt.pix, &mbus_fmt); ++ dev->node[0].v_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + +- unicam_calc_format_size_bpl(dev, dev->fmt, &dev->v_fmt); ++ unicam_calc_format_size_bpl(dev, dev->node[0].fmt, &dev->node[0].v_fmt); + +- dev->m_fmt = mbus_fmt; ++ dev->node[0].m_fmt = mbus_fmt; + + return 0; + } +@@ -635,14 +641,14 @@ static void unicam_wr_dma_addr(struct un + + reg_write(&dev->cfg, UNICAM_IBSA0, dmaaddr); + reg_write(&dev->cfg, UNICAM_IBEA0, +- dmaaddr + dev->v_fmt.fmt.pix.sizeimage); ++ dmaaddr + dev->node[0].v_fmt.fmt.pix.sizeimage); + } + + static inline unsigned int unicam_get_lines_done(struct unicam_device *dev) + { + dma_addr_t start_addr, cur_addr; +- unsigned int stride = dev->v_fmt.fmt.pix.bytesperline; +- struct unicam_buffer *frm = dev->cur_frm; ++ unsigned int stride = dev->node[0].v_fmt.fmt.pix.bytesperline; ++ struct unicam_buffer *frm = dev->node[0].cur_frm; + + if (!frm) + return 0; +@@ -654,12 +660,12 @@ static inline unsigned int unicam_get_li + + static inline void unicam_schedule_next_buffer(struct unicam_device *dev) + { +- struct unicam_dmaqueue *dma_q = &dev->dma_queue; ++ struct unicam_dmaqueue *dma_q = &dev->node[0].dma_queue; + struct unicam_buffer *buf; + dma_addr_t addr; + + buf = list_entry(dma_q->active.next, struct unicam_buffer, list); +- dev->next_frm = buf; ++ dev->node[0].next_frm = buf; + list_del(&buf->list); + + addr = vb2_dma_contig_plane_dma_addr(&buf->vb.vb2_buf, 0); +@@ -668,11 +674,11 @@ static inline void unicam_schedule_next_ + + static inline void unicam_process_buffer_complete(struct unicam_device *dev) + { +- dev->cur_frm->vb.field = dev->m_fmt.field; +- 
dev->cur_frm->vb.sequence = dev->sequence++; ++ dev->node[0].cur_frm->vb.field = dev->node[0].m_fmt.field; ++ dev->node[0].cur_frm->vb.sequence = dev->sequence++; + +- vb2_buffer_done(&dev->cur_frm->vb.vb2_buf, VB2_BUF_STATE_DONE); +- dev->cur_frm = dev->next_frm; ++ vb2_buffer_done(&dev->node[0].cur_frm->vb.vb2_buf, VB2_BUF_STATE_DONE); ++ dev->node[0].cur_frm = dev->node[0].next_frm; + } + + /* +@@ -687,7 +693,7 @@ static irqreturn_t unicam_isr(int irq, v + { + struct unicam_device *unicam = (struct unicam_device *)dev; + struct unicam_cfg *cfg = &unicam->cfg; +- struct unicam_dmaqueue *dma_q = &unicam->dma_queue; ++ struct unicam_dmaqueue *dma_q = &unicam->node[0].dma_queue; + unsigned int lines_done = unicam_get_lines_done(dev); + unsigned int sequence = unicam->sequence; + int ista, sta; +@@ -720,8 +726,9 @@ static irqreturn_t unicam_isr(int irq, v + * Timestamp is to be when the first data byte was captured, + * aka frame start. + */ +- if (unicam->cur_frm) +- unicam->cur_frm->vb.vb2_buf.timestamp = ktime_get_ns(); ++ if (unicam->node[0].cur_frm) ++ unicam->node[0].cur_frm->vb.vb2_buf.timestamp = ++ ktime_get_ns(); + } + if (ista & UNICAM_FEI || sta & UNICAM_PI0) { + /* +@@ -729,7 +736,8 @@ static irqreturn_t unicam_isr(int irq, v + * stop the peripheral. Overwrite the frame we've just + * captured instead. + */ +- if (unicam->cur_frm && unicam->cur_frm != unicam->next_frm) ++ if (unicam->node[0].cur_frm && ++ unicam->node[0].cur_frm != unicam->node[0].next_frm) + unicam_process_buffer_complete(unicam); + } + +@@ -738,11 +746,11 @@ static irqreturn_t unicam_isr(int irq, v + * already started. + */ + if (ista & (UNICAM_FSI | UNICAM_LCI) && !(ista & UNICAM_FEI)) { +- spin_lock(&unicam->dma_queue_lock); ++ spin_lock(&unicam->node[0].dma_queue_lock); + if (!list_empty(&dma_q->active) && +- unicam->cur_frm == unicam->next_frm) ++ unicam->node[0].cur_frm == unicam->node[0].next_frm) + unicam_schedule_next_buffer(unicam); +- spin_unlock(&unicam->dma_queue_lock); ++ spin_unlock(&unicam->node[0].dma_queue_lock); + } + + if (reg_read(&unicam->cfg, UNICAM_ICTL) & UNICAM_FCM) { +@@ -756,7 +764,8 @@ static irqreturn_t unicam_isr(int irq, v + static int unicam_querycap(struct file *file, void *priv, + struct v4l2_capability *cap) + { +- struct unicam_device *dev = video_drvdata(file); ++ struct unicam_node *node = video_drvdata(file); ++ struct unicam_device *dev = node->dev; + + strlcpy(cap->driver, UNICAM_MODULE_NAME, sizeof(cap->driver)); + strlcpy(cap->card, UNICAM_MODULE_NAME, sizeof(cap->card)); +@@ -770,7 +779,8 @@ static int unicam_querycap(struct file * + static int unicam_enum_fmt_vid_cap(struct file *file, void *priv, + struct v4l2_fmtdesc *f) + { +- struct unicam_device *dev = video_drvdata(file); ++ struct unicam_node *node = video_drvdata(file); ++ struct unicam_device *dev = node->dev; + struct v4l2_subdev_mbus_code_enum mbus_code; + const struct unicam_fmt *fmt = NULL; + int index = 0; +@@ -815,9 +825,9 @@ static int unicam_enum_fmt_vid_cap(struc + static int unicam_g_fmt_vid_cap(struct file *file, void *priv, + struct v4l2_format *f) + { +- struct unicam_device *dev = video_drvdata(file); ++ struct unicam_node *node = video_drvdata(file); + +- *f = dev->v_fmt; ++ *f = node->v_fmt; + + return 0; + } +@@ -859,9 +869,11 @@ const struct unicam_fmt *get_first_suppo + static int unicam_try_fmt_vid_cap(struct file *file, void *priv, + struct v4l2_format *f) + { +- struct unicam_device *dev = video_drvdata(file); ++ struct unicam_node *node = video_drvdata(file); ++ struct unicam_device 
*dev = node->dev; + struct v4l2_subdev_format sd_fmt = { + .which = V4L2_SUBDEV_FORMAT_TRY, ++ .pad = 0 + }; + struct v4l2_mbus_framefmt *mbus_fmt = &sd_fmt.format; + const struct unicam_fmt *fmt; +@@ -939,8 +951,9 @@ static int unicam_try_fmt_vid_cap(struct + static int unicam_s_fmt_vid_cap(struct file *file, void *priv, + struct v4l2_format *f) + { +- struct unicam_device *dev = video_drvdata(file); +- struct vb2_queue *q = &dev->buffer_queue; ++ struct unicam_node *node = video_drvdata(file); ++ struct unicam_device *dev = node->dev; ++ struct vb2_queue *q = &node->buffer_queue; + struct v4l2_mbus_framefmt mbus_fmt = {0}; + const struct unicam_fmt *fmt; + int ret; +@@ -985,17 +998,18 @@ static int unicam_s_fmt_vid_cap(struct f + return -EINVAL; + } + +- dev->fmt = fmt; +- dev->v_fmt.fmt.pix.pixelformat = f->fmt.pix.pixelformat; +- dev->v_fmt.fmt.pix.bytesperline = f->fmt.pix.bytesperline; +- unicam_reset_format(dev); +- +- unicam_dbg(3, dev, "%s %dx%d, mbus_fmt 0x%08X, V4L2 pix 0x%08X.\n", +- __func__, dev->v_fmt.fmt.pix.width, +- dev->v_fmt.fmt.pix.height, mbus_fmt.code, +- dev->v_fmt.fmt.pix.pixelformat); ++ node->fmt = fmt; ++ node->v_fmt.fmt.pix.pixelformat = f->fmt.pix.pixelformat; ++ node->v_fmt.fmt.pix.bytesperline = f->fmt.pix.bytesperline; ++ unicam_reset_format(node); ++ ++ unicam_dbg(3, dev, ++ "%s %dx%d, mbus_fmt 0x%08X, V4L2 pix 0x%08X.\n", ++ __func__, node->v_fmt.fmt.pix.width, ++ node->v_fmt.fmt.pix.height, mbus_fmt.code, ++ node->v_fmt.fmt.pix.pixelformat); + +- *f = dev->v_fmt; ++ *f = node->v_fmt; + + return 0; + } +@@ -1006,8 +1020,9 @@ static int unicam_queue_setup(struct vb2 + unsigned int sizes[], + struct device *alloc_devs[]) + { +- struct unicam_device *dev = vb2_get_drv_priv(vq); +- unsigned int size = dev->v_fmt.fmt.pix.sizeimage; ++ struct unicam_node *node = vb2_get_drv_priv(vq); ++ struct unicam_device *dev = node->dev; ++ unsigned int size = node->v_fmt.fmt.pix.sizeimage; + + if (vq->num_buffers + *nbuffers < 3) + *nbuffers = 3 - vq->num_buffers; +@@ -1029,15 +1044,16 @@ static int unicam_queue_setup(struct vb2 + + static int unicam_buffer_prepare(struct vb2_buffer *vb) + { +- struct unicam_device *dev = vb2_get_drv_priv(vb->vb2_queue); ++ struct unicam_node *node = vb2_get_drv_priv(vb->vb2_queue); ++ struct unicam_device *dev = node->dev; + struct unicam_buffer *buf = container_of(vb, struct unicam_buffer, + vb.vb2_buf); + unsigned long size; + +- if (WARN_ON(!dev->fmt)) ++ if (WARN_ON(!node->fmt)) + return -EINVAL; + +- size = dev->v_fmt.fmt.pix.sizeimage; ++ size = node->v_fmt.fmt.pix.sizeimage; + if (vb2_plane_size(vb, 0) < size) { + unicam_err(dev, "data will not fit into plane (%lu < %lu)\n", + vb2_plane_size(vb, 0), size); +@@ -1050,15 +1066,15 @@ static int unicam_buffer_prepare(struct + + static void unicam_buffer_queue(struct vb2_buffer *vb) + { +- struct unicam_device *dev = vb2_get_drv_priv(vb->vb2_queue); ++ struct unicam_node *node = vb2_get_drv_priv(vb->vb2_queue); + struct unicam_buffer *buf = container_of(vb, struct unicam_buffer, + vb.vb2_buf); +- struct unicam_dmaqueue *dma_queue = &dev->dma_queue; ++ struct unicam_dmaqueue *dma_queue = &node->dma_queue; + unsigned long flags = 0; + +- spin_lock_irqsave(&dev->dma_queue_lock, flags); ++ spin_lock_irqsave(&node->dma_queue_lock, flags); + list_add_tail(&buf->list, &dma_queue->active); +- spin_unlock_irqrestore(&dev->dma_queue_lock, flags); ++ spin_unlock_irqrestore(&node->dma_queue_lock, flags); + } + + static void unicam_set_packing_config(struct unicam_device *dev) +@@ -1066,11 +1082,12 
@@ static void unicam_set_packing_config(st + int pack, unpack; + u32 val; + +- if (dev->v_fmt.fmt.pix.pixelformat == dev->fmt->fourcc) { ++ if (dev->node[0].v_fmt.fmt.pix.pixelformat == ++ dev->node[0].fmt->fourcc) { + unpack = UNICAM_PUM_NONE; + pack = UNICAM_PPM_NONE; + } else { +- switch (dev->fmt->depth) { ++ switch (dev->node[0].fmt->depth) { + case 8: + unpack = UNICAM_PUM_UNPACK8; + break; +@@ -1108,17 +1125,17 @@ static void unicam_cfg_image_id(struct u + if (dev->bus_type == V4L2_MBUS_CSI2_DPHY) { + /* CSI2 mode */ + reg_write(cfg, UNICAM_IDI0, +- (dev->virtual_channel << 6) | dev->fmt->csi_dt); ++ (dev->virtual_channel << 6) | dev->node[0].fmt->csi_dt); + } else { + /* CCP2 mode */ +- reg_write(cfg, UNICAM_IDI0, (0x80 | dev->fmt->csi_dt)); ++ reg_write(cfg, UNICAM_IDI0, (0x80 | dev->node[0].fmt->csi_dt)); + } + } + + static void unicam_start_rx(struct unicam_device *dev, unsigned long addr) + { + struct unicam_cfg *cfg = &dev->cfg; +- int line_int_freq = dev->v_fmt.fmt.pix.height >> 2; ++ int line_int_freq = dev->node[0].v_fmt.fmt.pix.height >> 2; + unsigned int i; + u32 val; + +@@ -1266,7 +1283,8 @@ static void unicam_start_rx(struct unica + reg_write(cfg, UNICAM_DAT3, val); + } + +- reg_write(&dev->cfg, UNICAM_IBLS, dev->v_fmt.fmt.pix.bytesperline); ++ reg_write(&dev->cfg, UNICAM_IBLS, ++ dev->node[0].v_fmt.fmt.pix.bytesperline); + unicam_wr_dma_addr(dev, addr); + unicam_set_packing_config(dev); + unicam_cfg_image_id(dev); +@@ -1327,21 +1345,22 @@ static void unicam_disable(struct unicam + + static int unicam_start_streaming(struct vb2_queue *vq, unsigned int count) + { +- struct unicam_device *dev = vb2_get_drv_priv(vq); +- struct unicam_dmaqueue *dma_q = &dev->dma_queue; ++ struct unicam_node *node = vb2_get_drv_priv(vq); ++ struct unicam_device *dev = node->dev; ++ struct unicam_dmaqueue *dma_q = &node->dma_queue; + struct unicam_buffer *buf, *tmp; + unsigned long addr = 0; + unsigned long flags; + int ret; + +- spin_lock_irqsave(&dev->dma_queue_lock, flags); ++ spin_lock_irqsave(&node->dma_queue_lock, flags); + buf = list_entry(dma_q->active.next, struct unicam_buffer, list); +- dev->cur_frm = buf; +- dev->next_frm = buf; ++ node->cur_frm = buf; ++ node->next_frm = buf; + list_del(&buf->list); +- spin_unlock_irqrestore(&dev->dma_queue_lock, flags); ++ spin_unlock_irqrestore(&node->dma_queue_lock, flags); + +- addr = vb2_dma_contig_plane_dma_addr(&dev->cur_frm->vb.vb2_buf, 0); ++ addr = vb2_dma_contig_plane_dma_addr(&node->cur_frm->vb.vb2_buf, 0); + dev->sequence = 0; + + ret = unicam_runtime_get(dev); +@@ -1411,20 +1430,21 @@ err_release_buffers: + list_del(&buf->list); + vb2_buffer_done(&buf->vb.vb2_buf, VB2_BUF_STATE_QUEUED); + } +- if (dev->cur_frm != dev->next_frm) +- vb2_buffer_done(&dev->next_frm->vb.vb2_buf, ++ if (node->cur_frm != node->next_frm) ++ vb2_buffer_done(&node->next_frm->vb.vb2_buf, + VB2_BUF_STATE_QUEUED); +- vb2_buffer_done(&dev->cur_frm->vb.vb2_buf, VB2_BUF_STATE_QUEUED); +- dev->next_frm = NULL; +- dev->cur_frm = NULL; ++ vb2_buffer_done(&node->cur_frm->vb.vb2_buf, VB2_BUF_STATE_QUEUED); ++ node->next_frm = NULL; ++ node->cur_frm = NULL; + + return ret; + } + + static void unicam_stop_streaming(struct vb2_queue *vq) + { +- struct unicam_device *dev = vb2_get_drv_priv(vq); +- struct unicam_dmaqueue *dma_q = &dev->dma_queue; ++ struct unicam_node *node = vb2_get_drv_priv(vq); ++ struct unicam_device *dev = node->dev; ++ struct unicam_dmaqueue *dma_q = &node->dma_queue; + struct unicam_buffer *buf, *tmp; + unsigned long flags; + +@@ -1434,22 +1454,24 @@ 
static void unicam_stop_streaming(struct + unicam_disable(dev); + + /* Release all active buffers */ +- spin_lock_irqsave(&dev->dma_queue_lock, flags); ++ spin_lock_irqsave(&node->dma_queue_lock, flags); + list_for_each_entry_safe(buf, tmp, &dma_q->active, list) { + list_del(&buf->list); + vb2_buffer_done(&buf->vb.vb2_buf, VB2_BUF_STATE_ERROR); + } + +- if (dev->cur_frm == dev->next_frm) { +- vb2_buffer_done(&dev->cur_frm->vb.vb2_buf, VB2_BUF_STATE_ERROR); ++ if (node->cur_frm == node->next_frm) { ++ vb2_buffer_done(&node->cur_frm->vb.vb2_buf, ++ VB2_BUF_STATE_ERROR); + } else { +- vb2_buffer_done(&dev->cur_frm->vb.vb2_buf, VB2_BUF_STATE_ERROR); +- vb2_buffer_done(&dev->next_frm->vb.vb2_buf, ++ vb2_buffer_done(&node->cur_frm->vb.vb2_buf, ++ VB2_BUF_STATE_ERROR); ++ vb2_buffer_done(&node->next_frm->vb.vb2_buf, + VB2_BUF_STATE_ERROR); + } +- dev->cur_frm = NULL; +- dev->next_frm = NULL; +- spin_unlock_irqrestore(&dev->dma_queue_lock, flags); ++ node->cur_frm = NULL; ++ node->next_frm = NULL; ++ spin_unlock_irqrestore(&node->dma_queue_lock, flags); + + clk_disable_unprepare(dev->clock); + unicam_runtime_put(dev); +@@ -1458,7 +1480,8 @@ static void unicam_stop_streaming(struct + static int unicam_enum_input(struct file *file, void *priv, + struct v4l2_input *inp) + { +- struct unicam_device *dev = video_drvdata(file); ++ struct unicam_node *node = video_drvdata(file); ++ struct unicam_device *dev = node->dev; + + if (inp->index != 0) + return -EINVAL; +@@ -1506,21 +1529,24 @@ static int unicam_s_input(struct file *f + static int unicam_querystd(struct file *file, void *priv, + v4l2_std_id *std) + { +- struct unicam_device *dev = video_drvdata(file); ++ struct unicam_node *node = video_drvdata(file); ++ struct unicam_device *dev = node->dev; + + return v4l2_subdev_call(dev->sensor, video, querystd, std); + } + + static int unicam_g_std(struct file *file, void *priv, v4l2_std_id *std) + { +- struct unicam_device *dev = video_drvdata(file); ++ struct unicam_node *node = video_drvdata(file); ++ struct unicam_device *dev = node->dev; + + return v4l2_subdev_call(dev->sensor, video, g_std, std); + } + + static int unicam_s_std(struct file *file, void *priv, v4l2_std_id std) + { +- struct unicam_device *dev = video_drvdata(file); ++ struct unicam_node *node = video_drvdata(file); ++ struct unicam_device *dev = node->dev; + int ret; + v4l2_std_id current_std; + +@@ -1531,29 +1557,31 @@ static int unicam_s_std(struct file *fil + if (std == current_std) + return 0; + +- if (vb2_is_busy(&dev->buffer_queue)) ++ if (vb2_is_busy(&node->buffer_queue)) + return -EBUSY; + + ret = v4l2_subdev_call(dev->sensor, video, s_std, std); + + /* Force recomputation of bytesperline */ +- dev->v_fmt.fmt.pix.bytesperline = 0; ++ node->v_fmt.fmt.pix.bytesperline = 0; + +- unicam_reset_format(dev); ++ unicam_reset_format(node); + + return ret; + } + + static int unicam_s_edid(struct file *file, void *priv, struct v4l2_edid *edid) + { +- struct unicam_device *dev = video_drvdata(file); ++ struct unicam_node *node = video_drvdata(file); ++ struct unicam_device *dev = node->dev; + + return v4l2_subdev_call(dev->sensor, pad, set_edid, edid); + } + + static int unicam_g_edid(struct file *file, void *priv, struct v4l2_edid *edid) + { +- struct unicam_device *dev = video_drvdata(file); ++ struct unicam_node *node = video_drvdata(file); ++ struct unicam_device *dev = node->dev; + + return v4l2_subdev_call(dev->sensor, pad, get_edid, edid); + } +@@ -1561,7 +1589,8 @@ static int unicam_g_edid(struct file *fi + static int 
unicam_enum_framesizes(struct file *file, void *priv, + struct v4l2_frmsizeenum *fsize) + { +- struct unicam_device *dev = video_drvdata(file); ++ struct unicam_node *node = video_drvdata(file); ++ struct unicam_device *dev = node->dev; + const struct unicam_fmt *fmt; + struct v4l2_subdev_frame_size_enum fse; + int ret; +@@ -1596,7 +1625,8 @@ static int unicam_enum_framesizes(struct + static int unicam_enum_frameintervals(struct file *file, void *priv, + struct v4l2_frmivalenum *fival) + { +- struct unicam_device *dev = video_drvdata(file); ++ struct unicam_node *node = video_drvdata(file); ++ struct unicam_device *dev = node->dev; + const struct unicam_fmt *fmt; + struct v4l2_subdev_frame_interval_enum fie = { + .index = fival->index, +@@ -1624,14 +1654,16 @@ static int unicam_enum_frameintervals(st + + static int unicam_g_parm(struct file *file, void *fh, struct v4l2_streamparm *a) + { +- struct unicam_device *dev = video_drvdata(file); ++ struct unicam_node *node = video_drvdata(file); ++ struct unicam_device *dev = node->dev; + + return v4l2_g_parm_cap(video_devdata(file), dev->sensor, a); + } + + static int unicam_s_parm(struct file *file, void *fh, struct v4l2_streamparm *a) + { +- struct unicam_device *dev = video_drvdata(file); ++ struct unicam_node *node = video_drvdata(file); ++ struct unicam_device *dev = node->dev; + + return v4l2_s_parm_cap(video_devdata(file), dev->sensor, a); + } +@@ -1639,7 +1671,8 @@ static int unicam_s_parm(struct file *fi + static int unicam_g_dv_timings(struct file *file, void *priv, + struct v4l2_dv_timings *timings) + { +- struct unicam_device *dev = video_drvdata(file); ++ struct unicam_node *node = video_drvdata(file); ++ struct unicam_device *dev = node->dev; + + return v4l2_subdev_call(dev->sensor, video, g_dv_timings, timings); + } +@@ -1647,7 +1680,8 @@ static int unicam_g_dv_timings(struct fi + static int unicam_s_dv_timings(struct file *file, void *priv, + struct v4l2_dv_timings *timings) + { +- struct unicam_device *dev = video_drvdata(file); ++ struct unicam_node *node = video_drvdata(file); ++ struct unicam_device *dev = node->dev; + struct v4l2_dv_timings current_timings; + int ret; + +@@ -1657,15 +1691,15 @@ static int unicam_s_dv_timings(struct fi + if (v4l2_match_dv_timings(timings, ¤t_timings, 0, false)) + return 0; + +- if (vb2_is_busy(&dev->buffer_queue)) ++ if (vb2_is_busy(&node->buffer_queue)) + return -EBUSY; + + ret = v4l2_subdev_call(dev->sensor, video, s_dv_timings, timings); + + /* Force recomputation of bytesperline */ +- dev->v_fmt.fmt.pix.bytesperline = 0; ++ node->v_fmt.fmt.pix.bytesperline = 0; + +- unicam_reset_format(dev); ++ unicam_reset_format(node); + + return ret; + } +@@ -1673,7 +1707,8 @@ static int unicam_s_dv_timings(struct fi + static int unicam_query_dv_timings(struct file *file, void *priv, + struct v4l2_dv_timings *timings) + { +- struct unicam_device *dev = video_drvdata(file); ++ struct unicam_node *node = video_drvdata(file); ++ struct unicam_device *dev = node->dev; + + return v4l2_subdev_call(dev->sensor, video, query_dv_timings, timings); + } +@@ -1681,7 +1716,8 @@ static int unicam_query_dv_timings(struc + static int unicam_enum_dv_timings(struct file *file, void *priv, + struct v4l2_enum_dv_timings *timings) + { +- struct unicam_device *dev = video_drvdata(file); ++ struct unicam_node *node = video_drvdata(file); ++ struct unicam_device *dev = node->dev; + + return v4l2_subdev_call(dev->sensor, pad, enum_dv_timings, timings); + } +@@ -1689,7 +1725,8 @@ static int unicam_enum_dv_timings(struct + 
static int unicam_dv_timings_cap(struct file *file, void *priv, + struct v4l2_dv_timings_cap *cap) + { +- struct unicam_device *dev = video_drvdata(file); ++ struct unicam_node *node = video_drvdata(file); ++ struct unicam_device *dev = node->dev; + + return v4l2_subdev_call(dev->sensor, pad, dv_timings_cap, cap); + } +@@ -1707,7 +1744,8 @@ static int unicam_subscribe_event(struct + + static int unicam_log_status(struct file *file, void *fh) + { +- struct unicam_device *dev = video_drvdata(file); ++ struct unicam_node *node = video_drvdata(file); ++ struct unicam_device *dev = node->dev; + struct unicam_cfg *cfg = &dev->cfg; + u32 reg; + +@@ -1716,10 +1754,10 @@ static int unicam_log_status(struct file + + unicam_info(dev, "-----Receiver status-----\n"); + unicam_info(dev, "V4L2 width/height: %ux%u\n", +- dev->v_fmt.fmt.pix.width, dev->v_fmt.fmt.pix.height); +- unicam_info(dev, "Mediabus format: %08x\n", dev->fmt->code); ++ node->v_fmt.fmt.pix.width, node->v_fmt.fmt.pix.height); ++ unicam_info(dev, "Mediabus format: %08x\n", node->fmt->code); + unicam_info(dev, "V4L2 format: %08x\n", +- dev->v_fmt.fmt.pix.pixelformat); ++ node->v_fmt.fmt.pix.pixelformat); + reg = reg_read(&dev->cfg, UNICAM_IPIPE); + unicam_info(dev, "Unpacking/packing: %u / %u\n", + get_field(reg, UNICAM_PUM_MASK), +@@ -1744,7 +1782,7 @@ static void unicam_notify(struct v4l2_su + + switch (notification) { + case V4L2_DEVICE_NOTIFY_EVENT: +- v4l2_event_queue(&dev->video_dev, arg); ++ v4l2_event_queue(&dev->node[0].video_dev, arg); + break; + default: + break; +@@ -1767,10 +1805,11 @@ static const struct vb2_ops unicam_video + */ + static int unicam_open(struct file *file) + { +- struct unicam_device *dev = video_drvdata(file); ++ struct unicam_node *node = video_drvdata(file); ++ struct unicam_device *dev = node->dev; + int ret; + +- mutex_lock(&dev->lock); ++ mutex_lock(&node->lock); + + ret = v4l2_fh_open(file); + if (ret) { +@@ -1790,18 +1829,19 @@ static int unicam_open(struct file *file + ret = 0; + + unlock: +- mutex_unlock(&dev->lock); ++ mutex_unlock(&node->lock); + return ret; + } + + static int unicam_release(struct file *file) + { +- struct unicam_device *dev = video_drvdata(file); ++ struct unicam_node *node = video_drvdata(file); ++ struct unicam_device *dev = node->dev; + struct v4l2_subdev *sd = dev->sensor; + bool fh_singular; + int ret; + +- mutex_lock(&dev->lock); ++ mutex_lock(&node->lock); + + fh_singular = v4l2_fh_is_singular_file(file); + +@@ -1810,7 +1850,7 @@ static int unicam_release(struct file *f + if (fh_singular) + v4l2_subdev_call(sd, core, s_power, 0); + +- mutex_unlock(&dev->lock); ++ mutex_unlock(&node->lock); + + return ret; + } +@@ -1892,7 +1932,8 @@ unicam_async_bound(struct v4l2_async_not + return 0; + } + +-static int unicam_probe_complete(struct unicam_device *unicam) ++static int register_node(struct unicam_device *unicam, struct unicam_node *node, ++ enum v4l2_buf_type type, int pad_id) + { + struct video_device *vdev; + struct vb2_queue *q; +@@ -1900,15 +1941,7 @@ static int unicam_probe_complete(struct + const struct unicam_fmt *fmt; + int ret; + +- v4l2_set_subdev_hostdata(unicam->sensor, unicam); +- +- unicam->v4l2_dev.notify = unicam_notify; +- +- unicam->sensor_config = v4l2_subdev_alloc_pad_config(unicam->sensor); +- if (!unicam->sensor_config) +- return -ENOMEM; +- +- ret = __subdev_get_format(unicam, &mbus_fmt); ++ ret = __subdev_get_format(unicam, &mbus_fmt, pad_id); + if (ret) { + unicam_err(unicam, "Failed to get_format - ret %d\n", ret); + return ret; +@@ -1938,14 
+1971,15 @@ static int unicam_probe_complete(struct + return -EINVAL; + } + +- unicam->fmt = fmt; ++ node->pad_id = pad_id; ++ node->fmt = fmt; + if (fmt->fourcc) +- unicam->v_fmt.fmt.pix.pixelformat = fmt->fourcc; ++ node->v_fmt.fmt.pix.pixelformat = fmt->fourcc; + else +- unicam->v_fmt.fmt.pix.pixelformat = fmt->repacked_fourcc; ++ node->v_fmt.fmt.pix.pixelformat = fmt->repacked_fourcc; + + /* Read current subdev format */ +- unicam_reset_format(unicam); ++ unicam_reset_format(node); + + if (v4l2_subdev_has_op(unicam->sensor, video, s_std)) { + v4l2_std_id tvnorms; +@@ -1962,27 +1996,30 @@ static int unicam_probe_complete(struct + g_tvnorms, &tvnorms); + if (WARN_ON(ret)) + return -EINVAL; +- unicam->video_dev.tvnorms |= tvnorms; ++ node->video_dev.tvnorms |= tvnorms; + } + +- spin_lock_init(&unicam->dma_queue_lock); +- mutex_init(&unicam->lock); ++ spin_lock_init(&node->dma_queue_lock); ++ mutex_init(&node->lock); + +- /* Add controls from the subdevice */ +- ret = v4l2_ctrl_add_handler(&unicam->ctrl_handler, +- unicam->sensor->ctrl_handler, NULL, true); +- if (ret < 0) +- return ret; ++ if (type == V4L2_BUF_TYPE_VIDEO_CAPTURE) { ++ /* Add controls from the subdevice */ ++ ret = v4l2_ctrl_add_handler(&node->ctrl_handler, ++ unicam->sensor->ctrl_handler, NULL, ++ true); ++ if (ret < 0) ++ return ret; ++ } + +- q = &unicam->buffer_queue; +- q->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; ++ q = &node->buffer_queue; ++ q->type = type; + q->io_modes = VB2_MMAP | VB2_DMABUF | VB2_READ; +- q->drv_priv = unicam; ++ q->drv_priv = node; + q->ops = &unicam_video_qops; + q->mem_ops = &vb2_dma_contig_memops; + q->buf_struct_size = sizeof(struct unicam_buffer); + q->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC; +- q->lock = &unicam->lock; ++ q->lock = &node->lock; + q->min_buffers_needed = 2; + q->dev = &unicam->pdev->dev; + +@@ -1992,9 +2029,9 @@ static int unicam_probe_complete(struct + return ret; + } + +- INIT_LIST_HEAD(&unicam->dma_queue.active); ++ INIT_LIST_HEAD(&node->dma_queue.active); + +- vdev = &unicam->video_dev; ++ vdev = &node->video_dev; + strlcpy(vdev->name, UNICAM_MODULE_NAME, sizeof(vdev->name)); + vdev->release = video_device_release_empty; + vdev->fops = &unicam_fops; +@@ -2002,69 +2039,113 @@ static int unicam_probe_complete(struct + vdev->v4l2_dev = &unicam->v4l2_dev; + vdev->vfl_dir = VFL_DIR_RX; + vdev->queue = q; +- vdev->lock = &unicam->lock; ++ vdev->lock = &node->lock; + vdev->device_caps = V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_STREAMING | + V4L2_CAP_READWRITE; +- + /* If the source has no controls then remove our ctrl handler. 
*/ +- if (list_empty(&unicam->ctrl_handler.ctrls)) ++ if (list_empty(&node->ctrl_handler.ctrls)) + unicam->v4l2_dev.ctrl_handler = NULL; + +- video_set_drvdata(vdev, unicam); ++ node->dev = unicam; ++ video_set_drvdata(vdev, node); + vdev->entity.flags |= MEDIA_ENT_FL_DEFAULT; + + if (!v4l2_subdev_has_op(unicam->sensor, video, s_std)) { +- v4l2_disable_ioctl(&unicam->video_dev, VIDIOC_S_STD); +- v4l2_disable_ioctl(&unicam->video_dev, VIDIOC_G_STD); +- v4l2_disable_ioctl(&unicam->video_dev, VIDIOC_ENUMSTD); ++ v4l2_disable_ioctl(&node->video_dev, VIDIOC_S_STD); ++ v4l2_disable_ioctl(&node->video_dev, VIDIOC_G_STD); ++ v4l2_disable_ioctl(&node->video_dev, VIDIOC_ENUMSTD); + } + if (!v4l2_subdev_has_op(unicam->sensor, video, querystd)) +- v4l2_disable_ioctl(&unicam->video_dev, VIDIOC_QUERYSTD); ++ v4l2_disable_ioctl(&node->video_dev, VIDIOC_QUERYSTD); + if (!v4l2_subdev_has_op(unicam->sensor, video, s_dv_timings)) { +- v4l2_disable_ioctl(&unicam->video_dev, VIDIOC_S_EDID); +- v4l2_disable_ioctl(&unicam->video_dev, VIDIOC_G_EDID); +- v4l2_disable_ioctl(&unicam->video_dev, VIDIOC_DV_TIMINGS_CAP); +- v4l2_disable_ioctl(&unicam->video_dev, VIDIOC_G_DV_TIMINGS); +- v4l2_disable_ioctl(&unicam->video_dev, VIDIOC_S_DV_TIMINGS); +- v4l2_disable_ioctl(&unicam->video_dev, VIDIOC_ENUM_DV_TIMINGS); +- v4l2_disable_ioctl(&unicam->video_dev, VIDIOC_QUERY_DV_TIMINGS); ++ v4l2_disable_ioctl(&node->video_dev, VIDIOC_S_EDID); ++ v4l2_disable_ioctl(&node->video_dev, VIDIOC_G_EDID); ++ v4l2_disable_ioctl(&node->video_dev, VIDIOC_DV_TIMINGS_CAP); ++ v4l2_disable_ioctl(&node->video_dev, VIDIOC_G_DV_TIMINGS); ++ v4l2_disable_ioctl(&node->video_dev, VIDIOC_S_DV_TIMINGS); ++ v4l2_disable_ioctl(&node->video_dev, VIDIOC_ENUM_DV_TIMINGS); ++ v4l2_disable_ioctl(&node->video_dev, VIDIOC_QUERY_DV_TIMINGS); + } + if (!v4l2_subdev_has_op(unicam->sensor, pad, enum_frame_interval)) +- v4l2_disable_ioctl(&unicam->video_dev, ++ v4l2_disable_ioctl(&node->video_dev, + VIDIOC_ENUM_FRAMEINTERVALS); + if (!v4l2_subdev_has_op(unicam->sensor, video, g_frame_interval)) +- v4l2_disable_ioctl(&unicam->video_dev, VIDIOC_G_PARM); ++ v4l2_disable_ioctl(&node->video_dev, VIDIOC_G_PARM); + if (!v4l2_subdev_has_op(unicam->sensor, video, s_frame_interval)) +- v4l2_disable_ioctl(&unicam->video_dev, VIDIOC_S_PARM); ++ v4l2_disable_ioctl(&node->video_dev, VIDIOC_S_PARM); + + if (!v4l2_subdev_has_op(unicam->sensor, pad, enum_frame_size)) +- v4l2_disable_ioctl(&unicam->video_dev, VIDIOC_ENUM_FRAMESIZES); ++ v4l2_disable_ioctl(&node->video_dev, VIDIOC_ENUM_FRAMESIZES); + + ret = video_register_device(vdev, VFL_TYPE_GRABBER, -1); + if (ret) { + unicam_err(unicam, "Unable to register video device.\n"); + return ret; + } ++ node->registered = true; + +- ret = v4l2_device_register_ro_subdev_nodes(&unicam->v4l2_dev); ++ ret = media_create_pad_link(&unicam->sensor->entity, ++ 0, &node->video_dev.entity, 0, ++ MEDIA_LNK_FL_ENABLED | ++ MEDIA_LNK_FL_IMMUTABLE); ++ if (ret) ++ unicam_err(unicam, "Unable to create pad links.\n"); ++ ++ return ret; ++} ++ ++static void unregister_nodes(struct unicam_device *unicam) ++{ ++ if (unicam->node[0].registered) { ++ video_unregister_device(&unicam->node[0].video_dev); ++ unicam->node[0].registered = false; ++ } ++ if (unicam->node[1].registered) { ++ video_unregister_device(&unicam->node[1].video_dev); ++ unicam->node[1].registered = false; ++ } ++} ++ ++static int unicam_probe_complete(struct unicam_device *unicam) ++{ ++ int ret; ++ ++ v4l2_set_subdev_hostdata(unicam->sensor, unicam); ++ ++ unicam->v4l2_dev.notify = 
unicam_notify; ++ ++ unicam->sensor_config = v4l2_subdev_alloc_pad_config(unicam->sensor); ++ if (!unicam->sensor_config) ++ return -ENOMEM; ++ ++ ret = register_node(unicam, &unicam->node[0], ++ V4L2_BUF_TYPE_VIDEO_CAPTURE, 0); + if (ret) { +- unicam_err(unicam, +- "Unable to register subdev nodes.\n"); +- video_unregister_device(&unicam->video_dev); +- return ret; ++ unicam_err(unicam, "Unable to register subdev node 0.\n"); ++ goto unregister; ++ } ++ if (unicam->sensor->entity.num_pads >= 2) { ++ ret = register_node(unicam, &unicam->node[1], ++ V4L2_BUF_TYPE_META_CAPTURE, 1); ++ if (ret) { ++ unicam_err(unicam, ++ "Unable to register subdev node 1.\n"); ++ goto unregister; ++ } + } + +- ret = media_create_pad_link(&unicam->sensor->entity, 0, +- &unicam->video_dev.entity, 0, +- MEDIA_LNK_FL_ENABLED | +- MEDIA_LNK_FL_IMMUTABLE); ++ ret = v4l2_device_register_ro_subdev_nodes(&unicam->v4l2_dev); + if (ret) { +- unicam_err(unicam, "Unable to create pad links.\n"); +- video_unregister_device(&unicam->video_dev); +- return ret; ++ unicam_err(unicam, "Unable to register subdev nodes.\n"); ++ goto unregister; + } + + return 0; ++ ++unregister: ++ unregister_nodes(unicam); ++ ++ return ret; + } + + static int unicam_async_complete(struct v4l2_async_notifier *notifier) +@@ -2274,7 +2355,8 @@ static int unicam_probe(struct platform_ + pdev->dev.driver->name, dev_name(&pdev->dev)); + unicam->mdev.hw_revision = 1; + +- media_entity_pads_init(&unicam->video_dev.entity, 1, &unicam->pad); ++ media_entity_pads_init(&unicam->node[0].video_dev.entity, 1, ++ &unicam->node[0].pad); + media_device_init(&unicam->mdev); + + unicam->v4l2_dev.mdev = &unicam->mdev; +@@ -2294,7 +2376,7 @@ static int unicam_probe(struct platform_ + } + + /* Reserve space for the controls */ +- hdl = &unicam->ctrl_handler; ++ hdl = &unicam->node[0].ctrl_handler; + ret = v4l2_ctrl_handler_init(hdl, 16); + if (ret < 0) + goto media_unregister; +@@ -2335,9 +2417,9 @@ static int unicam_remove(struct platform + pm_runtime_disable(&pdev->dev); + + v4l2_async_notifier_unregister(&unicam->notifier); +- v4l2_ctrl_handler_free(&unicam->ctrl_handler); ++ v4l2_ctrl_handler_free(&unicam->node[0].ctrl_handler); + v4l2_device_unregister(&unicam->v4l2_dev); +- video_unregister_device(&unicam->video_dev); ++ unregister_nodes(unicam); + if (unicam->sensor_config) + v4l2_subdev_free_pad_config(unicam->sensor_config); + media_device_unregister(&unicam->mdev); |
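The patch body above is easier to follow once the shape of the refactor is clear: per-video-node state (buffer queue, current/next frame pointers, format, locks, the video device itself) moves out of `struct unicam_device` into the new `struct unicam_node`, and the device structure instead carries an array of up to `MAX_NODES` nodes, each holding a back-pointer to its parent. Below is a minimal, self-contained C sketch of that ownership pattern; the type and field names are simplified stand-ins for illustration, not the actual kernel structures.

```c
#include <stdio.h>

#define MAX_NODES 2 /* image node + (future) embedded-data node */

struct device; /* forward declaration for the back-pointer */

/* Per-node state: one instance per video device node. */
struct node {
	int pad_id;         /* which sensor pad feeds this node */
	int registered;     /* has this node been registered? */
	struct device *dev; /* back-pointer to the shared parent state */
};

/* Device-wide state shared by all nodes. */
struct device {
	int streaming;
	struct node node[MAX_NODES];
};

int main(void)
{
	struct device dev = { .streaming = 0 };

	/* Set up each node and give it a way back to the parent. */
	for (int i = 0; i < MAX_NODES; i++) {
		dev.node[i].pad_id = i;
		dev.node[i].dev = &dev;
		dev.node[i].registered = 1;
	}

	/* A node-level handler can still reach device-wide state. */
	printf("node 1: pad %d, parent streaming=%d\n",
	       dev.node[1].pad_id, dev.node[1].dev->streaming);
	return 0;
}
```

In the driver itself the same idea appears as `struct unicam_node` carrying `struct unicam_device *dev;` and `struct unicam_device` ending in `struct unicam_node node[MAX_NODES];`, which is why most file handlers in the hunks above now obtain a node from `video_drvdata(file)` and reach shared state through `node->dev`.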