Diffstat (limited to 'drivers/media/platform')
-rw-r--r--  drivers/media/platform/am437x/am437x-vpfe.c  32
-rw-r--r--  drivers/media/platform/blackfin/bfin_capture.c  40
-rw-r--r--  drivers/media/platform/davinci/vpfe_capture.c  19
-rw-r--r--  drivers/media/platform/m2m-deinterlace.c  1
-rw-r--r--  drivers/media/platform/marvell-ccic/cafe-driver.c  13
-rw-r--r--  drivers/media/platform/marvell-ccic/mcam-core.c  471
-rw-r--r--  drivers/media/platform/marvell-ccic/mcam-core.h  3
-rw-r--r--  drivers/media/platform/marvell-ccic/mmp-driver.c  1
-rw-r--r--  drivers/media/platform/s3c-camif/camif-capture.c  13
-rw-r--r--  drivers/media/platform/s5p-mfc/s5p_mfc.c  2
-rw-r--r--  drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c  2
-rw-r--r--  drivers/media/platform/s5p-tv/hdmi_drv.c  12
-rw-r--r--  drivers/media/platform/s5p-tv/mixer_drv.c  15
-rw-r--r--  drivers/media/platform/s5p-tv/sdo_drv.c  14
-rw-r--r--  drivers/media/platform/sh_vou.c  61
-rw-r--r--  drivers/media/platform/soc_camera/atmel-isi.c  74
-rw-r--r--  drivers/media/platform/soc_camera/mx2_camera.c  113
-rw-r--r--  drivers/media/platform/soc_camera/mx3_camera.c  105
-rw-r--r--  drivers/media/platform/soc_camera/omap1_camera.c  106
-rw-r--r--  drivers/media/platform/soc_camera/pxa_camera.c  99
-rw-r--r--  drivers/media/platform/soc_camera/rcar_vin.c  109
-rw-r--r--  drivers/media/platform/soc_camera/sh_mobile_ceu_camera.c  115
-rw-r--r--  drivers/media/platform/soc_camera/sh_mobile_csi2.c  35
-rw-r--r--  drivers/media/platform/soc_camera/soc_camera.c  30
-rw-r--r--  drivers/media/platform/soc_camera/soc_camera_platform.c  24
-rw-r--r--  drivers/media/platform/soc_camera/soc_scale_crop.c  37
-rw-r--r--  drivers/media/platform/via-camera.c  19
-rw-r--r--  drivers/media/platform/vim2m.c  4
-rw-r--r--  drivers/media/platform/vivid/vivid-core.c  13
-rw-r--r--  drivers/media/platform/vivid/vivid-core.h  1
-rw-r--r--  drivers/media/platform/vivid/vivid-radio-rx.c  2
-rw-r--r--  drivers/media/platform/vivid/vivid-tpg.c  109
-rw-r--r--  drivers/media/platform/vivid/vivid-tpg.h  1
-rw-r--r--  drivers/media/platform/vivid/vivid-vid-cap.c  17
-rw-r--r--  drivers/media/platform/vivid/vivid-vid-common.c  50
-rw-r--r--  drivers/media/platform/vivid/vivid-vid-out.c  3
-rw-r--r--  drivers/media/platform/xilinx/Kconfig  2
37 files changed, 908 insertions, 859 deletions
diff --git a/drivers/media/platform/am437x/am437x-vpfe.c b/drivers/media/platform/am437x/am437x-vpfe.c
index a30cc2f7e4f1..ea971bb0d9c6 100644
--- a/drivers/media/platform/am437x/am437x-vpfe.c
+++ b/drivers/media/platform/am437x/am437x-vpfe.c
@@ -430,7 +430,7 @@ vpfe_ccdc_update_raw_params(struct vpfe_ccdc *ccdc,
struct vpfe_ccdc_config_params_raw *config_params =
&ccdc->ccdc_cfg.bayer.config_params;
- config_params = raw_params;
+ *config_params = *raw_params;
}
/*
@@ -510,7 +510,7 @@ static int vpfe_ccdc_set_params(struct vpfe_ccdc *ccdc, void __user *params)
if (!vpfe_ccdc_validate_param(ccdc, &raw_params)) {
vpfe_ccdc_update_raw_params(ccdc, &raw_params);
- return 0;
+ return 0;
}
return -EINVAL;
@@ -1095,7 +1095,7 @@ static int vpfe_config_ccdc_image_format(struct vpfe_device *vpfe)
* For a given standard, this functions sets up the default
* pix format & crop values in the vpfe device and ccdc. It first
* starts with defaults based values from the standard table.
- * It then checks if sub device support g_mbus_fmt and then override the
+ * It then checks if sub device supports get_fmt and then override the
* values based on that.Sets crop values to match with scan resolution
* starting at 0,0. It calls vpfe_config_ccdc_image_format() set the
* values in ccdc
@@ -1432,8 +1432,8 @@ static int __vpfe_get_format(struct vpfe_device *vpfe,
} else {
ret = v4l2_device_call_until_err(&vpfe->v4l2_dev,
sdinfo->grp_id,
- video, g_mbus_fmt,
- &mbus_fmt);
+ pad, get_fmt,
+ NULL, &fmt);
if (ret && ret != -ENOIOCTLCMD && ret != -ENODEV)
return ret;
v4l2_fill_pix_format(&format->fmt.pix, &mbus_fmt);
@@ -1455,7 +1455,6 @@ static int __vpfe_get_format(struct vpfe_device *vpfe,
static int __vpfe_set_format(struct vpfe_device *vpfe,
struct v4l2_format *format, unsigned int *bpp)
{
- struct v4l2_mbus_framefmt mbus_fmt;
struct vpfe_subdev_info *sdinfo;
struct v4l2_subdev_format fmt;
int ret;
@@ -1472,23 +1471,11 @@ static int __vpfe_set_format(struct vpfe_device *vpfe,
pix_to_mbus(vpfe, &format->fmt.pix, &fmt.format);
ret = v4l2_subdev_call(sdinfo->sd, pad, set_fmt, NULL, &fmt);
- if (ret && ret != -ENOIOCTLCMD && ret != -ENODEV)
+ if (ret)
return ret;
- if (!ret) {
- v4l2_fill_pix_format(&format->fmt.pix, &fmt.format);
- mbus_to_pix(vpfe, &fmt.format, &format->fmt.pix, bpp);
- } else {
- ret = v4l2_device_call_until_err(&vpfe->v4l2_dev,
- sdinfo->grp_id,
- video, s_mbus_fmt,
- &mbus_fmt);
- if (ret && ret != -ENOIOCTLCMD && ret != -ENODEV)
- return ret;
-
- v4l2_fill_pix_format(&format->fmt.pix, &mbus_fmt);
- mbus_to_pix(vpfe, &mbus_fmt, &format->fmt.pix, bpp);
- }
+ v4l2_fill_pix_format(&format->fmt.pix, &fmt.format);
+ mbus_to_pix(vpfe, &fmt.format, &format->fmt.pix, bpp);
format->type = vpfe->fmt.type;
@@ -1675,12 +1662,9 @@ vpfe_get_subdev_input_index(struct vpfe_device *vpfe,
int *subdev_input_index,
int app_input_index)
{
- struct vpfe_config *cfg = vpfe->cfg;
- struct vpfe_subdev_info *sdinfo;
int i, j = 0;
for (i = 0; i < ARRAY_SIZE(vpfe->cfg->asd); i++) {
- sdinfo = &cfg->sub_devs[i];
if (app_input_index < (j + 1)) {
*subdev_index = i;
*subdev_input_index = app_input_index - j;
diff --git a/drivers/media/platform/blackfin/bfin_capture.c b/drivers/media/platform/blackfin/bfin_capture.c
index 6a437f86dcdc..b7e70fb05eb8 100644
--- a/drivers/media/platform/blackfin/bfin_capture.c
+++ b/drivers/media/platform/blackfin/bfin_capture.c
@@ -156,14 +156,18 @@ static struct bcap_buffer *to_bcap_vb(struct vb2_buffer *vb)
static int bcap_init_sensor_formats(struct bcap_device *bcap_dev)
{
- u32 code;
+ struct v4l2_subdev_mbus_code_enum code = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
+ };
struct bcap_format *sf;
unsigned int num_formats = 0;
int i, j;
- while (!v4l2_subdev_call(bcap_dev->sd, video,
- enum_mbus_fmt, num_formats, &code))
+ while (!v4l2_subdev_call(bcap_dev->sd, pad,
+ enum_mbus_code, NULL, &code)) {
num_formats++;
+ code.index++;
+ }
if (!num_formats)
return -ENXIO;
@@ -172,10 +176,11 @@ static int bcap_init_sensor_formats(struct bcap_device *bcap_dev)
return -ENOMEM;
for (i = 0; i < num_formats; i++) {
- v4l2_subdev_call(bcap_dev->sd, video,
- enum_mbus_fmt, i, &code);
+ code.index = i;
+ v4l2_subdev_call(bcap_dev->sd, pad,
+ enum_mbus_code, NULL, &code);
for (j = 0; j < BCAP_MAX_FMTS; j++)
- if (code == bcap_formats[j].mbus_code)
+ if (code.code == bcap_formats[j].mbus_code)
break;
if (j == BCAP_MAX_FMTS) {
/* we don't allow this sensor working with our bridge */
@@ -597,7 +602,10 @@ static int bcap_try_format(struct bcap_device *bcap,
{
struct bcap_format *sf = bcap->sensor_formats;
struct bcap_format *fmt = NULL;
- struct v4l2_mbus_framefmt mbus_fmt;
+ struct v4l2_subdev_pad_config pad_cfg;
+ struct v4l2_subdev_format format = {
+ .which = V4L2_SUBDEV_FORMAT_TRY,
+ };
int ret, i;
for (i = 0; i < bcap->num_sensor_formats; i++) {
@@ -608,16 +616,16 @@ static int bcap_try_format(struct bcap_device *bcap,
if (i == bcap->num_sensor_formats)
fmt = &sf[0];
- v4l2_fill_mbus_format(&mbus_fmt, pixfmt, fmt->mbus_code);
- ret = v4l2_subdev_call(bcap->sd, video,
- try_mbus_fmt, &mbus_fmt);
+ v4l2_fill_mbus_format(&format.format, pixfmt, fmt->mbus_code);
+ ret = v4l2_subdev_call(bcap->sd, pad, set_fmt, &pad_cfg,
+ &format);
if (ret < 0)
return ret;
- v4l2_fill_pix_format(pixfmt, &mbus_fmt);
+ v4l2_fill_pix_format(pixfmt, &format.format);
if (bcap_fmt) {
for (i = 0; i < bcap->num_sensor_formats; i++) {
fmt = &sf[i];
- if (mbus_fmt.code == fmt->mbus_code)
+ if (format.format.code == fmt->mbus_code)
break;
}
*bcap_fmt = *fmt;
@@ -666,7 +674,9 @@ static int bcap_s_fmt_vid_cap(struct file *file, void *priv,
struct v4l2_format *fmt)
{
struct bcap_device *bcap_dev = video_drvdata(file);
- struct v4l2_mbus_framefmt mbus_fmt;
+ struct v4l2_subdev_format format = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
+ };
struct bcap_format bcap_fmt;
struct v4l2_pix_format *pixfmt = &fmt->fmt.pix;
int ret;
@@ -679,8 +689,8 @@ static int bcap_s_fmt_vid_cap(struct file *file, void *priv,
if (ret < 0)
return ret;
- v4l2_fill_mbus_format(&mbus_fmt, pixfmt, bcap_fmt.mbus_code);
- ret = v4l2_subdev_call(bcap_dev->sd, video, s_mbus_fmt, &mbus_fmt);
+ v4l2_fill_mbus_format(&format.format, pixfmt, bcap_fmt.mbus_code);
+ ret = v4l2_subdev_call(bcap_dev->sd, pad, set_fmt, NULL, &format);
if (ret < 0)
return ret;
bcap_dev->fmt = *pixfmt;
diff --git a/drivers/media/platform/davinci/vpfe_capture.c b/drivers/media/platform/davinci/vpfe_capture.c
index ccfcf3f528d3..7767e072d623 100644
--- a/drivers/media/platform/davinci/vpfe_capture.c
+++ b/drivers/media/platform/davinci/vpfe_capture.c
@@ -370,7 +370,7 @@ static int vpfe_config_ccdc_image_format(struct vpfe_device *vpfe_dev)
* For a given standard, this functions sets up the default
* pix format & crop values in the vpfe device and ccdc. It first
* starts with defaults based values from the standard table.
- * It then checks if sub device support g_mbus_fmt and then override the
+ * It then checks if sub device supports get_fmt and then override the
* values based on that.Sets crop values to match with scan resolution
* starting at 0,0. It calls vpfe_config_ccdc_image_format() set the
* values in ccdc
@@ -379,7 +379,10 @@ static int vpfe_config_image_format(struct vpfe_device *vpfe_dev,
v4l2_std_id std_id)
{
struct vpfe_subdev_info *sdinfo = vpfe_dev->current_subdev;
- struct v4l2_mbus_framefmt mbus_fmt;
+ struct v4l2_subdev_format fmt = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
+ };
+ struct v4l2_mbus_framefmt *mbus_fmt = &fmt.format;
struct v4l2_pix_format *pix = &vpfe_dev->fmt.fmt.pix;
int i, ret = 0;
@@ -413,26 +416,26 @@ static int vpfe_config_image_format(struct vpfe_device *vpfe_dev,
pix->field = V4L2_FIELD_INTERLACED;
/* assume V4L2_PIX_FMT_UYVY as default */
pix->pixelformat = V4L2_PIX_FMT_UYVY;
- v4l2_fill_mbus_format(&mbus_fmt, pix,
+ v4l2_fill_mbus_format(mbus_fmt, pix,
MEDIA_BUS_FMT_YUYV10_2X10);
} else {
pix->field = V4L2_FIELD_NONE;
/* assume V4L2_PIX_FMT_SBGGR8 */
pix->pixelformat = V4L2_PIX_FMT_SBGGR8;
- v4l2_fill_mbus_format(&mbus_fmt, pix,
+ v4l2_fill_mbus_format(mbus_fmt, pix,
MEDIA_BUS_FMT_SBGGR8_1X8);
}
- /* if sub device supports g_mbus_fmt, override the defaults */
+ /* if sub device supports get_fmt, override the defaults */
ret = v4l2_device_call_until_err(&vpfe_dev->v4l2_dev,
- sdinfo->grp_id, video, g_mbus_fmt, &mbus_fmt);
+ sdinfo->grp_id, pad, get_fmt, NULL, &fmt);
if (ret && ret != -ENOIOCTLCMD) {
v4l2_err(&vpfe_dev->v4l2_dev,
- "error in getting g_mbus_fmt from sub device\n");
+ "error in getting get_fmt from sub device\n");
return ret;
}
- v4l2_fill_pix_format(pix, &mbus_fmt);
+ v4l2_fill_pix_format(pix, mbus_fmt);
pix->bytesperline = pix->width * 2;
pix->sizeimage = pix->bytesperline * pix->height;
diff --git a/drivers/media/platform/m2m-deinterlace.c b/drivers/media/platform/m2m-deinterlace.c
index 92d954973ccf..c07f367aa436 100644
--- a/drivers/media/platform/m2m-deinterlace.c
+++ b/drivers/media/platform/m2m-deinterlace.c
@@ -1060,7 +1060,6 @@ static int deinterlace_probe(struct platform_device *pdev)
return 0;
- v4l2_m2m_release(pcdev->m2m_dev);
err_m2m:
video_unregister_device(&pcdev->vfd);
err_ctx:
diff --git a/drivers/media/platform/marvell-ccic/cafe-driver.c b/drivers/media/platform/marvell-ccic/cafe-driver.c
index 562845361246..77890bd0deab 100644
--- a/drivers/media/platform/marvell-ccic/cafe-driver.c
+++ b/drivers/media/platform/marvell-ccic/cafe-driver.c
@@ -339,17 +339,21 @@ static int cafe_smbus_setup(struct cafe_camera *cam)
adap = kzalloc(sizeof(*adap), GFP_KERNEL);
if (adap == NULL)
return -ENOMEM;
- cam->mcam.i2c_adapter = adap;
- cafe_smbus_enable_irq(cam);
adap->owner = THIS_MODULE;
adap->algo = &cafe_smbus_algo;
strcpy(adap->name, "cafe_ccic");
adap->dev.parent = &cam->pdev->dev;
i2c_set_adapdata(adap, cam);
ret = i2c_add_adapter(adap);
- if (ret)
+ if (ret) {
printk(KERN_ERR "Unable to register cafe i2c adapter\n");
- return ret;
+ kfree(adap);
+ return ret;
+ }
+
+ cam->mcam.i2c_adapter = adap;
+ cafe_smbus_enable_irq(cam);
+ return 0;
}
static void cafe_smbus_shutdown(struct cafe_camera *cam)
@@ -476,6 +480,7 @@ static int cafe_pci_probe(struct pci_dev *pdev,
mcam->plat_power_up = cafe_ctlr_power_up;
mcam->plat_power_down = cafe_ctlr_power_down;
mcam->dev = &pdev->dev;
+ snprintf(mcam->bus_info, sizeof(mcam->bus_info), "PCI:%s", pci_name(pdev));
/*
* Set the clock speed for the XO 1; I don't believe this
* driver has ever run anywhere else.
diff --git a/drivers/media/platform/marvell-ccic/mcam-core.c b/drivers/media/platform/marvell-ccic/mcam-core.c
index 110fd70c7326..ce5b0eaf0c20 100644
--- a/drivers/media/platform/marvell-ccic/mcam-core.c
+++ b/drivers/media/platform/marvell-ccic/mcam-core.c
@@ -24,6 +24,7 @@
#include <media/v4l2-device.h>
#include <media/v4l2-ioctl.h>
#include <media/v4l2-ctrls.h>
+#include <media/v4l2-event.h>
#include <media/ov7670.h>
#include <media/videobuf2-vmalloc.h>
#include <media/videobuf2-dma-contig.h>
@@ -123,24 +124,17 @@ static struct mcam_format_struct {
.planar = false,
},
{
- .desc = "YUV 4:2:2 PLANAR",
- .pixelformat = V4L2_PIX_FMT_YUV422P,
- .mbus_code = MEDIA_BUS_FMT_YUYV8_2X8,
- .bpp = 2,
- .planar = true,
- },
- {
.desc = "YUV 4:2:0 PLANAR",
.pixelformat = V4L2_PIX_FMT_YUV420,
.mbus_code = MEDIA_BUS_FMT_YUYV8_2X8,
- .bpp = 2,
+ .bpp = 1,
.planar = true,
},
{
.desc = "YVU 4:2:0 PLANAR",
.pixelformat = V4L2_PIX_FMT_YVU420,
.mbus_code = MEDIA_BUS_FMT_YUYV8_2X8,
- .bpp = 2,
+ .bpp = 1,
.planar = true,
},
{
@@ -188,6 +182,7 @@ static const struct v4l2_pix_format mcam_def_pix_format = {
.field = V4L2_FIELD_NONE,
.bytesperline = VGA_WIDTH*2,
.sizeimage = VGA_WIDTH*VGA_HEIGHT*2,
+ .colorspace = V4L2_COLORSPACE_SRGB,
};
static const u32 mcam_def_mbus_code = MEDIA_BUS_FMT_YUYV8_2X8;
@@ -204,12 +199,6 @@ struct mcam_dma_desc {
u32 segment_len;
};
-struct yuv_pointer_t {
- dma_addr_t y;
- dma_addr_t u;
- dma_addr_t v;
-};
-
/*
* Our buffer type for working with videobuf2. Note that the vb2
* developers have decreed that struct vb2_buffer must be at the
@@ -221,7 +210,6 @@ struct mcam_vb_buffer {
struct mcam_dma_desc *dma_desc; /* Descriptor virtual address */
dma_addr_t dma_desc_pa; /* Descriptor physical address */
int dma_desc_nent; /* Number of mapped descriptors */
- struct yuv_pointer_t yuv_p;
};
static inline struct mcam_vb_buffer *vb_to_mvb(struct vb2_buffer *vb)
@@ -237,6 +225,8 @@ static void mcam_buffer_done(struct mcam_camera *cam, int frame,
{
vbuf->v4l2_buf.bytesused = cam->pix_format.sizeimage;
vbuf->v4l2_buf.sequence = cam->buf_seq[frame];
+ vbuf->v4l2_buf.field = V4L2_FIELD_NONE;
+ v4l2_get_timestamp(&vbuf->v4l2_buf.timestamp);
vb2_set_plane_payload(vbuf, 0, cam->pix_format.sizeimage);
vb2_buffer_done(vbuf, VB2_BUF_STATE_DONE);
}
@@ -337,6 +327,43 @@ static void mcam_disable_mipi(struct mcam_camera *mcam)
mcam->mipi_enabled = false;
}
+static bool mcam_fmt_is_planar(__u32 pfmt)
+{
+ struct mcam_format_struct *f;
+
+ f = mcam_find_format(pfmt);
+ return f->planar;
+}
+
+static void mcam_write_yuv_bases(struct mcam_camera *cam,
+ unsigned frame, dma_addr_t base)
+{
+ struct v4l2_pix_format *fmt = &cam->pix_format;
+ u32 pixel_count = fmt->width * fmt->height;
+ dma_addr_t y, u = 0, v = 0;
+
+ y = base;
+
+ switch (fmt->pixelformat) {
+ case V4L2_PIX_FMT_YUV420:
+ u = y + pixel_count;
+ v = u + pixel_count / 4;
+ break;
+ case V4L2_PIX_FMT_YVU420:
+ v = y + pixel_count;
+ u = v + pixel_count / 4;
+ break;
+ default:
+ break;
+ }
+
+ mcam_reg_write(cam, REG_Y0BAR + frame * 4, y);
+ if (mcam_fmt_is_planar(fmt->pixelformat)) {
+ mcam_reg_write(cam, REG_U0BAR + frame * 4, u);
+ mcam_reg_write(cam, REG_V0BAR + frame * 4, v);
+ }
+}
+
/* ------------------------------------------------------------------- */
#ifdef MCAM_MODE_VMALLOC
@@ -407,15 +434,14 @@ static void mcam_free_dma_bufs(struct mcam_camera *cam)
static void mcam_ctlr_dma_vmalloc(struct mcam_camera *cam)
{
/*
- * Store the first two Y buffers (we aren't supporting
- * planar formats for now, so no UV bufs). Then either
+ * Store the first two YUV buffers. Then either
* set the third if it exists, or tell the controller
* to just use two.
*/
- mcam_reg_write(cam, REG_Y0BAR, cam->dma_handles[0]);
- mcam_reg_write(cam, REG_Y1BAR, cam->dma_handles[1]);
+ mcam_write_yuv_bases(cam, 0, cam->dma_handles[0]);
+ mcam_write_yuv_bases(cam, 1, cam->dma_handles[1]);
if (cam->nbufs > 2) {
- mcam_reg_write(cam, REG_Y2BAR, cam->dma_handles[2]);
+ mcam_write_yuv_bases(cam, 2, cam->dma_handles[2]);
mcam_reg_clear_bit(cam, REG_CTRL1, C1_TWOBUFS);
} else
mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
@@ -510,14 +536,6 @@ static inline int mcam_check_dma_buffers(struct mcam_camera *cam)
* DMA-contiguous code.
*/
-static bool mcam_fmt_is_planar(__u32 pfmt)
-{
- struct mcam_format_struct *f;
-
- f = mcam_find_format(pfmt);
- return f->planar;
-}
-
/*
* Set up a contiguous buffer for the given frame. Here also is where
* the underrun strategy is set: if there is no buffer available, reuse
@@ -529,9 +547,7 @@ static bool mcam_fmt_is_planar(__u32 pfmt)
static void mcam_set_contig_buffer(struct mcam_camera *cam, int frame)
{
struct mcam_vb_buffer *buf;
- struct v4l2_pix_format *fmt = &cam->pix_format;
dma_addr_t dma_handle;
- u32 pixel_count = fmt->width * fmt->height;
struct vb2_buffer *vb;
/*
@@ -555,32 +571,7 @@ static void mcam_set_contig_buffer(struct mcam_camera *cam, int frame)
vb = &buf->vb_buf;
dma_handle = vb2_dma_contig_plane_dma_addr(vb, 0);
- buf->yuv_p.y = dma_handle;
-
- switch (cam->pix_format.pixelformat) {
- case V4L2_PIX_FMT_YUV422P:
- buf->yuv_p.u = buf->yuv_p.y + pixel_count;
- buf->yuv_p.v = buf->yuv_p.u + pixel_count / 2;
- break;
- case V4L2_PIX_FMT_YUV420:
- buf->yuv_p.u = buf->yuv_p.y + pixel_count;
- buf->yuv_p.v = buf->yuv_p.u + pixel_count / 4;
- break;
- case V4L2_PIX_FMT_YVU420:
- buf->yuv_p.v = buf->yuv_p.y + pixel_count;
- buf->yuv_p.u = buf->yuv_p.v + pixel_count / 4;
- break;
- default:
- break;
- }
-
- mcam_reg_write(cam, frame == 0 ? REG_Y0BAR : REG_Y1BAR, buf->yuv_p.y);
- if (mcam_fmt_is_planar(fmt->pixelformat)) {
- mcam_reg_write(cam, frame == 0 ?
- REG_U0BAR : REG_U1BAR, buf->yuv_p.u);
- mcam_reg_write(cam, frame == 0 ?
- REG_V0BAR : REG_V1BAR, buf->yuv_p.v);
- }
+ mcam_write_yuv_bases(cam, frame, dma_handle);
}
/*
@@ -603,6 +594,7 @@ static void mcam_dma_contig_done(struct mcam_camera *cam, int frame)
if (!test_bit(CF_SINGLE_BUFFER, &cam->flags)) {
cam->frame_state.delivered++;
+ cam->vb_bufs[frame] = NULL;
mcam_buffer_done(cam, frame, &buf->vb_buf);
}
mcam_set_contig_buffer(cam, frame);
@@ -752,12 +744,6 @@ static void mcam_ctlr_image(struct mcam_camera *cam)
widthy = fmt->width * 2;
widthuv = 0;
break;
- case V4L2_PIX_FMT_JPEG:
- imgsz_h = (fmt->sizeimage / fmt->bytesperline) << IMGSZ_V_SHIFT;
- widthy = fmt->bytesperline;
- widthuv = 0;
- break;
- case V4L2_PIX_FMT_YUV422P:
case V4L2_PIX_FMT_YUV420:
case V4L2_PIX_FMT_YVU420:
widthy = fmt->width;
@@ -766,6 +752,7 @@ static void mcam_ctlr_image(struct mcam_camera *cam)
default:
widthy = fmt->bytesperline;
widthuv = 0;
+ break;
}
mcam_reg_write_mask(cam, REG_IMGPITCH, widthuv << 16 | widthy,
@@ -777,10 +764,6 @@ static void mcam_ctlr_image(struct mcam_camera *cam)
* Tell the controller about the image format we are using.
*/
switch (fmt->pixelformat) {
- case V4L2_PIX_FMT_YUV422P:
- mcam_reg_write_mask(cam, REG_CTRL0,
- C0_DF_YUV | C0_YUV_PLANAR | C0_YUVE_YVYU, C0_DF_MASK);
- break;
case V4L2_PIX_FMT_YUV420:
case V4L2_PIX_FMT_YVU420:
mcam_reg_write_mask(cam, REG_CTRL0,
@@ -794,10 +777,6 @@ static void mcam_ctlr_image(struct mcam_camera *cam)
mcam_reg_write_mask(cam, REG_CTRL0,
C0_DF_YUV | C0_YUV_PACKED | C0_YUVE_SWAP24, C0_DF_MASK);
break;
- case V4L2_PIX_FMT_JPEG:
- mcam_reg_write_mask(cam, REG_CTRL0,
- C0_DF_YUV | C0_YUV_PACKED | C0_YUVE_YUYV, C0_DF_MASK);
- break;
case V4L2_PIX_FMT_RGB444:
mcam_reg_write_mask(cam, REG_CTRL0,
C0_DF_RGB | C0_RGBF_444 | C0_RGB4_XRGB, C0_DF_MASK);
@@ -807,6 +786,10 @@ static void mcam_ctlr_image(struct mcam_camera *cam)
mcam_reg_write_mask(cam, REG_CTRL0,
C0_DF_RGB | C0_RGBF_565 | C0_RGB5_BGGR, C0_DF_MASK);
break;
+ case V4L2_PIX_FMT_SBGGR8:
+ mcam_reg_write_mask(cam, REG_CTRL0,
+ C0_DF_RGB | C0_RGB5_GRBG, C0_DF_MASK);
+ break;
default:
cam_err(cam, "camera: unknown format: %#x\n", fmt->pixelformat);
break;
@@ -969,7 +952,6 @@ static int mcam_cam_init(struct mcam_camera *cam)
{
int ret;
- mutex_lock(&cam->s_mutex);
if (cam->state != S_NOTREADY)
cam_warn(cam, "Cam init with device in funky state %d",
cam->state);
@@ -977,7 +959,6 @@ static int mcam_cam_init(struct mcam_camera *cam)
/* Get/set parameters? */
cam->state = S_IDLE;
mcam_ctlr_power_down(cam);
- mutex_unlock(&cam->s_mutex);
return ret;
}
@@ -998,13 +979,15 @@ static int mcam_cam_set_flip(struct mcam_camera *cam)
static int mcam_cam_configure(struct mcam_camera *cam)
{
- struct v4l2_mbus_framefmt mbus_fmt;
+ struct v4l2_subdev_format format = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
+ };
int ret;
- v4l2_fill_mbus_format(&mbus_fmt, &cam->pix_format, cam->mbus_code);
+ v4l2_fill_mbus_format(&format.format, &cam->pix_format, cam->mbus_code);
ret = sensor_call(cam, core, init, 0);
if (ret == 0)
- ret = sensor_call(cam, video, s_mbus_fmt, &mbus_fmt);
+ ret = sensor_call(cam, pad, set_fmt, NULL, &format);
/*
* OV7670 does weird things if flip is set *before* format...
*/
@@ -1073,7 +1056,9 @@ static int mcam_vb_queue_setup(struct vb2_queue *vq,
struct mcam_camera *cam = vb2_get_drv_priv(vq);
int minbufs = (cam->buffer_mode == B_DMA_contig) ? 3 : 2;
- sizes[0] = cam->pix_format.sizeimage;
+ if (fmt && fmt->fmt.pix.sizeimage < cam->pix_format.sizeimage)
+ return -EINVAL;
+ sizes[0] = fmt ? fmt->fmt.pix.sizeimage : cam->pix_format.sizeimage;
*num_planes = 1; /* Someday we have to support planar formats... */
if (*nbufs < minbufs)
*nbufs = minbufs;
@@ -1102,6 +1087,30 @@ static void mcam_vb_buf_queue(struct vb2_buffer *vb)
mcam_read_setup(cam);
}
+static void mcam_vb_requeue_bufs(struct vb2_queue *vq,
+ enum vb2_buffer_state state)
+{
+ struct mcam_camera *cam = vb2_get_drv_priv(vq);
+ struct mcam_vb_buffer *buf, *node;
+ unsigned long flags;
+ unsigned i;
+
+ spin_lock_irqsave(&cam->dev_lock, flags);
+ list_for_each_entry_safe(buf, node, &cam->buffers, queue) {
+ vb2_buffer_done(&buf->vb_buf, state);
+ list_del(&buf->queue);
+ }
+ for (i = 0; i < MAX_DMA_BUFS; i++) {
+ buf = cam->vb_bufs[i];
+
+ if (buf) {
+ vb2_buffer_done(&buf->vb_buf, state);
+ cam->vb_bufs[i] = NULL;
+ }
+ }
+ spin_unlock_irqrestore(&cam->dev_lock, flags);
+}
+
/*
* These need to be called with the mutex held from vb2
*/
@@ -1109,11 +1118,15 @@ static int mcam_vb_start_streaming(struct vb2_queue *vq, unsigned int count)
{
struct mcam_camera *cam = vb2_get_drv_priv(vq);
unsigned int frame;
+ int ret;
if (cam->state != S_IDLE) {
- INIT_LIST_HEAD(&cam->buffers);
+ mcam_vb_requeue_bufs(vq, VB2_BUF_STATE_QUEUED);
return -EINVAL;
}
+ cam->frame_state.frames = 0;
+ cam->frame_state.singles = 0;
+ cam->frame_state.delivered = 0;
cam->sequence = 0;
/*
* Videobuf2 sneakily hoards all the buffers and won't
@@ -1134,14 +1147,19 @@ static int mcam_vb_start_streaming(struct vb2_queue *vq, unsigned int count)
for (frame = 0; frame < cam->nbufs; frame++)
clear_bit(CF_FRAME_SOF0 + frame, &cam->flags);
- return mcam_read_setup(cam);
+ ret = mcam_read_setup(cam);
+ if (ret)
+ mcam_vb_requeue_bufs(vq, VB2_BUF_STATE_QUEUED);
+ return ret;
}
static void mcam_vb_stop_streaming(struct vb2_queue *vq)
{
struct mcam_camera *cam = vb2_get_drv_priv(vq);
- unsigned long flags;
+ cam_dbg(cam, "stop_streaming: %d frames, %d singles, %d delivered\n",
+ cam->frame_state.frames, cam->frame_state.singles,
+ cam->frame_state.delivered);
if (cam->state == S_BUFWAIT) {
/* They never gave us buffers */
cam->state = S_IDLE;
@@ -1160,9 +1178,7 @@ static void mcam_vb_stop_streaming(struct vb2_queue *vq)
* VB2 reclaims the buffers, so we need to forget
* about them.
*/
- spin_lock_irqsave(&cam->dev_lock, flags);
- INIT_LIST_HEAD(&cam->buffers);
- spin_unlock_irqrestore(&cam->dev_lock, flags);
+ mcam_vb_requeue_bufs(vq, VB2_BUF_STATE_ERROR);
}
@@ -1246,14 +1262,15 @@ static int mcam_setup_vb2(struct mcam_camera *cam)
vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
vq->drv_priv = cam;
vq->lock = &cam->s_mutex;
+ vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC;
+ vq->io_modes = VB2_MMAP | VB2_USERPTR | VB2_DMABUF | VB2_READ;
+ vq->buf_struct_size = sizeof(struct mcam_vb_buffer);
INIT_LIST_HEAD(&cam->buffers);
switch (cam->buffer_mode) {
case B_DMA_contig:
#ifdef MCAM_MODE_DMA_CONTIG
vq->ops = &mcam_vb2_ops;
vq->mem_ops = &vb2_dma_contig_memops;
- vq->buf_struct_size = sizeof(struct mcam_vb_buffer);
- vq->io_modes = VB2_MMAP | VB2_USERPTR;
cam->dma_setup = mcam_ctlr_dma_contig;
cam->frame_complete = mcam_dma_contig_done;
cam->vb_alloc_ctx = vb2_dma_contig_init_ctx(cam->dev);
@@ -1265,8 +1282,6 @@ static int mcam_setup_vb2(struct mcam_camera *cam)
#ifdef MCAM_MODE_DMA_SG
vq->ops = &mcam_vb2_sg_ops;
vq->mem_ops = &vb2_dma_sg_memops;
- vq->buf_struct_size = sizeof(struct mcam_vb_buffer);
- vq->io_modes = VB2_MMAP | VB2_USERPTR;
cam->dma_setup = mcam_ctlr_dma_sg;
cam->frame_complete = mcam_dma_sg_done;
cam->vb_alloc_ctx_sg = vb2_dma_sg_init_ctx(cam->dev);
@@ -1280,8 +1295,6 @@ static int mcam_setup_vb2(struct mcam_camera *cam)
(unsigned long) cam);
vq->ops = &mcam_vb2_ops;
vq->mem_ops = &vb2_vmalloc_memops;
- vq->buf_struct_size = sizeof(struct mcam_vb_buffer);
- vq->io_modes = VB2_MMAP;
cam->dma_setup = mcam_ctlr_dma_vmalloc;
cam->frame_complete = mcam_vmalloc_done;
#endif
@@ -1292,7 +1305,6 @@ static int mcam_setup_vb2(struct mcam_camera *cam)
static void mcam_cleanup_vb2(struct mcam_camera *cam)
{
- vb2_queue_release(&cam->vb_queue);
#ifdef MCAM_MODE_DMA_CONTIG
if (cam->buffer_mode == B_DMA_contig)
vb2_dma_contig_cleanup_ctx(cam->vb_alloc_ctx);
@@ -1309,86 +1321,14 @@ static void mcam_cleanup_vb2(struct mcam_camera *cam)
* The long list of V4L2 ioctl() operations.
*/
-static int mcam_vidioc_streamon(struct file *filp, void *priv,
- enum v4l2_buf_type type)
-{
- struct mcam_camera *cam = filp->private_data;
- int ret;
-
- mutex_lock(&cam->s_mutex);
- ret = vb2_streamon(&cam->vb_queue, type);
- mutex_unlock(&cam->s_mutex);
- return ret;
-}
-
-
-static int mcam_vidioc_streamoff(struct file *filp, void *priv,
- enum v4l2_buf_type type)
-{
- struct mcam_camera *cam = filp->private_data;
- int ret;
-
- mutex_lock(&cam->s_mutex);
- ret = vb2_streamoff(&cam->vb_queue, type);
- mutex_unlock(&cam->s_mutex);
- return ret;
-}
-
-
-static int mcam_vidioc_reqbufs(struct file *filp, void *priv,
- struct v4l2_requestbuffers *req)
-{
- struct mcam_camera *cam = filp->private_data;
- int ret;
-
- mutex_lock(&cam->s_mutex);
- ret = vb2_reqbufs(&cam->vb_queue, req);
- mutex_unlock(&cam->s_mutex);
- return ret;
-}
-
-
-static int mcam_vidioc_querybuf(struct file *filp, void *priv,
- struct v4l2_buffer *buf)
-{
- struct mcam_camera *cam = filp->private_data;
- int ret;
-
- mutex_lock(&cam->s_mutex);
- ret = vb2_querybuf(&cam->vb_queue, buf);
- mutex_unlock(&cam->s_mutex);
- return ret;
-}
-
-static int mcam_vidioc_qbuf(struct file *filp, void *priv,
- struct v4l2_buffer *buf)
-{
- struct mcam_camera *cam = filp->private_data;
- int ret;
-
- mutex_lock(&cam->s_mutex);
- ret = vb2_qbuf(&cam->vb_queue, buf);
- mutex_unlock(&cam->s_mutex);
- return ret;
-}
-
-static int mcam_vidioc_dqbuf(struct file *filp, void *priv,
- struct v4l2_buffer *buf)
-{
- struct mcam_camera *cam = filp->private_data;
- int ret;
-
- mutex_lock(&cam->s_mutex);
- ret = vb2_dqbuf(&cam->vb_queue, buf, filp->f_flags & O_NONBLOCK);
- mutex_unlock(&cam->s_mutex);
- return ret;
-}
-
static int mcam_vidioc_querycap(struct file *file, void *priv,
struct v4l2_capability *cap)
{
+ struct mcam_camera *cam = video_drvdata(file);
+
strcpy(cap->driver, "marvell_ccic");
strcpy(cap->card, "marvell_ccic");
+ strlcpy(cap->bus_info, cam->bus_info, sizeof(cap->bus_info));
cap->device_caps = V4L2_CAP_VIDEO_CAPTURE |
V4L2_CAP_READWRITE | V4L2_CAP_STREAMING;
cap->capabilities = cap->device_caps | V4L2_CAP_DEVICE_CAPS;
@@ -1410,36 +1350,38 @@ static int mcam_vidioc_enum_fmt_vid_cap(struct file *filp,
static int mcam_vidioc_try_fmt_vid_cap(struct file *filp, void *priv,
struct v4l2_format *fmt)
{
- struct mcam_camera *cam = priv;
+ struct mcam_camera *cam = video_drvdata(filp);
struct mcam_format_struct *f;
struct v4l2_pix_format *pix = &fmt->fmt.pix;
- struct v4l2_mbus_framefmt mbus_fmt;
+ struct v4l2_subdev_pad_config pad_cfg;
+ struct v4l2_subdev_format format = {
+ .which = V4L2_SUBDEV_FORMAT_TRY,
+ };
int ret;
f = mcam_find_format(pix->pixelformat);
pix->pixelformat = f->pixelformat;
- v4l2_fill_mbus_format(&mbus_fmt, pix, f->mbus_code);
- mutex_lock(&cam->s_mutex);
- ret = sensor_call(cam, video, try_mbus_fmt, &mbus_fmt);
- mutex_unlock(&cam->s_mutex);
- v4l2_fill_pix_format(pix, &mbus_fmt);
+ v4l2_fill_mbus_format(&format.format, pix, f->mbus_code);
+ ret = sensor_call(cam, pad, set_fmt, &pad_cfg, &format);
+ v4l2_fill_pix_format(pix, &format.format);
+ pix->bytesperline = pix->width * f->bpp;
switch (f->pixelformat) {
case V4L2_PIX_FMT_YUV420:
case V4L2_PIX_FMT_YVU420:
- pix->bytesperline = pix->width * 3 / 2;
+ pix->sizeimage = pix->height * pix->bytesperline * 3 / 2;
break;
default:
- pix->bytesperline = pix->width * f->bpp;
+ pix->sizeimage = pix->height * pix->bytesperline;
break;
}
- pix->sizeimage = pix->height * pix->bytesperline;
+ pix->colorspace = V4L2_COLORSPACE_SRGB;
return ret;
}
static int mcam_vidioc_s_fmt_vid_cap(struct file *filp, void *priv,
struct v4l2_format *fmt)
{
- struct mcam_camera *cam = priv;
+ struct mcam_camera *cam = video_drvdata(filp);
struct mcam_format_struct *f;
int ret;
@@ -1447,7 +1389,7 @@ static int mcam_vidioc_s_fmt_vid_cap(struct file *filp, void *priv,
* Can't do anything if the device is not idle
* Also can't if there are streaming buffers in place.
*/
- if (cam->state != S_IDLE || cam->vb_queue.num_buffers > 0)
+ if (cam->state != S_IDLE || vb2_is_busy(&cam->vb_queue))
return -EBUSY;
f = mcam_find_format(fmt->fmt.pix.pixelformat);
@@ -1462,7 +1404,6 @@ static int mcam_vidioc_s_fmt_vid_cap(struct file *filp, void *priv,
* Now we start to change things for real, so let's do it
* under lock.
*/
- mutex_lock(&cam->s_mutex);
cam->pix_format = fmt->fmt.pix;
cam->mbus_code = f->mbus_code;
@@ -1476,7 +1417,6 @@ static int mcam_vidioc_s_fmt_vid_cap(struct file *filp, void *priv,
}
mcam_set_config_needed(cam, 1);
out:
- mutex_unlock(&cam->s_mutex);
return ret;
}
@@ -1488,7 +1428,7 @@ out:
static int mcam_vidioc_g_fmt_vid_cap(struct file *filp, void *priv,
struct v4l2_format *f)
{
- struct mcam_camera *cam = priv;
+ struct mcam_camera *cam = video_drvdata(filp);
f->fmt.pix = cam->pix_format;
return 0;
@@ -1504,7 +1444,6 @@ static int mcam_vidioc_enum_input(struct file *filp, void *priv,
return -EINVAL;
input->type = V4L2_INPUT_TYPE_CAMERA;
- input->std = V4L2_STD_ALL; /* Not sure what should go here */
strcpy(input->name, "Camera");
return 0;
}
@@ -1522,18 +1461,6 @@ static int mcam_vidioc_s_input(struct file *filp, void *priv, unsigned int i)
return 0;
}
-/* from vivi.c */
-static int mcam_vidioc_s_std(struct file *filp, void *priv, v4l2_std_id a)
-{
- return 0;
-}
-
-static int mcam_vidioc_g_std(struct file *filp, void *priv, v4l2_std_id *a)
-{
- *a = V4L2_STD_NTSC_M;
- return 0;
-}
-
/*
* G/S_PARM. Most of this is done by the sensor, but we are
* the level which controls the number of read buffers.
@@ -1541,12 +1468,10 @@ static int mcam_vidioc_g_std(struct file *filp, void *priv, v4l2_std_id *a)
static int mcam_vidioc_g_parm(struct file *filp, void *priv,
struct v4l2_streamparm *parms)
{
- struct mcam_camera *cam = priv;
+ struct mcam_camera *cam = video_drvdata(filp);
int ret;
- mutex_lock(&cam->s_mutex);
ret = sensor_call(cam, video, g_parm, parms);
- mutex_unlock(&cam->s_mutex);
parms->parm.capture.readbuffers = n_dma_bufs;
return ret;
}
@@ -1554,12 +1479,10 @@ static int mcam_vidioc_g_parm(struct file *filp, void *priv,
static int mcam_vidioc_s_parm(struct file *filp, void *priv,
struct v4l2_streamparm *parms)
{
- struct mcam_camera *cam = priv;
+ struct mcam_camera *cam = video_drvdata(filp);
int ret;
- mutex_lock(&cam->s_mutex);
ret = sensor_call(cam, video, s_parm, parms);
- mutex_unlock(&cam->s_mutex);
parms->parm.capture.readbuffers = n_dma_bufs;
return ret;
}
@@ -1567,7 +1490,7 @@ static int mcam_vidioc_s_parm(struct file *filp, void *priv,
static int mcam_vidioc_enum_framesizes(struct file *filp, void *priv,
struct v4l2_frmsizeenum *sizes)
{
- struct mcam_camera *cam = priv;
+ struct mcam_camera *cam = video_drvdata(filp);
struct mcam_format_struct *f;
struct v4l2_subdev_frame_size_enum fse = {
.index = sizes->index,
@@ -1579,9 +1502,7 @@ static int mcam_vidioc_enum_framesizes(struct file *filp, void *priv,
if (f->pixelformat != sizes->pixel_format)
return -EINVAL;
fse.code = f->mbus_code;
- mutex_lock(&cam->s_mutex);
ret = sensor_call(cam, pad, enum_frame_size, NULL, &fse);
- mutex_unlock(&cam->s_mutex);
if (ret)
return ret;
if (fse.min_width == fse.max_width &&
@@ -1604,7 +1525,7 @@ static int mcam_vidioc_enum_framesizes(struct file *filp, void *priv,
static int mcam_vidioc_enum_frameintervals(struct file *filp, void *priv,
struct v4l2_frmivalenum *interval)
{
- struct mcam_camera *cam = priv;
+ struct mcam_camera *cam = video_drvdata(filp);
struct mcam_format_struct *f;
struct v4l2_subdev_frame_interval_enum fie = {
.index = interval->index,
@@ -1618,9 +1539,7 @@ static int mcam_vidioc_enum_frameintervals(struct file *filp, void *priv,
if (f->pixelformat != interval->pixel_format)
return -EINVAL;
fie.code = f->mbus_code;
- mutex_lock(&cam->s_mutex);
ret = sensor_call(cam, pad, enum_frame_interval, NULL, &fie);
- mutex_unlock(&cam->s_mutex);
if (ret)
return ret;
interval->type = V4L2_FRMIVAL_TYPE_DISCRETE;
@@ -1632,7 +1551,7 @@ static int mcam_vidioc_enum_frameintervals(struct file *filp, void *priv,
static int mcam_vidioc_g_register(struct file *file, void *priv,
struct v4l2_dbg_register *reg)
{
- struct mcam_camera *cam = priv;
+ struct mcam_camera *cam = video_drvdata(file);
if (reg->reg > cam->regs_size - 4)
return -EINVAL;
@@ -1644,7 +1563,7 @@ static int mcam_vidioc_g_register(struct file *file, void *priv,
static int mcam_vidioc_s_register(struct file *file, void *priv,
const struct v4l2_dbg_register *reg)
{
- struct mcam_camera *cam = priv;
+ struct mcam_camera *cam = video_drvdata(file);
if (reg->reg > cam->regs_size - 4)
return -EINVAL;
@@ -1662,18 +1581,20 @@ static const struct v4l2_ioctl_ops mcam_v4l_ioctl_ops = {
.vidioc_enum_input = mcam_vidioc_enum_input,
.vidioc_g_input = mcam_vidioc_g_input,
.vidioc_s_input = mcam_vidioc_s_input,
- .vidioc_s_std = mcam_vidioc_s_std,
- .vidioc_g_std = mcam_vidioc_g_std,
- .vidioc_reqbufs = mcam_vidioc_reqbufs,
- .vidioc_querybuf = mcam_vidioc_querybuf,
- .vidioc_qbuf = mcam_vidioc_qbuf,
- .vidioc_dqbuf = mcam_vidioc_dqbuf,
- .vidioc_streamon = mcam_vidioc_streamon,
- .vidioc_streamoff = mcam_vidioc_streamoff,
+ .vidioc_reqbufs = vb2_ioctl_reqbufs,
+ .vidioc_create_bufs = vb2_ioctl_create_bufs,
+ .vidioc_querybuf = vb2_ioctl_querybuf,
+ .vidioc_qbuf = vb2_ioctl_qbuf,
+ .vidioc_dqbuf = vb2_ioctl_dqbuf,
+ .vidioc_expbuf = vb2_ioctl_expbuf,
+ .vidioc_streamon = vb2_ioctl_streamon,
+ .vidioc_streamoff = vb2_ioctl_streamoff,
.vidioc_g_parm = mcam_vidioc_g_parm,
.vidioc_s_parm = mcam_vidioc_s_parm,
.vidioc_enum_framesizes = mcam_vidioc_enum_framesizes,
.vidioc_enum_frameintervals = mcam_vidioc_enum_frameintervals,
+ .vidioc_subscribe_event = v4l2_ctrl_subscribe_event,
+ .vidioc_unsubscribe_event = v4l2_event_unsubscribe,
#ifdef CONFIG_VIDEO_ADV_DEBUG
.vidioc_g_register = mcam_vidioc_g_register,
.vidioc_s_register = mcam_vidioc_s_register,
@@ -1687,43 +1608,36 @@ static const struct v4l2_ioctl_ops mcam_v4l_ioctl_ops = {
static int mcam_v4l_open(struct file *filp)
{
struct mcam_camera *cam = video_drvdata(filp);
- int ret = 0;
-
- filp->private_data = cam;
+ int ret;
- cam->frame_state.frames = 0;
- cam->frame_state.singles = 0;
- cam->frame_state.delivered = 0;
mutex_lock(&cam->s_mutex);
- if (cam->users == 0) {
- ret = mcam_setup_vb2(cam);
- if (ret)
- goto out;
+ ret = v4l2_fh_open(filp);
+ if (ret)
+ goto out;
+ if (v4l2_fh_is_singular_file(filp)) {
ret = mcam_ctlr_power_up(cam);
if (ret)
goto out;
__mcam_cam_reset(cam);
mcam_set_config_needed(cam, 1);
}
- (cam->users)++;
out:
mutex_unlock(&cam->s_mutex);
+ if (ret)
+ v4l2_fh_release(filp);
return ret;
}
static int mcam_v4l_release(struct file *filp)
{
- struct mcam_camera *cam = filp->private_data;
+ struct mcam_camera *cam = video_drvdata(filp);
+ bool last_open;
- cam_dbg(cam, "Release, %d frames, %d singles, %d delivered\n",
- cam->frame_state.frames, cam->frame_state.singles,
- cam->frame_state.delivered);
mutex_lock(&cam->s_mutex);
- (cam->users)--;
- if (cam->users == 0) {
- mcam_ctlr_stop_dma(cam);
- mcam_cleanup_vb2(cam);
+ last_open = v4l2_fh_is_singular_file(filp);
+ _vb2_fop_release(filp, NULL);
+ if (last_open) {
mcam_disable_mipi(cam);
mcam_ctlr_power_down(cam);
if (cam->buffer_mode == B_vmalloc && alloc_bufs_at_read)
@@ -1734,54 +1648,13 @@ static int mcam_v4l_release(struct file *filp)
return 0;
}
-static ssize_t mcam_v4l_read(struct file *filp,
- char __user *buffer, size_t len, loff_t *pos)
-{
- struct mcam_camera *cam = filp->private_data;
- int ret;
-
- mutex_lock(&cam->s_mutex);
- ret = vb2_read(&cam->vb_queue, buffer, len, pos,
- filp->f_flags & O_NONBLOCK);
- mutex_unlock(&cam->s_mutex);
- return ret;
-}
-
-
-
-static unsigned int mcam_v4l_poll(struct file *filp,
- struct poll_table_struct *pt)
-{
- struct mcam_camera *cam = filp->private_data;
- int ret;
-
- mutex_lock(&cam->s_mutex);
- ret = vb2_poll(&cam->vb_queue, filp, pt);
- mutex_unlock(&cam->s_mutex);
- return ret;
-}
-
-
-static int mcam_v4l_mmap(struct file *filp, struct vm_area_struct *vma)
-{
- struct mcam_camera *cam = filp->private_data;
- int ret;
-
- mutex_lock(&cam->s_mutex);
- ret = vb2_mmap(&cam->vb_queue, vma);
- mutex_unlock(&cam->s_mutex);
- return ret;
-}
-
-
-
static const struct v4l2_file_operations mcam_v4l_fops = {
.owner = THIS_MODULE,
.open = mcam_v4l_open,
.release = mcam_v4l_release,
- .read = mcam_v4l_read,
- .poll = mcam_v4l_poll,
- .mmap = mcam_v4l_mmap,
+ .read = vb2_fop_read,
+ .poll = vb2_fop_poll,
+ .mmap = vb2_fop_mmap,
.unlocked_ioctl = video_ioctl2,
};
@@ -1792,8 +1665,6 @@ static const struct v4l2_file_operations mcam_v4l_fops = {
*/
static struct video_device mcam_v4l_template = {
.name = "mcam",
- .tvnorms = V4L2_STD_NTSC_M,
-
.fops = &mcam_v4l_fops,
.ioctl_ops = &mcam_v4l_ioctl_ops,
.release = video_device_release_empty,
@@ -1811,7 +1682,7 @@ static void mcam_frame_complete(struct mcam_camera *cam, int frame)
set_bit(frame, &cam->flags);
clear_bit(CF_DMA_ACTIVE, &cam->flags);
cam->next_buf = frame;
- cam->buf_seq[frame] = ++(cam->sequence);
+ cam->buf_seq[frame] = cam->sequence++;
cam->frame_state.frames++;
/*
* "This should never happen"
@@ -1924,10 +1795,17 @@ int mccic_register(struct mcam_camera *cam)
mcam_set_config_needed(cam, 1);
cam->pix_format = mcam_def_pix_format;
cam->mbus_code = mcam_def_mbus_code;
- INIT_LIST_HEAD(&cam->buffers);
mcam_ctlr_init(cam);
/*
+ * Get the v4l2 setup done.
+ */
+ ret = v4l2_ctrl_handler_init(&cam->ctrl_handler, 10);
+ if (ret)
+ goto out_unregister;
+ cam->v4l2_dev.ctrl_handler = &cam->ctrl_handler;
+
+ /*
* Try to find the sensor.
*/
sensor_cfg.clock_speed = cam->clock_speed;
@@ -1943,21 +1821,22 @@ int mccic_register(struct mcam_camera *cam)
ret = mcam_cam_init(cam);
if (ret)
goto out_unregister;
- /*
- * Get the v4l2 setup done.
- */
- ret = v4l2_ctrl_handler_init(&cam->ctrl_handler, 10);
+
+ ret = mcam_setup_vb2(cam);
if (ret)
goto out_unregister;
- cam->v4l2_dev.ctrl_handler = &cam->ctrl_handler;
mutex_lock(&cam->s_mutex);
cam->vdev = mcam_v4l_template;
cam->vdev.v4l2_dev = &cam->v4l2_dev;
+ cam->vdev.lock = &cam->s_mutex;
+ cam->vdev.queue = &cam->vb_queue;
video_set_drvdata(&cam->vdev, cam);
ret = video_register_device(&cam->vdev, VFL_TYPE_GRABBER, -1);
- if (ret)
- goto out;
+ if (ret) {
+ mutex_unlock(&cam->s_mutex);
+ goto out_unregister;
+ }
/*
* If so requested, try to get our DMA buffers now.
@@ -1968,11 +1847,11 @@ int mccic_register(struct mcam_camera *cam)
" will try again later.");
}
-out:
- v4l2_ctrl_handler_free(&cam->ctrl_handler);
mutex_unlock(&cam->s_mutex);
- return ret;
+ return 0;
+
out_unregister:
+ v4l2_ctrl_handler_free(&cam->ctrl_handler);
v4l2_device_unregister(&cam->v4l2_dev);
return ret;
}
@@ -1986,11 +1865,11 @@ void mccic_shutdown(struct mcam_camera *cam)
* take it down again will wedge the machine, which is frowned
* upon.
*/
- if (cam->users > 0) {
+ if (!list_empty(&cam->vdev.fh_list)) {
cam_warn(cam, "Removing a device with users!\n");
mcam_ctlr_power_down(cam);
}
- vb2_queue_release(&cam->vb_queue);
+ mcam_cleanup_vb2(cam);
if (cam->buffer_mode == B_vmalloc)
mcam_free_dma_bufs(cam);
video_unregister_device(&cam->vdev);
@@ -2006,7 +1885,7 @@ void mccic_shutdown(struct mcam_camera *cam)
void mccic_suspend(struct mcam_camera *cam)
{
mutex_lock(&cam->s_mutex);
- if (cam->users > 0) {
+ if (!list_empty(&cam->vdev.fh_list)) {
enum mcam_state cstate = cam->state;
mcam_ctlr_stop_dma(cam);
@@ -2021,7 +1900,7 @@ int mccic_resume(struct mcam_camera *cam)
int ret = 0;
mutex_lock(&cam->s_mutex);
- if (cam->users > 0) {
+ if (!list_empty(&cam->vdev.fh_list)) {
ret = mcam_ctlr_power_up(cam);
if (ret) {
mutex_unlock(&cam->s_mutex);
diff --git a/drivers/media/platform/marvell-ccic/mcam-core.h b/drivers/media/platform/marvell-ccic/mcam-core.h
index 7ffdf4dbaf8c..97167f6ffd1e 100644
--- a/drivers/media/platform/marvell-ccic/mcam-core.h
+++ b/drivers/media/platform/marvell-ccic/mcam-core.h
@@ -146,7 +146,6 @@ struct mcam_camera {
struct v4l2_ctrl_handler ctrl_handler;
enum mcam_state state;
unsigned long flags; /* Buffer status, mainly (dev_lock) */
- int users; /* How many open FDs */
struct mcam_frame_state frame_state; /* Frame state counter */
/*
@@ -163,6 +162,8 @@ struct mcam_camera {
unsigned int nbufs; /* How many are alloc'd */
int next_buf; /* Next to consume (dev_lock) */
+ char bus_info[32]; /* querycap bus_info */
+
/* DMA buffers - vmalloc mode */
#ifdef MCAM_MODE_VMALLOC
unsigned int dma_buf_size; /* allocated size */
diff --git a/drivers/media/platform/marvell-ccic/mmp-driver.c b/drivers/media/platform/marvell-ccic/mmp-driver.c
index 0ed9b3adfcdf..b5f165a68566 100644
--- a/drivers/media/platform/marvell-ccic/mmp-driver.c
+++ b/drivers/media/platform/marvell-ccic/mmp-driver.c
@@ -371,6 +371,7 @@ static int mmpcam_probe(struct platform_device *pdev)
mcam->lane = pdata->lane;
mcam->chip_id = MCAM_ARMADA610;
mcam->buffer_mode = B_DMA_sg;
+ strlcpy(mcam->bus_info, "platform:mmp-camera", sizeof(mcam->bus_info));
spin_lock_init(&mcam->dev_lock);
/*
* Get our I/O memory.
diff --git a/drivers/media/platform/s3c-camif/camif-capture.c b/drivers/media/platform/s3c-camif/camif-capture.c
index f6a61b9ceff4..76e6289a5612 100644
--- a/drivers/media/platform/s3c-camif/camif-capture.c
+++ b/drivers/media/platform/s3c-camif/camif-capture.c
@@ -115,7 +115,7 @@ static int sensor_set_power(struct camif_dev *camif, int on)
struct cam_sensor *sensor = &camif->sensor;
int err = 0;
- if (!on == camif->sensor.power_count)
+ if (camif->sensor.power_count == !on)
err = v4l2_subdev_call(sensor->sd, core, s_power, on);
if (!err)
sensor->power_count += on ? 1 : -1;
@@ -131,7 +131,7 @@ static int sensor_set_streaming(struct camif_dev *camif, int on)
struct cam_sensor *sensor = &camif->sensor;
int err = 0;
- if (!on == camif->sensor.stream_count)
+ if (camif->sensor.stream_count == !on)
err = v4l2_subdev_call(sensor->sd, video, s_stream, on);
if (!err)
sensor->stream_count += on ? 1 : -1;
@@ -449,19 +449,22 @@ static int queue_setup(struct vb2_queue *vq, const struct v4l2_format *pfmt,
struct camif_vp *vp = vb2_get_drv_priv(vq);
struct camif_dev *camif = vp->camif;
struct camif_frame *frame = &vp->out_frame;
- const struct camif_fmt *fmt = vp->out_fmt;
+ const struct camif_fmt *fmt;
unsigned int size;
if (pfmt) {
pix = &pfmt->fmt.pix;
fmt = s3c_camif_find_format(vp, &pix->pixelformat, -1);
+ if (fmt == NULL)
+ return -EINVAL;
size = (pix->width * pix->height * fmt->depth) / 8;
} else {
+ fmt = vp->out_fmt;
+ if (fmt == NULL)
+ return -EINVAL;
size = (frame->f_width * frame->f_height * fmt->depth) / 8;
}
- if (fmt == NULL)
- return -EINVAL;
*num_planes = 1;
if (pix)
diff --git a/drivers/media/platform/s5p-mfc/s5p_mfc.c b/drivers/media/platform/s5p-mfc/s5p_mfc.c
index 8333fbc2fe96..1263d99d638e 100644
--- a/drivers/media/platform/s5p-mfc/s5p_mfc.c
+++ b/drivers/media/platform/s5p-mfc/s5p_mfc.c
@@ -1337,8 +1337,6 @@ static int s5p_mfc_runtime_resume(struct device *dev)
struct platform_device *pdev = to_platform_device(dev);
struct s5p_mfc_dev *m_dev = platform_get_drvdata(pdev);
- if (!m_dev->alloc_ctx)
- return 0;
atomic_set(&m_dev->pm.power, 1);
return 0;
}
diff --git a/drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c b/drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c
index b09bcd140491..c7adc3d26792 100644
--- a/drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c
+++ b/drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c
@@ -184,7 +184,7 @@ static int s5p_mfc_alloc_codec_buffers_v5(struct s5p_mfc_ctx *ctx)
ret = s5p_mfc_alloc_priv_buf(dev->mem_dev_r, &ctx->bank2);
if (ret) {
mfc_err("Failed to allocate Bank2 temporary buffer\n");
- s5p_mfc_release_priv_buf(ctx->dev->mem_dev_l, &ctx->bank1);
+ s5p_mfc_release_priv_buf(ctx->dev->mem_dev_l, &ctx->bank1);
return ret;
}
BUG_ON(ctx->bank2.dma & ((1 << MFC_BANK2_ALIGN_ORDER) - 1));
diff --git a/drivers/media/platform/s5p-tv/hdmi_drv.c b/drivers/media/platform/s5p-tv/hdmi_drv.c
index 0e74aabf5f9a..618ecd1d5b27 100644
--- a/drivers/media/platform/s5p-tv/hdmi_drv.c
+++ b/drivers/media/platform/s5p-tv/hdmi_drv.c
@@ -648,15 +648,20 @@ static int hdmi_g_dv_timings(struct v4l2_subdev *sd,
return 0;
}
-static int hdmi_g_mbus_fmt(struct v4l2_subdev *sd,
- struct v4l2_mbus_framefmt *fmt)
+static int hdmi_get_fmt(struct v4l2_subdev *sd,
+ struct v4l2_subdev_pad_config *cfg,
+ struct v4l2_subdev_format *format)
{
+ struct v4l2_mbus_framefmt *fmt = &format->format;
struct hdmi_device *hdev = sd_to_hdmi_dev(sd);
const struct hdmi_timings *t = hdev->cur_conf;
dev_dbg(hdev->dev, "%s\n", __func__);
if (!hdev->cur_conf)
return -EINVAL;
+ if (format->pad)
+ return -EINVAL;
+
memset(fmt, 0, sizeof(*fmt));
fmt->width = t->hact.end - t->hact.beg;
fmt->height = t->vact[0].end - t->vact[0].beg;
@@ -712,18 +717,19 @@ static const struct v4l2_subdev_core_ops hdmi_sd_core_ops = {
static const struct v4l2_subdev_video_ops hdmi_sd_video_ops = {
.s_dv_timings = hdmi_s_dv_timings,
.g_dv_timings = hdmi_g_dv_timings,
- .g_mbus_fmt = hdmi_g_mbus_fmt,
.s_stream = hdmi_s_stream,
};
static const struct v4l2_subdev_pad_ops hdmi_sd_pad_ops = {
.enum_dv_timings = hdmi_enum_dv_timings,
.dv_timings_cap = hdmi_dv_timings_cap,
+ .get_fmt = hdmi_get_fmt,
};
static const struct v4l2_subdev_ops hdmi_sd_ops = {
.core = &hdmi_sd_core_ops,
.video = &hdmi_sd_video_ops,
+ .pad = &hdmi_sd_pad_ops,
};
static int hdmi_runtime_suspend(struct device *dev)
diff --git a/drivers/media/platform/s5p-tv/mixer_drv.c b/drivers/media/platform/s5p-tv/mixer_drv.c
index 2a9501d7e7c8..5ef67774971d 100644
--- a/drivers/media/platform/s5p-tv/mixer_drv.c
+++ b/drivers/media/platform/s5p-tv/mixer_drv.c
@@ -46,11 +46,15 @@ void mxr_get_mbus_fmt(struct mxr_device *mdev,
struct v4l2_mbus_framefmt *mbus_fmt)
{
struct v4l2_subdev *sd;
+ struct v4l2_subdev_format fmt = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
+ };
int ret;
mutex_lock(&mdev->mutex);
sd = to_outsd(mdev);
- ret = v4l2_subdev_call(sd, video, g_mbus_fmt, mbus_fmt);
+ ret = v4l2_subdev_call(sd, pad, get_fmt, NULL, &fmt);
+ *mbus_fmt = fmt.format;
WARN(ret, "failed to get mbus_fmt for output %s\n", sd->name);
mutex_unlock(&mdev->mutex);
}
@@ -62,7 +66,10 @@ void mxr_streamer_get(struct mxr_device *mdev)
mxr_dbg(mdev, "%s(%d)\n", __func__, mdev->n_streamer);
if (mdev->n_streamer == 1) {
struct v4l2_subdev *sd = to_outsd(mdev);
- struct v4l2_mbus_framefmt mbus_fmt;
+ struct v4l2_subdev_format fmt = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
+ };
+ struct v4l2_mbus_framefmt *mbus_fmt = &fmt.format;
struct mxr_resources *res = &mdev->res;
int ret;
@@ -72,12 +79,12 @@ void mxr_streamer_get(struct mxr_device *mdev)
clk_set_parent(res->sclk_mixer, res->sclk_hdmi);
mxr_reg_s_output(mdev, to_output(mdev)->cookie);
- ret = v4l2_subdev_call(sd, video, g_mbus_fmt, &mbus_fmt);
+ ret = v4l2_subdev_call(sd, pad, get_fmt, NULL, &fmt);
WARN(ret, "failed to get mbus_fmt for output %s\n", sd->name);
ret = v4l2_subdev_call(sd, video, s_stream, 1);
WARN(ret, "starting stream failed for output %s\n", sd->name);
- mxr_reg_set_mbus_fmt(mdev, &mbus_fmt);
+ mxr_reg_set_mbus_fmt(mdev, mbus_fmt);
mxr_reg_streamon(mdev);
ret = mxr_reg_wait4vsync(mdev);
WARN(ret, "failed to get vsync (%d) from output\n", ret);
diff --git a/drivers/media/platform/s5p-tv/sdo_drv.c b/drivers/media/platform/s5p-tv/sdo_drv.c
index 3621af91d460..c75d4354d182 100644
--- a/drivers/media/platform/s5p-tv/sdo_drv.c
+++ b/drivers/media/platform/s5p-tv/sdo_drv.c
@@ -160,13 +160,17 @@ static int sdo_g_std_output(struct v4l2_subdev *sd, v4l2_std_id *std)
return 0;
}
-static int sdo_g_mbus_fmt(struct v4l2_subdev *sd,
- struct v4l2_mbus_framefmt *fmt)
+static int sdo_get_fmt(struct v4l2_subdev *sd,
+ struct v4l2_subdev_pad_config *cfg,
+ struct v4l2_subdev_format *format)
{
+ struct v4l2_mbus_framefmt *fmt = &format->format;
struct sdo_device *sdev = sd_to_sdev(sd);
if (!sdev->fmt)
return -ENXIO;
+ if (format->pad)
+ return -EINVAL;
/* all modes are 720 pixels wide */
fmt->width = 720;
fmt->height = sdev->fmt->height;
@@ -256,13 +260,17 @@ static const struct v4l2_subdev_video_ops sdo_sd_video_ops = {
.s_std_output = sdo_s_std_output,
.g_std_output = sdo_g_std_output,
.g_tvnorms_output = sdo_g_tvnorms_output,
- .g_mbus_fmt = sdo_g_mbus_fmt,
.s_stream = sdo_s_stream,
};
+static const struct v4l2_subdev_pad_ops sdo_sd_pad_ops = {
+ .get_fmt = sdo_get_fmt,
+};
+
static const struct v4l2_subdev_ops sdo_sd_ops = {
.core = &sdo_sd_core_ops,
.video = &sdo_sd_video_ops,
+ .pad = &sdo_sd_pad_ops,
};
static int sdo_runtime_suspend(struct device *dev)
diff --git a/drivers/media/platform/sh_vou.c b/drivers/media/platform/sh_vou.c
index dde1ccc730be..829e85c26610 100644
--- a/drivers/media/platform/sh_vou.c
+++ b/drivers/media/platform/sh_vou.c
@@ -679,12 +679,14 @@ static int sh_vou_s_fmt_vid_out(struct file *file, void *priv,
unsigned int img_height_max;
int pix_idx;
struct sh_vou_geometry geo;
- struct v4l2_mbus_framefmt mbfmt = {
+ struct v4l2_subdev_format format = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
/* Revisit: is this the correct code? */
- .code = MEDIA_BUS_FMT_YUYV8_2X8,
- .field = V4L2_FIELD_INTERLACED,
- .colorspace = V4L2_COLORSPACE_SMPTE170M,
+ .format.code = MEDIA_BUS_FMT_YUYV8_2X8,
+ .format.field = V4L2_FIELD_INTERLACED,
+ .format.colorspace = V4L2_COLORSPACE_SMPTE170M,
};
+ struct v4l2_mbus_framefmt *mbfmt = &format.format;
int ret;
dev_dbg(vou_dev->v4l2_dev.dev, "%s(): %ux%u -> %ux%u\n", __func__,
@@ -720,27 +722,27 @@ static int sh_vou_s_fmt_vid_out(struct file *file, void *priv,
vou_adjust_output(&geo, vou_dev->std);
- mbfmt.width = geo.output.width;
- mbfmt.height = geo.output.height;
- ret = v4l2_device_call_until_err(&vou_dev->v4l2_dev, 0, video,
- s_mbus_fmt, &mbfmt);
+ mbfmt->width = geo.output.width;
+ mbfmt->height = geo.output.height;
+ ret = v4l2_device_call_until_err(&vou_dev->v4l2_dev, 0, pad,
+ set_fmt, NULL, &format);
/* Must be implemented, so, don't check for -ENOIOCTLCMD */
if (ret < 0)
return ret;
dev_dbg(vou_dev->v4l2_dev.dev, "%s(): %ux%u -> %ux%u\n", __func__,
- geo.output.width, geo.output.height, mbfmt.width, mbfmt.height);
+ geo.output.width, geo.output.height, mbfmt->width, mbfmt->height);
/* Sanity checks */
- if ((unsigned)mbfmt.width > VOU_MAX_IMAGE_WIDTH ||
- (unsigned)mbfmt.height > img_height_max ||
- mbfmt.code != MEDIA_BUS_FMT_YUYV8_2X8)
+ if ((unsigned)mbfmt->width > VOU_MAX_IMAGE_WIDTH ||
+ (unsigned)mbfmt->height > img_height_max ||
+ mbfmt->code != MEDIA_BUS_FMT_YUYV8_2X8)
return -EIO;
- if (mbfmt.width != geo.output.width ||
- mbfmt.height != geo.output.height) {
- geo.output.width = mbfmt.width;
- geo.output.height = mbfmt.height;
+ if (mbfmt->width != geo.output.width ||
+ mbfmt->height != geo.output.height) {
+ geo.output.width = mbfmt->width;
+ geo.output.height = mbfmt->height;
vou_adjust_input(&geo, vou_dev->std);
}
@@ -942,11 +944,12 @@ static int sh_vou_s_crop(struct file *file, void *fh, const struct v4l2_crop *a)
struct v4l2_crop sd_crop = {.type = V4L2_BUF_TYPE_VIDEO_OUTPUT};
struct v4l2_pix_format *pix = &vou_dev->pix;
struct sh_vou_geometry geo;
- struct v4l2_mbus_framefmt mbfmt = {
+ struct v4l2_subdev_format format = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
/* Revisit: is this the correct code? */
- .code = MEDIA_BUS_FMT_YUYV8_2X8,
- .field = V4L2_FIELD_INTERLACED,
- .colorspace = V4L2_COLORSPACE_SMPTE170M,
+ .format.code = MEDIA_BUS_FMT_YUYV8_2X8,
+ .format.field = V4L2_FIELD_INTERLACED,
+ .format.colorspace = V4L2_COLORSPACE_SMPTE170M,
};
unsigned int img_height_max;
int ret;
@@ -984,22 +987,22 @@ static int sh_vou_s_crop(struct file *file, void *fh, const struct v4l2_crop *a)
*/
v4l2_device_call_until_err(&vou_dev->v4l2_dev, 0, video,
s_crop, &sd_crop);
- mbfmt.width = geo.output.width;
- mbfmt.height = geo.output.height;
- ret = v4l2_device_call_until_err(&vou_dev->v4l2_dev, 0, video,
- s_mbus_fmt, &mbfmt);
+ format.format.width = geo.output.width;
+ format.format.height = geo.output.height;
+ ret = v4l2_device_call_until_err(&vou_dev->v4l2_dev, 0, pad,
+ set_fmt, NULL, &format);
/* Must be implemented, so, don't check for -ENOIOCTLCMD */
if (ret < 0)
return ret;
/* Sanity checks */
- if ((unsigned)mbfmt.width > VOU_MAX_IMAGE_WIDTH ||
- (unsigned)mbfmt.height > img_height_max ||
- mbfmt.code != MEDIA_BUS_FMT_YUYV8_2X8)
+ if ((unsigned)format.format.width > VOU_MAX_IMAGE_WIDTH ||
+ (unsigned)format.format.height > img_height_max ||
+ format.format.code != MEDIA_BUS_FMT_YUYV8_2X8)
return -EIO;
- geo.output.width = mbfmt.width;
- geo.output.height = mbfmt.height;
+ geo.output.width = format.format.width;
+ geo.output.height = format.format.height;
/*
* No down-scaling. According to the API, current call has precedence:
diff --git a/drivers/media/platform/soc_camera/atmel-isi.c b/drivers/media/platform/soc_camera/atmel-isi.c
index c835beb2a1a8..287902681164 100644
--- a/drivers/media/platform/soc_camera/atmel-isi.c
+++ b/drivers/media/platform/soc_camera/atmel-isi.c
@@ -487,7 +487,10 @@ static int isi_camera_set_fmt(struct soc_camera_device *icd,
struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
const struct soc_camera_format_xlate *xlate;
struct v4l2_pix_format *pix = &f->fmt.pix;
- struct v4l2_mbus_framefmt mf;
+ struct v4l2_subdev_format format = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
+ };
+ struct v4l2_mbus_framefmt *mf = &format.format;
int ret;
xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat);
@@ -500,27 +503,27 @@ static int isi_camera_set_fmt(struct soc_camera_device *icd,
dev_dbg(icd->parent, "Plan to set format %dx%d\n",
pix->width, pix->height);
- mf.width = pix->width;
- mf.height = pix->height;
- mf.field = pix->field;
- mf.colorspace = pix->colorspace;
- mf.code = xlate->code;
+ mf->width = pix->width;
+ mf->height = pix->height;
+ mf->field = pix->field;
+ mf->colorspace = pix->colorspace;
+ mf->code = xlate->code;
- ret = v4l2_subdev_call(sd, video, s_mbus_fmt, &mf);
+ ret = v4l2_subdev_call(sd, pad, set_fmt, NULL, &format);
if (ret < 0)
return ret;
- if (mf.code != xlate->code)
+ if (mf->code != xlate->code)
return -EINVAL;
ret = configure_geometry(isi, pix->width, pix->height, xlate->code);
if (ret < 0)
return ret;
- pix->width = mf.width;
- pix->height = mf.height;
- pix->field = mf.field;
- pix->colorspace = mf.colorspace;
+ pix->width = mf->width;
+ pix->height = mf->height;
+ pix->field = mf->field;
+ pix->colorspace = mf->colorspace;
icd->current_fmt = xlate;
dev_dbg(icd->parent, "Finally set format %dx%d\n",
@@ -535,7 +538,11 @@ static int isi_camera_try_fmt(struct soc_camera_device *icd,
struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
const struct soc_camera_format_xlate *xlate;
struct v4l2_pix_format *pix = &f->fmt.pix;
- struct v4l2_mbus_framefmt mf;
+ struct v4l2_subdev_pad_config pad_cfg;
+ struct v4l2_subdev_format format = {
+ .which = V4L2_SUBDEV_FORMAT_TRY,
+ };
+ struct v4l2_mbus_framefmt *mf = &format.format;
u32 pixfmt = pix->pixelformat;
int ret;
@@ -552,21 +559,21 @@ static int isi_camera_try_fmt(struct soc_camera_device *icd,
pix->width = MAX_SUPPORT_WIDTH;
/* limit to sensor capabilities */
- mf.width = pix->width;
- mf.height = pix->height;
- mf.field = pix->field;
- mf.colorspace = pix->colorspace;
- mf.code = xlate->code;
+ mf->width = pix->width;
+ mf->height = pix->height;
+ mf->field = pix->field;
+ mf->colorspace = pix->colorspace;
+ mf->code = xlate->code;
- ret = v4l2_subdev_call(sd, video, try_mbus_fmt, &mf);
+ ret = v4l2_subdev_call(sd, pad, set_fmt, &pad_cfg, &format);
if (ret < 0)
return ret;
- pix->width = mf.width;
- pix->height = mf.height;
- pix->colorspace = mf.colorspace;
+ pix->width = mf->width;
+ pix->height = mf->height;
+ pix->colorspace = mf->colorspace;
- switch (mf.field) {
+ switch (mf->field) {
case V4L2_FIELD_ANY:
pix->field = V4L2_FIELD_NONE;
break;
@@ -574,7 +581,7 @@ static int isi_camera_try_fmt(struct soc_camera_device *icd,
break;
default:
dev_err(icd->parent, "Field type %d unsupported.\n",
- mf.field);
+ mf->field);
ret = -EINVAL;
}
@@ -648,19 +655,22 @@ static int isi_camera_get_formats(struct soc_camera_device *icd,
struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
int formats = 0, ret;
/* sensor format */
- u32 code;
+ struct v4l2_subdev_mbus_code_enum code = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
+ .index = idx,
+ };
/* soc camera host format */
const struct soc_mbus_pixelfmt *fmt;
- ret = v4l2_subdev_call(sd, video, enum_mbus_fmt, idx, &code);
+ ret = v4l2_subdev_call(sd, pad, enum_mbus_code, NULL, &code);
if (ret < 0)
/* No more formats */
return 0;
- fmt = soc_mbus_get_fmtdesc(code);
+ fmt = soc_mbus_get_fmtdesc(code.code);
if (!fmt) {
dev_err(icd->parent,
- "Invalid format code #%u: %d\n", idx, code);
+ "Invalid format code #%u: %d\n", idx, code.code);
return 0;
}
@@ -672,7 +682,7 @@ static int isi_camera_get_formats(struct soc_camera_device *icd,
return 0;
}
- switch (code) {
+ switch (code.code) {
case MEDIA_BUS_FMT_UYVY8_2X8:
case MEDIA_BUS_FMT_VYUY8_2X8:
case MEDIA_BUS_FMT_YUYV8_2X8:
@@ -680,10 +690,10 @@ static int isi_camera_get_formats(struct soc_camera_device *icd,
formats++;
if (xlate) {
xlate->host_fmt = &isi_camera_formats[0];
- xlate->code = code;
+ xlate->code = code.code;
xlate++;
dev_dbg(icd->parent, "Providing format %s using code %d\n",
- isi_camera_formats[0].name, code);
+ isi_camera_formats[0].name, code.code);
}
break;
default:
@@ -699,7 +709,7 @@ static int isi_camera_get_formats(struct soc_camera_device *icd,
formats++;
if (xlate) {
xlate->host_fmt = fmt;
- xlate->code = code;
+ xlate->code = code.code;
xlate++;
}
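
[Editor's note, not part of the patch] The atmel-isi hunks above follow the host-driver pattern repeated throughout this series: the per-index enum_mbus_fmt callback is replaced by the pad enum_mbus_code operation, which carries both the index and the returned bus code in a struct v4l2_subdev_mbus_code_enum. A minimal sketch of that calling convention, with an illustrative helper name that does not appear in this patch:

#include <media/v4l2-subdev.h>

/* Illustrative helper: enumerate a sensor's media bus codes through the
 * pad operation used in the hunks above. */
static int example_enum_codes(struct v4l2_subdev *sd, unsigned int idx, u32 *out)
{
	struct v4l2_subdev_mbus_code_enum code = {
		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
		.index = idx,
	};
	int ret = v4l2_subdev_call(sd, pad, enum_mbus_code, NULL, &code);

	if (ret < 0)
		return ret;	/* no more formats */

	*out = code.code;
	return 0;
}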
diff --git a/drivers/media/platform/soc_camera/mx2_camera.c b/drivers/media/platform/soc_camera/mx2_camera.c
index 192377f55840..ea4c423f0cf8 100644
--- a/drivers/media/platform/soc_camera/mx2_camera.c
+++ b/drivers/media/platform/soc_camera/mx2_camera.c
@@ -912,7 +912,10 @@ static int mx2_camera_set_crop(struct soc_camera_device *icd,
struct v4l2_crop a_writable = *a;
struct v4l2_rect *rect = &a_writable.c;
struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
- struct v4l2_mbus_framefmt mf;
+ struct v4l2_subdev_format fmt = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
+ };
+ struct v4l2_mbus_framefmt *mf = &fmt.format;
int ret;
soc_camera_limit_side(&rect->left, &rect->width, 0, 2, 4096);
@@ -923,15 +926,15 @@ static int mx2_camera_set_crop(struct soc_camera_device *icd,
return ret;
/* The capture device might have changed its output */
- ret = v4l2_subdev_call(sd, video, g_mbus_fmt, &mf);
+ ret = v4l2_subdev_call(sd, pad, get_fmt, NULL, &fmt);
if (ret < 0)
return ret;
dev_dbg(icd->parent, "Sensor cropped %dx%d\n",
- mf.width, mf.height);
+ mf->width, mf->height);
- icd->user_width = mf.width;
- icd->user_height = mf.height;
+ icd->user_width = mf->width;
+ icd->user_height = mf->height;
return ret;
}
@@ -943,22 +946,25 @@ static int mx2_camera_get_formats(struct soc_camera_device *icd,
struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
const struct soc_mbus_pixelfmt *fmt;
struct device *dev = icd->parent;
- u32 code;
+ struct v4l2_subdev_mbus_code_enum code = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
+ .index = idx,
+ };
int ret, formats = 0;
- ret = v4l2_subdev_call(sd, video, enum_mbus_fmt, idx, &code);
+ ret = v4l2_subdev_call(sd, pad, enum_mbus_code, NULL, &code);
if (ret < 0)
/* no more formats */
return 0;
- fmt = soc_mbus_get_fmtdesc(code);
+ fmt = soc_mbus_get_fmtdesc(code.code);
if (!fmt) {
- dev_err(dev, "Invalid format code #%u: %d\n", idx, code);
+ dev_err(dev, "Invalid format code #%u: %d\n", idx, code.code);
return 0;
}
- if (code == MEDIA_BUS_FMT_YUYV8_2X8 ||
- code == MEDIA_BUS_FMT_UYVY8_2X8) {
+ if (code.code == MEDIA_BUS_FMT_YUYV8_2X8 ||
+ code.code == MEDIA_BUS_FMT_UYVY8_2X8) {
formats++;
if (xlate) {
/*
@@ -967,21 +973,21 @@ static int mx2_camera_get_formats(struct soc_camera_device *icd,
*/
xlate->host_fmt =
soc_mbus_get_fmtdesc(MEDIA_BUS_FMT_YUYV8_1_5X8);
- xlate->code = code;
+ xlate->code = code.code;
dev_dbg(dev, "Providing host format %s for sensor code %d\n",
- xlate->host_fmt->name, code);
+ xlate->host_fmt->name, code.code);
xlate++;
}
}
- if (code == MEDIA_BUS_FMT_UYVY8_2X8) {
+ if (code.code == MEDIA_BUS_FMT_UYVY8_2X8) {
formats++;
if (xlate) {
xlate->host_fmt =
soc_mbus_get_fmtdesc(MEDIA_BUS_FMT_YUYV8_2X8);
- xlate->code = code;
+ xlate->code = code.code;
dev_dbg(dev, "Providing host format %s for sensor code %d\n",
- xlate->host_fmt->name, code);
+ xlate->host_fmt->name, code.code);
xlate++;
}
}
@@ -990,7 +996,7 @@ static int mx2_camera_get_formats(struct soc_camera_device *icd,
formats++;
if (xlate) {
xlate->host_fmt = fmt;
- xlate->code = code;
+ xlate->code = code.code;
xlate++;
}
return formats;
@@ -1121,7 +1127,10 @@ static int mx2_camera_set_fmt(struct soc_camera_device *icd,
struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
const struct soc_camera_format_xlate *xlate;
struct v4l2_pix_format *pix = &f->fmt.pix;
- struct v4l2_mbus_framefmt mf;
+ struct v4l2_subdev_format format = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
+ };
+ struct v4l2_mbus_framefmt *mf = &format.format;
int ret;
dev_dbg(icd->parent, "%s: requested params: width = %d, height = %d\n",
@@ -1134,19 +1143,19 @@ static int mx2_camera_set_fmt(struct soc_camera_device *icd,
return -EINVAL;
}
- mf.width = pix->width;
- mf.height = pix->height;
- mf.field = pix->field;
- mf.colorspace = pix->colorspace;
- mf.code = xlate->code;
+ mf->width = pix->width;
+ mf->height = pix->height;
+ mf->field = pix->field;
+ mf->colorspace = pix->colorspace;
+ mf->code = xlate->code;
- ret = v4l2_subdev_call(sd, video, s_mbus_fmt, &mf);
+ ret = v4l2_subdev_call(sd, pad, set_fmt, NULL, &format);
if (ret < 0 && ret != -ENOIOCTLCMD)
return ret;
/* Store width and height returned by the sensor for resizing */
- pcdev->s_width = mf.width;
- pcdev->s_height = mf.height;
+ pcdev->s_width = mf->width;
+ pcdev->s_height = mf->height;
dev_dbg(icd->parent, "%s: sensor params: width = %d, height = %d\n",
__func__, pcdev->s_width, pcdev->s_height);
@@ -1154,19 +1163,19 @@ static int mx2_camera_set_fmt(struct soc_camera_device *icd,
xlate->host_fmt->fourcc);
memset(pcdev->resizing, 0, sizeof(pcdev->resizing));
- if ((mf.width != pix->width || mf.height != pix->height) &&
+ if ((mf->width != pix->width || mf->height != pix->height) &&
pcdev->emma_prp->cfg.in_fmt == PRP_CNTL_DATA_IN_YUV422) {
- if (mx2_emmaprp_resize(pcdev, &mf, pix, true) < 0)
+ if (mx2_emmaprp_resize(pcdev, mf, pix, true) < 0)
dev_dbg(icd->parent, "%s: can't resize\n", __func__);
}
- if (mf.code != xlate->code)
+ if (mf->code != xlate->code)
return -EINVAL;
- pix->width = mf.width;
- pix->height = mf.height;
- pix->field = mf.field;
- pix->colorspace = mf.colorspace;
+ pix->width = mf->width;
+ pix->height = mf->height;
+ pix->field = mf->field;
+ pix->colorspace = mf->colorspace;
icd->current_fmt = xlate;
dev_dbg(icd->parent, "%s: returned params: width = %d, height = %d\n",
@@ -1181,7 +1190,11 @@ static int mx2_camera_try_fmt(struct soc_camera_device *icd,
struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
const struct soc_camera_format_xlate *xlate;
struct v4l2_pix_format *pix = &f->fmt.pix;
- struct v4l2_mbus_framefmt mf;
+ struct v4l2_subdev_pad_config pad_cfg;
+ struct v4l2_subdev_format format = {
+ .which = V4L2_SUBDEV_FORMAT_TRY,
+ };
+ struct v4l2_mbus_framefmt *mf = &format.format;
__u32 pixfmt = pix->pixelformat;
struct soc_camera_host *ici = to_soc_camera_host(icd->parent);
struct mx2_camera_dev *pcdev = ici->priv;
@@ -1204,13 +1217,13 @@ static int mx2_camera_try_fmt(struct soc_camera_device *icd,
pix->width &= ~0x7;
/* limit to sensor capabilities */
- mf.width = pix->width;
- mf.height = pix->height;
- mf.field = pix->field;
- mf.colorspace = pix->colorspace;
- mf.code = xlate->code;
+ mf->width = pix->width;
+ mf->height = pix->height;
+ mf->field = pix->field;
+ mf->colorspace = pix->colorspace;
+ mf->code = xlate->code;
- ret = v4l2_subdev_call(sd, video, try_mbus_fmt, &mf);
+ ret = v4l2_subdev_call(sd, pad, set_fmt, &pad_cfg, &format);
if (ret < 0)
return ret;
@@ -1221,29 +1234,29 @@ static int mx2_camera_try_fmt(struct soc_camera_device *icd,
emma_prp = mx27_emma_prp_get_format(xlate->code,
xlate->host_fmt->fourcc);
- if ((mf.width != pix->width || mf.height != pix->height) &&
+ if ((mf->width != pix->width || mf->height != pix->height) &&
emma_prp->cfg.in_fmt == PRP_CNTL_DATA_IN_YUV422) {
- if (mx2_emmaprp_resize(pcdev, &mf, pix, false) < 0)
+ if (mx2_emmaprp_resize(pcdev, mf, pix, false) < 0)
dev_dbg(icd->parent, "%s: can't resize\n", __func__);
}
- if (mf.field == V4L2_FIELD_ANY)
- mf.field = V4L2_FIELD_NONE;
+ if (mf->field == V4L2_FIELD_ANY)
+ mf->field = V4L2_FIELD_NONE;
/*
* Driver supports interlaced images provided they have
* both fields so that they can be processed as if they
* were progressive.
*/
- if (mf.field != V4L2_FIELD_NONE && !V4L2_FIELD_HAS_BOTH(mf.field)) {
+ if (mf->field != V4L2_FIELD_NONE && !V4L2_FIELD_HAS_BOTH(mf->field)) {
dev_err(icd->parent, "Field type %d unsupported.\n",
- mf.field);
+ mf->field);
return -EINVAL;
}
- pix->width = mf.width;
- pix->height = mf.height;
- pix->field = mf.field;
- pix->colorspace = mf.colorspace;
+ pix->width = mf->width;
+ pix->height = mf->height;
+ pix->field = mf->field;
+ pix->colorspace = mf->colorspace;
dev_dbg(icd->parent, "%s: returned params: width = %d, height = %d\n",
__func__, pix->width, pix->height);
diff --git a/drivers/media/platform/soc_camera/mx3_camera.c b/drivers/media/platform/soc_camera/mx3_camera.c
index 3435fd2ca8ec..ace41f53caca 100644
--- a/drivers/media/platform/soc_camera/mx3_camera.c
+++ b/drivers/media/platform/soc_camera/mx3_camera.c
@@ -659,18 +659,21 @@ static int mx3_camera_get_formats(struct soc_camera_device *icd, unsigned int id
struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
struct device *dev = icd->parent;
int formats = 0, ret;
- u32 code;
+ struct v4l2_subdev_mbus_code_enum code = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
+ .index = idx,
+ };
const struct soc_mbus_pixelfmt *fmt;
- ret = v4l2_subdev_call(sd, video, enum_mbus_fmt, idx, &code);
+ ret = v4l2_subdev_call(sd, pad, enum_mbus_code, NULL, &code);
if (ret < 0)
/* No more formats */
return 0;
- fmt = soc_mbus_get_fmtdesc(code);
+ fmt = soc_mbus_get_fmtdesc(code.code);
if (!fmt) {
dev_warn(icd->parent,
- "Unsupported format code #%u: 0x%x\n", idx, code);
+ "Unsupported format code #%u: 0x%x\n", idx, code.code);
return 0;
}
@@ -679,25 +682,25 @@ static int mx3_camera_get_formats(struct soc_camera_device *icd, unsigned int id
if (ret < 0)
return 0;
- switch (code) {
+ switch (code.code) {
case MEDIA_BUS_FMT_SBGGR10_1X10:
formats++;
if (xlate) {
xlate->host_fmt = &mx3_camera_formats[0];
- xlate->code = code;
+ xlate->code = code.code;
xlate++;
dev_dbg(dev, "Providing format %s using code 0x%x\n",
- mx3_camera_formats[0].name, code);
+ mx3_camera_formats[0].name, code.code);
}
break;
case MEDIA_BUS_FMT_Y10_1X10:
formats++;
if (xlate) {
xlate->host_fmt = &mx3_camera_formats[1];
- xlate->code = code;
+ xlate->code = code.code;
xlate++;
dev_dbg(dev, "Providing format %s using code 0x%x\n",
- mx3_camera_formats[1].name, code);
+ mx3_camera_formats[1].name, code.code);
}
break;
default:
@@ -709,7 +712,7 @@ static int mx3_camera_get_formats(struct soc_camera_device *icd, unsigned int id
formats++;
if (xlate) {
xlate->host_fmt = fmt;
- xlate->code = code;
+ xlate->code = code.code;
dev_dbg(dev, "Providing format %c%c%c%c in pass-through mode\n",
(fmt->fourcc >> (0*8)) & 0xFF,
(fmt->fourcc >> (1*8)) & 0xFF,
@@ -801,7 +804,10 @@ static int mx3_camera_set_crop(struct soc_camera_device *icd,
struct soc_camera_host *ici = to_soc_camera_host(icd->parent);
struct mx3_camera_dev *mx3_cam = ici->priv;
struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
- struct v4l2_mbus_framefmt mf;
+ struct v4l2_subdev_format fmt = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
+ };
+ struct v4l2_mbus_framefmt *mf = &fmt.format;
int ret;
soc_camera_limit_side(&rect->left, &rect->width, 0, 2, 4096);
@@ -812,30 +818,30 @@ static int mx3_camera_set_crop(struct soc_camera_device *icd,
return ret;
/* The capture device might have changed its output sizes */
- ret = v4l2_subdev_call(sd, video, g_mbus_fmt, &mf);
+ ret = v4l2_subdev_call(sd, pad, get_fmt, NULL, &fmt);
if (ret < 0)
return ret;
- if (mf.code != icd->current_fmt->code)
+ if (mf->code != icd->current_fmt->code)
return -EINVAL;
- if (mf.width & 7) {
+ if (mf->width & 7) {
/* Ouch! We can only handle 8-byte aligned width... */
- stride_align(&mf.width);
- ret = v4l2_subdev_call(sd, video, s_mbus_fmt, &mf);
+ stride_align(&mf->width);
+ ret = v4l2_subdev_call(sd, pad, set_fmt, NULL, &fmt);
if (ret < 0)
return ret;
}
- if (mf.width != icd->user_width || mf.height != icd->user_height)
- configure_geometry(mx3_cam, mf.width, mf.height,
+ if (mf->width != icd->user_width || mf->height != icd->user_height)
+ configure_geometry(mx3_cam, mf->width, mf->height,
icd->current_fmt->host_fmt);
dev_dbg(icd->parent, "Sensor cropped %dx%d\n",
- mf.width, mf.height);
+ mf->width, mf->height);
- icd->user_width = mf.width;
- icd->user_height = mf.height;
+ icd->user_width = mf->width;
+ icd->user_height = mf->height;
return ret;
}
@@ -848,7 +854,10 @@ static int mx3_camera_set_fmt(struct soc_camera_device *icd,
struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
const struct soc_camera_format_xlate *xlate;
struct v4l2_pix_format *pix = &f->fmt.pix;
- struct v4l2_mbus_framefmt mf;
+ struct v4l2_subdev_format format = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
+ };
+ struct v4l2_mbus_framefmt *mf = &format.format;
int ret;
xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat);
@@ -869,17 +878,17 @@ static int mx3_camera_set_fmt(struct soc_camera_device *icd,
configure_geometry(mx3_cam, pix->width, pix->height, xlate->host_fmt);
- mf.width = pix->width;
- mf.height = pix->height;
- mf.field = pix->field;
- mf.colorspace = pix->colorspace;
- mf.code = xlate->code;
+ mf->width = pix->width;
+ mf->height = pix->height;
+ mf->field = pix->field;
+ mf->colorspace = pix->colorspace;
+ mf->code = xlate->code;
- ret = v4l2_subdev_call(sd, video, s_mbus_fmt, &mf);
+ ret = v4l2_subdev_call(sd, pad, set_fmt, NULL, &format);
if (ret < 0)
return ret;
- if (mf.code != xlate->code)
+ if (mf->code != xlate->code)
return -EINVAL;
if (!mx3_cam->idmac_channel[0]) {
@@ -888,11 +897,11 @@ static int mx3_camera_set_fmt(struct soc_camera_device *icd,
return ret;
}
- pix->width = mf.width;
- pix->height = mf.height;
- pix->field = mf.field;
- mx3_cam->field = mf.field;
- pix->colorspace = mf.colorspace;
+ pix->width = mf->width;
+ pix->height = mf->height;
+ pix->field = mf->field;
+ mx3_cam->field = mf->field;
+ pix->colorspace = mf->colorspace;
icd->current_fmt = xlate;
dev_dbg(icd->parent, "Sensor set %dx%d\n", pix->width, pix->height);
@@ -906,7 +915,11 @@ static int mx3_camera_try_fmt(struct soc_camera_device *icd,
struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
const struct soc_camera_format_xlate *xlate;
struct v4l2_pix_format *pix = &f->fmt.pix;
- struct v4l2_mbus_framefmt mf;
+ struct v4l2_subdev_pad_config pad_cfg;
+ struct v4l2_subdev_format format = {
+ .which = V4L2_SUBDEV_FORMAT_TRY,
+ };
+ struct v4l2_mbus_framefmt *mf = &format.format;
__u32 pixfmt = pix->pixelformat;
int ret;
@@ -923,21 +936,21 @@ static int mx3_camera_try_fmt(struct soc_camera_device *icd,
pix->width = 4096;
/* limit to sensor capabilities */
- mf.width = pix->width;
- mf.height = pix->height;
- mf.field = pix->field;
- mf.colorspace = pix->colorspace;
- mf.code = xlate->code;
+ mf->width = pix->width;
+ mf->height = pix->height;
+ mf->field = pix->field;
+ mf->colorspace = pix->colorspace;
+ mf->code = xlate->code;
- ret = v4l2_subdev_call(sd, video, try_mbus_fmt, &mf);
+ ret = v4l2_subdev_call(sd, pad, set_fmt, &pad_cfg, &format);
if (ret < 0)
return ret;
- pix->width = mf.width;
- pix->height = mf.height;
- pix->colorspace = mf.colorspace;
+ pix->width = mf->width;
+ pix->height = mf->height;
+ pix->colorspace = mf->colorspace;
- switch (mf.field) {
+ switch (mf->field) {
case V4L2_FIELD_ANY:
pix->field = V4L2_FIELD_NONE;
break;
@@ -945,7 +958,7 @@ static int mx3_camera_try_fmt(struct soc_camera_device *icd,
break;
default:
dev_err(icd->parent, "Field type %d unsupported.\n",
- mf.field);
+ mf->field);
ret = -EINVAL;
}
diff --git a/drivers/media/platform/soc_camera/omap1_camera.c b/drivers/media/platform/soc_camera/omap1_camera.c
index 16f65ecb70a3..ba8dcd11ae0e 100644
--- a/drivers/media/platform/soc_camera/omap1_camera.c
+++ b/drivers/media/platform/soc_camera/omap1_camera.c
@@ -1068,18 +1068,21 @@ static int omap1_cam_get_formats(struct soc_camera_device *icd,
struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
struct device *dev = icd->parent;
int formats = 0, ret;
- u32 code;
+ struct v4l2_subdev_mbus_code_enum code = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
+ .index = idx,
+ };
const struct soc_mbus_pixelfmt *fmt;
- ret = v4l2_subdev_call(sd, video, enum_mbus_fmt, idx, &code);
+ ret = v4l2_subdev_call(sd, pad, enum_mbus_code, NULL, &code);
if (ret < 0)
/* No more formats */
return 0;
- fmt = soc_mbus_get_fmtdesc(code);
+ fmt = soc_mbus_get_fmtdesc(code.code);
if (!fmt) {
dev_warn(dev, "%s: unsupported format code #%d: %d\n", __func__,
- idx, code);
+ idx, code.code);
return 0;
}
@@ -1087,7 +1090,7 @@ static int omap1_cam_get_formats(struct soc_camera_device *icd,
if (fmt->bits_per_sample != 8)
return 0;
- switch (code) {
+ switch (code.code) {
case MEDIA_BUS_FMT_YUYV8_2X8:
case MEDIA_BUS_FMT_YVYU8_2X8:
case MEDIA_BUS_FMT_UYVY8_2X8:
@@ -1098,14 +1101,14 @@ static int omap1_cam_get_formats(struct soc_camera_device *icd,
case MEDIA_BUS_FMT_RGB565_2X8_LE:
formats++;
if (xlate) {
- xlate->host_fmt = soc_mbus_find_fmtdesc(code,
+ xlate->host_fmt = soc_mbus_find_fmtdesc(code.code,
omap1_cam_formats,
ARRAY_SIZE(omap1_cam_formats));
- xlate->code = code;
+ xlate->code = code.code;
xlate++;
dev_dbg(dev,
"%s: providing format %s as byte swapped code #%d\n",
- __func__, xlate->host_fmt->name, code);
+ __func__, xlate->host_fmt->name, code.code);
}
default:
if (xlate)
@@ -1116,7 +1119,7 @@ static int omap1_cam_get_formats(struct soc_camera_device *icd,
formats++;
if (xlate) {
xlate->host_fmt = fmt;
- xlate->code = code;
+ xlate->code = code.code;
xlate++;
}
@@ -1154,7 +1157,7 @@ static int dma_align(int *width, int *height,
return 1;
}
-#define subdev_call_with_sense(pcdev, dev, icd, sd, function, args...) \
+#define subdev_call_with_sense(pcdev, dev, icd, sd, op, function, args...) \
({ \
struct soc_camera_sense sense = { \
.master_clock = pcdev->camexclk, \
@@ -1165,7 +1168,7 @@ static int dma_align(int *width, int *height,
if (pcdev->pdata) \
sense.pixel_clock_max = pcdev->pdata->lclk_khz_max * 1000; \
icd->sense = &sense; \
- __ret = v4l2_subdev_call(sd, video, function, ##args); \
+ __ret = v4l2_subdev_call(sd, op, function, ##args); \
icd->sense = NULL; \
\
if (sense.flags & SOCAM_SENSE_PCLK_CHANGED) { \
@@ -1179,16 +1182,17 @@ static int dma_align(int *width, int *height,
__ret; \
})
-static int set_mbus_format(struct omap1_cam_dev *pcdev, struct device *dev,
+static int set_format(struct omap1_cam_dev *pcdev, struct device *dev,
struct soc_camera_device *icd, struct v4l2_subdev *sd,
- struct v4l2_mbus_framefmt *mf,
+ struct v4l2_subdev_format *format,
const struct soc_camera_format_xlate *xlate)
{
s32 bytes_per_line;
- int ret = subdev_call_with_sense(pcdev, dev, icd, sd, s_mbus_fmt, mf);
+ struct v4l2_mbus_framefmt *mf = &format->format;
+ int ret = subdev_call_with_sense(pcdev, dev, icd, sd, pad, set_fmt, NULL, format);
if (ret < 0) {
- dev_err(dev, "%s: s_mbus_fmt failed\n", __func__);
+ dev_err(dev, "%s: set_fmt failed\n", __func__);
return ret;
}
@@ -1221,42 +1225,45 @@ static int omap1_cam_set_crop(struct soc_camera_device *icd,
struct device *dev = icd->parent;
struct soc_camera_host *ici = to_soc_camera_host(dev);
struct omap1_cam_dev *pcdev = ici->priv;
- struct v4l2_mbus_framefmt mf;
+ struct v4l2_subdev_format fmt = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
+ };
+ struct v4l2_mbus_framefmt *mf = &fmt.format;
int ret;
- ret = subdev_call_with_sense(pcdev, dev, icd, sd, s_crop, crop);
+ ret = subdev_call_with_sense(pcdev, dev, icd, sd, video, s_crop, crop);
if (ret < 0) {
dev_warn(dev, "%s: failed to crop to %ux%u@%u:%u\n", __func__,
rect->width, rect->height, rect->left, rect->top);
return ret;
}
- ret = v4l2_subdev_call(sd, video, g_mbus_fmt, &mf);
+ ret = v4l2_subdev_call(sd, pad, get_fmt, NULL, &fmt);
if (ret < 0) {
dev_warn(dev, "%s: failed to fetch current format\n", __func__);
return ret;
}
- ret = dma_align(&mf.width, &mf.height, xlate->host_fmt, pcdev->vb_mode,
+ ret = dma_align(&mf->width, &mf->height, xlate->host_fmt, pcdev->vb_mode,
false);
if (ret < 0) {
dev_err(dev, "%s: failed to align %ux%u %s with DMA\n",
- __func__, mf.width, mf.height,
+ __func__, mf->width, mf->height,
xlate->host_fmt->name);
return ret;
}
if (!ret) {
/* sensor returned geometry not DMA aligned, trying to fix */
- ret = set_mbus_format(pcdev, dev, icd, sd, &mf, xlate);
+ ret = set_format(pcdev, dev, icd, sd, &fmt, xlate);
if (ret < 0) {
dev_err(dev, "%s: failed to set format\n", __func__);
return ret;
}
}
- icd->user_width = mf.width;
- icd->user_height = mf.height;
+ icd->user_width = mf->width;
+ icd->user_height = mf->height;
return 0;
}
@@ -1270,7 +1277,10 @@ static int omap1_cam_set_fmt(struct soc_camera_device *icd,
struct soc_camera_host *ici = to_soc_camera_host(dev);
struct omap1_cam_dev *pcdev = ici->priv;
struct v4l2_pix_format *pix = &f->fmt.pix;
- struct v4l2_mbus_framefmt mf;
+ struct v4l2_subdev_format format = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
+ };
+ struct v4l2_mbus_framefmt *mf = &format.format;
int ret;
xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat);
@@ -1280,13 +1290,13 @@ static int omap1_cam_set_fmt(struct soc_camera_device *icd,
return -EINVAL;
}
- mf.width = pix->width;
- mf.height = pix->height;
- mf.field = pix->field;
- mf.colorspace = pix->colorspace;
- mf.code = xlate->code;
+ mf->width = pix->width;
+ mf->height = pix->height;
+ mf->field = pix->field;
+ mf->colorspace = pix->colorspace;
+ mf->code = xlate->code;
- ret = dma_align(&mf.width, &mf.height, xlate->host_fmt, pcdev->vb_mode,
+ ret = dma_align(&mf->width, &mf->height, xlate->host_fmt, pcdev->vb_mode,
true);
if (ret < 0) {
dev_err(dev, "%s: failed to align %ux%u %s with DMA\n",
@@ -1295,16 +1305,16 @@ static int omap1_cam_set_fmt(struct soc_camera_device *icd,
return ret;
}
- ret = set_mbus_format(pcdev, dev, icd, sd, &mf, xlate);
+ ret = set_format(pcdev, dev, icd, sd, &format, xlate);
if (ret < 0) {
dev_err(dev, "%s: failed to set format\n", __func__);
return ret;
}
- pix->width = mf.width;
- pix->height = mf.height;
- pix->field = mf.field;
- pix->colorspace = mf.colorspace;
+ pix->width = mf->width;
+ pix->height = mf->height;
+ pix->field = mf->field;
+ pix->colorspace = mf->colorspace;
icd->current_fmt = xlate;
return 0;
@@ -1316,7 +1326,11 @@ static int omap1_cam_try_fmt(struct soc_camera_device *icd,
struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
const struct soc_camera_format_xlate *xlate;
struct v4l2_pix_format *pix = &f->fmt.pix;
- struct v4l2_mbus_framefmt mf;
+ struct v4l2_subdev_pad_config pad_cfg;
+ struct v4l2_subdev_format format = {
+ .which = V4L2_SUBDEV_FORMAT_TRY,
+ };
+ struct v4l2_mbus_framefmt *mf = &format.format;
int ret;
/* TODO: limit to mx1 hardware capabilities */
@@ -1327,21 +1341,21 @@ static int omap1_cam_try_fmt(struct soc_camera_device *icd,
return -EINVAL;
}
- mf.width = pix->width;
- mf.height = pix->height;
- mf.field = pix->field;
- mf.colorspace = pix->colorspace;
- mf.code = xlate->code;
+ mf->width = pix->width;
+ mf->height = pix->height;
+ mf->field = pix->field;
+ mf->colorspace = pix->colorspace;
+ mf->code = xlate->code;
/* limit to sensor capabilities */
- ret = v4l2_subdev_call(sd, video, try_mbus_fmt, &mf);
+ ret = v4l2_subdev_call(sd, pad, set_fmt, &pad_cfg, &format);
if (ret < 0)
return ret;
- pix->width = mf.width;
- pix->height = mf.height;
- pix->field = mf.field;
- pix->colorspace = mf.colorspace;
+ pix->width = mf->width;
+ pix->height = mf->height;
+ pix->field = mf->field;
+ pix->colorspace = mf->colorspace;
return 0;
}
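
[Editor's note, not part of the patch] As in the atmel-isi, mx2, mx3 and omap1 conversions above, try_fmt paths now call set_fmt with which = V4L2_SUBDEV_FORMAT_TRY and a caller-provided struct v4l2_subdev_pad_config, so the sensor's active format is left untouched, while s_fmt paths use V4L2_SUBDEV_FORMAT_ACTIVE with a NULL pad config. A sketch of the TRY call under those assumptions; the function and parameter names are illustrative only:

#include <media/v4l2-subdev.h>

/* Illustrative only: negotiate a size with the sensor without
 * programming it, mirroring the try_fmt hunks above. */
static int example_try_size(struct v4l2_subdev *sd, u32 mbus_code,
			    u32 *width, u32 *height)
{
	struct v4l2_subdev_pad_config pad_cfg;
	struct v4l2_subdev_format format = {
		.which = V4L2_SUBDEV_FORMAT_TRY,
	};
	int ret;

	format.format.code = mbus_code;
	format.format.width = *width;
	format.format.height = *height;

	ret = v4l2_subdev_call(sd, pad, set_fmt, &pad_cfg, &format);
	if (ret < 0)
		return ret;

	/* the subdev may have adjusted the request */
	*width = format.format.width;
	*height = format.format.height;
	return 0;
}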
diff --git a/drivers/media/platform/soc_camera/pxa_camera.c b/drivers/media/platform/soc_camera/pxa_camera.c
index 8d6e343fec0f..fcb942de0c7f 100644
--- a/drivers/media/platform/soc_camera/pxa_camera.c
+++ b/drivers/media/platform/soc_camera/pxa_camera.c
@@ -1253,17 +1253,20 @@ static int pxa_camera_get_formats(struct soc_camera_device *icd, unsigned int id
struct device *dev = icd->parent;
int formats = 0, ret;
struct pxa_cam *cam;
- u32 code;
+ struct v4l2_subdev_mbus_code_enum code = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
+ .index = idx,
+ };
const struct soc_mbus_pixelfmt *fmt;
- ret = v4l2_subdev_call(sd, video, enum_mbus_fmt, idx, &code);
+ ret = v4l2_subdev_call(sd, pad, enum_mbus_code, NULL, &code);
if (ret < 0)
/* No more formats */
return 0;
- fmt = soc_mbus_get_fmtdesc(code);
+ fmt = soc_mbus_get_fmtdesc(code.code);
if (!fmt) {
- dev_err(dev, "Invalid format code #%u: %d\n", idx, code);
+ dev_err(dev, "Invalid format code #%u: %d\n", idx, code.code);
return 0;
}
@@ -1282,15 +1285,15 @@ static int pxa_camera_get_formats(struct soc_camera_device *icd, unsigned int id
cam = icd->host_priv;
}
- switch (code) {
+ switch (code.code) {
case MEDIA_BUS_FMT_UYVY8_2X8:
formats++;
if (xlate) {
xlate->host_fmt = &pxa_camera_formats[0];
- xlate->code = code;
+ xlate->code = code.code;
xlate++;
dev_dbg(dev, "Providing format %s using code %d\n",
- pxa_camera_formats[0].name, code);
+ pxa_camera_formats[0].name, code.code);
}
case MEDIA_BUS_FMT_VYUY8_2X8:
case MEDIA_BUS_FMT_YUYV8_2X8:
@@ -1314,7 +1317,7 @@ static int pxa_camera_get_formats(struct soc_camera_device *icd, unsigned int id
formats++;
if (xlate) {
xlate->host_fmt = fmt;
- xlate->code = code;
+ xlate->code = code.code;
xlate++;
}
@@ -1346,7 +1349,10 @@ static int pxa_camera_set_crop(struct soc_camera_device *icd,
.master_clock = pcdev->mclk,
.pixel_clock_max = pcdev->ciclk / 4,
};
- struct v4l2_mbus_framefmt mf;
+ struct v4l2_subdev_format fmt = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
+ };
+ struct v4l2_mbus_framefmt *mf = &fmt.format;
struct pxa_cam *cam = icd->host_priv;
u32 fourcc = icd->current_fmt->host_fmt->fourcc;
int ret;
@@ -1365,23 +1371,23 @@ static int pxa_camera_set_crop(struct soc_camera_device *icd,
return ret;
}
- ret = v4l2_subdev_call(sd, video, g_mbus_fmt, &mf);
+ ret = v4l2_subdev_call(sd, pad, get_fmt, NULL, &fmt);
if (ret < 0)
return ret;
- if (pxa_camera_check_frame(mf.width, mf.height)) {
+ if (pxa_camera_check_frame(mf->width, mf->height)) {
/*
* Camera cropping produced a frame beyond our capabilities.
* FIXME: just extract a subframe, that we can process.
*/
- v4l_bound_align_image(&mf.width, 48, 2048, 1,
- &mf.height, 32, 2048, 0,
+ v4l_bound_align_image(&mf->width, 48, 2048, 1,
+ &mf->height, 32, 2048, 0,
fourcc == V4L2_PIX_FMT_YUV422P ? 4 : 0);
- ret = v4l2_subdev_call(sd, video, s_mbus_fmt, &mf);
+ ret = v4l2_subdev_call(sd, pad, set_fmt, NULL, &fmt);
if (ret < 0)
return ret;
- if (pxa_camera_check_frame(mf.width, mf.height)) {
+ if (pxa_camera_check_frame(mf->width, mf->height)) {
dev_warn(icd->parent,
"Inconsistent state. Use S_FMT to repair\n");
return -EINVAL;
@@ -1398,8 +1404,8 @@ static int pxa_camera_set_crop(struct soc_camera_device *icd,
recalculate_fifo_timeout(pcdev, sense.pixel_clock);
}
- icd->user_width = mf.width;
- icd->user_height = mf.height;
+ icd->user_width = mf->width;
+ icd->user_height = mf->height;
pxa_camera_setup_cicr(icd, cam->flags, fourcc);
@@ -1419,7 +1425,10 @@ static int pxa_camera_set_fmt(struct soc_camera_device *icd,
.pixel_clock_max = pcdev->ciclk / 4,
};
struct v4l2_pix_format *pix = &f->fmt.pix;
- struct v4l2_mbus_framefmt mf;
+ struct v4l2_subdev_format format = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
+ };
+ struct v4l2_mbus_framefmt *mf = &format.format;
int ret;
xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat);
@@ -1433,15 +1442,15 @@ static int pxa_camera_set_fmt(struct soc_camera_device *icd,
/* The caller holds a mutex. */
icd->sense = &sense;
- mf.width = pix->width;
- mf.height = pix->height;
- mf.field = pix->field;
- mf.colorspace = pix->colorspace;
- mf.code = xlate->code;
+ mf->width = pix->width;
+ mf->height = pix->height;
+ mf->field = pix->field;
+ mf->colorspace = pix->colorspace;
+ mf->code = xlate->code;
- ret = v4l2_subdev_call(sd, video, s_mbus_fmt, &mf);
+ ret = v4l2_subdev_call(sd, pad, set_fmt, NULL, &format);
- if (mf.code != xlate->code)
+ if (mf->code != xlate->code)
return -EINVAL;
icd->sense = NULL;
@@ -1449,10 +1458,10 @@ static int pxa_camera_set_fmt(struct soc_camera_device *icd,
if (ret < 0) {
dev_warn(dev, "Failed to configure for format %x\n",
pix->pixelformat);
- } else if (pxa_camera_check_frame(mf.width, mf.height)) {
+ } else if (pxa_camera_check_frame(mf->width, mf->height)) {
dev_warn(dev,
"Camera driver produced an unsupported frame %dx%d\n",
- mf.width, mf.height);
+ mf->width, mf->height);
ret = -EINVAL;
} else if (sense.flags & SOCAM_SENSE_PCLK_CHANGED) {
if (sense.pixel_clock > sense.pixel_clock_max) {
@@ -1467,10 +1476,10 @@ static int pxa_camera_set_fmt(struct soc_camera_device *icd,
if (ret < 0)
return ret;
- pix->width = mf.width;
- pix->height = mf.height;
- pix->field = mf.field;
- pix->colorspace = mf.colorspace;
+ pix->width = mf->width;
+ pix->height = mf->height;
+ pix->field = mf->field;
+ pix->colorspace = mf->colorspace;
icd->current_fmt = xlate;
return ret;
@@ -1482,7 +1491,11 @@ static int pxa_camera_try_fmt(struct soc_camera_device *icd,
struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
const struct soc_camera_format_xlate *xlate;
struct v4l2_pix_format *pix = &f->fmt.pix;
- struct v4l2_mbus_framefmt mf;
+ struct v4l2_subdev_pad_config pad_cfg;
+ struct v4l2_subdev_format format = {
+ .which = V4L2_SUBDEV_FORMAT_TRY,
+ };
+ struct v4l2_mbus_framefmt *mf = &format.format;
__u32 pixfmt = pix->pixelformat;
int ret;
@@ -1503,22 +1516,22 @@ static int pxa_camera_try_fmt(struct soc_camera_device *icd,
pixfmt == V4L2_PIX_FMT_YUV422P ? 4 : 0);
/* limit to sensor capabilities */
- mf.width = pix->width;
- mf.height = pix->height;
+ mf->width = pix->width;
+ mf->height = pix->height;
/* Only progressive video supported so far */
- mf.field = V4L2_FIELD_NONE;
- mf.colorspace = pix->colorspace;
- mf.code = xlate->code;
+ mf->field = V4L2_FIELD_NONE;
+ mf->colorspace = pix->colorspace;
+ mf->code = xlate->code;
- ret = v4l2_subdev_call(sd, video, try_mbus_fmt, &mf);
+ ret = v4l2_subdev_call(sd, pad, set_fmt, &pad_cfg, &format);
if (ret < 0)
return ret;
- pix->width = mf.width;
- pix->height = mf.height;
- pix->colorspace = mf.colorspace;
+ pix->width = mf->width;
+ pix->height = mf->height;
+ pix->colorspace = mf->colorspace;
- switch (mf.field) {
+ switch (mf->field) {
case V4L2_FIELD_ANY:
case V4L2_FIELD_NONE:
pix->field = V4L2_FIELD_NONE;
@@ -1526,7 +1539,7 @@ static int pxa_camera_try_fmt(struct soc_camera_device *icd,
default:
/* TODO: support interlaced at least in pass-through mode */
dev_err(icd->parent, "Field type %d unsupported.\n",
- mf.field);
+ mf->field);
return -EINVAL;
}
diff --git a/drivers/media/platform/soc_camera/rcar_vin.c b/drivers/media/platform/soc_camera/rcar_vin.c
index 6460f8e1b07f..4c30d0dbdb30 100644
--- a/drivers/media/platform/soc_camera/rcar_vin.c
+++ b/drivers/media/platform/soc_camera/rcar_vin.c
@@ -1323,16 +1323,19 @@ static int rcar_vin_get_formats(struct soc_camera_device *icd, unsigned int idx,
int ret, k, n;
int formats = 0;
struct rcar_vin_cam *cam;
- u32 code;
+ struct v4l2_subdev_mbus_code_enum code = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
+ .index = idx,
+ };
const struct soc_mbus_pixelfmt *fmt;
- ret = v4l2_subdev_call(sd, video, enum_mbus_fmt, idx, &code);
+ ret = v4l2_subdev_call(sd, pad, enum_mbus_code, NULL, &code);
if (ret < 0)
return 0;
- fmt = soc_mbus_get_fmtdesc(code);
+ fmt = soc_mbus_get_fmtdesc(code.code);
if (!fmt) {
- dev_warn(dev, "unsupported format code #%u: %d\n", idx, code);
+ dev_warn(dev, "unsupported format code #%u: %d\n", idx, code.code);
return 0;
}
@@ -1341,12 +1344,15 @@ static int rcar_vin_get_formats(struct soc_camera_device *icd, unsigned int idx,
return 0;
if (!icd->host_priv) {
- struct v4l2_mbus_framefmt mf;
+ struct v4l2_subdev_format fmt = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
+ };
+ struct v4l2_mbus_framefmt *mf = &fmt.format;
struct v4l2_rect rect;
struct device *dev = icd->parent;
int shift;
- ret = v4l2_subdev_call(sd, video, g_mbus_fmt, &mf);
+ ret = v4l2_subdev_call(sd, pad, get_fmt, NULL, &fmt);
if (ret < 0)
return ret;
@@ -1356,8 +1362,8 @@ static int rcar_vin_get_formats(struct soc_camera_device *icd, unsigned int idx,
/* Sensor driver doesn't support cropping */
rect.left = 0;
rect.top = 0;
- rect.width = mf.width;
- rect.height = mf.height;
+ rect.width = mf->width;
+ rect.height = mf->height;
} else if (ret < 0) {
return ret;
}
@@ -1367,16 +1373,16 @@ static int rcar_vin_get_formats(struct soc_camera_device *icd, unsigned int idx,
* 1280x960, 640x480, 320x240
*/
for (shift = 0; shift < 3; shift++) {
- if (mf.width <= VIN_MAX_WIDTH &&
- mf.height <= VIN_MAX_HEIGHT)
+ if (mf->width <= VIN_MAX_WIDTH &&
+ mf->height <= VIN_MAX_HEIGHT)
break;
- mf.width = 1280 >> shift;
- mf.height = 960 >> shift;
+ mf->width = 1280 >> shift;
+ mf->height = 960 >> shift;
ret = v4l2_device_call_until_err(sd->v4l2_dev,
soc_camera_grp_id(icd),
- video, s_mbus_fmt,
- &mf);
+ pad, set_fmt, NULL,
+ &fmt);
if (ret < 0)
return ret;
}
@@ -1384,11 +1390,11 @@ static int rcar_vin_get_formats(struct soc_camera_device *icd, unsigned int idx,
if (shift == 3) {
dev_err(dev,
"Failed to configure the client below %ux%u\n",
- mf.width, mf.height);
+ mf->width, mf->height);
return -EIO;
}
- dev_dbg(dev, "camera fmt %ux%u\n", mf.width, mf.height);
+ dev_dbg(dev, "camera fmt %ux%u\n", mf->width, mf->height);
cam = kzalloc(sizeof(*cam), GFP_KERNEL);
if (!cam)
@@ -1399,10 +1405,10 @@ static int rcar_vin_get_formats(struct soc_camera_device *icd, unsigned int idx,
*/
cam->rect = rect;
cam->subrect = rect;
- cam->width = mf.width;
- cam->height = mf.height;
- cam->out_width = mf.width;
- cam->out_height = mf.height;
+ cam->width = mf->width;
+ cam->height = mf->height;
+ cam->out_width = mf->width;
+ cam->out_height = mf->height;
icd->host_priv = cam;
} else {
@@ -1413,7 +1419,7 @@ static int rcar_vin_get_formats(struct soc_camera_device *icd, unsigned int idx,
if (!idx)
cam->extra_fmt = NULL;
- switch (code) {
+ switch (code.code) {
case MEDIA_BUS_FMT_YUYV8_1X16:
case MEDIA_BUS_FMT_YUYV8_2X8:
case MEDIA_BUS_FMT_YUYV10_2X10:
@@ -1427,9 +1433,9 @@ static int rcar_vin_get_formats(struct soc_camera_device *icd, unsigned int idx,
formats += n;
for (k = 0; xlate && k < n; k++, xlate++) {
xlate->host_fmt = &rcar_vin_formats[k];
- xlate->code = code;
+ xlate->code = code.code;
dev_dbg(dev, "Providing format %s using code %d\n",
- rcar_vin_formats[k].name, code);
+ rcar_vin_formats[k].name, code.code);
}
break;
default:
@@ -1445,7 +1451,7 @@ static int rcar_vin_get_formats(struct soc_camera_device *icd, unsigned int idx,
formats++;
if (xlate) {
xlate->host_fmt = fmt;
- xlate->code = code;
+ xlate->code = code.code;
xlate++;
}
@@ -1470,7 +1476,10 @@ static int rcar_vin_set_crop(struct soc_camera_device *icd,
struct v4l2_rect *cam_rect = &cam_crop.c;
struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
struct device *dev = icd->parent;
- struct v4l2_mbus_framefmt mf;
+ struct v4l2_subdev_format fmt = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
+ };
+ struct v4l2_mbus_framefmt *mf = &fmt.format;
u32 vnmc;
int ret, i;
@@ -1494,16 +1503,16 @@ static int rcar_vin_set_crop(struct soc_camera_device *icd,
/* On success cam_crop contains current camera crop */
/* Retrieve camera output window */
- ret = v4l2_subdev_call(sd, video, g_mbus_fmt, &mf);
+ ret = v4l2_subdev_call(sd, pad, get_fmt, NULL, &fmt);
if (ret < 0)
return ret;
- if (mf.width > VIN_MAX_WIDTH || mf.height > VIN_MAX_HEIGHT)
+ if (mf->width > VIN_MAX_WIDTH || mf->height > VIN_MAX_HEIGHT)
return -EINVAL;
/* Cache camera output window */
- cam->width = mf.width;
- cam->height = mf.height;
+ cam->width = mf->width;
+ cam->height = mf->height;
icd->user_width = cam->width;
icd->user_height = cam->height;
@@ -1679,7 +1688,11 @@ static int rcar_vin_try_fmt(struct soc_camera_device *icd,
const struct soc_camera_format_xlate *xlate;
struct v4l2_pix_format *pix = &f->fmt.pix;
struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
- struct v4l2_mbus_framefmt mf;
+ struct v4l2_subdev_pad_config pad_cfg;
+ struct v4l2_subdev_format format = {
+ .which = V4L2_SUBDEV_FORMAT_TRY,
+ };
+ struct v4l2_mbus_framefmt *mf = &format.format;
__u32 pixfmt = pix->pixelformat;
int width, height;
int ret;
@@ -1706,25 +1719,25 @@ static int rcar_vin_try_fmt(struct soc_camera_device *icd,
pix->sizeimage = 0;
/* limit to sensor capabilities */
- mf.width = pix->width;
- mf.height = pix->height;
- mf.field = pix->field;
- mf.code = xlate->code;
- mf.colorspace = pix->colorspace;
+ mf->width = pix->width;
+ mf->height = pix->height;
+ mf->field = pix->field;
+ mf->code = xlate->code;
+ mf->colorspace = pix->colorspace;
ret = v4l2_device_call_until_err(sd->v4l2_dev, soc_camera_grp_id(icd),
- video, try_mbus_fmt, &mf);
+ pad, set_fmt, &pad_cfg, &format);
if (ret < 0)
return ret;
/* Adjust only if VIN cannot scale */
- if (pix->width > mf.width * 2)
- pix->width = mf.width * 2;
- if (pix->height > mf.height * 3)
- pix->height = mf.height * 3;
+ if (pix->width > mf->width * 2)
+ pix->width = mf->width * 2;
+ if (pix->height > mf->height * 3)
+ pix->height = mf->height * 3;
- pix->field = mf.field;
- pix->colorspace = mf.colorspace;
+ pix->field = mf->field;
+ pix->colorspace = mf->colorspace;
if (pixfmt == V4L2_PIX_FMT_NV16) {
/* FIXME: check against rect_max after converting soc-camera */
@@ -1735,12 +1748,12 @@ static int rcar_vin_try_fmt(struct soc_camera_device *icd,
* requested a bigger rectangle, it will not return a
* smaller one.
*/
- mf.width = VIN_MAX_WIDTH;
- mf.height = VIN_MAX_HEIGHT;
+ mf->width = VIN_MAX_WIDTH;
+ mf->height = VIN_MAX_HEIGHT;
ret = v4l2_device_call_until_err(sd->v4l2_dev,
soc_camera_grp_id(icd),
- video, try_mbus_fmt,
- &mf);
+ pad, set_fmt, &pad_cfg,
+ &format);
if (ret < 0) {
dev_err(icd->parent,
"client try_fmt() = %d\n", ret);
@@ -1748,9 +1761,9 @@ static int rcar_vin_try_fmt(struct soc_camera_device *icd,
}
}
/* We will scale exactly */
- if (mf.width > width)
+ if (mf->width > width)
pix->width = width;
- if (mf.height > height)
+ if (mf->height > height)
pix->height = height;
}
diff --git a/drivers/media/platform/soc_camera/sh_mobile_ceu_camera.c b/drivers/media/platform/soc_camera/sh_mobile_ceu_camera.c
index 9ce202f53934..c5c6c4e91f7b 100644
--- a/drivers/media/platform/soc_camera/sh_mobile_ceu_camera.c
+++ b/drivers/media/platform/soc_camera/sh_mobile_ceu_camera.c
@@ -1048,17 +1048,20 @@ static int sh_mobile_ceu_get_formats(struct soc_camera_device *icd, unsigned int
int ret, k, n;
int formats = 0;
struct sh_mobile_ceu_cam *cam;
- u32 code;
+ struct v4l2_subdev_mbus_code_enum code = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
+ .index = idx,
+ };
const struct soc_mbus_pixelfmt *fmt;
- ret = v4l2_subdev_call(sd, video, enum_mbus_fmt, idx, &code);
+ ret = v4l2_subdev_call(sd, pad, enum_mbus_code, NULL, &code);
if (ret < 0)
/* No more formats */
return 0;
- fmt = soc_mbus_get_fmtdesc(code);
+ fmt = soc_mbus_get_fmtdesc(code.code);
if (!fmt) {
- dev_warn(dev, "unsupported format code #%u: %d\n", idx, code);
+ dev_warn(dev, "unsupported format code #%u: %d\n", idx, code.code);
return 0;
}
@@ -1070,7 +1073,10 @@ static int sh_mobile_ceu_get_formats(struct soc_camera_device *icd, unsigned int
}
if (!icd->host_priv) {
- struct v4l2_mbus_framefmt mf;
+ struct v4l2_subdev_format fmt = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
+ };
+ struct v4l2_mbus_framefmt *mf = &fmt.format;
struct v4l2_rect rect;
int shift = 0;
@@ -1088,7 +1094,7 @@ static int sh_mobile_ceu_get_formats(struct soc_camera_device *icd, unsigned int
return ret;
/* First time */
- ret = v4l2_subdev_call(sd, video, g_mbus_fmt, &mf);
+ ret = v4l2_subdev_call(sd, pad, get_fmt, NULL, &fmt);
if (ret < 0)
return ret;
@@ -1099,14 +1105,14 @@ static int sh_mobile_ceu_get_formats(struct soc_camera_device *icd, unsigned int
* sizes, just try VGA multiples. If needed, this can be
* adjusted in the future.
*/
- while ((mf.width > pcdev->max_width ||
- mf.height > pcdev->max_height) && shift < 4) {
+ while ((mf->width > pcdev->max_width ||
+ mf->height > pcdev->max_height) && shift < 4) {
/* Try 2560x1920, 1280x960, 640x480, 320x240 */
- mf.width = 2560 >> shift;
- mf.height = 1920 >> shift;
+ mf->width = 2560 >> shift;
+ mf->height = 1920 >> shift;
ret = v4l2_device_call_until_err(sd->v4l2_dev,
- soc_camera_grp_id(icd), video,
- s_mbus_fmt, &mf);
+ soc_camera_grp_id(icd), pad,
+ set_fmt, NULL, &fmt);
if (ret < 0)
return ret;
shift++;
@@ -1114,11 +1120,11 @@ static int sh_mobile_ceu_get_formats(struct soc_camera_device *icd, unsigned int
if (shift == 4) {
dev_err(dev, "Failed to configure the client below %ux%x\n",
- mf.width, mf.height);
+ mf->width, mf->height);
return -EIO;
}
- dev_geo(dev, "camera fmt %ux%u\n", mf.width, mf.height);
+ dev_geo(dev, "camera fmt %ux%u\n", mf->width, mf->height);
cam = kzalloc(sizeof(*cam), GFP_KERNEL);
if (!cam)
@@ -1128,8 +1134,8 @@ static int sh_mobile_ceu_get_formats(struct soc_camera_device *icd, unsigned int
cam->rect = rect;
cam->subrect = rect;
- cam->width = mf.width;
- cam->height = mf.height;
+ cam->width = mf->width;
+ cam->height = mf->height;
icd->host_priv = cam;
} else {
@@ -1140,7 +1146,7 @@ static int sh_mobile_ceu_get_formats(struct soc_camera_device *icd, unsigned int
if (!idx)
cam->extra_fmt = NULL;
- switch (code) {
+ switch (code.code) {
case MEDIA_BUS_FMT_UYVY8_2X8:
case MEDIA_BUS_FMT_VYUY8_2X8:
case MEDIA_BUS_FMT_YUYV8_2X8:
@@ -1163,10 +1169,10 @@ static int sh_mobile_ceu_get_formats(struct soc_camera_device *icd, unsigned int
formats += n;
for (k = 0; xlate && k < n; k++) {
xlate->host_fmt = &sh_mobile_ceu_formats[k];
- xlate->code = code;
+ xlate->code = code.code;
xlate++;
dev_dbg(dev, "Providing format %s using code %d\n",
- sh_mobile_ceu_formats[k].name, code);
+ sh_mobile_ceu_formats[k].name, code.code);
}
break;
default:
@@ -1178,7 +1184,7 @@ static int sh_mobile_ceu_get_formats(struct soc_camera_device *icd, unsigned int
formats++;
if (xlate) {
xlate->host_fmt = fmt;
- xlate->code = code;
+ xlate->code = code.code;
xlate++;
dev_dbg(dev, "Providing format %s in pass-through mode\n",
fmt->name);
@@ -1214,7 +1220,10 @@ static int sh_mobile_ceu_set_crop(struct soc_camera_device *icd,
struct sh_mobile_ceu_cam *cam = icd->host_priv;
struct v4l2_rect *cam_rect = &cam_crop.c;
struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
- struct v4l2_mbus_framefmt mf;
+ struct v4l2_subdev_format fmt = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
+ };
+ struct v4l2_mbus_framefmt *mf = &fmt.format;
unsigned int scale_cam_h, scale_cam_v, scale_ceu_h, scale_ceu_v,
out_width, out_height;
int interm_width, interm_height;
@@ -1244,16 +1253,16 @@ static int sh_mobile_ceu_set_crop(struct soc_camera_device *icd,
/* On success cam_crop contains current camera crop */
/* 3. Retrieve camera output window */
- ret = v4l2_subdev_call(sd, video, g_mbus_fmt, &mf);
+ ret = v4l2_subdev_call(sd, pad, get_fmt, NULL, &fmt);
if (ret < 0)
return ret;
- if (mf.width > pcdev->max_width || mf.height > pcdev->max_height)
+ if (mf->width > pcdev->max_width || mf->height > pcdev->max_height)
return -EINVAL;
/* 4. Calculate camera scales */
- scale_cam_h = calc_generic_scale(cam_rect->width, mf.width);
- scale_cam_v = calc_generic_scale(cam_rect->height, mf.height);
+ scale_cam_h = calc_generic_scale(cam_rect->width, mf->width);
+ scale_cam_v = calc_generic_scale(cam_rect->height, mf->height);
/* Calculate intermediate window */
interm_width = scale_down(rect->width, scale_cam_h);
@@ -1264,7 +1273,7 @@ static int sh_mobile_ceu_set_crop(struct soc_camera_device *icd,
new_scale_h = calc_generic_scale(rect->width, icd->user_width);
- mf.width = scale_down(cam_rect->width, new_scale_h);
+ mf->width = scale_down(cam_rect->width, new_scale_h);
}
if (interm_height < icd->user_height) {
@@ -1272,26 +1281,26 @@ static int sh_mobile_ceu_set_crop(struct soc_camera_device *icd,
new_scale_v = calc_generic_scale(rect->height, icd->user_height);
- mf.height = scale_down(cam_rect->height, new_scale_v);
+ mf->height = scale_down(cam_rect->height, new_scale_v);
}
if (interm_width < icd->user_width || interm_height < icd->user_height) {
ret = v4l2_device_call_until_err(sd->v4l2_dev,
- soc_camera_grp_id(icd), video,
- s_mbus_fmt, &mf);
+ soc_camera_grp_id(icd), pad,
+ set_fmt, NULL, &fmt);
if (ret < 0)
return ret;
- dev_geo(dev, "New camera output %ux%u\n", mf.width, mf.height);
- scale_cam_h = calc_generic_scale(cam_rect->width, mf.width);
- scale_cam_v = calc_generic_scale(cam_rect->height, mf.height);
+ dev_geo(dev, "New camera output %ux%u\n", mf->width, mf->height);
+ scale_cam_h = calc_generic_scale(cam_rect->width, mf->width);
+ scale_cam_v = calc_generic_scale(cam_rect->height, mf->height);
interm_width = scale_down(rect->width, scale_cam_h);
interm_height = scale_down(rect->height, scale_cam_v);
}
/* Cache camera output window */
- cam->width = mf.width;
- cam->height = mf.height;
+ cam->width = mf->width;
+ cam->height = mf->height;
if (pcdev->image_mode) {
out_width = min(interm_width, icd->user_width);
@@ -1490,7 +1499,11 @@ static int sh_mobile_ceu_try_fmt(struct soc_camera_device *icd,
const struct soc_camera_format_xlate *xlate;
struct v4l2_pix_format *pix = &f->fmt.pix;
struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
- struct v4l2_mbus_framefmt mf;
+ struct v4l2_subdev_pad_config pad_cfg;
+ struct v4l2_subdev_format format = {
+ .which = V4L2_SUBDEV_FORMAT_TRY,
+ };
+ struct v4l2_mbus_framefmt *mf = &format.format;
__u32 pixfmt = pix->pixelformat;
int width, height;
int ret;
@@ -1518,21 +1531,21 @@ static int sh_mobile_ceu_try_fmt(struct soc_camera_device *icd,
height = pix->height;
/* limit to sensor capabilities */
- mf.width = pix->width;
- mf.height = pix->height;
- mf.field = pix->field;
- mf.code = xlate->code;
- mf.colorspace = pix->colorspace;
+ mf->width = pix->width;
+ mf->height = pix->height;
+ mf->field = pix->field;
+ mf->code = xlate->code;
+ mf->colorspace = pix->colorspace;
ret = v4l2_device_call_until_err(sd->v4l2_dev, soc_camera_grp_id(icd),
- video, try_mbus_fmt, &mf);
+ pad, set_fmt, &pad_cfg, &format);
if (ret < 0)
return ret;
- pix->width = mf.width;
- pix->height = mf.height;
- pix->field = mf.field;
- pix->colorspace = mf.colorspace;
+ pix->width = mf->width;
+ pix->height = mf->height;
+ pix->field = mf->field;
+ pix->colorspace = mf->colorspace;
switch (pixfmt) {
case V4L2_PIX_FMT_NV12:
@@ -1547,11 +1560,11 @@ static int sh_mobile_ceu_try_fmt(struct soc_camera_device *icd,
* requested a bigger rectangle, it will not return a
* smaller one.
*/
- mf.width = pcdev->max_width;
- mf.height = pcdev->max_height;
+ mf->width = pcdev->max_width;
+ mf->height = pcdev->max_height;
ret = v4l2_device_call_until_err(sd->v4l2_dev,
- soc_camera_grp_id(icd), video,
- try_mbus_fmt, &mf);
+ soc_camera_grp_id(icd), pad,
+ set_fmt, &pad_cfg, &format);
if (ret < 0) {
/* Shouldn't actually happen... */
dev_err(icd->parent,
@@ -1560,9 +1573,9 @@ static int sh_mobile_ceu_try_fmt(struct soc_camera_device *icd,
}
}
/* We will scale exactly */
- if (mf.width > width)
+ if (mf->width > width)
pix->width = width;
- if (mf.height > height)
+ if (mf->height > height)
pix->height = height;
pix->bytesperline = max(pix->bytesperline, pix->width);
diff --git a/drivers/media/platform/soc_camera/sh_mobile_csi2.c b/drivers/media/platform/soc_camera/sh_mobile_csi2.c
index cd93241eb497..12d3626ecf22 100644
--- a/drivers/media/platform/soc_camera/sh_mobile_csi2.c
+++ b/drivers/media/platform/soc_camera/sh_mobile_csi2.c
@@ -45,11 +45,17 @@ struct sh_csi2 {
static void sh_csi2_hwinit(struct sh_csi2 *priv);
-static int sh_csi2_try_fmt(struct v4l2_subdev *sd,
- struct v4l2_mbus_framefmt *mf)
+static int sh_csi2_set_fmt(struct v4l2_subdev *sd,
+ struct v4l2_subdev_pad_config *cfg,
+ struct v4l2_subdev_format *format)
{
struct sh_csi2 *priv = container_of(sd, struct sh_csi2, subdev);
struct sh_csi2_pdata *pdata = priv->pdev->dev.platform_data;
+ struct v4l2_mbus_framefmt *mf = &format->format;
+ u32 tmp = (priv->client->channel & 3) << 8;
+
+ if (format->pad)
+ return -EINVAL;
if (mf->width > 8188)
mf->width = 8188;
@@ -85,21 +91,11 @@ static int sh_csi2_try_fmt(struct v4l2_subdev *sd,
break;
}
- return 0;
-}
-
-/*
- * We have done our best in try_fmt to try and tell the sensor, which formats
- * we support. If now the configuration is unsuitable for us we can only
- * error out.
- */
-static int sh_csi2_s_fmt(struct v4l2_subdev *sd,
- struct v4l2_mbus_framefmt *mf)
-{
- struct sh_csi2 *priv = container_of(sd, struct sh_csi2, subdev);
- u32 tmp = (priv->client->channel & 3) << 8;
+ if (format->which == V4L2_SUBDEV_FORMAT_TRY) {
+ cfg->try_fmt = *mf;
+ return 0;
+ }
- dev_dbg(sd->v4l2_dev->dev, "%s(%u)\n", __func__, mf->code);
if (mf->width > 8188 || mf->width & 1)
return -EINVAL;
@@ -211,12 +207,14 @@ static int sh_csi2_s_mbus_config(struct v4l2_subdev *sd,
}
static struct v4l2_subdev_video_ops sh_csi2_subdev_video_ops = {
- .s_mbus_fmt = sh_csi2_s_fmt,
- .try_mbus_fmt = sh_csi2_try_fmt,
.g_mbus_config = sh_csi2_g_mbus_config,
.s_mbus_config = sh_csi2_s_mbus_config,
};
+static struct v4l2_subdev_pad_ops sh_csi2_subdev_pad_ops = {
+ .set_fmt = sh_csi2_set_fmt,
+};
+
static void sh_csi2_hwinit(struct sh_csi2 *priv)
{
struct sh_csi2_pdata *pdata = priv->pdev->dev.platform_data;
@@ -313,6 +311,7 @@ static struct v4l2_subdev_core_ops sh_csi2_subdev_core_ops = {
static struct v4l2_subdev_ops sh_csi2_subdev_ops = {
.core = &sh_csi2_subdev_core_ops,
.video = &sh_csi2_subdev_video_ops,
+ .pad = &sh_csi2_subdev_pad_ops,
};
static int sh_csi2_probe(struct platform_device *pdev)
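
[Editor's note, not part of the patch] The sh_mobile_csi2 hunks above show the subdev side of the same conversion: the separate try/s mbus-format callbacks collapse into a single pad set_fmt handler, where a TRY request is stored in the pad config (cfg->try_fmt) instead of being applied to the hardware. A bare-bones handler shape under those assumptions; the function name and placeholder comments are illustrative, not from this patch:

#include <media/v4l2-subdev.h>

/* Illustrative skeleton of a pad set_fmt handler: TRY formats are only
 * remembered in the pad config, ACTIVE formats program the device. */
static int example_set_fmt(struct v4l2_subdev *sd,
			   struct v4l2_subdev_pad_config *cfg,
			   struct v4l2_subdev_format *format)
{
	struct v4l2_mbus_framefmt *mf = &format->format;

	if (format->pad)
		return -EINVAL;

	/* clamp or adjust *mf to what the device supports here */

	if (format->which == V4L2_SUBDEV_FORMAT_TRY) {
		cfg->try_fmt = *mf;
		return 0;
	}

	/* apply the ACTIVE format to the hardware here */
	return 0;
}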
diff --git a/drivers/media/platform/soc_camera/soc_camera.c b/drivers/media/platform/soc_camera/soc_camera.c
index 7bfe7665687f..d708df410f74 100644
--- a/drivers/media/platform/soc_camera/soc_camera.c
+++ b/drivers/media/platform/soc_camera/soc_camera.c
@@ -484,10 +484,14 @@ static int soc_camera_init_user_formats(struct soc_camera_device *icd)
struct soc_camera_host *ici = to_soc_camera_host(icd->parent);
unsigned int i, fmts = 0, raw_fmts = 0;
int ret;
- u32 code;
+ struct v4l2_subdev_mbus_code_enum code = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
+ };
- while (!v4l2_subdev_call(sd, video, enum_mbus_fmt, raw_fmts, &code))
+ while (!v4l2_subdev_call(sd, pad, enum_mbus_code, NULL, &code)) {
raw_fmts++;
+ code.index++;
+ }
if (!ici->ops->get_formats)
/*
@@ -521,11 +525,12 @@ static int soc_camera_init_user_formats(struct soc_camera_device *icd)
fmts = 0;
for (i = 0; i < raw_fmts; i++)
if (!ici->ops->get_formats) {
- v4l2_subdev_call(sd, video, enum_mbus_fmt, i, &code);
+ code.index = i;
+ v4l2_subdev_call(sd, pad, enum_mbus_code, NULL, &code);
icd->user_formats[fmts].host_fmt =
- soc_mbus_get_fmtdesc(code);
+ soc_mbus_get_fmtdesc(code.code);
if (icd->user_formats[fmts].host_fmt)
- icd->user_formats[fmts++].code = code;
+ icd->user_formats[fmts++].code = code.code;
} else {
ret = ici->ops->get_formats(icd, i,
&icd->user_formats[fmts]);
@@ -1284,7 +1289,10 @@ static struct soc_camera_device *soc_camera_add_pdev(struct soc_camera_async_cli
static int soc_camera_probe_finish(struct soc_camera_device *icd)
{
struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
- struct v4l2_mbus_framefmt mf;
+ struct v4l2_subdev_format fmt = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
+ };
+ struct v4l2_mbus_framefmt *mf = &fmt.format;
int ret;
sd->grp_id = soc_camera_grp_id(icd);
@@ -1314,11 +1322,11 @@ static int soc_camera_probe_finish(struct soc_camera_device *icd)
goto evidstart;
/* Try to improve our guess of a reasonable window format */
- if (!v4l2_subdev_call(sd, video, g_mbus_fmt, &mf)) {
- icd->user_width = mf.width;
- icd->user_height = mf.height;
- icd->colorspace = mf.colorspace;
- icd->field = mf.field;
+ if (!v4l2_subdev_call(sd, pad, get_fmt, NULL, &fmt)) {
+ icd->user_width = mf->width;
+ icd->user_height = mf->height;
+ icd->colorspace = mf->colorspace;
+ icd->field = mf->field;
}
soc_camera_remove_device(icd);
diff --git a/drivers/media/platform/soc_camera/soc_camera_platform.c b/drivers/media/platform/soc_camera/soc_camera_platform.c
index f535910b4187..cc8eb0758219 100644
--- a/drivers/media/platform/soc_camera/soc_camera_platform.c
+++ b/drivers/media/platform/soc_camera/soc_camera_platform.c
@@ -37,9 +37,11 @@ static int soc_camera_platform_s_stream(struct v4l2_subdev *sd, int enable)
}
static int soc_camera_platform_fill_fmt(struct v4l2_subdev *sd,
- struct v4l2_mbus_framefmt *mf)
+ struct v4l2_subdev_pad_config *cfg,
+ struct v4l2_subdev_format *format)
{
struct soc_camera_platform_info *p = v4l2_get_subdevdata(sd);
+ struct v4l2_mbus_framefmt *mf = &format->format;
mf->width = p->format.width;
mf->height = p->format.height;
@@ -61,15 +63,16 @@ static struct v4l2_subdev_core_ops platform_subdev_core_ops = {
.s_power = soc_camera_platform_s_power,
};
-static int soc_camera_platform_enum_fmt(struct v4l2_subdev *sd, unsigned int index,
- u32 *code)
+static int soc_camera_platform_enum_mbus_code(struct v4l2_subdev *sd,
+ struct v4l2_subdev_pad_config *cfg,
+ struct v4l2_subdev_mbus_code_enum *code)
{
struct soc_camera_platform_info *p = v4l2_get_subdevdata(sd);
- if (index)
+ if (code->pad || code->index)
return -EINVAL;
- *code = p->format.code;
+ code->code = p->format.code;
return 0;
}
@@ -117,18 +120,21 @@ static int soc_camera_platform_g_mbus_config(struct v4l2_subdev *sd,
static struct v4l2_subdev_video_ops platform_subdev_video_ops = {
.s_stream = soc_camera_platform_s_stream,
- .enum_mbus_fmt = soc_camera_platform_enum_fmt,
.cropcap = soc_camera_platform_cropcap,
.g_crop = soc_camera_platform_g_crop,
- .try_mbus_fmt = soc_camera_platform_fill_fmt,
- .g_mbus_fmt = soc_camera_platform_fill_fmt,
- .s_mbus_fmt = soc_camera_platform_fill_fmt,
.g_mbus_config = soc_camera_platform_g_mbus_config,
};
+static const struct v4l2_subdev_pad_ops platform_subdev_pad_ops = {
+ .enum_mbus_code = soc_camera_platform_enum_mbus_code,
+ .get_fmt = soc_camera_platform_fill_fmt,
+ .set_fmt = soc_camera_platform_fill_fmt,
+};
+
static struct v4l2_subdev_ops platform_subdev_ops = {
.core = &platform_subdev_core_ops,
.video = &platform_subdev_video_ops,
+ .pad = &platform_subdev_pad_ops,
};
static int soc_camera_platform_probe(struct platform_device *pdev)
diff --git a/drivers/media/platform/soc_camera/soc_scale_crop.c b/drivers/media/platform/soc_camera/soc_scale_crop.c
index 8e74fb7f2a07..bda29bc1b933 100644
--- a/drivers/media/platform/soc_camera/soc_scale_crop.c
+++ b/drivers/media/platform/soc_camera/soc_scale_crop.c
@@ -211,22 +211,23 @@ int soc_camera_client_s_crop(struct v4l2_subdev *sd,
}
EXPORT_SYMBOL(soc_camera_client_s_crop);
-/* Iterative s_mbus_fmt, also updates cached client crop on success */
-static int client_s_fmt(struct soc_camera_device *icd,
+/* Iterative set_fmt, also updates cached client crop on success */
+static int client_set_fmt(struct soc_camera_device *icd,
struct v4l2_rect *rect, struct v4l2_rect *subrect,
unsigned int max_width, unsigned int max_height,
- struct v4l2_mbus_framefmt *mf, bool host_can_scale)
+ struct v4l2_subdev_format *format, bool host_can_scale)
{
struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
struct device *dev = icd->parent;
+ struct v4l2_mbus_framefmt *mf = &format->format;
unsigned int width = mf->width, height = mf->height, tmp_w, tmp_h;
struct v4l2_cropcap cap;
bool host_1to1;
int ret;
ret = v4l2_device_call_until_err(sd->v4l2_dev,
- soc_camera_grp_id(icd), video,
- s_mbus_fmt, mf);
+ soc_camera_grp_id(icd), pad,
+ set_fmt, NULL, format);
if (ret < 0)
return ret;
@@ -265,8 +266,8 @@ static int client_s_fmt(struct soc_camera_device *icd,
mf->width = tmp_w;
mf->height = tmp_h;
ret = v4l2_device_call_until_err(sd->v4l2_dev,
- soc_camera_grp_id(icd), video,
- s_mbus_fmt, mf);
+ soc_camera_grp_id(icd), pad,
+ set_fmt, NULL, format);
dev_geo(dev, "Camera scaled to %ux%u\n",
mf->width, mf->height);
if (ret < 0) {
@@ -309,7 +310,11 @@ int soc_camera_client_scale(struct soc_camera_device *icd,
bool host_can_scale, unsigned int shift)
{
struct device *dev = icd->parent;
- struct v4l2_mbus_framefmt mf_tmp = *mf;
+ struct v4l2_subdev_format fmt_tmp = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
+ .format = *mf,
+ };
+ struct v4l2_mbus_framefmt *mf_tmp = &fmt_tmp.format;
unsigned int scale_h, scale_v;
int ret;
@@ -317,25 +322,25 @@ int soc_camera_client_scale(struct soc_camera_device *icd,
* 5. Apply iterative camera S_FMT for camera user window (also updates
* client crop cache and the imaginary sub-rectangle).
*/
- ret = client_s_fmt(icd, rect, subrect, *width, *height,
- &mf_tmp, host_can_scale);
+ ret = client_set_fmt(icd, rect, subrect, *width, *height,
+ &fmt_tmp, host_can_scale);
if (ret < 0)
return ret;
dev_geo(dev, "5: camera scaled to %ux%u\n",
- mf_tmp.width, mf_tmp.height);
+ mf_tmp->width, mf_tmp->height);
/* 6. Retrieve camera output window (g_fmt) */
/* unneeded - it is already in "mf_tmp" */
/* 7. Calculate new client scales. */
- scale_h = soc_camera_calc_scale(rect->width, shift, mf_tmp.width);
- scale_v = soc_camera_calc_scale(rect->height, shift, mf_tmp.height);
+ scale_h = soc_camera_calc_scale(rect->width, shift, mf_tmp->width);
+ scale_v = soc_camera_calc_scale(rect->height, shift, mf_tmp->height);
- mf->width = mf_tmp.width;
- mf->height = mf_tmp.height;
- mf->colorspace = mf_tmp.colorspace;
+ mf->width = mf_tmp->width;
+ mf->height = mf_tmp->height;
+ mf->colorspace = mf_tmp->colorspace;
/*
* 8. Calculate new host crop - apply camera scales to previously
diff --git a/drivers/media/platform/via-camera.c b/drivers/media/platform/via-camera.c
index 678ed9f353cb..32e4ff46daf3 100644
--- a/drivers/media/platform/via-camera.c
+++ b/drivers/media/platform/via-camera.c
@@ -249,13 +249,15 @@ static int viacam_set_flip(struct via_camera *cam)
*/
static int viacam_configure_sensor(struct via_camera *cam)
{
- struct v4l2_mbus_framefmt mbus_fmt;
+ struct v4l2_subdev_format format = {
+ .which = V4L2_SUBDEV_FORMAT_ACTIVE,
+ };
int ret;
- v4l2_fill_mbus_format(&mbus_fmt, &cam->sensor_format, cam->mbus_code);
+ v4l2_fill_mbus_format(&format.format, &cam->sensor_format, cam->mbus_code);
ret = sensor_call(cam, core, init, 0);
if (ret == 0)
- ret = sensor_call(cam, video, s_mbus_fmt, &mbus_fmt);
+ ret = sensor_call(cam, pad, set_fmt, NULL, &format);
/*
* OV7670 does weird things if flip is set *before* format...
*/
@@ -903,14 +905,17 @@ static int viacam_do_try_fmt(struct via_camera *cam,
struct v4l2_pix_format *upix, struct v4l2_pix_format *spix)
{
int ret;
- struct v4l2_mbus_framefmt mbus_fmt;
+ struct v4l2_subdev_pad_config pad_cfg;
+ struct v4l2_subdev_format format = {
+ .which = V4L2_SUBDEV_FORMAT_TRY,
+ };
struct via_format *f = via_find_format(upix->pixelformat);
upix->pixelformat = f->pixelformat;
viacam_fmt_pre(upix, spix);
- v4l2_fill_mbus_format(&mbus_fmt, spix, f->mbus_code);
- ret = sensor_call(cam, video, try_mbus_fmt, &mbus_fmt);
- v4l2_fill_pix_format(spix, &mbus_fmt);
+ v4l2_fill_mbus_format(&format.format, spix, f->mbus_code);
+ ret = sensor_call(cam, pad, set_fmt, &pad_cfg, &format);
+ v4l2_fill_pix_format(spix, &format.format);
viacam_fmt_post(upix, spix);
return ret;
}
diff --git a/drivers/media/platform/vim2m.c b/drivers/media/platform/vim2m.c
index 4d6b4cc57c57..cecfd75b58d7 100644
--- a/drivers/media/platform/vim2m.c
+++ b/drivers/media/platform/vim2m.c
@@ -80,7 +80,6 @@ static struct platform_device vim2m_pdev = {
};
struct vim2m_fmt {
- char *name;
u32 fourcc;
int depth;
/* Types the format can be used for */
@@ -89,14 +88,12 @@ struct vim2m_fmt {
static struct vim2m_fmt formats[] = {
{
- .name = "RGB565 (BE)",
.fourcc = V4L2_PIX_FMT_RGB565X, /* rrrrrggg gggbbbbb */
.depth = 16,
/* Both capture and output format */
.types = MEM2MEM_CAPTURE | MEM2MEM_OUTPUT,
},
{
- .name = "4:2:2, packed, YUYV",
.fourcc = V4L2_PIX_FMT_YUYV,
.depth = 16,
/* Output-only format */
@@ -458,7 +455,6 @@ static int enum_fmt(struct v4l2_fmtdesc *f, u32 type)
if (i < NUM_FORMATS) {
/* Format found */
fmt = &formats[i];
- strncpy(f->description, fmt->name, sizeof(f->description) - 1);
f->pixelformat = fmt->fourcc;
return 0;
}
diff --git a/drivers/media/platform/vivid/vivid-core.c b/drivers/media/platform/vivid/vivid-core.c
index d33f16495dbc..d6caeeb5758d 100644
--- a/drivers/media/platform/vivid/vivid-core.c
+++ b/drivers/media/platform/vivid/vivid-core.c
@@ -392,6 +392,17 @@ static int vidioc_s_parm(struct file *file, void *fh,
return vivid_vid_out_g_parm(file, fh, parm);
}
+static int vidioc_log_status(struct file *file, void *fh)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct video_device *vdev = video_devdata(file);
+
+ v4l2_ctrl_log_status(file, fh);
+ if (vdev->vfl_dir == VFL_DIR_RX && vdev->vfl_type == VFL_TYPE_GRABBER)
+ tpg_log_status(&dev->tpg);
+ return 0;
+}
+
static ssize_t vivid_radio_read(struct file *file, char __user *buf,
size_t size, loff_t *offset)
{
@@ -610,7 +621,7 @@ static const struct v4l2_ioctl_ops vivid_ioctl_ops = {
.vidioc_g_edid = vidioc_g_edid,
.vidioc_s_edid = vidioc_s_edid,
- .vidioc_log_status = v4l2_ctrl_log_status,
+ .vidioc_log_status = vidioc_log_status,
.vidioc_subscribe_event = vidioc_subscribe_event,
.vidioc_unsubscribe_event = v4l2_event_unsubscribe,
};
diff --git a/drivers/media/platform/vivid/vivid-core.h b/drivers/media/platform/vivid/vivid-core.h
index 9e15aee9a52e..bf26173f1e3b 100644
--- a/drivers/media/platform/vivid/vivid-core.h
+++ b/drivers/media/platform/vivid/vivid-core.h
@@ -77,7 +77,6 @@ extern const struct v4l2_rect vivid_max_rect;
extern unsigned vivid_debug;
struct vivid_fmt {
- const char *name;
u32 fourcc; /* v4l2 format id */
bool is_yuv;
bool can_do_overlay;
diff --git a/drivers/media/platform/vivid/vivid-radio-rx.c b/drivers/media/platform/vivid/vivid-radio-rx.c
index c7651a506668..f99092ca8f5c 100644
--- a/drivers/media/platform/vivid/vivid-radio-rx.c
+++ b/drivers/media/platform/vivid/vivid-radio-rx.c
@@ -195,6 +195,8 @@ int vivid_radio_rx_s_hw_freq_seek(struct file *file, void *fh, const struct v4l2
if (dev->radio_rx_freq >= vivid_radio_bands[band].rangelow &&
dev->radio_rx_freq <= vivid_radio_bands[band].rangehigh)
break;
+ if (band == TOT_BANDS)
+ return -EINVAL;
low = vivid_radio_bands[band].rangelow;
high = vivid_radio_bands[band].rangehigh;
}
diff --git a/drivers/media/platform/vivid/vivid-tpg.c b/drivers/media/platform/vivid/vivid-tpg.c
index cb766eb154e7..4df755aa7e48 100644
--- a/drivers/media/platform/vivid/vivid-tpg.c
+++ b/drivers/media/platform/vivid/vivid-tpg.c
@@ -479,44 +479,71 @@ static void color_to_ycbcr(struct tpg_data *tpg, int r, int g, int b,
{ COEFF(-0.116, 224), COEFF(-0.384, 224), COEFF(0.5, 224) },
{ COEFF(0.5, 224), COEFF(-0.445, 224), COEFF(-0.055, 224) },
};
+ static const int smpte240m_full[3][3] = {
+ { COEFF(0.212, 255), COEFF(0.701, 255), COEFF(0.087, 255) },
+ { COEFF(-0.116, 255), COEFF(-0.384, 255), COEFF(0.5, 255) },
+ { COEFF(0.5, 255), COEFF(-0.445, 255), COEFF(-0.055, 255) },
+ };
static const int bt2020[3][3] = {
{ COEFF(0.2627, 219), COEFF(0.6780, 219), COEFF(0.0593, 219) },
{ COEFF(-0.1396, 224), COEFF(-0.3604, 224), COEFF(0.5, 224) },
{ COEFF(0.5, 224), COEFF(-0.4598, 224), COEFF(-0.0402, 224) },
};
+ static const int bt2020_full[3][3] = {
+ { COEFF(0.2627, 255), COEFF(0.6780, 255), COEFF(0.0593, 255) },
+ { COEFF(-0.1396, 255), COEFF(-0.3604, 255), COEFF(0.5, 255) },
+ { COEFF(0.5, 255), COEFF(-0.4598, 255), COEFF(-0.0402, 255) },
+ };
+ static const int bt2020c[4] = {
+ COEFF(1.0 / 1.9404, 224), COEFF(1.0 / 1.5816, 224),
+ COEFF(1.0 / 1.7184, 224), COEFF(1.0 / 0.9936, 224),
+ };
+ static const int bt2020c_full[4] = {
+ COEFF(1.0 / 1.9404, 255), COEFF(1.0 / 1.5816, 255),
+ COEFF(1.0 / 1.7184, 255), COEFF(1.0 / 0.9936, 255),
+ };
+
bool full = tpg->real_quantization == V4L2_QUANTIZATION_FULL_RANGE;
unsigned y_offset = full ? 0 : 16;
int lin_y, yc;
switch (tpg->real_ycbcr_enc) {
case V4L2_YCBCR_ENC_601:
- case V4L2_YCBCR_ENC_XV601:
case V4L2_YCBCR_ENC_SYCC:
rgb2ycbcr(full ? bt601_full : bt601, r, g, b, y_offset, y, cb, cr);
break;
+ case V4L2_YCBCR_ENC_XV601:
+ /* Ignore quantization range, there is only one possible
+ * Y'CbCr encoding. */
+ rgb2ycbcr(bt601, r, g, b, 16, y, cb, cr);
+ break;
+ case V4L2_YCBCR_ENC_XV709:
+ /* Ignore quantization range, there is only one possible
+ * Y'CbCr encoding. */
+ rgb2ycbcr(rec709, r, g, b, 16, y, cb, cr);
+ break;
case V4L2_YCBCR_ENC_BT2020:
- rgb2ycbcr(bt2020, r, g, b, 16, y, cb, cr);
+ rgb2ycbcr(full ? bt2020_full : bt2020, r, g, b, y_offset, y, cb, cr);
break;
case V4L2_YCBCR_ENC_BT2020_CONST_LUM:
lin_y = (COEFF(0.2627, 255) * rec709_to_linear(r) +
COEFF(0.6780, 255) * rec709_to_linear(g) +
COEFF(0.0593, 255) * rec709_to_linear(b)) >> 16;
yc = linear_to_rec709(lin_y);
- *y = (yc * 219) / 255 + (16 << 4);
+ *y = full ? yc : (yc * 219) / 255 + (16 << 4);
if (b <= yc)
- *cb = (((b - yc) * COEFF(1.0 / 1.9404, 224)) >> 16) + (128 << 4);
+ *cb = (((b - yc) * (full ? bt2020c_full[0] : bt2020c[0])) >> 16) + (128 << 4);
else
- *cb = (((b - yc) * COEFF(1.0 / 1.5816, 224)) >> 16) + (128 << 4);
+ *cb = (((b - yc) * (full ? bt2020c_full[1] : bt2020c[1])) >> 16) + (128 << 4);
if (r <= yc)
- *cr = (((r - yc) * COEFF(1.0 / 1.7184, 224)) >> 16) + (128 << 4);
+ *cr = (((r - yc) * (full ? bt2020c_full[2] : bt2020c[2])) >> 16) + (128 << 4);
else
- *cr = (((r - yc) * COEFF(1.0 / 0.9936, 224)) >> 16) + (128 << 4);
+ *cr = (((r - yc) * (full ? bt2020c_full[3] : bt2020c[3])) >> 16) + (128 << 4);
break;
case V4L2_YCBCR_ENC_SMPTE240M:
- rgb2ycbcr(smpte240m, r, g, b, 16, y, cb, cr);
+ rgb2ycbcr(full ? smpte240m_full : smpte240m, r, g, b, y_offset, y, cb, cr);
break;
case V4L2_YCBCR_ENC_709:
- case V4L2_YCBCR_ENC_XV709:
default:
rgb2ycbcr(full ? rec709_full : rec709, r, g, b, y_offset, y, cb, cr);
break;
@@ -567,42 +594,71 @@ static void ycbcr_to_color(struct tpg_data *tpg, int y, int cb, int cr,
{ COEFF(1, 219), COEFF(-0.2253, 224), COEFF(-0.4767, 224) },
{ COEFF(1, 219), COEFF(1.8270, 224), COEFF(0, 224) },
};
+ static const int smpte240m_full[3][3] = {
+ { COEFF(1, 255), COEFF(0, 255), COEFF(1.5756, 255) },
+ { COEFF(1, 255), COEFF(-0.2253, 255), COEFF(-0.4767, 255) },
+ { COEFF(1, 255), COEFF(1.8270, 255), COEFF(0, 255) },
+ };
static const int bt2020[3][3] = {
{ COEFF(1, 219), COEFF(0, 224), COEFF(1.4746, 224) },
{ COEFF(1, 219), COEFF(-0.1646, 224), COEFF(-0.5714, 224) },
{ COEFF(1, 219), COEFF(1.8814, 224), COEFF(0, 224) },
};
+ static const int bt2020_full[3][3] = {
+ { COEFF(1, 255), COEFF(0, 255), COEFF(1.4746, 255) },
+ { COEFF(1, 255), COEFF(-0.1646, 255), COEFF(-0.5714, 255) },
+ { COEFF(1, 255), COEFF(1.8814, 255), COEFF(0, 255) },
+ };
+ static const int bt2020c[4] = {
+ COEFF(1.9404, 224), COEFF(1.5816, 224),
+ COEFF(1.7184, 224), COEFF(0.9936, 224),
+ };
+ static const int bt2020c_full[4] = {
+ COEFF(1.9404, 255), COEFF(1.5816, 255),
+ COEFF(1.7184, 255), COEFF(0.9936, 255),
+ };
+
bool full = tpg->real_quantization == V4L2_QUANTIZATION_FULL_RANGE;
unsigned y_offset = full ? 0 : 16;
+ int y_fac = full ? COEFF(1.0, 255) : COEFF(1.0, 219);
int lin_r, lin_g, lin_b, lin_y;
switch (tpg->real_ycbcr_enc) {
case V4L2_YCBCR_ENC_601:
- case V4L2_YCBCR_ENC_XV601:
case V4L2_YCBCR_ENC_SYCC:
ycbcr2rgb(full ? bt601_full : bt601, y, cb, cr, y_offset, r, g, b);
break;
+ case V4L2_YCBCR_ENC_XV601:
+ /* Ignore quantization range, there is only one possible
+ * Y'CbCr encoding. */
+ ycbcr2rgb(bt601, y, cb, cr, 16, r, g, b);
+ break;
+ case V4L2_YCBCR_ENC_XV709:
+ /* Ignore quantization range, there is only one possible
+ * Y'CbCr encoding. */
+ ycbcr2rgb(rec709, y, cb, cr, 16, r, g, b);
+ break;
case V4L2_YCBCR_ENC_BT2020:
- ycbcr2rgb(bt2020, y, cb, cr, 16, r, g, b);
+ ycbcr2rgb(full ? bt2020_full : bt2020, y, cb, cr, y_offset, r, g, b);
break;
case V4L2_YCBCR_ENC_BT2020_CONST_LUM:
- y -= 16 << 4;
+ y -= full ? 0 : 16 << 4;
cb -= 128 << 4;
cr -= 128 << 4;
if (cb <= 0)
- *b = COEFF(1.0, 219) * y + COEFF(1.9404, 224) * cb;
+ *b = y_fac * y + (full ? bt2020c_full[0] : bt2020c[0]) * cb;
else
- *b = COEFF(1.0, 219) * y + COEFF(1.5816, 224) * cb;
+ *b = y_fac * y + (full ? bt2020c_full[1] : bt2020c[1]) * cb;
*b = *b >> 12;
if (cr <= 0)
- *r = COEFF(1.0, 219) * y + COEFF(1.7184, 224) * cr;
+ *r = y_fac * y + (full ? bt2020c_full[2] : bt2020c[2]) * cr;
else
- *r = COEFF(1.0, 219) * y + COEFF(0.9936, 224) * cr;
+ *r = y_fac * y + (full ? bt2020c_full[3] : bt2020c[3]) * cr;
*r = *r >> 12;
lin_r = rec709_to_linear(*r);
lin_b = rec709_to_linear(*b);
- lin_y = rec709_to_linear((y * 255) / 219);
+ lin_y = rec709_to_linear((y * 255) / (full ? 255 : 219));
lin_g = COEFF(1.0 / 0.6780, 255) * lin_y -
COEFF(0.2627 / 0.6780, 255) * lin_r -
@@ -610,10 +666,9 @@ static void ycbcr_to_color(struct tpg_data *tpg, int y, int cb, int cr,
*g = linear_to_rec709(lin_g >> 12);
break;
case V4L2_YCBCR_ENC_SMPTE240M:
- ycbcr2rgb(smpte240m, y, cb, cr, 16, r, g, b);
+ ycbcr2rgb(full ? smpte240m_full : smpte240m, y, cb, cr, y_offset, r, g, b);
break;
case V4L2_YCBCR_ENC_709:
- case V4L2_YCBCR_ENC_XV709:
default:
ycbcr2rgb(full ? rec709_full : rec709, y, cb, cr, y_offset, r, g, b);
break;
@@ -1670,6 +1725,22 @@ static int tpg_pattern_avg(const struct tpg_data *tpg,
return -1;
}
+void tpg_log_status(struct tpg_data *tpg)
+{
+ pr_info("tpg source WxH: %ux%u (%s)\n",
+ tpg->src_width, tpg->src_height,
+ tpg->is_yuv ? "YCbCr" : "RGB");
+ pr_info("tpg field: %u\n", tpg->field);
+ pr_info("tpg crop: %ux%u@%dx%d\n", tpg->crop.width, tpg->crop.height,
+ tpg->crop.left, tpg->crop.top);
+ pr_info("tpg compose: %ux%u@%dx%d\n", tpg->compose.width, tpg->compose.height,
+ tpg->compose.left, tpg->compose.top);
+ pr_info("tpg colorspace: %d\n", tpg->colorspace);
+ pr_info("tpg Y'CbCr encoding: %d/%d\n", tpg->ycbcr_enc, tpg->real_ycbcr_enc);
+ pr_info("tpg quantization: %d/%d\n", tpg->quantization, tpg->real_quantization);
+ pr_info("tpg RGB range: %d/%d\n", tpg->rgb_range, tpg->real_rgb_range);
+}
+
/*
* This struct contains common parameters used by both the drawing of the
* test pattern and the drawing of the extras (borders, square, etc.)
diff --git a/drivers/media/platform/vivid/vivid-tpg.h b/drivers/media/platform/vivid/vivid-tpg.h
index a50cd2e2535b..ef8638f945be 100644
--- a/drivers/media/platform/vivid/vivid-tpg.h
+++ b/drivers/media/platform/vivid/vivid-tpg.h
@@ -192,6 +192,7 @@ int tpg_alloc(struct tpg_data *tpg, unsigned max_w);
void tpg_free(struct tpg_data *tpg);
void tpg_reset_source(struct tpg_data *tpg, unsigned width, unsigned height,
u32 field);
+void tpg_log_status(struct tpg_data *tpg);
void tpg_set_font(const u8 *f);
void tpg_gen_text(const struct tpg_data *tpg,
diff --git a/drivers/media/platform/vivid/vivid-vid-cap.c b/drivers/media/platform/vivid/vivid-vid-cap.c
index dab5990f45a0..fd7adc43f63d 100644
--- a/drivers/media/platform/vivid/vivid-vid-cap.c
+++ b/drivers/media/platform/vivid/vivid-vid-cap.c
@@ -40,7 +40,6 @@ static const struct v4l2_fract
static const struct vivid_fmt formats_ovl[] = {
{
- .name = "RGB565 (LE)",
.fourcc = V4L2_PIX_FMT_RGB565, /* gggbbbbb rrrrrggg */
.vdownsampling = { 1 },
.bit_depth = { 16 },
@@ -48,7 +47,6 @@ static const struct vivid_fmt formats_ovl[] = {
.buffers = 1,
},
{
- .name = "XRGB555 (LE)",
.fourcc = V4L2_PIX_FMT_XRGB555, /* gggbbbbb arrrrrgg */
.vdownsampling = { 1 },
.bit_depth = { 16 },
@@ -56,7 +54,6 @@ static const struct vivid_fmt formats_ovl[] = {
.buffers = 1,
},
{
- .name = "ARGB555 (LE)",
.fourcc = V4L2_PIX_FMT_ARGB555, /* gggbbbbb arrrrrgg */
.vdownsampling = { 1 },
.bit_depth = { 16 },
@@ -66,7 +63,7 @@ static const struct vivid_fmt formats_ovl[] = {
};
/* The number of discrete webcam framesizes */
-#define VIVID_WEBCAM_SIZES 3
+#define VIVID_WEBCAM_SIZES 4
/* The number of discrete webcam frameintervals */
#define VIVID_WEBCAM_IVALS (VIVID_WEBCAM_SIZES * 2)
@@ -75,6 +72,7 @@ static const struct v4l2_frmsize_discrete webcam_sizes[VIVID_WEBCAM_SIZES] = {
{ 320, 180 },
{ 640, 360 },
{ 1280, 720 },
+ { 1920, 1080 },
};
/*
@@ -82,6 +80,8 @@ static const struct v4l2_frmsize_discrete webcam_sizes[VIVID_WEBCAM_SIZES] = {
* elements in this array as there are in webcam_sizes.
*/
static const struct v4l2_fract webcam_intervals[VIVID_WEBCAM_IVALS] = {
+ { 1, 2 },
+ { 1, 5 },
{ 1, 10 },
{ 1, 15 },
{ 1, 25 },
@@ -720,8 +720,8 @@ int vivid_s_fmt_vid_cap(struct file *file, void *priv,
webcam_sizes[i].height == mp->height)
break;
dev->webcam_size_idx = i;
- if (dev->webcam_ival_idx >= 2 * (3 - i))
- dev->webcam_ival_idx = 2 * (3 - i) - 1;
+ if (dev->webcam_ival_idx >= 2 * (VIVID_WEBCAM_SIZES - i))
+ dev->webcam_ival_idx = 2 * (VIVID_WEBCAM_SIZES - i) - 1;
vivid_update_format_cap(dev, false);
} else {
struct v4l2_rect r = { 0, 0, mp->width, mp->height };
@@ -1030,7 +1030,6 @@ int vidioc_enum_fmt_vid_overlay(struct file *file, void *priv,
fmt = &formats_ovl[f->index];
- strlcpy(f->description, fmt->name, sizeof(f->description));
f->pixelformat = fmt->fourcc;
return 0;
}
@@ -1768,7 +1767,7 @@ int vidioc_enum_frameintervals(struct file *file, void *priv,
break;
if (i == ARRAY_SIZE(webcam_sizes))
return -EINVAL;
- if (fival->index >= 2 * (3 - i))
+ if (fival->index >= 2 * (VIVID_WEBCAM_SIZES - i))
return -EINVAL;
fival->type = V4L2_FRMIVAL_TYPE_DISCRETE;
fival->discrete = webcam_intervals[fival->index];
@@ -1798,7 +1797,7 @@ int vivid_vid_cap_s_parm(struct file *file, void *priv,
struct v4l2_streamparm *parm)
{
struct vivid_dev *dev = video_drvdata(file);
- unsigned ival_sz = 2 * (3 - dev->webcam_size_idx);
+ unsigned ival_sz = 2 * (VIVID_WEBCAM_SIZES - dev->webcam_size_idx);
struct v4l2_fract tpf;
unsigned i;
diff --git a/drivers/media/platform/vivid/vivid-vid-common.c b/drivers/media/platform/vivid/vivid-vid-common.c
index aa446271ad34..6ba874420485 100644
--- a/drivers/media/platform/vivid/vivid-vid-common.c
+++ b/drivers/media/platform/vivid/vivid-vid-common.c
@@ -45,7 +45,6 @@ const struct v4l2_dv_timings_cap vivid_dv_timings_cap = {
struct vivid_fmt vivid_formats[] = {
{
- .name = "4:2:2, packed, YUYV",
.fourcc = V4L2_PIX_FMT_YUYV,
.vdownsampling = { 1 },
.bit_depth = { 16 },
@@ -55,7 +54,6 @@ struct vivid_fmt vivid_formats[] = {
.data_offset = { PLANE0_DATA_OFFSET },
},
{
- .name = "4:2:2, packed, UYVY",
.fourcc = V4L2_PIX_FMT_UYVY,
.vdownsampling = { 1 },
.bit_depth = { 16 },
@@ -64,7 +62,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 1,
},
{
- .name = "4:2:2, packed, YVYU",
.fourcc = V4L2_PIX_FMT_YVYU,
.vdownsampling = { 1 },
.bit_depth = { 16 },
@@ -73,7 +70,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 1,
},
{
- .name = "4:2:2, packed, VYUY",
.fourcc = V4L2_PIX_FMT_VYUY,
.vdownsampling = { 1 },
.bit_depth = { 16 },
@@ -82,7 +78,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 1,
},
{
- .name = "YUV 4:2:2 triplanar",
.fourcc = V4L2_PIX_FMT_YUV422P,
.vdownsampling = { 1, 1, 1 },
.bit_depth = { 8, 4, 4 },
@@ -91,7 +86,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 1,
},
{
- .name = "YUV 4:2:0 triplanar",
.fourcc = V4L2_PIX_FMT_YUV420,
.vdownsampling = { 1, 2, 2 },
.bit_depth = { 8, 4, 4 },
@@ -100,7 +94,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 1,
},
{
- .name = "YVU 4:2:0 triplanar",
.fourcc = V4L2_PIX_FMT_YVU420,
.vdownsampling = { 1, 2, 2 },
.bit_depth = { 8, 4, 4 },
@@ -109,7 +102,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 1,
},
{
- .name = "YUV 4:2:0 biplanar",
.fourcc = V4L2_PIX_FMT_NV12,
.vdownsampling = { 1, 2 },
.bit_depth = { 8, 8 },
@@ -118,7 +110,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 1,
},
{
- .name = "YVU 4:2:0 biplanar",
.fourcc = V4L2_PIX_FMT_NV21,
.vdownsampling = { 1, 2 },
.bit_depth = { 8, 8 },
@@ -127,7 +118,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 1,
},
{
- .name = "YUV 4:2:2 biplanar",
.fourcc = V4L2_PIX_FMT_NV16,
.vdownsampling = { 1, 1 },
.bit_depth = { 8, 8 },
@@ -136,7 +126,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 1,
},
{
- .name = "YVU 4:2:2 biplanar",
.fourcc = V4L2_PIX_FMT_NV61,
.vdownsampling = { 1, 1 },
.bit_depth = { 8, 8 },
@@ -145,7 +134,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 1,
},
{
- .name = "YUV 4:4:4 biplanar",
.fourcc = V4L2_PIX_FMT_NV24,
.vdownsampling = { 1, 1 },
.bit_depth = { 8, 16 },
@@ -154,7 +142,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 1,
},
{
- .name = "YVU 4:4:4 biplanar",
.fourcc = V4L2_PIX_FMT_NV42,
.vdownsampling = { 1, 1 },
.bit_depth = { 8, 16 },
@@ -163,7 +150,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 1,
},
{
- .name = "YUV555 (LE)",
.fourcc = V4L2_PIX_FMT_YUV555, /* uuuvvvvv ayyyyyuu */
.vdownsampling = { 1 },
.bit_depth = { 16 },
@@ -172,7 +158,6 @@ struct vivid_fmt vivid_formats[] = {
.alpha_mask = 0x8000,
},
{
- .name = "YUV565 (LE)",
.fourcc = V4L2_PIX_FMT_YUV565, /* uuuvvvvv yyyyyuuu */
.vdownsampling = { 1 },
.bit_depth = { 16 },
@@ -180,7 +165,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 1,
},
{
- .name = "YUV444",
.fourcc = V4L2_PIX_FMT_YUV444, /* uuuuvvvv aaaayyyy */
.vdownsampling = { 1 },
.bit_depth = { 16 },
@@ -189,7 +173,6 @@ struct vivid_fmt vivid_formats[] = {
.alpha_mask = 0xf000,
},
{
- .name = "YUV32 (LE)",
.fourcc = V4L2_PIX_FMT_YUV32, /* ayuv */
.vdownsampling = { 1 },
.bit_depth = { 32 },
@@ -198,7 +181,6 @@ struct vivid_fmt vivid_formats[] = {
.alpha_mask = 0x000000ff,
},
{
- .name = "Monochrome",
.fourcc = V4L2_PIX_FMT_GREY,
.vdownsampling = { 1 },
.bit_depth = { 8 },
@@ -207,7 +189,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 1,
},
{
- .name = "RGB332",
.fourcc = V4L2_PIX_FMT_RGB332, /* rrrgggbb */
.vdownsampling = { 1 },
.bit_depth = { 8 },
@@ -215,7 +196,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 1,
},
{
- .name = "RGB565 (LE)",
.fourcc = V4L2_PIX_FMT_RGB565, /* gggbbbbb rrrrrggg */
.vdownsampling = { 1 },
.bit_depth = { 16 },
@@ -224,7 +204,6 @@ struct vivid_fmt vivid_formats[] = {
.can_do_overlay = true,
},
{
- .name = "RGB565 (BE)",
.fourcc = V4L2_PIX_FMT_RGB565X, /* rrrrrggg gggbbbbb */
.vdownsampling = { 1 },
.bit_depth = { 16 },
@@ -233,7 +212,6 @@ struct vivid_fmt vivid_formats[] = {
.can_do_overlay = true,
},
{
- .name = "RGB444",
.fourcc = V4L2_PIX_FMT_RGB444, /* xxxxrrrr ggggbbbb */
.vdownsampling = { 1 },
.bit_depth = { 16 },
@@ -241,7 +219,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 1,
},
{
- .name = "XRGB444",
.fourcc = V4L2_PIX_FMT_XRGB444, /* xxxxrrrr ggggbbbb */
.vdownsampling = { 1 },
.bit_depth = { 16 },
@@ -249,7 +226,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 1,
},
{
- .name = "ARGB444",
.fourcc = V4L2_PIX_FMT_ARGB444, /* aaaarrrr ggggbbbb */
.vdownsampling = { 1 },
.bit_depth = { 16 },
@@ -258,7 +234,6 @@ struct vivid_fmt vivid_formats[] = {
.alpha_mask = 0x00f0,
},
{
- .name = "RGB555 (LE)",
.fourcc = V4L2_PIX_FMT_RGB555, /* gggbbbbb xrrrrrgg */
.vdownsampling = { 1 },
.bit_depth = { 16 },
@@ -267,7 +242,6 @@ struct vivid_fmt vivid_formats[] = {
.can_do_overlay = true,
},
{
- .name = "XRGB555 (LE)",
.fourcc = V4L2_PIX_FMT_XRGB555, /* gggbbbbb xrrrrrgg */
.vdownsampling = { 1 },
.bit_depth = { 16 },
@@ -276,7 +250,6 @@ struct vivid_fmt vivid_formats[] = {
.can_do_overlay = true,
},
{
- .name = "ARGB555 (LE)",
.fourcc = V4L2_PIX_FMT_ARGB555, /* gggbbbbb arrrrrgg */
.vdownsampling = { 1 },
.bit_depth = { 16 },
@@ -286,7 +259,6 @@ struct vivid_fmt vivid_formats[] = {
.alpha_mask = 0x8000,
},
{
- .name = "RGB555 (BE)",
.fourcc = V4L2_PIX_FMT_RGB555X, /* xrrrrrgg gggbbbbb */
.vdownsampling = { 1 },
.bit_depth = { 16 },
@@ -294,7 +266,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 1,
},
{
- .name = "XRGB555 (BE)",
.fourcc = V4L2_PIX_FMT_XRGB555X, /* xrrrrrgg gggbbbbb */
.vdownsampling = { 1 },
.bit_depth = { 16 },
@@ -302,7 +273,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 1,
},
{
- .name = "ARGB555 (BE)",
.fourcc = V4L2_PIX_FMT_ARGB555X, /* arrrrrgg gggbbbbb */
.vdownsampling = { 1 },
.bit_depth = { 16 },
@@ -311,7 +281,6 @@ struct vivid_fmt vivid_formats[] = {
.alpha_mask = 0x0080,
},
{
- .name = "RGB24 (LE)",
.fourcc = V4L2_PIX_FMT_RGB24, /* rgb */
.vdownsampling = { 1 },
.bit_depth = { 24 },
@@ -319,7 +288,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 1,
},
{
- .name = "RGB24 (BE)",
.fourcc = V4L2_PIX_FMT_BGR24, /* bgr */
.vdownsampling = { 1 },
.bit_depth = { 24 },
@@ -327,7 +295,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 1,
},
{
- .name = "BGR666",
.fourcc = V4L2_PIX_FMT_BGR666, /* bbbbbbgg ggggrrrr rrxxxxxx */
.vdownsampling = { 1 },
.bit_depth = { 32 },
@@ -335,7 +302,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 1,
},
{
- .name = "RGB32 (LE)",
.fourcc = V4L2_PIX_FMT_RGB32, /* xrgb */
.vdownsampling = { 1 },
.bit_depth = { 32 },
@@ -343,7 +309,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 1,
},
{
- .name = "RGB32 (BE)",
.fourcc = V4L2_PIX_FMT_BGR32, /* bgrx */
.vdownsampling = { 1 },
.bit_depth = { 32 },
@@ -351,7 +316,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 1,
},
{
- .name = "XRGB32 (LE)",
.fourcc = V4L2_PIX_FMT_XRGB32, /* xrgb */
.vdownsampling = { 1 },
.bit_depth = { 32 },
@@ -359,7 +323,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 1,
},
{
- .name = "XRGB32 (BE)",
.fourcc = V4L2_PIX_FMT_XBGR32, /* bgrx */
.vdownsampling = { 1 },
.bit_depth = { 32 },
@@ -367,7 +330,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 1,
},
{
- .name = "ARGB32 (LE)",
.fourcc = V4L2_PIX_FMT_ARGB32, /* argb */
.vdownsampling = { 1 },
.bit_depth = { 32 },
@@ -376,7 +338,6 @@ struct vivid_fmt vivid_formats[] = {
.alpha_mask = 0x000000ff,
},
{
- .name = "ARGB32 (BE)",
.fourcc = V4L2_PIX_FMT_ABGR32, /* bgra */
.vdownsampling = { 1 },
.bit_depth = { 32 },
@@ -385,7 +346,6 @@ struct vivid_fmt vivid_formats[] = {
.alpha_mask = 0xff000000,
},
{
- .name = "Bayer BG/GR",
.fourcc = V4L2_PIX_FMT_SBGGR8, /* Bayer BG/GR */
.vdownsampling = { 1 },
.bit_depth = { 8 },
@@ -393,7 +353,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 1,
},
{
- .name = "Bayer GB/RG",
.fourcc = V4L2_PIX_FMT_SGBRG8, /* Bayer GB/RG */
.vdownsampling = { 1 },
.bit_depth = { 8 },
@@ -401,7 +360,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 1,
},
{
- .name = "Bayer GR/BG",
.fourcc = V4L2_PIX_FMT_SGRBG8, /* Bayer GR/BG */
.vdownsampling = { 1 },
.bit_depth = { 8 },
@@ -409,7 +367,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 1,
},
{
- .name = "Bayer RG/GB",
.fourcc = V4L2_PIX_FMT_SRGGB8, /* Bayer RG/GB */
.vdownsampling = { 1 },
.bit_depth = { 8 },
@@ -417,7 +374,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 1,
},
{
- .name = "4:2:2, biplanar, YUV",
.fourcc = V4L2_PIX_FMT_NV16M,
.vdownsampling = { 1, 1 },
.bit_depth = { 8, 8 },
@@ -427,7 +383,6 @@ struct vivid_fmt vivid_formats[] = {
.data_offset = { PLANE0_DATA_OFFSET, 0 },
},
{
- .name = "4:2:2, biplanar, YVU",
.fourcc = V4L2_PIX_FMT_NV61M,
.vdownsampling = { 1, 1 },
.bit_depth = { 8, 8 },
@@ -437,7 +392,6 @@ struct vivid_fmt vivid_formats[] = {
.data_offset = { 0, PLANE0_DATA_OFFSET },
},
{
- .name = "4:2:0, triplanar, YUV",
.fourcc = V4L2_PIX_FMT_YUV420M,
.vdownsampling = { 1, 2, 2 },
.bit_depth = { 8, 4, 4 },
@@ -446,7 +400,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 3,
},
{
- .name = "4:2:0, triplanar, YVU",
.fourcc = V4L2_PIX_FMT_YVU420M,
.vdownsampling = { 1, 2, 2 },
.bit_depth = { 8, 4, 4 },
@@ -455,7 +408,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 3,
},
{
- .name = "4:2:0, biplanar, YUV",
.fourcc = V4L2_PIX_FMT_NV12M,
.vdownsampling = { 1, 2 },
.bit_depth = { 8, 8 },
@@ -464,7 +416,6 @@ struct vivid_fmt vivid_formats[] = {
.buffers = 2,
},
{
- .name = "4:2:0, biplanar, YVU",
.fourcc = V4L2_PIX_FMT_NV21M,
.vdownsampling = { 1, 2 },
.bit_depth = { 8, 8 },
@@ -750,7 +701,6 @@ int vivid_enum_fmt_vid(struct file *file, void *priv,
fmt = &vivid_formats[f->index];
- strlcpy(f->description, fmt->name, sizeof(f->description));
f->pixelformat = fmt->fourcc;
return 0;
}
diff --git a/drivers/media/platform/vivid/vivid-vid-out.c b/drivers/media/platform/vivid/vivid-vid-out.c
index 0af43dc7715c..00f42df947c0 100644
--- a/drivers/media/platform/vivid/vivid-vid-out.c
+++ b/drivers/media/platform/vivid/vivid-vid-out.c
@@ -1152,7 +1152,8 @@ int vivid_vid_out_g_parm(struct file *file, void *priv,
parm->parm.output.capability = V4L2_CAP_TIMEPERFRAME;
parm->parm.output.timeperframe = dev->timeperframe_vid_out;
parm->parm.output.writebuffers = 1;
-return 0;
+
+ return 0;
}
int vidioc_subscribe_event(struct v4l2_fh *fh,
diff --git a/drivers/media/platform/xilinx/Kconfig b/drivers/media/platform/xilinx/Kconfig
index d7324c726fc2..84bae795b70d 100644
--- a/drivers/media/platform/xilinx/Kconfig
+++ b/drivers/media/platform/xilinx/Kconfig
@@ -1,6 +1,6 @@
config VIDEO_XILINX
tristate "Xilinx Video IP (EXPERIMENTAL)"
- depends on VIDEO_V4L2 && VIDEO_V4L2_SUBDEV_API && OF
+ depends on VIDEO_V4L2 && VIDEO_V4L2_SUBDEV_API && OF && HAS_DMA
select VIDEOBUF2_DMA_CONTIG
---help---
Driver for Xilinx Video IP Pipelines