// SPDX-License-Identifier: GPL-2.0
/*
 * Driver for STM32 Digital Camera Memory Interface
 *
 * Copyright (C) STMicroelectronics SA 2017
 * Authors: Yannick Fertre <yannick.fertre@st.com>
 *          Hugues Fruchet <hugues.fruchet@st.com>
 *          for STMicroelectronics.
 *
 * This driver is based on atmel_isi.c
 *
 */
/* NOTE(review): fragment of the capture-restart path; entered with dcmi->irqlock held */
if (dcmi->state != RUNNING) {
	/* Streaming was stopped meanwhile: nothing to restart */
	spin_unlock_irq(&dcmi->irqlock);
	return -EINVAL;
}

/* Restart a new DMA transfer with next buffer */
if (list_empty(&dcmi->buffers)) {
	/* No buffer queued yet: defer restart until the next buffer is queued */
	dev_dbg(dcmi->dev, "Capture restart is deferred to next buffer queueing\n");
	dcmi->state = WAIT_FOR_BUFFER;
	spin_unlock_irq(&dcmi->irqlock);
	return 0;
}

/* Pick the oldest queued buffer and make it the active capture target */
buf = list_entry(dcmi->buffers.next, struct dcmi_buf, list);
dcmi->active = buf;
/* Check DMA status */
status = dmaengine_tx_status(dcmi->dma_chan, dcmi->dma_cookie, &state);

switch (status) {
case DMA_IN_PROGRESS:
	dev_dbg(dcmi->dev, "%s: Received DMA_IN_PROGRESS\n", __func__);
	break;
case DMA_PAUSED:
	dev_err(dcmi->dev, "%s: Received DMA_PAUSED\n", __func__);
	break;
case DMA_ERROR:
	dev_err(dcmi->dev, "%s: Received DMA_ERROR\n", __func__);

	/* Return buffer to V4L2 in error state */
	dcmi_buffer_done(dcmi, buf, 0, -EIO);
	break;
case DMA_COMPLETE:
	dev_dbg(dcmi->dev, "%s: Received DMA_COMPLETE\n", __func__);

	/* Return buffer to V4L2 */
	dcmi_buffer_done(dcmi, buf, buf->size, 0);

	/* Drop the lock before restarting: restart re-acquires it itself */
	spin_unlock_irq(&dcmi->irqlock);

	/* Restart capture */
	if (dcmi_restart_capture(dcmi))
		dev_err(dcmi->dev, "%s: Cannot restart capture on DMA complete\n",
			__func__);
	/* Early return: lock already released on this path */
	return;
default:
	dev_err(dcmi->dev, "%s: Received unknown status\n", __func__);
	break;
}
/*
 * Avoid call of dmaengine_terminate_sync() between
 * dmaengine_prep_slave_single() and dmaengine_submit()
 * by locking the whole DMA submission sequence
 */
mutex_lock(&dcmi->dma_lock);

/*
 * Because of variable JPEG buffer size sent by sensor,
 * DMA transfer never completes due to transfer size never reached.
 * In order to ensure that all the JPEG data are transferred
 * in active buffer memory, DMA is drained.
 * Then DMA tx status gives the amount of data transferred
 * to memory, which is then returned to V4L2 through the active
 * buffer payload.
 */
/* Reject buffers whose plane is smaller than the negotiated image size */
if (vb2_plane_size(vb, 0) < size) {
	dev_err(dcmi->dev, "%s data will not fit into plane (%lu < %lu)\n",
		__func__, vb2_plane_size(vb, 0), size);
	return -EINVAL;
}

vb2_set_plane_payload(vb, 0, size);

if (!buf->prepared) {
	/* Get memory addresses */
	buf->size = vb2_plane_size(&buf->vb.vb2_buf, 0);

	/* Split the transfer into dma_max_burst sized chunks when needed */
	if (buf->size > dcmi->dma_max_burst)
		num_sgs = DIV_ROUND_UP(buf->size, dcmi->dma_max_burst);

	/* NOTE(review): GFP_ATOMIC suggests this may run in atomic context — confirm */
	ret = sg_alloc_table(&buf->sgt, num_sgs, GFP_ATOMIC);
	if (ret) {
		dev_err(dcmi->dev, "sg table alloc failed\n");
		return ret;
	}
/*
 * Starting from sensor subdevice, walk within
 * pipeline and set format on each subdevice
 */
while (1) {
	unsigned int i;	/* fix: was the garbled token "unsignedint" */

	/* Search if current entity has a source pad */
	for (i = 0; i < entity->num_pads; i++) {
		pad = &entity->pads[i];
		if (pad->flags & MEDIA_PAD_FL_SOURCE) {
			src_pad = pad;
			found = true;
			break;
		}
	}
	if (!found)
		break;

	subdev = media_entity_to_v4l2_subdev(entity);

	/* Propagate format on sink pad if any, otherwise source pad */
	if (sink_pad)
		pad = sink_pad;

	dev_dbg(dcmi->dev, "\"%s\":%d pad format set to 0x%x %ux%u\n",
		subdev->name, pad->index, format->format.code,
		format->format.width, format->format.height);

	fmt.pad = pad->index;
	ret = v4l2_subdev_call(subdev, pad, set_fmt, NULL, &fmt);
	if (ret < 0) {
		dev_err(dcmi->dev, "%s: Failed to set format 0x%x %ux%u on \"%s\":%d pad (%d)\n",
			__func__, format->format.code,
			format->format.width, format->format.height,
			subdev->name, pad->index, ret);
		return ret;
	}

	/* Log (debug only) if the subdev adjusted the requested format */
	if (fmt.format.code != format->format.code ||
	    fmt.format.width != format->format.width ||
	    fmt.format.height != format->format.height) {
		dev_dbg(dcmi->dev, "\"%s\":%d pad format has been changed to 0x%x %ux%u\n",
			subdev->name, pad->index, fmt.format.code,
			fmt.format.width, fmt.format.height);
	}

	/* Walk to next entity */
	sink_pad = media_pad_remote_pad_first(src_pad);
	if (!sink_pad || !is_media_entity_v4l2_subdev(sink_pad->entity))
		break;
/* Power up the device (runtime PM) before touching hardware */
ret = pm_runtime_resume_and_get(dcmi->dev);
if (ret < 0) {
	dev_err(dcmi->dev, "%s: Failed to start streaming, cannot get sync (%d)\n",
		__func__, ret);
	goto err_unlocked;
}

ret = video_device_pipeline_start(dcmi->vdev, &dcmi->pipeline);
if (ret < 0) {
	dev_err(dcmi->dev, "%s: Failed to start streaming, media pipeline start error (%d)\n",
		__func__, ret);
	goto err_pm_put;
}

/* Start the source subdevice stream */
ret = v4l2_subdev_call(dcmi->s_subdev, video, s_stream, 1);
if (ret < 0) {
	dev_err(dcmi->dev, "%s: Failed to start source subdev, error (%d)\n",
		__func__, ret);
	goto err_media_pipeline_stop;
}

spin_lock_irq(&dcmi->irqlock);

/* Set bus width */
switch (dcmi->bus.bus_width) {
case 14:
	val |= CR_EDM_0 | CR_EDM_1;
	break;
case 12:
	val |= CR_EDM_1;
	break;
case 10:
	val |= CR_EDM_0;
	break;
default:
	/* Set bus width to 8 bits by default */
	break;
}

/* Set vertical synchronization polarity */
if (dcmi->bus.flags & V4L2_MBUS_VSYNC_ACTIVE_HIGH)
	val |= CR_VSPOL;

/* Set horizontal synchronization polarity */
if (dcmi->bus.flags & V4L2_MBUS_HSYNC_ACTIVE_HIGH)
	val |= CR_HSPOL;

/* Set pixel clock polarity */
if (dcmi->bus.flags & V4L2_MBUS_PCLK_SAMPLE_RISING)
	val |= CR_PCKPOL;

/*
 * BT656 embedded synchronisation bus mode.
 *
 * Default SAV/EAV mode is supported here with default codes
 * SAV=0xff000080 & EAV=0xff00009d.
 * With DCMI this means LSC=SAV=0x80 & LEC=EAV=0x9d.
 */
if (dcmi->bus_type == V4L2_MBUS_BT656) {
	val |= CR_ESS;

	/* Unmask all codes */
	reg_write(dcmi->regs, DCMI_ESUR, 0xffffffff);/* FEC:LEC:LSC:FSC */

	/* Trig on LSC=0x80 & LEC=0x9d codes, ignore FSC and FEC */
	reg_write(dcmi->regs, DCMI_ESCR, 0xff9d80ff);/* FEC:LEC:LSC:FSC */
}

reg_write(dcmi->regs, DCMI_CR, val);

/* Set crop */
if (dcmi->do_crop)
	dcmi_set_crop(dcmi);

/*
 * Start transfer if at least one buffer has been queued,
 * otherwise transfer is deferred at buffer queueing
 */
if (list_empty(&dcmi->buffers)) {
	dev_dbg(dcmi->dev, "Start streaming is deferred to next buffer queueing\n");
	dcmi->state = WAIT_FOR_BUFFER;
	spin_unlock_irq(&dcmi->irqlock);
	return 0;
}
/* Error-unwind labels: undo setup steps in reverse order of acquisition */
err_pm_put:
pm_runtime_put(dcmi->dev);
err_unlocked:
spin_lock_irq(&dcmi->irqlock);
/*
 * Return all buffers to vb2 in QUEUED state.
 * This will give ownership back to userspace
 */
list_for_each_entry_safe(buf, node, &dcmi->buffers, list) {
	list_del_init(&buf->list);
	vb2_buffer_done(&buf->vb.vb2_buf, VB2_BUF_STATE_QUEUED);
}

dcmi->active = NULL;

spin_unlock_irq(&dcmi->irqlock);
/* Return all queued buffers to vb2 in ERROR state */
list_for_each_entry_safe(buf, node, &dcmi->buffers, list) {
	list_del_init(&buf->list);
	vb2_buffer_done(&buf->vb.vb2_buf, VB2_BUF_STATE_ERROR);
}

dcmi->active = NULL;
dcmi->state = STOPPED;

spin_unlock_irq(&dcmi->irqlock);

/* Stop all pending DMA operations */
mutex_lock(&dcmi->dma_lock);
dmaengine_terminate_sync(dcmi->dma_chan);
mutex_unlock(&dcmi->dma_lock);

/* Balance the runtime-PM get taken when streaming started */
pm_runtime_put(dcmi->dev);

/* Report accumulated error statistics for this streaming session */
if (dcmi->errors_count)
	dev_warn(dcmi->dev, "Some errors found while streaming: errors=%d (overrun=%d), buffers=%d\n",
		 dcmi->errors_count, dcmi->overrun_count,
		 dcmi->buffers_count);
dev_dbg(dcmi->dev, "Stop streaming, errors=%d (overrun=%d), buffers=%d\n",
	dcmi->errors_count, dcmi->overrun_count,
	dcmi->buffers_count);
}
/* No crop if JPEG is requested */
do_crop = dcmi->do_crop && (pix->pixelformat != V4L2_PIX_FMT_JPEG);

if (do_crop && dcmi->num_of_sd_framesizes) {
	struct dcmi_framesize outer_sd_fsize;

	/*
	 * If crop is requested and sensor have discrete frame sizes,
	 * select the frame size that is just larger than request
	 */
	__find_outer_frame_size(dcmi, pix, &outer_sd_fsize);
	pix->width = outer_sd_fsize.width;
	pix->height = outer_sd_fsize.height;
}

/* Ask the sensor (try only, no hardware change) what it can do */
v4l2_fill_mbus_format(&format.format, pix, sd_fmt->mbus_code);
ret = v4l2_subdev_call_state_try(dcmi->source, pad, set_fmt, &format);
if (ret < 0)
	return ret;

/* Update pix regarding to what sensor can do */
v4l2_fill_pix_format(pix, &format.format);

/* Save resolution that sensor can actually do */
sd_fsize.width = pix->width;
sd_fsize.height = pix->height;

if (do_crop) {
	struct v4l2_rect c = dcmi->crop;
	struct v4l2_rect max_rect;

	/*
	 * Adjust crop by making the intersection between
	 * format resolution request and crop request
	 */
	max_rect.top = 0;
	max_rect.left = 0;
	max_rect.width = pix->width;
	max_rect.height = pix->height;
	v4l2_rect_map_inside(&c, &max_rect);

	/* Keep the crop rectangle fully inside the adjusted frame */
	c.top = clamp_t(s32, c.top, 0, pix->height - c.height);
	c.left = clamp_t(s32, c.left, 0, pix->width - c.width);
	dcmi->crop = c;

	/* Adjust format resolution request to crop */
	pix->width = dcmi->crop.width;
	pix->height = dcmi->crop.height;
}
/*
 * Try format, fmt.width/height could have been changed
 * to match sensor capability or crop request
 * sd_format & sd_framesize will contain what subdev
 * can do for this request.
 */
ret = dcmi_try_fmt(dcmi, f, &sd_format, &sd_framesize);
if (ret)
	return ret;

/*
 * Disable crop if JPEG is requested or BT656 bus is selected.
 * Fix: the condition previously read (JPEG && bus != BT656), which
 * contradicts the stated intent and left cropping enabled on BT656.
 */
if (pix->pixelformat == V4L2_PIX_FMT_JPEG ||
    dcmi->bus_type == V4L2_MBUS_BT656)
	dcmi->do_crop = false;

/* pix to mbus format */
v4l2_fill_mbus_format(mf, pix,
		      sd_format->mbus_code);
mf->width = sd_framesize.width;
mf->height = sd_framesize.height;

/* Propagate the chosen format through the whole pipeline */
ret = dcmi_pipeline_s_fmt(dcmi, &format);
if (ret < 0)
	return ret;

dev_dbg(dcmi->dev, "Sensor format set to 0x%x %ux%u\n",
	mf->code, mf->width, mf->height);
dev_dbg(dcmi->dev, "Buffer format set to %4.4s %ux%u\n",
	(char *)&pix->pixelformat,
	pix->width, pix->height);
/*
 * Get sensor bounds first
 */
ret = v4l2_subdev_call(dcmi->source, pad, get_selection,
		       NULL, &bounds);
if (!ret)
	*r = bounds.r;
/* Any result other than "not implemented" is final (success or error) */
if (ret != -ENOIOCTLCMD)
	return ret;

/*
 * If selection is not implemented,
 * fallback by enumerating sensor frame sizes
 * and take the largest one
 */
max_width = 0;
max_height = 0;
max_pixsize = 0;
for (i = 0; i < dcmi->num_of_sd_framesizes; i++) {
	struct dcmi_framesize *fsize = &dcmi->sd_framesizes[i];
	unsigned int pixsize = fsize->width * fsize->height;	/* fix: was "unsignedint" */
/*
 * If frame sizes enumeration is not implemented,
 * fallback by getting current sensor frame size
 */
ret = dcmi_get_sensor_format(dcmi, &pix);
if (ret)
	return ret;
/* Enumerate every media bus code the source subdev supports */
while (!v4l2_subdev_call(subdev, pad, enum_mbus_code,
			 NULL, &mbus_code)) {
	for (i = 0; i < ARRAY_SIZE(dcmi_formats); i++) {
		if (dcmi_formats[i].mbus_code != mbus_code.code)
			continue;

		/* Exclude JPEG if BT656 bus is selected */
		if (dcmi_formats[i].fourcc == V4L2_PIX_FMT_JPEG &&
		    dcmi->bus_type == V4L2_MBUS_BT656)
			continue;

		/* Code supported, have we got this fourcc yet? */
		for (j = 0; j < num_fmts; j++)
			if (sd_fmts[j]->fourcc ==
					dcmi_formats[i].fourcc) {
				/* Already available */
				dev_dbg(dcmi->dev, "Skipping fourcc/code: %4.4s/0x%x\n",
					(char *)&sd_fmts[j]->fourcc,
					mbus_code.code);
				break;
			}
		if (j == num_fmts) {
			/* New */
			sd_fmts[num_fmts++] = dcmi_formats + i;
			dev_dbg(dcmi->dev, "Supported fourcc/code: %4.4s/0x%x\n",
				(char *)&sd_fmts[num_fmts - 1]->fourcc,
				sd_fmts[num_fmts - 1]->mbus_code);
		}
	}
	mbus_code.index++;
}
/*
 * Now that the graph is complete,
 * we search for the source subdevice
 * in order to expose it through V4L2 interface
 */
dcmi->source = media_entity_to_v4l2_subdev(dcmi_find_source(dcmi));
if (!dcmi->source) {
	dev_err(dcmi->dev, "Source subdevice not found\n");
	return -ENODEV;
}
/*
 * Link this sub-device to DCMI, it could be
 * a parallel camera sensor or a bridge
 */
src_pad = media_entity_get_fwnode_pad(&subdev->entity,
				      subdev->fwnode,
				      MEDIA_PAD_FL_SOURCE);

/* DCMI video node sink pad is index 0; link is fixed and always enabled */
ret = media_create_pad_link(&subdev->entity, src_pad,
			    &dcmi->vdev->entity, 0,
			    MEDIA_LNK_FL_IMMUTABLE |
			    MEDIA_LNK_FL_ENABLED);
if (ret)
	dev_err(dcmi->dev, "Failed to create media pad link with subdev \"%s\"\n",
		subdev->name);
else
	dev_dbg(dcmi->dev, "DCMI is now linked to \"%s\"\n",
		subdev->name);
/* Allocate the driver context; devm_* memory is freed on device removal */
dcmi = devm_kzalloc(&pdev->dev, sizeof(struct stm32_dcmi), GFP_KERNEL);
if (!dcmi)
	return -ENOMEM;

dcmi->rstc = devm_reset_control_get_exclusive(&pdev->dev, NULL);
if (IS_ERR(dcmi->rstc))
	return dev_err_probe(&pdev->dev, PTR_ERR(dcmi->rstc),
			     "Could not get reset control\n");

/* Get bus characteristics from devicetree */
np = of_graph_get_endpoint_by_regs(np, 0, -1);
if (!np) {
	dev_err(&pdev->dev, "Could not find the endpoint\n");
	return -ENODEV;
}

ret = v4l2_fwnode_endpoint_parse(of_fwnode_handle(np), &ep);
of_node_put(np);
if (ret) {
	dev_err(&pdev->dev, "Could not parse the endpoint\n");
	return ret;
}

/* DCMI is a parallel-bus interface: CSI-2 is rejected here */
if (ep.bus_type == V4L2_MBUS_CSI2_DPHY) {
	dev_err(&pdev->dev, "CSI bus not supported\n");
	return -ENODEV;
}

/* BT656 embedded synchronisation requires an 8-bit data bus */
if (ep.bus_type == V4L2_MBUS_BT656 &&
    ep.bus.parallel.bus_width != 8) {
	dev_err(&pdev->dev, "BT656 bus conflicts with %u bits bus width (8 bits required)\n",
		ep.bus.parallel.bus_width);
	return -ENODEV;
}

/* Media entity pads */
dcmi->vid_cap_pad.flags = MEDIA_PAD_FL_SINK;
ret = media_entity_pads_init(&dcmi->vdev->entity,
			     1, &dcmi->vid_cap_pad);
if (ret) {
	dev_err(dcmi->dev, "Failed to init media entity pad\n");
	goto err_device_release;
}
dcmi->vdev->entity.flags |= MEDIA_ENT_FL_DEFAULT;

ret = video_register_device(dcmi->vdev, VFL_TYPE_VIDEO, -1);
if (ret) {
	dev_err(dcmi->dev, "Failed to register video device\n");
	goto err_media_entity_cleanup;
}

dev_dbg(dcmi->dev, "Device registered as %s\n",
	video_device_node_name(dcmi->vdev));
/*
 * NOTE(review): the following German disclaimer is extraction residue
 * appended after the source code; it is not part of the driver. It is
 * kept here, commented out, so the file remains compilable.
 *
 * Die Informationen auf dieser Webseite wurden nach bestem Wissen
 * sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit,
 * noch Richtigkeit, noch Qualität der bereitgestellten Informationen
 * zugesichert.
 * Bemerkung: Die farbliche Syntaxdarstellung und die Messung sind noch
 * experimentell.
 */