// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * STM32 DMA2D - 2D Graphics Accelerator Driver
 *
 * Copyright (c) 2021 Dillon Min
 * Dillon Min, <dillon.minfei@gmail.com>
 *
 * based on s5p-g2d
 *
 * Copyright (c) 2011 Samsung Electronics Co., Ltd.
 * Kamil Debski, <k.debski@samsung.com>
 */

#include <linux/module.h>
#include <linux/fs.h>
#include <linux/timer.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/clk.h>
#include <linux/interrupt.h>
#include <linux/of.h>

#include <linux/platform_device.h>
#include <media/v4l2-mem2mem.h>
#include <media/v4l2-device.h>
#include <media/v4l2-ioctl.h>
#include <media/v4l2-event.h>
#include <media/videobuf2-v4l2.h>
#include <media/videobuf2-dma-contig.h>

#include "dma2d.h"
#include "dma2d-regs.h"

/*
 * This V4L2 mem2mem driver enables the Chrom-Art Accelerator (DMA2D) unit
 * of the STMicroelectronics STM32 SoC series.
 *
 * It currently supports three operations: r2m, m2m and m2m_pfc.
 *
 * - r2m: filling a part or the whole of a destination image with a specific
 *   color.
 * - m2m: copying a part or the whole of a source image into a part or the
 *   whole of a destination image.
 * - m2m_pfc: copying a part or the whole of a source image into a part or the
 *   whole of a destination image with a pixel format conversion.
 */
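
/*
 * Illustrative userspace sketch (not part of this driver): assuming a
 * hypothetical file descriptor fd opened on the registered video device,
 * an r2m fill could be requested through the color-effect controls handled
 * by dma2d_s_ctrl(), e.g. selecting the mode and a red fill color before
 * queueing buffers:
 *
 *	struct v4l2_control ctrl = {
 *		.id = V4L2_CID_COLORFX, .value = V4L2_COLORFX_SET_RGB };
 *	ioctl(fd, VIDIOC_S_CTRL, &ctrl);
 *	ctrl.id = V4L2_CID_COLORFX_RGB;
 *	ctrl.value = 0xff0000;
 *	ioctl(fd, VIDIOC_S_CTRL, &ctrl);
 *
 * m2m vs. m2m_pfc is chosen implicitly in device_run() from the OUTPUT and
 * CAPTURE pixel formats negotiated with VIDIOC_S_FMT.
 */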

#define fh2ctx(__fh) container_of(__fh, struct dma2d_ctx, fh)

static const struct dma2d_fmt formats[] = {
	{
		.fourcc	= V4L2_PIX_FMT_ARGB32,
		.cmode = DMA2D_CMODE_ARGB8888,
		.depth = 32,
	},
	{
		.fourcc	= V4L2_PIX_FMT_RGB24,
		.cmode = DMA2D_CMODE_RGB888,
		.depth = 24,
	},
	{
		.fourcc	= V4L2_PIX_FMT_RGB565,
		.cmode = DMA2D_CMODE_RGB565,
		.depth = 16,
	},
	{
		.fourcc	= V4L2_PIX_FMT_ARGB555,
		.cmode = DMA2D_CMODE_ARGB1555,
		.depth = 16,
	},
	{
		.fourcc	= V4L2_PIX_FMT_ARGB444,
		.cmode = DMA2D_CMODE_ARGB4444,
		.depth = 16,
	},
};

#define NUM_FORMATS ARRAY_SIZE(formats)

static const struct dma2d_frame def_frame = {
	.width		= DEFAULT_WIDTH,
	.height		= DEFAULT_HEIGHT,
	.line_offset	= 0,
	.a_rgb		= {0x00, 0x00, 0x00, 0xff},
	.a_mode		= DMA2D_ALPHA_MODE_NO_MODIF,
	.fmt		= (struct dma2d_fmt *)&formats[0],
	.size		= DEFAULT_SIZE,
};

static struct dma2d_fmt *find_fmt(int pixelformat)
{
	unsigned int i;

	for (i = 0; i < NUM_FORMATS; i++) {
		if (formats[i].fourcc == pixelformat)
			return (struct dma2d_fmt *)&formats[i];
	}

	return NULL;
}

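/*
 * Map a queue type to the frame state it describes: the OUTPUT queue
 * carries the source frame (ctx->out), the CAPTURE queue the destination
 * frame (ctx->cap).
 */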
static struct dma2d_frame *get_frame(struct dma2d_ctx *ctx,
				     enum v4l2_buf_type type)
{
	return V4L2_TYPE_IS_OUTPUT(type) ? &ctx->out : &ctx->cap;
}

static int dma2d_queue_setup(struct vb2_queue *vq,
			     unsigned int *nbuffers, unsigned int *nplanes,
			     unsigned int sizes[], struct device *alloc_devs[])
{
	struct dma2d_ctx *ctx = vb2_get_drv_priv(vq);
	struct dma2d_frame *f = get_frame(ctx, vq->type);

	if (*nplanes)
		return sizes[0] < f->size ? -EINVAL : 0;

	sizes[0] = f->size;
	*nplanes = 1;

	return 0;
}

static int dma2d_buf_out_validate(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);

	if (vbuf->field == V4L2_FIELD_ANY)
		vbuf->field = V4L2_FIELD_NONE;
	if (vbuf->field != V4L2_FIELD_NONE)
		return -EINVAL;

	return 0;
}

static int dma2d_buf_prepare(struct vb2_buffer *vb)
{
	struct dma2d_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
	struct dma2d_frame *f = get_frame(ctx, vb->vb2_queue->type);

	if (vb2_plane_size(vb, 0) < f->size)
		return -EINVAL;

	vb2_set_plane_payload(vb, 0, f->size);

	return 0;
}

static void dma2d_buf_queue(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct dma2d_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);

	v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
}

static int dma2d_start_streaming(struct vb2_queue *q, unsigned int count)
{
	struct dma2d_ctx *ctx = vb2_get_drv_priv(q);
	struct dma2d_frame *f = get_frame(ctx, q->type);

	f->sequence = 0;
	return 0;
}

static void dma2d_stop_streaming(struct vb2_queue *q)
{
	struct dma2d_ctx *ctx = vb2_get_drv_priv(q);
	struct vb2_v4l2_buffer *vbuf;

	for (;;) {
		if (V4L2_TYPE_IS_OUTPUT(q->type))
			vbuf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
		else
			vbuf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
		if (!vbuf)
			return;
		v4l2_m2m_buf_done(vbuf, VB2_BUF_STATE_ERROR);
	}
}

static const struct vb2_ops dma2d_qops = {
	.queue_setup		= dma2d_queue_setup,
	.buf_out_validate	= dma2d_buf_out_validate,
	.buf_prepare		= dma2d_buf_prepare,
	.buf_queue		= dma2d_buf_queue,
	.start_streaming	= dma2d_start_streaming,
	.stop_streaming		= dma2d_stop_streaming,
	.wait_prepare		= vb2_ops_wait_prepare,
	.wait_finish		= vb2_ops_wait_finish,
};

static int queue_init(void *priv, struct vb2_queue *src_vq,
		      struct vb2_queue *dst_vq)
{
	struct dma2d_ctx *ctx = priv;
	int ret;

	src_vq->type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
	src_vq->io_modes = VB2_MMAP | VB2_DMABUF;
	src_vq->drv_priv = ctx;
	src_vq->ops = &dma2d_qops;
	src_vq->mem_ops = &vb2_dma_contig_memops;
	src_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
	src_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	src_vq->lock = &ctx->dev->mutex;
	src_vq->dev = ctx->dev->v4l2_dev.dev;

	ret = vb2_queue_init(src_vq);
	if (ret)
		return ret;

	dst_vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	dst_vq->io_modes = VB2_MMAP | VB2_DMABUF;
	dst_vq->drv_priv = ctx;
	dst_vq->ops = &dma2d_qops;
	dst_vq->mem_ops = &vb2_dma_contig_memops;
	dst_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
	dst_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	dst_vq->lock = &ctx->dev->mutex;
	dst_vq->dev = ctx->dev->v4l2_dev.dev;

	return vb2_queue_init(dst_vq);
}

static int dma2d_s_ctrl(struct v4l2_ctrl *ctrl)
{
	struct dma2d_frame *frm;
	struct dma2d_ctx *ctx = container_of(ctrl->handler, struct dma2d_ctx,
					     ctrl_handler);
	unsigned long flags;

	spin_lock_irqsave(&ctx->dev->ctrl_lock, flags);
	switch (ctrl->id) {
	case V4L2_CID_COLORFX:
		if (ctrl->val == V4L2_COLORFX_SET_RGB)
			ctx->op_mode = DMA2D_MODE_R2M;
		else if (ctrl->val == V4L2_COLORFX_NONE)
			ctx->op_mode = DMA2D_MODE_M2M;
		break;
	case V4L2_CID_COLORFX_RGB:
		frm = get_frame(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
		frm->a_rgb[2] = (ctrl->val >> 16) & 0xff;
		frm->a_rgb[1] = (ctrl->val >> 8) & 0xff;
		frm->a_rgb[0] = (ctrl->val >> 0) & 0xff;
		break;
	default:
		spin_unlock_irqrestore(&ctx->dev->ctrl_lock, flags);
		return -EINVAL;
	}
	spin_unlock_irqrestore(&ctx->dev->ctrl_lock, flags);

	return 0;
}

static const struct v4l2_ctrl_ops dma2d_ctrl_ops = {
	.s_ctrl	= dma2d_s_ctrl,
};

static int dma2d_setup_ctrls(struct dma2d_ctx *ctx)
{
	struct v4l2_ctrl_handler *handler = &ctx->ctrl_handler;

	v4l2_ctrl_handler_init(handler, 2);

	v4l2_ctrl_new_std_menu(handler, &dma2d_ctrl_ops, V4L2_CID_COLORFX,
			       V4L2_COLORFX_SET_RGB, ~0x10001,
			       V4L2_COLORFX_NONE);

	v4l2_ctrl_new_std(handler, &dma2d_ctrl_ops, V4L2_CID_COLORFX_RGB, 0,
			  0xffffff, 1, 0);

	return 0;
}

static int dma2d_open(struct file *file)
{
	struct dma2d_dev *dev = video_drvdata(file);
	struct dma2d_ctx *ctx = NULL;
	int ret = 0;

	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
	if (!ctx)
		return -ENOMEM;
	ctx->dev = dev;
	/* Set default formats */
	ctx->cap	= def_frame;
	ctx->bg		= def_frame;
	ctx->out	= def_frame;
	ctx->op_mode	= DMA2D_MODE_M2M_FPC;
	ctx->colorspace = V4L2_COLORSPACE_REC709;
	if (mutex_lock_interruptible(&dev->mutex)) {
		kfree(ctx);
		return -ERESTARTSYS;
	}

	ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(dev->m2m_dev, ctx, &queue_init);
	if (IS_ERR(ctx->fh.m2m_ctx)) {
		ret = PTR_ERR(ctx->fh.m2m_ctx);
		mutex_unlock(&dev->mutex);
		kfree(ctx);
		return ret;
	}

	v4l2_fh_init(&ctx->fh, video_devdata(file));
	file->private_data = &ctx->fh;
	v4l2_fh_add(&ctx->fh);

	dma2d_setup_ctrls(ctx);

	/* Write the default values to the ctx struct */
	v4l2_ctrl_handler_setup(&ctx->ctrl_handler);

	ctx->fh.ctrl_handler = &ctx->ctrl_handler;
	mutex_unlock(&dev->mutex);

	return 0;
}

static int dma2d_release(struct file *file)
{
	struct dma2d_dev *dev = video_drvdata(file);
	struct dma2d_ctx *ctx = fh2ctx(file->private_data);

	mutex_lock(&dev->mutex);
	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);
	mutex_unlock(&dev->mutex);
	v4l2_ctrl_handler_free(&ctx->ctrl_handler);
	v4l2_fh_del(&ctx->fh);
	v4l2_fh_exit(&ctx->fh);
	kfree(ctx);

	return 0;
}

static int vidioc_querycap(struct file *file, void *priv,
			   struct v4l2_capability *cap)
{
	strscpy(cap->driver, DMA2D_NAME, sizeof(cap->driver));
	strscpy(cap->card, DMA2D_NAME, sizeof(cap->card));
	strscpy(cap->bus_info, BUS_INFO, sizeof(cap->bus_info));

	return 0;
}

static int vidioc_enum_fmt(struct file *file, void *prv, struct v4l2_fmtdesc *f)
{
	if (f->index >= NUM_FORMATS)
		return -EINVAL;

	f->pixelformat = formats[f->index].fourcc;
	return 0;
}

static int vidioc_g_fmt(struct file *file, void *prv, struct v4l2_format *f)
{
	struct dma2d_ctx *ctx = prv;
	struct vb2_queue *vq;
	struct dma2d_frame *frm;

	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
	if (!vq)
		return -EINVAL;

	frm = get_frame(ctx, f->type);
	f->fmt.pix.width		= frm->width;
	f->fmt.pix.height		= frm->height;
	f->fmt.pix.field		= V4L2_FIELD_NONE;
	f->fmt.pix.pixelformat		= frm->fmt->fourcc;
	f->fmt.pix.bytesperline		= (frm->width * frm->fmt->depth) >> 3;
	f->fmt.pix.sizeimage		= frm->size;
	f->fmt.pix.colorspace		= ctx->colorspace;
	f->fmt.pix.xfer_func		= ctx->xfer_func;
	f->fmt.pix.ycbcr_enc		= ctx->ycbcr_enc;
	f->fmt.pix.quantization		= ctx->quant;

	return 0;
}

static int vidioc_try_fmt(struct file *file, void *prv, struct v4l2_format *f)
{
	struct dma2d_ctx *ctx = prv;
	struct dma2d_fmt *fmt;
	enum v4l2_field *field;
	u32 fourcc = f->fmt.pix.pixelformat;

	fmt = find_fmt(fourcc);
	if (!fmt) {
		f->fmt.pix.pixelformat = formats[0].fourcc;
		fmt = find_fmt(f->fmt.pix.pixelformat);
	}

	field = &f->fmt.pix.field;
	if (*field == V4L2_FIELD_ANY)
		*field = V4L2_FIELD_NONE;
	else if (*field != V4L2_FIELD_NONE)
		return -EINVAL;

	if (f->fmt.pix.width > MAX_WIDTH)
		f->fmt.pix.width = MAX_WIDTH;
	if (f->fmt.pix.height > MAX_HEIGHT)
		f->fmt.pix.height = MAX_HEIGHT;

	if (f->fmt.pix.width < 1)
		f->fmt.pix.width = 1;
	if (f->fmt.pix.height < 1)
		f->fmt.pix.height = 1;

	if (f->type == V4L2_BUF_TYPE_VIDEO_OUTPUT && !f->fmt.pix.colorspace) {
		f->fmt.pix.colorspace = V4L2_COLORSPACE_REC709;
	} else if (f->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
		f->fmt.pix.colorspace	= ctx->colorspace;
		f->fmt.pix.xfer_func = ctx->xfer_func;
		f->fmt.pix.ycbcr_enc = ctx->ycbcr_enc;
		f->fmt.pix.quantization = ctx->quant;
	}
	f->fmt.pix.bytesperline = (f->fmt.pix.width * fmt->depth) >> 3;
	f->fmt.pix.sizeimage = f->fmt.pix.height * f->fmt.pix.bytesperline;

	return 0;
}

static int vidioc_s_fmt(struct file *file, void *prv, struct v4l2_format *f)
{
	struct dma2d_ctx *ctx = prv;
	struct vb2_queue *vq;
	struct dma2d_frame *frm;
	struct dma2d_fmt *fmt;
	int ret = 0;

	/*
	 * Adjust all values according to the hardware capabilities
	 * and the chosen format.
	 */
	ret = vidioc_try_fmt(file, prv, f);
	if (ret)
		return ret;

	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
	if (vb2_is_busy(vq))
		return -EBUSY;

	fmt = find_fmt(f->fmt.pix.pixelformat);
	if (!fmt)
		return -EINVAL;

	if (f->type == V4L2_BUF_TYPE_VIDEO_OUTPUT) {
		ctx->colorspace = f->fmt.pix.colorspace;
		ctx->xfer_func = f->fmt.pix.xfer_func;
		ctx->ycbcr_enc = f->fmt.pix.ycbcr_enc;
		ctx->quant = f->fmt.pix.quantization;
	}

	frm = get_frame(ctx, f->type);
	frm->width = f->fmt.pix.width;
	frm->height = f->fmt.pix.height;
	frm->size = f->fmt.pix.sizeimage;
	/* Reset crop settings */
	frm->o_width = 0;
	frm->o_height = 0;
	frm->c_width = frm->width;
	frm->c_height = frm->height;
	frm->right = frm->width;
	frm->bottom = frm->height;
	frm->fmt = fmt;
	frm->line_offset = 0;

	return 0;
}

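/*
 * Run one mem2mem job: configure the foreground (source) and output
 * (destination) stages from the queued buffers, then start the transfer.
 * Completion is signalled through dma2d_isr().
 */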
static void device_run(void *prv)
{
	struct dma2d_ctx *ctx = prv;
	struct dma2d_dev *dev = ctx->dev;
	struct dma2d_frame *frm_out, *frm_cap;
	struct vb2_v4l2_buffer *src, *dst;
	unsigned long flags;

	spin_lock_irqsave(&dev->ctrl_lock, flags);
	dev->curr = ctx;

	src = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
	dst = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);
	if (!dst || !src)
		goto end;

	frm_cap = get_frame(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
	frm_out = get_frame(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT);
	if (!frm_cap || !frm_out)
		goto end;

	src->sequence = frm_out->sequence++;
	dst->sequence = frm_cap->sequence++;
	v4l2_m2m_buf_copy_metadata(src, dst, true);

	clk_enable(dev->gate);

	dma2d_config_fg(dev, frm_out,
			vb2_dma_contig_plane_dma_addr(&src->vb2_buf, 0));

	/* TODO: add M2M_BLEND handler here */

	if (ctx->op_mode != DMA2D_MODE_R2M) {
		if (frm_out->fmt->fourcc == frm_cap->fmt->fourcc)
			ctx->op_mode = DMA2D_MODE_M2M;
		else
			ctx->op_mode = DMA2D_MODE_M2M_FPC;
	}

	dma2d_config_out(dev, frm_cap,
			 vb2_dma_contig_plane_dma_addr(&dst->vb2_buf, 0));
	dma2d_config_common(dev, ctx->op_mode, frm_cap->width, frm_cap->height);

	dma2d_start(dev);
end:
	spin_unlock_irqrestore(&dev->ctrl_lock, flags);
}

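/*
 * Completion interrupt: on transfer complete, gate the clock again, return
 * both buffers as done and let the mem2mem core schedule the next job.
 */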
static irqreturn_t dma2d_isr(int irq, void *prv)
{
	struct dma2d_dev *dev = prv;
	struct dma2d_ctx *ctx = dev->curr;
	struct vb2_v4l2_buffer *src, *dst;
	u32 s = dma2d_get_int(dev);

	dma2d_clear_int(dev);
	if (s & ISR_TCIF || s == 0) {
		clk_disable(dev->gate);

		WARN_ON(!ctx);

		src = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
		dst = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);

		WARN_ON(!dst);
		WARN_ON(!src);

		v4l2_m2m_buf_done(src, VB2_BUF_STATE_DONE);
		v4l2_m2m_buf_done(dst, VB2_BUF_STATE_DONE);
		v4l2_m2m_job_finish(dev->m2m_dev, ctx->fh.m2m_ctx);

		dev->curr = NULL;
	}

	return IRQ_HANDLED;
}

static const struct v4l2_file_operations dma2d_fops = {
	.owner		= THIS_MODULE,
	.open		= dma2d_open,
	.release	= dma2d_release,
	.poll		= v4l2_m2m_fop_poll,
	.unlocked_ioctl	= video_ioctl2,
	.mmap		= v4l2_m2m_fop_mmap,
#ifndef CONFIG_MMU
	.get_unmapped_area = v4l2_m2m_get_unmapped_area,
#endif
};

static const struct v4l2_ioctl_ops dma2d_ioctl_ops = {
	.vidioc_querycap	= vidioc_querycap,

	.vidioc_enum_fmt_vid_cap	= vidioc_enum_fmt,
	.vidioc_g_fmt_vid_cap		= vidioc_g_fmt,
	.vidioc_try_fmt_vid_cap		= vidioc_try_fmt,
	.vidioc_s_fmt_vid_cap		= vidioc_s_fmt,

	.vidioc_enum_fmt_vid_out	= vidioc_enum_fmt,
	.vidioc_g_fmt_vid_out		= vidioc_g_fmt,
	.vidioc_try_fmt_vid_out		= vidioc_try_fmt,
	.vidioc_s_fmt_vid_out		= vidioc_s_fmt,

	.vidioc_reqbufs			= v4l2_m2m_ioctl_reqbufs,
	.vidioc_querybuf		= v4l2_m2m_ioctl_querybuf,
	.vidioc_qbuf			= v4l2_m2m_ioctl_qbuf,
	.vidioc_dqbuf			= v4l2_m2m_ioctl_dqbuf,
	.vidioc_prepare_buf		= v4l2_m2m_ioctl_prepare_buf,
	.vidioc_create_bufs		= v4l2_m2m_ioctl_create_bufs,
	.vidioc_expbuf			= v4l2_m2m_ioctl_expbuf,

	.vidioc_streamon		= v4l2_m2m_ioctl_streamon,
	.vidioc_streamoff		= v4l2_m2m_ioctl_streamoff,

	.vidioc_subscribe_event		= v4l2_ctrl_subscribe_event,
	.vidioc_unsubscribe_event	= v4l2_event_unsubscribe,
};

static const struct video_device dma2d_videodev = {
	.name		= DMA2D_NAME,
	.fops		= &dma2d_fops,
	.ioctl_ops	= &dma2d_ioctl_ops,
	.minor		= -1,
	.release	= video_device_release,
	.vfl_dir	= VFL_DIR_M2M,
};

static const struct v4l2_m2m_ops dma2d_m2m_ops = {
	.device_run	= device_run,
};

static const struct of_device_id stm32_dma2d_match[];

static int dma2d_probe(struct platform_device *pdev)
{
	struct dma2d_dev *dev;
	struct video_device *vfd;
	int ret = 0;

	dev = devm_kzalloc(&pdev->dev, sizeof(*dev), GFP_KERNEL);
	if (!dev)
		return -ENOMEM;

	spin_lock_init(&dev->ctrl_lock);
	mutex_init(&dev->mutex);
	atomic_set(&dev->num_inst, 0);

	dev->regs = devm_platform_get_and_ioremap_resource(pdev, 0, NULL);
	if (IS_ERR(dev->regs))
		return PTR_ERR(dev->regs);

	dev->gate = clk_get(&pdev->dev, "dma2d");
	if (IS_ERR(dev->gate)) {
		dev_err(&pdev->dev, "failed to get dma2d clock gate\n");
		ret = -ENXIO;
		return ret;
	}

	ret = clk_prepare(dev->gate);
	if (ret) {
		dev_err(&pdev->dev, "failed to prepare dma2d clock gate\n");
		goto put_clk_gate;
	}

	ret = platform_get_irq(pdev, 0);
	if (ret < 0)
		goto unprep_clk_gate;

	dev->irq = ret;

	ret = devm_request_irq(&pdev->dev, dev->irq, dma2d_isr,
			       0, pdev->name, dev);
	if (ret) {
		dev_err(&pdev->dev, "failed to install IRQ\n");
		goto unprep_clk_gate;
	}

	ret = v4l2_device_register(&pdev->dev, &dev->v4l2_dev);
	if (ret)
		goto unprep_clk_gate;

	vfd = video_device_alloc();
	if (!vfd) {
		v4l2_err(&dev->v4l2_dev, "Failed to allocate video device\n");
		ret = -ENOMEM;
		goto unreg_v4l2_dev;
	}

	*vfd = dma2d_videodev;
	vfd->lock = &dev->mutex;
	vfd->v4l2_dev = &dev->v4l2_dev;
	vfd->device_caps = V4L2_CAP_VIDEO_M2M | V4L2_CAP_STREAMING;

	platform_set_drvdata(pdev, dev);
	dev->m2m_dev = v4l2_m2m_init(&dma2d_m2m_ops);
	if (IS_ERR(dev->m2m_dev)) {
		v4l2_err(&dev->v4l2_dev, "Failed to init mem2mem device\n");
		ret = PTR_ERR(dev->m2m_dev);
		goto rel_vdev;
	}

	ret = video_register_device(vfd, VFL_TYPE_VIDEO, 0);
	if (ret) {
		v4l2_err(&dev->v4l2_dev, "Failed to register video device\n");
		goto free_m2m;
	}

	video_set_drvdata(vfd, dev);
	dev->vfd = vfd;
	v4l2_info(&dev->v4l2_dev, "device registered as /dev/video%d\n",
		  vfd->num);
	return 0;

free_m2m:
	v4l2_m2m_release(dev->m2m_dev);
rel_vdev:
	video_device_release(vfd);
unreg_v4l2_dev:
	v4l2_device_unregister(&dev->v4l2_dev);
unprep_clk_gate:
	clk_unprepare(dev->gate);
put_clk_gate:
	clk_put(dev->gate);

	return ret;
}

static void dma2d_remove(struct platform_device *pdev)
{
	struct dma2d_dev *dev = platform_get_drvdata(pdev);

	v4l2_info(&dev->v4l2_dev, "Removing " DMA2D_NAME "\n");
	v4l2_m2m_release(dev->m2m_dev);
	video_unregister_device(dev->vfd);
	v4l2_device_unregister(&dev->v4l2_dev);
	vb2_dma_contig_clear_max_seg_size(&pdev->dev);
	clk_unprepare(dev->gate);
	clk_put(dev->gate);
}

static const struct of_device_id stm32_dma2d_match[] = {
	{
		.compatible = "st,stm32-dma2d",
		.data = NULL,
	},
	{},
};
MODULE_DEVICE_TABLE(of, stm32_dma2d_match);

static struct platform_driver dma2d_pdrv = {
	.probe		= dma2d_probe,
	.remove_new	= dma2d_remove,
	.driver		= {
		.name = DMA2D_NAME,
		.of_match_table = stm32_dma2d_match,
	},
};

module_platform_driver(dma2d_pdrv);

MODULE_AUTHOR("Dillon Min <dillon.minfei@gmail.com>");
MODULE_DESCRIPTION("STM32 Chrom-Art Accelerator DMA2D driver");
MODULE_LICENSE("GPL");