// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) STMicroelectronics SA 2015
 * Authors: Yannick Fertre <yannick.fertre@st.com>
 *          Hugues Fruchet <hugues.fruchet@st.com>
 */

#include <linux/module.h>
#include <linux/mod_devicetable.h>
#include <linux/platform_device.h>
#include <linux/slab.h>
#include <media/v4l2-event.h>
#include <media/v4l2-ioctl.h>
#include <media/videobuf2-dma-contig.h>

#include "hva.h"
#include "hva-hw.h"

#define MIN_FRAMES	1
#define MIN_STREAMS	1

#define HVA_MIN_WIDTH	32
#define HVA_MAX_WIDTH	1920
#define HVA_MIN_HEIGHT	32
#define HVA_MAX_HEIGHT	1920

/* HVA requires a 16x16 pixels alignment for frames */
#define HVA_WIDTH_ALIGNMENT	16
#define HVA_HEIGHT_ALIGNMENT	16

#define HVA_DEFAULT_WIDTH	HVA_MIN_WIDTH
#define HVA_DEFAULT_HEIGHT	HVA_MIN_HEIGHT
#define HVA_DEFAULT_FRAME_NUM	1
#define HVA_DEFAULT_FRAME_DEN	30

#define to_type_str(type) ((type) == V4L2_BUF_TYPE_VIDEO_OUTPUT ? \
			   "frame" : "stream")

#define fh_to_ctx(f)    (container_of(f, struct hva_ctx, fh))

/* registry of available encoders */
static const struct hva_enc *hva_encoders[] = {
	&nv12h264enc,
	&nv21h264enc,
};

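/*
 * Frame geometry helpers: NV12 and NV21 are semi-planar 4:2:0 formats,
 * so a frame occupies 3/2 bytes per pixel and its stride equals its width
 * in bytes; unsupported formats yield 0 so that callers can reject them.
 */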
static inline int frame_size(u32 w, u32 h, u32 fmt)
{
	switch (fmt) {
	case V4L2_PIX_FMT_NV12:
	case V4L2_PIX_FMT_NV21:
		return (w * h * 3) / 2;
	default:
		return 0;
	}
}

static inline int frame_stride(u32 w, u32 fmt)
{
	switch (fmt) {
	case V4L2_PIX_FMT_NV12:
	case V4L2_PIX_FMT_NV21:
		return w;
	default:
		return 0;
	}
}

static inline int frame_alignment(u32 fmt)
{
	switch (fmt) {
	case V4L2_PIX_FMT_NV12:
	case V4L2_PIX_FMT_NV21:
		/* multiple of 2 */
		return 2;
	default:
		return 1;
	}
}

static inline int estimated_stream_size(u32 w, u32 h)
{
	/*
	 * HVA only encodes in YUV420 format, whatever the input frame
	 * format. A compression ratio of 2 is assumed: thus, the maximum
	 * size of a stream is estimated at ((width x height x 3 / 2) / 2).
	 */
	return (w * h * 3) / 4;
}

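/*
 * Apply default frame and stream parameters to a freshly opened instance;
 * they remain in effect until userspace overrides them through S_FMT.
 */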
static void set_default_params(struct hva_ctx *ctx)
{
	struct hva_frameinfo *frameinfo = &ctx->frameinfo;
	struct hva_streaminfo *streaminfo = &ctx->streaminfo;

	frameinfo->pixelformat = V4L2_PIX_FMT_NV12;
	frameinfo->width = HVA_DEFAULT_WIDTH;
	frameinfo->height = HVA_DEFAULT_HEIGHT;
	frameinfo->aligned_width = ALIGN(frameinfo->width,
					 HVA_WIDTH_ALIGNMENT);
	frameinfo->aligned_height = ALIGN(frameinfo->height,
					  HVA_HEIGHT_ALIGNMENT);
	frameinfo->size = frame_size(frameinfo->aligned_width,
				     frameinfo->aligned_height,
				     frameinfo->pixelformat);

	streaminfo->streamformat = V4L2_PIX_FMT_H264;
	streaminfo->width = HVA_DEFAULT_WIDTH;
	streaminfo->height = HVA_DEFAULT_HEIGHT;

	ctx->colorspace = V4L2_COLORSPACE_REC709;
	ctx->xfer_func = V4L2_XFER_FUNC_DEFAULT;
	ctx->ycbcr_enc = V4L2_YCBCR_ENC_DEFAULT;
	ctx->quantization = V4L2_QUANTIZATION_DEFAULT;

	ctx->max_stream_size = estimated_stream_size(streaminfo->width,
						     streaminfo->height);
}

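/* look up a registered encoder handling this pixel format/stream format pair */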
static const struct hva_enc *hva_find_encoder(struct hva_ctx *ctx,
					      u32 pixelformat,
					      u32 streamformat)
{
	struct hva_dev *hva = ctx_to_hdev(ctx);
	const struct hva_enc *enc;
	unsigned int i;

	for (i = 0; i < hva->nb_of_encoders; i++) {
		enc = hva->encoders[i];
		if ((enc->pixelformat == pixelformat) &&
		    (enc->streamformat == streamformat))
			return enc;
	}

	return NULL;
}

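/*
 * Build the lists of supported pixel formats and stream formats from the
 * registered encoders, skipping duplicates, for ENUM_FMT reporting.
 */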
static void register_format(u32 format, u32 formats[], u32 *nb_of_formats)
{
	u32 i;
	bool found = false;

	for (i = 0; i < *nb_of_formats; i++) {
		if (format == formats[i]) {
			found = true;
			break;
		}
	}

	if (!found)
		formats[(*nb_of_formats)++] = format;
}

static void register_formats(struct hva_dev *hva)
{
	unsigned int i;

	for (i = 0; i < hva->nb_of_encoders; i++) {
		register_format(hva->encoders[i]->pixelformat,
				hva->pixelformats,
				&hva->nb_of_pixelformats);

		register_format(hva->encoders[i]->streamformat,
				hva->streamformats,
				&hva->nb_of_streamformats);
	}
}

static void register_encoders(struct hva_dev *hva)
{
	struct device *dev = hva_to_dev(hva);
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(hva_encoders); i++) {
		if (hva->nb_of_encoders >= HVA_MAX_ENCODERS) {
			dev_dbg(dev,
				"%s failed to register %s encoder (%d maximum reached)\n",
				HVA_PREFIX, hva_encoders[i]->name,
				HVA_MAX_ENCODERS);
			return;
		}

		hva->encoders[hva->nb_of_encoders++] = hva_encoders[i];
		dev_info(dev, "%s %s encoder registered\n", HVA_PREFIX,
			 hva_encoders[i]->name);
	}
}

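/*
 * Find and open the encoder instance matching the negotiated formats;
 * on success *penc points to the selected encoder.
 */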
static int hva_open_encoder(struct hva_ctx *ctx, u32 streamformat,
			    u32 pixelformat, struct hva_enc **penc)
{
	struct hva_dev *hva = ctx_to_hdev(ctx);
	struct device *dev = ctx_to_dev(ctx);
	struct hva_enc *enc;
	int ret;

	/* find an encoder which can deal with these formats */
	enc = (struct hva_enc *)hva_find_encoder(ctx, pixelformat,
						 streamformat);
	if (!enc) {
		dev_err(dev, "%s no encoder found matching %4.4s => %4.4s\n",
			ctx->name, (char *)&pixelformat, (char *)&streamformat);
		return -EINVAL;
	}

	dev_dbg(dev, "%s one encoder matching %4.4s => %4.4s\n",
		ctx->name, (char *)&pixelformat, (char *)&streamformat);

	/* update instance name */
	snprintf(ctx->name, sizeof(ctx->name), "[%3d:%4.4s]",
		 hva->instance_id, (char *)&streamformat);

	/* open encoder instance */
	ret = enc->open(ctx);
	if (ret) {
		dev_err(dev, "%s failed to open encoder instance (%d)\n",
			ctx->name, ret);
		return ret;
	}

	dev_dbg(dev, "%s %s encoder opened\n", ctx->name, enc->name);

	*penc = enc;

	return ret;
}

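/* trace a one-line summary of the instance (formats, resolution, counters) */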
static void hva_dbg_summary(struct hva_ctx *ctx)
{
	struct device *dev = ctx_to_dev(ctx);
	struct hva_streaminfo *stream = &ctx->streaminfo;
	struct hva_frameinfo *frame = &ctx->frameinfo;

	if (!(ctx->flags & HVA_FLAG_STREAMINFO))
		return;

	dev_dbg(dev, "%s %4.4s %dx%d > %4.4s %dx%d %s %s: %d frames encoded, %d system errors, %d encoding errors, %d frame errors\n",
		ctx->name,
		(char *)&frame->pixelformat,
		frame->aligned_width, frame->aligned_height,
		(char *)&stream->streamformat,
		stream->width, stream->height,
		stream->profile, stream->level,
		ctx->encoded_frames,
		ctx->sys_errors,
		ctx->encode_errors,
		ctx->frame_errors);
}

/*
 * V4L2 ioctl operations
 */

static int hva_querycap(struct file *file, void *priv,
			struct v4l2_capability *cap)
{
	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
	struct hva_dev *hva = ctx_to_hdev(ctx);

	strscpy(cap->driver, HVA_NAME, sizeof(cap->driver));
	strscpy(cap->card, hva->vdev->name, sizeof(cap->card));
	snprintf(cap->bus_info, sizeof(cap->bus_info), "platform:%s",
		 hva->pdev->name);

	return 0;
}

static int hva_enum_fmt_stream(struct file *file, void *priv,
			       struct v4l2_fmtdesc *f)
{
	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
	struct hva_dev *hva = ctx_to_hdev(ctx);

	if (unlikely(f->index >= hva->nb_of_streamformats))
		return -EINVAL;

	f->pixelformat = hva->streamformats[f->index];

	return 0;
}

static int hva_enum_fmt_frame(struct file *file, void *priv,
			      struct v4l2_fmtdesc *f)
{
	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
	struct hva_dev *hva = ctx_to_hdev(ctx);

	if (unlikely(f->index >= hva->nb_of_pixelformats))
		return -EINVAL;

	f->pixelformat = hva->pixelformats[f->index];

	return 0;
}

static int hva_g_fmt_stream(struct file *file, void *fh, struct v4l2_format *f)
{
	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
	struct hva_streaminfo *streaminfo = &ctx->streaminfo;

	f->fmt.pix.width = streaminfo->width;
	f->fmt.pix.height = streaminfo->height;
	f->fmt.pix.field = V4L2_FIELD_NONE;
	f->fmt.pix.colorspace = ctx->colorspace;
	f->fmt.pix.xfer_func = ctx->xfer_func;
	f->fmt.pix.ycbcr_enc = ctx->ycbcr_enc;
	f->fmt.pix.quantization = ctx->quantization;
	f->fmt.pix.pixelformat = streaminfo->streamformat;
	f->fmt.pix.bytesperline = 0;
	f->fmt.pix.sizeimage = ctx->max_stream_size;

	return 0;
}

static int hva_g_fmt_frame(struct file *file, void *fh, struct v4l2_format *f)
{
	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
	struct hva_frameinfo *frameinfo = &ctx->frameinfo;

	f->fmt.pix.width = frameinfo->width;
	f->fmt.pix.height = frameinfo->height;
	f->fmt.pix.field = V4L2_FIELD_NONE;
	f->fmt.pix.colorspace = ctx->colorspace;
	f->fmt.pix.xfer_func = ctx->xfer_func;
	f->fmt.pix.ycbcr_enc = ctx->ycbcr_enc;
	f->fmt.pix.quantization = ctx->quantization;
	f->fmt.pix.pixelformat = frameinfo->pixelformat;
	f->fmt.pix.bytesperline = frame_stride(frameinfo->aligned_width,
					       frameinfo->pixelformat);
	f->fmt.pix.sizeimage = frameinfo->size;

	return 0;
}

static int hva_try_fmt_stream(struct file *file, void *priv,
			      struct v4l2_format *f)
{
	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
	struct device *dev = ctx_to_dev(ctx);
	struct v4l2_pix_format *pix = &f->fmt.pix;
	u32 streamformat = pix->pixelformat;
	const struct hva_enc *enc;
	u32 width, height;
	u32 stream_size;

	enc = hva_find_encoder(ctx, ctx->frameinfo.pixelformat, streamformat);
	if (!enc) {
		dev_dbg(dev,
			"%s V4L2 TRY_FMT (CAPTURE): unsupported format %.4s\n",
			ctx->name, (char *)&pix->pixelformat);
		return -EINVAL;
	}

	width = pix->width;
	height = pix->height;
	if (ctx->flags & HVA_FLAG_FRAMEINFO) {
		/*
		 * if the frame resolution is already fixed, only allow the
		 * same stream resolution
		 */
		pix->width = ctx->frameinfo.width;
		pix->height = ctx->frameinfo.height;
		if ((pix->width != width) || (pix->height != height))
			dev_dbg(dev,
				"%s V4L2 TRY_FMT (CAPTURE): resolution updated %dx%d -> %dx%d to fit frame resolution\n",
				ctx->name, width, height,
				pix->width, pix->height);
	} else {
		/* adjust width & height */
		v4l_bound_align_image(&pix->width,
				      HVA_MIN_WIDTH, enc->max_width,
				      0,
				      &pix->height,
				      HVA_MIN_HEIGHT, enc->max_height,
				      0,
				      0);

		if ((pix->width != width) || (pix->height != height))
			dev_dbg(dev,
				"%s V4L2 TRY_FMT (CAPTURE): resolution updated %dx%d -> %dx%d to fit min/max/alignment\n",
				ctx->name, width, height,
				pix->width, pix->height);
	}

	stream_size = estimated_stream_size(pix->width, pix->height);
	if (pix->sizeimage < stream_size)
		pix->sizeimage = stream_size;

	pix->bytesperline = 0;
	pix->colorspace = ctx->colorspace;
	pix->xfer_func = ctx->xfer_func;
	pix->ycbcr_enc = ctx->ycbcr_enc;
	pix->quantization = ctx->quantization;
	pix->field = V4L2_FIELD_NONE;

	return 0;
}

static int hva_try_fmt_frame(struct file *file, void *priv,
			     struct v4l2_format *f)
{
	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
	struct device *dev = ctx_to_dev(ctx);
	struct v4l2_pix_format *pix = &f->fmt.pix;
	u32 pixelformat = pix->pixelformat;
	const struct hva_enc *enc;
	u32 width, height;

	enc = hva_find_encoder(ctx, pixelformat, ctx->streaminfo.streamformat);
	if (!enc) {
		dev_dbg(dev,
			"%s V4L2 TRY_FMT (OUTPUT): unsupported format %.4s\n",
			ctx->name, (char *)&pixelformat);
		return -EINVAL;
	}

	/* adjust width & height */
	width = pix->width;
	height = pix->height;
	v4l_bound_align_image(&pix->width,
			      HVA_MIN_WIDTH, HVA_MAX_WIDTH,
			      frame_alignment(pixelformat) - 1,
			      &pix->height,
			      HVA_MIN_HEIGHT, HVA_MAX_HEIGHT,
			      frame_alignment(pixelformat) - 1,
			      0);

	if ((pix->width != width) || (pix->height != height))
		dev_dbg(dev,
			"%s V4L2 TRY_FMT (OUTPUT): resolution updated %dx%d -> %dx%d to fit min/max/alignment\n",
			ctx->name, width, height, pix->width, pix->height);

	width = ALIGN(pix->width, HVA_WIDTH_ALIGNMENT);
	height = ALIGN(pix->height, HVA_HEIGHT_ALIGNMENT);

	if (!pix->colorspace) {
		pix->colorspace = V4L2_COLORSPACE_REC709;
		pix->xfer_func = V4L2_XFER_FUNC_DEFAULT;
		pix->ycbcr_enc = V4L2_YCBCR_ENC_DEFAULT;
		pix->quantization = V4L2_QUANTIZATION_DEFAULT;
	}

	pix->bytesperline = frame_stride(width, pixelformat);
	pix->sizeimage = frame_size(width, height, pixelformat);
	pix->field = V4L2_FIELD_NONE;

	return 0;
}

static int hva_s_fmt_stream(struct file *file, void *fh, struct v4l2_format *f)
{
	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
	struct device *dev = ctx_to_dev(ctx);
	struct vb2_queue *vq;
	int ret;

	ret = hva_try_fmt_stream(file, fh, f);
	if (ret) {
		dev_dbg(dev, "%s V4L2 S_FMT (CAPTURE): unsupported format %.4s\n",
			ctx->name, (char *)&f->fmt.pix.pixelformat);
		return ret;
	}

	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
	if (vb2_is_streaming(vq)) {
		dev_dbg(dev, "%s V4L2 S_FMT (CAPTURE): queue busy\n",
			ctx->name);
		return -EBUSY;
	}

	ctx->max_stream_size = f->fmt.pix.sizeimage;
	ctx->streaminfo.width = f->fmt.pix.width;
	ctx->streaminfo.height = f->fmt.pix.height;
	ctx->streaminfo.streamformat = f->fmt.pix.pixelformat;
	ctx->flags |= HVA_FLAG_STREAMINFO;

	return 0;
}

static int hva_s_fmt_frame(struct file *file, void *fh, struct v4l2_format *f)
{
	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
	struct device *dev = ctx_to_dev(ctx);
	struct v4l2_pix_format *pix = &f->fmt.pix;
	struct vb2_queue *vq;
	int ret;

	ret = hva_try_fmt_frame(file, fh, f);
	if (ret) {
		dev_dbg(dev, "%s V4L2 S_FMT (OUTPUT): unsupported format %.4s\n",
			ctx->name, (char *)&pix->pixelformat);
		return ret;
	}

	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
	if (vb2_is_streaming(vq)) {
		dev_dbg(dev, "%s V4L2 S_FMT (OUTPUT): queue busy\n", ctx->name);
		return -EBUSY;
	}

	ctx->colorspace = pix->colorspace;
	ctx->xfer_func = pix->xfer_func;
	ctx->ycbcr_enc = pix->ycbcr_enc;
	ctx->quantization = pix->quantization;

	ctx->frameinfo.aligned_width = ALIGN(pix->width, HVA_WIDTH_ALIGNMENT);
	ctx->frameinfo.aligned_height = ALIGN(pix->height,
					      HVA_HEIGHT_ALIGNMENT);
	ctx->frameinfo.size = pix->sizeimage;
	ctx->frameinfo.pixelformat = pix->pixelformat;
	ctx->frameinfo.width = pix->width;
	ctx->frameinfo.height = pix->height;
	ctx->flags |= HVA_FLAG_FRAMEINFO;

	return 0;
}

static int hva_g_parm(struct file *file, void *fh, struct v4l2_streamparm *sp)
{
	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
	struct v4l2_fract *time_per_frame = &ctx->ctrls.time_per_frame;

	if (sp->type != V4L2_BUF_TYPE_VIDEO_OUTPUT)
		return -EINVAL;

	sp->parm.output.capability = V4L2_CAP_TIMEPERFRAME;
	sp->parm.output.timeperframe.numerator = time_per_frame->numerator;
	sp->parm.output.timeperframe.denominator =
		time_per_frame->denominator;

	return 0;
}

static int hva_s_parm(struct file *file, void *fh, struct v4l2_streamparm *sp)
{
	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
	struct v4l2_fract *time_per_frame = &ctx->ctrls.time_per_frame;

	if (sp->type != V4L2_BUF_TYPE_VIDEO_OUTPUT)
		return -EINVAL;

	if (!sp->parm.output.timeperframe.numerator ||
	    !sp->parm.output.timeperframe.denominator)
		return hva_g_parm(file, fh, sp);

	sp->parm.output.capability = V4L2_CAP_TIMEPERFRAME;
	time_per_frame->numerator = sp->parm.output.timeperframe.numerator;
	time_per_frame->denominator =
		sp->parm.output.timeperframe.denominator;

	return 0;
}

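/*
 * Wrap v4l2_m2m_qbuf() so that, for capture buffers, the client-provided
 * header size (bytesused) is recorded in the hva stream buffer before the
 * buffer is queued.
 */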
static int hva_qbuf(struct file *file, void *priv, struct v4l2_buffer *buf)
{
	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
	struct device *dev = ctx_to_dev(ctx);

	if (buf->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
		/*
		 * depending on the targeted compressed video format, the
		 * capture buffer might contain headers (e.g. H.264 SPS/PPS)
		 * filled in by the driver client; the size of this data is
		 * copied from the bytesused field of the V4L2 buffer into
		 * the payload field of the hva stream buffer
		 */
		struct vb2_queue *vq;
		struct hva_stream *stream;
		struct vb2_buffer *vb2_buf;

		vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, buf->type);
		vb2_buf = vb2_get_buffer(vq, buf->index);
		if (!vb2_buf) {
			dev_dbg(dev, "%s buffer index %d not found\n",
				ctx->name, buf->index);
			return -EINVAL;
		}
		stream = to_hva_stream(to_vb2_v4l2_buffer(vb2_buf));
		stream->bytesused = buf->bytesused;
	}

	return v4l2_m2m_qbuf(file, ctx->fh.m2m_ctx, buf);
}

/* V4L2 ioctl ops */
static const struct v4l2_ioctl_ops hva_ioctl_ops = {
	.vidioc_querycap		= hva_querycap,
	.vidioc_enum_fmt_vid_cap	= hva_enum_fmt_stream,
	.vidioc_enum_fmt_vid_out	= hva_enum_fmt_frame,
	.vidioc_g_fmt_vid_cap		= hva_g_fmt_stream,
	.vidioc_g_fmt_vid_out		= hva_g_fmt_frame,
	.vidioc_try_fmt_vid_cap		= hva_try_fmt_stream,
	.vidioc_try_fmt_vid_out		= hva_try_fmt_frame,
	.vidioc_s_fmt_vid_cap		= hva_s_fmt_stream,
	.vidioc_s_fmt_vid_out		= hva_s_fmt_frame,
	.vidioc_g_parm			= hva_g_parm,
	.vidioc_s_parm			= hva_s_parm,
	.vidioc_reqbufs			= v4l2_m2m_ioctl_reqbufs,
	.vidioc_create_bufs		= v4l2_m2m_ioctl_create_bufs,
	.vidioc_querybuf		= v4l2_m2m_ioctl_querybuf,
	.vidioc_expbuf			= v4l2_m2m_ioctl_expbuf,
	.vidioc_qbuf			= hva_qbuf,
	.vidioc_dqbuf			= v4l2_m2m_ioctl_dqbuf,
	.vidioc_streamon		= v4l2_m2m_ioctl_streamon,
	.vidioc_streamoff		= v4l2_m2m_ioctl_streamoff,
	.vidioc_subscribe_event		= v4l2_ctrl_subscribe_event,
	.vidioc_unsubscribe_event	= v4l2_event_unsubscribe,
};

/*
 * V4L2 control operations
 */

static int hva_s_ctrl(struct v4l2_ctrl *ctrl)
{
	struct hva_ctx *ctx = container_of(ctrl->handler, struct hva_ctx,
					   ctrl_handler);
	struct device *dev = ctx_to_dev(ctx);

	dev_dbg(dev, "%s S_CTRL: id = %d, val = %d\n", ctx->name,
		ctrl->id, ctrl->val);

	switch (ctrl->id) {
	case V4L2_CID_MPEG_VIDEO_BITRATE_MODE:
		ctx->ctrls.bitrate_mode = ctrl->val;
		break;
	case V4L2_CID_MPEG_VIDEO_GOP_SIZE:
		ctx->ctrls.gop_size = ctrl->val;
		break;
	case V4L2_CID_MPEG_VIDEO_BITRATE:
		ctx->ctrls.bitrate = ctrl->val;
		break;
	case V4L2_CID_MPEG_VIDEO_ASPECT:
		ctx->ctrls.aspect = ctrl->val;
		break;
	case V4L2_CID_MPEG_VIDEO_H264_PROFILE:
		ctx->ctrls.profile = ctrl->val;
		snprintf(ctx->streaminfo.profile,
			 sizeof(ctx->streaminfo.profile),
			 "%s profile",
			 v4l2_ctrl_get_menu(ctrl->id)[ctrl->val]);
		break;
	case V4L2_CID_MPEG_VIDEO_H264_LEVEL:
		ctx->ctrls.level = ctrl->val;
		snprintf(ctx->streaminfo.level,
			 sizeof(ctx->streaminfo.level),
			 "level %s",
			 v4l2_ctrl_get_menu(ctrl->id)[ctrl->val]);
		break;
	case V4L2_CID_MPEG_VIDEO_H264_ENTROPY_MODE:
		ctx->ctrls.entropy_mode = ctrl->val;
		break;
	case V4L2_CID_MPEG_VIDEO_H264_CPB_SIZE:
		ctx->ctrls.cpb_size = ctrl->val;
		break;
	case V4L2_CID_MPEG_VIDEO_H264_8X8_TRANSFORM:
		ctx->ctrls.dct8x8 = ctrl->val;
		break;
	case V4L2_CID_MPEG_VIDEO_H264_MIN_QP:
		ctx->ctrls.qpmin = ctrl->val;
		break;
	case V4L2_CID_MPEG_VIDEO_H264_MAX_QP:
		ctx->ctrls.qpmax = ctrl->val;
		break;
	case V4L2_CID_MPEG_VIDEO_H264_VUI_SAR_ENABLE:
		ctx->ctrls.vui_sar = ctrl->val;
		break;
	case V4L2_CID_MPEG_VIDEO_H264_VUI_SAR_IDC:
		ctx->ctrls.vui_sar_idc = ctrl->val;
		break;
	case V4L2_CID_MPEG_VIDEO_H264_SEI_FRAME_PACKING:
		ctx->ctrls.sei_fp = ctrl->val;
		break;
	case V4L2_CID_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE:
		ctx->ctrls.sei_fp_type = ctrl->val;
		break;
	default:
		dev_dbg(dev, "%s S_CTRL: invalid control (id = %d)\n",
			ctx->name, ctrl->id);
		return -EINVAL;
	}

	return 0;
}

/* V4L2 control ops */
static const struct v4l2_ctrl_ops hva_ctrl_ops = {
	.s_ctrl = hva_s_ctrl,
};

static int hva_ctrls_setup(struct hva_ctx *ctx)
{
	struct device *dev = ctx_to_dev(ctx);
	u64 mask;
	enum v4l2_mpeg_video_h264_sei_fp_arrangement_type sei_fp_type =
		V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_TOP_BOTTOM;

	v4l2_ctrl_handler_init(&ctx->ctrl_handler, 15);

	v4l2_ctrl_new_std_menu(&ctx->ctrl_handler, &hva_ctrl_ops,
			       V4L2_CID_MPEG_VIDEO_BITRATE_MODE,
			       V4L2_MPEG_VIDEO_BITRATE_MODE_CBR,
			       0,
			       V4L2_MPEG_VIDEO_BITRATE_MODE_CBR);

	v4l2_ctrl_new_std(&ctx->ctrl_handler, &hva_ctrl_ops,
			  V4L2_CID_MPEG_VIDEO_GOP_SIZE,
			  1, 60, 1, 16);

	v4l2_ctrl_new_std(&ctx->ctrl_handler, &hva_ctrl_ops,
			  V4L2_CID_MPEG_VIDEO_BITRATE,
			  1000, 60000000, 1000, 20000000);

	mask = ~(1 << V4L2_MPEG_VIDEO_ASPECT_1x1);
	v4l2_ctrl_new_std_menu(&ctx->ctrl_handler, &hva_ctrl_ops,
			       V4L2_CID_MPEG_VIDEO_ASPECT,
			       V4L2_MPEG_VIDEO_ASPECT_1x1,
			       mask,
			       V4L2_MPEG_VIDEO_ASPECT_1x1);

	mask = ~((1 << V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE) |
		 (1 << V4L2_MPEG_VIDEO_H264_PROFILE_MAIN) |
		 (1 << V4L2_MPEG_VIDEO_H264_PROFILE_HIGH) |
		 (1 << V4L2_MPEG_VIDEO_H264_PROFILE_STEREO_HIGH));
	v4l2_ctrl_new_std_menu(&ctx->ctrl_handler, &hva_ctrl_ops,
			       V4L2_CID_MPEG_VIDEO_H264_PROFILE,
			       V4L2_MPEG_VIDEO_H264_PROFILE_STEREO_HIGH,
			       mask,
			       V4L2_MPEG_VIDEO_H264_PROFILE_HIGH);

	v4l2_ctrl_new_std_menu(&ctx->ctrl_handler, &hva_ctrl_ops,
			       V4L2_CID_MPEG_VIDEO_H264_LEVEL,
			       V4L2_MPEG_VIDEO_H264_LEVEL_4_2,
			       0,
			       V4L2_MPEG_VIDEO_H264_LEVEL_4_0);

	v4l2_ctrl_new_std_menu(&ctx->ctrl_handler, &hva_ctrl_ops,
			       V4L2_CID_MPEG_VIDEO_H264_ENTROPY_MODE,
			       V4L2_MPEG_VIDEO_H264_ENTROPY_MODE_CABAC,
			       0,
			       V4L2_MPEG_VIDEO_H264_ENTROPY_MODE_CAVLC);

	v4l2_ctrl_new_std(&ctx->ctrl_handler, &hva_ctrl_ops,
			  V4L2_CID_MPEG_VIDEO_H264_CPB_SIZE,
			  1, 10000, 1, 3000);

	v4l2_ctrl_new_std(&ctx->ctrl_handler, &hva_ctrl_ops,
			  V4L2_CID_MPEG_VIDEO_H264_8X8_TRANSFORM,
			  0, 1, 1, 0);

	v4l2_ctrl_new_std(&ctx->ctrl_handler, &hva_ctrl_ops,
			  V4L2_CID_MPEG_VIDEO_H264_MIN_QP,
			  0, 51, 1, 5);

	v4l2_ctrl_new_std(&ctx->ctrl_handler, &hva_ctrl_ops,
			  V4L2_CID_MPEG_VIDEO_H264_MAX_QP,
			  0, 51, 1, 51);

	v4l2_ctrl_new_std(&ctx->ctrl_handler, &hva_ctrl_ops,
			  V4L2_CID_MPEG_VIDEO_H264_VUI_SAR_ENABLE,
			  0, 1, 1, 1);

	mask = ~(1 << V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_1x1);
	v4l2_ctrl_new_std_menu(&ctx->ctrl_handler, &hva_ctrl_ops,
			       V4L2_CID_MPEG_VIDEO_H264_VUI_SAR_IDC,
			       V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_1x1,
			       mask,
			       V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_1x1);

	v4l2_ctrl_new_std(&ctx->ctrl_handler, &hva_ctrl_ops,
			  V4L2_CID_MPEG_VIDEO_H264_SEI_FRAME_PACKING,
			  0, 1, 1, 0);

	mask = ~(1 << sei_fp_type);
	v4l2_ctrl_new_std_menu(&ctx->ctrl_handler, &hva_ctrl_ops,
			       V4L2_CID_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE,
			       sei_fp_type,
			       mask,
			       sei_fp_type);

	if (ctx->ctrl_handler.error) {
		int err = ctx->ctrl_handler.error;

		dev_dbg(dev, "%s controls setup failed (%d)\n",
			ctx->name, err);
		v4l2_ctrl_handler_free(&ctx->ctrl_handler);
		return err;
	}

	v4l2_ctrl_handler_setup(&ctx->ctrl_handler);

	/* set default time per frame */
	ctx->ctrls.time_per_frame.numerator = HVA_DEFAULT_FRAME_NUM;
	ctx->ctrls.time_per_frame.denominator = HVA_DEFAULT_FRAME_DEN;

	return 0;
}

/*
 * mem-to-mem operations
 */

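/*
 * Workqueue handler: runs one encode job outside of atomic context, then
 * signals job completion to the V4L2 mem-to-mem framework.
 */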
static void hva_run_work(struct work_struct *work)
{
	struct hva_ctx *ctx = container_of(work, struct hva_ctx, run_work);
	struct vb2_v4l2_buffer *src_buf, *dst_buf;
	const struct hva_enc *enc = ctx->enc;
	struct hva_frame *frame;
	struct hva_stream *stream;
	int ret;

	/* protect instance against reentrancy */
	mutex_lock(&ctx->lock);

#ifdef CONFIG_VIDEO_STI_HVA_DEBUGFS
	hva_dbg_perf_begin(ctx);
#endif

	src_buf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
	dst_buf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);

	frame = to_hva_frame(src_buf);
	stream = to_hva_stream(dst_buf);
	frame->vbuf.sequence = ctx->frame_num++;

	ret = enc->encode(ctx, frame, stream);

	vb2_set_plane_payload(&dst_buf->vb2_buf, 0, stream->bytesused);
	if (ret) {
		v4l2_m2m_buf_done(src_buf, VB2_BUF_STATE_ERROR);
		v4l2_m2m_buf_done(dst_buf, VB2_BUF_STATE_ERROR);
	} else {
		/* propagate frame timestamp */
		dst_buf->vb2_buf.timestamp = src_buf->vb2_buf.timestamp;
		dst_buf->field = V4L2_FIELD_NONE;
		dst_buf->sequence = ctx->stream_num - 1;

		ctx->encoded_frames++;

#ifdef CONFIG_VIDEO_STI_HVA_DEBUGFS
		hva_dbg_perf_end(ctx, stream);
#endif

		v4l2_m2m_buf_done(src_buf, VB2_BUF_STATE_DONE);
		v4l2_m2m_buf_done(dst_buf, VB2_BUF_STATE_DONE);
	}

	mutex_unlock(&ctx->lock);

	v4l2_m2m_job_finish(ctx->hva_dev->m2m_dev, ctx->fh.m2m_ctx);
}

static void hva_device_run(void *priv)
{
	struct hva_ctx *ctx = priv;
	struct hva_dev *hva = ctx_to_hdev(ctx);

	queue_work(hva->work_queue, &ctx->run_work);
}

static void hva_job_abort(void *priv)
{
	struct hva_ctx *ctx = priv;
	struct device *dev = ctx_to_dev(ctx);

	dev_dbg(dev, "%s aborting job\n", ctx->name);

	ctx->aborting = true;
}

static int hva_job_ready(void *priv)
{
	struct hva_ctx *ctx = priv;
	struct device *dev = ctx_to_dev(ctx);

	if (!v4l2_m2m_num_src_bufs_ready(ctx->fh.m2m_ctx)) {
		dev_dbg(dev, "%s job not ready: no frame buffers\n",
			ctx->name);
		return 0;
	}

	if (!v4l2_m2m_num_dst_bufs_ready(ctx->fh.m2m_ctx)) {
		dev_dbg(dev, "%s job not ready: no stream buffers\n",
			ctx->name);
		return 0;
	}

	if (ctx->aborting) {
		dev_dbg(dev, "%s job not ready: aborting\n", ctx->name);
		return 0;
	}

	return 1;
}

/* mem-to-mem ops */
static const struct v4l2_m2m_ops hva_m2m_ops = {
	.device_run	= hva_device_run,
	.job_abort	= hva_job_abort,
	.job_ready	= hva_job_ready,
};

/*
 * VB2 queue operations
 */

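/*
 * Negotiate the number of planes and the minimum plane size with videobuf2:
 * frame (OUTPUT) buffers must hold a full frame, stream (CAPTURE) buffers
 * the maximum expected stream size.
 */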
static int hva_queue_setup(struct vb2_queue *vq,
			   unsigned int *num_buffers, unsigned int *num_planes,
			   unsigned int sizes[], struct device *alloc_devs[])
{
	struct hva_ctx *ctx = vb2_get_drv_priv(vq);
	struct device *dev = ctx_to_dev(ctx);
	unsigned int size;

	dev_dbg(dev, "%s %s queue setup: num_buffers %d\n", ctx->name,
		to_type_str(vq->type), *num_buffers);

	size = vq->type == V4L2_BUF_TYPE_VIDEO_OUTPUT ?
		ctx->frameinfo.size : ctx->max_stream_size;

	if (*num_planes)
		return sizes[0] < size ? -EINVAL : 0;

	/* only one plane supported */
	*num_planes = 1;
	sizes[0] = size;

	return 0;
}

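/*
 * Cache the plane virtual and DMA addresses in the driver buffer structure
 * the first time a buffer is prepared, and reject interlaced frames.
 */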
static int hva_buf_prepare(struct vb2_buffer *vb)
{
	struct hva_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
	struct device *dev = ctx_to_dev(ctx);
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);

	if (vb->vb2_queue->type == V4L2_BUF_TYPE_VIDEO_OUTPUT) {
		struct hva_frame *frame = to_hva_frame(vbuf);

		if (vbuf->field == V4L2_FIELD_ANY)
			vbuf->field = V4L2_FIELD_NONE;
		if (vbuf->field != V4L2_FIELD_NONE) {
			dev_dbg(dev,
				"%s frame[%d] prepare: %d field not supported\n",
				ctx->name, vb->index, vbuf->field);
			return -EINVAL;
		}

		if (!frame->prepared) {
			/* get memory addresses */
			frame->vaddr = vb2_plane_vaddr(&vbuf->vb2_buf, 0);
			frame->paddr = vb2_dma_contig_plane_dma_addr(
					&vbuf->vb2_buf, 0);
			frame->info = ctx->frameinfo;
			frame->prepared = true;

			dev_dbg(dev,
				"%s frame[%d] prepared; virt=%p, phy=%pad\n",
				ctx->name, vb->index,
				frame->vaddr, &frame->paddr);
		}
	} else {
		struct hva_stream *stream = to_hva_stream(vbuf);

		if (!stream->prepared) {
			/* get memory addresses */
			stream->vaddr = vb2_plane_vaddr(&vbuf->vb2_buf, 0);
			stream->paddr = vb2_dma_contig_plane_dma_addr(
					&vbuf->vb2_buf, 0);
			stream->size = vb2_plane_size(&vbuf->vb2_buf, 0);
			stream->prepared = true;

			dev_dbg(dev,
				"%s stream[%d] prepared; virt=%p, phy=%pad\n",
				ctx->name, vb->index,
				stream->vaddr, &stream->paddr);
		}
	}

	return 0;
}

static void hva_buf_queue(struct vb2_buffer *vb)
{
	struct hva_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);

	if (ctx->fh.m2m_ctx)
		v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
}

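/*
 * The encoder itself is opened only once both the OUTPUT and CAPTURE queues
 * are streaming; the context is also recorded in the device instances array
 * at that point.
 */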
static int hva_start_streaming(struct vb2_queue *vq, unsigned int count)
{
	struct hva_ctx *ctx = vb2_get_drv_priv(vq);
	struct hva_dev *hva = ctx_to_hdev(ctx);
	struct device *dev = ctx_to_dev(ctx);
	struct vb2_v4l2_buffer *vbuf;
	int ret;
	unsigned int i;
	bool found = false;

	dev_dbg(dev, "%s %s start streaming\n", ctx->name,
		to_type_str(vq->type));

	/* open the encoder once start_streaming has been called on both queues */
	if (V4L2_TYPE_IS_OUTPUT(vq->type)) {
		if (!vb2_start_streaming_called(&ctx->fh.m2m_ctx->cap_q_ctx.q))
			return 0;
	} else {
		if (!vb2_start_streaming_called(&ctx->fh.m2m_ctx->out_q_ctx.q))
			return 0;
	}

	/* store the instance context in the instances array */
	for (i = 0; i < HVA_MAX_INSTANCES; i++) {
		if (!hva->instances[i]) {
			hva->instances[i] = ctx;
			/* save the context identifier in the context */
			ctx->id = i;
			found = true;
			break;
		}
	}

	if (!found) {
		dev_err(dev, "%s maximum instances reached\n", ctx->name);
		ret = -ENOMEM;
		goto err;
	}

	hva->nb_of_instances++;

	if (!ctx->enc) {
		ret = hva_open_encoder(ctx,
				       ctx->streaminfo.streamformat,
				       ctx->frameinfo.pixelformat,
				       &ctx->enc);
		if (ret < 0)
			goto err_ctx;
	}

	return 0;

err_ctx:
	hva->instances[ctx->id] = NULL;
	hva->nb_of_instances--;
err:
	if (vq->type == V4L2_BUF_TYPE_VIDEO_OUTPUT) {
		/* return all pending buffers to vb2 (in queued state) */
		while ((vbuf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx)))
			v4l2_m2m_buf_done(vbuf, VB2_BUF_STATE_QUEUED);
	} else {
		/* return all pending buffers to vb2 (in queued state) */
		while ((vbuf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx)))
			v4l2_m2m_buf_done(vbuf, VB2_BUF_STATE_QUEUED);
	}

	ctx->sys_errors++;

	return ret;
}

static void hva_stop_streaming(struct vb2_queue *vq)
{
	struct hva_ctx *ctx = vb2_get_drv_priv(vq);
	struct hva_dev *hva = ctx_to_hdev(ctx);
	struct device *dev = ctx_to_dev(ctx);
	const struct hva_enc *enc = ctx->enc;
	struct vb2_v4l2_buffer *vbuf;

	dev_dbg(dev, "%s %s stop streaming\n", ctx->name,
		to_type_str(vq->type));

	if (vq->type == V4L2_BUF_TYPE_VIDEO_OUTPUT) {
		/* return all pending buffers to vb2 (in error state) */
		ctx->frame_num = 0;
		while ((vbuf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx)))
			v4l2_m2m_buf_done(vbuf, VB2_BUF_STATE_ERROR);
	} else {
		/* return all pending buffers to vb2 (in error state) */
		ctx->stream_num = 0;
		while ((vbuf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx)))
			v4l2_m2m_buf_done(vbuf, VB2_BUF_STATE_ERROR);
	}

	if ((V4L2_TYPE_IS_OUTPUT(vq->type) &&
	     vb2_is_streaming(&ctx->fh.m2m_ctx->cap_q_ctx.q)) ||
	    (V4L2_TYPE_IS_CAPTURE(vq->type) &&
	     vb2_is_streaming(&ctx->fh.m2m_ctx->out_q_ctx.q))) {
		dev_dbg(dev, "%s %s out=%d cap=%d\n",
			ctx->name, to_type_str(vq->type),
			vb2_is_streaming(&ctx->fh.m2m_ctx->out_q_ctx.q),
			vb2_is_streaming(&ctx->fh.m2m_ctx->cap_q_ctx.q));
		return;
	}

	/* close the encoder once stop_streaming has been called on both queues */
	if (enc) {
		dev_dbg(dev, "%s %s encoder closed\n", ctx->name, enc->name);
		enc->close(ctx);
		ctx->enc = NULL;

		/* clear instance context in instances array */
		hva->instances[ctx->id] = NULL;
		hva->nb_of_instances--;
	}

	ctx->aborting = false;
}

/* VB2 queue ops */
static const struct vb2_ops hva_qops = {
	.queue_setup		= hva_queue_setup,
	.buf_prepare		= hva_buf_prepare,
	.buf_queue		= hva_buf_queue,
	.start_streaming	= hva_start_streaming,
	.stop_streaming		= hva_stop_streaming,
	.wait_prepare		= vb2_ops_wait_prepare,
	.wait_finish		= vb2_ops_wait_finish,
};

/*
 * V4L2 file operations
 */

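/*
 * Common videobuf2 queue initialization shared by the OUTPUT and CAPTURE
 * queues; both use DMA contiguous memory and the device-wide lock.
 */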
static int queue_init(struct hva_ctx *ctx, struct vb2_queue *vq)
{
	vq->io_modes = VB2_MMAP | VB2_DMABUF;
	vq->drv_priv = ctx;
	vq->ops = &hva_qops;
	vq->mem_ops = &vb2_dma_contig_memops;
	vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	vq->lock = &ctx->hva_dev->lock;

	return vb2_queue_init(vq);
}

static int hva_queue_init(void *priv, struct vb2_queue *src_vq,
			  struct vb2_queue *dst_vq)
{
	struct hva_ctx *ctx = priv;
	int ret;

	src_vq->type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
	src_vq->buf_struct_size = sizeof(struct hva_frame);
	src_vq->min_queued_buffers = MIN_FRAMES;
	src_vq->dev = ctx->hva_dev->dev;

	ret = queue_init(ctx, src_vq);
	if (ret)
		return ret;

	dst_vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	dst_vq->buf_struct_size = sizeof(struct hva_stream);
	dst_vq->min_queued_buffers = MIN_STREAMS;
	dst_vq->dev = ctx->hva_dev->dev;

	return queue_init(ctx, dst_vq);
}

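/*
 * Create a new encoding context: set up controls, the mem-to-mem context
 * and default parameters; the hardware encoder itself is opened later, at
 * streaming start.
 */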
static int hva_open(struct file *file)
{
	struct hva_dev *hva = video_drvdata(file);
	struct device *dev = hva_to_dev(hva);
	struct hva_ctx *ctx;
	int ret;

	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
	if (!ctx) {
		ret = -ENOMEM;
		goto out;
	}
	ctx->hva_dev = hva;

	INIT_WORK(&ctx->run_work, hva_run_work);
	v4l2_fh_init(&ctx->fh, video_devdata(file));
	file->private_data = &ctx->fh;
	v4l2_fh_add(&ctx->fh);

	ret = hva_ctrls_setup(ctx);
	if (ret) {
		dev_err(dev, "%s [x:x] failed to setup controls\n",
			HVA_PREFIX);
		ctx->sys_errors++;
		goto err_fh;
	}
	ctx->fh.ctrl_handler = &ctx->ctrl_handler;

	mutex_init(&ctx->lock);

	ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(hva->m2m_dev, ctx,
					    &hva_queue_init);
	if (IS_ERR(ctx->fh.m2m_ctx)) {
		ret = PTR_ERR(ctx->fh.m2m_ctx);
		dev_err(dev, "%s failed to initialize m2m context (%d)\n",
			HVA_PREFIX, ret);
		ctx->sys_errors++;
		goto err_ctrls;
	}

	/* set the instance name */
	mutex_lock(&hva->lock);
	hva->instance_id++;
	snprintf(ctx->name, sizeof(ctx->name), "[%3d:----]",
		 hva->instance_id);
	mutex_unlock(&hva->lock);

	/* default parameters for frame and stream */
	set_default_params(ctx);

#ifdef CONFIG_VIDEO_STI_HVA_DEBUGFS
	hva_dbg_ctx_create(ctx);
#endif

	dev_info(dev, "%s encoder instance created\n", ctx->name);

	return 0;

err_ctrls:
	v4l2_ctrl_handler_free(&ctx->ctrl_handler);
err_fh:
	v4l2_fh_del(&ctx->fh);
	v4l2_fh_exit(&ctx->fh);
	kfree(ctx);
out:
	return ret;
}

static int hva_release(struct file *file)
{
	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
	struct hva_dev *hva = ctx_to_hdev(ctx);
	struct device *dev = ctx_to_dev(ctx);
	const struct hva_enc *enc = ctx->enc;

	if (enc) {
		dev_dbg(dev, "%s %s encoder closed\n", ctx->name, enc->name);
		enc->close(ctx);
		ctx->enc = NULL;

		/* clear instance context in instances array */
		hva->instances[ctx->id] = NULL;
		hva->nb_of_instances--;
	}

	/* trace a summary of the instance before closing (for debug purposes) */
	hva_dbg_summary(ctx);

	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);

	v4l2_ctrl_handler_free(&ctx->ctrl_handler);

	v4l2_fh_del(&ctx->fh);
	v4l2_fh_exit(&ctx->fh);

#ifdef CONFIG_VIDEO_STI_HVA_DEBUGFS
	hva_dbg_ctx_remove(ctx);
#endif

	dev_info(dev, "%s encoder instance released\n", ctx->name);

	kfree(ctx);

	return 0;
}

/* V4L2 file ops */
static const struct v4l2_file_operations hva_fops = {
	.owner			= THIS_MODULE,
	.open			= hva_open,
	.release		= hva_release,
	.unlocked_ioctl		= video_ioctl2,
	.mmap			= v4l2_m2m_fop_mmap,
	.poll			= v4l2_m2m_fop_poll,
};

/*
 * Platform device operations
 */

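/* allocate and register the video device and the V4L2 mem-to-mem device */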
static int hva_register_device(struct hva_dev *hva)
{
	int ret;
	struct video_device *vdev;
	struct device *dev;

	if (!hva)
		return -ENODEV;
	dev = hva_to_dev(hva);

	hva->m2m_dev = v4l2_m2m_init(&hva_m2m_ops);
	if (IS_ERR(hva->m2m_dev)) {
		dev_err(dev, "%s failed to initialize v4l2-m2m device\n",
			HVA_PREFIX);
		ret = PTR_ERR(hva->m2m_dev);
		goto err;
	}

	vdev = video_device_alloc();
	if (!vdev) {
		dev_err(dev, "%s failed to allocate video device\n",
			HVA_PREFIX);
		ret = -ENOMEM;
		goto err_m2m_release;
	}

	vdev->fops = &hva_fops;
	vdev->ioctl_ops = &hva_ioctl_ops;
	vdev->release = video_device_release;
	vdev->lock = &hva->lock;
	vdev->vfl_dir = VFL_DIR_M2M;
	vdev->device_caps = V4L2_CAP_STREAMING | V4L2_CAP_VIDEO_M2M;
	vdev->v4l2_dev = &hva->v4l2_dev;
	snprintf(vdev->name, sizeof(vdev->name), "%s%lx", HVA_NAME,
		 hva->ip_version);

	ret = video_register_device(vdev, VFL_TYPE_VIDEO, -1);
	if (ret) {
		dev_err(dev, "%s failed to register video device\n",
			HVA_PREFIX);
		goto err_vdev_release;
	}

	hva->vdev = vdev;
	video_set_drvdata(vdev, hva);
	return 0;

err_vdev_release:
	video_device_release(vdev);
err_m2m_release:
	v4l2_m2m_release(hva->m2m_dev);
err:
	return ret;
}

static void hva_unregister_device(struct hva_dev *hva)
{
	if (!hva)
		return;

	if (hva->m2m_dev)
		v4l2_m2m_release(hva->m2m_dev);

	video_unregister_device(hva->vdev);
}

static int hva_probe(struct platform_device *pdev)
{
	struct hva_dev *hva;
	struct device *dev = &pdev->dev;
	int ret;

	hva = devm_kzalloc(dev, sizeof(*hva), GFP_KERNEL);
	if (!hva) {
		ret = -ENOMEM;
		goto err;
	}

	ret = dma_coerce_mask_and_coherent(dev, DMA_BIT_MASK(32));
	if (ret)
		return ret;

	hva->dev = dev;
	hva->pdev = pdev;
	platform_set_drvdata(pdev, hva);

	mutex_init(&hva->lock);

	/* probe hardware */
	ret = hva_hw_probe(pdev, hva);
	if (ret)
		goto err;

	/* register all available encoders */
	register_encoders(hva);

	/* register all supported formats */
	register_formats(hva);

	/* register on V4L2 */
	ret = v4l2_device_register(dev, &hva->v4l2_dev);
	if (ret) {
		dev_err(dev, "%s %s failed to register V4L2 device\n",
			HVA_PREFIX, HVA_NAME);
		goto err_hw;
	}

#ifdef CONFIG_VIDEO_STI_HVA_DEBUGFS
	hva_debugfs_create(hva);
#endif

	hva->work_queue = create_workqueue(HVA_NAME);
	if (!hva->work_queue) {
		dev_err(dev, "%s %s failed to allocate work queue\n",
			HVA_PREFIX, HVA_NAME);
		ret = -ENOMEM;
		goto err_v4l2;
	}

	/* register device */
	ret = hva_register_device(hva);
	if (ret)
		goto err_work_queue;

	dev_info(dev, "%s %s registered as /dev/video%d\n", HVA_PREFIX,
		 HVA_NAME, hva->vdev->num);

	return 0;

err_work_queue:
	destroy_workqueue(hva->work_queue);
err_v4l2:
#ifdef CONFIG_VIDEO_STI_HVA_DEBUGFS
	hva_debugfs_remove(hva);
#endif
	v4l2_device_unregister(&hva->v4l2_dev);
err_hw:
	hva_hw_remove(hva);
err:
	return ret;
}

static void hva_remove(struct platform_device *pdev)
{
	struct hva_dev *hva = platform_get_drvdata(pdev);
	struct device *dev = hva_to_dev(hva);

	hva_unregister_device(hva);

	destroy_workqueue(hva->work_queue);

	hva_hw_remove(hva);

#ifdef CONFIG_VIDEO_STI_HVA_DEBUGFS
	hva_debugfs_remove(hva);
#endif

	v4l2_device_unregister(&hva->v4l2_dev);

	dev_info(dev, "%s %s removed\n", HVA_PREFIX, pdev->name);
}

/* PM ops */
static const struct dev_pm_ops hva_pm_ops = {
	.runtime_suspend	= hva_hw_runtime_suspend,
	.runtime_resume		= hva_hw_runtime_resume,
};

static const struct of_device_id hva_match_types[] = {
	{
	 .compatible = "st,st-hva",
	},
	{ /* end node */ }
};

MODULE_DEVICE_TABLE(of, hva_match_types);

static struct platform_driver hva_driver = {
	.probe  = hva_probe,
	.remove_new = hva_remove,
	.driver = {
		.name		= HVA_NAME,
		.of_match_table	= hva_match_types,
		.pm		= &hva_pm_ops,
	},
};

module_platform_driver(hva_driver);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Yannick Fertre <yannick.fertre@st.com>");
MODULE_DESCRIPTION("STMicroelectronics HVA video encoder V4L2 driver");