// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2019 MediaTek Inc.
 * Author: Xia Jiang <xia.jiang@mediatek.com>
 *
 */

#include <linux/clk.h>
#include <linux/interrupt.h>
#include <linux/irq.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/mod_devicetable.h>
#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/slab.h>
#include <media/media-device.h>
#include <media/videobuf2-core.h>
#include <media/videobuf2-dma-contig.h>
#include <media/videobuf2-v4l2.h>
#include <media/v4l2-mem2mem.h>
#include <media/v4l2-dev.h>
#include <media/v4l2-device.h>
#include <media/v4l2-fh.h>
#include <media/v4l2-event.h>

#include "mtk_jpeg_core.h"
#include "mtk_jpeg_enc_hw.h"

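/*
 * Map the requested JPEG compression quality onto the discrete levels
 * supported by the hardware. Entries are sorted in ascending order;
 * mtk_jpeg_set_enc_params() picks the first entry whose quality_param is
 * greater than or equal to the requested quality, falling back to the
 * highest level otherwise.
 */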
static const struct mtk_jpeg_enc_qlt mtk_jpeg_enc_quality[] = {
	{.quality_param = 34, .hardware_value = JPEG_ENC_QUALITY_Q34},
	{.quality_param = 39, .hardware_value = JPEG_ENC_QUALITY_Q39},
	{.quality_param = 48, .hardware_value = JPEG_ENC_QUALITY_Q48},
	{.quality_param = 60, .hardware_value = JPEG_ENC_QUALITY_Q60},
	{.quality_param = 64, .hardware_value = JPEG_ENC_QUALITY_Q64},
	{.quality_param = 68, .hardware_value = JPEG_ENC_QUALITY_Q68},
	{.quality_param = 74, .hardware_value = JPEG_ENC_QUALITY_Q74},
	{.quality_param = 80, .hardware_value = JPEG_ENC_QUALITY_Q80},
	{.quality_param = 82, .hardware_value = JPEG_ENC_QUALITY_Q82},
	{.quality_param = 84, .hardware_value = JPEG_ENC_QUALITY_Q84},
	{.quality_param = 87, .hardware_value = JPEG_ENC_QUALITY_Q87},
	{.quality_param = 90, .hardware_value = JPEG_ENC_QUALITY_Q90},
	{.quality_param = 92, .hardware_value = JPEG_ENC_QUALITY_Q92},
	{.quality_param = 95, .hardware_value = JPEG_ENC_QUALITY_Q95},
	{.quality_param = 97, .hardware_value = JPEG_ENC_QUALITY_Q97},
};

static const struct of_device_id mtk_jpegenc_drv_ids[] = {
	{
		.compatible = "mediatek,mt8195-jpgenc-hw",
	},
	{},
};
MODULE_DEVICE_TABLE(of, mtk_jpegenc_drv_ids);

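/*
 * Toggle the software reset bit and clear the codec selection register so
 * the encoder starts the next job from a known state.
 */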
void mtk_jpeg_enc_reset(void __iomem *base)
{
	writel(0, base + JPEG_ENC_RSTB);
	writel(JPEG_ENC_RESET_BIT, base + JPEG_ENC_RSTB);
	writel(0, base + JPEG_ENC_CODEC_SEL);
}
EXPORT_SYMBOL_GPL(mtk_jpeg_enc_reset);

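/*
 * The size of the produced bitstream is the distance between the current
 * DMA write address and the destination base address.
 */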
u32 mtk_jpeg_enc_get_file_size(void __iomem *base)
{
	return readl(base + JPEG_ENC_DMA_ADDR0) -
	       readl(base + JPEG_ENC_DST_ADDR0);
}
EXPORT_SYMBOL_GPL(mtk_jpeg_enc_get_file_size);

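/* Enable the encoder and its interrupt to start encoding one frame. */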
void mtk_jpeg_enc_start(void __iomem *base)
{
	u32 value;

	value = readl(base + JPEG_ENC_CTRL);
	value |= JPEG_ENC_CTRL_INT_EN_BIT | JPEG_ENC_CTRL_ENABLE_BIT;
	writel(value, base + JPEG_ENC_CTRL);
}
EXPORT_SYMBOL_GPL(mtk_jpeg_enc_start);

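/*
 * Program the source plane addresses: plane 0 is the luma plane and, for
 * two-plane formats, plane 1 is the chroma plane. Any per-plane data_offset
 * is added to the DMA address.
 */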
void mtk_jpeg_set_enc_src(struct mtk_jpeg_ctx *ctx, void __iomem *base,
			  struct vb2_buffer *src_buf)
{
	int i;
	dma_addr_t dma_addr;

	for (i = 0; i < src_buf->num_planes; i++) {
		dma_addr = vb2_dma_contig_plane_dma_addr(src_buf, i) +
			   src_buf->planes[i].data_offset;
		if (!i)
			writel(dma_addr, base + JPEG_ENC_SRC_LUMA_ADDR);
		else
			writel(dma_addr, base + JPEG_ENC_SRC_CHROMA_ADDR);
	}
}
EXPORT_SYMBOL_GPL(mtk_jpeg_set_enc_src);

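/*
 * Program the destination (bitstream) buffer. The destination and stall
 * addresses are written 16-byte aligned; the low four bits of the DMA
 * address go into the byte-offset mask register, and room for an EXIF
 * header is reserved at the start of the buffer when EXIF output is
 * enabled.
 */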
void mtk_jpeg_set_enc_dst(struct mtk_jpeg_ctx *ctx, void __iomem *base,
			  struct vb2_buffer *dst_buf)
{
	dma_addr_t dma_addr;
	size_t size;
	u32 dma_addr_offset;
	u32 dma_addr_offsetmask;

	dma_addr = vb2_dma_contig_plane_dma_addr(dst_buf, 0);
	dma_addr_offset = ctx->enable_exif ? MTK_JPEG_MAX_EXIF_SIZE : 0;
	dma_addr_offsetmask = dma_addr & JPEG_ENC_DST_ADDR_OFFSET_MASK;
	size = vb2_plane_size(dst_buf, 0);

	writel(dma_addr_offset & ~0xf, base + JPEG_ENC_OFFSET_ADDR);
	writel(dma_addr_offsetmask & 0xf, base + JPEG_ENC_BYTE_OFFSET_MASK);
	writel(dma_addr & ~0xf, base + JPEG_ENC_DST_ADDR0);
	writel((dma_addr + size) & ~0xf, base + JPEG_ENC_STALL_ADDR0);
}
EXPORT_SYMBOL_GPL(mtk_jpeg_set_enc_dst);

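/*
 * Program the per-frame encoding parameters: image size, 8x8 block count,
 * image and memory strides, quality level, YUV format, file format and
 * restart interval.
 */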
void mtk_jpeg_set_enc_params(struct mtk_jpeg_ctx *ctx, void __iomem *base)
{
	u32 value;
	u32 width = ctx->out_q.enc_crop_rect.width;
	u32 height = ctx->out_q.enc_crop_rect.height;
	u32 enc_format = ctx->out_q.fmt->fourcc;
	u32 bytesperline = ctx->out_q.pix_mp.plane_fmt[0].bytesperline;
	u32 blk_num;
	u32 img_stride;
	u32 mem_stride;
	u32 i, enc_quality;
	u32 nr_enc_quality = ARRAY_SIZE(mtk_jpeg_enc_quality);

	value = width << 16 | height;
	writel(value, base + JPEG_ENC_IMG_SIZE);

	if (enc_format == V4L2_PIX_FMT_NV12M ||
	    enc_format == V4L2_PIX_FMT_NV21M)
		/*
		 * Total number of 8x8 luma and chroma blocks, minus one
		 * because the hardware counts blocks starting from 0.
		 */
		blk_num = DIV_ROUND_UP(width, 16) *
			  DIV_ROUND_UP(height, 16) * 6 - 1;
	else
		blk_num = DIV_ROUND_UP(width, 16) *
			  DIV_ROUND_UP(height, 8) * 4 - 1;
	writel(blk_num, base + JPEG_ENC_BLK_NUM);

	if (enc_format == V4L2_PIX_FMT_NV12M ||
	    enc_format == V4L2_PIX_FMT_NV21M) {
		/* 4:2:0 */
		img_stride = round_up(width, 16);
		mem_stride = bytesperline;
	} else {
		/* 4:2:2 */
		img_stride = round_up(width * 2, 32);
		mem_stride = img_stride;
	}
	writel(img_stride, base + JPEG_ENC_IMG_STRIDE);
	writel(mem_stride, base + JPEG_ENC_STRIDE);

	enc_quality = mtk_jpeg_enc_quality[nr_enc_quality - 1].hardware_value;
	for (i = 0; i < nr_enc_quality; i++) {
		if (ctx->enc_quality <= mtk_jpeg_enc_quality[i].quality_param) {
			enc_quality = mtk_jpeg_enc_quality[i].hardware_value;
			break;
		}
	}
	writel(enc_quality, base + JPEG_ENC_QUALITY);

	value = readl(base + JPEG_ENC_CTRL);
	value &= ~JPEG_ENC_CTRL_YUV_FORMAT_MASK;
	value |= (ctx->out_q.fmt->hw_format & 3) << 3;
	if (ctx->enable_exif)
		value |= JPEG_ENC_CTRL_FILE_FORMAT_BIT;
	else
		value &= ~JPEG_ENC_CTRL_FILE_FORMAT_BIT;
	if (ctx->restart_interval)
		value |= JPEG_ENC_CTRL_RESTART_EN_BIT;
	else
		value &= ~JPEG_ENC_CTRL_RESTART_EN_BIT;
	writel(value, base + JPEG_ENC_CTRL);

	writel(ctx->restart_interval, base + JPEG_ENC_RST_MCU_NUM);
}
EXPORT_SYMBOL_GPL(mtk_jpeg_set_enc_params);

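/*
 * Add the finished capture buffer to the context's done queue and complete
 * buffers strictly in frame order: only entries whose frame_num matches
 * last_done_frame_num are returned, so completions arriving out of order
 * are serialized here.
 */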
static void mtk_jpegenc_put_buf(struct mtk_jpegenc_comp_dev *jpeg)
{
	struct mtk_jpeg_ctx *ctx;
	struct vb2_v4l2_buffer *dst_buffer;
	struct list_head *temp_entry;
	struct list_head *pos = NULL;
	struct mtk_jpeg_src_buf *dst_done_buf, *tmp_dst_done_buf;
	unsigned long flags;

	ctx = jpeg->hw_param.curr_ctx;
	if (!ctx) {
		dev_err(jpeg->dev, "comp_jpeg: no current context\n");
		return;
	}

	dst_buffer = jpeg->hw_param.dst_buffer;
	if (!dst_buffer) {
		dev_err(jpeg->dev, "comp_jpeg: no destination buffer\n");
		return;
	}

	dst_done_buf = container_of(dst_buffer,
				    struct mtk_jpeg_src_buf, b);

	spin_lock_irqsave(&ctx->done_queue_lock, flags);
	list_add_tail(&dst_done_buf->list, &ctx->dst_done_queue);
	while (!list_empty(&ctx->dst_done_queue) &&
	       (pos != &ctx->dst_done_queue)) {
		list_for_each_prev_safe(pos, temp_entry, &ctx->dst_done_queue) {
			tmp_dst_done_buf = list_entry(pos,
						      struct mtk_jpeg_src_buf,
						      list);
			if (tmp_dst_done_buf->frame_num ==
				ctx->last_done_frame_num) {
				list_del(&tmp_dst_done_buf->list);
				v4l2_m2m_buf_done(&tmp_dst_done_buf->b,
						  VB2_BUF_STATE_DONE);
				ctx->last_done_frame_num++;
			}
		}
	}
	spin_unlock_irqrestore(&ctx->done_queue_lock, flags);
}

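/*
 * Delayed work that runs when the hardware fails to signal completion in
 * time: reset the core, drop the clock and runtime PM references, mark the
 * hardware idle again and return the source buffer with an error.
 */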
static void mtk_jpegenc_timeout_work(struct work_struct *work)
{
	struct delayed_work *dly_work = to_delayed_work(work);
	struct mtk_jpegenc_comp_dev *cjpeg =
		container_of(dly_work,
			     struct mtk_jpegenc_comp_dev,
			     job_timeout_work);
	struct mtk_jpeg_dev *master_jpeg = cjpeg->master_dev;
	enum vb2_buffer_state buf_state = VB2_BUF_STATE_ERROR;
	struct vb2_v4l2_buffer *src_buf, *dst_buf;

	src_buf = cjpeg->hw_param.src_buffer;
	dst_buf = cjpeg->hw_param.dst_buffer;
	v4l2_m2m_buf_copy_metadata(src_buf, dst_buf, true);

	mtk_jpeg_enc_reset(cjpeg->reg_base);
	clk_disable_unprepare(cjpeg->venc_clk.clks->clk);
	pm_runtime_put(cjpeg->dev);
	cjpeg->hw_state = MTK_JPEG_HW_IDLE;
	atomic_inc(&master_jpeg->hw_rdy);
	wake_up(&master_jpeg->hw_wq);
	v4l2_m2m_buf_done(src_buf, buf_state);
	mtk_jpegenc_put_buf(cjpeg);
}

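/*
 * Per-core interrupt handler: acknowledge the interrupt, record the size of
 * the produced bitstream, complete the buffers and hand the hardware back
 * to the master device.
 */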
static irqreturn_t mtk_jpegenc_hw_irq_handler(int irq, void *priv)
{
	struct vb2_v4l2_buffer *src_buf, *dst_buf;
	enum vb2_buffer_state buf_state;
	struct mtk_jpeg_ctx *ctx;
	u32 result_size;
	u32 irq_status;

	struct mtk_jpegenc_comp_dev *jpeg = priv;
	struct mtk_jpeg_dev *master_jpeg = jpeg->master_dev;

	cancel_delayed_work(&jpeg->job_timeout_work);

	ctx = jpeg->hw_param.curr_ctx;
	src_buf = jpeg->hw_param.src_buffer;
	dst_buf = jpeg->hw_param.dst_buffer;
	v4l2_m2m_buf_copy_metadata(src_buf, dst_buf, true);

	irq_status = readl(jpeg->reg_base + JPEG_ENC_INT_STS) &
		JPEG_ENC_INT_STATUS_MASK_ALLIRQ;
	if (irq_status)
		writel(0, jpeg->reg_base + JPEG_ENC_INT_STS);
	if (!(irq_status & JPEG_ENC_INT_STATUS_DONE))
		dev_warn(jpeg->dev, "JPEG encoder reported an unexpected interrupt status\n");

	result_size = mtk_jpeg_enc_get_file_size(jpeg->reg_base);
	vb2_set_plane_payload(&dst_buf->vb2_buf, 0, result_size);
	buf_state = VB2_BUF_STATE_DONE;
	v4l2_m2m_buf_done(src_buf, buf_state);
	mtk_jpegenc_put_buf(jpeg);
	pm_runtime_put(ctx->jpeg->dev);
	clk_disable_unprepare(jpeg->venc_clk.clks->clk);

	jpeg->hw_state = MTK_JPEG_HW_IDLE;
	wake_up(&master_jpeg->hw_wq);
	atomic_inc(&master_jpeg->hw_rdy);

	return IRQ_HANDLED;
}

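/* Request the interrupt line of this encoder hardware instance. */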
static int mtk_jpegenc_hw_init_irq(struct mtk_jpegenc_comp_dev *dev)
{
	struct platform_device *pdev = dev->plat_dev;
	int ret;

	dev->jpegenc_irq = platform_get_irq(pdev, 0);
	if (dev->jpegenc_irq < 0)
		return dev->jpegenc_irq;

	ret = devm_request_irq(&pdev->dev,
			       dev->jpegenc_irq,
			       mtk_jpegenc_hw_irq_handler,
			       0,
			       pdev->name, dev);
	if (ret) {
		dev_err(&pdev->dev, "Failed to request irq %d (%d)\n",
			dev->jpegenc_irq, ret);
		return ret;
	}

	return 0;
}

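/*
 * Probe one encoder hardware instance and register it with the master
 * mtk-jpeg device found on the parent node; jobs are dispatched to the
 * individual cores by the master driver.
 */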
static int mtk_jpegenc_hw_probe(struct platform_device *pdev)
{
	struct mtk_jpegenc_clk *jpegenc_clk;
	struct mtk_jpeg_dev *master_dev;
	struct mtk_jpegenc_comp_dev *dev;
	int ret, i;

	struct device *decs = &pdev->dev;

	if (!decs->parent)
		return -EPROBE_DEFER;

	master_dev = dev_get_drvdata(decs->parent);
	if (!master_dev)
		return -EPROBE_DEFER;

	dev = devm_kzalloc(&pdev->dev, sizeof(*dev), GFP_KERNEL);
	if (!dev)
		return -ENOMEM;

	dev->plat_dev = pdev;
	dev->dev = &pdev->dev;

	spin_lock_init(&dev->hw_lock);
	dev->hw_state = MTK_JPEG_HW_IDLE;

	INIT_DELAYED_WORK(&dev->job_timeout_work,
			  mtk_jpegenc_timeout_work);

	jpegenc_clk = &dev->venc_clk;

	jpegenc_clk->clk_num = devm_clk_bulk_get_all(&pdev->dev,
						     &jpegenc_clk->clks);
	if (jpegenc_clk->clk_num < 0)
		return dev_err_probe(&pdev->dev, jpegenc_clk->clk_num,
				     "Failed to get jpegenc clock count\n");

	dev->reg_base = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(dev->reg_base))
		return PTR_ERR(dev->reg_base);

	ret = mtk_jpegenc_hw_init_irq(dev);
	if (ret)
		return ret;

	i = atomic_add_return(1, &master_dev->hw_index) - 1;
	master_dev->enc_hw_dev[i] = dev;
	master_dev->reg_encbase[i] = dev->reg_base;
	dev->master_dev = master_dev;

	platform_set_drvdata(pdev, dev);
	pm_runtime_enable(&pdev->dev);

	return 0;
}

static struct platform_driver mtk_jpegenc_hw_driver = {
	.probe = mtk_jpegenc_hw_probe,
	.driver = {
		.name = "mtk-jpegenc-hw",
		.of_match_table = mtk_jpegenc_drv_ids,
	},
};

module_platform_driver(mtk_jpegenc_hw_driver);

MODULE_DESCRIPTION("MediaTek JPEG encoder HW driver");
MODULE_LICENSE("GPL");