// SPDX-License-Identifier: (GPL-2.0 OR BSD-3-Clause)
/*
 * Wave5 series multi-standard codec IP - helper functions
 *
 * Copyright (C) 2021-2023 CHIPS&MEDIA INC
 */

#include <linux/bug.h>
#include "wave5-vpuapi.h"
#include "wave5-regdefine.h"
#include "wave5.h"

#define DECODE_ALL_TEMPORAL_LAYERS 0
#define DECODE_ALL_SPATIAL_LAYERS 0

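/*
 * Load the firmware and initialize the VPU while holding the hardware lock.
 * If the VPU has already been initialized, re-initialize it with the given
 * code and return -EBUSY.
 */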
static int wave5_initialize_vpu(struct device *dev, u8 *code, size_t size)
{
	int ret;
	struct vpu_device *vpu_dev = dev_get_drvdata(dev);

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	if (wave5_vpu_is_init(vpu_dev)) {
		wave5_vpu_re_init(dev, (void *)code, size);
		ret = -EBUSY;
		goto err_out;
	}

	ret = wave5_vpu_reset(dev, SW_RESET_ON_BOOT);
	if (ret)
		goto err_out;

	ret = wave5_vpu_init(dev, (void *)code, size);

err_out:
	mutex_unlock(&vpu_dev->hw_lock);
	return ret;
}

int wave5_vpu_init_with_bitcode(struct device *dev, u8 *bitcode, size_t size)
{
	if (!bitcode || size == 0)
		return -EINVAL;

	return wave5_initialize_vpu(dev, bitcode, size);
}

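/*
 * Flush a decoder or encoder instance. The firmware call is repeated while
 * it returns -EBUSY, up to MAX_FIRMWARE_CALL_RETRY times, after which
 * -ETIMEDOUT is returned.
 */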
int wave5_vpu_flush_instance(struct vpu_instance *inst)
{
	int ret = 0;
	int retry = 0;

	ret = mutex_lock_interruptible(&inst->dev->hw_lock);
	if (ret)
		return ret;
	do {
		/*
		 * Repeat the FLUSH command until the firmware reports that the
		 * VPU isn't running anymore
		 */
		ret = wave5_vpu_hw_flush_instance(inst);
		if (ret < 0 && ret != -EBUSY) {
			dev_warn(inst->dev->dev, "Flush of %s instance with id: %d failed: %d\n",
				 inst->type == VPU_INST_TYPE_DEC ? "DECODER" : "ENCODER", inst->id,
				 ret);
			mutex_unlock(&inst->dev->hw_lock);
			return ret;
		}
		if (ret == -EBUSY && retry++ >= MAX_FIRMWARE_CALL_RETRY) {
			dev_warn(inst->dev->dev, "Flush of %s instance with id: %d timed out!\n",
				 inst->type == VPU_INST_TYPE_DEC ? "DECODER" : "ENCODER", inst->id);
			mutex_unlock(&inst->dev->hw_lock);
			return -ETIMEDOUT;
		}
	} while (ret != 0);
	mutex_unlock(&inst->dev->hw_lock);

	return ret;
}

int wave5_vpu_get_version_info(struct device *dev, u32 *revision, unsigned int *product_id)
{
	int ret;
	struct vpu_device *vpu_dev = dev_get_drvdata(dev);

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	if (!wave5_vpu_is_init(vpu_dev)) {
		ret = -EINVAL;
		goto err_out;
	}

	if (product_id)
		*product_id = vpu_dev->product;
	ret = wave5_vpu_get_version(vpu_dev, revision);

err_out:
	mutex_unlock(&vpu_dev->hw_lock);
	return ret;
}

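/*
 * Validate the decoder open parameters: the instance id must be below
 * MAX_NUM_INSTANCE, the bitstream buffer must be 8-byte aligned, and its
 * size must be a multiple of 1024 and at least MIN_BITSTREAM_BUFFER_SIZE.
 */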
static int wave5_check_dec_open_param(struct vpu_instance *inst, struct dec_open_param *param)
{
	if (inst->id >= MAX_NUM_INSTANCE) {
		dev_err(inst->dev->dev, "Too many simultaneous instances: %d (max: %u)\n",
			inst->id, MAX_NUM_INSTANCE);
		return -EOPNOTSUPP;
	}

	if (param->bitstream_buffer % 8) {
		dev_err(inst->dev->dev,
			"Bitstream buffer must be aligned to a multiple of 8\n");
		return -EINVAL;
	}

	if (param->bitstream_buffer_size % 1024 ||
	    param->bitstream_buffer_size < MIN_BITSTREAM_BUFFER_SIZE) {
		dev_err(inst->dev->dev,
			"Bitstream buffer size must be aligned to a multiple of 1024 and have a minimum size of %d\n",
			MIN_BITSTREAM_BUFFER_SIZE);
		return -EINVAL;
	}

	return 0;
}

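/*
 * Open a decoder instance: validate the open parameters, copy them into the
 * instance's dec_info, set up the bitstream ring buffer pointers and the
 * default temporal/spatial layer selection, then build up the decoder
 * parameters in the firmware while holding the hardware lock.
 */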
int wave5_vpu_dec_open(struct vpu_instance *inst, struct dec_open_param *open_param)
{
	struct dec_info *p_dec_info;
	int ret;
	struct vpu_device *vpu_dev = inst->dev;
	dma_addr_t buffer_addr;
	size_t buffer_size;

	ret = wave5_check_dec_open_param(inst, open_param);
	if (ret)
		return ret;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	if (!wave5_vpu_is_init(vpu_dev)) {
		mutex_unlock(&vpu_dev->hw_lock);
		return -ENODEV;
	}

	p_dec_info = &inst->codec_info->dec_info;
	memcpy(&p_dec_info->open_param, open_param, sizeof(struct dec_open_param));

	buffer_addr = open_param->bitstream_buffer;
	buffer_size = open_param->bitstream_buffer_size;
	p_dec_info->stream_wr_ptr = buffer_addr;
	p_dec_info->stream_rd_ptr = buffer_addr;
	p_dec_info->stream_buf_start_addr = buffer_addr;
	p_dec_info->stream_buf_size = buffer_size;
	p_dec_info->stream_buf_end_addr = buffer_addr + buffer_size;
	p_dec_info->reorder_enable = TRUE;
	p_dec_info->temp_id_select_mode = TEMPORAL_ID_MODE_ABSOLUTE;
	p_dec_info->target_temp_id = DECODE_ALL_TEMPORAL_LAYERS;
	p_dec_info->target_spatial_id = DECODE_ALL_SPATIAL_LAYERS;

	ret = wave5_vpu_build_up_dec_param(inst, open_param);
	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

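/*
 * Free the auxiliary buffers (MV and FBC luma/chroma tables) for one
 * framebuffer index. Returns a non-zero value when the index is out of range
 * or when there is nothing left to free, so callers can stop iterating.
 */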
static int reset_auxiliary_buffers(struct vpu_instance *inst, unsigned int index)
{
	struct dec_info *p_dec_info = &inst->codec_info->dec_info;

	if (index >= MAX_REG_FRAME)
		return 1;

	if (p_dec_info->vb_mv[index].size == 0 && p_dec_info->vb_fbc_y_tbl[index].size == 0 &&
	    p_dec_info->vb_fbc_c_tbl[index].size == 0)
		return 1;

	wave5_vdi_free_dma_memory(inst->dev, &p_dec_info->vb_mv[index]);
	wave5_vdi_free_dma_memory(inst->dev, &p_dec_info->vb_fbc_y_tbl[index]);
	wave5_vdi_free_dma_memory(inst->dev, &p_dec_info->vb_fbc_c_tbl[index]);

	return 0;
}

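/*
 * Close a decoder instance: tell the firmware to finish the sequence
 * (retrying while it reports that the VPU is still running), then release
 * the work buffer, the per-frame auxiliary buffers and the task buffer.
 */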
int wave5_vpu_dec_close(struct vpu_instance *inst, u32 *fail_res)
{
	struct dec_info *p_dec_info = &inst->codec_info->dec_info;
	int ret;
	int retry = 0;
	struct vpu_device *vpu_dev = inst->dev;
	int i;

	*fail_res = 0;
	if (!inst->codec_info)
		return -EINVAL;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	do {
		ret = wave5_vpu_dec_finish_seq(inst, fail_res);
		if (ret < 0 && *fail_res != WAVE5_SYSERR_VPU_STILL_RUNNING) {
			dev_warn(inst->dev->dev, "dec_finish_seq timed out\n");
			goto unlock_and_return;
		}

		if (*fail_res == WAVE5_SYSERR_VPU_STILL_RUNNING &&
		    retry++ >= MAX_FIRMWARE_CALL_RETRY) {
			ret = -ETIMEDOUT;
			goto unlock_and_return;
		}
	} while (ret != 0);

	dev_dbg(inst->dev->dev, "%s: dec_finish_seq complete\n", __func__);

	wave5_vdi_free_dma_memory(vpu_dev, &p_dec_info->vb_work);

	for (i = 0; i < MAX_REG_FRAME; i++) {
		ret = reset_auxiliary_buffers(inst, i);
		if (ret) {
			ret = 0;
			break;
		}
	}

	wave5_vdi_free_dma_memory(vpu_dev, &p_dec_info->vb_task);

unlock_and_return:
	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

int wave5_vpu_dec_issue_seq_init(struct vpu_instance *inst)
{
	int ret;
	struct vpu_device *vpu_dev = inst->dev;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	ret = wave5_vpu_dec_init_seq(inst);

	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

int wave5_vpu_dec_complete_seq_init(struct vpu_instance *inst, struct dec_initial_info *info)
{
	struct dec_info *p_dec_info = &inst->codec_info->dec_info;
	int ret;
	struct vpu_device *vpu_dev = inst->dev;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	ret = wave5_vpu_dec_get_seq_info(inst, info);
	if (!ret)
		p_dec_info->initial_info_obtained = true;

	info->rd_ptr = wave5_dec_get_rd_ptr(inst);
	info->wr_ptr = p_dec_info->stream_wr_ptr;

	p_dec_info->initial_info = *info;

	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

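/*
 * Register the frame buffers with the firmware. The linear (display) buffers
 * follow the compressed (decoding) buffers in inst->frame_buf. The initial
 * sequence info must have been obtained, the stride must be a multiple of 8
 * and stride/height must cover the coded picture size.
 */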
int wave5_vpu_dec_register_frame_buffer_ex(struct vpu_instance *inst, int num_of_decoding_fbs,
					   int num_of_display_fbs, int stride, int height)
{
	struct dec_info *p_dec_info;
	int ret;
	struct vpu_device *vpu_dev = inst->dev;
	struct frame_buffer *fb;

	if (num_of_decoding_fbs >= WAVE5_MAX_FBS || num_of_display_fbs >= WAVE5_MAX_FBS)
		return -EINVAL;

	p_dec_info = &inst->codec_info->dec_info;
	p_dec_info->num_of_decoding_fbs = num_of_decoding_fbs;
	p_dec_info->num_of_display_fbs = num_of_display_fbs;
	p_dec_info->stride = stride;

	if (!p_dec_info->initial_info_obtained)
		return -EINVAL;

	if (stride < p_dec_info->initial_info.pic_width || (stride % 8 != 0) ||
	    height < p_dec_info->initial_info.pic_height)
		return -EINVAL;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	fb = inst->frame_buf;
	ret = wave5_vpu_dec_register_framebuffer(inst, &fb[p_dec_info->num_of_decoding_fbs],
						 LINEAR_FRAME_MAP, p_dec_info->num_of_display_fbs);
	if (ret)
		goto err_out;

	ret = wave5_vpu_dec_register_framebuffer(inst, &fb[0], COMPRESSED_FRAME_MAP,
						 p_dec_info->num_of_decoding_fbs);

err_out:
	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

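/*
 * Report the current read/write pointers of the bitstream ring buffer and
 * the number of bytes that can still be written (the free space minus one
 * byte, which keeps the write pointer from catching up with the read pointer).
 */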
int wave5_vpu_dec_get_bitstream_buffer(struct vpu_instance *inst, dma_addr_t *prd_ptr,
				       dma_addr_t *pwr_ptr, size_t *size)
{
	struct dec_info *p_dec_info;
	dma_addr_t rd_ptr;
	dma_addr_t wr_ptr;
	int room;
	struct vpu_device *vpu_dev = inst->dev;
	int ret;

	p_dec_info = &inst->codec_info->dec_info;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;
	rd_ptr = wave5_dec_get_rd_ptr(inst);
	mutex_unlock(&vpu_dev->hw_lock);

	wr_ptr = p_dec_info->stream_wr_ptr;

	if (wr_ptr < rd_ptr)
		room = rd_ptr - wr_ptr;
	else
		room = (p_dec_info->stream_buf_end_addr - wr_ptr) +
			(rd_ptr - p_dec_info->stream_buf_start_addr);
	room--;

	if (prd_ptr)
		*prd_ptr = rd_ptr;
	if (pwr_ptr)
		*pwr_ptr = wr_ptr;
	if (size)
		*size = room;

	return 0;
}

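/*
 * Advance the bitstream write pointer by 'size' bytes, wrapping around at the
 * end of the ring buffer. A size of 0 signals the end of the stream to the
 * firmware via the bitstream flag.
 */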
int wave5_vpu_dec_update_bitstream_buffer(struct vpu_instance *inst, size_t size)
{
	struct dec_info *p_dec_info;
	dma_addr_t wr_ptr;
	dma_addr_t rd_ptr;
	int ret;
	struct vpu_device *vpu_dev = inst->dev;

	if (!inst->codec_info)
		return -EINVAL;

	p_dec_info = &inst->codec_info->dec_info;
	wr_ptr = p_dec_info->stream_wr_ptr;
	rd_ptr = p_dec_info->stream_rd_ptr;

	if (size > 0) {
		if (wr_ptr < rd_ptr && rd_ptr <= wr_ptr + size)
			return -EINVAL;

		wr_ptr += size;

		if (wr_ptr > p_dec_info->stream_buf_end_addr) {
			u32 room = wr_ptr - p_dec_info->stream_buf_end_addr;

			wr_ptr = p_dec_info->stream_buf_start_addr;
			wr_ptr += room;
		} else if (wr_ptr == p_dec_info->stream_buf_end_addr) {
			wr_ptr = p_dec_info->stream_buf_start_addr;
		}

		p_dec_info->stream_wr_ptr = wr_ptr;
		p_dec_info->stream_rd_ptr = rd_ptr;
	}

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;
	ret = wave5_vpu_dec_set_bitstream_flag(inst, (size == 0));
	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

int wave5_vpu_dec_start_one_frame(struct vpu_instance *inst, u32 *res_fail)
{
	struct dec_info *p_dec_info = &inst->codec_info->dec_info;
	int ret;
	struct vpu_device *vpu_dev = inst->dev;

	if (p_dec_info->stride == 0) /* this means frame buffers have not been registered. */
		return -EINVAL;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	ret = wave5_vpu_decode(inst, res_fail);

	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

int wave5_vpu_dec_set_rd_ptr(struct vpu_instance *inst, dma_addr_t addr, int update_wr_ptr)
{
	struct dec_info *p_dec_info = &inst->codec_info->dec_info;
	int ret;
	struct vpu_device *vpu_dev = inst->dev;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	ret = wave5_dec_set_rd_ptr(inst, addr);

	p_dec_info->stream_rd_ptr = addr;
	if (update_wr_ptr)
		p_dec_info->stream_wr_ptr = addr;

	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

dma_addr_t wave5_vpu_dec_get_rd_ptr(struct vpu_instance *inst)
{
	int ret;
	dma_addr_t rd_ptr;

	ret = mutex_lock_interruptible(&inst->dev->hw_lock);
	if (ret)
		return ret;

	rd_ptr = wave5_dec_get_rd_ptr(inst);

	mutex_unlock(&inst->dev->hw_lock);

	return rd_ptr;
}

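/*
 * Collect the decode result for the last frame: query the firmware, derive
 * the decoded and display crop rectangles and picture sizes, update the
 * read pointer and display flags, and cache the per-index output info so it
 * can be reused when the same frame is displayed later.
 */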
int wave5_vpu_dec_get_output_info(struct vpu_instance *inst, struct dec_output_info *info)
{
	struct dec_info *p_dec_info;
	int ret;
	struct vpu_rect rect_info;
	u32 val;
	u32 decoded_index;
	u32 disp_idx;
	u32 max_dec_index;
	struct vpu_device *vpu_dev = inst->dev;
	struct dec_output_info *disp_info;

	if (!info)
		return -EINVAL;

	p_dec_info = &inst->codec_info->dec_info;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	memset(info, 0, sizeof(*info));

	ret = wave5_vpu_dec_get_result(inst, info);
	if (ret) {
		info->rd_ptr = p_dec_info->stream_rd_ptr;
		info->wr_ptr = p_dec_info->stream_wr_ptr;
		goto err_out;
	}

	decoded_index = info->index_frame_decoded;

	/* calculate display frame region */
	val = 0;
	rect_info.left = 0;
	rect_info.right = 0;
	rect_info.top = 0;
	rect_info.bottom = 0;

	if (decoded_index < WAVE5_MAX_FBS) {
		if (inst->std == W_HEVC_DEC || inst->std == W_AVC_DEC)
			rect_info = p_dec_info->initial_info.pic_crop_rect;

		if (inst->std == W_HEVC_DEC)
			p_dec_info->dec_out_info[decoded_index].decoded_poc = info->decoded_poc;

		p_dec_info->dec_out_info[decoded_index].rc_decoded = rect_info;
	}
	info->rc_decoded = rect_info;

	disp_idx = info->index_frame_display;
	if (info->index_frame_display >= 0 && info->index_frame_display < WAVE5_MAX_FBS) {
		disp_info = &p_dec_info->dec_out_info[disp_idx];
		if (info->index_frame_display != info->index_frame_decoded) {
			/*
			 * When index_frame_decoded < 0 and index_frame_display >= 0,
			 * info->dec_pic_width and info->dec_pic_height are still
			 * valid, but those of p_dec_info->dec_out_info[disp_idx]
			 * are invalid for VP9.
			 */
			info->disp_pic_width = disp_info->dec_pic_width;
			info->disp_pic_height = disp_info->dec_pic_height;
		} else {
			info->disp_pic_width = info->dec_pic_width;
			info->disp_pic_height = info->dec_pic_height;
		}

		info->rc_display = disp_info->rc_decoded;

	} else {
		info->rc_display.left = 0;
		info->rc_display.right = 0;
		info->rc_display.top = 0;
		info->rc_display.bottom = 0;
		info->disp_pic_width = 0;
		info->disp_pic_height = 0;
	}

	p_dec_info->stream_rd_ptr = wave5_dec_get_rd_ptr(inst);
	p_dec_info->frame_display_flag = vpu_read_reg(vpu_dev, W5_RET_DEC_DISP_IDC);

	val = p_dec_info->num_of_decoding_fbs; //fb_offset

	max_dec_index = (p_dec_info->num_of_decoding_fbs > p_dec_info->num_of_display_fbs) ?
		p_dec_info->num_of_decoding_fbs : p_dec_info->num_of_display_fbs;

	if (info->index_frame_display >= 0 &&
	    info->index_frame_display < (int)max_dec_index)
		info->disp_frame = inst->frame_buf[val + info->index_frame_display];

	info->rd_ptr = p_dec_info->stream_rd_ptr;
	info->wr_ptr = p_dec_info->stream_wr_ptr;
	info->frame_display_flag = p_dec_info->frame_display_flag;

	info->sequence_no = p_dec_info->initial_info.sequence_no;
	if (decoded_index < WAVE5_MAX_FBS)
		p_dec_info->dec_out_info[decoded_index] = *info;

	if (disp_idx < WAVE5_MAX_FBS)
		info->disp_frame.sequence_no = info->sequence_no;

	if (info->sequence_changed) {
		memcpy((void *)&p_dec_info->initial_info, (void *)&p_dec_info->new_seq_info,
		       sizeof(struct dec_initial_info));
		p_dec_info->initial_info.sequence_no++;
	}

err_out:
	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

int wave5_vpu_dec_clr_disp_flag(struct vpu_instance *inst, int index)
{
	struct dec_info *p_dec_info = &inst->codec_info->dec_info;
	int ret;
	struct vpu_device *vpu_dev = inst->dev;

	if (index >= p_dec_info->num_of_display_fbs)
		return -EINVAL;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;
	ret = wave5_dec_clr_disp_flag(inst, index);
	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

int wave5_vpu_dec_set_disp_flag(struct vpu_instance *inst, int index)
{
	struct dec_info *p_dec_info = &inst->codec_info->dec_info;
	int ret = 0;
	struct vpu_device *vpu_dev = inst->dev;

	if (index >= p_dec_info->num_of_display_fbs)
		return -EINVAL;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;
	ret = wave5_dec_set_disp_flag(inst, index);
	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

int wave5_vpu_dec_reset_framebuffer(struct vpu_instance *inst, unsigned int index)
{
	if (index >= MAX_REG_FRAME)
		return -EINVAL;

	if (inst->frame_vbuf[index].size == 0)
		return -EINVAL;

	wave5_vdi_free_dma_memory(inst->dev, &inst->frame_vbuf[index]);

	return 0;
}

int wave5_vpu_dec_give_command(struct vpu_instance *inst, enum codec_command cmd, void *parameter)
{
	struct dec_info *p_dec_info = &inst->codec_info->dec_info;
	int ret = 0;

	switch (cmd) {
	case DEC_GET_QUEUE_STATUS: {
		struct queue_status_info *queue_info = parameter;

		queue_info->instance_queue_count = p_dec_info->instance_queue_count;
		queue_info->report_queue_count = p_dec_info->report_queue_count;
		break;
	}
	case DEC_RESET_FRAMEBUF_INFO: {
		int i;

		for (i = 0; i < MAX_REG_FRAME; i++) {
			ret = wave5_vpu_dec_reset_framebuffer(inst, i);
			if (ret)
				break;
		}

		for (i = 0; i < MAX_REG_FRAME; i++) {
			ret = reset_auxiliary_buffers(inst, i);
			if (ret)
				break;
		}

		wave5_vdi_free_dma_memory(inst->dev, &p_dec_info->vb_task);
		break;
	}
	case DEC_GET_SEQ_INFO: {
		struct dec_initial_info *seq_info = parameter;

		*seq_info = p_dec_info->initial_info;
		break;
	}

	default:
		return -EINVAL;
	}

	return ret;
}

int wave5_vpu_enc_open(struct vpu_instance *inst, struct enc_open_param *open_param)
{
	struct enc_info *p_enc_info;
	int ret;
	struct vpu_device *vpu_dev = inst->dev;

	ret = wave5_vpu_enc_check_open_param(inst, open_param);
	if (ret)
		return ret;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	if (!wave5_vpu_is_init(vpu_dev)) {
		mutex_unlock(&vpu_dev->hw_lock);
		return -ENODEV;
	}

	p_enc_info = &inst->codec_info->enc_info;
	p_enc_info->open_param = *open_param;

	ret = wave5_vpu_build_up_enc_param(vpu_dev->dev, inst, open_param);
	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

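/*
 * Close an encoder instance: tell the firmware to finish the sequence
 * (retrying while it reports that the VPU is still running), then release
 * the work buffer, the codec-specific auxiliary buffers and the task buffer.
 */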
int wave5_vpu_enc_close(struct vpu_instance *inst, u32 *fail_res)
{
	struct enc_info *p_enc_info = &inst->codec_info->enc_info;
	int ret;
	int retry = 0;
	struct vpu_device *vpu_dev = inst->dev;

	*fail_res = 0;
	if (!inst->codec_info)
		return -EINVAL;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	do {
		ret = wave5_vpu_enc_finish_seq(inst, fail_res);
		if (ret < 0 && *fail_res != WAVE5_SYSERR_VPU_STILL_RUNNING) {
			dev_warn(inst->dev->dev, "enc_finish_seq timed out\n");
			mutex_unlock(&vpu_dev->hw_lock);
			return ret;
		}

		if (*fail_res == WAVE5_SYSERR_VPU_STILL_RUNNING &&
		    retry++ >= MAX_FIRMWARE_CALL_RETRY) {
			mutex_unlock(&vpu_dev->hw_lock);
			return -ETIMEDOUT;
		}
	} while (ret != 0);

	dev_dbg(inst->dev->dev, "%s: enc_finish_seq complete\n", __func__);

	wave5_vdi_free_dma_memory(vpu_dev, &p_enc_info->vb_work);

	if (inst->std == W_HEVC_ENC || inst->std == W_AVC_ENC) {
		wave5_vdi_free_dma_memory(vpu_dev, &p_enc_info->vb_sub_sam_buf);
		wave5_vdi_free_dma_memory(vpu_dev, &p_enc_info->vb_mv);
		wave5_vdi_free_dma_memory(vpu_dev, &p_enc_info->vb_fbc_y_tbl);
		wave5_vdi_free_dma_memory(vpu_dev, &p_enc_info->vb_fbc_c_tbl);
	}

	wave5_vdi_free_dma_memory(vpu_dev, &p_enc_info->vb_task);

	mutex_unlock(&vpu_dev->hw_lock);

	return 0;
}

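/*
 * Register the encoder reconstruction frame buffers with the firmware. The
 * buffers use the compressed frame map; stride must be a non-zero multiple
 * of 8, height must be positive and 'num' must not be below the minimum
 * frame buffer count reported by the firmware.
 */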
int wave5_vpu_enc_register_frame_buffer(struct vpu_instance *inst, unsigned int num,
					unsigned int stride, int height,
					enum tiled_map_type map_type)
{
	struct enc_info *p_enc_info = &inst->codec_info->enc_info;
	int ret;
	struct vpu_device *vpu_dev = inst->dev;
	unsigned int size_luma, size_chroma;
	int i;

	if (p_enc_info->stride)
		return -EINVAL;

	if (!p_enc_info->initial_info_obtained)
		return -EINVAL;

	if (num < p_enc_info->initial_info.min_frame_buffer_count)
		return -EINVAL;

	if (stride == 0 || stride % 8 != 0)
		return -EINVAL;

	if (height <= 0)
		return -EINVAL;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	p_enc_info->num_frame_buffers = num;
	p_enc_info->stride = stride;

	size_luma = stride * height;
	size_chroma = ALIGN(stride / 2, 16) * height;

	for (i = 0; i < num; i++) {
		if (!inst->frame_buf[i].update_fb_info)
			continue;

		inst->frame_buf[i].update_fb_info = false;
		inst->frame_buf[i].stride = stride;
		inst->frame_buf[i].height = height;
		inst->frame_buf[i].map_type = COMPRESSED_FRAME_MAP;
		inst->frame_buf[i].buf_y_size = size_luma;
		inst->frame_buf[i].buf_cb = inst->frame_buf[i].buf_y + size_luma;
		inst->frame_buf[i].buf_cb_size = size_chroma;
		inst->frame_buf[i].buf_cr_size = 0;
	}

	ret = wave5_vpu_enc_register_framebuffer(inst->dev->dev, inst, &inst->frame_buf[0],
						 COMPRESSED_FRAME_MAP,
						 p_enc_info->num_frame_buffers);

	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

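/*
 * Validate the per-frame encode parameters: a source frame must be given and
 * the picture stream buffer must be non-empty and properly aligned (16 bytes
 * for HEVC with a bit_rate of 0, 8 bytes otherwise).
 */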
static int wave5_check_enc_param(struct vpu_instance *inst, struct enc_param *param)
{
	struct enc_info *p_enc_info = &inst->codec_info->enc_info;

	if (!param)
		return -EINVAL;

	if (!param->source_frame)
		return -EINVAL;

	if (p_enc_info->open_param.bit_rate == 0 && inst->std == W_HEVC_ENC) {
		if (param->pic_stream_buffer_addr % 16 || param->pic_stream_buffer_size == 0)
			return -EINVAL;
	}
	if (param->pic_stream_buffer_addr % 8 || param->pic_stream_buffer_size == 0)
		return -EINVAL;

	return 0;
}

int wave5_vpu_enc_start_one_frame(struct vpu_instance *inst, struct enc_param *param, u32 *fail_res)
{
	struct enc_info *p_enc_info = &inst->codec_info->enc_info;
	int ret;
	struct vpu_device *vpu_dev = inst->dev;

	*fail_res = 0;

	if (p_enc_info->stride == 0) /* this means frame buffers have not been registered. */
		return -EINVAL;

	ret = wave5_check_enc_param(inst, param);
	if (ret)
		return ret;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	p_enc_info->pts_map[param->src_idx] = param->pts;

	ret = wave5_vpu_encode(inst, param, fail_res);

	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

int wave5_vpu_enc_get_output_info(struct vpu_instance *inst, struct enc_output_info *info)
{
	struct enc_info *p_enc_info = &inst->codec_info->enc_info;
	int ret;
	struct vpu_device *vpu_dev = inst->dev;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	ret = wave5_vpu_enc_get_result(inst, info);
	if (ret) {
		info->pts = 0;
		goto unlock;
	}

	if (info->recon_frame_index >= 0)
		info->pts = p_enc_info->pts_map[info->enc_src_idx];

unlock:
	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

int wave5_vpu_enc_give_command(struct vpu_instance *inst, enum codec_command cmd, void *parameter)
{
	struct enc_info *p_enc_info = &inst->codec_info->enc_info;

	switch (cmd) {
	case ENABLE_ROTATION:
		p_enc_info->rotation_enable = true;
		break;
	case ENABLE_MIRRORING:
		p_enc_info->mirror_enable = true;
		break;
	case SET_MIRROR_DIRECTION: {
		enum mirror_direction mir_dir;

		mir_dir = *(enum mirror_direction *)parameter;
		if (mir_dir != MIRDIR_NONE && mir_dir != MIRDIR_HOR &&
		    mir_dir != MIRDIR_VER && mir_dir != MIRDIR_HOR_VER)
			return -EINVAL;
		p_enc_info->mirror_direction = mir_dir;
		break;
	}
	case SET_ROTATION_ANGLE: {
		int angle;

		angle = *(int *)parameter;
		if (angle && angle != 90 && angle != 180 && angle != 270)
			return -EINVAL;
		if (p_enc_info->initial_info_obtained && (angle == 90 || angle == 270))
			return -EINVAL;
		p_enc_info->rotation_angle = angle;
		break;
	}
	case ENC_GET_QUEUE_STATUS: {
		struct queue_status_info *queue_info = parameter;

		queue_info->instance_queue_count = p_enc_info->instance_queue_count;
		queue_info->report_queue_count = p_enc_info->report_queue_count;
		break;
	}
	default:
		return -EINVAL;
	}
	return 0;
}

int wave5_vpu_enc_issue_seq_init(struct vpu_instance *inst)
{
	int ret;
	struct vpu_device *vpu_dev = inst->dev;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	ret = wave5_vpu_enc_init_seq(inst);

	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

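/*
 * Retrieve the initial sequence info from the firmware once sequence
 * initialization has completed, and cache it in the instance on success.
 */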
int wave5_vpu_enc_complete_seq_init(struct vpu_instance *inst, struct enc_initial_info *info)
{
	struct enc_info *p_enc_info = &inst->codec_info->enc_info;
	int ret;
	struct vpu_device *vpu_dev = inst->dev;

	if (!info)
		return -EINVAL;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	ret = wave5_vpu_enc_get_seq_info(inst, info);
	if (ret) {
		p_enc_info->initial_info_obtained = false;
		mutex_unlock(&vpu_dev->hw_lock);
		return ret;
	}

	p_enc_info->initial_info_obtained = true;
	p_enc_info->initial_info = *info;

	mutex_unlock(&vpu_dev->hw_lock);

	return 0;
}