android13/external/uvc-gadget/main.c

/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
* UVC gadget test application
*
* Copyright (C) 2020 Rockchip Electronics Co., Ltd.
*
* Author: Bin Yang <yangbin@rock-chips.com>
*/
/* To provide basename and asprintf from the GNU library. */
#define _GNU_SOURCE
#include <signal.h>
#include <stdio.h>
#include <unistd.h>
#include <string.h>
#include <stdbool.h>
#include <fcntl.h>
#include <pthread.h>
#include <errno.h>
#include <stdlib.h> /* atoi, free, exit */
#include <sys/time.h> /* gettimeofday */
#include "uvc-gadget.h"
#include "uvc-enc.h"
#include "uvc-camera.h"
#include "uvc-rga.h"
#include "uvc-log.h"
#include "tools.h"
static pthread_t uvc_pthread_id = 0;
#ifdef ANDROID_PLATFORM
#include <cutils/properties.h>
static int uvc_get_log_level(struct uvc_source *src)
{
int log_level = property_get_int32("uvc.gadget.debug", 0x0);
uvc_set_source_log_level(src, log_level);
return log_level;
}
static int gadget_get_function(char *func)
{
if (property_get("sys.usb.config", func, "") <= 0) {
uvc_err("android_set_function getprop sys.usb.config failed.");
return -1;
}
return 0;
}
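/*
 * Switch the USB gadget function through the Android property system:
 * sys.usb.config is first set to "none" to tear down the current
 * configuration, then set to the requested function (e.g. "uvc").
 */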
static int gadget_set_function(char *func, bool reset_flag)
{
char get_func[PROPERTY_VALUE_MAX] = {0};
/* Delay for debouncing USB disconnects */
if (reset_flag) {
uvc_info("uvc gadget start reset.\n");
sleep(2);
}
gadget_get_function(get_func);
if (strstr(get_func, func)) {
uvc_warn("gadget function is already %s.", func);
return 0;
}
property_set("sys.usb.config", "none");
property_set("sys.usb.config", func);
/* Delay 200 ms to let the function switch complete */
usleep(200000);
return 0;
}
#else
static int uvc_get_log_level(struct uvc_source *src)
{
int log_level = 0;
uvc_set_source_log_level(src, log_level);
return log_level;
}
static int gadget_get_function(char *func)
{
return 0;
}
static int gadget_set_function(char *func, bool reset_flag)
{
return 0;
}
#endif
static void usage(const char *argv0)
{
fprintf(stderr, "Usage: %s [options] <source device>\n", argv0);
fprintf(stderr, "Available options are\n");
fprintf(stderr, " none Source is buffer fill (Default path:"UVC_RES_DIR")\n");
fprintf(stderr, " -c <device> Source is V4L2 device: <device> is v4l2 video node or 'rkcam'.\n");
fprintf(stderr, " Use 'rkcam' include (rkaiq, media-ctl link, encode, crop).\n");
fprintf(stderr, " Source is buffer fill: <device> is source path.\n");
fprintf(stderr, " Specified path:/<device>/<wHeight>p.jpg)\n");
fprintf(stderr, " -a With rkaiq (ISP camera must support).\n");
fprintf(stderr, " -l Support media-ctl link (ISP camera must support).\n");
fprintf(stderr, " -p Buffers process, use for RGA process and so on \n");
fprintf(stderr, " 0: SYNC process(Smaller latency).\n");
fprintf(stderr, " 1: ASYNC process(More stable frame rate).\n");
fprintf(stderr, " NOTE: If '-p' is set, buffer is forced to process whether\n");
fprintf(stderr, " it needs to or not. If '-p' is not set, it will determines\n");
fprintf(stderr, " whether to process based on the size and number of buffers.\n");
fprintf(stderr, " -r <W*H> Fix source resolution, if not fix, use the uvc resolution.\n");
fprintf(stderr, " -s Support Image crop.\n");
fprintf(stderr, " -b USB is transferred in bulk mode\n");
fprintf(stderr, " 0: set usb transfers mode by 'streaming_bulk' node\n");
fprintf(stderr, " 1: set usb transfers mode by user,\n");
fprintf(stderr, " dynamic buffers, need stream on/off\n");
fprintf(stderr, " 2: set usb transfers mode by user, static buffers\n");
fprintf(stderr, " does not stream on/off\n");
fprintf(stderr, " -m Memory type is MMAP, default is DMABUF.\n");
fprintf(stderr, " -- apply only to input and output format is the same.\n");
fprintf(stderr, " -y The specified source format is YUYV, default is NV12.\n");
fprintf(stderr, " -v UVC video format is H.265, default is H.264.\n");
fprintf(stderr, " -e uvc-gadget with mpp encode.\n");
fprintf(stderr, " The following format conversions are supported:\n");
fprintf(stderr, " ==> with '-e':\n");
fprintf(stderr, " IN: NV12 -> OUT: All formats\n");
fprintf(stderr, " IN: (with '-y') YUYV -> OUT: All formats\n");
fprintf(stderr, " IN: MJPEG -> OUT: MJPEG\n");
fprintf(stderr, " IN: H264/H265 -> OUT: H264/H265\n");
fprintf(stderr, " ==> without '-e':\n");
fprintf(stderr, " IN: NV12 -> OUT: YUYV\n");
fprintf(stderr, " IN: (with '-y') YUYV -> OUT: YUYV\n");
fprintf(stderr, " IN: MJPEG -> OUT: MJPEG\n");
fprintf(stderr, " IN: H264/H265 -> OUT: H264/H265\n");
fprintf(stderr, " -h Print this help screen and exit\n");
fprintf(stderr, "\n");
fprintf(stderr, "Example usage:\n");
fprintf(stderr, " uvc-gadget -- ISOC, NV12 buffer fill\n");
fprintf(stderr, " uvc-gadget -c "UVC_RES_DIR" -- ISOC, NV12 buffer fill\n");
fprintf(stderr, " uvc-gadget -y -- ISOC, YUYV buffer fill\n");
fprintf(stderr, " uvc-gadget -c /dev/video0 -- ISOC, V4l2 Stream\n");
fprintf(stderr, " uvc-gadget -c rkcam -- ISOC, V4l2 Stream, auto identification node\n");
fprintf(stderr, " uvc-gadget -b 0 -- BUKL, NV12 buffer fill, dynamic buffer\n");
fprintf(stderr, " uvc-gadget -b 1 -- BULK, NV12 buffer fill, static buffer\n");
fprintf(stderr, "\n");
}
/* Used only by the signal handler. */
static struct uvc_source *sigint_src;
static void uvc_gadget_exit(void)
{
/* force the uvc gadget thread to exit */
uvc_gadget_wait_complete(sigint_src, true);
}
static void sigint_handler(int sig)
{
(void)sig;
uvc_gadget_exit();
}
static void uvc_source_set_signal(struct uvc_source *src)
{
sigint_src = src;
signal(SIGINT, sigint_handler);
signal(SIGTERM, sigint_handler);
}
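/*
 * For MJPEG/H.264 buffer-fill sources, the frame data is read from a
 * resource file named "<height>p.jpg" or "<height>p.h264". The file is
 * looked up under the path given with '-c', otherwise under the
 * default UVC_RES_DIR.
 */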
static char *get_uvc_source_file(struct uvc_source *src)
{
char *cap_device = uvc_get_device(src);
struct v4l2_pix_format *fmt = uvc_get_format(src);
char *path = NULL;
int height = fmt->height;
int ret = -1;
if (cap_device) {
if (fmt->pixelformat == V4L2_PIX_FMT_MJPEG)
ret = asprintf(&path, "/%s/%up.jpg", cap_device, height);
else if (fmt->pixelformat == V4L2_PIX_FMT_H264)
ret = asprintf(&path, "/%s/%up.h264", cap_device, height);
} else {
if (fmt->pixelformat == V4L2_PIX_FMT_MJPEG)
ret = asprintf(&path, UVC_RES_DIR"/%up.jpg", height);
else if (fmt->pixelformat == V4L2_PIX_FMT_H264)
ret = asprintf(&path, UVC_RES_DIR"/%up.h264", height);
}
if (ret < 0)
path = NULL;
return path;
}
static int uvc_source_file_init(struct uvc_source *src)
{
char *path;
struct v4l2_pix_format *fmt = uvc_get_format(src);
if ((fmt->pixelformat != V4L2_PIX_FMT_H264) &&
(fmt->pixelformat != V4L2_PIX_FMT_MJPEG))
return 0;
path = get_uvc_source_file(src);
if (!path) {
uvc_err("uvc source file generate failed.\n");
return -1;
}
src->source_fp = fopen(path, "rb");
free(path);
if (src->source_fp == NULL) {
uvc_err("Unable to open uvc source file!\n");
return -1;
}
return 0;
}
static void uvc_source_file_uninit(struct uvc_source *src)
{
struct v4l2_pix_format *fmt = uvc_get_format(src);
if ((fmt->pixelformat != V4L2_PIX_FMT_H264) &&
(fmt->pixelformat != V4L2_PIX_FMT_MJPEG))
return;
if (src->source_fp)
fclose(src->source_fp);
}
static int uvc_source_file_load(struct uvc_source *src, int size, char *buffer)
{
size_t len;
if (!src->source_fp)
return -1;
if (buffer == NULL) {
uvc_err("App buffer has not been allocated yet!\n");
return -1;
}
len = fread(buffer, 1, size, src->source_fp);
if (len < (size_t)size)
uvc_warn("uvc source file short read: %d/%d bytes\n", (int)len, size);
return 0;
}
static int get_mjpeg_image_size(struct uvc_source *src)
{
int imagesize = 0;
if (!src->source_fp)
return imagesize;
fseek(src->source_fp, 0, SEEK_END);
imagesize = ftell(src->source_fp);
fseek(src->source_fp, 0, SEEK_SET);
return imagesize;
}
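/*
 * Scan the H.264 elementary stream for the next Annex-B start code
 * (00 00 00 01) to determine the size of one frame, then rewind the
 * file position to the start of that frame so uvc_source_file_load()
 * can read exactly frame_size bytes. When the end of the file is
 * reached, the file is rewound so the stream loops.
 */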
static int get_h264_stream_size(struct uvc_source *src)
{
size_t frame_size = 0;
size_t bytes_read = 0;
unsigned char start_code[] = {0x00, 0x00, 0x00, 0x01};
int start_code_size = sizeof(start_code);
char buf[1024] = {0};
FILE *fp = src->source_fp;
if (!fp)
return 0;
/* H.264 frame start code */
frame_size = fread(buf, 1, start_code_size, fp);
if (memcmp(buf, start_code, start_code_size)) {
if (feof(fp))
fseek(fp, 0, SEEK_SET);
else
uvc_err("read uvc source file failed!\n");
return 0;
}
while (!feof(fp)) {
bytes_read = fread(buf, 1, sizeof(buf), fp);
for (int i = 0; i < (int)bytes_read - start_code_size; i++) {
if (!memcmp(&buf[i], start_code, start_code_size)) {
int offset = frame_size + bytes_read;
frame_size += i;
fseek(fp, 0 - offset, SEEK_CUR);
return frame_size;
}
}
/* move file pointer forward */
if (!feof(fp)) {
fseek(fp, 0 - start_code_size, SEEK_CUR);
frame_size += bytes_read - start_code_size;
} else {
frame_size += bytes_read;
}
}
fseek(fp, 0 - frame_size, SEEK_CUR);
return frame_size;
}
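/*
 * Fill an NV12 test pattern: the Y plane is split into four horizontal
 * brightness bands (128/64/128/192) and the interleaved UV plane into
 * four chroma bands (0/64/128/192), producing a static banded image.
 */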
static int nv12_buffer_fill(struct uvc_source *src, int size, char *buffer)
{
struct v4l2_pix_format *fmt;
int y, uv;
fmt = uvc_get_format(src);
y = fmt->width * fmt->height / 4;
memset(buffer, 128, y);
memset(buffer + y, 64, y);
memset(buffer + y * 2, 128, y);
memset(buffer + y * 3, 192, y);
uv = fmt->width * fmt->height / 8;
memset(buffer + y * 4, 0, uv);
memset(buffer + y * 4 + uv, 64, uv);
memset(buffer + y * 4 + uv * 2, 128, uv);
memset(buffer + y * 4 + uv * 3, 192, uv);
return 0;
}
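/*
 * Fill a YUYV test pattern: the packed buffer is first split into four
 * byte bands, then the chroma bytes (every other byte) in the first
 * half of the buffer are overwritten with per-band values.
 */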
static int yuyv_buffer_fill(struct uvc_source *src, int size, char *buffer)
{
char *tmpdst = buffer;
struct v4l2_pix_format *fmt;
int i, y;
fmt = uvc_get_format(src);
y = fmt->width * fmt->height / 2;
memset(buffer, 128, y);
memset(buffer + y, 64, y);
memset(buffer + y * 2, 128, y);
memset(buffer + y * 3, 192, y);
for(i = 0; i < y / 2; i++) {
if (i % 2)
*tmpdst = 0;
tmpdst++;
}
for(i = y / 2; i < y; i++) {
if (i % 2)
*tmpdst = 64;
tmpdst++;
}
for(i = y; i < y * 3 / 2; i++) {
if (i % 2)
*tmpdst = 128;
tmpdst++;
}
for(i = y * 3 / 2; i < 2 * y; i++) {
if (i % 2)
*tmpdst = 192;
tmpdst++;
}
return 0;
}
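/*
 * Return the number of bytes to fill for one frame: computed from the
 * resolution for raw YUYV/NV12, or taken from the source file for
 * MJPEG (whole file) and H.264 (one Annex-B frame).
 */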
static int get_transfer_buffer_length(struct uvc_source *src, struct v4l2_pix_format *fmt)
{
int imagesize = 0;
switch (fmt->pixelformat) {
case V4L2_PIX_FMT_YUYV:
imagesize = fmt->width * fmt->height * 2;
break;
case V4L2_PIX_FMT_NV12:
imagesize = fmt->width * fmt->height * 3 / 2;
break;
case V4L2_PIX_FMT_MJPEG:
imagesize = get_mjpeg_image_size(src);
break;
case V4L2_PIX_FMT_H264:
imagesize = get_h264_stream_size(src);
break;
default:
break;
}
return imagesize;
}
static int fill_transfer_buffer(struct uvc_source *src, char *buffer)
{
struct v4l2_pix_format *fmt;
int transfer_length = 0;
fmt = uvc_get_format(src);
transfer_length = get_transfer_buffer_length(src, fmt);
switch (fmt->pixelformat) {
case V4L2_PIX_FMT_YUYV:
if (yuyv_buffer_fill(src, transfer_length, buffer))
return -1;
break;
case V4L2_PIX_FMT_NV12:
if (nv12_buffer_fill(src, transfer_length, buffer))
return -1;
break;
case V4L2_PIX_FMT_MJPEG:
case V4L2_PIX_FMT_H264:
if (uvc_source_file_load(src, transfer_length, buffer))
return -1;
break;
default:
return -1;
}
return 0;
}
static int uvc_copy_buffer_process(struct video_buffer *src_buf,
struct video_buffer *dest_buf)
{
/* TODO */
memcpy(dest_buf->mem, src_buf->mem, dest_buf->fmt->sizeimage);
return 0;
}
static int uvc_source_buffer_process(struct uvc_source *src,
struct video_buffer *src_buf, struct video_buffer *dest_buf)
{
void *src_rga_ctx;
void *dest_rga_ctx;
struct v4l2_pix_format *src_fmt;
struct v4l2_pix_format *dest_fmt;
struct timeval time_start, time_end;
int ret = 0;
bool use_rga = true;
gettimeofday(&time_start, NULL);
dest_rga_ctx = dest_buf->rga_ctx;
dest_fmt = dest_buf->fmt;
src_rga_ctx = src_buf->rga_ctx;
src_fmt = src_buf->fmt;
if (!src_rga_ctx || !dest_rga_ctx)
use_rga = false;
/* CPU copy or RGA copy/scale */
if (use_rga)
ret = uvc_rga_buffer_process(src_rga_ctx, src_fmt,
dest_rga_ctx, dest_fmt);
else
ret = uvc_copy_buffer_process(src_buf, dest_buf);
gettimeofday(&time_end, NULL);
uvc_dbg_if(src->log_level & UVCRGA_LOG,
"uvc [%s] process - time:%ld.\n",
use_rga ? "RGA" : "CPU",
1000000 * (time_end.tv_sec - time_start.tv_sec) +
(time_end.tv_usec - time_start.tv_usec));
return ret;
}
/* UVC ASYNC Process */
static int uvc_source_async_proc(struct uvc_source *src,
struct video_buffer *dest_buf)
{
struct video_buffer *src_buf = NULL;
int ret = 0;
/* to_pre_source() is true when the '-p' option is set */
if (to_pre_source(src)) {
/* get v4l2 device stream */
uvc_stream_get(src, dest_buf->index, &src_buf);
/* RGA ASYNC Process */
ret = uvc_source_buffer_process(src, src_buf, dest_buf);
} else {
/* Buffer fill */
ret = fill_transfer_buffer(src, dest_buf->mem);
}
return ret;
}
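/*
 * Fill thread: runs while the UVC stream is on. Each iteration exports
 * a free source buffer, fills it (test pattern / file data) or
 * processes it from the capture stream ('-p'), and submits it back to
 * the uvc-gadget library.
 */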
static void *uvc_buffer_fill_pthread(void *arg)
{
struct uvc_source *src = (struct uvc_source *)arg;
struct video_buffer *buffer = NULL;
struct v4l2_pix_format *fmt = uvc_get_format(src);
int transfer_length = 0;
/* 0: block, O_NONBLOCK: non-block */
int flags = 0;
uvc_source_file_init(src);
while(uvc_get_stream_on(src)) {
/* 1. get transfer length */
transfer_length = get_transfer_buffer_length(src, fmt);
/* 2. export source buffer */
if (uvc_buffer_export(src, transfer_length, flags, &buffer))
break;
if (!buffer) {
/*
* 1. in non-blocking mode, retry the buffer export after 2 ms
* 2. after a streamoff interrupt, the loop exits within ~2 ms
*/
usleep(2000);
continue;
}
/* 3. fill or process source buffer */
/* TODO */
if (uvc_source_async_proc(src, buffer))
break;
/* 4. submit source buffer */
uvc_buffer_submit(src, buffer);
}
uvc_source_file_uninit(src);
uvc_info("app fill buffer exit\n");
pthread_exit(NULL);
}
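/*
 * Stream-on callback: refresh the log level, start rkaiq for V4L2
 * sources when enabled, and spawn the fill thread for buffer-fill
 * sources or for V4L2 sources that use the '-p' pre-process path.
 */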
static int uvc_source_streamon(struct uvc_source *src, void *data)
{
uvc_info("video source streamon.\n");
uvc_get_log_level(src);
if (src->SRC_Args & SRC_FILL_TYPE_IS_V4L2) {
if (src->SRC_Args & SRC_ENABLE_RKAIQ)
uvc_rkaiq_start();
if (!to_pre_source(src))
return 0;
}
if (pthread_create(&uvc_pthread_id, NULL, uvc_buffer_fill_pthread, src)) {
uvc_err("%s: pthread_create failed!\n", __func__);
return -1;
}
return 0;
}
static int uvc_source_streamoff(struct uvc_source *src, void *data)
{
uvc_info("video source streamoff.\n");
if (src->SRC_Args & SRC_FILL_TYPE_IS_V4L2) {
if (src->SRC_Args & SRC_ENABLE_RKAIQ)
uvc_rkaiq_stop();
if (!to_pre_source(src))
return 0;
}
if (uvc_pthread_id)
pthread_join(uvc_pthread_id, NULL);
return 0;
}
static int uvc_source_start_ctrl(struct uvc_source *src, void *data)
{
uvc_info("uvc source start ctrl.\n");
/* TODO */
return 0;
}
static int uvc_source_rga_init(struct uvc_source *src, void *data)
{
struct video_buffer *buffer = (struct video_buffer *)data;
uvc_info("uvc source rga init, %ux%u, %c%c%c%c (size:%d).\n",
buffer->fmt->width, buffer->fmt->height,
PIX_FMT_STR(buffer->fmt->pixelformat),
buffer->fmt->sizeimage);
buffer->rga_ctx = uvc_rga_buffer_create(buffer->dmabuf, buffer->fmt);
return 0;
}
static int uvc_source_rga_deinit(struct uvc_source *src, void *data)
{
struct video_buffer *buffer = (struct video_buffer *)data;
if (!buffer->rga_ctx)
return 0;
uvc_info("uvc source rga deinit.\n");
return uvc_rga_buffer_destroy(buffer->rga_ctx);
}
/* RGA SYNC Process */
static int uvc_source_rga_proc(struct uvc_source *src, void *data)
{
struct rga_video_buffer *rga_param = (struct rga_video_buffer *)data;
struct video_buffer *src_buf = rga_param->src;
struct video_buffer *dest_buf = rga_param->dest;
return uvc_source_buffer_process(src, src_buf, dest_buf);
}
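/*
 * Camera Terminal (CT) and Processing Unit (PU) requests are forwarded
 * to rkaiq when it is enabled; otherwise they fail with an error,
 * since there is no ISP to apply the control to. CT requests are
 * ignored while the stream is off.
 */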
static int uvc_ctrl_unit_ct(struct uvc_source *src,
struct uvc_request_param *param, void *p)
{
struct uvc_request_ct *ct = (struct uvc_request_ct *)p;
int ret = -1;
if (!uvc_get_stream_on(src))
return 0;
if (src->SRC_Args & SRC_ENABLE_RKAIQ)
ret = uvc_rkaiq_ct(param, ct);
else
uvc_err("rkaiq is not running, cannot control CT");
return ret;
}
static int uvc_ctrl_unit_pu(struct uvc_source *src,
struct uvc_request_param *param, void *p)
{
struct uvc_request_pu *pu = (struct uvc_request_pu *)p;
int ret = -1;
if (src->SRC_Args & SRC_ENABLE_RKAIQ)
ret = uvc_rkaiq_pu(param, pu);
else
uvc_err("rkaiq is not running, cannot control PU");
return ret;
}
static int uvc_ctrl_unit_xu(struct uvc_source *src,
struct uvc_request_param *param, void *p)
{
struct uvc_request_xu *xu = (struct uvc_request_xu *)p;
/* Length of the XU data for GET requests; the size cannot exceed 60 bytes. */
param->get_len = 0x02;
switch (param->cs) {
case 0x01:
case 0x02:
case 0x03:
/* Example: */
if(param->dir == USB_TRAN_IN) {
/* TODO: */
memset(xu->data, 0xAA, param->len);
uvc_info("uvc_ctrl_unit_xu read len:%d.\n", param->len);
} else if (param->dir == USB_TRAN_OUT) {
/* Set XU data, the size cannot exceed 60. */
uvc_info("uvc_ctrl_unit_xu write len:%d "
"(Data: %2x %2x ...)\n",
param->len, xu->data[0], xu->data[1]);
}
break;
default:
uvc_err("uvc_ctrl_unit_xu cs:%d is not support.\n", param->cs);
break;
}
return 0;
}
int main(int argc, char *argv[])
{
struct uvc_source *src = NULL;
char *cap_device = NULL;
struct uvc_src_size src_size;
struct v4l2_pix_format format;
unsigned int SRC_Args = SRC_INITIALIZE_FLAG;
unsigned int width = 0, height = 0;
int ret = 0;
int opt;
char restore_func[50] = {0};
bool reset_flag = false;
/*
* fixed_uvc is set to true on the Linux platform.
* It is recommended to set fixed_uvc to false on the Android platform.
*/
#ifdef ANDROID_PLATFORM
bool fixed_uvc = false;
#else
bool fixed_uvc = true;
#endif
if (getuid()) {
uvc_err("Permission denied: uvc-gadget must be run as root.\n");
exit(EXIT_FAILURE);
}
while ((opt = getopt(argc, argv, "ab:c:ehlmp:r:svy")) != -1) {
switch (opt) {
case 'c':
cap_device = optarg;
if (cap_device && strstr(cap_device, "/dev/video"))
SRC_Args |= SRC_FILL_TYPE_IS_V4L2;
break;
case 'b':
/*
* optarg:
* 0: USB transfer mode is set by the 'streaming_bulk' node.
*
* 1: USB transfer mode is set by the user,
* dynamic buffers; each host app open/close requires a stream on/off.
* The uvc-gadget is reset and the USB is reconnected, which
* takes a long time, about 2-3 seconds.
*
* 2: USB transfer mode is set by the user,
* static buffers; host app open/close does not need stream on/off.
* This is very fast, but a large fixed buffer must be allocated,
* sized for the maximum resolution.
* NOTE: This method does not support format switching.
*/
switch (atoi(optarg)) {
case 0:
SRC_Args |= SINK_USER_SET_MODE;
break;
case 1:
SRC_Args |= SINK_TRAN_MODE_BULK;
SRC_Args &= ~SINK_BULK_BUF_STATIC;
break;
case 2:
SRC_Args |= SINK_TRAN_MODE_BULK;
SRC_Args |= SINK_BULK_BUF_STATIC;
break;
default:
fprintf(stderr, "Invalid optarg: %s\n", optarg);
usage(argv[0]);
return 1;
}
break;
case 'm':
SRC_Args |= SRC_MEM_TYPE_IS_MMAP;
break;
case 'e':
SRC_Args |= SRC_BUF_TYPE_IS_ENC;
break;
case 'v':
SRC_Args |= SINK_FMT_OUTPUT_H265;
break;
case 'p':
if (atoi(optarg) == 1)
SRC_Args |= SRC_BUF_ASYNC_PROCESS;
else
SRC_Args |= SRC_BUF_SYNC_PROCESS;
break;
case 'r':
if ((2 == sscanf(optarg, "%u*%u", &width, &height)) ||
(2 == sscanf(optarg, "%ux%u", &width, &height)) ||
(2 == sscanf(optarg, "%uX%u", &width, &height))) {
SRC_Args |= SRC_FIXED_RESOLUTION;
} else {
fprintf(stderr, "Invalid optarg: %s\n", optarg);
usage(argv[0]);
return 1;
}
break;
case 'y':
SRC_Args |= SRC_FMT_TYPE_IS_YUYV;
break;
case 'a':
SRC_Args |= SRC_ENABLE_RKAIQ;
break;
case 'l':
SRC_Args |= SRC_MEDIA_LINK;
break;
case 's':
SRC_Args |= SRC_ENABLE_CROP;
break;
case 'h':
usage(argv[0]);
return 0;
default:
fprintf(stderr, "Invalid option '-%c'\n", opt);
usage(argv[0]);
return 1;
}
}
memset(&src_size, 0, sizeof(struct uvc_src_size));
memset(&format, 0, sizeof(struct v4l2_pix_format));
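/*
* Source auto-configuration: when a '/dev/video*' device is combined
* with '-a', rkaiq is initialized and the camera format is queried.
* The 'rkcam' pseudo device additionally enables encode, media-ctl
* link and crop, resolves the actual video node, and records the
* camera's maximum resolution.
*/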
if (SRC_Args & SRC_FILL_TYPE_IS_V4L2 && SRC_Args & SRC_ENABLE_RKAIQ) {
uvc_rkaiq_init(SRC_Args & SRC_MEDIA_LINK);
uvc_rkaiq_get_format(&format, 0);
} else if(cap_device && !strcmp(cap_device, "rkcam")) {
SRC_Args |= SRC_FILL_TYPE_IS_V4L2;
SRC_Args |= SRC_BUF_TYPE_IS_ENC;
SRC_Args |= SRC_ENABLE_RKAIQ;
SRC_Args |= SRC_MEDIA_LINK;
SRC_Args |= SRC_ENABLE_CROP;
uvc_rkaiq_init(SRC_Args & SRC_MEDIA_LINK);
uvc_rkaiq_get_video(&cap_device, 0);
uvc_rkaiq_get_format(&format, 0);
src_size.max.width = format.width;
src_size.max.height = format.height;
src_size.src.width = width;
src_size.src.height = height;
src_size.pre.width = 0;
src_size.pre.height = 0;
}
/* Create and initialize the video source */
src = uvc_source_create(SRC_Args, cap_device);
if (src == NULL) {
uvc_err("uvc video source create failed.\n");
return 1;
}
/* Option: Setting Source Resolution */
uvc_set_source_size(src, src_size);
uvc_get_log_level(src);
/* register callback functions */
uvc_source_func_register(src, SRC_STREAM_ON, uvc_source_streamon);
uvc_source_func_register(src, SRC_STREAM_OFF, uvc_source_streamoff);
uvc_source_func_register(src, SRC_START_CTRL, uvc_source_start_ctrl);
uvc_source_func_register(src, SRC_RGA_INIT, uvc_source_rga_init);
uvc_source_func_register(src, SRC_RGA_DEINIT, uvc_source_rga_deinit);
uvc_source_func_register(src, SRC_RGA_PROC, uvc_source_rga_proc);
/* register mpp encode function */
if (SRC_Args & SRC_BUF_TYPE_IS_ENC)
uvc_gadget_register_encode(src);
uvc_source_set_signal(src);
uvc_gadget_fixed_function(fixed_uvc);
if (!fixed_uvc)
gadget_get_function(restore_func);
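/*
* Main gadget loop: when the gadget function is not fixed, the "uvc"
* function is (re)applied via the Android property helpers above, the
* gadget thread is created, and the loop blocks until it completes.
* A disconnect with a non-fixed function triggers a reset and another
* pass through the loop.
*/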
UVC_LOOP:
if (!fixed_uvc)
gadget_set_function("uvc", reset_flag);
/* create uvc main pthread */
ret = uvc_gadget_create(src);
if (ret) {
uvc_info("uvc gadget create failed.\n");
goto ERR;
}
/* Option: use dma buffer or drm buffer */
uvc_use_dma_buffer(src);
/* register uvc CT/PU/XU callback */
uvc_control_func_register(src, UVC_UNIT_ID_CT, uvc_ctrl_unit_ct);
uvc_control_func_register(src, UVC_UNIT_ID_PU, uvc_ctrl_unit_pu);
uvc_control_func_register(src, UVC_UNIT_ID_XU, uvc_ctrl_unit_xu);
/*
* Wait for the uvc gadget thread to complete; this is a blocking call.
* return 0: exit the uvc-gadget
* return 1: reset the uvc-gadget
*
* If the gadget function supports dynamic switching, UVC must be
* reset when the USB is disconnected.
*/
reset_flag = false;
ret = uvc_gadget_wait_complete(src, false);
if (ret && !fixed_uvc) {
uvc_info("usb is disconnectd, uvc gadget needs to be reset.\n");
reset_flag = true;
}
/* unregister uvc CT/PU/XU callback */
uvc_control_func_unregister(src);
/* UVC stop-and-wait, restart UVC or reset UVC */
if (reset_flag)
goto UVC_LOOP;
ERR:
if (!fixed_uvc)
gadget_set_function(restore_func, false);
if (SRC_Args & SRC_FILL_TYPE_IS_V4L2 && SRC_Args & SRC_ENABLE_RKAIQ)
uvc_rkaiq_deinit(SRC_Args & SRC_MEDIA_LINK);
uvc_source_func_unregister(src);
uvc_source_destroy(src);
return ret;
}