Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Multi Planar device support #233

Merged
merged 4 commits into from
Oct 8, 2023
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions src/libs/frame.c
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,7 @@ unsigned us_frame_get_padding(const us_frame_s *frame) {
case V4L2_PIX_FMT_YUYV:
case V4L2_PIX_FMT_UYVY:
case V4L2_PIX_FMT_RGB565: bytes_per_pixel = 2; break;
case V4L2_PIX_FMT_BGR24:
case V4L2_PIX_FMT_RGB24: bytes_per_pixel = 3; break;
// case V4L2_PIX_FMT_H264:
case V4L2_PIX_FMT_MJPEG:
Expand Down
115 changes: 81 additions & 34 deletions src/ustreamer/device.c
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@ static const struct {
{"UYVY", V4L2_PIX_FMT_UYVY},
{"RGB565", V4L2_PIX_FMT_RGB565},
{"RGB24", V4L2_PIX_FMT_RGB24},
{"BGR24", V4L2_PIX_FMT_BGR24},
{"MJPEG", V4L2_PIX_FMT_MJPEG},
{"JPEG", V4L2_PIX_FMT_JPEG},
};
Expand Down Expand Up @@ -218,7 +219,7 @@ int us_device_export_to_dma(us_device_s *dev) {

for (unsigned index = 0; index < _RUN(n_bufs); ++index) {
struct v4l2_exportbuffer exp = {0};
exp.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
exp.type = dev->capture_type;
exp.index = index;

US_LOG_DEBUG("Exporting device buffer=%u to DMA ...", index);
Expand All @@ -245,7 +246,7 @@ int us_device_export_to_dma(us_device_s *dev) {

int us_device_switch_capturing(us_device_s *dev, bool enable) {
if (enable != _RUN(capturing)) {
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
enum v4l2_buf_type type = dev->capture_type;

US_LOG_DEBUG("%s device capturing ...", (enable ? "Starting" : "Stopping"));
if (_D_XIOCTL((enable ? VIDIOC_STREAMON : VIDIOC_STREAMOFF), &type) < 0) {
Expand Down Expand Up @@ -319,8 +320,14 @@ int us_device_grab_buffer(us_device_s *dev, us_hw_buffer_s **hw) {

do {
struct v4l2_buffer new = {0};
new.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
new.type = dev->capture_type;
new.memory = dev->io_method;
if (dev->capture_type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
new.length = 1;
argakon marked this conversation as resolved.
Show resolved Hide resolved
struct v4l2_plane planes[VIDEO_MAX_PLANES];
new.m.planes = planes;
argakon marked this conversation as resolved.
Show resolved Hide resolved
}

const bool new_got = (_D_XIOCTL(VIDIOC_DQBUF, &new) >= 0);

if (new_got) {
Expand All @@ -338,6 +345,9 @@ int us_device_grab_buffer(us_device_s *dev, us_hw_buffer_s **hw) {
}
GRABBED(new) = true;

if (dev->capture_type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
new.bytesused = new.m.planes[0].bytesused;
argakon marked this conversation as resolved.
Show resolved Hide resolved

broken = !_device_is_buffer_valid(dev, &new, FRAME_DATA(new));
if (broken) {
US_LOG_DEBUG("Releasing device buffer=%u (broken frame) ...", new.index);
Expand All @@ -363,6 +373,7 @@ int us_device_grab_buffer(us_device_s *dev, us_hw_buffer_s **hw) {
# undef FRAME_DATA

memcpy(&buf, &new, sizeof(struct v4l2_buffer));

buf_got = true;

} else {
Expand Down Expand Up @@ -475,7 +486,11 @@ static int _device_open_check_cap(us_device_s *dev) {
return -1;
}

if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) {
dev->capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
} else if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) {
dev->capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
} else {
US_LOG_ERROR("Video capture is not supported by device");
return -1;
}
Expand All @@ -485,11 +500,13 @@ static int _device_open_check_cap(us_device_s *dev) {
return -1;
}

int input = dev->input; // Needs a pointer to int for ioctl()
US_LOG_INFO("Using input channel: %d", input);
if (_D_XIOCTL(VIDIOC_S_INPUT, &input) < 0) {
US_LOG_ERROR("Can't set input channel");
return -1;
if (dev->capture_type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
mdevaev marked this conversation as resolved.
Show resolved Hide resolved
int input = dev->input; // Needs a pointer to int for ioctl()
US_LOG_INFO("Using input channel: %d", input);
if (_D_XIOCTL(VIDIOC_S_INPUT, &input) < 0) {
US_LOG_ERROR("Can't set input channel");
return -1;
}
}

if (dev->standard != V4L2_STD_UNKNOWN) {
Expand Down Expand Up @@ -571,12 +588,21 @@ static int _device_open_format(us_device_s *dev, bool first) {
const unsigned stride = us_align_size(_RUN(width), 32) << 1;

struct v4l2_format fmt = {0};
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width = _RUN(width);
fmt.fmt.pix.height = _RUN(height);
fmt.fmt.pix.pixelformat = dev->format;
fmt.fmt.pix.field = V4L2_FIELD_ANY;
fmt.fmt.pix.bytesperline = stride;
fmt.type = dev->capture_type;
if (dev->capture_type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
fmt.fmt.pix_mp.width = _RUN(width);
fmt.fmt.pix_mp.height = _RUN(height);
fmt.fmt.pix_mp.pixelformat = dev->format;
fmt.fmt.pix_mp.field = V4L2_FIELD_ANY;
fmt.fmt.pix_mp.flags = 0;
fmt.fmt.pix_mp.num_planes = 1;
} else {
fmt.fmt.pix.width = _RUN(width);
fmt.fmt.pix.height = _RUN(height);
fmt.fmt.pix.pixelformat = dev->format;
fmt.fmt.pix.field = V4L2_FIELD_ANY;
fmt.fmt.pix.bytesperline = stride;
}

// Set format
US_LOG_DEBUG("Probing device format=%s, stride=%u, resolution=%ux%u ...",
Expand All @@ -586,50 +612,58 @@ static int _device_open_format(us_device_s *dev, bool first) {
return -1;
}

# define FMT(x_next) ( fmt.type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE ? fmt.fmt.pix_mp.x_next : fmt.fmt.pix.x_next )
# define FMTS(x_next) ( fmt.type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE ? fmt.fmt.pix_mp.plane_fmt[0].x_next : fmt.fmt.pix.x_next )

// Check resolution
bool retry = false;
if (fmt.fmt.pix.width != _RUN(width) || fmt.fmt.pix.height != _RUN(height)) {
if (FMT(width) != _RUN(width) || FMT(height) != _RUN(height)) {
US_LOG_ERROR("Requested resolution=%ux%u is unavailable", _RUN(width), _RUN(height));
retry = true;
}
if (_device_apply_resolution(dev, fmt.fmt.pix.width, fmt.fmt.pix.height) < 0) {
if (_device_apply_resolution(dev, FMT(width), FMT(height)) < 0) {
return -1;
}
if (first && retry) {
return _device_open_format(dev, false);
}
US_LOG_INFO("Using resolution: %ux%u", _RUN(width), _RUN(height));
US_LOG_INFO("Using resolution: %ux%u, size:%u", _RUN(width), _RUN(height), _RUN(width)*_RUN(height)*3);

// Check format
if (fmt.fmt.pix.pixelformat != dev->format) {
if (FMT(pixelformat) != dev->format) {
US_LOG_ERROR("Could not obtain the requested format=%s; driver gave us %s",
_format_to_string_supported(dev->format),
_format_to_string_supported(fmt.fmt.pix.pixelformat));
_format_to_string_supported(FMT(pixelformat)));

char *format_str;
if ((format_str = (char *)_format_to_string_nullable(fmt.fmt.pix.pixelformat)) != NULL) {
if ((format_str = (char *)_format_to_string_nullable(FMT(pixelformat))) != NULL) {
US_LOG_INFO("Falling back to format=%s", format_str);
} else {
char fourcc_str[8];
US_LOG_ERROR("Unsupported format=%s (fourcc)",
us_fourcc_to_string(fmt.fmt.pix.pixelformat, fourcc_str, 8));
us_fourcc_to_string(FMT(pixelformat), fourcc_str, 8));
return -1;
}
}

_RUN(format) = fmt.fmt.pix.pixelformat;
_RUN(format) = FMT(pixelformat);
US_LOG_INFO("Using format: %s", _format_to_string_supported(_RUN(format)));

_RUN(stride) = fmt.fmt.pix.bytesperline;
_RUN(raw_size) = fmt.fmt.pix.sizeimage; // Only for userptr

_RUN(stride) = FMTS(bytesperline);
_RUN(raw_size) = FMTS(sizeimage); // Only for userptr

# undef FMTS
# undef FMT

return 0;
}

static void _device_open_hw_fps(us_device_s *dev) {
_RUN(hw_fps) = 0;

struct v4l2_streamparm setfps = {0};
setfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
setfps.type = dev->capture_type;

US_LOG_DEBUG("Querying HW FPS ...");
if (_D_XIOCTL(VIDIOC_G_PARM, &setfps) < 0) {
Expand All @@ -649,7 +683,7 @@ static void _device_open_hw_fps(us_device_s *dev) {
# define SETFPS_TPF(x_next) setfps.parm.capture.timeperframe.x_next

US_MEMSET_ZERO(setfps);
setfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
setfps.type = dev->capture_type;
SETFPS_TPF(numerator) = 1;
SETFPS_TPF(denominator) = (dev->desired_fps == 0 ? 255 : dev->desired_fps);

Expand Down Expand Up @@ -712,7 +746,7 @@ static int _device_open_io_method(us_device_s *dev) {
static int _device_open_io_method_mmap(us_device_s *dev) {
struct v4l2_requestbuffers req = {0};
req.count = dev->n_bufs;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.type = dev->capture_type;
req.memory = V4L2_MEMORY_MMAP;

US_LOG_DEBUG("Requesting %u device buffers for MMAP ...", req.count);
Expand All @@ -733,9 +767,14 @@ static int _device_open_io_method_mmap(us_device_s *dev) {
US_CALLOC(_RUN(hw_bufs), req.count);
for (_RUN(n_bufs) = 0; _RUN(n_bufs) < req.count; ++_RUN(n_bufs)) {
struct v4l2_buffer buf = {0};
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
struct v4l2_plane planes[VIDEO_MAX_PLANES];
buf.type = dev->capture_type;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = _RUN(n_bufs);
if (dev->capture_type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
buf.m.planes = planes;
argakon marked this conversation as resolved.
Show resolved Hide resolved
buf.length = VIDEO_MAX_PLANES;
}

US_LOG_DEBUG("Calling us_xioctl(VIDIOC_QUERYBUF) for device buffer=%u ...", _RUN(n_bufs));
if (_D_XIOCTL(VIDIOC_QUERYBUF, &buf) < 0) {
Expand All @@ -750,17 +789,17 @@ static int _device_open_io_method_mmap(us_device_s *dev) {
US_LOG_DEBUG("Mapping device buffer=%u ...", _RUN(n_bufs));
if ((HW(raw.data) = mmap(
NULL,
buf.length,
(buf.type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE ? buf.m.planes[0].length : buf.length),
PROT_READ | PROT_WRITE,
MAP_SHARED,
_RUN(fd),
buf.m.offset
(buf.type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE ? buf.m.planes[0].m.mem_offset : buf.m.offset)
)) == MAP_FAILED) {
US_LOG_PERROR("Can't map device buffer=%u", _RUN(n_bufs));
return -1;
}
assert(HW(raw.data) != NULL);
HW(raw.allocated) = buf.length;
HW(raw.allocated) = (buf.type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE ? buf.m.planes[0].length : buf.length);

# undef HW
}
Expand All @@ -770,7 +809,7 @@ static int _device_open_io_method_mmap(us_device_s *dev) {
static int _device_open_io_method_userptr(us_device_s *dev) {
struct v4l2_requestbuffers req = {0};
req.count = dev->n_bufs;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.type = dev->capture_type;
req.memory = V4L2_MEMORY_USERPTR;

US_LOG_DEBUG("Requesting %u device buffers for USERPTR ...", req.count);
Expand Down Expand Up @@ -806,10 +845,18 @@ static int _device_open_io_method_userptr(us_device_s *dev) {
static int _device_open_queue_buffers(us_device_s *dev) {
for (unsigned index = 0; index < _RUN(n_bufs); ++index) {
struct v4l2_buffer buf = {0};
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
struct v4l2_plane planes[VIDEO_MAX_PLANES];
buf.type = dev->capture_type;
buf.memory = dev->io_method;
buf.index = index;
if (dev->capture_type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
buf.m.planes = planes;
argakon marked this conversation as resolved.
Show resolved Hide resolved
buf.length = 1;
}

if (dev->io_method == V4L2_MEMORY_USERPTR) {
// i am not sure, may be this is incorrect for mplane device,
// but i don't have one which supports V4L2_MEMORY_USERPTR
buf.m.userptr = (unsigned long)_RUN(hw_bufs)[index].raw.data;
buf.length = _RUN(hw_bufs)[index].raw.allocated;
}
Expand Down
3 changes: 2 additions & 1 deletion src/ustreamer/device.h
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@
#define US_STANDARDS_STR "PAL, NTSC, SECAM"

#define US_FORMAT_UNKNOWN -1
#define US_FORMATS_STR "YUYV, UYVY, RGB565, RGB24, MJPEG, JPEG"
#define US_FORMATS_STR "YUYV, UYVY, RGB565, RGB24, BGR24, MJPEG, JPEG"

#define US_IO_METHOD_UNKNOWN -1
#define US_IO_METHODS_STR "MMAP, USERPTR"
Expand Down Expand Up @@ -126,6 +126,7 @@ typedef struct {
unsigned jpeg_quality;
v4l2_std_id standard;
enum v4l2_memory io_method;
enum v4l2_buf_type capture_type;
bool dv_timings;
unsigned n_bufs;
unsigned desired_fps;
Expand Down
28 changes: 28 additions & 0 deletions src/ustreamer/encoders/cpu/encoder.c
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@ static void _jpeg_write_scanlines_yuyv(struct jpeg_compress_struct *jpeg, const
static void _jpeg_write_scanlines_uyvy(struct jpeg_compress_struct *jpeg, const us_frame_s *frame);
static void _jpeg_write_scanlines_rgb565(struct jpeg_compress_struct *jpeg, const us_frame_s *frame);
static void _jpeg_write_scanlines_rgb24(struct jpeg_compress_struct *jpeg, const us_frame_s *frame);
static void _jpeg_write_scanlines_bgr24(struct jpeg_compress_struct *jpeg, const us_frame_s *frame);

static void _jpeg_init_destination(j_compress_ptr jpeg);
static boolean _jpeg_empty_output_buffer(j_compress_ptr jpeg);
Expand Down Expand Up @@ -78,6 +79,7 @@ void us_cpu_encoder_compress(const us_frame_s *src, us_frame_s *dest, unsigned q
WRITE_SCANLINES(V4L2_PIX_FMT_YUYV, _jpeg_write_scanlines_yuyv);
WRITE_SCANLINES(V4L2_PIX_FMT_UYVY, _jpeg_write_scanlines_uyvy);
WRITE_SCANLINES(V4L2_PIX_FMT_RGB565, _jpeg_write_scanlines_rgb565);
WRITE_SCANLINES(V4L2_PIX_FMT_BGR24, _jpeg_write_scanlines_bgr24);
WRITE_SCANLINES(V4L2_PIX_FMT_RGB24, _jpeg_write_scanlines_rgb24);
default: assert(0 && "Unsupported input format for CPU encoder");
}
Expand Down Expand Up @@ -220,6 +222,32 @@ static void _jpeg_write_scanlines_rgb565(struct jpeg_compress_struct *jpeg, cons
free(line_buf);
}

// Feeds BGR24 frame data to libjpeg, which expects RGB channel order:
// each scanline is staged in a temporary buffer with the blue and red
// channels exchanged before being passed to jpeg_write_scanlines().
// Rows in the source frame may carry trailing padding bytes, which are
// skipped when advancing to the next scanline.
static void _jpeg_write_scanlines_bgr24(struct jpeg_compress_struct *jpeg, const us_frame_s *frame) {
	const unsigned row_bytes = frame->width * 3; // 3 bytes per BGR pixel
	const unsigned padding = us_frame_get_padding(frame);

	uint8_t *rgb_row;
	US_CALLOC(rgb_row, row_bytes);

	const uint8_t *src = frame->data;
	while (jpeg->next_scanline < frame->height) {
		// BGR -> RGB: exchange the first and third channel of every pixel
		for (unsigned offset = 0; offset < row_bytes; offset += 3) {
			rgb_row[offset] = src[offset + 2];
			rgb_row[offset + 1] = src[offset + 1];
			rgb_row[offset + 2] = src[offset];
		}

		JSAMPROW rows[1] = {rgb_row};
		jpeg_write_scanlines(jpeg, rows, 1);

		src += row_bytes + padding;
	}

	free(rgb_row);
}

static void _jpeg_write_scanlines_rgb24(struct jpeg_compress_struct *jpeg, const us_frame_s *frame) {
const unsigned padding = us_frame_get_padding(frame);
uint8_t *data = frame->data;
Expand Down
Loading