author     Tomi Valkeinen <tomi.valkeinen@ideasonboard.com>   2021-10-06 10:26:00 +0300
committer  Tomi Valkeinen <tomi.valkeinen@ideasonboard.com>   2021-10-06 10:44:26 +0300
commit     2b1a8f48f3a414e565cefb809f3e6a7c6aa5f8a7 (patch)
tree       3f274b92d00754e6a5980f07b2c9cefcbf2cae53 /v4l2++
parent     f691ed65d6bcfff0abbc2d7ce58e560af3ee63dc (diff)
Split V4L2 code into separate libs

Create the v4l2++ library and pyv4l2, which are independent of the rest of kms++.

Signed-off-by: Tomi Valkeinen <tomi.valkeinen@ideasonboard.com>
Diffstat (limited to 'v4l2++')
-rw-r--r--   v4l2++/inc/v4l2++/helpers.h        60
-rw-r--r--   v4l2++/inc/v4l2++/pixelformats.h  111
-rw-r--r--   v4l2++/inc/v4l2++/videodevice.h   127
-rw-r--r--   v4l2++/meson.build                 32
-rw-r--r--   v4l2++/src/helpers.cpp             16
-rw-r--r--   v4l2++/src/pixelformats.cpp       301
-rw-r--r--   v4l2++/src/videodevice.cpp        653
7 files changed, 1300 insertions(+), 0 deletions(-)
diff --git a/v4l2++/inc/v4l2++/helpers.h b/v4l2++/inc/v4l2++/helpers.h
new file mode 100644
index 0000000..b5c3284
--- /dev/null
+++ b/v4l2++/inc/v4l2++/helpers.h
@@ -0,0 +1,60 @@
+#pragma once
+
+#include <fmt/format.h>
+
+#define ARRAY_SIZE(arr) (sizeof(arr) / sizeof((arr)[0]))
+
+#define unlikely(x) __builtin_expect(!!(x), 0)
+
+/* __STRING(x) is a glibcism (i.e. not standard), which happens to also
+ * be available in uClibc. However, musl does not define it. Do it here.
+ */
+#ifndef __STRING
+#define __STRING(x) #x
+#endif
+
+#define ASSERT(x) \
+ if (unlikely(!(x))) { \
+ fprintf(stderr, "%s:%d: %s: ASSERT(%s) failed\n", __FILE__, __LINE__, __PRETTY_FUNCTION__, __STRING(x)); \
+ abort(); \
+ }
+
+#define FAIL(fmt, ...) \
+ do { \
+ fprintf(stderr, "%s:%d: %s:\n" fmt "\n", __FILE__, __LINE__, __PRETTY_FUNCTION__, ##__VA_ARGS__); \
+ abort(); \
+ } while (0)
+
+#define FAIL_IF(x, format, ...) \
+ if (unlikely(x)) { \
+ fprintf(stderr, "%s:%d: %s:\n" format "\n", __FILE__, __LINE__, __PRETTY_FUNCTION__, ##__VA_ARGS__); \
+ abort(); \
+ }
+
+#define EXIT(fmt, ...) \
+ do { \
+ fprintf(stderr, fmt "\n", ##__VA_ARGS__); \
+ exit(-1); \
+ } while (0)
+
+#define EXIT_IF(x, fmt, ...) \
+ if (unlikely(x)) { \
+ fprintf(stderr, fmt "\n", ##__VA_ARGS__); \
+ exit(-1); \
+ }
+
+void __my_throw(const char* file, int line, const char* funcname, const char* cond, fmt::string_view format, fmt::format_args args);
+
+template<typename S, typename... Args>
+void _my_throw(const char* file, int line, const char* funcname, const char* cond, const S& format, Args&&... args)
+{
+ __my_throw(file, line, funcname, cond, format, fmt::make_format_args(args...));
+}
+
+#define THROW(format, ...) \
+ _my_throw(__FILE__, __LINE__, __PRETTY_FUNCTION__, nullptr, format, ##__VA_ARGS__);
+
+#define THROW_IF(x, format, ...) \
+ if (unlikely(x)) { \
+ _my_throw(__FILE__, __LINE__, __PRETTY_FUNCTION__, #x, format, ##__VA_ARGS__); \
+ }
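
A minimal usage sketch for these helpers (not part of the patch; the function names, paths and messages are illustrative). Note that FAIL/FAIL_IF take printf-style format strings, while THROW/THROW_IF go through libfmt:

#include <v4l2++/helpers.h>
#include <cstdio>
#include <cstdlib>
#include <fcntl.h>

static int open_or_abort(const char* path)
{
    int fd = open(path, O_RDWR);
    // FAIL_IF/FAIL use printf-style formats and abort() on failure.
    FAIL_IF(fd < 0, "could not open %s", path);
    return fd;
}

static void check_index(unsigned idx, unsigned count)
{
    // THROW_IF/THROW format the message with libfmt and raise
    // std::runtime_error, after printing file, line, function and
    // the failed condition to stderr.
    THROW_IF(idx >= count, "index {} out of range (count {})", idx, count);
}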
diff --git a/v4l2++/inc/v4l2++/pixelformats.h b/v4l2++/inc/v4l2++/pixelformats.h
new file mode 100644
index 0000000..609ff4f
--- /dev/null
+++ b/v4l2++/inc/v4l2++/pixelformats.h
@@ -0,0 +1,111 @@
+#pragma once
+
+#include <cstdint>
+#include <string>
+#include <stdexcept>
+
+namespace v4l2
+{
+
+constexpr uint32_t MakeFourCC(const char* fourcc)
+{
+ return fourcc[0] | (fourcc[1] << 8) | (fourcc[2] << 16) | (fourcc[3] << 24);
+}
+
+enum class PixelFormat : uint32_t {
+ Undefined = 0,
+
+ NV12 = MakeFourCC("NV12"),
+ NV21 = MakeFourCC("NV21"),
+ NV16 = MakeFourCC("NV16"),
+ NV61 = MakeFourCC("NV61"),
+
+ YUV420 = MakeFourCC("YU12"),
+ YVU420 = MakeFourCC("YV12"),
+ YUV422 = MakeFourCC("YU16"),
+ YVU422 = MakeFourCC("YV16"),
+ YUV444 = MakeFourCC("YU24"),
+ YVU444 = MakeFourCC("YV24"),
+
+ UYVY = MakeFourCC("UYVY"),
+ YUYV = MakeFourCC("YUYV"),
+ YVYU = MakeFourCC("YVYU"),
+ VYUY = MakeFourCC("VYUY"),
+
+ XRGB8888 = MakeFourCC("XR24"),
+ XBGR8888 = MakeFourCC("XB24"),
+ RGBX8888 = MakeFourCC("RX24"),
+ BGRX8888 = MakeFourCC("BX24"),
+
+ ARGB8888 = MakeFourCC("AR24"),
+ ABGR8888 = MakeFourCC("AB24"),
+ RGBA8888 = MakeFourCC("RA24"),
+ BGRA8888 = MakeFourCC("BA24"),
+
+ RGB888 = MakeFourCC("RG24"),
+ BGR888 = MakeFourCC("BG24"),
+
+ RGB332 = MakeFourCC("RGB8"),
+
+ RGB565 = MakeFourCC("RG16"),
+ BGR565 = MakeFourCC("BG16"),
+
+ XRGB4444 = MakeFourCC("XR12"),
+ XRGB1555 = MakeFourCC("XR15"),
+
+ ARGB4444 = MakeFourCC("AR12"),
+ ARGB1555 = MakeFourCC("AR15"),
+
+ XRGB2101010 = MakeFourCC("XR30"),
+ XBGR2101010 = MakeFourCC("XB30"),
+ RGBX1010102 = MakeFourCC("RX30"),
+ BGRX1010102 = MakeFourCC("BX30"),
+
+ ARGB2101010 = MakeFourCC("AR30"),
+ ABGR2101010 = MakeFourCC("AB30"),
+ RGBA1010102 = MakeFourCC("RA30"),
+ BGRA1010102 = MakeFourCC("BA30"),
+
+ SBGGR12 = MakeFourCC("BG12"),
+ SRGGB12 = MakeFourCC("RG12"),
+
+ META_8 = MakeFourCC("ME08"),
+ META_16 = MakeFourCC("ME16"),
+};
+
+static inline PixelFormat FourCCToPixelFormat(const std::string& fourcc)
+{
+ return (PixelFormat)MakeFourCC(fourcc.c_str());
+}
+
+static inline std::string PixelFormatToFourCC(PixelFormat f)
+{
+ char buf[5] = { (char)(((uint32_t)f >> 0) & 0xff),
+ (char)(((uint32_t)f >> 8) & 0xff),
+ (char)(((uint32_t)f >> 16) & 0xff),
+ (char)(((uint32_t)f >> 24) & 0xff),
+ 0 };
+ return std::string(buf);
+}
+
+enum class PixelColorType {
+ RGB,
+ YUV,
+ RAW,
+};
+
+struct PixelFormatPlaneInfo {
+ uint8_t bitspp;
+ uint8_t xsub;
+ uint8_t ysub;
+};
+
+struct PixelFormatInfo {
+ PixelColorType type;
+ uint8_t num_planes;
+ struct PixelFormatPlaneInfo planes[4];
+};
+
+const struct PixelFormatInfo& get_pixel_format_info(PixelFormat format);
+
+} // namespace v4l2
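
A small sketch of the intended use of the FourCC helpers and the format table (illustrative, not part of the patch):

#include <v4l2++/pixelformats.h>
#include <cstdio>

int main()
{
    using namespace v4l2;

    // Round-trip between a FourCC string and the PixelFormat enum.
    PixelFormat fmt = FourCCToPixelFormat("NV12");
    printf("fourcc: %s\n", PixelFormatToFourCC(fmt).c_str());

    // NV12: full-resolution Y plane plus a 2x2-subsampled interleaved CbCr plane.
    const PixelFormatInfo& info = get_pixel_format_info(fmt);
    printf("planes: %u\n", (unsigned)info.num_planes);
    printf("plane 1: %u bpp, %ux%u subsampling\n",
           (unsigned)info.planes[1].bitspp,
           (unsigned)info.planes[1].xsub,
           (unsigned)info.planes[1].ysub);
}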
diff --git a/v4l2++/inc/v4l2++/videodevice.h b/v4l2++/inc/v4l2++/videodevice.h
new file mode 100644
index 0000000..bdb290e
--- /dev/null
+++ b/v4l2++/inc/v4l2++/videodevice.h
@@ -0,0 +1,127 @@
+#pragma once
+
+#include <string>
+#include <memory>
+#include <vector>
+#include <v4l2++/pixelformats.h>
+
+namespace v4l2
+{
+
+class VideoStreamer;
+class MetaStreamer;
+
+enum class VideoMemoryType
+{
+ MMAP,
+ DMABUF,
+};
+
+class VideoBuffer
+{
+public:
+ VideoMemoryType m_mem_type;
+ uint32_t m_index;
+ uint32_t m_length;
+ int m_fd;
+ uint32_t m_offset;
+ PixelFormat m_format;
+};
+
+class VideoDevice
+{
+public:
+ struct VideoFrameSize {
+ uint32_t min_w, max_w, step_w;
+ uint32_t min_h, max_h, step_h;
+ };
+
+ VideoDevice(const std::string& dev);
+ VideoDevice(int fd);
+ ~VideoDevice();
+
+ VideoDevice(const VideoDevice& other) = delete;
+ VideoDevice& operator=(const VideoDevice& other) = delete;
+
+ VideoStreamer* get_capture_streamer();
+ VideoStreamer* get_output_streamer();
+ MetaStreamer* get_meta_capture_streamer();
+
+ std::vector<std::tuple<uint32_t, uint32_t>> get_discrete_frame_sizes(PixelFormat fmt);
+ VideoFrameSize get_frame_sizes(PixelFormat fmt);
+
+ int fd() const { return m_fd; }
+ bool has_capture() const { return m_has_capture; }
+ bool has_output() const { return m_has_output; }
+ bool has_m2m() const { return m_has_m2m; }
+
+ static std::vector<std::string> get_capture_devices();
+ static std::vector<std::string> get_m2m_devices();
+
+private:
+ int m_fd;
+
+ bool m_has_capture = false;
+ bool m_has_mplane_capture = false;
+
+ bool m_has_output = false;
+ bool m_has_mplane_output = false;
+
+ bool m_has_m2m = false;
+ bool m_has_mplane_m2m = false;
+
+ bool m_has_meta_capture = false;
+
+ std::unique_ptr<VideoStreamer> m_capture_streamer;
+ std::unique_ptr<VideoStreamer> m_output_streamer;
+ std::unique_ptr<MetaStreamer> m_meta_capture_streamer;
+};
+
+class VideoStreamer
+{
+public:
+ enum class StreamerType {
+ CaptureSingle,
+ CaptureMulti,
+ OutputSingle,
+ OutputMulti,
+ CaptureMeta,
+ OutputMeta,
+ };
+
+ VideoStreamer(int fd, StreamerType type);
+ virtual ~VideoStreamer() { }
+
+ std::vector<std::string> get_ports();
+ void set_port(uint32_t index);
+
+ std::vector<PixelFormat> get_formats();
+ int get_format(PixelFormat& fmt, uint32_t& width, uint32_t& height);
+ void set_format(PixelFormat fmt, uint32_t width, uint32_t height);
+ void get_selection(uint32_t& left, uint32_t& top, uint32_t& width, uint32_t& height);
+ void set_selection(uint32_t& left, uint32_t& top, uint32_t& width, uint32_t& height);
+ void set_queue_size(uint32_t queue_size, VideoMemoryType mem_type);
+ void queue(VideoBuffer& fb);
+ VideoBuffer dequeue();
+ void stream_on();
+ void stream_off();
+
+ int fd() const { return m_fd; }
+
+protected:
+ int m_fd;
+ StreamerType m_type;
+ VideoMemoryType m_mem_type;
+ std::vector<bool> m_fbs;
+};
+
+
+class MetaStreamer : public VideoStreamer
+{
+public:
+ MetaStreamer(int fd, VideoStreamer::StreamerType type);
+
+ void set_format(PixelFormat fmt, uint32_t size);
+};
+
+}
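
A rough capture-loop sketch against this API, assuming an MMAP-capable capture device; the device path, pixel format, queue depth and frame count are placeholders, not part of the patch:

#include <v4l2++/videodevice.h>
#include <poll.h>

void capture_frames()
{
    using namespace v4l2;

    VideoDevice dev("/dev/video0");
    VideoStreamer* cap = dev.get_capture_streamer();

    cap->set_format(PixelFormat::YUYV, 640, 480);
    cap->set_queue_size(4, VideoMemoryType::MMAP);

    // Pre-queue all buffers; queue() picks a free buffer index itself.
    for (unsigned i = 0; i < 4; ++i) {
        VideoBuffer buf{};
        buf.m_mem_type = VideoMemoryType::MMAP;
        cap->queue(buf);
    }

    cap->stream_on();

    for (unsigned i = 0; i < 10; ++i) {
        // The device fd is opened O_NONBLOCK, so wait for a frame first;
        // dequeue() throws std::system_error if no buffer is ready.
        pollfd pfd = { dev.fd(), POLLIN, 0 };
        poll(&pfd, 1, -1);

        VideoBuffer buf = cap->dequeue();
        // ... mmap() dev.fd() at buf.m_offset for buf.m_length bytes and process ...
        cap->queue(buf);
    }

    cap->stream_off();
}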
diff --git a/v4l2++/meson.build b/v4l2++/meson.build
new file mode 100644
index 0000000..2f425d6
--- /dev/null
+++ b/v4l2++/meson.build
@@ -0,0 +1,32 @@
+libv4l2xx_sources = files([
+ 'src/videodevice.cpp',
+ 'src/pixelformats.cpp',
+ 'src/helpers.cpp',
+])
+
+public_headers = [
+ 'inc/v4l2++/videodevice.h',
+ 'inc/v4l2++/pixelformats.h',
+ 'inc/v4l2++/helpers.h',
+]
+
+private_includes = include_directories('src', 'inc')
+public_includes = include_directories('inc')
+
+libv4l2xx_deps = [ libfmt_dep ]
+
+libv4l2xx = library('v4l2++',
+ libv4l2xx_sources,
+ install : true,
+ include_directories : [ private_includes ],
+ dependencies : libv4l2xx_deps,
+ version : meson.project_version())
+
+
+libv4l2xx_dep = declare_dependency(include_directories : public_includes,
+ link_with : libv4l2xx)
+
+install_headers(public_headers, subdir : 'v4l2++')
+
+pkg = import('pkgconfig')
+pkg.generate(libv4l2xx)
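
For context, a consumer could pull the library in roughly like this: in-tree through the libv4l2xx_dep object declared above, out-of-tree presumably through the generated pkg-config file. The 'myapp' target and source file are placeholders:

# In-tree (same Meson project):
#   executable('myapp', 'main.cpp', dependencies : [ libv4l2xx_dep ])

# Out-of-tree, via the installed v4l2++.pc:
v4l2xx_dep = dependency('v4l2++')
executable('myapp', 'main.cpp', dependencies : [ v4l2xx_dep ])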
diff --git a/v4l2++/src/helpers.cpp b/v4l2++/src/helpers.cpp
new file mode 100644
index 0000000..db80408
--- /dev/null
+++ b/v4l2++/src/helpers.cpp
@@ -0,0 +1,16 @@
+#include <v4l2++/helpers.h>
+
+void __my_throw(const char* file, int line, const char *funcname, const char *cond, fmt::string_view format, fmt::format_args args)
+{
+ std::string str = fmt::vformat(format, args);
+
+ fmt::print(stderr, "{}:{}: {}:\n{}", file, line, funcname, str);
+ if (cond)
+ fmt::print(stderr, " ({})\n", cond);
+ else
+ fmt::print("\n");
+
+ fflush(stderr);
+
+ throw std::runtime_error(str);
+}
diff --git a/v4l2++/src/pixelformats.cpp b/v4l2++/src/pixelformats.cpp
new file mode 100644
index 0000000..1c8453f
--- /dev/null
+++ b/v4l2++/src/pixelformats.cpp
@@ -0,0 +1,301 @@
+#include <map>
+
+#include <v4l2++/pixelformats.h>
+
+using namespace std;
+
+namespace v4l2
+{
+static const map<PixelFormat, PixelFormatInfo> format_info_array = {
+ /* YUV packed */
+ { PixelFormat::UYVY, {
+ PixelColorType::YUV,
+ 1,
+ { { 16, 2, 1 } },
+ } },
+ { PixelFormat::YUYV, {
+ PixelColorType::YUV,
+ 1,
+ { { 16, 2, 1 } },
+ } },
+ { PixelFormat::YVYU, {
+ PixelColorType::YUV,
+ 1,
+ { { 16, 2, 1 } },
+ } },
+ { PixelFormat::VYUY, {
+ PixelColorType::YUV,
+ 1,
+ { { 16, 2, 1 } },
+ } },
+ /* YUV semi-planar */
+ { PixelFormat::NV12, {
+ PixelColorType::YUV,
+ 2,
+ { {
+ 8,
+ 1,
+ 1,
+ },
+ { 8, 2, 2 } },
+ } },
+ { PixelFormat::NV21, {
+ PixelColorType::YUV,
+ 2,
+ { {
+ 8,
+ 1,
+ 1,
+ },
+ { 8, 2, 2 } },
+ } },
+ { PixelFormat::NV16, {
+ PixelColorType::YUV,
+ 2,
+ { {
+ 8,
+ 1,
+ 1,
+ },
+ { 8, 2, 1 } },
+ } },
+ { PixelFormat::NV61, {
+ PixelColorType::YUV,
+ 2,
+ { {
+ 8,
+ 1,
+ 1,
+ },
+ { 8, 2, 1 } },
+ } },
+ /* YUV planar */
+ { PixelFormat::YUV420, {
+ PixelColorType::YUV,
+ 3,
+ { {
+ 8,
+ 1,
+ 1,
+ },
+ { 8, 2, 2 },
+ { 8, 2, 2 } },
+ } },
+ { PixelFormat::YVU420, {
+ PixelColorType::YUV,
+ 3,
+ { {
+ 8,
+ 1,
+ 1,
+ },
+ { 8, 2, 2 },
+ { 8, 2, 2 } },
+ } },
+ { PixelFormat::YUV422, {
+ PixelColorType::YUV,
+ 3,
+ { {
+ 8,
+ 1,
+ 1,
+ },
+ { 8, 2, 1 },
+ { 8, 2, 1 } },
+ } },
+ { PixelFormat::YVU422, {
+ PixelColorType::YUV,
+ 3,
+ { {
+ 8,
+ 1,
+ 1,
+ },
+ { 8, 2, 1 },
+ { 8, 2, 1 } },
+ } },
+ { PixelFormat::YUV444, {
+ PixelColorType::YUV,
+ 3,
+ { {
+ 8,
+ 1,
+ 1,
+ },
+ { 8, 1, 1 },
+ { 8, 1, 1 } },
+ } },
+ { PixelFormat::YVU444, {
+ PixelColorType::YUV,
+ 3,
+ { {
+ 8,
+ 1,
+ 1,
+ },
+ { 8, 1, 1 },
+ { 8, 1, 1 } },
+ } },
+ /* RGB8 */
+ { PixelFormat::RGB332, {
+ PixelColorType::RGB,
+ 1,
+ { { 8, 1, 1 } },
+ } },
+ /* RGB16 */
+ { PixelFormat::RGB565, {
+ PixelColorType::RGB,
+ 1,
+ { { 16, 1, 1 } },
+ } },
+ { PixelFormat::BGR565, {
+ PixelColorType::RGB,
+ 1,
+ { { 16, 1, 1 } },
+ } },
+ { PixelFormat::XRGB4444, {
+ PixelColorType::RGB,
+ 1,
+ { { 16, 1, 1 } },
+ } },
+ { PixelFormat::XRGB1555, {
+ PixelColorType::RGB,
+ 1,
+ { { 16, 1, 1 } },
+ } },
+ { PixelFormat::ARGB4444, {
+ PixelColorType::RGB,
+ 1,
+ { { 16, 1, 1 } },
+ } },
+ { PixelFormat::ARGB1555, {
+ PixelColorType::RGB,
+ 1,
+ { { 16, 1, 1 } },
+ } },
+ /* RGB24 */
+ { PixelFormat::RGB888, {
+ PixelColorType::RGB,
+ 1,
+ { { 24, 1, 1 } },
+ } },
+ { PixelFormat::BGR888, {
+ PixelColorType::RGB,
+ 1,
+ { { 24, 1, 1 } },
+ } },
+ /* RGB32 */
+ { PixelFormat::XRGB8888, {
+ PixelColorType::RGB,
+ 1,
+ { { 32, 1, 1 } },
+ } },
+ { PixelFormat::XBGR8888, {
+ PixelColorType::RGB,
+ 1,
+ { { 32, 1, 1 } },
+ } },
+ { PixelFormat::RGBX8888, {
+ PixelColorType::RGB,
+ 1,
+ { { 32, 1, 1 } },
+ } },
+ { PixelFormat::BGRX8888, {
+ PixelColorType::RGB,
+ 1,
+ { { 32, 1, 1 } },
+ } },
+
+ { PixelFormat::ARGB8888, {
+ PixelColorType::RGB,
+ 1,
+ { { 32, 1, 1 } },
+ } },
+ { PixelFormat::ABGR8888, {
+ PixelColorType::RGB,
+ 1,
+ { { 32, 1, 1 } },
+ } },
+ { PixelFormat::RGBA8888, {
+ PixelColorType::RGB,
+ 1,
+ { { 32, 1, 1 } },
+ } },
+ { PixelFormat::BGRA8888, {
+ PixelColorType::RGB,
+ 1,
+ { { 32, 1, 1 } },
+ } },
+
+ { PixelFormat::XRGB2101010, {
+ PixelColorType::RGB,
+ 1,
+ { { 32, 1, 1 } },
+ } },
+ { PixelFormat::XBGR2101010, {
+ PixelColorType::RGB,
+ 1,
+ { { 32, 1, 1 } },
+ } },
+ { PixelFormat::RGBX1010102, {
+ PixelColorType::RGB,
+ 1,
+ { { 32, 1, 1 } },
+ } },
+ { PixelFormat::BGRX1010102, {
+ PixelColorType::RGB,
+ 1,
+ { { 32, 1, 1 } },
+ } },
+
+ { PixelFormat::ARGB2101010, {
+ PixelColorType::RGB,
+ 1,
+ { { 32, 1, 1 } },
+ } },
+ { PixelFormat::ABGR2101010, {
+ PixelColorType::RGB,
+ 1,
+ { { 32, 1, 1 } },
+ } },
+ { PixelFormat::RGBA1010102, {
+ PixelColorType::RGB,
+ 1,
+ { { 32, 1, 1 } },
+ } },
+ { PixelFormat::BGRA1010102, {
+ PixelColorType::RGB,
+ 1,
+ { { 32, 1, 1 } },
+ } },
+ { PixelFormat::SBGGR12, {
+ PixelColorType::RAW,
+ 1,
+ { { 16, 1, 1 } },
+ } },
+ { PixelFormat::SRGGB12, {
+ PixelColorType::RAW,
+ 1,
+ { { 16, 1, 1 } },
+ } },
+ { PixelFormat::META_8, {
+ PixelColorType::RGB,
+ 1,
+ { { 8, 1, 1 } },
+ } },
+ { PixelFormat::META_16, {
+ PixelColorType::RGB,
+ 1,
+ { { 16, 1, 1 } },
+ } },
+};
+
+const struct PixelFormatInfo& get_pixel_format_info(PixelFormat format)
+{
+ if (!format_info_array.count(format))
+ throw invalid_argument("get_pixel_format_info: Unsupported pixelformat");
+
+ return format_info_array.at(format);
+}
+
+}
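
As an illustration of how this table is meant to be consumed, a sketch that mirrors the stride/size arithmetic used by v4l2_set_format() in videodevice.cpp below; the resolution is an arbitrary example:

#include <v4l2++/pixelformats.h>
#include <cstdio>

static void print_plane_sizes(v4l2::PixelFormat fmt, uint32_t width, uint32_t height)
{
    const v4l2::PixelFormatInfo& pfi = v4l2::get_pixel_format_info(fmt);

    for (unsigned i = 0; i < pfi.num_planes; ++i) {
        const v4l2::PixelFormatPlaneInfo& p = pfi.planes[i];

        // Same arithmetic as v4l2_set_format(): stride from bitspp,
        // plane size reduced by the vertical subsampling factor.
        uint32_t stride = width * p.bitspp / 8;
        uint32_t size = stride * height / p.ysub;

        printf("plane %u: %u bytes/line, %u bytes\n", i, stride, size);
    }
}

// For PixelFormat::NV12 at 1920x1080 this yields 1920 / 2073600 for the
// Y plane and 1920 / 1036800 for the interleaved CbCr plane.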
diff --git a/v4l2++/src/videodevice.cpp b/v4l2++/src/videodevice.cpp
new file mode 100644
index 0000000..5ab7099
--- /dev/null
+++ b/v4l2++/src/videodevice.cpp
@@ -0,0 +1,653 @@
+#include <string>
+
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <linux/videodev2.h>
+#include <sys/ioctl.h>
+#include <unistd.h>
+#include <system_error>
+
+#include <v4l2++/videodevice.h>
+#include <v4l2++/helpers.h>
+
+using namespace std;
+using namespace v4l2;
+
+/*
+ * V4L2 and DRM differ in their interpretation of the NV12 format.
+ *
+ * In V4L2, NV12 means co-located Y and UV planes in a single-plane buffer.
+ * In DRM, NV12 means separate Y and UV planes in a dual-plane buffer,
+ * which V4L2 calls NM12.
+ *
+ * Since these are hybrid DRM/V4L2 user space helpers, we translate
+ * DRM NV12 to V4L2 NM12 and back as needed to keep the data view
+ * consistent.
+ */
+
+static v4l2_memory get_mem_type(VideoMemoryType type)
+{
+ switch (type) {
+ case VideoMemoryType::MMAP:
+ return V4L2_MEMORY_MMAP;
+ case VideoMemoryType::DMABUF:
+ return V4L2_MEMORY_DMABUF;
+ default:
+ FAIL("Bad VideoMemoryType");
+ }
+}
+
+/* V4L2 helper funcs */
+static vector<PixelFormat> v4l2_get_formats(int fd, uint32_t buf_type)
+{
+ vector<PixelFormat> v;
+
+ v4l2_fmtdesc desc{};
+ desc.type = buf_type;
+
+ while (ioctl(fd, VIDIOC_ENUM_FMT, &desc) == 0) {
+ if (desc.pixelformat == V4L2_PIX_FMT_NV12M)
+ v.push_back(PixelFormat::NV12);
+ else if (desc.pixelformat != V4L2_PIX_FMT_NV12)
+ v.push_back((PixelFormat)desc.pixelformat);
+
+ desc.index++;
+ }
+
+ return v;
+}
+
+static int v4l2_get_format(int fd, uint32_t buf_type, PixelFormat& fmt, uint32_t& width, uint32_t& height)
+{
+ int r;
+
+ v4l2_format v4lfmt{};
+
+ v4lfmt.type = buf_type;
+ r = ioctl(fd, VIDIOC_G_FMT, &v4lfmt);
+ ASSERT(r == 0);
+
+ bool mplane = buf_type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE || buf_type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
+
+ FAIL_IF(mplane, "mplane not supported");
+
+ fmt = (PixelFormat)v4lfmt.fmt.pix.pixelformat;
+ width = v4lfmt.fmt.pix.width;
+ height = v4lfmt.fmt.pix.height;
+
+ return 0;
+}
+
+static void v4l2_set_format(int fd, PixelFormat fmt, uint32_t width, uint32_t height, uint32_t buf_type)
+{
+ int r;
+
+ v4l2_format v4lfmt{};
+
+ v4lfmt.type = buf_type;
+ r = ioctl(fd, VIDIOC_G_FMT, &v4lfmt);
+ ASSERT(r == 0);
+
+ const PixelFormatInfo& pfi = get_pixel_format_info(fmt);
+
+ bool mplane = buf_type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE || buf_type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
+
+ if (mplane) {
+ v4l2_pix_format_mplane& mp = v4lfmt.fmt.pix_mp;
+ uint32_t used_fmt;
+
+ if (fmt == PixelFormat::NV12)
+ used_fmt = V4L2_PIX_FMT_NV12M;
+ else
+ used_fmt = (uint32_t)fmt;
+
+ mp.pixelformat = used_fmt;
+ mp.width = width;
+ mp.height = height;
+
+ mp.num_planes = pfi.num_planes;
+
+ for (unsigned i = 0; i < pfi.num_planes; ++i) {
+ const PixelFormatPlaneInfo& pfpi = pfi.planes[i];
+ v4l2_plane_pix_format& p = mp.plane_fmt[i];
+
+ p.bytesperline = width * pfpi.bitspp / 8;
+ p.sizeimage = p.bytesperline * height / pfpi.ysub;
+ }
+
+ r = ioctl(fd, VIDIOC_S_FMT, &v4lfmt);
+ ASSERT(r == 0);
+
+ ASSERT(mp.pixelformat == used_fmt);
+ ASSERT(mp.width == width);
+ ASSERT(mp.height == height);
+
+ ASSERT(mp.num_planes == pfi.num_planes);
+
+ for (unsigned i = 0; i < pfi.num_planes; ++i) {
+ const PixelFormatPlaneInfo& pfpi = pfi.planes[i];
+ v4l2_plane_pix_format& p = mp.plane_fmt[i];
+
+ ASSERT(p.bytesperline == width * pfpi.bitspp / 8);
+ ASSERT(p.sizeimage == p.bytesperline * height / pfpi.ysub);
+ }
+ } else {
+ ASSERT(pfi.num_planes == 1);
+
+ v4lfmt.fmt.pix.pixelformat = (uint32_t)fmt;
+ v4lfmt.fmt.pix.width = width;
+ v4lfmt.fmt.pix.height = height;
+ v4lfmt.fmt.pix.bytesperline = width * pfi.planes[0].bitspp / 8;
+ v4lfmt.fmt.pix.field = V4L2_FIELD_NONE;
+
+ r = ioctl(fd, VIDIOC_S_FMT, &v4lfmt);
+ ASSERT(r == 0);
+
+ ASSERT(v4lfmt.fmt.pix.pixelformat == (uint32_t)fmt);
+ ASSERT(v4lfmt.fmt.pix.width == width);
+ ASSERT(v4lfmt.fmt.pix.height == height);
+ ASSERT(v4lfmt.fmt.pix.bytesperline == width * pfi.planes[0].bitspp / 8);
+ }
+}
+
+static void v4l2_get_selection(int fd, uint32_t& left, uint32_t& top, uint32_t& width, uint32_t& height, uint32_t buf_type)
+{
+ int r;
+ struct v4l2_selection selection = {};
+
+ if (buf_type == V4L2_BUF_TYPE_VIDEO_OUTPUT ||
+ buf_type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
+ selection.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
+ selection.target = V4L2_SEL_TGT_CROP;
+ } else if (buf_type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
+ buf_type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
+ selection.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ selection.target = V4L2_SEL_TGT_COMPOSE;
+ } else {
+ FAIL("buf_type (%d) is not valid\n", buf_type);
+ }
+
+ r = ioctl(fd, VIDIOC_G_SELECTION, &selection);
+ ASSERT(r == 0);
+
+ left = selection.r.left;
+ top = selection.r.top;
+ width = selection.r.width;
+ height = selection.r.height;
+}
+
+static void v4l2_set_selection(int fd, uint32_t& left, uint32_t& top, uint32_t& width, uint32_t& height, uint32_t buf_type)
+{
+ int r;
+ struct v4l2_selection selection = {};
+
+ if (buf_type == V4L2_BUF_TYPE_VIDEO_OUTPUT ||
+ buf_type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
+ selection.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
+ selection.target = V4L2_SEL_TGT_CROP;
+ } else if (buf_type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
+ buf_type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
+ selection.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ selection.target = V4L2_SEL_TGT_COMPOSE;
+ } else {
+ FAIL("buf_type (%d) is not valid\n", buf_type);
+ }
+
+ selection.r.left = left;
+ selection.r.top = top;
+ selection.r.width = width;
+ selection.r.height = height;
+
+ r = ioctl(fd, VIDIOC_S_SELECTION, &selection);
+ ASSERT(r == 0);
+
+ left = selection.r.left;
+ top = selection.r.top;
+ width = selection.r.width;
+ height = selection.r.height;
+}
+
+static void v4l2_request_bufs(int fd, uint32_t queue_size, uint32_t buf_type, uint32_t mem_type)
+{
+ v4l2_requestbuffers v4lreqbuf{};
+ v4lreqbuf.type = buf_type;
+ v4lreqbuf.memory = mem_type;
+ v4lreqbuf.count = queue_size;
+ int r = ioctl(fd, VIDIOC_REQBUFS, &v4lreqbuf);
+ FAIL_IF(r != 0, "VIDIOC_REQBUFS failed: %d", errno);
+ ASSERT(v4lreqbuf.count == queue_size);
+}
+
+static void v4l2_queue(int fd, VideoBuffer& fb, uint32_t buf_type)
+{
+ v4l2_buffer buf{};
+ buf.type = buf_type;
+ buf.memory = get_mem_type(fb.m_mem_type);
+ buf.index = fb.m_index;
+
+ bool mplane = buf_type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE || buf_type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
+
+ if (mplane) {
+ ASSERT(false);
+ /*
+ const PixelFormatInfo& pfi = get_pixel_format_info(fb->m_format);
+
+ buf.length = pfi.num_planes;
+
+ v4l2_plane planes[4]{};
+ buf.m.planes = planes;
+
+ for (unsigned i = 0; i < pfi.num_planes; ++i) {
+ planes[i].m.fd = fb->prime_fd(i);
+ planes[i].bytesused = fb->size(i);
+ planes[i].length = fb->size(i);
+ }
+
+ int r = ioctl(fd, VIDIOC_QBUF, &buf);
+ ASSERT(r == 0);
+ */
+ } else {
+ if (fb.m_mem_type == VideoMemoryType::DMABUF)
+ buf.m.fd = fb.m_fd;
+
+ int r = ioctl(fd, VIDIOC_QBUF, &buf);
+ ASSERT(r == 0);
+ }
+}
+
+static uint32_t v4l2_dequeue(int fd, VideoBuffer& fb, uint32_t buf_type)
+{
+ v4l2_buffer buf{};
+ buf.type = buf_type;
+ buf.memory = get_mem_type(fb.m_mem_type);
+
+ // V4L2 crashes if planes are not set
+ v4l2_plane planes[4]{};
+ buf.m.planes = planes;
+ buf.length = 4;
+
+ int r = ioctl(fd, VIDIOC_DQBUF, &buf);
+ if (r)
+ throw system_error(errno, generic_category());
+
+ fb.m_index = buf.index;
+ fb.m_length = buf.length;
+
+ if (fb.m_mem_type == VideoMemoryType::DMABUF)
+ fb.m_fd = buf.m.fd;
+ else
+ fb.m_offset = buf.m.offset;
+
+ return buf.index;
+}
+
+VideoDevice::VideoDevice(const string& dev)
+ : VideoDevice(::open(dev.c_str(), O_RDWR | O_NONBLOCK))
+{
+}
+
+VideoDevice::VideoDevice(int fd)
+ : m_fd(fd)
+{
+ if (fd < 0)
+ throw runtime_error("bad fd");
+
+ struct v4l2_capability cap = {};
+ int r = ioctl(fd, VIDIOC_QUERYCAP, &cap);
+ ASSERT(r == 0);
+
+ if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) {
+ m_has_capture = true;
+ m_has_mplane_capture = true;
+ } else if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) {
+ m_has_capture = true;
+ m_has_mplane_capture = false;
+ }
+
+ if (cap.capabilities & V4L2_CAP_VIDEO_OUTPUT_MPLANE) {
+ m_has_output = true;
+ m_has_mplane_output = true;
+ } else if (cap.capabilities & V4L2_CAP_VIDEO_OUTPUT) {
+ m_has_output = true;
+ m_has_mplane_output = false;
+ }
+
+ if (cap.capabilities & V4L2_CAP_VIDEO_M2M_MPLANE) {
+ m_has_m2m = true;
+ m_has_capture = true;
+ m_has_output = true;
+ m_has_mplane_m2m = true;
+ m_has_mplane_capture = true;
+ m_has_mplane_output = true;
+ } else if (cap.capabilities & V4L2_CAP_VIDEO_M2M) {
+ m_has_m2m = true;
+ m_has_capture = true;
+ m_has_output = true;
+ m_has_mplane_m2m = false;
+ m_has_mplane_capture = false;
+ m_has_mplane_output = false;
+ }
+
+ if (cap.capabilities & V4L2_CAP_META_CAPTURE) {
+ m_has_meta_capture = true;
+ }
+}
+
+VideoDevice::~VideoDevice()
+{
+ ::close(m_fd);
+}
+
+VideoStreamer* VideoDevice::get_capture_streamer()
+{
+ ASSERT(m_has_capture);
+
+ if (!m_capture_streamer) {
+ auto type = m_has_mplane_capture ? VideoStreamer::StreamerType::CaptureMulti : VideoStreamer::StreamerType::CaptureSingle;
+ m_capture_streamer = std::unique_ptr<VideoStreamer>(new VideoStreamer(m_fd, type));
+ }
+
+ return m_capture_streamer.get();
+}
+
+VideoStreamer* VideoDevice::get_output_streamer()
+{
+ ASSERT(m_has_output);
+
+ if (!m_output_streamer) {
+ auto type = m_has_mplane_output ? VideoStreamer::StreamerType::OutputMulti : VideoStreamer::StreamerType::OutputSingle;
+ m_output_streamer = std::unique_ptr<VideoStreamer>(new VideoStreamer(m_fd, type));
+ }
+
+ return m_output_streamer.get();
+}
+
+MetaStreamer* VideoDevice::get_meta_capture_streamer()
+{
+ ASSERT(m_has_meta_capture);
+
+ if (!m_meta_capture_streamer)
+ m_meta_capture_streamer = make_unique<MetaStreamer>(m_fd, MetaStreamer::StreamerType::CaptureMeta);
+
+ return m_meta_capture_streamer.get();
+}
+
+vector<tuple<uint32_t, uint32_t>> VideoDevice::get_discrete_frame_sizes(PixelFormat fmt)
+{
+ vector<tuple<uint32_t, uint32_t>> v;
+
+ v4l2_frmsizeenum v4lfrms{};
+ v4lfrms.pixel_format = (uint32_t)fmt;
+
+ int r = ioctl(m_fd, VIDIOC_ENUM_FRAMESIZES, &v4lfrms);
+ ASSERT(r == 0);
+
+ FAIL_IF(v4lfrms.type != V4L2_FRMSIZE_TYPE_DISCRETE, "No discrete frame sizes");
+
+ while (ioctl(m_fd, VIDIOC_ENUM_FRAMESIZES, &v4lfrms) == 0) {
+ v.emplace_back(v4lfrms.discrete.width, v4lfrms.discrete.height);
+ v4lfrms.index++;
+ }
+
+ return v;
+}
+
+VideoDevice::VideoFrameSize VideoDevice::get_frame_sizes(PixelFormat fmt)
+{
+ v4l2_frmsizeenum v4lfrms{};
+ v4lfrms.pixel_format = (uint32_t)fmt;
+
+ int r = ioctl(m_fd, VIDIOC_ENUM_FRAMESIZES, &v4lfrms);
+ ASSERT(r == 0);
+
+ FAIL_IF(v4lfrms.type == V4L2_FRMSIZE_TYPE_DISCRETE, "No continuous frame sizes");
+
+ VideoFrameSize s;
+
+ s.min_w = v4lfrms.stepwise.min_width;
+ s.max_w = v4lfrms.stepwise.max_width;
+ s.step_w = v4lfrms.stepwise.step_width;
+
+ s.min_h = v4lfrms.stepwise.min_height;
+ s.max_h = v4lfrms.stepwise.max_height;
+ s.step_h = v4lfrms.stepwise.step_height;
+
+ return s;
+}
+
+vector<string> VideoDevice::get_capture_devices()
+{
+ vector<string> v;
+
+ for (int i = 0; i < 20; ++i) {
+ string name = "/dev/video" + to_string(i);
+
+ struct stat buffer;
+ if (stat(name.c_str(), &buffer) != 0)
+ continue;
+
+ try {
+ VideoDevice vid(name);
+
+ if (vid.has_capture() && !vid.has_m2m())
+ v.push_back(name);
+ } catch (...) {
+ }
+ }
+
+ return v;
+}
+
+vector<string> VideoDevice::get_m2m_devices()
+{
+ vector<string> v;
+
+ for (int i = 0; i < 20; ++i) {
+ string name = "/dev/video" + to_string(i);
+
+ struct stat buffer;
+ if (stat(name.c_str(), &buffer) != 0)
+ continue;
+
+ try {
+ VideoDevice vid(name);
+
+ if (vid.has_m2m())
+ v.push_back(name);
+ } catch (...) {
+ }
+ }
+
+ return v;
+}
+
+VideoStreamer::VideoStreamer(int fd, StreamerType type)
+ : m_fd(fd), m_type(type)
+{
+}
+
+std::vector<string> VideoStreamer::get_ports()
+{
+ vector<string> v;
+
+ switch (m_type) {
+ case StreamerType::CaptureSingle:
+ case StreamerType::CaptureMulti: {
+ struct v4l2_input input {
+ };
+
+ while (ioctl(m_fd, VIDIOC_ENUMINPUT, &input) == 0) {
+ v.push_back(string((char*)&input.name));
+ input.index++;
+ }
+
+ break;
+ }
+
+ case StreamerType::OutputSingle:
+ case StreamerType::OutputMulti: {
+ struct v4l2_output output {
+ };
+
+ while (ioctl(m_fd, VIDIOC_ENUMOUTPUT, &output) == 0) {
+ v.push_back(string((char*)&output.name));
+ output.index++;
+ }
+
+ break;
+ }
+
+ default:
+ FAIL("Bad StreamerType");
+ }
+
+ return v;
+}
+
+void VideoStreamer::set_port(uint32_t index)
+{
+ unsigned long req;
+
+ switch (m_type) {
+ case StreamerType::CaptureSingle:
+ case StreamerType::CaptureMulti:
+ req = VIDIOC_S_INPUT;
+ break;
+
+ case StreamerType::OutputSingle:
+ case StreamerType::OutputMulti:
+ req = VIDIOC_S_OUTPUT;
+ break;
+
+ default:
+ FAIL("Bad StreamerType");
+ }
+
+ int r = ioctl(m_fd, req, &index);
+ ASSERT(r == 0);
+}
+
+static v4l2_buf_type get_buf_type(VideoStreamer::StreamerType type)
+{
+ switch (type) {
+ case VideoStreamer::StreamerType::CaptureSingle:
+ return V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ case VideoStreamer::StreamerType::CaptureMulti:
+ return V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ case VideoStreamer::StreamerType::OutputSingle:
+ return V4L2_BUF_TYPE_VIDEO_OUTPUT;
+ case VideoStreamer::StreamerType::OutputMulti:
+ return V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
+ case MetaStreamer::StreamerType::CaptureMeta:
+ return V4L2_BUF_TYPE_META_CAPTURE;
+ case MetaStreamer::StreamerType::OutputMeta:
+ return (v4l2_buf_type)14; // XXX V4L2_BUF_TYPE_META_OUTPUT;
+ default:
+ FAIL("Bad StreamerType");
+ }
+}
+
+std::vector<PixelFormat> VideoStreamer::get_formats()
+{
+ return v4l2_get_formats(m_fd, get_buf_type(m_type));
+}
+
+int VideoStreamer::get_format(PixelFormat &fmt, uint32_t &width, uint32_t &height)
+{
+ return v4l2_get_format(m_fd, get_buf_type(m_type), fmt, width, height);
+}
+
+void VideoStreamer::set_format(PixelFormat fmt, uint32_t width, uint32_t height)
+{
+ v4l2_set_format(m_fd, fmt, width, height, get_buf_type(m_type));
+}
+
+void VideoStreamer::get_selection(uint32_t& left, uint32_t& top, uint32_t& width, uint32_t& height)
+{
+ v4l2_get_selection(m_fd, left, top, width, height, get_buf_type(m_type));
+}
+
+void VideoStreamer::set_selection(uint32_t& left, uint32_t& top, uint32_t& width, uint32_t& height)
+{
+ v4l2_set_selection(m_fd, left, top, width, height, get_buf_type(m_type));
+}
+
+void VideoStreamer::set_queue_size(uint32_t queue_size, VideoMemoryType mem_type)
+{
+ m_mem_type = mem_type;
+
+ v4l2_request_bufs(m_fd, queue_size, get_buf_type(m_type), get_mem_type(m_mem_type));
+
+ m_fbs.resize(queue_size);
+}
+
+void VideoStreamer::queue(VideoBuffer &fb)
+{
+ uint32_t idx;
+
+ for (idx = 0; idx < m_fbs.size(); ++idx) {
+ if (m_fbs[idx] == false)
+ break;
+ }
+
+ FAIL_IF(idx == m_fbs.size(), "queue full");
+
+ fb.m_index = idx;
+
+ m_fbs[idx] = true;
+
+ v4l2_queue(m_fd, fb, get_buf_type(m_type));
+}
+
+VideoBuffer VideoStreamer::dequeue()
+{
+ VideoBuffer fb {};
+ fb.m_mem_type = m_mem_type;
+
+ uint32_t idx = v4l2_dequeue(m_fd, fb, get_buf_type(m_type));
+
+ m_fbs[idx] = false;
+
+ return fb;
+}
+
+void VideoStreamer::stream_on()
+{
+ uint32_t buf_type = get_buf_type(m_type);
+ int r = ioctl(m_fd, VIDIOC_STREAMON, &buf_type);
+ FAIL_IF(r, "Failed to enable stream: %d", r);
+}
+
+void VideoStreamer::stream_off()
+{
+ uint32_t buf_type = get_buf_type(m_type);
+ int r = ioctl(m_fd, VIDIOC_STREAMOFF, &buf_type);
+ FAIL_IF(r, "Failed to disable stream: %d", r);
+}
+
+
+
+
+
+MetaStreamer::MetaStreamer(int fd, StreamerType type)
+ : VideoStreamer(fd, type)
+{
+}
+
+void MetaStreamer::set_format(PixelFormat fmt, uint32_t size)
+{
+ int r;
+
+ v4l2_format v4lfmt {};
+
+ v4lfmt.type = get_buf_type(m_type);
+ //r = ioctl(m_fd, VIDIOC_G_FMT, &v4lfmt);
+ //ASSERT(r == 0);
+
+ v4lfmt.fmt.meta.dataformat = (uint32_t)fmt;
+ v4lfmt.fmt.meta.buffersize = size;
+
+ r = ioctl(m_fd, VIDIOC_S_FMT, &v4lfmt);
+ ASSERT(r == 0);
+}
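
Finally, a sketch of the DMABUF path that v4l2_queue() handles above: queueing an externally allocated dmabuf (for instance a buffer exported by another subsystem) on an output streamer. The format is assumed to have been set already, and dmabuf_fd is a placeholder:

#include <v4l2++/videodevice.h>

void queue_external_dmabuf(v4l2::VideoStreamer* out, int dmabuf_fd)
{
    using namespace v4l2;

    out->set_queue_size(1, VideoMemoryType::DMABUF);

    VideoBuffer buf{};
    buf.m_mem_type = VideoMemoryType::DMABUF;
    buf.m_fd = dmabuf_fd; // passed to the driver as V4L2_MEMORY_DMABUF

    out->queue(buf);      // queue() assigns the buffer index
    out->stream_on();
}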