first commit

This commit is contained in:
Your Name
2026-02-07 20:22:48 +08:00
commit 1b9711d5e4
2270 changed files with 805872 additions and 0 deletions

View File

@@ -0,0 +1,23 @@
# Build the standalone C++ preview tool and the pybind11 Python module.
CXX := g++
CXXFLAGS := -Wall -O2 `pkg-config --cflags opencv4`
LDFLAGS := `pkg-config --libs opencv4`
SRC := v4l2_camera.cpp test_camera.cpp
TARGET := cam_test

all: $(TARGET)

# Standalone preview binary.
$(TARGET): $(SRC)
	$(CXX) $(CXXFLAGS) -o $@ $^ $(LDFLAGS)

# Python extension: the module name must match PYBIND11_MODULE(v4l2cam, ...).
PYTHON_MODULE := v4l2cam$(shell python3-config --extension-suffix)
PYBIND11_FLAGS := `python3 -m pybind11 --includes`
PYTHON_LIBS := `python3-config --ldflags`

# CXXFLAGS already carries -O2 -Wall; the original recipe repeated them.
python_module: v4l2_camera_bind.cpp v4l2_camera.cpp
	$(CXX) $(CXXFLAGS) -shared -fPIC $(PYBIND11_FLAGS) -o $(PYTHON_MODULE) $^ $(LDFLAGS) $(PYTHON_LIBS)

.PHONY: all clean python_module

clean:
	rm -f $(TARGET) $(PYTHON_MODULE)

View File

@@ -0,0 +1,52 @@
#include "v4l2_camera.h"
#include <opencv2/opencv.hpp>
#include <chrono>
#include <iostream>
// Standalone preview: grab frames from a V4L2 camera and display them with
// an FPS overlay.  argv[1] optionally overrides the default device path.
// Returns 1 if the camera cannot be initialized, 0 otherwise.
int main(int argc, char* argv[])
{
    std::string device = "/dev/video8";
    if (argc > 1) {
        device = argv[1];
    }

    V4L2Camera cam(device, 640, 480);
    if (!cam.init() || !cam.start()) {
        std::cerr << "Failed to initialize camera" << std::endl;
        return 1;
    }

    cv::namedWindow("Preview", cv::WINDOW_AUTOSIZE);

    int frames_in_window = 0;
    auto window_start = std::chrono::steady_clock::now();
    double fps = 0.0;

    for (;;) {
        cv::Mat frame;
        if (!cam.getFrame(frame)) {
            break;
        }

        // Recompute FPS roughly once per second from the frames seen in
        // the current measurement window.
        ++frames_in_window;
        const auto now = std::chrono::steady_clock::now();
        const double elapsed =
            std::chrono::duration_cast<std::chrono::milliseconds>(now - window_start).count() / 1000.0;
        if (elapsed >= 1.0) {
            fps = frames_in_window / elapsed;
            window_start = now;
            frames_in_window = 0;
        }

        char fps_text[32];
        snprintf(fps_text, sizeof(fps_text), "FPS: %.1f", fps);
        cv::putText(frame, fps_text, cv::Point(10, 30),
                    cv::FONT_HERSHEY_SIMPLEX, 0.8, cv::Scalar(0, 255, 0), 2);

        cv::imshow("Preview", frame);
        if (cv::waitKey(1) == 27) {  // ESC quits
            break;
        }
    }
    return 0;
}

View File

@@ -0,0 +1,43 @@
import sys
import time

import cv2
import v4l2cam

# Live preview of /dev/video8 through the v4l2cam pybind11 module, with an
# FPS counter recomputed once per second.  The original kept running into
# the capture loop even when init()/start() failed; exit early instead.
cam = v4l2cam.V4L2Camera("/dev/video8", 640, 480)
if not cam.init():
    print("init failed")
    sys.exit(1)
if not cam.start():
    print("start failed")
    sys.exit(1)

frame_count = 0
start_time = time.time()
fps = 0.0
try:
    while True:
        try:
            frame = cam.get_frame()
            frame_count += 1
            now = time.time()
            elapsed = now - start_time
            if elapsed >= 1.0:
                fps = frame_count / elapsed
                frame_count = 0
                start_time = now
            fps_text = f"FPS: {fps:.1f}"
            cv2.putText(frame, fps_text, (10, 30),
                        cv2.FONT_HERSHEY_SIMPLEX, 0.8, (0, 255, 0), 2)
            cv2.imshow("Preview", frame)
        except Exception as e:
            # get_frame() raises RuntimeError when the capture fails.
            print("Error:", e)
            break
        if cv2.waitKey(1) & 0xFF == 27:  # ESC quits
            break
finally:
    # Always release the device and UI resources, even on error paths.
    cam.stop()
    cv2.destroyAllWindows()

View File

@@ -0,0 +1,267 @@
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
* V4L2Camera 实现文件
*/
#include "v4l2_camera.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <errno.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#define V4L2_MODE_VIDEO 0x0002
#define CLEAR(x) (memset(&(x), 0, sizeof(x)))
// Bind the object to a device node at the requested capture size.
// No device access happens here; call init() to open and configure it.
V4L2Camera::V4L2Camera(const std::string& device, int width, int height)
    : device_(device),
      width_(width),
      height_(height),
      fd_(-1),            // not opened yet
      streaming_(false),
      initialized_(false),
      nplanes_(3) {       // YUV420M default; overwritten by setFormat()
}
// Tear everything down in reverse order of setup.
//
// The descriptor is closed unconditionally: initDevice() may have opened
// it even though a later init() step failed and initialized_ stayed false
// (the original leaked the fd in that case).  Mapped buffers only exist
// once init() fully succeeded, so releaseBuffers() stays behind the flag.
V4L2Camera::~V4L2Camera() {
    stop();
    if (initialized_) {
        releaseBuffers();
    }
    if (fd_ >= 0) {
        close(fd_);
        fd_ = -1;
    }
}
// One-shot setup: open the device, negotiate the format, and prepare the
// mmap buffer queue.  Idempotent — repeated calls return true immediately.
// Returns false as soon as any setup step fails.
bool V4L2Camera::init() {
    if (initialized_) {
        return true;
    }
    const bool ok = initDevice()
                 && setFormat()
                 && requestBuffers()
                 && queueAllBuffers();
    if (!ok) {
        return false;
    }
    initialized_ = true;
    return true;
}
// Begin streaming (VIDIOC_STREAMON).  Requires a successful init();
// calling it while already streaming is a harmless no-op.
bool V4L2Camera::start() {
    if (!initialized_) {
        return false;
    }
    if (streaming_) {
        return true;
    }
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    streaming_ = (ioctl(fd_, VIDIOC_STREAMON, &type) != -1);
    return streaming_;
}
// Halt streaming (VIDIOC_STREAMOFF).  A camera that is not currently
// streaming counts as already stopped and succeeds trivially.
bool V4L2Camera::stop() {
    if (streaming_) {
        enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
        if (ioctl(fd_, VIDIOC_STREAMOFF, &type) == -1) {
            return false;
        }
        streaming_ = false;
    }
    return true;
}
// Dequeue one captured buffer, convert it to BGR into @frame, and requeue
// the buffer.  Blocks up to one second waiting for data.
// Returns false on timeout, ioctl failure, or an unexpected plane count.
bool V4L2Camera::getFrame(cv::Mat& frame) {
    if (!streaming_) {
        return false;
    }
    // Wait (<= 1s) for the driver to signal a ready buffer.  Retry when
    // select() is merely interrupted by a signal (the original treated
    // EINTR as failure).
    int ret;
    do {
        fd_set fds;
        FD_ZERO(&fds);
        FD_SET(fd_, &fds);
        struct timeval tv = {1, 0};
        ret = select(fd_ + 1, &fds, NULL, NULL, &tv);
    } while (ret == -1 && errno == EINTR);
    if (ret <= 0) {
        return false;  // timeout or hard select() error
    }
    // Dequeue the filled buffer.
    struct v4l2_buffer buf;
    struct v4l2_plane planes[3];
    CLEAR(buf);
    CLEAR(planes);  // never hand uninitialized plane fields to the kernel
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.length = nplanes_;
    buf.m.planes = planes;
    if (ioctl(fd_, VIDIOC_DQBUF, &buf) == -1) {
        return false;
    }
    // Convert planar YUV420M (separate Y/U/V planes) to BGR.
    const bool converted = (nplanes_ >= 3);
    if (converted) {
        yuv420mToBgr((unsigned char*)buffers_[buf.index].start[0],
                     (unsigned char*)buffers_[buf.index].start[1],
                     (unsigned char*)buffers_[buf.index].start[2],
                     frame);
    }
    // Hand the buffer back to the driver regardless of conversion result.
    if (ioctl(fd_, VIDIOC_QBUF, &buf) == -1) {
        return false;
    }
    // The original returned true even when no conversion happened, leaving
    // @frame empty; report that case as a failure instead.
    return converted;
}
// Open the device node and apply best-effort defaults (input 0, 30 fps).
// Only the open() itself is fatal: not every driver implements the
// optional ioctls below, so their errors are deliberately ignored.
bool V4L2Camera::initDevice() {
    fd_ = open(device_.c_str(), O_RDWR | O_NONBLOCK, 0);
    if (fd_ < 0) {
        return false;
    }
    // Select input 0.  Per the V4L2 spec VIDIOC_S_INPUT takes a plain int
    // index, not a struct v4l2_input — the original only worked because
    // the struct's first member happens to be the index.
    int input = 0;
    ioctl(fd_, VIDIOC_S_INPUT, &input);  // optional; ignore errors
    // Request 30 fps video capture mode; also best-effort.
    struct v4l2_streamparm parms;
    CLEAR(parms);
    parms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    parms.parm.capture.timeperframe.numerator = 1;
    parms.parm.capture.timeperframe.denominator = 30;
    parms.parm.capture.capturemode = V4L2_MODE_VIDEO;
    ioctl(fd_, VIDIOC_S_PARM, &parms);   // optional; ignore errors
    return true;
}
// Negotiate a width_ x height_ planar YUV420M capture format, then read
// back what the driver actually granted and record its dimensions and
// plane count.
//
// Rejects formats with more than 3 planes: Buffer::start/length and every
// v4l2_plane stack array in this class are sized for 3, so a larger count
// would overflow them (the original had no guard).
bool V4L2Camera::setFormat() {
    struct v4l2_format fmt;
    CLEAR(fmt);
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    fmt.fmt.pix_mp.width = width_;
    fmt.fmt.pix_mp.height = height_;
    fmt.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_YUV420M;
    fmt.fmt.pix_mp.field = V4L2_FIELD_NONE;
    if (ioctl(fd_, VIDIOC_S_FMT, &fmt) == -1) {
        return false;
    }
    // The driver may have adjusted the request; trust its reported values.
    if (ioctl(fd_, VIDIOC_G_FMT, &fmt) == -1) {
        return false;
    }
    nplanes_ = fmt.fmt.pix_mp.num_planes;
    if (nplanes_ > 3) {
        return false;  // would overflow the fixed 3-plane buffers
    }
    width_ = fmt.fmt.pix_mp.width;
    height_ = fmt.fmt.pix_mp.height;
    return true;
}
// Ask the driver for BUFFER_COUNT mmap buffers and map every plane of
// every buffer into our address space.
//
// Returns false if the driver grants fewer buffers than requested (the
// original silently went on to QUERYBUF indices that did not exist) or if
// any query/mmap step fails.
bool V4L2Camera::requestBuffers() {
    // Pre-mark every plane slot as unmapped so releaseBuffers() can tell
    // real mappings from slots this function never reached.
    for (unsigned int i = 0; i < BUFFER_COUNT; i++) {
        for (unsigned int j = 0; j < 3; j++) {
            buffers_[i].start[j] = MAP_FAILED;
            buffers_[i].length[j] = 0;
        }
    }
    struct v4l2_requestbuffers req;
    CLEAR(req);
    req.count = BUFFER_COUNT;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    req.memory = V4L2_MEMORY_MMAP;
    if (ioctl(fd_, VIDIOC_REQBUFS, &req) == -1) {
        return false;
    }
    if (req.count < BUFFER_COUNT) {
        return false;  // driver could not satisfy the full request
    }
    // Query and map each buffer.
    for (unsigned int i = 0; i < BUFFER_COUNT; i++) {
        struct v4l2_buffer buf;
        struct v4l2_plane planes[3];
        CLEAR(buf);
        CLEAR(planes);  // QUERYBUF fills these; start from a clean slate
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        buf.length = nplanes_;
        buf.m.planes = planes;
        if (ioctl(fd_, VIDIOC_QUERYBUF, &buf) == -1) {
            return false;
        }
        // Map each plane of this buffer.
        for (unsigned int j = 0; j < nplanes_; j++) {
            buffers_[i].length[j] = planes[j].length;
            buffers_[i].start[j] = mmap(NULL, planes[j].length,
                                        PROT_READ | PROT_WRITE,
                                        MAP_SHARED, fd_,
                                        planes[j].m.mem_offset);
            if (buffers_[i].start[j] == MAP_FAILED) {
                return false;
            }
        }
    }
    return true;
}
// Enqueue every mapped buffer so the driver has somewhere to write frames
// before streaming starts.
bool V4L2Camera::queueAllBuffers() {
    for (unsigned int i = 0; i < BUFFER_COUNT; i++) {
        struct v4l2_buffer buf;
        struct v4l2_plane planes[3];
        CLEAR(buf);
        CLEAR(planes);  // QBUF reads plane fields; don't pass stack garbage
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        buf.length = nplanes_;
        buf.m.planes = planes;
        if (ioctl(fd_, VIDIOC_QBUF, &buf) == -1) {
            return false;
        }
    }
    return true;
}
bool V4L2Camera::releaseBuffers() {
for (unsigned int i = 0; i < BUFFER_COUNT; i++) {
for (unsigned int j = 0; j < nplanes_; j++) {
if (buffers_[i].start[j] != MAP_FAILED) {
munmap(buffers_[i].start[j], buffers_[i].length[j]);
}
}
}
return true;
}
void V4L2Camera::yuv420mToBgr(unsigned char* y_plane, unsigned char* u_plane,
unsigned char* v_plane, cv::Mat& bgr_frame) {
cv::Mat yuv_frame(height_ * 3 / 2, width_, CV_8UC1);
// 复制Y平面
memcpy(yuv_frame.data, y_plane, width_ * height_);
// 复制UV平面
unsigned char* uv_dst = yuv_frame.data + width_ * height_;
int uv_size = width_ * height_ / 4;
memcpy(uv_dst, u_plane, uv_size);
memcpy(uv_dst + uv_size, v_plane, uv_size);
cv::cvtColor(yuv_frame, bgr_frame, cv::COLOR_YUV2BGR_I420);
}

View File

@@ -0,0 +1,51 @@
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
* V4L2Camera - 简洁的V4L2摄像头采集类
* 支持YUV420M格式提供简单的初始化、启动、停止和获取帧接口
*/
#ifndef V4L2_CAMERA_H
#define V4L2_CAMERA_H
#include <string>
#include <linux/videodev2.h>
#include <opencv2/opencv.hpp>
/*
 * V4L2Camera - minimal V4L2 capture helper for planar YUV420M devices.
 *
 * Lifecycle: construct -> init() -> start() -> getFrame() loop -> stop().
 * The destructor stops streaming, unmaps buffers and closes the device.
 */
class V4L2Camera {
public:
    // Bind to a device node (e.g. "/dev/video8"); no I/O until init().
    V4L2Camera(const std::string& device, int width, int height);
    ~V4L2Camera();

    // The object owns a file descriptor and mmap'ed kernel buffers; a
    // default copy would double-close/double-munmap them, so copying is
    // disabled (the original was implicitly — and dangerously — copyable).
    V4L2Camera(const V4L2Camera&) = delete;
    V4L2Camera& operator=(const V4L2Camera&) = delete;

    bool init();                    // open device, set format, map buffers
    bool start();                   // VIDIOC_STREAMON
    bool stop();                    // VIDIOC_STREAMOFF
    bool getFrame(cv::Mat& frame);  // blocking (<= 1s) grab, BGR output

private:
    // One driver buffer: up to three mmap'ed planes (Y, U, V).
    struct Buffer {
        void* start[3];   // plane base addresses
        size_t length[3]; // plane sizes in bytes
    };

    bool initDevice();
    bool setFormat();
    bool requestBuffers();
    bool queueAllBuffers();
    bool releaseBuffers();
    void yuv420mToBgr(unsigned char* y_plane, unsigned char* u_plane,
                      unsigned char* v_plane, cv::Mat& bgr_frame);

    static const int BUFFER_COUNT = 4;

    std::string device_;    // device node path
    int width_;             // frame width (updated to driver-granted value)
    int height_;            // frame height (updated to driver-granted value)
    int fd_;                // device fd, -1 when closed
    bool streaming_;        // STREAMON has been issued
    bool initialized_;      // init() completed successfully
    Buffer buffers_[BUFFER_COUNT];
    unsigned int nplanes_;  // plane count reported by the driver
};
#endif // V4L2_CAMERA_H

View File

@@ -0,0 +1,52 @@
#include <pybind11/pybind11.h>
#include <pybind11/numpy.h>
#include "v4l2_camera.h"
namespace py = pybind11;
// Thin adapter exposing V4L2Camera to Python.  get_frame() returns a
// fresh H x W x 3 uint8 numpy array; the py::array_t constructor used
// below copies the pixel data, so the array stays valid after the local
// cv::Mat is destroyed.
class PyCamera
{
public:
    PyCamera(const std::string& device, const int width, const int height)
        : cam_(device, width, height)
    {
    }

    bool init()  { return cam_.init(); }
    bool start() { return cam_.start(); }
    bool stop()  { return cam_.stop(); }

    // Throws std::runtime_error (RuntimeError in Python) when the grab fails.
    py::array_t<uint8_t> get_frame()
    {
        cv::Mat frame;
        if (!cam_.getFrame(frame)) {
            throw std::runtime_error("Failed to get frame");
        }
        return py::array_t<uint8_t>({frame.rows, frame.cols, frame.channels()},
                                    frame.data);
    }

private:
    V4L2Camera cam_;
};
// Module definition: importable from Python as `v4l2cam`, with the wrapper
// published under the original C++ class name "V4L2Camera".
PYBIND11_MODULE(v4l2cam, m)
{
    auto cam = py::class_<PyCamera>(m, "V4L2Camera");
    cam.def(py::init<const std::string&, const int, const int>());
    cam.def("init", &PyCamera::init);
    cam.def("start", &PyCamera::start);
    cam.def("stop", &PyCamera::stop);
    cam.def("get_frame", &PyCamera::get_frame);
}