[Raspberry Pi][V4L2] Capture, encode, and store a camera stream on the Raspberry Pi

Hardware: Raspberry Pi 3B+

Camera module: rpi Camera (5 MP)

Encoder library: x264

Project code

#include <stdio.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <linux/videodev2.h>
#include <signal.h>
#include <errno.h>

#include <time.h>
#include <x264.h>

#define DEVICE "/dev/video0"
#define WIDTH 1920
#define HEIGHT 1080
#define BUF_COUNT 4
#define MAX_FRAMES 10  // exit after saving this many frames (unused below; the loop runs until Ctrl+C)

static volatile int keep_running = 1;




// Signal handler: responds to Ctrl+C
void signal_handler(int sig) {
    (void)sig;
    keep_running = 0;
}

// Safely release the V4L2 resources
void cleanup_v4l2(int fd, void *mem[], size_t len[], int buf_count, enum v4l2_buf_type type) {
    if (fd >= 0) {
        ioctl(fd, VIDIOC_STREAMOFF, &type);  // stop streaming first
        for (int i = 0; i < buf_count; ++i) {
            if (mem[i] != MAP_FAILED && mem[i] != NULL) {
                munmap(mem[i], len[i]);
            }
        }
        close(fd);
    }
}
/**
 * @brief Save one YUYV frame to a .yuyv file
 *
 * @param data        pointer to the frame data (from the mmap'ed buffer)
 * @param size        actual number of bytes in the frame (dqbuf.bytesused)
 * @param frame_index frame number, used to build the file name
 * @return int        0 on success, -1 on failure
 *
 * @call  save_frame_to_file(mem[dqbuf.index], dqbuf.bytesused, frame_count);
 */
int save_frame_to_file(const void *data, size_t size, int frame_index) {
    if (!data || size == 0) {
        fprintf(stderr, "Invalid frame data or size\n");
        return -1;
    }

    char filename[64];
    snprintf(filename, sizeof(filename), "frame_%03d.yuyv", frame_index);

    FILE *f = fopen(filename, "wb");
    if (!f) {
        perror("fopen frame file");
        return -1;
    }

    size_t written = fwrite(data, 1, size, f);
    fclose(f);

    if (written != size) {
        fprintf(stderr, "Warning: only %zu/%zu bytes written to %s\n", written, size, filename);
        return -1;
    }

    printf("Saved %s (%zu bytes)\n", filename, size);
    return 0;
}

// ============ x264 wrapper module ============
typedef struct {
    x264_t      *encoder;
    FILE        *output_file;
    int          width;
    int          height;
    int64_t      frame_count;
} X264Encoder;


X264Encoder* x264_encoder_create(int width, int height, const char *output_path) {
    X264Encoder *enc = calloc(1, sizeof(X264Encoder));
    if (!enc) return NULL;

    enc->width = width;
    enc->height = height;

    // Open the output file
    enc->output_file = fopen(output_path, "wb");
    if (!enc->output_file) {
        perror("fopen output file");
        free(enc);
        return NULL;
    }

    // Configure x264 parameters
    x264_param_t param;
    x264_param_default_preset(&param, "ultrafast", "zerolatency");
    param.i_width = width;
    param.i_height = height;
    param.i_fps_num = 30;
    param.i_fps_den = 1;
    param.i_keyint_max = 30;
    param.b_intra_refresh = 1;
    param.rc.i_bitrate = 2000;           // 2 Mbps
    param.rc.i_rc_method = X264_RC_ABR;
    param.i_threads = 1;
    param.b_repeat_headers = 1;
    param.i_level_idc = 40;
    x264_param_apply_profile(&param, "high");

    enc->encoder = x264_encoder_open(&param);
    if (!enc->encoder) {
        fprintf(stderr, "Failed to open x264 encoder\n");
        fclose(enc->output_file);
        free(enc);
        return NULL;
    }

    // Write the SPS/PPS headers
    x264_nal_t *nals;
    int i_nals;
    x264_encoder_headers(enc->encoder, &nals, &i_nals);
    for (int i = 0; i < i_nals; i++) {
        fwrite(nals[i].p_payload, 1, nals[i].i_payload, enc->output_file);
    }

    return enc;
}

int x264_encoder_encode_frame(X264Encoder *enc,
                              unsigned char *y, unsigned char *u, unsigned char *v) {
    if (!enc || !enc->encoder || !y || !u || !v) return -1;

    x264_picture_t pic_in, pic_out;
    x264_picture_init(&pic_in);

    pic_in.img.i_csp = X264_CSP_I420;
    pic_in.img.i_plane = 3;
    pic_in.img.plane[0] = y;
    pic_in.img.plane[1] = u;
    pic_in.img.plane[2] = v;
    pic_in.img.i_stride[0] = enc->width;
    pic_in.img.i_stride[1] = enc->width / 2;
    pic_in.img.i_stride[2] = enc->width / 2;
    pic_in.i_pts = enc->frame_count++;

    x264_nal_t *nals;
    int i_nals;
    int result = x264_encoder_encode(enc->encoder, &nals, &i_nals, &pic_in, &pic_out);
    if (result < 0) {
        fprintf(stderr, "x264 encode error\n");
        return -1;
    } else if (result > 0) {
        for (int i = 0; i < i_nals; i++) {
            fwrite(nals[i].p_payload, 1, nals[i].i_payload, enc->output_file);
        }
        fflush(enc->output_file); // optional: make sure the data is written out immediately
    }

    return 0;
}

void x264_enc_close(X264Encoder *enc) {
    if (!enc) return;

    // Flush delayed frames: pass NULL as the input picture until the encoder
    // has no delayed output left, then close it.
    if (enc->encoder) {
        x264_picture_t pic_out;
        x264_nal_t *nals;
        int i_nals;

        while (x264_encoder_delayed_frames(enc->encoder) > 0) {
            if (x264_encoder_encode(enc->encoder, &nals, &i_nals, NULL, &pic_out) < 0)
                break;
            for (int i = 0; i < i_nals; i++) {
                fwrite(nals[i].p_payload, 1, nals[i].i_payload, enc->output_file);
            }
        }

        x264_encoder_close(enc->encoder);
    }

    if (enc->output_file) {
        fclose(enc->output_file);
    }

    free(enc);
}

// Convert YUYV to I420 (the key step)
void yuyv_to_i420(const unsigned char *yuyv, unsigned char *y, unsigned char *u, unsigned char *v,
                  int width, int height) {
    int uv_width = width / 2;

    for (int i = 0; i < height; ++i) {
        for (int j = 0; j < width / 2; ++j) {
            int yuyv_idx = i * width * 2 + j * 4;
            int y_idx = i * width + j * 2;
            int u_idx = (i / 2) * uv_width + j;
            int v_idx = u_idx;

            // Y0 U0 Y1 V0
            unsigned char y0 = yuyv[yuyv_idx + 0];
            unsigned char u0 = yuyv[yuyv_idx + 1];
            unsigned char y1 = yuyv[yuyv_idx + 2];
            unsigned char v0 = yuyv[yuyv_idx + 3];

            y[y_idx + 0] = y0;
            y[y_idx + 1] = y1;

            if (i % 2 == 0) { // write U/V only on even rows (I420 is 4:2:0)
                u[u_idx] = u0;
                v[v_idx] = v0;
            }
        }
    }
}
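
The conversion above assumes that YUYV rows are tightly packed (stride = width * 2). Some V4L2 drivers pad each row and report the real stride in fmt.fmt.pix.bytesperline after VIDIOC_S_FMT. Below is a minimal sketch of a stride-aware variant under that assumption; the helper name and the extra src_stride parameter are illustrative, not part of the project code.

// Variant of yuyv_to_i420 that honours the driver-reported row stride
// (fmt.fmt.pix.bytesperline). With tightly packed rows, src_stride == width * 2
// and this behaves exactly like the function above.
void yuyv_to_i420_strided(const unsigned char *yuyv, int src_stride,
                          unsigned char *y, unsigned char *u, unsigned char *v,
                          int width, int height) {
    int uv_width = width / 2;

    for (int i = 0; i < height; ++i) {
        const unsigned char *row = yuyv + (size_t)i * src_stride;
        for (int j = 0; j < width / 2; ++j) {
            y[i * width + j * 2 + 0] = row[j * 4 + 0];       // Y0
            y[i * width + j * 2 + 1] = row[j * 4 + 2];       // Y1
            if (i % 2 == 0) {                                // keep chroma from even rows only
                u[(i / 2) * uv_width + j] = row[j * 4 + 1];  // U0
                v[(i / 2) * uv_width + j] = row[j * 4 + 3];  // V0
            }
        }
    }
}

It would be called as yuyv_to_i420_strided(mem[dqbuf.index], fmt.fmt.pix.bytesperline, y_plane, u_plane, v_plane, WIDTH, HEIGHT), reusing the fmt structure filled in by VIDIOC_S_FMT.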



int main() 
{
    int fd = -1;
    void *mem[BUF_COUNT];
    size_t len[BUF_COUNT];
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    // Initialize the mmap pointers
    for (int i = 0; i < BUF_COUNT; ++i) {
        mem[i] = MAP_FAILED;
    }

    // Install the Ctrl+C signal handler
    signal(SIGINT, signal_handler);

    // Open the device
    fd = open(DEVICE, O_RDWR | O_NONBLOCK);  // non-blocking mode is safer (pair DQBUF with a retry/timeout)
    if (fd < 0) {
        perror("open");
        return EXIT_FAILURE;
    }

    // 1. Set the capture format
    struct v4l2_format fmt = {0};
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = WIDTH;
    fmt.fmt.pix.height = HEIGHT;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
    fmt.fmt.pix.field = V4L2_FIELD_NONE;

    if(ioctl(fd,VIDIOC_S_FMT,&fmt) < 0)
    {
        perror("VIDIOC_S_FMT");
        cleanup_v4l2(fd,mem,len,BUF_COUNT,type);
        return EXIT_FAILURE;
    }

    // Request buffers from the kernel
    struct v4l2_requestbuffers req = {0};
    req.count = BUF_COUNT;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;

    if(ioctl(fd,VIDIOC_REQBUFS,&req) < 0)
    {
        perror("VIDIOC_REQBUFS");
        cleanup_v4l2(fd,mem,len,BUF_COUNT,type);
        return EXIT_FAILURE;
    }

    // mmap and enqueue all buffers
    for (int i = 0; i < BUF_COUNT; ++i) {
        // Query the buffer
        struct v4l2_buffer buf = {0};
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;

        if (ioctl(fd, VIDIOC_QUERYBUF, &buf) < 0) {
            perror("VIDIOC_QUERYBUF");
            cleanup_v4l2(fd, mem, len, BUF_COUNT, type);
            return EXIT_FAILURE;
        }

        // Map the buffer into mem[i] with length len[i]
        len[i] = buf.length;
        mem[i] = mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset);
        if (mem[i] == MAP_FAILED) {
            perror("mmap");
            cleanup_v4l2(fd, mem, len, BUF_COUNT, type);
            return EXIT_FAILURE;
        }

        // Enqueue the mapped buffer
        if (ioctl(fd, VIDIOC_QBUF, &buf) < 0) {
            perror("VIDIOC_QBUF");
            cleanup_v4l2(fd, mem, len, BUF_COUNT, type);
            return EXIT_FAILURE;
        }
    }

    // Start streaming
    if(ioctl(fd,VIDIOC_STREAMON,&type) < 0)
    {
        perror("VIDIOC_STREAMON");
        cleanup_v4l2(fd, mem, len, BUF_COUNT, type);
        return EXIT_FAILURE;
    }

    printf("Streaming started. Press Ctrl+C to stop.\n");

    // Create the encoder (replaces a separate init_x264_encoder + fopen)
    X264Encoder *encoder = x264_encoder_create(WIDTH, HEIGHT, "output.h264");
    if (!encoder) {
        cleanup_v4l2(fd, mem, len, BUF_COUNT, type);
        return EXIT_FAILURE;
    }

    printf("Encoding started. Press Ctrl+C to stop.\n");

    // Allocate the I420 plane buffers
    int y_size = WIDTH * HEIGHT;
    int uv_size = y_size / 4;
    unsigned char *y_plane = malloc(y_size);
    unsigned char *u_plane = malloc(uv_size);
    unsigned char *v_plane = malloc(uv_size);
    if (!y_plane || !u_plane || !v_plane) {
        fprintf(stderr, "Failed to allocate I420 buffers\n");
        goto cleanup;
    }

    int frame_count = 0;
    struct timespec last_time, current_time;
    clock_gettime(CLOCK_MONOTONIC, &last_time);

    while (keep_running) {
        struct v4l2_buffer dqbuf = {0};
        dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        dqbuf.memory = V4L2_MEMORY_MMAP;

        if (ioctl(fd, VIDIOC_DQBUF, &dqbuf) < 0) {
            if (errno == EAGAIN || errno == EIO) {
                usleep(10000);
                continue;
            } else {
                perror("VIDIOC_DQBUF");
                break;
            }
        }

        // Convert YUYV -> I420
        yuyv_to_i420((const unsigned char *)mem[dqbuf.index],
                     y_plane, u_plane, v_plane,
                     WIDTH, HEIGHT);

        // Encode one frame via the wrapper
        if (x264_encoder_encode_frame(encoder, y_plane, u_plane, v_plane) < 0) {
            fprintf(stderr, "Failed to encode frame %d\n", frame_count);
            break;
        }

        frame_count++;

        // Compute and print the FPS once per second
        clock_gettime(CLOCK_MONOTONIC, &current_time);
        double elapsed = (current_time.tv_sec - last_time.tv_sec)
                       + (current_time.tv_nsec - last_time.tv_nsec) / 1e9;
        if (elapsed >= 1.0) {
            printf("FPS: %.2f (captured %d frames in %.2f seconds)\n", 
                   frame_count / elapsed, frame_count, elapsed);
            frame_count = 0;
            last_time = current_time;
        }

        if (ioctl(fd, VIDIOC_QBUF, &dqbuf) < 0) {
            perror("Re-queue buffer failed");
            break;
        }
    }

cleanup:
    printf("\nStopping capture...\n");

    // Close the encoder (flushes delayed frames, closes the file, frees the struct)
    x264_enc_close(encoder);

    // Free the I420 plane buffers
    free(y_plane);
    free(u_plane);
    free(v_plane);

    // Release the V4L2 resources
    cleanup_v4l2(fd, mem, len, BUF_COUNT, type);

    printf("Done. Output saved to output.h264\n");

    return EXIT_SUCCESS;
  
}
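
The capture loop above opens the device with O_NONBLOCK and simply sleeps 10 ms whenever VIDIOC_DQBUF returns EAGAIN. A common alternative is to block on the file descriptor with select() until a frame is actually ready. A minimal sketch of that approach follows; the helper name wait_for_frame is illustrative and not part of the project code.

#include <sys/select.h>

// Wait until the capture fd becomes readable (a buffer can be dequeued)
// or the timeout expires. Returns 1 when a frame is ready, 0 on timeout
// (or EINTR), -1 on any other select() error.
static int wait_for_frame(int fd, int timeout_ms) {
    fd_set fds;
    FD_ZERO(&fds);
    FD_SET(fd, &fds);

    struct timeval tv = {
        .tv_sec  = timeout_ms / 1000,
        .tv_usec = (timeout_ms % 1000) * 1000,
    };

    int r = select(fd + 1, &fds, NULL, NULL, &tv);
    if (r < 0 && errno != EINTR) {
        perror("select");
        return -1;
    }
    return r > 0 ? 1 : 0;
}

In the main loop, calling wait_for_frame(fd, 100) before VIDIOC_DQBUF would replace the usleep(10000) retry on EAGAIN.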


Design rationale

1. Why YUYV must be converted to I420 before encoding

x264 only accepts specific planar YUV formats as input; YUYV is a packed format and is not supported.

YUYV (also called YUY2) is a packed YUV 4:2:2 format.

Memory layout:

[Y0][U0][Y1][V0]  -> pixel 0 and pixel 1 share the same U/V pair

I420 (YUV420 planar)

Memory layout:

The three components are stored in completely separate planes:

[YYYYYYYY...]  // all Y samples (W x H bytes)
[UUUUUUUU...]  // all U samples (W/2 x H/2 bytes)
[VVVVVVVV...]  // all V samples (W/2 x H/2 bytes)
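
The two layouts also differ in size, which is where the buffer sizes allocated in main() come from; a quick check of the arithmetic at 1920x1080:

/* Frame sizes at 1920x1080 (matching the allocations in main):
 *   YUYV (packed 4:2:2): WIDTH * HEIGHT * 2             = 4,147,200 bytes per frame
 *   I420 (planar 4:2:0): Y = WIDTH * HEIGHT             = 2,073,600 bytes
 *                        U = V = (WIDTH/2) * (HEIGHT/2) =   518,400 bytes each
 *                        total = WIDTH * HEIGHT * 3 / 2 = 3,110,400 bytes per frame
 */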

x264 is a highly optimized H.264 encoder; its internal algorithms (motion estimation, DCT transform, chroma subsampling) all assume planar input.

Alternatively, the V4L2 driver can be asked to output I420 directly, which removes the software conversion step:

fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420;  // or V4L2_PIX_FMT_NV12
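
Whether the driver actually offers a planar 4:2:0 format can be checked before relying on it. A minimal sketch using VIDIOC_ENUM_FMT on the already-opened fd (the printf format is just for illustration):

// Enumerate the pixel formats advertised by the capture device, so you can
// see whether V4L2_PIX_FMT_YUV420 / V4L2_PIX_FMT_NV12 are available at all.
struct v4l2_fmtdesc desc = {0};
desc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
while (ioctl(fd, VIDIOC_ENUM_FMT, &desc) == 0) {
    printf("format %u: %.4s (%s)\n",
           desc.index,
           (const char *)&desc.pixelformat,    // FourCC, e.g. "YUYV", "YU12"
           (const char *)desc.description);
    desc.index++;
}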

2. x264 encoder workflow and input/output parameters

2.1 Required steps for x264 encoding

1. Configure the encoding parameters (x264_param_t)

  • Set resolution, frame rate, bitrate, preset, profile, etc.
  • Initialize with x264_param_default_preset()

2. Open the encoder (x264_encoder_open)

  • Creates an encoder instance from the parameters (returns an x264_t*)
  • Returns NULL on failure

3. Get and write the SPS/PPS (x264_encoder_headers)

  • SPS (Sequence Parameter Set) and PPS (Picture Parameter Set) are metadata required for H.264 decoding.
  • They must be written to the output stream before the first frame (especially for file output or network streaming).

4. Encode each frame in a loop (x264_encoder_encode)

  • Input: a YUV frame in a planar format (I420, NV12, etc.)
  • Output: NAL units (IDR frames, P frames, etc.), written to a file or the network

5. Flush and close the encoder (x264_encoder_encode + x264_encoder_close)

  • Pass a NULL input picture (pic_in = NULL) to flush any delayed frames
  • Release internal resources

2.2 Input parameters: x264_param_t

Parameter                              Meaning                                     Example
i_width, i_height                      Video width/height (must be even)           1920, 1080
i_fps_num, i_fps_den                   Frame rate (numerator/denominator)          30, 1 -> 30 fps
rc.i_bitrate                           Target bitrate (kbps)                       2000 -> 2 Mbps
rc.i_rc_method                         Rate-control method                         X264_RC_ABR (average bitrate)
i_keyint_max                           Maximum GOP length (keyframe interval)      30
b_repeat_headers                       Repeat SPS/PPS at every keyframe            1 (recommended for streaming)
i_threads                              Number of encoder threads                   0 (auto) or 1 (low latency)
psz_profile / x264_param_apply_profile Profile (baseline/main/high)                "high"
x264_param_t param;
x264_param_default_preset(&param, "ultrafast", "zerolatency");
// then override the parameters you want to customize
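
For example, filling in the values from the table above (the same settings as in the project code earlier):

param.i_width          = 1920;
param.i_height         = 1080;
param.i_fps_num        = 30;
param.i_fps_den        = 1;
param.i_keyint_max     = 30;            // one keyframe per second at 30 fps
param.rc.i_rc_method   = X264_RC_ABR;
param.rc.i_bitrate     = 2000;          // kbps, i.e. 2 Mbps
param.b_repeat_headers = 1;             // resend SPS/PPS with every keyframe
param.i_threads        = 1;             // single thread for low latency
x264_param_apply_profile(&param, "high");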

2.3 Input picture: x264_picture_t

Field               Meaning
img.i_csp           Colour space (must be planar): X264_CSP_I420 (YUV420P) or X264_CSP_NV12 (YUV420SP)
img.plane[0..2]     Pointers to the Y/U/V planes
img.i_stride[0..2]  Stride (bytes per row) of each plane
i_pts               Presentation Time Stamp, used for synchronization
i_type              Can force a frame type (e.g. X264_TYPE_IDR); usually left at 0 so the encoder decides

x264 does not accept packed formats (e.g. YUYV, RGB24).

2.4 Output: NAL units

After calling x264_encoder_encode(), the output is a set of NAL (Network Abstraction Layer) units:

x264_nal_t *nals;  // NAL array
int i_nals;        // number of NAL units

int frame_size = x264_encoder_encode(encoder, &nals, &i_nals, &pic_in, &pic_out);

Common NAL types:

nal->i_type   Name           Description
5             IDR slice      Keyframe, can be decoded independently
1             Coded slice    P or B frame
7             SPS            Sequence parameter set
8             PPS            Picture parameter set
6             SEI            Supplemental enhancement information (e.g. timing)

Ways to consume the output:

  • Write to a file: simply fwrite(nal->p_payload, 1, nal->i_payload, file)
  • Network transmission: packetize as RTP (requires extra framing)
  • Playback: save as .h264 or mux into MP4/MKV
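
When writing the stream out it can also be useful to look at nal->i_type, for example to count keyframes or decide where a new file segment may start. A minimal sketch continuing the write-to-file case (output_file is the same FILE* as elsewhere; x264 also exposes pic_out.b_keyframe, which gives the same information more directly):

// Walk the NAL units returned by x264_encoder_encode(), write them out,
// and note whether this access unit contains an IDR slice (i_type == 5).
int is_keyframe = 0;
for (int i = 0; i < i_nals; i++) {
    if (nals[i].i_type == NAL_SLICE_IDR)   // 5 in the table above
        is_keyframe = 1;
    fwrite(nals[i].p_payload, 1, nals[i].i_payload, output_file);
}
if (is_keyframe)
    printf("wrote an IDR keyframe (%d NAL units)\n", i_nals);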

2.5 Complete flow (pseudocode)

// 1. Configure parameters
x264_param_t param;
x264_param_default_preset(&param, "medium", "zerolatency");
param.i_width = 1920;
param.i_height = 1080;
param.i_fps_num = 30;
param.i_fps_den = 1;
param.rc.i_bitrate = 2000;          // 2000 kbps
param.rc.i_rc_method = X264_RC_ABR; // average-bitrate control
if (x264_param_apply_profile(&param, "high") < 0) {
    fprintf(stderr, "Failed to apply 'high' profile\n");
    return -1;
}

// 2. Open the encoder
x264_t *encoder = x264_encoder_open(&param);
if (!encoder) {
    fprintf(stderr, "Failed to open x264 encoder\n");
    return -1;
}

// 3. Open the output file
FILE *output_file = fopen("output.h264", "wb");
if (!output_file) {
    perror("fopen output.h264");
    x264_encoder_close(encoder);
    return -1;
}

// 4. Get and write the SPS/PPS headers
x264_nal_t *headers;
int num_headers;
if (x264_encoder_headers(encoder, &headers, &num_headers) < 0) {
    fprintf(stderr, "Failed to get encoder headers\n");
    fclose(output_file);
    x264_encoder_close(encoder);
    return -1;
}
for (int i = 0; i < num_headers; i++) {
    fwrite(headers[i].p_payload, 1, headers[i].i_payload, output_file);
}
fflush(output_file); // make sure the headers are written out immediately

// Assume I420 frame data is available (malloc'ed here as a stand-in; real data would come from a camera or image)
int width = 1920, height = 1080;
size_t y_size = width * height;
size_t uv_size = y_size / 4;
unsigned char *y_plane = malloc(y_size);
unsigned char *u_plane = malloc(uv_size);
unsigned char *v_plane = malloc(uv_size);
if (!y_plane || !u_plane || !v_plane) {
    fprintf(stderr, "Failed to allocate frame buffers\n");
    goto cleanup;
}

// 5. Encode each frame (e.g. 100 frames here)
for (int frame_idx = 0; frame_idx < 100; frame_idx++) {
    x264_picture_t pic_in, pic_out;
    x264_picture_init(&pic_in);

    // Fill in the YUV planes (assumed to be prepared already)
    pic_in.img.i_csp = X264_CSP_I420;
    pic_in.img.i_plane = 3;
    pic_in.img.plane[0] = y_plane;
    pic_in.img.plane[1] = u_plane;
    pic_in.img.plane[2] = v_plane;
    pic_in.img.i_stride[0] = width;
    pic_in.img.i_stride[1] = width / 2;
    pic_in.img.i_stride[2] = width / 2;
    pic_in.i_pts = frame_idx; // must be monotonically increasing

    // Encode
    x264_nal_t *nals;
    int i_nals;
    int frame_size = x264_encoder_encode(encoder, &nals, &i_nals, &pic_in, &pic_out);
    if (frame_size < 0) {
        fprintf(stderr, "Error encoding frame %d\n", frame_idx);
        break;
    }

    // Write all NAL units to the file
    for (int i = 0; i < i_nals; i++) {
        fwrite(nals[i].p_payload, 1, nals[i].i_payload, output_file);
    }
    fflush(output_file); // optional: write out immediately (useful for debugging or streaming to disk)
}

// 6. Flush the encoder (drain delayed frames) by passing NULL as the input picture
x264_nal_t *nals;
int i_nals;
x264_picture_t pic_out;
while (x264_encoder_delayed_frames(encoder) > 0) {
    if (x264_encoder_encode(encoder, &nals, &i_nals, NULL, &pic_out) < 0)
        break;
    for (int i = 0; i < i_nals; i++) {
        fwrite(nals[i].p_payload, 1, nals[i].i_payload, output_file);
    }
}

cleanup:
    // 7. Clean up
    free(y_plane);
    free(u_plane);
    free(v_plane);
    fclose(output_file);
    x264_encoder_close(encoder);

    printf("Encoding finished. Output saved to output.h264\n");

Build and run

# compile the C file
gcc -o my_v4l2 my_v4l2.c -lx264 -lm
# run (may require sudo for device access)
sudo ./my_v4l2
pi@raspberrypi:~/Codes/V4l2 $ ./my_v4l2 
Streaming started. Press Ctrl+C to stop.
FPS: 30.64 (captured 31 frames in 1.01 seconds)   # frame rate at 720p
FPS: 29.97 (captured 30 frames in 1.00 seconds)
FPS: 29.96 (captured 30 frames in 1.00 seconds)
FPS: 29.96 (captured 30 frames in 1.00 seconds)
FPS: 30.09 (captured 31 frames in 1.03 seconds)
FPS: 30.10 (captured 31 frames in 1.03 seconds)
FPS: 29.95 (captured 30 frames in 1.00 seconds)
FPS: 29.94 (captured 30 frames in 1.00 seconds)
FPS: 30.14 (captured 31 frames in 1.03 seconds)
FPS: 30.00 (captured 30 frames in 1.00 seconds)
^C
Stopping capture...
Done.
pi@raspberrypi:~/Codes/V4l2 $ gcc my_v4l2.c -o my_v4l2
pi@raspberrypi:~/Codes/V4l2 $ ./my_v4l2 
Streaming started. Press Ctrl+C to stop.
FPS: 4.95 (captured 5 frames in 1.01 seconds)    # frame rate at 1080p
FPS: 5.73 (captured 6 frames in 1.05 seconds)
FPS: 5.72 (captured 6 frames in 1.05 seconds)

An output.h264 file is produced in the working directory; it can be opened and played directly with VLC.

Verifying raw output

A raw-capture run (saving frames with save_frame_to_file instead of encoding them) produces raw frame dumps such as captured_frame.yuyv.

They can be viewed with ffplay:

ffplay -f rawvideo -pix_fmt yuyv422 -s 1920x1080 captured_frame.yuyv

Or, for a raw yuv420p dump, converted to MP4:

ffmpeg -f rawvideo -pix_fmt yuv420p -s 1920x1080 -r 25 -i captured_frame.yuv -c:v libx264 -pix_fmt yuv420p output.mp4

Debugging notes

1. Running the code always failed with an error

pi@raspberrypi:~/Codes/V4l2 $ ./v4l2_capture 
V4L2 Raspberry Pi Camera Capture Example
Resolution: 640x480, Format: YUYV (YUY2)
Driver: unicam
Card: unicam
Bus: platform:3f801000.csi
Version: 6.1.58
Actual: 640x480, fmt=YUYV, bpl=1280, size=614400, field=1
Buffer 0 mapped at address 0x7f894ea000 (614400 bytes)
Buffer 1 mapped at address 0x7f89454000 (614400 bytes)
Buffer 2 mapped at address 0x7f893be000 (614400 bytes)
Buffer 3 mapped at address 0x7f89328000 (614400 bytes)
VIDIOC_STREAMON failed: Invalid argument
errno = 22: Invalid argument - check format/field/buffer setup

Running vcgencmd get_camera reported the camera as unsupported:

supported=0 detected=0, libcamera interfaces=0

Yet the preview command could still show the live video stream in a small window:

libcamera-vid -t 0 --width 1280 --height 720 --framerate 30 -o - --qt-preview

(screenshot: libcamera-vid preview window)

This ruled out a hardware fault.

The problem turned out to be the boot configuration file:

sudo nano /boot/firmware/config.txt

Comment out camera_auto_detect=1 (when enabled it appears to switch automatically to the newer libcamera camera stack), then add start_x=1 below it:

# camera_auto_detect=1
start_x=1

Reboot the Raspberry Pi; after that, capturing through V4L2 works normally.
