【树莓派】搭建树莓派的交叉编译环境
手里的树莓派是3B+,性能有点弱鸡,在编译时总是会卡死,考虑在ubuntu虚拟机环境下搭建一个交叉编译环境,提高效率
安装交叉编译链
需要先在Ubuntu环境下安装交叉编译链
# 安装32位ARM交叉编译器(适用于大多数树莓派)
sudo apt update
sudo apt install -y gcc-arm-linux-gnueabihf g++-arm-linux-gnueabihf
# 安装64位
sudo apt install gcc-aarch64-linux-gnu g++-aarch64-linux-gnu
# 验证安装
arm-linux-gnueabihf-gcc --version
获取树莓派的文件系统
最简单不出错的方法就是直接从树莓派上拷贝系统库到Ubuntu环境下
# 在Ubuntu主机上创建sysroot目录
mkdir -p ~/rpi-sysroot
# 从树莓派复制必要的库文件(需要在树莓派和Ubuntu之间建立SSH连接)
# 替换 pi@raspberrypi.local(pi@192.168.3.17) 为你的树莓派 IP 或主机名
rsync -avz --copy-unsafe-links --rsync-path="sudo rsync" \
--exclude='/usr/share/doc' \
--exclude='/usr/share/man' \
pi@raspberrypi.local:/lib \
pi@raspberrypi.local:/usr/lib \
pi@raspberrypi.local:/usr/include \
~/rpi-sysroot/
单文件编译
创建源文件
#include <stdio.h>
int main() {
printf("Hello from Raspberry Pi!\n");
return 0;
}
交叉编译
arm-linux-gnueabihf-gcc \
--sysroot="$HOME/rpi-sysroot" \
-o hello_pi hello.c
检查生成的文件架构
file hello_pi
传输到树莓派
scp hello_pi pi@raspberrypi.local:~/
ssh pi@raspberrypi.local ./hello_pi
Makefile编译安装
这是一个使用v4l2取流,x264编码,并将编码后视频写入文件的一个demo,可计算每秒钟帧率
my_v4l2.cpp
#include <iostream>
#include <fstream>
#include <vector>
#include <memory>
#include <cstring>
#include <csignal>
#include <chrono>
#include <thread>
#include <stdexcept>
#include <system_error>
#include <atomic> // C++11 支持 std::atomic
// Linux / V4L2 headers
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <linux/videodev2.h>
// x264
extern "C" {
#include <x264.h>
}
// Global stop flag, flipped by the SIGINT handler so the capture loop
// can exit cleanly; std::atomic keeps handler/loop access well-defined.
std::atomic<bool> keep_running(false); // main() sets it to true before the loop starts
// SIGINT (Ctrl+C) handler: request shutdown of the main loop.
void signal_handler(int) {
    keep_running.store(false);
}
// ======================
// RAII: memory-mapped V4L2 buffer
// ======================
// Owns one mmap()'d region and unmaps it in the destructor. Movable
// but not copyable, so it can live safely inside std::vector.
class MappedBuffer {
public:
    MappedBuffer() : ptr_(nullptr), length_(0) {}
    // Maps `length` bytes of `fd` at `offset`, read/write, shared.
    // Throws std::system_error if mmap fails.
    MappedBuffer(int fd, size_t length, off_t offset)
        : ptr_(nullptr), length_(length) {
        void* p = mmap(nullptr, length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, offset);
        if (p == MAP_FAILED) {
            throw std::system_error(errno, std::generic_category(), "mmap failed");
        }
        ptr_ = static_cast<unsigned char*>(p);
    }
    ~MappedBuffer() {
        if (ptr_ != nullptr) {
            munmap(ptr_, length_);
        }
    }
    MappedBuffer(const MappedBuffer&) = delete;
    MappedBuffer& operator=(const MappedBuffer&) = delete;
    // `noexcept` replaces the deprecated C++98 dynamic exception
    // specification `throw()`; a noexcept move also lets std::vector
    // move (rather than copy) elements on reallocation.
    MappedBuffer(MappedBuffer&& other) noexcept
        : ptr_(other.ptr_), length_(other.length_) {
        other.ptr_ = nullptr;
        other.length_ = 0;
    }
    MappedBuffer& operator=(MappedBuffer&& other) noexcept {
        if (this != &other) {
            if (ptr_ != nullptr) {
                munmap(ptr_, length_);
            }
            ptr_ = other.ptr_;
            length_ = other.length_;
            other.ptr_ = nullptr;
            other.length_ = 0;
        }
        return *this;
    }
    unsigned char* data() const { return ptr_; }  // nullptr when empty/moved-from
    size_t size() const { return length_; }
private:
    unsigned char* ptr_;  // mapped base address; nullptr when not owning
    size_t length_;       // mapped length in bytes
};
// ======================
// V4L2 video capture (mmap streaming I/O)
// ======================
// Opens the device, sets a YUYV format, maps BUF_COUNT driver buffers
// into user space and starts streaming. Frames are handed out
// zero-copy via dequeueFrame(); the caller must return each buffer
// with requeueBuffer() once it is done with the data.
class V4L2Capture {
public:
    static const int BUF_COUNT = 4;
    static const char* DEFAULT_DEVICE;
    // Throws std::system_error / std::runtime_error if the device
    // cannot be opened or any setup ioctl fails.
    // NOTE: the initializer list follows the member declaration order
    // (device_, fd_, width_, height_); the previous ordering triggered
    // -Wreorder under -Wall.
    V4L2Capture(const std::string& device, int width, int height)
        : device_(device), fd_(-1), width_(width), height_(height) {
        // O_NONBLOCK makes VIDIOC_DQBUF return EAGAIN instead of
        // blocking, keeping the capture loop responsive to SIGINT.
        fd_ = open(device_.c_str(), O_RDWR | O_NONBLOCK);
        if (fd_ < 0) {
            throw std::system_error(errno, std::generic_category(),
                                    "Failed to open V4L2 device: " + device_);
        }
        setupFormat();
        requestBuffers();
        mapAndQueueBuffers();
        startStreaming();
    }
    ~V4L2Capture() {
        stopStreaming();
        if (fd_ >= 0) close(fd_);
    }
    // One dequeued frame. `data` points directly into the mmap'd driver
    // buffer `index` and stays valid until requeueBuffer(index).
    struct Frame {
        unsigned char* data;
        size_t size;
        int index;
    };
    // Polls (10 ms interval) until a frame is available or the global
    // keep_running flag is cleared. Returns false on shutdown request.
    bool dequeueFrame(Frame& out_frame) {
        v4l2_buffer buf = {};
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        while (keep_running.load()) {
            if (ioctl(fd_, VIDIOC_DQBUF, &buf) == 0) {
                out_frame.data = buffers_[buf.index].data();
                out_frame.size = buf.bytesused;
                out_frame.index = buf.index;
                return true;
            }
            // EAGAIN: no frame ready yet (non-blocking fd).
            // EIO: transient error some drivers report; retry.
            if (errno == EAGAIN || errno == EIO) {
                std::this_thread::sleep_for(std::chrono::milliseconds(10));
                continue;
            } else {
                throw std::system_error(errno, std::generic_category(), "VIDIOC_DQBUF failed");
            }
        }
        return false;
    }
    // Give a buffer back to the driver so it can be filled again.
    void requeueBuffer(int index) {
        v4l2_buffer buf = {};
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = index;
        if (ioctl(fd_, VIDIOC_QBUF, &buf) < 0) {
            throw std::system_error(errno, std::generic_category(), "VIDIOC_QBUF failed");
        }
    }
    int getWidth() const { return width_; }
    int getHeight() const { return height_; }
private:
    // Negotiate packed YUYV (YUY2), progressive, at the requested size.
    // NOTE(review): the driver may adjust width/height in `fmt`; this
    // code assumes the request is honored — verify on the target camera.
    void setupFormat() {
        v4l2_format fmt = {};
        fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        fmt.fmt.pix.width = width_;
        fmt.fmt.pix.height = height_;
        fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
        fmt.fmt.pix.field = V4L2_FIELD_NONE;
        if (ioctl(fd_, VIDIOC_S_FMT, &fmt) < 0) {
            throw std::system_error(errno, std::generic_category(), "VIDIOC_S_FMT failed");
        }
    }
    // Ask the driver for BUF_COUNT mmap-able buffers.
    void requestBuffers() {
        v4l2_requestbuffers req = {};
        req.count = BUF_COUNT;
        req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        req.memory = V4L2_MEMORY_MMAP;
        if (ioctl(fd_, VIDIOC_REQBUFS, &req) < 0) {
            throw std::system_error(errno, std::generic_category(), "VIDIOC_REQBUFS failed");
        }
        // The driver may grant fewer buffers than requested.
        if (req.count < static_cast<unsigned int>(BUF_COUNT)) {
            throw std::runtime_error("Insufficient buffer count");
        }
    }
    // Query each buffer's offset/length, mmap it, and queue it so the
    // driver can start filling it once streaming begins.
    void mapAndQueueBuffers() {
        buffers_.reserve(BUF_COUNT);
        for (int i = 0; i < BUF_COUNT; ++i) {
            v4l2_buffer buf = {};
            buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buf.memory = V4L2_MEMORY_MMAP;
            buf.index = i;
            if (ioctl(fd_, VIDIOC_QUERYBUF, &buf) < 0) {
                throw std::system_error(errno, std::generic_category(), "VIDIOC_QUERYBUF failed");
            }
            buffers_.push_back(MappedBuffer(fd_, buf.length, buf.m.offset));
            if (ioctl(fd_, VIDIOC_QBUF, &buf) < 0) {
                throw std::system_error(errno, std::generic_category(), "VIDIOC_QBUF failed");
            }
        }
    }
    void startStreaming() {
        enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if (ioctl(fd_, VIDIOC_STREAMON, &type) < 0) {
            throw std::system_error(errno, std::generic_category(), "VIDIOC_STREAMON failed");
        }
    }
    // Best-effort stop; called from the destructor, so errors are ignored.
    void stopStreaming() {
        enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        ioctl(fd_, VIDIOC_STREAMOFF, &type);
    }
    std::string device_;                 // device path, e.g. "/dev/video0"
    int fd_;                             // device file descriptor, -1 when closed
    int width_, height_;                 // requested capture resolution
    std::vector<MappedBuffer> buffers_;  // mmap'd driver buffers
};
const char* V4L2Capture::DEFAULT_DEVICE = "/dev/video0";
// ======================
// YUYV (YUY2, packed) -> I420 (planar) conversion
// ======================
// Walks the packed stream one 4-byte macropixel (two pixels) at a
// time. Both luma samples of each pair are kept; chroma is taken only
// from even rows, producing the 2x2 subsampling I420 requires.
void yuyv_to_i420(const unsigned char* yuyv,
                  unsigned char* y, unsigned char* u, unsigned char* v,
                  int width, int height) {
    const int pairs_per_row = width / 2;  // chroma plane width
    for (int row = 0; row < height; ++row) {
        const unsigned char* src = yuyv + row * width * 2;
        unsigned char* dst_y = y + row * width;
        const bool keep_chroma = (row % 2 == 0);
        unsigned char* dst_u = u + (row / 2) * pairs_per_row;
        unsigned char* dst_v = v + (row / 2) * pairs_per_row;
        for (int p = 0; p < pairs_per_row; ++p, src += 4) {
            // Macropixel layout: Y0 U Y1 V
            *dst_y++ = src[0];
            *dst_y++ = src[2];
            if (keep_chroma) {
                dst_u[p] = src[1];
                dst_v[p] = src[3];
            }
        }
    }
}
// ======================
// x264 编码器封装
// ======================
class X264Encoder {
public:
X264Encoder(int width, int height, const std::string& output_path)
: width_(width), height_(height), output_path_(output_path), encoder_(nullptr), frame_count_(0) {
file_.open(output_path_.c_str(), std::ios::binary);
if (!file_.is_open()) {
throw std::runtime_error("Cannot open output file: " + output_path_);
}
x264_param_default_preset(¶m_, "ultrafast", "zerolatency");
param_.i_width = width_;
param_.i_height = height_;
param_.i_fps_num = 30;
param_.i_fps_den = 1;
param_.i_keyint_max = 30;
param_.b_intra_refresh = 1;
param_.rc.i_bitrate = 2000;
param_.rc.i_rc_method = X264_RC_ABR;
param_.i_threads = 1;
param_.b_repeat_headers = 1;
param_.i_level_idc = 40;
if (x264_param_apply_profile(¶m_, "high") < 0) {
throw std::runtime_error("x264_param_apply_profile failed");
}
encoder_ = x264_encoder_open(¶m_);
if (!encoder_) {
throw std::runtime_error("x264_encoder_open failed");
}
// Write SPS/PPS
x264_nal_t* nals;
int i_nals;
x264_encoder_headers(encoder_, &nals, &i_nals);
for (int i = 0; i < i_nals; ++i) {
file_.write(reinterpret_cast<char*>(nals[i].p_payload), nals[i].i_payload);
}
file_.flush();
}
~X264Encoder() {
flush();
if (encoder_) {
x264_encoder_close(encoder_);
}
}
X264Encoder(const X264Encoder&) = delete;
X264Encoder& operator=(const X264Encoder&) = delete;
bool encodeFrame(unsigned char* y, unsigned char* u, unsigned char* v) {
x264_picture_t pic_in, pic_out;
x264_picture_init(&pic_in);
pic_in.img.i_csp = X264_CSP_I420;
pic_in.img.i_plane = 3;
pic_in.img.plane[0] = y;
pic_in.img.plane[1] = u;
pic_in.img.plane[2] = v;
pic_in.img.i_stride[0] = width_;
pic_in.img.i_stride[1] = width_ / 2;
pic_in.img.i_stride[2] = width_ / 2;
pic_in.i_pts = frame_count_++;
x264_nal_t* nals;
int i_nals;
int result = x264_encoder_encode(encoder_, &nals, &i_nals, &pic_in, &pic_out);
if (result < 0) {
std::cerr << "x264 encode error\n";
return false;
} else if (result > 0) {
for (int i = 0; i < i_nals; ++i) {
file_.write(reinterpret_cast<char*>(nals[i].p_payload), nals[i].i_payload);
}
file_.flush();
}
return true;
}
private:
void flush() {
if (!encoder_) return;
x264_picture_t pic_out, pic_flush;
x264_picture_init(&pic_flush);
x264_nal_t* nals;
int i_nals;
while (x264_encoder_encode(encoder_, &nals, &i_nals, &pic_flush, &pic_out) > 0) {
for (int i = 0; i < i_nals; ++i) {
file_.write(reinterpret_cast<char*>(nals[i].p_payload), nals[i].i_payload);
}
}
file_.flush();
}
x264_param_t param_;
x264_t* encoder_;
std::ofstream file_;
int width_, height_;
std::string output_path_;
int64_t frame_count_;
};
// ======================
// Entry point
// ======================
// Pipeline: V4L2 capture (packed YUYV) -> yuyv_to_i420 -> x264 encode
// -> output.h264 (raw H.264 stream). Prints the measured FPS roughly
// once per second; Ctrl+C (SIGINT) requests a clean shutdown.
int main() {
    keep_running = true;  // must be set before the capture loop starts
    std::signal(SIGINT, signal_handler);
    try {
        V4L2Capture capture("/dev/video0", 1280, 720);
        X264Encoder encoder(capture.getWidth(), capture.getHeight(), "output.h264");
        // I420 plane sizes: Y is full resolution, U and V are each 1/4.
        const int y_size = capture.getWidth() * capture.getHeight();
        const int uv_size = y_size / 4;
        std::unique_ptr<unsigned char[]> y_plane(new unsigned char[y_size]);
        std::unique_ptr<unsigned char[]> u_plane(new unsigned char[uv_size]);
        std::unique_ptr<unsigned char[]> v_plane(new unsigned char[uv_size]);
        int frame_count = 0;  // frames within the current 1-second window (reset each second)
        auto last_time = std::chrono::steady_clock::now();
        std::cout << "Capturing and encoding... Press Ctrl+C to stop.\n";
        V4L2Capture::Frame frame;
        while (keep_running.load()) {
            // Returns false when keep_running is cleared by SIGINT.
            if (!capture.dequeueFrame(frame)) {
                break;
            }
            yuyv_to_i420(frame.data, y_plane.get(), u_plane.get(), v_plane.get(),
                         capture.getWidth(), capture.getHeight());
            if (!encoder.encodeFrame(y_plane.get(), u_plane.get(), v_plane.get())) {
                // NOTE: frame_count here is the count within the current
                // second, not the total since start.
                std::cerr << "Encoding failed at frame " << frame_count << "\n";
                break;
            }
            frame_count++;
            // The frame data has been consumed (converted + encoded), so
            // the mmap buffer can go back to the driver's queue.
            capture.requeueBuffer(frame.index);
            auto now = std::chrono::steady_clock::now();
            auto elapsed = std::chrono::duration<double>(now - last_time).count();
            if (elapsed >= 1.0) {
                std::cout << "FPS: " << (frame_count / elapsed)
                          << " (" << frame_count << " frames in " << elapsed << "s)\n";
                frame_count = 0;
                last_time = now;
            }
        }
        std::cout << "\nDone. Output saved to output.h264\n";
    } catch (const std::exception& e) {
        std::cerr << "Error: " << e.what() << "\n";
        return EXIT_FAILURE;
    }
    return EXIT_SUCCESS;
}
Makefile Demo
以下是my_v4l2.cpp和xop::rtspserver一起编译的一个makefile demo,可以按需修改
# Debug macro: defines _DEBUG for all builds
DEBUG = -D_DEBUG
# Target executables
TARGET1 = my_v4l2
TARGET2 = rtsp_h264_file
# TARGET3 = v4l2_rtsp_server
# Directory for intermediate object files
OBJS_PATH = objs
# Native build by default; run `make TARGET_ARCH=arm` to cross-compile
ifeq ($(TARGET_ARCH),arm)
# ========== Cross-compile configuration (64-bit ARM) ==========
CROSS_COMPILE = aarch64-linux-gnu-
CXX = $(CROSS_COMPILE)g++
CC = $(CROSS_COMPILE)gcc
AR = $(CROSS_COMPILE)ar
SYSROOT = /home/zx/rpi-sysroot
# Compiler flags
CFLAGS = -Wall -O2 --sysroot=$(SYSROOT) $(DEBUG)
CXXFLAGS = $(CFLAGS) -std=c++11
INC += -I$(SYSROOT)/usr/include
# Linker flags: --sysroot is required at link time as well!
LDFLAGS = --sysroot=$(SYSROOT) \
	-L./lib \
	-L$(SYSROOT)/usr/lib \
	-L$(SYSROOT)/usr/lib/aarch64-linux-gnu \
	-Wl,-rpath-link=$(SYSROOT)/usr/lib \
	-Wl,-rpath-link=$(SYSROOT)/usr/lib/aarch64-linux-gnu \
	-lrt -pthread -lpthread -ldl -lm -lx264
else
# ========== Native build configuration ==========
CXX = g++
CC = gcc
AR = ar
CFLAGS = -Wall -O2 $(DEBUG)
CXXFLAGS = $(CFLAGS) -std=c++11
INC += -I/usr/include
LDFLAGS = -lx264 -lrt -pthread -lpthread -ldl -lm
endif
# Common include paths (always added)
INC += -I$(shell pwd)/src/ \
	-I$(shell pwd)/src/base \
	-I$(shell pwd)/src/net \
	-I$(shell pwd)/src/xop \
	-I$(shell pwd)/src/3rdpart
# ========== Sources and objects ==========
SRC_BASE = $(wildcard ./src/base/*.cpp)
SRC_NET = $(wildcard ./src/net/*.cpp)
SRC_XOP = $(wildcard ./src/xop/*.cpp)
SRC_V4L2 = $(wildcard ./src/driver/v4l2/*.cpp)
OBJ_BASE = $(patsubst ./src/base/%.cpp, $(OBJS_PATH)/%.o, $(SRC_BASE))
OBJ_NET = $(patsubst ./src/net/%.cpp, $(OBJS_PATH)/%.o, $(SRC_NET))
OBJ_XOP = $(patsubst ./src/xop/%.cpp, $(OBJS_PATH)/%.o, $(SRC_XOP))
OBJ_V4L2 = $(patsubst ./src/driver/v4l2/%.cpp, $(OBJS_PATH)/%.o, $(SRC_V4L2))
# Example program sources (note: full paths)
SRC_TARGET1 = ./example/my_v4l2.cpp
SRC_TARGET2 = ./example/rtsp_h264_file.cpp
# SRC_TARGET3 = ./example/v4l2_rtsp_server.cpp
OBJ_TARGET1 = $(OBJS_PATH)/my_v4l2.o
OBJ_TARGET2 = $(OBJS_PATH)/rtsp_h264_file.o
# OBJ_TARGET3 = $(OBJS_PATH)/v4l2_rtsp_server.o
# ========== Build rules ==========
all: BUILD_DIR $(TARGET1) $(TARGET2)
BUILD_DIR:
	@mkdir -p $(OBJS_PATH)
# Target 1: my_v4l2
$(TARGET1): $(OBJ_BASE) $(OBJ_NET) $(OBJ_XOP) $(OBJ_TARGET1)
	$(CXX) $^ -o $@ $(LDFLAGS)
# Target 2: rtsp_h264_file
$(TARGET2): $(OBJ_BASE) $(OBJ_NET) $(OBJ_XOP) $(OBJ_TARGET2)
	$(CXX) $^ -o $@ $(LDFLAGS)
# Target 3: v4l2_rtsp_server (links the v4l2 driver plus my_v4l2)
# $(TARGET3): $(OBJ_BASE) $(OBJ_NET) $(OBJ_XOP) $(OBJ_V4L2) $(OBJ_TARGET3)
# 	$(CXX) $^ -o $@ $(LDFLAGS)
# ========== Compile rules ==========
# `| BUILD_DIR` is an order-only prerequisite: it guarantees objs/
# exists without forcing recompiles when the directory timestamp changes.
$(OBJS_PATH)/%.o: ./example/%.cpp | BUILD_DIR
	$(CXX) -c $< -o $@ $(CXXFLAGS) $(INC)
$(OBJS_PATH)/%.o: ./src/base/%.cpp | BUILD_DIR
	$(CXX) -c $< -o $@ $(CXXFLAGS) $(INC)
$(OBJS_PATH)/%.o: ./src/net/%.cpp | BUILD_DIR
	$(CXX) -c $< -o $@ $(CXXFLAGS) $(INC)
$(OBJS_PATH)/%.o: ./src/xop/%.cpp | BUILD_DIR
	$(CXX) -c $< -o $@ $(CXXFLAGS) $(INC)
$(OBJS_PATH)/%.o: ./src/driver/v4l2/%.cpp | BUILD_DIR
	$(CXX) -c $< -o $@ $(CXXFLAGS) $(INC)
# ========== Install ==========
# Copies the freshly built binaries to the Raspberry Pi over SSH
install: all
	scp $(TARGET1) $(TARGET2) pi@192.168.3.17:/home/pi/app/
	@echo "Executables copied to Raspberry Pi"
# ========== Clean ==========
clean:
	-rm -rf $(OBJS_PATH) $(TARGET1) $(TARGET2)
.PHONY: all clean install BUILD_DIR

浙公网安备 33010602011771号