一个简易的录屏demo

MyScreenRecord.cpp

//#define LOG_NDEBUG 0
#define LOG_TAG "myrecord"

#include <assert.h>
#include <errno.h>
#include <fcntl.h>
#include <signal.h>
#include <stdio.h>
#include <string.h>
#include <sys/stat.h>
#include <unistd.h>

#include <binder/ProcessState.h>
#include <gui/Surface.h>
#include <gui/SurfaceComposerClient.h>
#include <media/MediaCodecBuffer.h>
#include <media/NdkMediaCodec.h>
#include <media/NdkMediaFormatPriv.h>
#include <media/NdkMediaMuxer.h>
#include <media/openmax/OMX_IVCommon.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
#include <mediadrm/ICrypto.h>
#include <ui/DisplayConfig.h>
#include <ui/DisplayState.h>
#include <ui/PhysicalDisplayId.h>
#include <utils/Errors.h>

using namespace android;
namespace ui = android::ui;

// Saved signal dispositions so signalCatcher can restore them after the
// first SIGINT/SIGHUP (a second signal then terminates the process normally).
static struct sigaction gOrigSigactionINT;
static struct sigaction gOrigSigactionHUP;

// Token of the physical display being mirrored; set in main().
static PhysicalDisplayId gPhysicalDisplayId;

// Video dimensions; filled in from the display viewport in recordScreen().
static uint32_t gVideoWidth = 0;
static uint32_t gVideoHeight = 0;
static uint32_t gBitRate = 20000000;  // encoder target bitrate, bits/sec
static uint32_t gBframes = 0;         // requested B-frame count (0 = none)

static const char* kMimeTypeAvc = "video/avc";
static const char* kMimeTypeApplicationOctetstream = "application/octet-stream";
// Set by the signal handler, polled by the encoder loop to stop recording.
static volatile bool gStopRequested = false;

// Signal handler: request the record loop to stop on SIGINT/SIGHUP, and
// restore the original dispositions so a second signal kills the process.
static void signalCatcher(int signum)
{
    gStopRequested = true;
    if (signum == SIGINT || signum == SIGHUP) {
        sigaction(SIGINT, &gOrigSigactionINT, NULL);
        sigaction(SIGHUP, &gOrigSigactionHUP, NULL);
    } else {
        // Installed only for SIGINT/SIGHUP; anything else is a logic error.
        abort();
    }
}


// Install signalCatcher for SIGINT/SIGHUP (saving the previous actions for
// later restore) and ignore SIGPIPE so a broken pipe does not kill us.
static status_t configureSignals()
{
    struct sigaction act = {};
    act.sa_handler = signalCatcher;
    sigaction(SIGINT, &act, &gOrigSigactionINT);
    sigaction(SIGHUP, &act, &gOrigSigactionHUP);
    signal(SIGPIPE, SIG_IGN);
    return NO_ERROR;
}


// Create, configure and start an AVC surface-input encoder.
//
// @param displayFps       frame rate to advertise to the encoder
// @param pCodec           out: the started MediaCodec
// @param pBufferProducer  out: the encoder's input surface producer
// @return OK on success, UNKNOWN_ERROR if the codec can't be created
//         (configure/createInputSurface/start failures abort via CHECK_EQ)
static status_t prepareEncoder(float displayFps, sp<MediaCodec> *pCodec, sp<IGraphicBufferProducer> *pBufferProducer)
{
    // Encoder parameters: width/height/mime/color/bitrate/framerate,
    // I-frame interval and (optionally) B-frames.
    sp<AMessage> format = new AMessage;
    format->setInt32(KEY_WIDTH, gVideoWidth);
    format->setInt32(KEY_HEIGHT, gVideoHeight);
    format->setString(KEY_MIME, kMimeTypeAvc);
    format->setInt32(KEY_COLOR_FORMAT, OMX_COLOR_FormatAndroidOpaque);
    format->setInt32(KEY_BIT_RATE, gBitRate);
    format->setInt32(KEY_FRAME_RATE, displayFps);
    format->setInt32(KEY_I_FRAME_INTERVAL, 10);
    if(gBframes > 0)
    {
        // BUG FIX: KEY_MAX_B_FRAMES was unconditionally hard-coded to 10,
        // even when gBframes == 0 and without the Main profile B-frames
        // require. Request the configured count, only when enabled.
        format->setInt32(KEY_MAX_B_FRAMES, gBframes);
        // B-frames need at least AVC Main profile (Baseline has none).
        format->setInt32(KEY_PROFILE, AVCProfileMain);
        format->setInt32(KEY_LEVEL, AVCLevel41);
    }

    sp<android::ALooper> looper = new android::ALooper;
    looper->setName("screenrecord_looper");
    looper->start();

    // Last argument true means "create an encoder component".
    sp<MediaCodec> codec = MediaCodec::CreateByType(looper, kMimeTypeAvc, true);
    if(codec == NULL)
    {
        fprintf(stderr, "ERROR: unable to create %s codec instance\n", kMimeTypeAvc);
        return UNKNOWN_ERROR;
    }
    // CONFIGURE_FLAG_ENCODE selects the encoder configuration path.
    status_t err = codec->configure(format, NULL, NULL, MediaCodec::CONFIGURE_FLAG_ENCODE);
    CHECK_EQ(err, OK);

    sp<IGraphicBufferProducer> bufferProducer;
    // Ask the codec for an input surface (backed by OmxGraphicBufferSource);
    // frames rendered into it are fed straight to the encoder.
    err = codec->createInputSurface(&bufferProducer);
    CHECK_EQ(err, OK);

    err = codec->start();
    CHECK_EQ(err, OK);

    *pCodec = codec;
    *pBufferProducer = bufferProducer;

    return OK;
}

// Configure how the physical display's viewport is projected into the
// gVideoWidth x gVideoHeight video frame: scaled to preserve aspect ratio
// and centered (letterboxed/pillarboxed as needed).
//
// @param t             transaction that accumulates the display settings
// @param dpy           virtual display token to configure
// @param displayState  current state of the physical display (for viewport)
// @return NO_ERROR always
static status_t setDisplayProjection(SurfaceComposerClient::Transaction& t, const sp<IBinder>& dpy, const ui::DisplayState& displayState)
{
    const ui::Size& viewport = displayState.viewport;
    // Source rect: the whole display viewport.
    Rect layerStackRect(viewport);

    const float displayAspect = viewport.getHeight() / static_cast<float>(viewport.getWidth());

    const uint32_t videoWidth = gVideoWidth;
    const uint32_t videoHeight = gVideoHeight;
    uint32_t outWidth, outHeight;

    if (videoHeight > (uint32_t)(videoWidth * displayAspect)) {
        // Video frame is taller than the display: pin width, letterbox.
        outWidth = videoWidth;
        outHeight = (uint32_t)(videoWidth * displayAspect);
    } else {
        // Video frame is wider than the display: pin height, pillarbox.
        outHeight = videoHeight;
        outWidth = (uint32_t)(videoHeight / displayAspect);
    }

    // Center the scaled image inside the video frame.
    const uint32_t offX = (videoWidth - outWidth) / 2;
    const uint32_t offY = (videoHeight - outHeight) / 2;
    // BUG FIX: the destination rect was left hard-coded to a debug value
    // Rect(50, 50, 500, 500), discarding the projection computed above.
    Rect displayRect(offX, offY, offX + outWidth, offY + outHeight);

    // Hand the source/destination mapping (and rotation) to the display.
    t.setDisplayProjection(dpy, ui::ROTATION_0, layerStackRect, displayRect);
    return NO_ERROR;
}

// Create a virtual display that mirrors the physical display's layer stack
// and renders into the encoder's input surface.
static status_t prepareVirtualDisplay(const ui::DisplayState& displayState, const sp<IGraphicBufferProducer>& bufferProducer, sp<IBinder> *pDisplayHandle)
{
    // Ask SurfaceFlinger for a new, non-secure virtual display.
    const sp<IBinder> virtualDisplay =
            SurfaceComposerClient::createDisplay(String8("ScreenRecorder"), false);

    SurfaceComposerClient::Transaction transaction;
    // Route the virtual display's output into the encoder surface.
    transaction.setDisplaySurface(virtualDisplay, bufferProducer);
    // Map the source viewport onto the video frame.
    setDisplayProjection(transaction, virtualDisplay, displayState);
    // Mirror the same layer stack the physical display is showing.
    transaction.setDisplayLayerStack(virtualDisplay, displayState.layerStack);
    // Commit all of the above atomically.
    transaction.apply();

    *pDisplayHandle = virtualDisplay;
    return NO_ERROR;
}

// Drain encoded output from |encoder| and feed it to |muxer| until the
// encoder signals EOS or gStopRequested is set by the signal handler.
// The muxer tracks are created (and the muxer started) when the encoder
// reports its final output format via INFO_FORMAT_CHANGED.
//
// rawFp/display/virtualDpy/orientation are currently unused here (the
// display-rotation tracking code is commented out) but keep the interface
// aligned with the AOSP screenrecord tool.
static status_t runEncoder(const sp<MediaCodec>& encoder, AMediaMuxer *muxer, FILE* rawFp, const sp<IBinder> &display, const sp<IBinder>& virtualDpy, ui::Rotation orientation)
{
    static const int kTimeout = 250000;  // dequeue timeout in microseconds
    // Grab the encoder's output buffer array up front.
    Vector<sp<MediaCodecBuffer>> buffers;
    status_t err = encoder->getOutputBuffers(&buffers);
    CHECK_EQ(err, OK);
    ssize_t trackIdx = -1;      // video track; valid after INFO_FORMAT_CHANGED
    ssize_t metaTrackIdx = -1;  // metadata track (reserved, never written here)

    while(!gStopRequested){
        size_t bufIndex, offset, size;
        int64_t ptsUsec;
        uint32_t flags;
        // Block (up to kTimeout us) for the next encoded output buffer.
        err = encoder->dequeueOutputBuffer(&bufIndex, &offset, &size, &ptsUsec, &flags, kTimeout);

        switch(err)
        {
            case NO_ERROR:
            {
                // Codec-config data (SPS/PPS) travels in the track format, so
                // when muxing, zero the size to avoid writing it as a sample.
                if((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) != 0)
                {
                    // BUG FIX: size is size_t, so %zu (was %d).
                    ALOGD("get codec config buffer %zu bytes", size);
                    if(muxer != NULL)
                        size = 0;
                }
                if(size != 0){
                    ALOGD("get codec output buffer %zu bytes", size);
                    {
                        // Screen-rotation tracking (currently disabled).
                        /*
                        ui::DisplayState displayState;
                        err = SurfaceComposerClient::getDisplayState(display, &displayState);
                        SurfaceComposerClient::Transaction t;
                        setDisplayProjection(t, virtualDpy, displayState);
                        t.apply();
                        orientation = displayState.orientation;
                        */
                    }

                    // The muxer dislikes a zero pts; substitute "now".
                    if(ptsUsec == 0)
                        ptsUsec = systemTime(SYSTEM_TIME_MONOTONIC) / 1000;

                    // NOTE(review): a non-config buffer should only arrive
                    // after INFO_FORMAT_CHANGED has set trackIdx >= 0.
                    // Hand the encoded sample to the MediaMuxer.
                    sp<ABuffer> buffer = new ABuffer(buffers[bufIndex]->data(), buffers[bufIndex]->size());
                    AMediaCodecBufferInfo bufferInfo = {0, static_cast<int32_t>(buffer->size()), ptsUsec, flags};
                    err = AMediaMuxer_writeSampleData(muxer, trackIdx, buffer->data(), &bufferInfo);
                    if(err != NO_ERROR)
                    {
                        ALOGD("Failed writing data to muxer (err=%d)", err);
                        return err;
                    }
                }
                // Hand the output buffer back to the codec.
                err = encoder->releaseOutputBuffer(bufIndex);
                // EOS from the encoder ends the recording.
                if ((flags & MediaCodec::BUFFER_FLAG_EOS) != 0) {
                    gStopRequested = true;
                }
                break;
            }
            case -EAGAIN:
                // Timed out with no output; loop to re-check gStopRequested.
                ALOGD("Got -EAGAIN, looping");
                break;
            case INFO_FORMAT_CHANGED:
            {
                // The encoder's real output format (including csd) is now
                // known: add the muxer tracks, then start the muxer.
                ALOGD("Encoder format changed");
                sp<AMessage> newFormat;
                encoder->getOutputFormat(&newFormat);
                AMediaFormat *ndkFormat = AMediaFormat_fromMsg(&newFormat);
                trackIdx = AMediaMuxer_addTrack(muxer, ndkFormat);
                // BUG FIX: trackIdx is ssize_t, so %zd (was %d).
                ALOGD("trackIdx = %zd", trackIdx);
                AMediaFormat *metaFormat = AMediaFormat_new();
                AMediaFormat_setString(metaFormat, AMEDIAFORMAT_KEY_MIME, kMimeTypeApplicationOctetstream);
                metaTrackIdx = AMediaMuxer_addTrack(muxer, metaFormat);
                AMediaFormat_delete(metaFormat);

                ALOGD("starting muxer");
                // The muxer can only be started once it has tracks.
                err = AMediaMuxer_start(muxer);
                break;
            }
            case android::INFO_OUTPUT_BUFFERS_CHANGED:
                // Buffer array was reallocated; fetch the new one.
                ALOGD("dequeueOutputBuffer returned INFO_OUTPUT_BUFFERS_CHANGED");
                err = encoder->getOutputBuffers(&buffers);
                break;
            case INVALID_OPERATION:
                ALOGD("dequeueOutputBuffer returned INVALID_OPERATION");
                return err;
            default:
                ALOGD("GOT other result");
                return err;
        }

    }

    ALOGD("Encoder stopping (req=%d)", gStopRequested);
    return OK;
}


// Record the internal display into |fileName| as an MP4 until SIGINT/SIGHUP
// or encoder EOS.
//
// @param fileName  output path; any existing file is removed first
// @return NO_ERROR on success, otherwise the first failing status
status_t recordScreen(const char *fileName)
{
    // Catch SIGINT/SIGHUP: the handler flips gStopRequested so the encoder
    // loop exits cleanly.
    configureSignals();

    // Binder thread pool is required for callbacks from mediaserver and
    // SurfaceFlinger.
    sp<ProcessState> self = ProcessState::self();
    self->startThreadPool();

    sp<IBinder> display = SurfaceComposerClient::getPhysicalDisplayToken(gPhysicalDisplayId);
    if(display == NULL){
        fprintf(stderr, "ERROR: no display\n");
        return NAME_NOT_FOUND;
    }

    ui::DisplayState displayState;
    status_t err = SurfaceComposerClient::getDisplayState(display, &displayState);
    if(err != NO_ERROR)
    {
        fprintf(stderr, "ERROR: unable to get display state\n");
        return err;
    }

    DisplayConfig displayConfig;
    err = SurfaceComposerClient::getActiveDisplayConfig(display, &displayConfig);
    if(err != NO_ERROR){
        // BUG FIX: message previously read "displau config".
        fprintf(stderr, "ERROR: unable to get display config\n");
        return err;
    }

    // Size the video to the current display viewport.
    const ui::Size& viewport = displayState.viewport;
    // BUG FIX: the encoder rejects odd dimensions (as the original comment
    // noted) but nothing enforced it; round odd sizes down to even.
    gVideoWidth = viewport.getWidth() & ~1;
    gVideoHeight = viewport.getHeight() & ~1;
    ALOGD("gVideoWidth = %d, gVideoHeight = %d", gVideoWidth, gVideoHeight);

    sp<MediaCodec> encoder;
    sp<IGraphicBufferProducer> encoderInputSurface;
    // BUG FIX: the prepareEncoder() result was ignored; a failed setup would
    // crash later when the NULL encoder/surface was used.
    err = prepareEncoder(displayConfig.refreshRate, &encoder, &encoderInputSurface);
    if(err != NO_ERROR){
        fprintf(stderr, "ERROR: unable to prepare encoder\n");
        return err;
    }

    sp<IGraphicBufferProducer> bufferProducer = encoderInputSurface;

    // Configure a virtual display rendering into the encoder's surface.
    sp<IBinder> dpy;
    err = prepareVirtualDisplay(displayState, bufferProducer, &dpy);
    CHECK_EQ(err, OK);

    // AMediaMuxer is the NDK wrapper around MediaMuxer.
    AMediaMuxer *muxer = NULL;

    // Remove any stale output file (ENOENT just means it wasn't there).
    err = unlink(fileName);
    if(err != 0 && errno != ENOENT)
    {
        fprintf(stderr, "ERROR: couldn't remove existing file\n");
        abort();
    }
    int fd = open(fileName, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
    if(fd < 0)
    {
        fprintf(stderr, "ERROR: couldn't open file\n");
        abort();
    }

    // Create the MP4 muxer
    // (wraps: new MediaMuxer(fd, OUTPUT_FORMAT_MPEG_4)).
    muxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4);

    // MPEG4Writer dup()s the fd, so our copy can be closed right away.
    close(fd);

    // BUG FIX: a NULL muxer was previously passed straight to runEncoder.
    if(muxer == NULL)
    {
        fprintf(stderr, "ERROR: couldn't create muxer\n");
        return UNKNOWN_ERROR;
    }

    // Main loop: drain encoder output into the muxer.
    err = runEncoder(encoder, muxer, NULL, display, dpy, displayState.orientation);

    // Tear down in reverse order of creation.
    encoderInputSurface = NULL;
    SurfaceComposerClient::destroyDisplay(dpy);

    encoder->stop();
    status_t muxerErr = AMediaMuxer_stop(muxer);
    // BUG FIX: the muxer handle was leaked (no AMediaMuxer_delete).
    AMediaMuxer_delete(muxer);
    encoder->release();

    // BUG FIX: failures from runEncoder / muxer stop were swallowed
    // (function unconditionally returned OK).
    if(err != NO_ERROR)
        return err;
    return muxerErr == AMEDIA_OK ? OK : muxerErr;
}




int main(int argc, char** argv)
{
    std::optional<PhysicalDisplayId> displayId = SurfaceComposerClient::getInternalDisplayId();
    if(!displayId)
    {
        fprintf(stderr, "Failed to get token for internal display\n");
        return 1;
    }
    
    gPhysicalDisplayId = *displayId;
    const char* fileName = argv[1];
    status_t err = recordScreen(fileName);
    
    return OK;
    
}

 

Android.bp

// Build rule for the standalone screen-record demo binary.
cc_binary{
    name: "myscreenrecord",

    srcs: [
        "myscreenrecord.cpp",
    ],

    // Framework/media libraries linked at runtime.
    shared_libs:[
        "libstagefright",
        "libmedia",
        "libmediandk",
        "libmedia_omx",
        "libutils",
        "libbinder",
        "libstagefright_foundation",
        "libui",
        "libgui",
        "libcutils",
        "liblog",
    ],

    // Header-only dependencies (no linkage).
    header_libs: [
        "libmediadrm_headers",
        "libmediametrics_headers",
    ],

    // Extra include paths for stagefright internals and OpenMAX headers.
    include_dirs: [
        "frameworks/av/media/libstagefright",
        "frameworks/av/media/libstagefright/include",
        "frameworks/native/include/media/openmax",
    ],
}

 

MediaMuxer

看完上面的demo大致可以了解MediaMuxer是怎么使用的了。下面来看看MediaMuxer的代码:

构造函数

传入fd和outputformat,fd是打开的写入文件描述符,OutputFormat是输出类型,这是一个枚举类型,定义在MediaMuxer.h中,支持的输出类型有

    // Container formats MediaMuxer can produce (declared in MediaMuxer.h).
    enum OutputFormat {
        OUTPUT_FORMAT_MPEG_4      = 0,  // MP4 container
        OUTPUT_FORMAT_WEBM        = 1,  // WebM (Matroska-derived, like MKV)
        OUTPUT_FORMAT_THREE_GPP   = 2,  // 3GP container
        OUTPUT_FORMAT_HEIF        = 3,  // HEIF (HEVC-encoded still images)
        OUTPUT_FORMAT_OGG         = 4,  // Ogg audio
        OUTPUT_FORMAT_LIST_END // must be last - used to validate format type
    };
// True for the formats written with the MP4 (ISO-BMFF) writer.
static bool isMp4Format(MediaMuxer::OutputFormat format) {
    return format == MediaMuxer::OUTPUT_FORMAT_MPEG_4 ||
           format == MediaMuxer::OUTPUT_FORMAT_THREE_GPP ||
           format == MediaMuxer::OUTPUT_FORMAT_HEIF;
}

其中MPEG4、3GPP、HEIF都用MP4封装

// Constructor: choose a MediaWriter implementation for the requested
// container format. |fd| is the already-open output file descriptor.
// On success the muxer moves to INITIALIZED; an unsupported format
// leaves it UNINITIALIZED (mWriter stays NULL).
MediaMuxer::MediaMuxer(int fd, OutputFormat format)
    : mFormat(format),
      mState(UNINITIALIZED) {
    // MPEG4 / 3GPP / HEIF all use the MP4 writer.
    if (isMp4Format(format)) {
        mWriter = new MPEG4Writer(fd);
    } else if (format == OUTPUT_FORMAT_WEBM) {
        mWriter = new WebmWriter(fd);
    } else if (format == OUTPUT_FORMAT_OGG) {
        mWriter = new OggWriter(fd);
    }

    if (mWriter != NULL) {
        mFileMeta = new MetaData;
        if (format == OUTPUT_FORMAT_HEIF) {
            // Note that the key uses recorder file types.
            mFileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_HEIF);
        } else if (format == OUTPUT_FORMAT_OGG) {
            mFileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_OGG);
        }
        mState = INITIALIZED;
    }
}

原生支持的还有AACWriter、AMRWriter、MPEG2TSWriter

 

addTrack

给封装的文件添加Track,先用output Media Format创建MediaAdapter,然后调用addSource方法添加到MediaWriter中

// Add one track described by |format| (codec output format). Wraps the
// format in a MediaAdapter and registers it with the writer via addSource().
// Must be called between construction and start().
// Returns the new track index, or a negative error code.
ssize_t MediaMuxer::addTrack(const sp<AMessage> &format) {
    Mutex::Autolock autoLock(mMuxerLock);

    if (format.get() == NULL) {
        ALOGE("addTrack() get a null format");
        return -EINVAL;
    }

    if (mState != INITIALIZED) {
        ALOGE("addTrack() must be called after constructor and before start().");
        return INVALID_OPERATION;
    }

    // Writers consume MetaData, not AMessage; convert first.
    sp<MetaData> trackMeta = new MetaData;
    convertMessageToMetaData(format, trackMeta);

    sp<MediaAdapter> newTrack = new MediaAdapter(trackMeta);
    status_t result = mWriter->addSource(newTrack);
    if (result != OK) {
        return -1;
    }
    // Optional time-lapse capture rate; failure here is non-fatal.
    float captureFps = -1.0;
    if (format->findAsFloat("time-lapse-fps", &captureFps)) {
        ALOGV("addTrack() time-lapse-fps: %f", captureFps);
        result = mWriter->setCaptureRate(captureFps);
        if (result != OK) {
            ALOGW("addTrack() setCaptureRate failed :%d", result);
        }
    }
    // The track index is its position in mTrackList.
    return mTrackList.add(newTrack);
}

 

start

实际会开启多线程做文件写入

// Start the writer (which spawns its own writer thread). Only valid from
// the INITIALIZED state, i.e. after at least construction and addTrack().
status_t MediaMuxer::start() {
    Mutex::Autolock autoLock(mMuxerLock);
    if (mState == INITIALIZED) {
        mState = STARTED;
        // Muxing pre-encoded samples, not capturing live.
        mFileMeta->setInt32(kKeyRealTimeRecording, false);
        return mWriter->start(mFileMeta.get());
    } else {
        ALOGE("start() is called in invalid state %d", mState);
        return INVALID_OPERATION;
    }
}

 

writeSampleData

找到trackIndex对应的MediaAdapter,然后将数据写入到MediaAdapter中,MediaWriter中的线程会从MediaAdapter读取数据写入文件,MediaAdapter用到了消费者生产者模型

// Queue one encoded sample on track |trackIndex|. The sample is wrapped in
// a MediaBuffer and pushed into the track's MediaAdapter (producer side);
// the writer thread consumes it. pushBuffer() blocks until consumed.
status_t MediaMuxer::writeSampleData(const sp<ABuffer> &buffer, size_t trackIndex,
                                     int64_t timeUs, uint32_t flags) {
    Mutex::Autolock autoLock(mMuxerLock);

    if (buffer.get() == NULL) {
        ALOGE("WriteSampleData() get an NULL buffer.");
        return -EINVAL;
    }

    if (mState != STARTED) {
        ALOGE("WriteSampleData() is called in invalid state %d", mState);
        return INVALID_OPERATION;
    }

    if (trackIndex >= mTrackList.size()) {
        ALOGE("WriteSampleData() get an invalid index %zu", trackIndex);
        return -EINVAL;
    }

    MediaBuffer* mediaBuffer = new MediaBuffer(buffer);

    mediaBuffer->add_ref(); // Released in MediaAdapter::signalBufferReturned().
    mediaBuffer->set_range(buffer->offset(), buffer->size());

    // Attach timing/flag metadata the writer reads per sample.
    MetaDataBase &sampleMetaData = mediaBuffer->meta_data();
    sampleMetaData.setInt64(kKeyTime, timeUs);
    // Just set the kKeyDecodingTime as the presentation time for now.
    sampleMetaData.setInt64(kKeyDecodingTime, timeUs);

    // SYNCFRAME marks keyframes so the writer can build the sync table.
    if (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME) {
        sampleMetaData.setInt32(kKeyIsSyncFrame, true);
    }

    if (flags & MediaCodec::BUFFER_FLAG_MUXER_DATA) {
        sampleMetaData.setInt32(kKeyIsMuxerData, 1);
    }

    if (flags & MediaCodec::BUFFER_FLAG_EOS) {
        sampleMetaData.setInt32(kKeyIsEndOfStream, 1);
        ALOGV("BUFFER_FLAG_EOS");
    }

    sp<MediaAdapter> currentTrack = mTrackList[trackIndex];
    // This pushBuffer will wait until the mediaBuffer is consumed.
    return currentTrack->pushBuffer(mediaBuffer);
}

 

stop

关闭MediaAdapter的读写和MediaWriter中的线程

// Stop muxing: shut down every track's MediaAdapter (unblocking any pending
// pushBuffer) and then stop the writer thread, which finalizes the file.
status_t MediaMuxer::stop() {
    Mutex::Autolock autoLock(mMuxerLock);
    if (mState == STARTED) {
        mState = STOPPED;
        // Stop the producer side of each track first.
        for (size_t i = 0; i < mTrackList.size(); i++) {
            if (mTrackList[i]->stop() != OK) {
                return INVALID_OPERATION;
            }
        }
        status_t err = mWriter->stop();
        if (err != OK) {
            ALOGE("stop() err: %d", err);
        }
        return err;
    } else {
        ALOGE("stop() is called in invalid state %d", mState);
        return INVALID_OPERATION;
    }
}

 

posted @ 2022-05-20 17:55  青山渺渺  阅读(640)  评论(0编辑  收藏  举报