iOS: Capturing Video from the Camera and Hardware-Encoding It to H.264

Reference; the code below is adapted from these Jianshu posts:

https://www.jianshu.com/p/eccdcf43d7d2

https://www.jianshu.com/p/0d18f04e524d

plus code from other authors on GitHub.

//  ViewController.h

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <VideoToolbox/VideoToolbox.h>

@interface ViewController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate>

@property (strong, nonatomic) IBOutlet UIImageView *ShowCapView;
@property (strong, nonatomic) IBOutlet UIButton *ShowButton;

@end

//
//  ViewController.m
//
//  Created by yizhimao on 2020/3/31.
//

#import "ViewController.h"

#import <CoreGraphics/CoreGraphics.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreMedia/CoreMedia.h>
typedef NS_ENUM(NSUInteger, VEVideoEncoderProfileLevel)
{
    VEVideoEncoderProfileLevelBP,
    VEVideoEncoderProfileLevelMP,
    VEVideoEncoderProfileLevelHP
};
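
// Note (an addition, not in the original post): the enum above is never actually used
// below -- the profile level is hard-coded to Baseline 3.1 in EncH264DataCreateFun.
// A minimal helper mapping it onto VideoToolbox's profile-level constants could look like:
static CFStringRef VEProfileLevelToVTProfile(VEVideoEncoderProfileLevel level)
{
    switch (level) {
        case VEVideoEncoderProfileLevelMP:
            return kVTProfileLevel_H264_Main_AutoLevel;
        case VEVideoEncoderProfileLevelHP:
            return kVTProfileLevel_H264_High_AutoLevel;
        case VEVideoEncoderProfileLevelBP:
        default:
            return kVTProfileLevel_H264_Baseline_AutoLevel;
    }
}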

@interface ViewController ()
// Encoder: the VideoToolbox compression session (encoding parameters are set on it)
@property (assign, nonatomic) VTCompressionSessionRef compressionSessionRef;

@end

@implementation ViewController{
    AVCaptureSession *_captureSession;
    AVCaptureVideoPreviewLayer *_captureLayer;
    BOOL isCapturing;
    AVCaptureConnection *_VideoConnection; // used in the delegate callback to tell video data from audio data
    AVCaptureConnection *_AudioConnection;
}

- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view.
   _captureSession = [[AVCaptureSession alloc] init]; // first, create the AVCaptureSession that coordinates capture
    // Don't use the app's shared audio session, to avoid capture being interrupted unexpectedly
    _captureSession.usesApplicationAudioSession = NO;
    isCapturing = NO;
    self.ShowCapView.frame = CGRectMake(20, 20 , 200, 200);
    self.ShowButton.frame= CGRectMake(30, 230 , 60, 50);
    [self setCapSession];
    [self ShowPic];
    [self EncH264DataCreateFun];
    
}



-(void)setCapSession{
    // Configure the capture input (the camera)
    NSError *error = nil;
    // Get a capture device, e.g. the front or back camera
    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    // Wrap the device in a capture input object
    AVCaptureDeviceInput *CapvideoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
    if (error) {
        NSLog(@"Error getting video input device: %@", error.description);
    }
    
   //-------
   
    NSInteger framedur = 30; // target frame rate: 30 fps, i.e. a frame duration of 1/30 s
    // Use activeFormat.videoSupportedFrameRateRanges to discover the valid ranges
    AVFrameRateRange *frameRateRange = [CapvideoInput.device.activeFormat.videoSupportedFrameRateRanges objectAtIndex:0];
    //NSLog(@"framerate max %f min %f", frameRateRange.maxFrameRate, frameRateRange.minFrameRate);
    if (framedur > frameRateRange.maxFrameRate || framedur < frameRateRange.minFrameRate)
    {
        // The requested frame rate is outside what the current format supports
        return;
    }
    [videoDevice lockForConfiguration:NULL]; // must lock for configuration first, or setting the durations below throws an exception
    // Set the frame duration; a smaller duration means a higher frame rate
    CapvideoInput.device.activeVideoMinFrameDuration = CMTimeMake(1, (int)framedur);
    CapvideoInput.device.activeVideoMaxFrameDuration = CMTimeMake(1, (int)framedur);
    [videoDevice unlockForConfiguration];
    //--------
    
    if ([_captureSession canAddInput:CapvideoInput]) {
        [_captureSession addInput:CapvideoInput]; // add the input to the session
    }
    
    AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
    // Discard frames that arrive late
    captureOutput.alwaysDiscardsLateVideoFrames = YES;

    // Set the pixel format of the output video data (NV12, video range)
    NSDictionary *videoSetting = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey, nil];
    [captureOutput setVideoSettings:videoSetting];

    dispatch_queue_t queue;
    queue = dispatch_queue_create("cameraQueue", NULL);
    // Set a sample-buffer delegate on the output; the delegate receives the raw captured frames
    [captureOutput setSampleBufferDelegate:self queue:queue];
    if ([_captureSession canAddOutput:captureOutput]) {
        [_captureSession addOutput:captureOutput]; // add the output to the session
    }
    // Keep the connections, so the SampleBufferDelegate can tell where the data came from (video or audio?)
    _VideoConnection = [captureOutput connectionWithMediaType:AVMediaTypeVideo];
    // Note: no audio input/output is configured in this demo, so this connection will be nil
    _AudioConnection = [captureOutput connectionWithMediaType:AVMediaTypeAudio];
    // Set the capture resolution
    if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720])
    {
        _captureSession.sessionPreset = AVCaptureSessionPreset1280x720;
    }
   
}
-(void)ShowPic{
    // Live preview of the camera. AVCaptureVideoPreviewLayer is a CALayer subclass that displays
    // the frames coming from a capture session; add it as a sublayer of any UIView's layer and
    // you get a real-time preview of what is being captured.
    _captureLayer = [AVCaptureVideoPreviewLayer layerWithSession:_captureSession];
    // How the video is scaled inside the preview layer
    _captureLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [[_captureLayer connection] setVideoOrientation:AVCaptureVideoOrientationPortrait]; // portrait orientation

    _captureLayer.frame = self.ShowCapView.layer.bounds;
    [self.ShowCapView.layer addSublayer:_captureLayer]; // stack the preview layer on top
}

- (IBAction)StartCapAndShow:(id)sender {
    [self startCapture];
}
- (BOOL)startCapture
{
    if (self->isCapturing)
    {
         [_captureSession stopRunning];
        self->isCapturing = NO;
        return NO;
    }
    // Check camera permission; bail out if the user has not authorized camera access
    AVAuthorizationStatus videoAuthStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    if (videoAuthStatus != AVAuthorizationStatusAuthorized)
    {
        return NO;
    }
    [_captureSession startRunning];
    self->isCapturing = YES;
    return YES;
}

/**
 Feed one frame of raw data to the encoder.

 @param sampleBuffer the data to encode
 @param forceKeyFrame whether to force this frame to be a keyframe (IDR)
 @return whether the frame was submitted successfully
 */
- (BOOL)videoEncodeInputData:(CMSampleBufferRef)sampleBuffer forceKeyFrame:(BOOL)forceKeyFrame
{
    if (NULL == _compressionSessionRef)
    {
        return NO;
    }
    
    if (NULL == sampleBuffer)
    {
        return NO;
    }

    CVImageBufferRef pixelBuffer = (CVImageBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer);
    NSDictionary *frameProperties = @{(__bridge NSString *)kVTEncodeFrameOptionKey_ForceKeyFrame: @(forceKeyFrame)};

    // Pass the sample buffer's own timestamp and duration through to the encoder
    CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    CMTime duration = CMSampleBufferGetDuration(sampleBuffer);
    OSStatus status = VTCompressionSessionEncodeFrame(_compressionSessionRef, pixelBuffer, pts, duration, (__bridge CFDictionaryRef)frameProperties, NULL, NULL);
    if (noErr != status)
    {
        NSLog(@"VEVideoEncoder::VTCompressionSessionEncodeFrame failed! status:%d", (int)status);
        return NO;
    }
    return YES;
}


// Called automatically once the session starts; the delegate was already registered on the output, so we just implement the callback here.
- (void) captureOutput:(AVCaptureOutput *)captureOutput
 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
        fromConnection:(AVCaptureConnection *)connection
{
    // sampleBuffer holds the captured data; whether it is video or audio is determined by the connection
    if (connection == _VideoConnection) {  // Video

        // Get the current frame's pixel dimensions
        CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        CVPixelBufferLockBaseAddress(pixelBuffer, 0);

        size_t width = CVPixelBufferGetWidth(pixelBuffer);
        size_t height = CVPixelBufferGetHeight(pixelBuffer);
        void *imageAddress = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0); // base address of the Y plane (only used by the dump below)
        /// Optionally dump the raw YUV to a file:
//        NSArray *dicts = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
//        NSString *path = [dicts objectAtIndex:0];
//        static FILE* fpdec;
//        if (fpdec == NULL)
//        {
//            fpdec = fopen([[NSString stringWithFormat:@"%@/yuv2.data", path] UTF8String], "wb");
//        }
//        fwrite(imageAddress, 1, (width * height * 3 / 2), fpdec);
//        fflush(fpdec);
        [self videoEncodeInputData:sampleBuffer forceKeyFrame:NO];

        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        //NSLog(@"Got a video sampleBuffer here; process it further (encode to H.264)");
    } else if (connection == _AudioConnection) {  // Audio
        //NSLog(@"Got an audio sampleBuffer here; process it further (encode to AAC)");
        //do nothing
    }
}

/*
 1. ABR (Average Bit Rate): the output hits the configured bitrate on average over a time window,
    but local peaks may exceed it; the long-term average stays constant. A reasonable compromise
    between VBR and CBR, and what iOS's AverageBitRate property gives you.
 2. Picture quality is chosen via the profile; H.264 defines four profiles: BP, EP, MP, HP.
 3. Also configure: real-time encoding output; whether B-frames may be produced (High profile
    supports B-frames); and the keyframe (I-frame) interval.
*/
-(void)EncH264DataCreateFun
{
    // Create the encoder session; width/height must match the 1280x720 capture preset set above
    OSStatus status = VTCompressionSessionCreate(NULL, 1280, 720, kCMVideoCodecType_H264, NULL, NULL, NULL, encodeOutputDataCallback, (__bridge void *)(self), &_compressionSessionRef);
    if (noErr != status)
    {
        NSLog(@"VEVideoEncoder::VTCompressionSessionCreate failed! status:%d", (int)status);
        return;
    }

    // Average bitrate: 512 kbps
    status = VTSessionSetProperty(_compressionSessionRef, kVTCompressionPropertyKey_AverageBitRate, (__bridge CFTypeRef)@(512 * 1024));
    // ProfileLevel: Baseline 3.1
    status = VTSessionSetProperty(_compressionSessionRef, kVTCompressionPropertyKey_ProfileLevel, kVTProfileLevel_H264_Baseline_3_1);
    // Real-time encoding output (avoids latency)
    status = VTSessionSetProperty(_compressionSessionRef, kVTCompressionPropertyKey_RealTime, kCFBooleanTrue);
    // Disallow frame reordering, i.e. no B-frames (Baseline profile has none anyway)
    status = VTSessionSetProperty(_compressionSessionRef, kVTCompressionPropertyKey_AllowFrameReordering, kCFBooleanFalse);
    status = VTSessionSetProperty(_compressionSessionRef, kVTCompressionPropertyKey_ExpectedFrameRate, (__bridge CFTypeRef)@(30));
    // Max keyframe interval: 30 fps * 2 s, i.e. one GOP every two seconds at 30 fps
    status = VTSessionSetProperty(_compressionSessionRef, kVTCompressionPropertyKey_MaxKeyFrameInterval, (__bridge CFTypeRef)@(30 * 2));
    // Max keyframe interval duration: a GOP lasts at most 2 seconds
    status = VTSessionSetProperty(_compressionSessionRef, kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration, (__bridge CFTypeRef)@(2.0));
    // Let the encoder allocate its resources now
    status = VTCompressionSessionPrepareToEncodeFrames(_compressionSessionRef);
}

NSString *Pathtotal = nil;
NSFileHandle *fileHandle = nil;
// Encoder output callback: receives encoded H.264 sample buffers and appends them to a file in Annex-B format
void encodeOutputDataCallback(void * CM_NULLABLE outputCallbackRefCon, void * CM_NULLABLE sourceFrameRefCon, OSStatus status, VTEncodeInfoFlags infoFlags, CM_NULLABLE CMSampleBufferRef sampleBuffer)
{
    if (Pathtotal == nil)
    {
        NSArray *dicts = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
        NSString *path = [dicts objectAtIndex:0];
        Pathtotal = [NSString stringWithFormat:@"%@/264.data", path];

        // NSFileHandle cannot create the file itself, so create it first if it does not exist
        if (![[NSFileManager defaultManager] fileExistsAtPath:Pathtotal])
        {
            [[NSFileManager defaultManager] createFileAtPath:Pathtotal contents:nil attributes:nil];
        }
        fileHandle = [NSFileHandle fileHandleForWritingAtPath:Pathtotal];
        if (fileHandle == nil)
        {
            return;
        }
        [fileHandle seekToEndOfFile];
    }

    if (noErr != status || nil == sampleBuffer)
    {
        NSLog(@"VEVideoEncoder::encodeOutputCallback Error : %d!", (int)status);
        return;
    }
    
    if (nil == outputCallbackRefCon)
    {
        return;
    }
    
    if (!CMSampleBufferDataIsReady(sampleBuffer))
    {
        return;
    }
    
    if (infoFlags & kVTEncodeInfo_FrameDropped)
    {
        NSLog(@"VEVideoEncoder::H264 encode dropped frame.");
        return;
    }
    
    const char header[] = "\x00\x00\x00\x01";
    size_t headerLen = (sizeof header) - 1;
    NSData *headerData = [NSData dataWithBytes:header length:headerLen];
    
    // Determine whether this frame is a keyframe (a sample with no kCMSampleAttachmentKey_NotSync attachment is a sync frame)
    bool isKeyFrame = !CFDictionaryContainsKey((CFDictionaryRef)CFArrayGetValueAtIndex(CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, true), 0), (const void *)kCMSampleAttachmentKey_NotSync);
    
    if (isKeyFrame) // extract the SPS/PPS parameter sets
    {
        NSLog(@"VEVideoEncoder::encoded a keyframe");
        CMFormatDescriptionRef formatDescriptionRef = CMSampleBufferGetFormatDescription(sampleBuffer);

        // A keyframe must be prefixed with the SPS and PPS information
        size_t sParameterSetSize, sParameterSetCount;
        const uint8_t *sParameterSet;
        OSStatus spsStatus = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(formatDescriptionRef, 0, &sParameterSet, &sParameterSetSize, &sParameterSetCount, 0);
        
        size_t pParameterSetSize, pParameterSetCount;
        const uint8_t *pParameterSet;
        OSStatus ppsStatus = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(formatDescriptionRef, 1, &pParameterSet, &pParameterSetSize, &pParameterSetCount, 0);
        
        if (noErr == spsStatus && noErr == ppsStatus)
        {
            NSData *sps = [NSData dataWithBytes:sParameterSet length:sParameterSetSize];
            NSData *pps = [NSData dataWithBytes:pParameterSet length:pParameterSetSize];
            NSMutableData *spsData = [NSMutableData data];
            [spsData appendData:headerData];
            [spsData appendData:sps];
            NSLog(@"write file key");
            
            [fileHandle writeData:spsData];
            
            NSMutableData *ppsData = [NSMutableData data];
            [ppsData appendData:headerData];
            [ppsData appendData:pps];
            //[ppsData writeToFile:Pathtotal atomically:NO]; // would overwrite the file instead of appending
            [fileHandle writeData:ppsData];
        }
    }
    
    CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
    size_t length, totalLength;
    char *dataPointer;
    status = CMBlockBufferGetDataPointer(blockBuffer, 0, &length, &totalLength, &dataPointer);
    if (noErr != status)
    {
        NSLog(@"VEVideoEncoder::CMBlockBufferGetDataPointer Error : %d!", (int)status);
        return;
    }
    
    size_t bufferOffset = 0;
    static const int avcHeaderLength = 4;
    while (bufferOffset < totalLength - avcHeaderLength)
    {
        // Read the 4-byte NAL unit length prefix (the encoder emits AVCC format)
        uint32_t nalUnitLength = 0;
        memcpy(&nalUnitLength, dataPointer + bufferOffset, avcHeaderLength);

        // The length prefix is big-endian; convert it to host byte order
        nalUnitLength = CFSwapInt32BigToHost(nalUnitLength);
        
        NSData *frameData = [[NSData alloc] initWithBytes:(dataPointer + bufferOffset + avcHeaderLength) length:nalUnitLength];
        
        NSMutableData *outputFrameData = [NSMutableData data];
        [outputFrameData appendData:headerData];
        [outputFrameData appendData:frameData];
         NSLog(@"write file frame %d",nalUnitLength);
       // [outputFrameData writeToFile:Pathtotal atomically:NO];
        [fileHandle writeData:outputFrameData];
        bufferOffset += avcHeaderLength + nalUnitLength;
       
    }
    
}

- (BOOL)stopVideoEncode
{
    if (NULL == _compressionSessionRef)
    {
        return NO;
    }
    
    OSStatus status = VTCompressionSessionCompleteFrames(_compressionSessionRef, kCMTimeInvalid);
    
    if (noErr != status)
    {
        NSLog(@"VEVideoEncoder::VTCompressionSessionCompleteFrames failed! status:%d", (int)status);
        return NO;
    }
    return YES;
}
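
// Addition (not in the original post): a minimal teardown sketch. Once encoding is
// completely finished, invalidate and release the compression session so VideoToolbox
// can free its resources (stopVideoEncode above only flushes pending frames).
- (void)destroyVideoEncoder
{
    if (NULL != _compressionSessionRef)
    {
        VTCompressionSessionInvalidate(_compressionSessionRef);
        CFRelease(_compressionSessionRef);
        _compressionSessionRef = NULL;
    }
}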

@end

Note: remember to add the camera usage permission (the NSCameraUsageDescription key in Info.plist), otherwise the app will crash when capture starts. There is also a known black-screen issue on some high-spec devices; solutions for both are easy to find online.
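
For the permission part: add NSCameraUsageDescription (with a short usage string) to Info.plist, and request access before starting the session. A minimal sketch of the request flow, assuming you wire it to this demo's startCapture (this glue is my addition, not part of the original code):

// Ask for camera access, then start capturing on the main queue once granted.
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
    if (granted) {
        dispatch_async(dispatch_get_main_queue(), ^{
            [self startCapture]; // the toggle method defined above
        });
    } else {
        NSLog(@"Camera access denied");
    }
}];

The file the callback produces (Documents/264.data) is a raw Annex-B H.264 elementary stream; a tool such as ffplay can play it back directly for a quick sanity check.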
