美文网首页
iOS实时采集视频

iOS实时采集视频

作者: Goning | 来源:发表于2018-05-17 19:25 被阅读112次

本文介绍iOS下使用AVCaptureSession用于视频的实时采集录制


CaptureSession.h
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>

/// Capture resolution presets supported by CaptureSession.
/// Each case is mapped to the matching AVCaptureSessionPreset* string
/// by the -realPreset getter in CaptureSession.m.
typedef NS_ENUM (NSUInteger,CaptureSessionPreset){
    CaptureSessionPreset640x480,          // 640x480 (VGA)
    CaptureSessionPresetiFrame960x540,    // 960x540 (iFrame)
    CaptureSessionPreset1280x720,         // 1280x720 (720p)
};

/// Receiver of captured video frames.
@protocol CaptureSessionDelegate <NSObject>
/// Called once per captured video frame. Invoked on the capture queue
/// (a global concurrent queue — see -initAVCaptureSession), NOT the main
/// thread; dispatch to main before touching UI.
- (void)videoWithSampleBuffer:(CMSampleBufferRef)sampleBuffer;
@end

/// Wraps an AVCaptureSession configured for front-camera video capture and
/// forwards each raw CMSampleBufferRef to its delegate.
@interface CaptureSession : NSObject
/// Frame receiver. Declared weak (was strong) to break the retain cycle:
/// the view controller strongly owns this object and also acts as its
/// delegate, so a strong delegate reference would leak both objects.
@property (nonatomic, weak) id<CaptureSessionDelegate> delegate;
/// Underlying session, exposed so callers can attach an
/// AVCaptureVideoPreviewLayer to it.
@property (nonatomic, strong) AVCaptureSession *session;
/// Designated initializer; builds and configures the session for `preset`.
- (instancetype)initWithCaptureSessionPreset:(CaptureSessionPreset)preset;
/// Starts capture ([session startRunning]).
- (void)start;
/// Stops capture ([session stopRunning]).
- (void)stop;
@end
CaptureSession.m
#import "CaptureSession.h"

/// Private state: the capture device/input/output chain plus the mapping
/// from the public preset enum to the AVFoundation preset string.
@interface CaptureSession()<AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureAudioDataOutputSampleBufferDelegate>
@property (nonatomic,strong) AVCaptureDevice *videoDevice;        // front camera, chosen in -initAVCaptureSession
@property (nonatomic,strong) AVCaptureDeviceInput *videoInput;    // input wrapping videoDevice
@property (nonatomic,strong) AVCaptureVideoDataOutput *videoOutput; // raw-frame output (NV12)
@property (nonatomic,assign) CaptureSessionPreset definePreset;   // preset enum passed at init
@property (nonatomic,strong) NSString *realPreset;                // AVCaptureSessionPreset* string derived from definePreset
@end


@implementation CaptureSession

/// Designated initializer. Stores the requested preset, then builds and
/// configures the underlying AVCaptureSession.
- (instancetype)initWithCaptureSessionPreset:(CaptureSessionPreset)preset {
    // Fix: assign self from [super init]; the original discarded the result.
    self = [super init];
    if (self) {
        // Fix: record the preset BEFORE configuring the session. The original
        // configured first, so -realPreset always read the zero-valued ivar
        // and the caller's preset was silently ignored.
        _definePreset = preset;
        [self initAVCaptureSession];
    }
    return self;
}

/// Builds the session: resolution preset, front camera input, NV12 video
/// data output, and connection settings (orientation, stabilization).
- (void)initAVCaptureSession {
    _session = [[AVCaptureSession alloc] init];
    // Apply the requested resolution, falling back to lower presets.
    // Fix: the original only *checked* canSetSessionPreset: and never
    // assigned sessionPreset, so the session always kept its default.
    if ([self.session canSetSessionPreset:self.realPreset]) {
        self.session.sessionPreset = self.realPreset;
    } else if ([self.session canSetSessionPreset:AVCaptureSessionPresetiFrame960x540]) {
        self.session.sessionPreset = AVCaptureSessionPresetiFrame960x540;
    } else if ([self.session canSetSessionPreset:AVCaptureSessionPreset640x480]) {
        self.session.sessionPreset = AVCaptureSessionPreset640x480;
    }
    // Batch the topology changes into one atomic configuration.
    [_session beginConfiguration];
    // Locate the front wide-angle camera.
    AVCaptureDeviceDiscoverySession *deviceDiscoverySession = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera] mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionFront];
    for (AVCaptureDevice *device in deviceDiscoverySession.devices) {
        if (device.position == AVCaptureDevicePositionFront) {
            self.videoDevice = device;  // front camera
        }
    }
    // Create the capture input. Fix: check the returned object, not the
    // NSError — the error pointer is only meaningful on failure.
    NSError *error = nil;
    self.videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:self.videoDevice error:&error];
    if (!self.videoInput) {
        NSLog(@"摄像头错误");
        // Fix: balance beginConfiguration before bailing out; the original
        // early return left the session stuck mid-configuration.
        [_session commitConfiguration];
        return;
    }
    // Attach the input to the session.
    if ([self.session canAddInput:self.videoInput]) {
        [self.session addInput:self.videoInput];
    }
    // Raw-frame output; keep late frames rather than dropping them.
    self.videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    self.videoOutput.alwaysDiscardsLateVideoFrames = NO;
    // Pixel format: 4:2:0 bi-planar YCbCr, video range (NV12).
    [self.videoOutput setVideoSettings:@{
                                         (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey:@(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)
                                         }];
    // Deliver sample buffers off the main thread.
    dispatch_queue_t captureQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
    [self.videoOutput setSampleBufferDelegate:self queue:captureQueue];
    // Attach the output to the session.
    if ([self.session canAddOutput:self.videoOutput]) {
        [self.session addOutput:self.videoOutput];
    }
    // Configure the video connection between input and output.
    AVCaptureConnection *connection = [self.videoOutput connectionWithMediaType:AVMediaTypeVideo];
    connection.videoOrientation = AVCaptureVideoOrientationPortrait;
    // Enable automatic video stabilization when the connection supports it.
    if ([connection isVideoStabilizationSupported]) {
        connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
    }
    // NOTE(review): this pins the connection at maximum zoom/crop, which
    // looks unintentional for a plain capture pipeline — confirm whether
    // the default factor (1.0) was intended instead.
    connection.videoScaleAndCropFactor = connection.videoMaxScaleAndCropFactor;
    [self.session commitConfiguration];
}

/// Lazily maps the public preset enum to the AVFoundation preset string.
- (NSString *)realPreset {
    switch (_definePreset) {
        case CaptureSessionPreset640x480:
            _realPreset = AVCaptureSessionPreset640x480;
            break;
        case CaptureSessionPresetiFrame960x540:
            _realPreset = AVCaptureSessionPresetiFrame960x540;
            break;
        case CaptureSessionPreset1280x720:
            _realPreset = AVCaptureSessionPreset1280x720;
            break;
        default:
            _realPreset = AVCaptureSessionPreset640x480;  // safe fallback
            break;
    }
    return _realPreset;
}

/// Starts the capture session.
- (void)start {
    [self.session startRunning];
}

/// Stops the capture session.
- (void)stop {
    [self.session stopRunning];
}

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate

/// Forwards each captured frame to the delegate (on the capture queue).
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    if (self.delegate && [self.delegate respondsToSelector:@selector(videoWithSampleBuffer:)]) {
        [self.delegate videoWithSampleBuffer:sampleBuffer];
    }
}

@end

ViewController.m
#import "ViewController.h"
#import "CaptureSession.h"

/// Private extension: adopts CaptureSessionDelegate and owns the session.
@interface ViewController ()<CaptureSessionDelegate>
@property (nonatomic,strong) CaptureSession *captureSession;  // owning reference to the capture pipeline
@end

@implementation ViewController

/// Builds the capture pipeline and installs a full-screen preview layer.
- (void)viewDidLoad {
    [super viewDidLoad];
    // Create the capture session at VGA resolution and become its delegate.
    self.captureSession = [[CaptureSession alloc] initWithCaptureSessionPreset:CaptureSessionPreset640x480];
    self.captureSession.delegate = self;
    // Attach a preview layer fed by the session, sized to fill the view.
    AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession.session];
    CGSize viewSize = self.view.bounds.size;
    previewLayer.frame = CGRectMake(0.f, 0.f, viewSize.width, viewSize.height);
    // Fill the layer without distorting the aspect ratio.
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer:previewLayer];
    // Keep the start/stop button visible above the preview layer.
    [self.view bringSubviewToFront:self.btn];
}

/// Toggles capture: selected -> running, deselected -> stopped.
- (IBAction)start:(UIButton *)sender {
    sender.selected = !sender.selected;
    if (sender.selected) {
        [self.captureSession start];
    } else {
        [self.captureSession stop];
    }
}

#pragma mark - CaptureSessionDelegate

/// Per-frame callback; delivered on the capture queue, not the main thread.
- (void)videoWithSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    NSLog(@"实时采集回调得到sampleBuffer");
}

@end

将采集到的视频数据进行硬编码为H.264:iOS实时硬编码H.264

相关文章

  • iOS实时采集视频

    本文介绍iOS下使用AVCaptureSession用于视频的实时采集录制 CaptureSession.h Ca...

  • 锐动的直播ios SDK解决方案

    直播iOS SDK,可以在手机iOS端实时采集视频,同时在拍摄过程中支持多种实时滤镜效果,只要调用视频直播接口,通...

  • iOS直播技术分享-音视频采集(一)

    1、iOS直播技术的流程 数据采集:通过摄像头和麦克风获得实时的音视频数据; 图像处理:将数据采集的输入流进行实时...

  • 音视频采集

    1、iOS直播技术的流程 数据采集:通过摄像头和麦克风获得实时的音视频数据;图像处理:将数据采集的输入流进行实时滤...

  • iOS视频开发(一):视频采集

    前言 作为iOS音视频开发之视频开发的第一篇,本文介绍iOS视频采集的相关概念及视频采集的工作原理,后续将对采集后...

  • iOS视频开发(二):视频H264硬编码

    前言 上一篇《iOS视频开发(一):视频采集》我们已经介绍了如何采集iOS摄像头的视频数据,采集到的原始视频数据量...

  • iOS实时硬编码H.264

    本文介绍iOS下使用VTCompressionSessionRef将AVCaptureSession实时采集到的视...

  • iOS-音视频采集

    概述 音视频采集分音频采集和视频采集 在iOS中,可以同步采集音频和视频 采集的API在AVFoundation框...

  • 音视频采集硬编码

    iOS音视频实时采集硬件编码(H264+AAC) 最近在做音视频方面的东西,发现国内硬编的资料特别少,去网上搜全是...

  • iOS语音对讲(三)FFmpeg实时解码AAC并播放PCM

    本文介绍iOS实时语音双向对讲功能:(一)实时采集PCM并编码AAC(二)RTSP+RTP协议实时传输(三)FFm...

网友评论

      本文标题:iOS实时采集视频

      本文链接:https://www.haomeiwen.com/subject/gssidftx.html