美文网首页
iOS获取视频信息的常用方法

iOS获取视频信息的常用方法

作者: LeverTsui | 来源:发表于2018-12-17 14:50 被阅读107次

1、将CVPixelBufferRef对象转换为UIImage对象

// Convert a CVPixelBufferRef into a UIImage via Core Image.
// NOTE(review): `pixelBuffer` is declared but never assigned in this snippet —
// it must point to a valid pixel buffer (e.g. from a capture output or a
// decoder) before the lines below run; as written this would crash.
CVPixelBufferRef pixelBuffer;
// Wrap the pixel buffer in a CIImage (no pixel copy at this point).
CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
            
// Render the CIImage into a CGImage covering the buffer's full extent.
CIContext *temporaryContext = [CIContext contextWithOptions:nil];
CGImageRef videoImage = [temporaryContext createCGImage:ciImage fromRect:CGRectMake(0, 0, CVPixelBufferGetWidth(pixelBuffer), CVPixelBufferGetHeight(pixelBuffer))];
            
// UIImage retains its own reference, so the CGImage can be released here.
UIImage *uiImage = [UIImage imageWithCGImage:videoImage];
CGImageRelease(videoImage);

2、获取视频的尺寸(经 preferredTransform 旋转校正)

/// Returns the display size of the video at `videoPath`, with the track's
/// preferredTransform (rotation) applied.
/// @param videoPath Filesystem path to the video file.
/// @return The absolute transformed dimensions, or CGSizeZero when the file
///         does not exist or contains no video track.
+ (CGSize)videoSizeTransformFromVideoPath:(NSString *)videoPath {
    
    if (![[NSFileManager defaultManager] fileExistsAtPath:videoPath]) {
        return CGSizeZero;
    }
    
    AVURLAsset *asset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:videoPath]];
    AVAssetTrack *track = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    // Guard the missing-track case explicitly instead of transforming a zero size.
    if (!track) {
        return CGSizeZero;
    }
    
    // The original code fetched the audio track's AudioStreamBasicDescription and
    // evaluated `audioStreamDescription->mSampleRate;` as a statement with no
    // effect — dereferencing a NULL pointer (and crashing) for videos without an
    // audio track. The lookup contributed nothing to the result, so it is removed.
    
    // naturalSize is the pre-rotation size; applying preferredTransform can yield
    // negative components, hence fabs() below.
    CGSize dimensions = CGSizeApplyAffineTransform(track.naturalSize, track.preferredTransform);
    return CGSizeMake(fabs(dimensions.width), fabs(dimensions.height));
}

3、获取视频的分辨率

/// Returns the resolution of the video at `videoPath`, corrected by the
/// track's preferredTransform; CGSizeZero when the file is absent.
+ (CGSize)videoSizeFromVideoPath:(NSString *)videoPath {
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if (![fileManager fileExistsAtPath:videoPath]) {
        return CGSizeZero;
    }
    
    NSURL *fileURL = [NSURL fileURLWithPath:videoPath];
    AVURLAsset *asset = [AVURLAsset assetWithURL:fileURL];
    AVAssetTrack *videoTrack = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    // Apply the rotation transform; take absolute values since the transform
    // can flip the sign of either dimension.
    CGSize transformed = CGSizeApplyAffineTransform(videoTrack.naturalSize,
                                                    videoTrack.preferredTransform);
    return CGSizeMake(fabs(transformed.width), fabs(transformed.height));
}

4、获取视频的帧率

/// Returns the nominal frame rate (fps) of the first video track at
/// `videoPath`, or 0.0 when the file does not exist.
+ (CGFloat)nominalFrameRateFromVideoPath:(NSString *)videoPath {
    if (![[NSFileManager defaultManager] fileExistsAtPath:videoPath]) {
        return 0.0;
    }
    NSURL *fileURL = [NSURL fileURLWithPath:videoPath];
    AVURLAsset *asset = [AVURLAsset assetWithURL:fileURL];
    AVAssetTrack *firstVideoTrack = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    // Messaging a nil track yields 0, which doubles as the "no track" result.
    return firstVideoTrack.nominalFrameRate;
}

5、获取视频时长(单位毫秒)

/// Returns the duration of the video at `videoPath` in milliseconds.
/// @param videoPath Filesystem path to the video file.
/// @return Duration in ms, or 0.0 when the file does not exist or the asset's
///         duration is not a valid numeric time.
+ (NSTimeInterval)videoDurationFromVideoPath:(NSString *)videoPath { 
    if (![[NSFileManager defaultManager] fileExistsAtPath:videoPath]) {
        return 0.00;
    }
    AVURLAsset *urlAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:videoPath] options:nil];
    CMTime duration = urlAsset.duration;
    // Guard invalid/indefinite times: the original divided by `timescale`
    // directly, which divides by zero for a non-numeric CMTime.
    if (!CMTIME_IS_NUMERIC(duration) || duration.timescale == 0) {
        return 0.00;
    }
    // CMTimeGetSeconds performs the value/timescale conversion safely.
    return 1000.0 * CMTimeGetSeconds(duration);
}

6、AVFrame转换为UIImage(针对YUV420p数据)


/// Converts a decoded FFmpeg AVFrame (YUV420p) into a UIImage by first
/// building an NV12 CVPixelBuffer and then rendering it through Core Image.
/// @param frame The decoded frame; may be NULL or unconvertible.
/// @return The rendered image, or nil when conversion fails.
+ (UIImage *)converUIImageFromAVFrame:(AVFrame*)frame {
    CVPixelBufferRef pixelBuffer = [GTVideoTool converCVPixelBufferRefFromAVFrame:frame];
    // The converter returns NULL for bad input or allocation failure; the
    // original passed that NULL straight into Core Image and crashed.
    if (!pixelBuffer) {
        return nil;
    }
    
    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
    
    CIContext *temporaryContext = [CIContext contextWithOptions:nil];
    CGImageRef videoImage = [temporaryContext
                             createCGImage:ciImage
                             fromRect:CGRectMake(0, 0,
                                                 CVPixelBufferGetWidth(pixelBuffer),
                                                 CVPixelBufferGetHeight(pixelBuffer))];
    
    // UIImage retains the CGImage, so both intermediates can be released.
    UIImage *uiImage = [UIImage imageWithCGImage:videoImage];
    
    CVPixelBufferRelease(pixelBuffer); 
    CGImageRelease(videoImage);
    
    return uiImage;
}

/// Converts a planar YUV420p AVFrame into a bi-planar (NV12) CVPixelBuffer.
/// @param avframe The decoded frame; must have valid data planes.
/// @return A +1-retained pixel buffer the caller must release with
///         CVPixelBufferRelease, or NULL on failure.
+ (CVPixelBufferRef)converCVPixelBufferRefFromAVFrame:(AVFrame *)avframe {
    if (!avframe || !avframe->data[0]) {
        return NULL;
    }
    // Interleaving Cb/Cr below assumes the two chroma planes share a stride.
    if (avframe->linesize[1] != avframe->linesize[2]) {
        return NULL;
    }
    
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             @(avframe->linesize[0]), kCVPixelBufferBytesPerRowAlignmentKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferOpenGLESCompatibilityKey,
                             [NSDictionary dictionary], kCVPixelBufferIOSurfacePropertiesKey,
                             nil];
    
    CVPixelBufferRef outputPixelBuffer = NULL;
    CVReturn ret = CVPixelBufferCreate(kCFAllocatorDefault,
                                       avframe->width,
                                       avframe->height,
                                       kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
                                       (__bridge CFDictionaryRef)(options),
                                       &outputPixelBuffer);
    // Bail out BEFORE touching the buffer: the original locked and memcpy'd
    // into a NULL buffer when creation failed, crashing instead of returning.
    if (ret != kCVReturnSuccess || outputPixelBuffer == NULL) {
        NSLog(@"CVPixelBufferCreate Failed");
        return NULL;
    }
    
    // Interleave the planar Cb and Cr planes into one NV12-style CbCr plane.
    size_t srcPlaneSize = (size_t)avframe->linesize[1] * (size_t)avframe->height / 2;
    size_t dstPlaneSize = srcPlaneSize * 2;
    uint8_t *dstPlane = malloc(dstPlaneSize);
    if (!dstPlane) {
        CVPixelBufferRelease(outputPixelBuffer);
        return NULL;
    }
    for (size_t i = 0; i < srcPlaneSize; i++) {
        dstPlane[2 * i]     = avframe->data[1][i];
        dstPlane[2 * i + 1] = avframe->data[2][i];
    }
    
    CVPixelBufferLockBaseAddress(outputPixelBuffer, 0);
    
    // Copy row by row with the smaller of the two strides: the CV plane's
    // bytes-per-row can exceed the AVFrame linesize (alignment padding), and a
    // single bulk memcpy of bytesPerRow*height would overread the source.
    size_t bytesPerRowY = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0);
    uint8_t *dstY = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0);
    size_t copyY = MIN(bytesPerRowY, (size_t)avframe->linesize[0]);
    for (int row = 0; row < avframe->height; row++) {
        memcpy(dstY + (size_t)row * bytesPerRowY,
               avframe->data[0] + (size_t)row * avframe->linesize[0],
               copyY);
    }
    
    size_t bytesPerRowUV = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1);
    uint8_t *dstUV = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1);
    size_t srcStrideUV = 2 * (size_t)avframe->linesize[1];  // interleaved stride
    size_t copyUV = MIN(bytesPerRowUV, srcStrideUV);
    for (int row = 0; row < avframe->height / 2; row++) {
        memcpy(dstUV + (size_t)row * bytesPerRowUV,
               dstPlane + (size_t)row * srcStrideUV,
               copyUV);
    }
    
    CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0);
    
    free(dstPlane);
    
    return outputPixelBuffer;
}

7、BGRA转换为UIImage对象

/// Converts an ST_GTV_RGBA pixel buffer into a UIImage and frees the input.
/// NOTE(review): this method CONSUMES `argb` — gtv_queue_rgba_free() is called
/// unconditionally, so callers must not touch `argb` afterwards; confirm this
/// ownership convention matches gtv_queue_rgba_free's contract.
/// NOTE(review): assumes argb->p_rgba holds width * heigh * 4 bytes of BGRA
/// data — TODO confirm against the producer.
+ (UIImage *)converUIImageFromRGBA:(ST_GTV_RGBA *)argb {
    UIImage *image = [GTVideoTool imageFromBRGABytes:argb->p_rgba imageSize:CGSizeMake(argb->width, argb->heigh)];
    gtv_queue_rgba_free(argb);
    return image;
}

/// Wraps a raw BGRA byte buffer in a UIImage of the given size.
+ (UIImage *)imageFromBRGABytes:(unsigned char *)imageBytes imageSize:(CGSize)imageSize {
    CGImageRef cgImage = [self imageRefFromBGRABytes:imageBytes imageSize:imageSize];
    // UIImage retains its own reference; release ours to avoid leaking.
    UIImage *result = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);
    return result;
}

/// Builds a CGImage from a raw BGRA (little-endian, premultiplied-first)
/// byte buffer.
/// @param imageBytes Pixel data, imageSize.width * 4 bytes per row.
/// @param imageSize Width/height in pixels.
/// @return A +1-retained CGImageRef the caller must CGImageRelease, or NULL
///         when the bitmap context cannot be created (e.g. zero size).
+ (CGImageRef)imageRefFromBGRABytes:(unsigned char *)imageBytes imageSize:(CGSize)imageSize {
    
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(imageBytes,
                                                 imageSize.width,
                                                 imageSize.height,
                                                 8,
                                                 imageSize.width * 4,
                                                 colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    // CGBitmapContextCreate returns NULL for invalid parameters (zero/negative
    // size, bad stride); the original passed NULL on to CreateImage.
    if (!context) {
        CGColorSpaceRelease(colorSpace);
        return NULL;
    }
    CGImageRef imageRef = CGBitmapContextCreateImage(context);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    
    return imageRef;
}

相关文章

  • iOS获取视频信息的常用方法

    1、将CVPixelBufferRef对象转换为UIImage对象 2、获取视频中的音频信息 3、获取视频的分辨率...

  • iOS-拷贝文件/文件夹

    序言 常用的IOS目录 不常用iOS目录 获取应用目录 下面就是这些文件夹获取路径的方法: 1.获取AppName...

  • iOS获取设备信息的常用方法

    参考链接 : http://childhood.logdown.com/posts/208216/ios-acce...

  • 获取iOS设备信息

    在进行iOS开发过程中,我们有时候需要获取设备的一些信息。下面整理了一些常用的获取设备信息的方法。 1.获取系统版...

  • iOS 调用系统通讯录,获取联系人信息;适配 iOS9;发送短信

    一、 调用系统通讯录,获取联系人信息 iOS9 之前的 和 框架 常用的一个代理方法 iOS9 之后的 <...

  • Java反射基础

    使用反射获取类的信息 反射获取类的信息是通过Class类获取的,常用方法: String getName() 获...

  • iOS开发中的常用define定义

    iOS开发中的常用define定义 //获取iphone的基本信息: #define ScreenHeight [...

  • iOS runtime--获取类信息

    iOS runtime--获取类信息 在iOS中可以通过runtime获取一个类的相关信息:有哪些方法、有哪些协议...

  • I/O

    获取文件信息 File类的常用方法 压缩文件 ZipOutputStream类的常用方法 解压缩ZIP文件 ZIP...

  • Calendar类

    获取日历信息及时间的类。 calendar的常用方法: 获取时间: 设置时间格式: 利用SimpleDateFor...

网友评论

      本文标题:iOS获取视频信息的常用方法

      本文链接:https://www.haomeiwen.com/subject/mrsshqtx.html