1. Converting a CVPixelBufferRef to a UIImage
CVPixelBufferRef pixelBuffer; // assumed to be obtained elsewhere (decoder output, capture callback, etc.)
CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
CIContext *temporaryContext = [CIContext contextWithOptions:nil];
CGImageRef videoImage = [temporaryContext createCGImage:ciImage
                                               fromRect:CGRectMake(0, 0,
                                                                   CVPixelBufferGetWidth(pixelBuffer),
                                                                   CVPixelBufferGetHeight(pixelBuffer))];
UIImage *uiImage = [UIImage imageWithCGImage:videoImage];
CGImageRelease(videoImage);
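For context, the pixelBuffer above is typically handed to you by a capture or decode callback; a minimal sketch, assuming an AVCaptureVideoDataOutput sample-buffer delegate:
- (void)captureOutput:(AVCaptureOutput *)output
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    // Not retained here; call CVPixelBufferRetain if it must outlive the callback.
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // ... feed pixelBuffer into the conversion code above ...
}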
2. Reading a video's audio track information
+ (Float64)audioSampleRateFromVideoPath:(NSString *)videoPath {
    if (![[NSFileManager defaultManager] fileExistsAtPath:videoPath]) {
        return 0;
    }
    AVURLAsset *asset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:videoPath]];
    AVAssetTrack *audioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    CMAudioFormatDescriptionRef item = (__bridge CMAudioFormatDescriptionRef)audioTrack.formatDescriptions.firstObject;
    if (!item) {
        return 0;
    }
    // The AudioStreamBasicDescription also exposes mChannelsPerFrame, mFormatID, etc.
    const AudioStreamBasicDescription *audioStreamDescription = CMAudioFormatDescriptionGetStreamBasicDescription(item);
    return audioStreamDescription ? audioStreamDescription->mSampleRate : 0;
}
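A hypothetical call site (videoPath is assumed to be a local file path):
Float64 sampleRate = [GTVideoTool audioSampleRateFromVideoPath:videoPath];
NSLog(@"audio sample rate: %.0f Hz", sampleRate); // e.g. 44100 Hz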
3. Getting a video's resolution
+ (CGSize)videoSizeFromVideoPath:(NSString *)videoPath {
    if (![[NSFileManager defaultManager] fileExistsAtPath:videoPath]) {
        return CGSizeZero;
    }
    AVURLAsset *asset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:videoPath]];
    AVAssetTrack *track = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    // Apply preferredTransform so rotated (e.g. portrait) video reports its display size;
    // fabs is needed because the rotation can make the transformed components negative.
    CGSize dimensions = CGSizeApplyAffineTransform(track.naturalSize, track.preferredTransform);
    return CGSizeMake(fabs(dimensions.width), fabs(dimensions.height));
}
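For illustration, a portrait iPhone recording typically stores its frames sideways and carries a 90° preferredTransform, which is exactly what the transform above corrects for:
CGSize size = [GTVideoTool videoSizeFromVideoPath:videoPath];
// A portrait clip whose naturalSize is 1920x1080 reports 1080x1920 here.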
4. Getting a video's frame rate
+ (CGFloat)nominalFrameRateFromVideoPath:(NSString *)videoPath {
    CGFloat fps = 0.00;
    if (![[NSFileManager defaultManager] fileExistsAtPath:videoPath]) {
        return fps;
    }
    AVURLAsset *asset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:videoPath]];
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    fps = videoTrack.nominalFrameRate;
    return fps;
}
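As a rough usage sketch (using the duration helper from section 5 below), the nominal rate times the duration approximates the total frame count; note that nominalFrameRate is only the track's stated average:
CGFloat fps = [GTVideoTool nominalFrameRateFromVideoPath:videoPath];
NSTimeInterval durationMs = [GTVideoTool videoDurationFromVideoPath:videoPath];
NSUInteger approxFrameCount = (NSUInteger)(fps * durationMs / 1000.0);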
5. Getting a video's duration (in milliseconds)
+ (NSTimeInterval)videoDurationFromVideoPath:(NSString *)videoPath {
    if (![[NSFileManager defaultManager] fileExistsAtPath:videoPath]) {
        return 0.00;
    }
    AVURLAsset *urlAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:videoPath] options:nil];
    return 1000.0 * urlAsset.duration.value / urlAsset.duration.timescale;
}
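Equivalently, CMTimeGetSeconds from CoreMedia performs the value/timescale conversion for you:
NSTimeInterval durationMs = CMTimeGetSeconds(urlAsset.duration) * 1000.0;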
6. Converting an AVFrame to a UIImage (for YUV420p data)
+ (UIImage *)converUIImageFromAVFrame:(AVFrame *)frame {
    CVPixelBufferRef pixelBuffer = [GTVideoTool converCVPixelBufferRefFromAVFrame:frame];
    if (!pixelBuffer) {
        return nil;
    }
    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
    CIContext *temporaryContext = [CIContext contextWithOptions:nil];
    CGImageRef videoImage = [temporaryContext createCGImage:ciImage
                                                   fromRect:CGRectMake(0, 0,
                                                                       CVPixelBufferGetWidth(pixelBuffer),
                                                                       CVPixelBufferGetHeight(pixelBuffer))];
    UIImage *uiImage = [UIImage imageWithCGImage:videoImage];
    CVPixelBufferRelease(pixelBuffer);
    CGImageRelease(videoImage);
    return uiImage;
}
+ (CVPixelBufferRef)converCVPixelBufferRefFromAVFrame:(AVFrame *)avframe {
    if (!avframe || !avframe->data[0]) {
        return NULL;
    }
    // The target pixel format is bi-planar (NV12-style) 4:2:0, so the planar
    // U and V planes of the AVFrame must be interleaved into one CbCr plane.
    if (avframe->linesize[1] != avframe->linesize[2]) {
        return NULL;
    }
    NSDictionary *options = @{
        (__bridge NSString *)kCVPixelBufferBytesPerRowAlignmentKey : @(avframe->linesize[0]),
        (__bridge NSString *)kCVPixelBufferOpenGLESCompatibilityKey : @YES,
        (__bridge NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{},
    };
    CVPixelBufferRef outputPixelBuffer = NULL;
    CVReturn ret = CVPixelBufferCreate(kCFAllocatorDefault,
                                       avframe->width,
                                       avframe->height,
                                       kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
                                       (__bridge CFDictionaryRef)options,
                                       &outputPixelBuffer);
    if (ret != kCVReturnSuccess) {
        NSLog(@"CVPixelBufferCreate failed: %d", ret);
        return NULL;
    }
    // Interleave the Cb and Cr planes into a temporary CbCr plane.
    size_t srcPlaneSize = avframe->linesize[1] * avframe->height / 2;
    size_t dstPlaneSize = srcPlaneSize * 2;
    uint8_t *dstPlane = malloc(dstPlaneSize);
    for (size_t i = 0; i < srcPlaneSize; i++) {
        dstPlane[2 * i]     = avframe->data[1][i];
        dstPlane[2 * i + 1] = avframe->data[2][i];
    }
    CVPixelBufferLockBaseAddress(outputPixelBuffer, 0);
    // Copy row by row: the pixel buffer's bytes-per-row may differ from the
    // AVFrame's linesize, since both can be padded independently.
    size_t bytesPerRowY = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0);
    uint8_t *dstY = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0);
    size_t copyY = MIN(bytesPerRowY, (size_t)avframe->linesize[0]);
    for (int row = 0; row < avframe->height; row++) {
        memcpy(dstY + row * bytesPerRowY, avframe->data[0] + row * avframe->linesize[0], copyY);
    }
    size_t bytesPerRowUV = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1);
    uint8_t *dstUV = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1);
    size_t srcRowUV = (size_t)avframe->linesize[1] * 2;
    size_t copyUV = MIN(bytesPerRowUV, srcRowUV);
    for (int row = 0; row < avframe->height / 2; row++) {
        memcpy(dstUV + row * bytesPerRowUV, dstPlane + row * srcRowUV, copyUV);
    }
    CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0);
    free(dstPlane);
    return outputPixelBuffer;
}
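For context, a minimal sketch of where such an AVFrame comes from, assuming codecContext is an opened FFmpeg AVCodecContext decoding to AV_PIX_FMT_YUV420P:
AVFrame *frame = av_frame_alloc();
while (avcodec_receive_frame(codecContext, frame) == 0) {
    if (frame->format == AV_PIX_FMT_YUV420P) {
        UIImage *image = [GTVideoTool converUIImageFromAVFrame:frame];
        // dispatch image to the main thread for display ...
    }
    av_frame_unref(frame);
}
av_frame_free(&frame);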
7. Converting BGRA data to a UIImage
+ (UIImage *)converUIImageFromRGBA:(ST_GTV_RGBA *)argb {
    UIImage *image = [GTVideoTool imageFromBGRABytes:argb->p_rgba imageSize:CGSizeMake(argb->width, argb->heigh)];
    gtv_queue_rgba_free(argb);
    return image;
}
+ (UIImage *)imageFromBGRABytes:(unsigned char *)imageBytes imageSize:(CGSize)imageSize {
    CGImageRef imageRef = [self imageRefFromBGRABytes:imageBytes imageSize:imageSize];
    UIImage *image = [UIImage imageWithCGImage:imageRef];
    CGImageRelease(imageRef);
    return image;
}
+ (CGImageRef)imageRefFromBGRABytes:(unsigned char *)imageBytes imageSize:(CGSize)imageSize {
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    // 32-bit little-endian with alpha first means the in-memory byte order is B, G, R, A.
    CGContextRef context = CGBitmapContextCreate(imageBytes,
                                                 imageSize.width,
                                                 imageSize.height,
                                                 8,
                                                 imageSize.width * 4,
                                                 colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef imageRef = CGBitmapContextCreateImage(context);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    return imageRef;
}
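A quick self-contained check of that byte order; this hypothetical snippet should yield a solid blue image:
size_t width = 4, height = 4;
uint8_t *bytes = calloc(width * height * 4, 1);
for (size_t i = 0; i < width * height; i++) {
    bytes[4 * i + 0] = 255; // B
    bytes[4 * i + 3] = 255; // A (premultiplied alpha)
}
UIImage *blue = [GTVideoTool imageFromBGRABytes:bytes imageSize:CGSizeMake(width, height)];
free(bytes);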