前言
AVFoundation框架是iOS中很重要的框架,所有与音视频相关的软硬件控制都在这个框架里。本文主要介绍iOS媒体捕获和视频采集。
媒体捕获流程
简单介绍
- AVCaptureSession:媒体捕获会话(包含音频和视频),负责把捕获的音视频数据输出到输出设备中,一个AVCaptureSession可以有多个输入输出。
在视频或音频捕获时,客户端可以实例化AVCaptureSession,并添加适当的输入(如AVCaptureDeviceInput等AVCaptureInput子类)和输出。
- AVCaptureInput和AVCaptureDevice:设备输入数据管理对象,可以根据AVCaptureDevice创建对应AVCaptureDeviceInput对象,该对象将会被添加到AVCaptureSession中管理。
- AVCaptureOutput:设备输出数据管理对象。
- AVCaptureVideoPreviewLayer和AVSampleBufferDisplayLayer:相机拍摄预览图层,都是CALayer的子类。前者创建时需要AVCaptureSession对象,后者可以直接创建,添加CMSampleBufferRef进行展示。
相关代码展示
/// Configures a capture session: picks the camera, locks in frame rate and
/// resolution, wires up input/output, and attaches a preview layer.
/// NOTE(review): the session is configured but never started here; the caller
/// is expected to invoke -startRunning. Camera permission must be requested
/// before calling this (see Tips below).
- (void)configureCamera {
    /// Capture parameters
    AVCaptureDevicePosition position = AVCaptureDevicePositionBack; // default: back camera
    int frameRate = 25;                                             // target frames per second
    OSType videoFormat = kCVPixelFormatType_32BGRA;                 // pixel format
    int resolutionHeight = 720;                                     // capture height in pixels

    /// Create the capture session and set the preset resolution
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    session.sessionPreset = AVCaptureSessionPreset1280x720;

    /// Find the camera matching the requested position.
    /// Fix: the original loop assigned captureDevice for BOTH front and back
    /// positions, so the last enumerated device won regardless of `position`.
    /// (Also fixed the missing ':' in discoverySessionWithDeviceTypes:.)
    AVCaptureDevice *captureDevice = nil;
    AVCaptureDeviceDiscoverySession *deviceDiscoverySession =
        [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
                                                               mediaType:AVMediaTypeVideo
                                                                position:position];
    for (AVCaptureDevice *device in deviceDiscoverySession.devices) {
        if (device.position == position) {
            captureDevice = device;
            break;
        }
    }
    if (!captureDevice) {
        NSLog(@"%s: 失败", __func__); // no camera for the requested position
        return;
    }

    /// Pick an active format supporting the requested frame rate, pixel
    /// format and resolution.
    /// Fix: the original called lockForConfiguration: twice but unlocked only
    /// once (lock imbalance), and kept iterating after a successful match.
    BOOL isSuccess = NO;
    for (AVCaptureDeviceFormat *vFormat in captureDevice.formats) {
        CMFormatDescriptionRef description = vFormat.formatDescription;
        float maxRate = ((AVFrameRateRange *)vFormat.videoSupportedFrameRateRanges.firstObject).maxFrameRate;
        if (maxRate < frameRate || CMFormatDescriptionGetMediaSubType(description) != videoFormat) {
            continue;
        }
        // Compare the format's dimensions against the requested resolution
        CMVideoDimensions dims = CMVideoFormatDescriptionGetDimensions(description);
        if (dims.height != resolutionHeight ||
            dims.width != [self.class getResolutionWidthByHeight:resolutionHeight]) {
            continue;
        }
        [session beginConfiguration];
        NSError *lockError = nil;
        if ([captureDevice lockForConfiguration:&lockError]) {
            captureDevice.activeFormat = vFormat;
            captureDevice.activeVideoMinFrameDuration = CMTimeMake(1, frameRate);
            captureDevice.activeVideoMaxFrameDuration = CMTimeMake(1, frameRate);
            [captureDevice unlockForConfiguration];
            isSuccess = YES;
        } else {
            NSLog(@"%s: 失败", __func__);
        }
        [session commitConfiguration];
        if (isSuccess) {
            break;
        }
    }

    /// Add input.
    /// Fix: the original compared an NSError* against noErr (an OSStatus);
    /// the Cocoa convention is to check the returned object, not the error.
    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
    if (!input) {
        NSLog(@"设置设备输入失败:%@", error.localizedDescription);
        return;
    }
    if ([session canAddInput:input]) {
        [session addInput:input];
    }

    /// Add output (fixed the missing ':(' in the videoSettings forKey call
    /// by using dictionary-literal syntax).
    AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
    videoDataOutput.videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey : @(videoFormat)};
    // NO = give the delegate more time to process old frames before new ones
    // are dropped, at the cost of potentially higher memory use. Default YES.
    videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
    // Serial queue on which sample buffers are delivered to the delegate
    dispatch_queue_t videoQueue = dispatch_queue_create("video_receive_queue", DISPATCH_QUEUE_SERIAL);
    [videoDataOutput setSampleBufferDelegate:self queue:videoQueue];
    if ([session canAddOutput:videoDataOutput]) {
        [session addOutput:videoDataOutput];
    }

    /// Preview layer: full-screen, aspect-fit, portrait orientation
    AVCaptureVideoPreviewLayer *videoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    videoPreviewLayer.backgroundColor = [UIColor blackColor].CGColor;
    NSLog(@"previewViewLayer = %@", NSStringFromCGRect(videoPreviewLayer.bounds));
    videoPreviewLayer.frame = [UIScreen mainScreen].bounds;
    videoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspect;
    if (videoPreviewLayer.connection.isVideoOrientationSupported) {
        videoPreviewLayer.connection.videoOrientation = AVCaptureVideoOrientationPortrait;
    } else {
        NSLog(@"不支持视频定向");
    }
    // Insert the preview layer into whichever view should display it
    UIView *showView = [[UIView alloc] init];
    [showView.layer insertSublayer:videoPreviewLayer atIndex:0];
}

/// Sample-buffer delegate callback — if H.264/H.265 encoding is needed, do it
/// here. (Fixed the missing ':(' tokens in the selector so it compiles.)
- (void)captureOutput:(AVCaptureOutput *)output
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    /*
    // Alternative preview path: enqueue buffers on an AVSampleBufferDisplayLayer
    AVSampleBufferDisplayLayer *previewLayer = [AVSampleBufferDisplayLayer layer];
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [previewLayer enqueueSampleBuffer:sampleBuffer];
    // Insert into whichever view should display it
    UIView *showView = [[UIView alloc] init];
    [showView.layer insertSublayer:previewLayer atIndex:0];
    */
}

/// Maps a capture height to its matching width (16:9, or 4:3 for 480).
/// Returns -1 for unsupported heights. Landscape/portrait orientation is not
/// considered here. (Fixed the missing ':(' in the selector.)
+ (int)getResolutionWidthByHeight:(int)height {
    switch (height) {
        case 2160: return 3840;
        case 1080: return 1920;
        case 720:  return 1280;
        case 480:  return 640;
        default:   return -1;
    }
}

Tips:设置采集之前,记得申请摄像头权限;如果没有权限,需要自己做判断,这里省略。
Demo地址整理后奉上。
有其他不明白的,可以留言,看到就会回复。
如果喜欢,请帮忙点赞。支持转载,转载请附原文链接。