Displaying Video in iOS with AVFoundation
This article shares a working example of displaying live video in iOS with AVFoundation, for your reference. The details are as follows.
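The post only includes the implementation file. For it to compile, the header needs to declare the ivars the code uses and adopt the sample-buffer delegate protocol. The original header is not shown, so the following is a minimal reconstruction inferred from the .m file, not the author's actual file:

// Capter2ViewController.h -- hypothetical reconstruction based on the .m file below
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

@interface Capter2ViewController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate>
{
    AVCaptureSession *session;  // capture session, released in dealloc
    UIView *videoPreviewView;   // hosts the AVCaptureVideoPreviewLayer
    UIImageView *imgView;       // optional target for converted frames (commented out below)
}
@end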
//
//  Capter2ViewController.m
//  IosTest
//
//  Created by garin on 13-7-19.
//  Copyright (c) 2013 garin. All rights reserved.
//

#import "Capter2ViewController.h"

@interface Capter2ViewController ()
@end

@implementation Capter2ViewController

- (void)dealloc
{
    [session release];
    [super dealloc];
}

- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
    self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
    if (self) {
        // Custom initialization
    }
    return self;
}

- (void)viewDidLoad
{
    [super viewDidLoad];

    videoPreviewView = [[UIView alloc] initWithFrame:CGRectMake(10, 10, 320, 200)];
    [self.view addSubview:videoPreviewView];
    [videoPreviewView release];

    // Set up the capture session and start displaying the camera feed
    [self setupCaptureSession];

    //imgView = [[UIImageView alloc] initWithFrame:CGRectMake(10, 230, 320, 100)];
    //imgView.backgroundColor = [UIColor grayColor];
    //[self.view addSubview:imgView];
    //[imgView release];

    UIButton *closeBtn = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    closeBtn.frame = CGRectMake(10, 220, 300, 50);
    [closeBtn setTitle:@"Press" forState:UIControlStateNormal];
    [closeBtn addTarget:self action:@selector(closeBtnClick:) forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:closeBtn];
}

- (void)closeBtnClick:(id)sender
{
    [session stopRunning];
}

- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

- (void)setupCaptureSession
{
    NSError *error = nil;

    // Create the session
    session = [[AVCaptureSession alloc] init];

    // Configure the session to produce lower resolution video frames, if your
    // processing algorithm can cope. We'll specify low quality for the chosen device.
    session.sessionPreset = AVCaptureSessionPresetLow;

    // Find a suitable AVCaptureDevice
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    // Create a device input with the device and add it to the session.
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!input) {
        // Handle the error appropriately.
    }
    [session addInput:input];

    // Create a VideoDataOutput and add it to the session
    AVCaptureVideoDataOutput *output = [[[AVCaptureVideoDataOutput alloc] init] autorelease];
    [session addOutput:output];

    // Configure your output: deliver sample buffers on a dedicated serial queue
    dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
    [output setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);

    // Specify the pixel format
    output.videoSettings = [NSDictionary dictionaryWithObject:
                                [NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                forKey:(id)kCVPixelBufferPixelFormatTypeKey];

    // If you wish to cap the frame rate to a known value, such as 15 fps, set
    // minFrameDuration (both approaches below are deprecated; see the note after the listing).
    //output.minFrameDuration = CMTimeMake(1, 15);
    //AVCaptureConnection *avcaptureconn = [[AVCaptureConnection alloc] init];
    //[avcaptureconn setVideoMinFrameDuration:CMTimeMake(1, 15)];

    // Start the session running to start the flow of data
    [session startRunning];

    AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:session];
    previewLayer.frame = videoPreviewView.bounds; // the UIView the video is displayed in
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    //[previewLayer setOrientation:AVCaptureVideoOrientationLandscapeRight];
    //if (previewLayer.orientationSupported) {
    //    previewLayer.orientation = mOrientation;
    //}
    [videoPreviewView.layer addSublayer:previewLayer];

    if (![session isRunning]) {
        [session startRunning];
    }

    // Assign session to an ivar.
    //[self setSession:session];
}

// Delegate callback: receives the video stream, one sample buffer per frame
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    // The early return below disables the per-frame conversion; remove it (and
    // re-enable imgView in viewDidLoad) to display the converted frames.
    return;

    // Create a UIImage from the sample buffer data
    UIImage *image = [self imageFromSampleBuffer:sampleBuffer];
    // Show the image obtained from the video stream
    imgView.image = image;
}

// Create a UIImage from sample buffer data
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    // Get a CMSampleBuffer's Core Video image buffer for the media data
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock the base address of the pixel buffer
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    // Get the base address of the pixel buffer
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);

    // Get the number of bytes per row for the pixel buffer
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    // Get the pixel buffer width and height
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Create a device-dependent RGB color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    // Create a bitmap graphics context with the sample buffer data
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
        bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    // Create a Quartz image from the pixel data in the bitmap graphics context
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    // Unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    // Free up the context and color space
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    // Create an image object from the Quartz image
    UIImage *image = [UIImage imageWithCGImage:quartzImage];

    // Release the Quartz image
    CGImageRelease(quartzImage);

    return image;
}

@end
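A note on the commented-out frame-rate code above: AVCaptureVideoDataOutput's minFrameDuration and AVCaptureConnection's videoMinFrameDuration have both been deprecated. On iOS 7 and later, the frame rate is capped by configuring the capture device itself. This is a minimal sketch, assuming the device variable from setupCaptureSession and a format that supports 15 fps:

// Cap capture at ~15 fps by configuring the device (iOS 7+).
// Assumes `device` is the AVCaptureDevice obtained in setupCaptureSession.
NSError *frameRateError = nil;
if ([device lockForConfiguration:&frameRateError]) {
    device.activeVideoMinFrameDuration = CMTimeMake(1, 15);
    device.activeVideoMaxFrameDuration = CMTimeMake(1, 15);
    [device unlockForConfiguration];
} else {
    NSLog(@"Could not lock device for configuration: %@", frameRateError);
}

Before applying a duration, it is worth checking device.activeFormat.videoSupportedFrameRateRanges to confirm the active format actually supports 15 fps, since setting an unsupported duration raises an exception.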
That's all for this article. I hope it helps with your studies, and please continue to support 毛票票.