iOS實現攝像頭實時采集圖像
本文實例為大家分享了iOS實現攝像頭實時采集圖像的具體代碼,供大家參考,具體內容如下
新接到一個實時獲取攝像頭當前照片的需求,在設定的時間內需要保持攝像頭處在開啟狀態並可以實時回調到當前的圖片數據信息;
此次結合 AVCaptureDevice、AVCaptureSession、AVCaptureVideoPreviewLayer 將其與 UIView、UIImageView 和 UIImage 相結合;
Github
具體實現 code 如下:
#import <UIKit/UIKit.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreMedia/CoreMedia.h>
#import <AVFoundation/AVFoundation.h>

NS_ASSUME_NONNULL_BEGIN

/// A view that keeps the front camera running and continuously publishes
/// the latest captured frame via the `cameraImage` property.
@interface YHCameraView : UIView <AVCaptureVideoDataOutputSampleBufferDelegate>

/// Image view refreshed with every captured frame.
/// FIX: was `weak`, but nothing in this file ever retains an image view,
/// so a weak reference could only ever be nil; the view must own it.
@property (nonatomic, strong) UIImageView *cameraImageView;

/// The front camera device selected in -createUI.
@property (nonatomic, strong) AVCaptureDevice *device;

/// Session driving the capture pipeline.
@property (nonatomic, strong) AVCaptureSession *captureSession;

/// On-screen preview layer (its insertion is commented out in the .m).
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;

/// Most recent frame delivered by the video data output.
/// nil until the first sample buffer arrives.
@property (nonatomic, strong, nullable) UIImage *cameraImage;

@end

NS_ASSUME_NONNULL_END
#import "YHCameraView.h"

@implementation YHCameraView

- (instancetype)initWithFrame:(CGRect)frame {
    if (self = [super initWithFrame:frame]) {
        self.backgroundColor = [UIColor lightGrayColor];
        [self createUI];
    }
    return self;
}

/*
// Only override drawRect: if you perform custom drawing.
// An empty implementation adversely affects performance during animation.
- (void)drawRect:(CGRect)rect {
    // Drawing code
}
*/

/// Builds the capture pipeline:
/// front camera -> BGRA video data output (delegate on a serial queue)
/// -> optional preview layer, then starts the session.
- (void)createUI {
    // Select the front camera.
    // NOTE(review): -devicesWithMediaType: is deprecated since iOS 10;
    // AVCaptureDeviceDiscoverySession is the modern replacement.
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices) {
        if ([device position] == AVCaptureDevicePositionFront) { // front camera
            self.device = device;
        }
    }

    // FIX: the original passed error:nil and ignored failures
    // (e.g. camera permission denied), then added a nil input.
    NSError *inputError = nil;
    AVCaptureDeviceInput *input =
        [AVCaptureDeviceInput deviceInputWithDevice:self.device error:&inputError];
    if (!input) {
        NSLog(@"YHCameraView: could not create camera input: %@", inputError);
        return;
    }

    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
    output.alwaysDiscardsLateVideoFrames = YES; // drop frames instead of queueing them

    // Sample buffers are delivered on this private serial queue.
    dispatch_queue_t queue = dispatch_queue_create("cameraQueue", DISPATCH_QUEUE_SERIAL);
    [output setSampleBufferDelegate:self queue:queue];

    // BGRA matches the CGBitmapContext configuration in the delegate callback below.
    output.videoSettings = @{
        (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)
    };

    self.captureSession = [[AVCaptureSession alloc] init];
    // FIX: guard with canAddInput:/canAddOutput: — adding an unsupported
    // input/output raises an exception.
    if ([self.captureSession canAddInput:input]) {
        [self.captureSession addInput:input];
    }
    if ([self.captureSession canAddOutput:output]) {
        [self.captureSession addOutput:output];
    }
    [self.captureSession setSessionPreset:AVCaptureSessionPresetPhoto];

    self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
    self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; // CHECK FOR YOUR APP
    self.previewLayer.frame = self.bounds;
    self.previewLayer.orientation = AVCaptureVideoOrientationPortrait; // CHECK FOR YOUR APP
    // [self.layer insertSublayer:self.previewLayer atIndex:0]; // Comment-out to hide preview layer

    // FIX: -startRunning blocks until the session starts; keep it off the
    // caller's (typically main) thread.
    dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
        [self.captureSession startRunning];
    });
}

/// Converts each BGRA sample buffer into a UIImage and publishes it.
/// Runs on the capture queue configured in -createUI.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (imageBuffer == NULL) {
        return; // not a video frame
    }

    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // BGRA pixel buffer -> CGImage (little-endian, premultiplied alpha first).
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext =
        CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace,
                              kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    // Mirror for the front camera in portrait.
    UIImage *image = [UIImage imageWithCGImage:newImage
                                         scale:1.0f
                                   orientation:UIImageOrientationLeftMirrored]; // UIImageOrientationDownMirrored
    CGImageRelease(newImage);

    // FIX: the original assigned cameraImageView.image on the capture queue;
    // UIKit must only be touched on the main thread.
    dispatch_async(dispatch_get_main_queue(), ^{
        self.cameraImage = image;
        self.cameraImageView.image = image;
    });
}

@end
將其實例化後在需要的時候直接獲取其 cameraView 的 cameraImage 即可;
#pragma mark - 快照采集

/// Lazily creates the hidden camera view used for snapshot capture.
/// The 1x1-point frame keeps the view effectively invisible while the
/// capture session runs.
- (YHCameraView *)cameraView {
    if (!_cameraView) {
        YHCameraView *view = [[YHCameraView alloc] init];
        view.frame = CGRectMake(1, 1, 1, 1);
        // NOTE(review): at this point no frame has been captured yet, so
        // cameraImage is presumably still nil and this assignment is a no-op;
        // also cameraImageView is a weak property that is never assigned in
        // the visible code — confirm it is set elsewhere before relying on it.
        view.cameraImageView.image = view.cameraImage;
        _cameraView = view;
    }
    return _cameraView;
}

// Grab the current photo as a Base64 string when needed.
NSString *strImg = [YHCameraManager imageBase64EncodedWithImage:self.cameraView.cameraImage AndImageType:@"JPEG"]; // fetch the photo data
/**
 Encodes an image as a Base64 string.

 @param img  The source image.
 @param type The image format: @"PNG" for PNG; any other value falls back to
             maximum-quality JPEG (matches the original behavior).
 @return The Base64 string with 64-character line breaks, or nil when `img`
         is not a UIImage or encoding fails.
 */
+ (NSString *)imageBase64EncodedWithImage:(UIImage *)img AndImageType:(NSString *)type {
    if (![img isKindOfClass:[UIImage class]]) {
        return nil; // FIX: replaces the never-assigned `callBack` variable
    }

    // FIX: the original allocated an empty [NSData data] that was always
    // immediately overwritten and discarded.
    NSData *data = [type isEqualToString:@"PNG"] ? UIImagePNGRepresentation(img)
                                                 : UIImageJPEGRepresentation(img, 1.0f);
    if (!data) {
        return nil; // encoding can fail (e.g. image without bitmap backing)
    }

    NSString *encodedImgStr =
        [data base64EncodedStringWithOptions:NSDataBase64Encoding64CharacterLineLength];
#ifdef DEBUG
    // FIX: keep the (potentially huge) Base64 dump out of release logs.
    NSLog(@"YHCameraManager\nencodedImgStr: %@", encodedImgStr);
#endif
    return encodedImgStr;
}
以上就是本文的全部內容,希望對大家的學習有所幫助,也希望大家多多支持WalkonNet。
推薦閱讀:
- None Found