In this post I'd like to share a way to capture camera images in real time on iOS. Many readers may not be familiar with the topic yet, so this article is offered for reference; I hope you get a lot out of it. Let's take a look.
The details of capturing camera images in real time on iOS are as follows.

The requirement that came in: for a configured period of time, keep the camera running and continuously receive the current frame's image data through a callback.

The implementation combines AVCaptureDevice, AVCaptureSession, and AVCaptureVideoPreviewLayer with UIView, UIImageView, and UIImage.
GitHub
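Note that the code below assumes camera access has already been granted (and that NSCameraUsageDescription is present in Info.plist). A minimal authorization check, as a sketch that is not part of the original code, might look like this:

// Ask for camera permission before starting the capture session (sketch).
AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
if (status == AVAuthorizationStatusNotDetermined) {
    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
        if (granted) {
            // Safe to create the camera view / start the session here.
        }
    }];
}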
The implementation code is as follows:
#import <UIKit/UIKit.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreMedia/CoreMedia.h>
#import <AVFoundation/AVFoundation.h>

NS_ASSUME_NONNULL_BEGIN

@interface YHCameraView : UIView <AVCaptureVideoDataOutputSampleBufferDelegate>

@property (nonatomic, weak) UIImageView *cameraImageView;
@property (strong, nonatomic) AVCaptureDevice *device;
@property (strong, nonatomic) AVCaptureSession *captureSession;
@property (strong, nonatomic) AVCaptureVideoPreviewLayer *previewLayer;
@property (strong, nonatomic) UIImage *cameraImage;

@end

NS_ASSUME_NONNULL_END
#import "YHCameraView.h" @implementation YHCameraView - (instancetype)initWithFrame:(CGRect)frame { if (self = [super initWithFrame:frame]) { self.backgroundColor = [UIColor lightGrayColor]; [self createUI]; } return self; } /* // Only override drawRect: if you perform custom drawing. // An empty implementation adversely affects performance during animation. - (void)drawRect:(CGRect)rect { // Drawing code } */ - (void)createUI { NSArray* devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; for(AVCaptureDevice *device in devices) { if([device position] == AVCaptureDevicePositionFront) // 前置攝像頭 self.device = device; } AVCaptureDeviceInput* input = [AVCaptureDeviceInput deviceInputWithDevice:self.device error:nil]; AVCaptureVideoDataOutput* output = [[AVCaptureVideoDataOutput alloc] init]; output.alwaysDiscardsLateVideoFrames = YES; dispatch_queue_t queue; queue = dispatch_queue_create("cameraQueue", NULL); [output setSampleBufferDelegate:self queue:queue]; NSString* key = (NSString *) kCVPixelBufferPixelFormatTypeKey; NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]; NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key]; [output setVideoSettings:videoSettings]; self.captureSession = [[AVCaptureSession alloc] init]; [self.captureSession addInput:input]; [self.captureSession addOutput:output]; [self.captureSession setSessionPreset:AVCaptureSessionPresetPhoto]; self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession]; self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; // CHECK FOR YOUR APP NSInteger screenWidth = self.frame.size.width; NSInteger screenHeitht = self.frame.size.height; self.previewLayer.frame = self.bounds; self.previewLayer.orientation = AVCaptureVideoOrientationPortrait; // CHECK FOR YOUR APP // [self.layer insertSublayer:self.previewLayer atIndex:0]; // Comment-out to hide preview layer [self.captureSession startRunning]; } - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection { CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); CVPixelBufferLockBaseAddress(imageBuffer, 0); uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer); size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer); size_t width = CVPixelBufferGetWidth(imageBuffer); size_t height = CVPixelBufferGetHeight(imageBuffer); CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst); CGImageRef newImage = CGBitmapContextCreateImage(newContext); CGContextRelease(newContext); CGColorSpaceRelease(colorSpace); self.cameraImage = [UIImage imageWithCGImage:newImage scale:1.0f orientation:UIImageOrientationLeftMirrored]; // UIImageOrientationDownMirrored self.cameraImageView.image = [UIImage imageWithCGImage:newImage scale:1.0f orientation:UIImageOrientationLeftMirrored]; CGImageRelease(newImage); CVPixelBufferUnlockBaseAddress(imageBuffer, 0); } @end
After instantiating the view, simply read the cameraImage property of the cameraView whenever the current frame is needed:
#pragma mark - Snapshot capture
/// Snapshot capture
- (YHCameraView *)cameraView {
    if (!_cameraView) {
        YHCameraView *view = [[YHCameraView alloc] init];
        // Keep the view effectively invisible; it exists only to run the capture session.
        view.frame = CGRectMake(1, 1, 1, 1);
        view.cameraImageView.image = view.cameraImage;
        _cameraView = view;
    }
    return _cameraView;
}

NSString *strImg = [YHCameraManager imageBase64EncodedWithImage:self.cameraView.cameraImage
                                                   AndImageType:@"JPEG"]; // Fetch the current frame as Base64
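To cover the "real time, within a set period" part of the requirement, one option is to poll cameraImage on a timer. Below is a minimal sketch, not from the original post; snapshotTimer, the 0.5-second interval, and handleSnapshot: are all hypothetical names chosen for illustration:

// In the owning view controller's class extension (hypothetical property):
// @property (strong, nonatomic) NSTimer *snapshotTimer;

- (void)startSnapshotPolling {
    // Poll the latest frame twice a second (interval chosen arbitrarily).
    self.snapshotTimer = [NSTimer scheduledTimerWithTimeInterval:0.5
                                                          target:self
                                                        selector:@selector(handleSnapshot:)
                                                        userInfo:nil
                                                         repeats:YES];
}

- (void)handleSnapshot:(NSTimer *)timer {
    UIImage *frame = self.cameraView.cameraImage; // latest frame written by the delegate callback
    if (frame) {
        // Hand the frame off as needed, e.g. Base64-encode and upload it.
        NSLog(@"Captured frame of size %@", NSStringFromCGSize(frame.size));
    }
}

- (void)stopSnapshotPolling {
    [self.snapshotTimer invalidate];
    self.snapshotTimer = nil;
}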
/**
 Encode an image as Base64

 @param img  The source image
 @param type The image type (PNG or JPEG)
 @return The encoded string, or nil on failure
 */
+ (NSString *)imageBase64EncodedWithImage:(UIImage *)img AndImageType:(NSString *)type {
    if ([img isKindOfClass:[UIImage class]]) {
        NSData *data = nil;
        if ([type isEqualToString:@"PNG"]) {
            data = UIImagePNGRepresentation(img);
        } else {
            data = UIImageJPEGRepresentation(img, 1.0f);
        }
        NSString *encodedImgStr = [data base64EncodedStringWithOptions:NSDataBase64Encoding64CharacterLineLength];
        NSLog(@"YHCameraManager\nencodedImgStr: %@", encodedImgStr);
        return encodedImgStr;
    } else {
        return nil;
    }
}
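Going the other way, for example to verify the payload on the receiving end, the string can be decoded back into a UIImage. A minimal sketch; note the ignore-unknown-characters option is needed because the encoder above inserts line breaks every 64 characters:

// Decode a Base64 string (as produced above) back into a UIImage.
NSData *imgData = [[NSData alloc] initWithBase64EncodedString:strImg
                                                      options:NSDataBase64DecodingIgnoreUnknownCharacters];
UIImage *decodedImg = imgData ? [UIImage imageWithData:imgData] : nil;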
That concludes "How to capture camera images in real time on iOS". Thanks for reading! Hopefully you now have a solid grasp of the technique and found the shared content helpful. To learn more, feel free to follow the Yisu Cloud industry news channel!