This post shares a method for capturing images from the camera in real time on iOS; the concrete implementation follows below.
The requirement that prompted this: fetch the camera's current photo in real time. For a configured period of time, the camera has to stay running and continuously hand back the current image data through a callback.
The implementation combines AVCaptureDevice, AVCaptureSession, and AVCaptureVideoPreviewLayer with UIView, UIImageView, and UIImage.
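One prerequisite worth noting before the code: camera access only works once the user grants permission, which requires an NSCameraUsageDescription entry in Info.plist and, ideally, an explicit authorization request before the session starts. A minimal sketch (where exactly you call this is up to your app):

// Ask for camera permission before creating the camera view.
// NSCameraUsageDescription must exist in Info.plist, otherwise the app is killed on first access.
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
    dispatch_async(dispatch_get_main_queue(), ^{
        if (granted) {
            // safe to create YHCameraView and start capturing here
        } else {
            NSLog(@"Camera permission denied");
        }
    });
}];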
The implementation code is as follows:
#import <UIKit/UIKit.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreMedia/CoreMedia.h>
#import <AVFoundation/AVFoundation.h>

NS_ASSUME_NONNULL_BEGIN

@interface YHCameraView : UIView <AVCaptureVideoDataOutputSampleBufferDelegate>

// Optional preview target; this property is weak and never created internally,
// so assign an image view from outside if you want live preview updates.
@property (nonatomic, weak) UIImageView *cameraImageView;
@property (strong, nonatomic) AVCaptureDevice *device;
@property (strong, nonatomic) AVCaptureSession *captureSession;
@property (strong, nonatomic) AVCaptureVideoPreviewLayer *previewLayer;
// The most recent frame delivered by the capture session
@property (strong, nonatomic) UIImage *cameraImage;

@end

NS_ASSUME_NONNULL_END
#import "YHCameraView.h" @implementation YHCameraView - (instancetype)initWithFrame:(CGRect)frame { if (self = [super initWithFrame:frame]) { self.backgroundColor = [UIColor lightGrayColor]; [self createUI]; } return self; } /* // Only override drawRect: if you perform custom drawing. // An empty implementation adversely affects performance during animation. - (void)drawRect:(CGRect)rect { // Drawing code } */ - (void)createUI { NSArray* devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; for(AVCaptureDevice *device in devices) { if([device position] == AVCaptureDevicePositionFront) // 前置摄像头 self.device = device; } AVCaptureDeviceInput* input = [AVCaptureDeviceInput deviceInputWithDevice:self.device error:nil]; AVCaptureVideoDataOutput* output = [[AVCaptureVideoDataOutput alloc] init]; output.alwaysDiscardsLateVideoFrames = YES; dispatch_queue_t queue; queue = dispatch_queue_create("cameraQueue", NULL); [output setSampleBufferDelegate:self queue:queue]; NSString* key = (NSString *) kCVPixelBufferPixelFormatTypeKey; NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]; NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key]; [output setVideoSettings:videoSettings]; self.captureSession = [[AVCaptureSession alloc] init]; [self.captureSession addInput:input]; [self.captureSession addOutput:output]; [self.captureSession setSessionPreset:AVCaptureSessionPresetPhoto]; self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession]; self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; // CHECK FOR YOUR APP NSInteger screenWidth = self.frame.size.width; NSInteger screenHeitht = self.frame.size.height; self.previewLayer.frame = self.bounds; self.previewLayer.orientation = AVCaptureVideoOrientationPortrait; // CHECK FOR YOUR APP // [self.layer insertSublayer:self.previewLayer atIndex:0]; // Comment-out to hide preview layer [self.captureSession startRunning]; } - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection { CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); CVPixelBufferLockBaseAddress(imageBuffer, 0); uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer); size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer); size_t width = CVPixelBufferGetWidth(imageBuffer); size_t height = CVPixelBufferGetHeight(imageBuffer); CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst); CGImageRef newImage = CGBitmapContextCreateImage(newContext); CGContextRelease(newContext); CGColorSpaceRelease(colorSpace); self.cameraImage = [UIImage imageWithCGImage:newImage scale:1.0f orientation:UIImageOrientationLeftMirrored]; // UIImageOrientationDownMirrored self.cameraImageView.image = [UIImage imageWithCGImage:newImage scale:1.0f orientation:UIImageOrientationLeftMirrored]; CGImageRelease(newImage); CVPixelBufferUnlockBaseAddress(imageBuffer, 0); } @end
After instantiating the view, simply read cameraImage from the cameraView whenever the current frame is needed:
#pragma mark - Snapshot capture

/// Snapshot capture
- (YHCameraView *)cameraView {
    if (!_cameraView) {
        YHCameraView *view = [[YHCameraView alloc] init];
        view.frame = CGRectMake(1, 1, 1, 1); // keep the view (nearly) invisible
        view.cameraImageView.image = view.cameraImage;
        _cameraView = view;
    }
    return _cameraView;
}

NSString *strImg = [YHCameraManager imageBase64EncodedWithImage:self.cameraView.cameraImage
                                                   AndImageType:@"JPEG"]; // fetch the current frame
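To cover the "keep capturing for a set period" part of the requirement, one straightforward option is to poll cameraImage on a repeating timer. The sketch below is hypothetical: snapshotTimer is an assumed property on the calling class, and the one-second interval is a placeholder.

// Hypothetical polling loop: sample the latest frame once per second.
__weak typeof(self) weakSelf = self;
self.snapshotTimer = [NSTimer scheduledTimerWithTimeInterval:1.0 repeats:YES block:^(NSTimer *timer) {
    UIImage *frame = weakSelf.cameraView.cameraImage;
    if (frame) {
        NSString *strImg = [YHCameraManager imageBase64EncodedWithImage:frame AndImageType:@"JPEG"];
        // hand strImg to whatever needs it; invalidate the timer when the window ends
    }
}];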
/**
 Convert an image to Base64

 @param img  the source image
 @param type image type (PNG or JPEG)
 @return the encoded string, or nil on failure
 */
+ (NSString *)imageBase64EncodedWithImage:(UIImage *)img AndImageType:(NSString *)type {
    NSString *callBack = nil;
    if ([img isKindOfClass:[UIImage class]]) {
        NSData *data = nil;
        if ([type isEqualToString:@"PNG"]) {
            data = UIImagePNGRepresentation(img);
        } else {
            data = UIImageJPEGRepresentation(img, 1.0f);
        }
        NSString *encodedImgStr = [data base64EncodedStringWithOptions:NSDataBase64Encoding64CharacterLineLength];
        NSLog(@"YHCameraManager\nencodedImgStr: %@", encodedImgStr);
        return encodedImgStr;
    } else {
        return callBack;
    }
}
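Because the encoder inserts a line break every 64 characters (NSDataBase64Encoding64CharacterLineLength), whoever decodes the string should tolerate those breaks. A quick round-trip check, assuming strImg holds the encoded result:

// Decode the Base64 string back into a UIImage to verify the round trip.
// NSDataBase64DecodingIgnoreUnknownCharacters skips the line breaks added during encoding.
NSData *decoded = [[NSData alloc] initWithBase64EncodedString:strImg
                                                      options:NSDataBase64DecodingIgnoreUnknownCharacters];
UIImage *restored = [UIImage imageWithData:decoded];
NSLog(@"restored image size: %@", NSStringFromCGSize(restored.size));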