- //
- // YMCameraManager.m
- // MSYOUPAI
- //
- // Created by YoMi on 2024/3/17.
- // Copyright © 2024 MS. All rights reserved.
- //
- #import "YMCameraManager.h"
- #import "UIImage+YMCameraManager.h"
- #define adjustingFocus @"adjustingFocus"
- #define showMessage(TITLE,MESSAGE,VC) dispatch_async(dispatch_get_main_queue(), ^{ \
- UIAlertController *alertController = [UIAlertController alertControllerWithTitle:TITLE message:MESSAGE preferredStyle:UIAlertControllerStyleAlert];\
- [alertController addAction:[UIAlertAction actionWithTitle:@"确定" style:UIAlertActionStyleDefault handler:nil]];\
- [VC presentViewController:alertController animated:YES completion:nil]; \
- });
- @interface YMCameraManager () <CAAnimationDelegate,AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureMetadataOutputObjectsDelegate>
- @property (nonatomic) dispatch_queue_t sessionQueue;
- @property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;
- @property (nonatomic, strong) AVCaptureDeviceInput *inputDevice;
- @property (nonatomic, strong) AVCaptureStillImageOutput *stillImageOutput;
- @property (nonatomic, strong) AVCaptureMetadataOutput *metadataOutput;
- @property (nonatomic, copy) void (^finishBlock)(void);
- /** Focus indicator view */
- @property (nonatomic, strong) UIView *focusView;
- /** Face recognition indicator view */
- @property (nonatomic, strong) UIView *faceView;
- // Whether the current focus was triggered manually
- @property (nonatomic, assign) BOOL isManualFocus;
- // Whether face recognition is currently enabled
- @property (nonatomic, assign) BOOL isStartFaceRecognition;
- @end
- @implementation YMCameraManager
- - (void)dealloc
- {
- NSLog(@"照相机管理释放");
- if ([self.session isRunning]) {
- [self.session stopRunning];
- self.session = nil;
- }
- [self setFocusObserver:NO];
- }
- - (instancetype)init
- {
- if (self = [super init]) {
- [self setup];
- }
- return self;
- }
- - (instancetype)initWithParentView:(UIView *)view
- {
- if (self = [super init]) {
- [self setup];
- [self configureWithParentLayer:view];
- }
- return self;
- }
- - (void)setup
- {
- self.session = [[AVCaptureSession alloc] init];
- self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
- self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
-
- // Create the session/focus queue
- [self createQueue];
- // Add the input device (front or back camera)
- [self addVideoInputFrontCamera:YES];
- // Add the output devices
- [self addStillImageOutput];
- // KVO observer for focus adjustment
- [self setFocusObserver:YES];
- }
- - (void)configureWithParentLayer:(UIView *)parent
- {
- if (!parent) {
- showMessage(@"提示", @"请加入附载视图", [YMGlobalUtils getCurrentVC]);
- return;
- }
-
-
- self.previewLayer.frame = parent.bounds;
- [parent.layer addSublayer:self.previewLayer];
- // Add the focus indicator box
- [self initfocusImageWithParent:parent];
- // Add the face recognition box
- [self initFaceImageWithParent:parent];
- [self.session startRunning];
- dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(2.0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
- self.isStartFaceRecognition = YES;
- });
- }
- /** Focus indicator box */
- - (void)initfocusImageWithParent:(UIView *)view
- {
- if (self.focusView) {
- return;
- }
- self.focusView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 70, 70)];
- self.focusView.backgroundColor = [UIColor clearColor];
- self.focusView.layer.borderColor = [UIColor greenColor].CGColor;
- self.focusView.layer.borderWidth = 1;
- self.focusView.alpha = 0;
- if (view.superview != nil) {
- [view.superview addSubview:self.focusView];
- }else{
- self.focusView = nil;
- }
- }
- /** Face recognition box */
- - (void)initFaceImageWithParent:(UIView *)view
- {
- if (self.faceView) {
- return;
- }
- self.faceView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 70, 70)];
- self.faceView.backgroundColor = [UIColor clearColor];
- self.faceView.layer.borderColor = [UIColor greenColor].CGColor;
- self.faceView.layer.borderWidth = 1;
- self.faceView.alpha = 0;
- if (view.superview) {
- [view.superview addSubview:self.faceView];
- }else{
- self.faceView = nil;
- }
- }
- /**
- * Create a serial queue so session work does not block the main thread
- */
- - (void)createQueue {
- dispatch_queue_t sessionQueue = dispatch_queue_create("session queue", DISPATCH_QUEUE_SERIAL);
- self.sessionQueue = sessionQueue;
- }
- /**
- * Add the video input device
- *
- * @param front YES for the front camera, NO for the back camera
- */
- - (void)addVideoInputFrontCamera:(BOOL)front {
- AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
- if (status == AVAuthorizationStatusAuthorized) {
-
- } else if (status == AVAuthorizationStatusNotDetermined) {
- [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
- if (granted) {
- [self addVideoInputFrontCamera:front];
- }
- }];
- return;
- } else if (status == AVAuthorizationStatusRestricted || status == AVAuthorizationStatusDenied) { // No camera permission
- showMessage(@"提示", @"您的设备相机未授权", [YMGlobalUtils getCurrentVC])
- return;
- }
-
- NSArray *devices = [AVCaptureDevice devices];
- AVCaptureDevice *frontCamera;
- AVCaptureDevice *backCamera;
- for (AVCaptureDevice *device in devices) {
- if ([device hasMediaType:AVMediaTypeVideo]) {
- if ([device position] == AVCaptureDevicePositionBack) {
- backCamera = device;
- } else {
- frontCamera = device;
- }
- }
- }
- NSError *error = nil;
- if (front) {
- AVCaptureDeviceInput *frontFacingCameraDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:frontCamera error:&error];
- if (!error) {
- if ([_session canAddInput:frontFacingCameraDeviceInput]) {
- [_session addInput:frontFacingCameraDeviceInput];
- self.inputDevice = frontFacingCameraDeviceInput;
- } else {
- NSLog(@"Couldn't add front facing video input");
- }
- }else{
- NSLog(@"你的设备没有照相机");
- }
- } else {
- AVCaptureDeviceInput *backFacingCameraDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:backCamera error:&error];
- if (!error) {
- if ([_session canAddInput:backFacingCameraDeviceInput]) {
- [_session addInput:backFacingCameraDeviceInput];
- self.inputDevice = backFacingCameraDeviceInput;
- } else {
- NSLog(@"Couldn't add back facing video input");
- }
- }else{
- NSLog(@"你的设备没有照相机");
- }
- }
- if (error) {
- showMessage(@"提示", @"您的设备没有照相机或是未授权", [YMGlobalUtils getCurrentVC])
- }
- }
- /**
- * Add the output devices (still image + face metadata)
- */
- - (void)addStillImageOutput
- {
-
-
- AVCaptureStillImageOutput *tmpOutput = [[AVCaptureStillImageOutput alloc] init];
- NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecTypeJPEG,AVVideoCodecKey,nil];// Output JPEG
- tmpOutput.outputSettings = outputSettings;
-
- // AVCaptureConnection *videoConnection = [self findVideoConnection];
-
- [_session addOutput:tmpOutput];
-
- self.stillImageOutput = tmpOutput;
-
- // AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init];
- // if ([self.session canAddOutput:dataOutput]) {
- // [self.session addOutput:dataOutput];
- // dispatch_queue_t cameraQueue;
- // cameraQueue = dispatch_queue_create("cameraQueue", DISPATCH_QUEUE_SERIAL);
- // [dataOutput setSampleBufferDelegate:self queue:cameraQueue];
- // }
-
- AVCaptureConnection *videoConnection = [self findVideoConnection];
- if (!videoConnection) {
- showMessage(@"提示", @"您的设备没有照相机或是未授权", [YMGlobalUtils getCurrentVC])
- return;
- }
-
-
- AVCaptureMetadataOutput *metadataOutput = [[AVCaptureMetadataOutput alloc] init];
- if ([_session canAddOutput:metadataOutput]) {
- [_session addOutput:metadataOutput];
- [metadataOutput setMetadataObjectTypes:@[AVMetadataObjectTypeFace]];
- [metadataOutput setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
- self.metadataOutput = metadataOutput;
- }
- }
- /** Take a photo */
- - (void)takePhotoWithImageBlock:(void (^)(UIImage *, UIImage *, UIImage *))block
- {
-
- AVCaptureConnection *videoConnection = [self findVideoConnection];
- if (!videoConnection) {
- showMessage(@"提示", @"您的设备没有照相机或是未授权", [YMGlobalUtils getCurrentVC])
- return;
- }
- __weak typeof(self) weak = self;
- [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
- if (error == nil && imageDataSampleBuffer != NULL) {
- NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
- UIImage *originImage = [[UIImage alloc] initWithData:imageData];
- NSLog(@"originImage = %@",originImage);
- CGFloat squareLength = weak.previewLayer.bounds.size.width;
- CGFloat previewLayerH = weak.previewLayer.bounds.size.height;
- // CGFloat headHeight = weak.previewLayer.bounds.size.height - squareLength;
- // NSLog(@"heeadHeight=%f",headHeight);
- CGSize size = CGSizeMake(squareLength*2, previewLayerH*2);
- UIImage *scaledImage = [originImage resizedImageWithContentMode:UIViewContentModeScaleAspectFill bounds:size interpolationQuality:kCGInterpolationHigh];
- NSLog(@"scaledImage = %@",scaledImage);
- CGRect cropFrame = CGRectMake((scaledImage.size.width - size.width) / 2, (scaledImage.size.height - size.height) / 2, size.width, size.height);
- NSLog(@"cropFrame:%@", [NSValue valueWithCGRect:cropFrame]);
- UIImage *croppedImage = [scaledImage croppedImage:cropFrame];
- NSLog(@"croppedImage = %@",croppedImage);
- UIDeviceOrientation orientation = [UIDevice currentDevice].orientation;
- if (orientation != UIDeviceOrientationPortrait) {
- CGFloat degree = 0;
- if (orientation == UIDeviceOrientationPortraitUpsideDown) {
- degree = 180;// M_PI;
- } else if (orientation == UIDeviceOrientationLandscapeLeft) {
- degree = -90;// -M_PI_2;
- } else if (orientation == UIDeviceOrientationLandscapeRight) {
- degree = 90;// M_PI_2;
- }
- croppedImage = [croppedImage rotatedByDegrees:degree];
- scaledImage = [scaledImage rotatedByDegrees:degree];
- originImage = [originImage rotatedByDegrees:degree];
- }
- if (block) {
- block(originImage,scaledImage,croppedImage);
- }
- }
-
- }];
-
- }
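- /*
- NOTE: AVCaptureStillImageOutput, used above, has been deprecated since iOS 10.
- A minimal sketch of the AVCapturePhotoOutput-based equivalent, assuming the class
- adopts AVCapturePhotoCaptureDelegate; the snippet is illustrative only and is not
- wired into this class:
- 
- AVCapturePhotoOutput *photoOutput = [[AVCapturePhotoOutput alloc] init];
- if ([self.session canAddOutput:photoOutput]) {
- [self.session addOutput:photoOutput];
- }
- AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettingsWithFormat:@{AVVideoCodecKey: AVVideoCodecTypeJPEG}];
- [photoOutput capturePhotoWithSettings:settings delegate:self];
- 
- // AVCapturePhotoCaptureDelegate callback (iOS 11+):
- // - (void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingPhoto:(AVCapturePhoto *)photo error:(NSError *)error
- // {
- //     NSData *imageData = [photo fileDataRepresentation];
- //     UIImage *image = [[UIImage alloc] initWithData:imageData];
- // }
- */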
- /** Toggle the flash mode (the cycle starts at auto, then off, then on, and repeats) */
- - (void)switchFlashModeDidFinishChanceBlock:(void (^)(YMCaptureFlashMode flashMode))block
- {
- Class captureDeviceClass = NSClassFromString(@"AVCaptureDevice");
- if (!captureDeviceClass) {
- showMessage(@"提示", @"您的设备没有拍照功能", [YMGlobalUtils getCurrentVC]);
- return;
- }
- YMCaptureFlashMode flashMode = YMCaptureFlashModeOff;
- AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
- NSError *lockError = nil;
- if (![device lockForConfiguration:&lockError]) {
- NSLog(@"Failed to lock device for configuration: %@", lockError);
- return;
- }
- if ([device hasFlash]) {
- if (device.flashMode == AVCaptureFlashModeOff) {
- device.flashMode = AVCaptureFlashModeOn;
- if (block) {
- block(YMCaptureFlashModeOn);
- }
-
- } else if (device.flashMode == AVCaptureFlashModeOn) {
- device.flashMode = AVCaptureFlashModeAuto;
- if (block) {
- block(YMCaptureFlashModeAuto);
- }
-
- } else if (device.flashMode == AVCaptureFlashModeAuto) {
- device.flashMode = AVCaptureFlashModeOff;
- if (block) {
- block(YMCaptureFlashModeOff);
- }
- }
- } else {
- showMessage(@"提示", @"您的设备没有闪光灯功能", [YMGlobalUtils getCurrentVC]);
- }
- [device unlockForConfiguration];
- }
- /** Switch between the front and back cameras */
- - (void)switchCamera:(BOOL)isFrontCamera didFinishChanceBlock:(void (^)(void))block
- {
- if (!_inputDevice) {
-
- if (block) {
- block();
- }
- showMessage(@"提示", @"您的设备没有摄像头", [YMGlobalUtils getCurrentVC])
- return;
- }
- if (block) {
- self.finishBlock = [block copy];
- }
- CABasicAnimation *caAnimation = [CABasicAnimation animationWithKeyPath:@"opacity"];
-
- /*
- duration        length of the animation
- repeatCount     number of repetitions; set to HUGE_VALF to repeat forever
- repeatDuration  total time the animation keeps running, regardless of repeat count
- beginTime       when the animation starts; to delay it, use [CACurrentMediaTime() + seconds]
- timingFunction  pacing curve of the animation
- autoreverses    whether the animation plays in reverse when it finishes
- fromValue       starting value of the animated property
- toValue         final value of the animated property
- byValue         delta applied to the animated property's starting value
- */
-
- // The "opacity" key path animates NSNumber values (the CGColor values originally passed here had no effect)
- caAnimation.fromValue = @(0.0);
- caAnimation.toValue = @(1.0);
- caAnimation.duration = 0.5f;
- caAnimation.delegate = self;
- [self.previewLayer addAnimation:caAnimation forKey:@"switchAnimation"];
-
- @weakify(self)
- dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
- @strongify(self)
- [self.session beginConfiguration];
- [self.session removeInput:self.inputDevice];
- [self addVideoInputFrontCamera:isFrontCamera];
- [self.session commitConfiguration];
-
- dispatch_async(dispatch_get_main_queue(), ^{
-
- });
- });
- }
- - (void)animationDidStop:(CAAnimation *)anim finished:(BOOL)flag
- {
- if (self.finishBlock) {
- self.finishBlock();
- }
- }
- /** Tap to focus */
- - (void)focusInPoint:(CGPoint)devicePoint
- {
- if (CGRectContainsPoint(_previewLayer.bounds, devicePoint) == NO) {
- return;
- }
- self.isManualFocus = YES;
- [self focusImageAnimateWithCenterPoint:devicePoint];
- devicePoint = [self convertToPointOfInterestFromViewCoordinates:devicePoint];
- [self focusWithMode:AVCaptureFocusModeAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:devicePoint monitorSubjectAreaChange:YES];
-
- }
- - (void)focusImageAnimateWithCenterPoint:(CGPoint)point
- {
- [self.focusView setCenter:point];
- self.focusView.transform = CGAffineTransformMakeScale(2.0, 2.0);
- __weak typeof(self) weak = self;
- [UIView animateWithDuration:0.3f delay:0.f options:UIViewAnimationOptionAllowUserInteraction animations:^{
- weak.focusView.alpha = 1.f;
- weak.focusView.transform = CGAffineTransformMakeScale(1.0, 1.0);
- } completion:^(BOOL finished) {
- [UIView animateWithDuration:0.5f delay:0.5f options:UIViewAnimationOptionAllowUserInteraction animations:^{
- weak.focusView.alpha = 0.f;
- } completion:^(BOOL finished) {
- weak.isManualFocus = NO;
- }];
- }];
- }
- - (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange
- {
-
- dispatch_async(_sessionQueue, ^{
- AVCaptureDevice *device = [self.inputDevice device];
- NSError *error = nil;
- if ([device lockForConfiguration:&error])
- {
- if ([device isFocusPointOfInterestSupported] && [device isFocusModeSupported:focusMode])
- {
- [device setFocusMode:focusMode];
- [device setFocusPointOfInterest:point];
- }
- if ([device isExposurePointOfInterestSupported] && [device isExposureModeSupported:exposureMode])
- {
- [device setExposureMode:exposureMode];
- [device setExposurePointOfInterest:point];
- }
- [device setSubjectAreaChangeMonitoringEnabled:monitorSubjectAreaChange];
- [device unlockForConfiguration];
- }
- else
- {
- NSLog(@"%@", error);
- }
- });
- }
- /**
- * Convert a point in view coordinates to the point of interest the camera expects
- * (the view point expressed relative to the preview frame).
- *
- * @param viewCoordinates the point in the preview view's coordinate system
- *
- * @return the normalized point of interest
- */
- - (CGPoint)convertToPointOfInterestFromViewCoordinates:(CGPoint)viewCoordinates {
- CGPoint pointOfInterest = CGPointMake(.5f, .5f);
- CGSize frameSize = _previewLayer.bounds.size;
-
- AVCaptureVideoPreviewLayer *videoPreviewLayer = self.previewLayer;
-
- if([[videoPreviewLayer videoGravity] isEqualToString:AVLayerVideoGravityResize]) {
- pointOfInterest = CGPointMake(viewCoordinates.y / frameSize.height, 1.f - (viewCoordinates.x / frameSize.width));
- } else {
- CGRect cleanAperture;
- for(AVCaptureInputPort *port in [[self.session.inputs lastObject]ports]) {
- if([port mediaType] == AVMediaTypeVideo) {
- cleanAperture = CMVideoFormatDescriptionGetCleanAperture([port formatDescription], YES);
- CGSize apertureSize = cleanAperture.size;
- CGPoint point = viewCoordinates;
-
- CGFloat apertureRatio = apertureSize.height / apertureSize.width;
- CGFloat viewRatio = frameSize.width / frameSize.height;
- CGFloat xc = .5f;
- CGFloat yc = .5f;
-
- if([[videoPreviewLayer videoGravity]isEqualToString:AVLayerVideoGravityResizeAspect]) {
- if(viewRatio > apertureRatio) {
- CGFloat y2 = frameSize.height;
- CGFloat x2 = frameSize.height * apertureRatio;
- CGFloat x1 = frameSize.width;
- CGFloat blackBar = (x1 - x2) / 2;
- if(point.x >= blackBar && point.x <= blackBar + x2) {
- xc = point.y / y2;
- yc = 1.f - ((point.x - blackBar) / x2);
- }
- } else {
- CGFloat y2 = frameSize.width / apertureRatio;
- CGFloat y1 = frameSize.height;
- CGFloat x2 = frameSize.width;
- CGFloat blackBar = (y1 - y2) / 2;
- if(point.y >= blackBar && point.y <= blackBar + y2) {
- xc = ((point.y - blackBar) / y2);
- yc = 1.f - (point.x / x2);
- }
- }
- } else if([[videoPreviewLayer videoGravity]isEqualToString:AVLayerVideoGravityResizeAspectFill]) {
- if(viewRatio > apertureRatio) {
- CGFloat y2 = apertureSize.width * (frameSize.width / apertureSize.height);
- xc = (point.y + ((y2 - frameSize.height) / 2.f)) / y2;
- yc = (frameSize.width - point.x) / frameSize.width;
- } else {
- CGFloat x2 = apertureSize.height * (frameSize.height / apertureSize.width);
- yc = 1.f - ((point.x + ((x2 - frameSize.width) / 2)) / x2);
- xc = point.y / frameSize.height;
- }
-
- }
-
- pointOfInterest = CGPointMake(xc, yc);
- break;
- }
- }
- }
-
- return pointOfInterest;
- }
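- /*
- Worked example for the AVLayerVideoGravityResize branch above: with a 320x480
- preview, a tap at (x = 160, y = 120) maps to the point of interest
- (120 / 480, 1 - 160 / 320) = (0.25, 0.5). The AspectFit/AspectFill branches apply
- the same mapping after compensating for letterboxing or cropping.
- */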
- /** Face-box animation */
- - (void)showFaceImageWithFrame:(CGRect)rect
- {
- if (self.isStartFaceRecognition) {
- self.isStartFaceRecognition = NO;
- self.faceView.frame = CGRectMake(rect.origin.y * self.previewLayer.frame.size.width-10, rect.origin.x * self.previewLayer.frame.size.height - 20, rect.size.width * self.previewLayer.frame.size.width * 2, rect.size.height * self.previewLayer.frame.size.height);
-
- self.faceView.transform = CGAffineTransformMakeScale(1.5, 1.5);
- __weak typeof(self) weak = self;
- [UIView animateWithDuration:0.3f animations:^{
- weak.faceView.alpha = 1.f;
- weak.faceView.transform = CGAffineTransformMakeScale(1.0, 1.0);
- } completion:^(BOOL finished) {
- [UIView animateWithDuration:2.f animations:^{
- weak.faceView.alpha = 0.f;
- } completion:^(BOOL finished) {
- if (weak.faceRecognitonCallBack) {
- weak.faceRecognitonCallBack(weak.faceView.frame);
- }
- weak.isStartFaceRecognition = YES;
-
- }];
- }];
- }
- }
- /** Find the video connection on the still image output */
- - (AVCaptureConnection *)findVideoConnection
- {
- AVCaptureConnection *videoConnection = nil;
- for (AVCaptureConnection *connection in _stillImageOutput.connections) {
- for (AVCaptureInputPort *port in connection.inputPorts) {
- if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
- videoConnection = connection;
- break;
- }
- }
- if (videoConnection) {
- break;
- }
- }
- return videoConnection;
- }
- /** Check whether the app has camera permission */
- + (BOOL)checkAuthority
- {
- NSString *mediaType = AVMediaTypeVideo;
- AVAuthorizationStatus authStatus = [AVCaptureDevice authorizationStatusForMediaType:mediaType];
- if(authStatus == AVAuthorizationStatusRestricted || authStatus == AVAuthorizationStatusDenied){
- return NO;
- }
- return YES;
- }
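- /*
- Typical call site (illustrative): check authorization before building the camera UI, e.g.
- 
- if (![YMCameraManager checkAuthority]) {
- // Ask the user to enable camera access in Settings, then bail out.
- return;
- }
- */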
- #pragma mark - Observer
- - (void)setFocusObserver:(BOOL)yes
- {
- AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
- if (device && [device isFocusPointOfInterestSupported]) {
- if (yes) {
- [device addObserver:self forKeyPath:adjustingFocus options:NSKeyValueObservingOptionNew|NSKeyValueObservingOptionOld context:nil];
- }else{
- [device removeObserver:self forKeyPath:adjustingFocus context:nil];
- }
- }else{
- //showMessage(@"提示", @"您的设备没有照相机或是未授权", [YMGlobalUtils getCurrentVC])
- }
- }
- /** Observe whether focusing has finished */
- - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
- if ([keyPath isEqualToString:adjustingFocus]) {
- BOOL isAdjustingFocus = [[change objectForKey:NSKeyValueChangeNewKey] boolValue];
- if (isAdjustingFocus) {
- if (self.isManualFocus==NO) {
- [self focusImageAnimateWithCenterPoint:CGPointMake(self.previewLayer.bounds.size.width/2, self.previewLayer.bounds.size.height/2)];
- }
- if ([self.delegate respondsToSelector:@selector(cameraDidStareFocus)]) {
- [self.delegate cameraDidStareFocus];
- }
- }else{
- if ([self.delegate respondsToSelector:@selector(cameraDidFinishFocus)]) {
- [self.delegate cameraDidFinishFocus];
- }
- }
- }
- }
- #pragma mark - AVCaptureMetadataOutputObjectsDelegate
- - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
- {
- if (self.canFaceRecognition) {
- for(AVMetadataObject *metadataObject in metadataObjects) {
- if([metadataObject.type isEqualToString:AVMetadataObjectTypeFace]) {
- [self showFaceImageWithFrame:metadataObject.bounds];
- }
- }
- }
- }
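- /*
- Note: AVMetadataObject.bounds is normalized to 0...1 in the capture device's
- coordinate space, which is why showFaceImageWithFrame: scales it by the preview
- layer's size (and swaps x/y). An alternative is to let the preview layer do the
- conversion, e.g.:
- 
- AVMetadataObject *converted = [self.previewLayer transformedMetadataObjectForMetadataObject:metadataObject];
- // converted.bounds is then expressed in the preview layer's coordinate system.
- */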
- /*
- #pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate
- - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
- {
- // if (self.isStartFaceRecognition) {
- // UIImage *curImage = [self getSampleBufferImageWithSampleBuffer:sampleBuffer];
- // CIContext *context = [CIContext contextWithOptions:@{kCIContextUseSoftwareRenderer:@YES}];
- // CIDetector *detector = [CIDetector detectorOfType:CIDetectorTypeFace context:context options:@{CIDetectorAccuracy:CIDetectorAccuracyHigh}];
- //
- // }
-
- }
- - (UIImage *)getSampleBufferImageWithSampleBuffer:(CMSampleBufferRef)sampleBuffer
- {
- CVImageBufferRef buffer;
- buffer = CMSampleBufferGetImageBuffer(sampleBuffer);
-
- CVPixelBufferLockBaseAddress(buffer, 0);
-
- // Read the image details from the CVImageBufferRef
- uint8_t *base;
- size_t width, height, bytesPerRow;
- base = CVPixelBufferGetBaseAddress(buffer);
- width = CVPixelBufferGetWidth(buffer);
- height = CVPixelBufferGetHeight(buffer);
- bytesPerRow = CVPixelBufferGetBytesPerRow(buffer);
-
- // Use those details to create a matching CGContextRef
- CGColorSpaceRef colorSpace;
- CGContextRef cgContext;
- colorSpace = CGColorSpaceCreateDeviceRGB();
- cgContext = CGBitmapContextCreate(base, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
- CGColorSpaceRelease(colorSpace);
-
- // Convert the CGContextRef to a UIImage via a CGImageRef
- CGImageRef cgImage;
- UIImage *image;
- cgImage = CGBitmapContextCreateImage(cgContext);
- image = [UIImage imageWithCGImage:cgImage];
- CGImageRelease(cgImage);
- CGContextRelease(cgContext);
-
- CVPixelBufferUnlockBaseAddress(buffer, 0);
-
- return image;
- }
- */
- @end
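- /*
- Minimal usage sketch (illustrative; the view controller, property, and view names
- below are hypothetical and not part of this file):
- 
- self.cameraManager = [[YMCameraManager alloc] initWithParentView:self.previewContainer];
- self.cameraManager.canFaceRecognition = YES;
- 
- // Tap-to-focus, driven by a UITapGestureRecognizer on the preview container:
- [self.cameraManager focusInPoint:[tap locationInView:self.previewContainer]];
- 
- // Capture: the block delivers the original, scaled, and cropped images.
- [self.cameraManager takePhotoWithImageBlock:^(UIImage *origin, UIImage *scaled, UIImage *cropped) {
- // Use whichever variant the UI needs, e.g. display `cropped`.
- }];
- 
- // Switch to the back camera; the completion block fires after the fade animation.
- [self.cameraManager switchCamera:NO didFinishChanceBlock:^{ }];
- */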