//
//  YMCameraManager.m
//  MSYOUPAI
//
//  Created by YoMi on 2024/3/17.
//  Copyright © 2024 MS. All rights reserved.
//

#import "YMCameraManager.h"
#import "UIImage+YMCameraManager.h"

#define adjustingFocus @"adjustingFocus"

#define showMessage(TITLE,MESSAGE,VC) dispatch_async(dispatch_get_main_queue(), ^{ \
    UIAlertController *alertController = [UIAlertController alertControllerWithTitle:TITLE message:MESSAGE preferredStyle:UIAlertControllerStyleAlert]; \
    [alertController addAction:[UIAlertAction actionWithTitle:@"确定" style:UIAlertActionStyleDefault handler:nil]]; \
    [VC presentViewController:alertController animated:YES completion:nil]; \
});
@interface YMCameraManager () <CAAnimationDelegate, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureMetadataOutputObjectsDelegate>

@property (nonatomic) dispatch_queue_t sessionQueue;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;
@property (nonatomic, strong) AVCaptureDeviceInput *inputDevice;
@property (nonatomic, strong) AVCaptureStillImageOutput *stillImageOutput;
@property (nonatomic, strong) AVCaptureMetadataOutput *metadataOutput;
@property (nonatomic, copy) void (^finishBlock)(void);
/** Focus indicator view */
@property (nonatomic, strong) UIView *focusView;
/** Face recognition indicator view */
@property (nonatomic, strong) UIView *faceView;
// Whether a manual (tap-to-focus) operation is in progress
@property (nonatomic, assign) BOOL isManualFocus;
// Whether face recognition is currently enabled
@property (nonatomic, assign) BOOL isStartFaceRecognition;

@end
@implementation YMCameraManager

- (void)dealloc
{
    NSLog(@"Camera manager deallocated");
    if ([self.session isRunning]) {
        [self.session stopRunning];
        self.session = nil;
    }
    [self setFocusObserver:NO];
}
- (instancetype)init
{
    if (self = [super init]) {
        [self setup];
    }
    return self;
}

- (instancetype)initWithParentView:(UIView *)view
{
    if (self = [super init]) {
        [self setup];
        [self configureWithParentLayer:view];
    }
    return self;
}
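
/*
 A minimal usage sketch (illustrative only — the hosting view controller,
 `self.view`, and the `cameraManager` property are assumptions, not part of
 this file):

     YMCameraManager *camera = [[YMCameraManager alloc] initWithParentView:self.view];
     // Keep a strong reference; the preview layer is added to self.view's
     // superlayer hierarchy and the session starts running after configuration.
     self.cameraManager = camera;
 */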
- (void)setup
{
    self.session = [[AVCaptureSession alloc] init];
    self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
    self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    // Serial queue for focus/session work
    [self createQueue];
    // Add the input device (front or back camera)
    [self addVideoInputFrontCamera:YES];
    // Add the output devices
    [self addStillImageOutput];
    // KVO observer for the focus state
    [self setFocusObserver:YES];
}
- (void)configureWithParentLayer:(UIView *)parent
{
    if (!parent) {
        showMessage(@"提示", @"请加入附载视图", [YMGlobalUtils getCurrentVC]);
        return;
    }
    self.previewLayer.frame = parent.bounds;
    [parent.layer addSublayer:self.previewLayer];
    // Add the focus indicator frame
    [self initfocusImageWithParent:parent];
    // Add the face recognition frame
    [self initFaceImageWithParent:parent];
    [self.session startRunning];
    // Enable face recognition after the session has had time to start
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(2.0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
        self.isStartFaceRecognition = YES;
    });
}
/** Focus indicator frame */
- (void)initfocusImageWithParent:(UIView *)view
{
    if (self.focusView) {
        return;
    }
    self.focusView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 70, 70)];
    self.focusView.backgroundColor = [UIColor clearColor];
    self.focusView.layer.borderColor = [UIColor greenColor].CGColor;
    self.focusView.layer.borderWidth = 1;
    self.focusView.alpha = 0;
    if (view.superview != nil) {
        [view.superview addSubview:self.focusView];
    } else {
        self.focusView = nil;
    }
}
/** Face recognition frame */
- (void)initFaceImageWithParent:(UIView *)view
{
    if (self.faceView) {
        return;
    }
    self.faceView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 70, 70)];
    self.faceView.backgroundColor = [UIColor clearColor];
    self.faceView.layer.borderColor = [UIColor greenColor].CGColor;
    self.faceView.layer.borderWidth = 1;
    self.faceView.alpha = 0;
    if (view.superview) {
        [view.superview addSubview:self.faceView];
    } else {
        self.faceView = nil;
    }
}
/**
 *  Create a serial queue so session work does not block the main thread
 */
- (void)createQueue {
    dispatch_queue_t sessionQueue = dispatch_queue_create("session queue", DISPATCH_QUEUE_SERIAL);
    self.sessionQueue = sessionQueue;
}
/**
 *  Add the input device
 *
 *  @param front YES for the front camera, NO for the back camera
 */
- (void)addVideoInputFrontCamera:(BOOL)front {
    AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    if (status == AVAuthorizationStatusAuthorized) {
        // Already authorized; fall through and configure the input
    } else if (status == AVAuthorizationStatusNotDetermined) {
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
            if (granted) {
                [self addVideoInputFrontCamera:front];
            }
        }];
        return;
    } else if (status == AVAuthorizationStatusRestricted || status == AVAuthorizationStatusDenied) { // No permission
        showMessage(@"提示", @"您的设备相机未授权", [YMGlobalUtils getCurrentVC]);
        return;
    }
    NSArray *devices = [AVCaptureDevice devices];
    AVCaptureDevice *frontCamera;
    AVCaptureDevice *backCamera;
    for (AVCaptureDevice *device in devices) {
        if ([device hasMediaType:AVMediaTypeVideo]) {
            if ([device position] == AVCaptureDevicePositionBack) {
                backCamera = device;
            } else {
                frontCamera = device;
            }
        }
    }
    NSError *error = nil;
    if (front) {
        AVCaptureDeviceInput *frontFacingCameraDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:frontCamera error:&error];
        if (!error) {
            if ([_session canAddInput:frontFacingCameraDeviceInput]) {
                [_session addInput:frontFacingCameraDeviceInput];
                self.inputDevice = frontFacingCameraDeviceInput;
            } else {
                NSLog(@"Couldn't add front facing video input");
            }
        } else {
            NSLog(@"Your device has no camera");
        }
    } else {
        AVCaptureDeviceInput *backFacingCameraDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:backCamera error:&error];
        if (!error) {
            if ([_session canAddInput:backFacingCameraDeviceInput]) {
                [_session addInput:backFacingCameraDeviceInput];
                self.inputDevice = backFacingCameraDeviceInput;
            } else {
                NSLog(@"Couldn't add back facing video input");
            }
        } else {
            NSLog(@"Your device has no camera");
        }
    }
    if (error) {
        showMessage(@"提示", @"您的设备没有照相机或是未授权", [YMGlobalUtils getCurrentVC]);
    }
}
/**
 *  Add the output devices
 */
- (void)addStillImageOutput
{
    AVCaptureStillImageOutput *tmpOutput = [[AVCaptureStillImageOutput alloc] init];
    // Output JPEG
    NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecTypeJPEG, AVVideoCodecKey, nil];
    tmpOutput.outputSettings = outputSettings;
    if ([_session canAddOutput:tmpOutput]) {
        [_session addOutput:tmpOutput];
    }
    self.stillImageOutput = tmpOutput;
    // AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init];
    // if ([self.session canAddOutput:dataOutput]) {
    //     [self.session addOutput:dataOutput];
    //     dispatch_queue_t cameraQueue;
    //     cameraQueue = dispatch_queue_create("cameraQueue", DISPATCH_QUEUE_SERIAL);
    //     [dataOutput setSampleBufferDelegate:self queue:cameraQueue];
    // }
    AVCaptureConnection *videoConnection = [self findVideoConnection];
    if (!videoConnection) {
        showMessage(@"提示", @"您的设备没有照相机或是未授权", [YMGlobalUtils getCurrentVC]);
        return;
    }
    AVCaptureMetadataOutput *metadataOutput = [[AVCaptureMetadataOutput alloc] init];
    if ([_session canAddOutput:metadataOutput]) {
        [_session addOutput:metadataOutput];
        [metadataOutput setMetadataObjectTypes:@[AVMetadataObjectTypeFace]];
        [metadataOutput setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
        self.metadataOutput = metadataOutput;
    }
}
/** Take a photo */
- (void)takePhotoWithImageBlock:(void (^)(UIImage *, UIImage *, UIImage *))block
{
    AVCaptureConnection *videoConnection = [self findVideoConnection];
    if (!videoConnection) {
        showMessage(@"提示", @"您的设备没有照相机或是未授权", [YMGlobalUtils getCurrentVC]);
        return;
    }
    __weak typeof(self) weak = self;
    [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
        if (error == nil && imageDataSampleBuffer != NULL) {
            NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
            UIImage *originImage = [[UIImage alloc] initWithData:imageData];
            NSLog(@"originImage = %@", originImage);
            CGFloat squareLength = weak.previewLayer.bounds.size.width;
            CGFloat previewLayerH = weak.previewLayer.bounds.size.height;
            // CGFloat headHeight = weak.previewLayer.bounds.size.height - squareLength;
            // NSLog(@"heeadHeight=%f",headHeight);
            // Scale to 2x the preview size, then crop the centered rect
            CGSize size = CGSizeMake(squareLength * 2, previewLayerH * 2);
            UIImage *scaledImage = [originImage resizedImageWithContentMode:UIViewContentModeScaleAspectFill bounds:size interpolationQuality:kCGInterpolationHigh];
            NSLog(@"scaledImage = %@", scaledImage);
            CGRect cropFrame = CGRectMake((scaledImage.size.width - size.width) / 2, (scaledImage.size.height - size.height) / 2, size.width, size.height);
            NSLog(@"cropFrame:%@", [NSValue valueWithCGRect:cropFrame]);
            UIImage *croppedImage = [scaledImage croppedImage:cropFrame];
            NSLog(@"croppedImage = %@", croppedImage);
            // Rotate the results to match the device orientation
            UIDeviceOrientation orientation = [UIDevice currentDevice].orientation;
            if (orientation != UIDeviceOrientationPortrait) {
                CGFloat degree = 0;
                if (orientation == UIDeviceOrientationPortraitUpsideDown) {
                    degree = 180; // M_PI
                } else if (orientation == UIDeviceOrientationLandscapeLeft) {
                    degree = -90; // -M_PI_2
                } else if (orientation == UIDeviceOrientationLandscapeRight) {
                    degree = 90; // M_PI_2
                }
                croppedImage = [croppedImage rotatedByDegrees:degree];
                scaledImage = [scaledImage rotatedByDegrees:degree];
                originImage = [originImage rotatedByDegrees:degree];
            }
            if (block) {
                block(originImage, scaledImage, croppedImage);
            }
        }
    }];
}
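
/*
 Usage sketch (illustrative only — `self.cameraManager` and the image view
 are assumptions):

     [self.cameraManager takePhotoWithImageBlock:^(UIImage *origin, UIImage *scaled, UIImage *cropped) {
         // origin:  the full still image as captured
         // scaled:  the image resized to 2x the preview layer's bounds
         // cropped: the centered crop matching the preview's aspect
         self.resultImageView.image = cropped;
     }];
 */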
/** Cycle the flash mode (starts at auto, then goes to off, then on, and loops) */
- (void)switchFlashModeDidFinishChanceBlock:(void (^)(YMCaptureFlashMode flashMode))block
{
    Class captureDeviceClass = NSClassFromString(@"AVCaptureDevice");
    if (!captureDeviceClass) {
        showMessage(@"提示", @"您的设备没有拍照功能", [YMGlobalUtils getCurrentVC]);
        return;
    }
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;
    if (![device lockForConfiguration:&error]) {
        NSLog(@"lockForConfiguration failed: %@", error);
        return;
    }
    if ([device hasFlash]) {
        if (device.flashMode == AVCaptureFlashModeOff) {
            device.flashMode = AVCaptureFlashModeOn;
            if (block) {
                block(YMCaptureFlashModeOn);
            }
        } else if (device.flashMode == AVCaptureFlashModeOn) {
            device.flashMode = AVCaptureFlashModeAuto;
            if (block) {
                block(YMCaptureFlashModeAuto);
            }
        } else if (device.flashMode == AVCaptureFlashModeAuto) {
            device.flashMode = AVCaptureFlashModeOff;
            if (block) {
                block(YMCaptureFlashModeOff);
            }
        }
    } else {
        showMessage(@"提示", @"您的设备没有闪光灯功能", [YMGlobalUtils getCurrentVC]);
    }
    [device unlockForConfiguration];
}
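
/*
 Usage sketch (illustrative only): cycle the mode from a toolbar button and
 reflect the result in the UI.

     [self.cameraManager switchFlashModeDidFinishChanceBlock:^(YMCaptureFlashMode flashMode) {
         // flashMode is the mode that is now active (on / auto / off)
         NSLog(@"flash mode is now %ld", (long)flashMode);
     }];
 */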
/** Switch between the front and back cameras */
- (void)switchCamera:(BOOL)isFrontCamera didFinishChanceBlock:(void (^)(void))block
{
    if (!_inputDevice) {
        if (block) {
            block();
        }
        showMessage(@"提示", @"您的设备没有摄像头", [YMGlobalUtils getCurrentVC]);
        return;
    }
    if (block) {
        self.finishBlock = [block copy];
    }
    CABasicAnimation *caAnimation = [CABasicAnimation animationWithKeyPath:@"opacity"];
    /*
     duration       length of the animation
     repeatCount    number of repeats (use HUGE_VALF to repeat forever)
     repeatDuration how long to keep repeating, regardless of count
     beginTime      start time; to delay, use [CACurrentMediaTime() + seconds]
     timingFunction pacing of the animation
     autoreverses   whether to play the reverse animation when finished
     fromValue      starting value of the animated property
     toValue        ending value of the animated property
     byValue        delta applied to the property's starting value
     */
    // "opacity" animates an NSNumber, so fade the preview in during the switch.
    // (The original passed CGColor values here, which Core Animation cannot
    // apply to this key path.)
    caAnimation.fromValue = @0.0;
    caAnimation.toValue = @1.0;
    caAnimation.duration = 0.5f;
    caAnimation.delegate = self;
    [self.previewLayer addAnimation:caAnimation forKey:@"switchAnimation"];
    @weakify(self)
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        @strongify(self)
        [self.session beginConfiguration];
        [self.session removeInput:self.inputDevice];
        [self addVideoInputFrontCamera:isFrontCamera];
        [self.session commitConfiguration];
    });
}
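
/*
 Usage sketch (illustrative only — the `usingFrontCamera` flag is an
 assumption): toggle the camera from a button action.

     self.usingFrontCamera = !self.usingFrontCamera;
     [self.cameraManager switchCamera:self.usingFrontCamera didFinishChanceBlock:^{
         // Called when the switch animation finishes
     }];
 */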
- (void)animationDidStop:(CAAnimation *)anim finished:(BOOL)flag
{
    if (self.finishBlock) {
        self.finishBlock();
    }
}
/** Tap to focus */
- (void)focusInPoint:(CGPoint)devicePoint
{
    if (CGRectContainsPoint(_previewLayer.bounds, devicePoint) == NO) {
        return;
    }
    self.isManualFocus = YES;
    [self focusImageAnimateWithCenterPoint:devicePoint];
    devicePoint = [self convertToPointOfInterestFromViewCoordinates:devicePoint];
    [self focusWithMode:AVCaptureFocusModeAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:devicePoint monitorSubjectAreaChange:YES];
}
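
/*
 Usage sketch (illustrative only — the gesture wiring and `previewContainer`
 are assumptions): forward taps on the preview's parent view to focusInPoint:.

     UITapGestureRecognizer *tap = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(handleTap:)];
     [self.previewContainer addGestureRecognizer:tap];

     - (void)handleTap:(UITapGestureRecognizer *)tap {
         [self.cameraManager focusInPoint:[tap locationInView:self.previewContainer]];
     }
 */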
- (void)focusImageAnimateWithCenterPoint:(CGPoint)point
{
    [self.focusView setCenter:point];
    self.focusView.transform = CGAffineTransformMakeScale(2.0, 2.0);
    __weak typeof(self) weak = self;
    [UIView animateWithDuration:0.3f delay:0.f options:UIViewAnimationOptionAllowUserInteraction animations:^{
        weak.focusView.alpha = 1.f;
        weak.focusView.transform = CGAffineTransformMakeScale(1.0, 1.0);
    } completion:^(BOOL finished) {
        [UIView animateWithDuration:0.5f delay:0.5f options:UIViewAnimationOptionAllowUserInteraction animations:^{
            weak.focusView.alpha = 0.f;
        } completion:^(BOOL finished) {
            weak.isManualFocus = NO;
        }];
    }];
}
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange
{
    dispatch_async(_sessionQueue, ^{
        AVCaptureDevice *device = [self.inputDevice device];
        NSError *error = nil;
        if ([device lockForConfiguration:&error]) {
            if ([device isFocusPointOfInterestSupported] && [device isFocusModeSupported:focusMode]) {
                [device setFocusMode:focusMode];
                [device setFocusPointOfInterest:point];
            }
            if ([device isExposurePointOfInterestSupported] && [device isExposureModeSupported:exposureMode]) {
                [device setExposureMode:exposureMode];
                [device setExposurePointOfInterest:point];
            }
            [device setSubjectAreaChangeMonitoringEnabled:monitorSubjectAreaChange];
            [device unlockForConfiguration];
        } else {
            NSLog(@"%@", error);
        }
    });
}
/**
 *  Convert a point in view coordinates to the point of interest the camera
 *  expects (normalized against the preview frame)
 *
 *  @param viewCoordinates the point in view coordinates
 *
 *  @return the normalized point of interest
 */
- (CGPoint)convertToPointOfInterestFromViewCoordinates:(CGPoint)viewCoordinates {
    CGPoint pointOfInterest = CGPointMake(.5f, .5f);
    CGSize frameSize = _previewLayer.bounds.size;
    AVCaptureVideoPreviewLayer *videoPreviewLayer = self.previewLayer;
    if ([[videoPreviewLayer videoGravity] isEqualToString:AVLayerVideoGravityResize]) {
        pointOfInterest = CGPointMake(viewCoordinates.y / frameSize.height, 1.f - (viewCoordinates.x / frameSize.width));
    } else {
        CGRect cleanAperture;
        for (AVCaptureInputPort *port in [[self.session.inputs lastObject] ports]) {
            if ([[port mediaType] isEqualToString:AVMediaTypeVideo]) {
                cleanAperture = CMVideoFormatDescriptionGetCleanAperture([port formatDescription], YES);
                CGSize apertureSize = cleanAperture.size;
                CGPoint point = viewCoordinates;
                CGFloat apertureRatio = apertureSize.height / apertureSize.width;
                CGFloat viewRatio = frameSize.width / frameSize.height;
                CGFloat xc = .5f;
                CGFloat yc = .5f;
                if ([[videoPreviewLayer videoGravity] isEqualToString:AVLayerVideoGravityResizeAspect]) {
                    if (viewRatio > apertureRatio) {
                        CGFloat y2 = frameSize.height;
                        CGFloat x2 = frameSize.height * apertureRatio;
                        CGFloat x1 = frameSize.width;
                        CGFloat blackBar = (x1 - x2) / 2;
                        if (point.x >= blackBar && point.x <= blackBar + x2) {
                            xc = point.y / y2;
                            yc = 1.f - ((point.x - blackBar) / x2);
                        }
                    } else {
                        CGFloat y2 = frameSize.width / apertureRatio;
                        CGFloat y1 = frameSize.height;
                        CGFloat x2 = frameSize.width;
                        CGFloat blackBar = (y1 - y2) / 2;
                        if (point.y >= blackBar && point.y <= blackBar + y2) {
                            xc = ((point.y - blackBar) / y2);
                            yc = 1.f - (point.x / x2);
                        }
                    }
                } else if ([[videoPreviewLayer videoGravity] isEqualToString:AVLayerVideoGravityResizeAspectFill]) {
                    if (viewRatio > apertureRatio) {
                        CGFloat y2 = apertureSize.width * (frameSize.width / apertureSize.height);
                        xc = (point.y + ((y2 - frameSize.height) / 2.f)) / y2;
                        yc = (frameSize.width - point.x) / frameSize.width;
                    } else {
                        CGFloat x2 = apertureSize.height * (frameSize.height / apertureSize.width);
                        yc = 1.f - ((point.x + ((x2 - frameSize.width) / 2)) / x2);
                        xc = point.y / frameSize.height;
                    }
                }
                pointOfInterest = CGPointMake(xc, yc);
                break;
            }
        }
    }
    return pointOfInterest;
}
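
/*
 A worked example of the mapping above (assumed numbers, for illustration):
 with videoGravity == AVLayerVideoGravityResize and a 375 x 667 preview,
 a tap at (100, 300) maps to

     pointOfInterest = (300 / 667, 1 - 100 / 375) ≈ (0.45, 0.73)

 i.e. view x/y are swapped and normalized, because the camera's point of
 interest is expressed in the sensor's landscape coordinate space.
 */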
/** Face frame animation */
- (void)showFaceImageWithFrame:(CGRect)rect
{
    if (self.isStartFaceRecognition) {
        self.isStartFaceRecognition = NO;
        // Metadata bounds are normalized with x/y swapped relative to the preview
        self.faceView.frame = CGRectMake(rect.origin.y * self.previewLayer.frame.size.width - 10, rect.origin.x * self.previewLayer.frame.size.height - 20, rect.size.width * self.previewLayer.frame.size.width * 2, rect.size.height * self.previewLayer.frame.size.height);
        self.faceView.transform = CGAffineTransformMakeScale(1.5, 1.5);
        __weak typeof(self) weak = self;
        [UIView animateWithDuration:0.3f animations:^{
            weak.faceView.alpha = 1.f;
            weak.faceView.transform = CGAffineTransformMakeScale(1.0, 1.0);
        } completion:^(BOOL finished) {
            [UIView animateWithDuration:2.f animations:^{
                weak.faceView.alpha = 0.f;
            } completion:^(BOOL finished) {
                if (weak.faceRecognitonCallBack) {
                    weak.faceRecognitonCallBack(weak.faceView.frame);
                }
                weak.isStartFaceRecognition = YES;
            }];
        }];
    }
}
/** Find the video connection on the still image output */
- (AVCaptureConnection *)findVideoConnection
{
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in _stillImageOutput.connections) {
        for (AVCaptureInputPort *port in connection.inputPorts) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) {
            break;
        }
    }
    return videoConnection;
}
/** Check whether the app has camera permission */
+ (BOOL)checkAuthority
{
    NSString *mediaType = AVMediaTypeVideo;
    AVAuthorizationStatus authStatus = [AVCaptureDevice authorizationStatusForMediaType:mediaType];
    if (authStatus == AVAuthorizationStatusRestricted || authStatus == AVAuthorizationStatusDenied) {
        return NO;
    }
    return YES;
}
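
/*
 Usage sketch (illustrative only): gate the camera UI on the permission check.

     if ([YMCameraManager checkAuthority]) {
         // Safe to present the camera interface
     } else {
         // Prompt the user to enable camera access in Settings
     }

 Note that this also returns YES for AVAuthorizationStatusNotDetermined; the
 actual request is made lazily in addVideoInputFrontCamera:.
 */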
#pragma mark - Observer
- (void)setFocusObserver:(BOOL)yes
{
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (device && [device isFocusPointOfInterestSupported]) {
        if (yes) {
            [device addObserver:self forKeyPath:adjustingFocus options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld context:nil];
        } else {
            [device removeObserver:self forKeyPath:adjustingFocus context:nil];
        }
    } else {
        // showMessage(@"提示", @"您的设备没有照相机或是未授权", [YMGlobalUtils getCurrentVC])
    }
}
/** Observe whether focusing has finished */
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
    if ([keyPath isEqualToString:adjustingFocus]) {
        BOOL isAdjustingFocus = [[change objectForKey:NSKeyValueChangeNewKey] boolValue];
        if (isAdjustingFocus) {
            // Show the focus animation at the center unless the user tapped to focus
            if (self.isManualFocus == NO) {
                [self focusImageAnimateWithCenterPoint:CGPointMake(self.previewLayer.bounds.size.width / 2, self.previewLayer.bounds.size.height / 2)];
            }
            if ([self.delegate respondsToSelector:@selector(cameraDidStareFocus)]) {
                [self.delegate cameraDidStareFocus];
            }
        } else {
            if ([self.delegate respondsToSelector:@selector(cameraDidFinishFocus)]) {
                [self.delegate cameraDidFinishFocus];
            }
        }
    }
}
#pragma mark - AVCaptureMetadataOutputObjectsDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
    if (self.canFaceRecognition) {
        for (AVMetadataObject *metadataObject in metadataObjects) {
            if ([metadataObject.type isEqualToString:AVMetadataObjectTypeFace]) {
                [self showFaceImageWithFrame:metadataObject.bounds];
            }
        }
    }
}
/*
#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    // if (self.isStartFaceRecognition) {
    //     UIImage *curImage = [self getSampleBufferImageWithSampleBuffer:sampleBuffer];
    //     CIContext *context = [CIContext contextWithOptions:@{kCIContextUseSoftwareRenderer: @YES}];
    //     CIDetector *detector = [CIDetector detectorOfType:CIDetectorTypeFace context:context options:@{CIDetectorAccuracy: CIDetectorAccuracyHigh}];
    // }
}

- (UIImage *)getSampleBufferImageWithSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    CVImageBufferRef buffer;
    buffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(buffer, 0);
    // Read the image details from the CVImageBufferRef
    uint8_t *base;
    size_t width, height, bytesPerRow;
    base = CVPixelBufferGetBaseAddress(buffer);
    width = CVPixelBufferGetWidth(buffer);
    height = CVPixelBufferGetHeight(buffer);
    bytesPerRow = CVPixelBufferGetBytesPerRow(buffer);
    // Build a CGContextRef using those image details
    CGColorSpaceRef colorSpace;
    CGContextRef cgContext;
    colorSpace = CGColorSpaceCreateDeviceRGB();
    cgContext = CGBitmapContextCreate(base, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGColorSpaceRelease(colorSpace);
    // Convert the CGContextRef to a UIImage via a CGImageRef
    CGImageRef cgImage;
    UIImage *image;
    cgImage = CGBitmapContextCreateImage(cgContext);
    image = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);
    CGContextRelease(cgContext);
    CVPixelBufferUnlockBaseAddress(buffer, 0);
    return image;
}
*/

@end