//
//  YMCameraManager.m
//  MSYOUPAI
//
//  Created by YoMi on 2024/3/17.
//  Copyright © 2024 MS. All rights reserved.
//

#import "YMCameraManager.h"
#import "UIImage+YMCameraManager.h"

#define adjustingFocus @"adjustingFocus"

#define showMessage(TITLE,MESSAGE,VC) do {\
    UIAlertController *alertController = [UIAlertController alertControllerWithTitle:TITLE message:MESSAGE preferredStyle:UIAlertControllerStyleAlert];\
    [alertController addAction:[UIAlertAction actionWithTitle:@"OK" style:UIAlertActionStyleDefault handler:nil]];\
    [VC presentViewController:alertController animated:YES completion:nil];\
} while (0)

@interface YMCameraManager () <CAAnimationDelegate, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureMetadataOutputObjectsDelegate>

@property (nonatomic) dispatch_queue_t sessionQueue;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;
@property (nonatomic, strong) AVCaptureDeviceInput *inputDevice;
@property (nonatomic, strong) AVCaptureStillImageOutput *stillImageOutput;
@property (nonatomic, strong) AVCaptureMetadataOutput *metadataOutput;
@property (nonatomic, copy) void (^finishBlock)(void);
/** Focus indicator view */
@property (nonatomic, strong) UIView *focusView;
/** Face recognition indicator view */
@property (nonatomic, strong) UIView *faceView;
// Whether the current focus was triggered manually
@property (nonatomic, assign) BOOL isManualFocus;
// Whether face recognition is currently enabled
@property (nonatomic, assign) BOOL isStartFaceRecognition;

@end

@implementation YMCameraManager

- (void)dealloc
{
    NSLog(@"YMCameraManager deallocated");
    if ([self.session isRunning]) {
        [self.session stopRunning];
        self.session = nil;
    }
    [self setFocusObserver:NO];
}

- (instancetype)init
{
    if (self = [super init]) {
        [self setup];
    }
    return self;
}

- (instancetype)initWithParentView:(UIView *)view
{
    if (self = [super init]) {
        [self setup];
        [self configureWithParentLayer:view];
    }
    return self;
}

- (void)setup
{
    self.session = [[AVCaptureSession alloc] init];
    self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
    self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    // Serial queue for focus/session work
    [self createQueue];
    // Add the input device (front or back camera)
    [self addVideoInputFrontCamera:NO];
    // Add the output devices
    [self addStillImageOutput];
    // KVO for focus changes
    [self setFocusObserver:YES];
}

- (void)configureWithParentLayer:(UIView *)parent
{
    if (!parent) {
        showMessage(@"Notice", @"Please provide a parent view", [YMGlobalUtils getCurrentVC]);
        return;
    }
    self.previewLayer.frame = parent.bounds;
    [parent.layer addSublayer:self.previewLayer];
    // Add the focus indicator frame
    [self initfocusImageWithParent:parent];
    // Add the face recognition frame
    [self initFaceImageWithParent:parent];
    [self.session startRunning];
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(2.0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
        self.isStartFaceRecognition = YES;
    });
}

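/*
 A minimal usage sketch (illustrative, not part of the class). Assumes a host
 view controller with a full-screen container view; `self.previewContainer`
 and `self.cameraManager` are hypothetical names:

     self.cameraManager = [[YMCameraManager alloc] initWithParentView:self.previewContainer];

 or, to defer attaching the preview:

     self.cameraManager = [[YMCameraManager alloc] init];
     [self.cameraManager configureWithParentLayer:self.previewContainer];
*/
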
/** Focus indicator frame */
- (void)initfocusImageWithParent:(UIView *)view
{
    if (self.focusView) {
        return;
    }
    self.focusView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 70, 70)];
    self.focusView.backgroundColor = [UIColor clearColor];
    self.focusView.layer.borderColor = [UIColor greenColor].CGColor;
    self.focusView.layer.borderWidth = 1;
    self.focusView.alpha = 0;
    if (view.superview != nil) {
        [view.superview addSubview:self.focusView];
    } else {
        self.focusView = nil;
    }
}

/** Face recognition frame */
- (void)initFaceImageWithParent:(UIView *)view
{
    if (self.faceView) {
        return;
    }
    self.faceView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 70, 70)];
    self.faceView.backgroundColor = [UIColor clearColor];
    self.faceView.layer.borderColor = [UIColor greenColor].CGColor;
    self.faceView.layer.borderWidth = 1;
    self.faceView.alpha = 0;
    if (view.superview) {
        [view.superview addSubview:self.faceView];
    } else {
        self.faceView = nil;
    }
}

/**
 * Create a serial queue so session work does not block the main thread
 */
- (void)createQueue {
    dispatch_queue_t sessionQueue = dispatch_queue_create("session queue", DISPATCH_QUEUE_SERIAL);
    self.sessionQueue = sessionQueue;
}

/**
 * Add the input device
 *
 * @param front YES for the front camera, NO for the back camera
 */
- (void)addVideoInputFrontCamera:(BOOL)front {
    NSArray *devices = [AVCaptureDevice devices];
    AVCaptureDevice *frontCamera;
    AVCaptureDevice *backCamera;
    for (AVCaptureDevice *device in devices) {
        if ([device hasMediaType:AVMediaTypeVideo]) {
            if ([device position] == AVCaptureDevicePositionBack) {
                backCamera = device;
            } else {
                frontCamera = device;
            }
        }
    }
    NSError *error = nil;
    if (front) {
        AVCaptureDeviceInput *frontFacingCameraDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:frontCamera error:&error];
        if (!error) {
            if ([_session canAddInput:frontFacingCameraDeviceInput]) {
                [_session addInput:frontFacingCameraDeviceInput];
                self.inputDevice = frontFacingCameraDeviceInput;
            } else {
                NSLog(@"Couldn't add front facing video input");
            }
        } else {
            NSLog(@"This device has no camera");
        }
    } else {
        AVCaptureDeviceInput *backFacingCameraDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:backCamera error:&error];
        if (!error) {
            if ([_session canAddInput:backFacingCameraDeviceInput]) {
                [_session addInput:backFacingCameraDeviceInput];
                self.inputDevice = backFacingCameraDeviceInput;
            } else {
                NSLog(@"Couldn't add back facing video input");
            }
        } else {
            NSLog(@"This device has no camera");
        }
    }
    if (error) {
        showMessage(@"Notice", @"This device has no camera or access is not authorized", [YMGlobalUtils getCurrentVC]);
    }
}

/**
 * Add the output devices
 */
- (void)addStillImageOutput
{
    AVCaptureStillImageOutput *tmpOutput = [[AVCaptureStillImageOutput alloc] init];
    tmpOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecTypeJPEG}; // output JPEG
    if ([_session canAddOutput:tmpOutput]) {
        [_session addOutput:tmpOutput];
    }
    self.stillImageOutput = tmpOutput;
//    AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init];
//    if ([self.session canAddOutput:dataOutput]) {
//        [self.session addOutput:dataOutput];
//        dispatch_queue_t cameraQueue = dispatch_queue_create("cameraQueue", DISPATCH_QUEUE_SERIAL);
//        [dataOutput setSampleBufferDelegate:self queue:cameraQueue];
//    }
    AVCaptureConnection *videoConnection = [self findVideoConnection];
    if (!videoConnection) {
        showMessage(@"Notice", @"This device has no camera or access is not authorized", [YMGlobalUtils getCurrentVC]);
        return;
    }
    AVCaptureMetadataOutput *metadataOutput = [[AVCaptureMetadataOutput alloc] init];
    if ([_session canAddOutput:metadataOutput]) {
        [_session addOutput:metadataOutput];
        [metadataOutput setMetadataObjectTypes:@[AVMetadataObjectTypeFace]];
        [metadataOutput setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
        self.metadataOutput = metadataOutput;
    }
}

/** Take a photo */
- (void)takePhotoWithImageBlock:(void (^)(UIImage *, UIImage *, UIImage *))block
{
    AVCaptureConnection *videoConnection = [self findVideoConnection];
    if (!videoConnection) {
        showMessage(@"Notice", @"This device has no camera or access is not authorized", [YMGlobalUtils getCurrentVC]);
        return;
    }
    __weak typeof(self) weak = self;
    [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
        if (error == nil && imageDataSampleBuffer != NULL) {
            NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
            UIImage *originImage = [[UIImage alloc] initWithData:imageData];
            NSLog(@"originImage = %@", originImage);
            CGFloat squareLength = weak.previewLayer.bounds.size.width;
            CGFloat previewLayerH = weak.previewLayer.bounds.size.height;
            CGSize size = CGSizeMake(squareLength * 2, previewLayerH * 2);
            UIImage *scaledImage = [originImage resizedImageWithContentMode:UIViewContentModeScaleAspectFill bounds:size interpolationQuality:kCGInterpolationHigh];
            NSLog(@"scaledImage = %@", scaledImage);
            CGRect cropFrame = CGRectMake((scaledImage.size.width - size.width) / 2, (scaledImage.size.height - size.height) / 2, size.width, size.height);
            NSLog(@"cropFrame: %@", [NSValue valueWithCGRect:cropFrame]);
            UIImage *croppedImage = [scaledImage croppedImage:cropFrame];
            NSLog(@"croppedImage = %@", croppedImage);
            UIDeviceOrientation orientation = [UIDevice currentDevice].orientation;
            if (orientation != UIDeviceOrientationPortrait) {
                CGFloat degree = 0;
                if (orientation == UIDeviceOrientationPortraitUpsideDown) {
                    degree = 180; // M_PI
                } else if (orientation == UIDeviceOrientationLandscapeLeft) {
                    degree = -90; // -M_PI_2
                } else if (orientation == UIDeviceOrientationLandscapeRight) {
                    degree = 90; // M_PI_2
                }
                croppedImage = [croppedImage rotatedByDegrees:degree];
                scaledImage = [scaledImage rotatedByDegrees:degree];
                originImage = [originImage rotatedByDegrees:degree];
            }
            if (block) {
                block(originImage, scaledImage, croppedImage);
            }
        }
    }];
}

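/*
 Usage sketch (illustrative). The callback delivers the full-resolution photo,
 a scaled copy, and a center crop sized to the preview layer;
 `self.resultImageView` is a hypothetical outlet:

     [self.cameraManager takePhotoWithImageBlock:^(UIImage *origin, UIImage *scaled, UIImage *cropped) {
         self.resultImageView.image = cropped;
     }];
*/
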
/** Cycle the flash mode (off -> on -> auto -> off, starting from the current mode) */
- (void)switchFlashModeDidFinishChanceBlock:(void (^)(YMCaptureFlashMode flashMode))block
{
    Class captureDeviceClass = NSClassFromString(@"AVCaptureDevice");
    if (!captureDeviceClass) {
        showMessage(@"Notice", @"This device cannot take photos", [YMGlobalUtils getCurrentVC]);
        return;
    }
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;
    if (![device lockForConfiguration:&error]) {
        NSLog(@"%@", error);
        return;
    }
    if ([device hasFlash]) {
        if (device.flashMode == AVCaptureFlashModeOff) {
            device.flashMode = AVCaptureFlashModeOn;
            if (block) {
                block(YMCaptureFlashModeOn);
            }
        } else if (device.flashMode == AVCaptureFlashModeOn) {
            device.flashMode = AVCaptureFlashModeAuto;
            if (block) {
                block(YMCaptureFlashModeAuto);
            }
        } else if (device.flashMode == AVCaptureFlashModeAuto) {
            device.flashMode = AVCaptureFlashModeOff;
            if (block) {
                block(YMCaptureFlashModeOff);
            }
        }
    } else {
        showMessage(@"Notice", @"This device has no flash", [YMGlobalUtils getCurrentVC]);
    }
    [device unlockForConfiguration];
}

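/*
 Usage sketch (illustrative): cycle the flash and reflect the new mode in the
 UI. `self.flashButton` and the title strings are hypothetical:

     [self.cameraManager switchFlashModeDidFinishChanceBlock:^(YMCaptureFlashMode flashMode) {
         NSString *title = (flashMode == YMCaptureFlashModeOn) ? @"On"
                         : (flashMode == YMCaptureFlashModeAuto) ? @"Auto" : @"Off";
         [self.flashButton setTitle:title forState:UIControlStateNormal];
     }];
*/
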
/** Switch between the front and back cameras */
- (void)switchCamera:(BOOL)isFrontCamera didFinishChanceBlock:(void (^)(void))block
{
    if (!_inputDevice) {
        if (block) {
            block();
        }
        showMessage(@"Notice", @"This device has no camera", [YMGlobalUtils getCurrentVC]);
        return;
    }
    if (block) {
        self.finishBlock = [block copy];
    }
    CABasicAnimation *caAnimation = [CABasicAnimation animationWithKeyPath:@"opacity"];
    /*
     duration        length of the animation
     repeatCount     number of repeats; use HUGE_VALF to repeat indefinitely
     repeatDuration  time window during which the animation keeps repeating, regardless of count
     beginTime       start time; to delay by n seconds, use [CACurrentMediaTime() + n]
     timingFunction  pacing of the animation
     autoreverses    whether to play the animation in reverse once it finishes
     fromValue       starting value of the animated property
     toValue         ending value of the animated property
     byValue         delta applied to the starting value
     */
    // Brief fade of the preview layer while the inputs are swapped
    caAnimation.fromValue = @0.f;
    caAnimation.toValue = @1.f;
    caAnimation.duration = 0.5f;
    caAnimation.delegate = self;
    [self.previewLayer addAnimation:caAnimation forKey:@"switchAnimation"];
    @weakify(self)
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        @strongify(self)
        [self.session beginConfiguration];
        [self.session removeInput:self.inputDevice];
        [self addVideoInputFrontCamera:isFrontCamera];
        [self.session commitConfiguration];
    });
}

- (void)animationDidStop:(CAAnimation *)anim finished:(BOOL)flag
{
    if (self.finishBlock) {
        self.finishBlock();
    }
}

/** Tap to focus */
- (void)focusInPoint:(CGPoint)devicePoint
{
    if (CGRectContainsPoint(_previewLayer.bounds, devicePoint) == NO) {
        return;
    }
    self.isManualFocus = YES;
    [self focusImageAnimateWithCenterPoint:devicePoint];
    devicePoint = [self convertToPointOfInterestFromViewCoordinates:devicePoint];
    [self focusWithMode:AVCaptureFocusModeAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:devicePoint monitorSubjectAreaChange:YES];
}

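/*
 Usage sketch (illustrative): drive tap-to-focus from a gesture recognizer on
 the preview's parent view. `self.previewContainer` is a hypothetical outlet:

     UITapGestureRecognizer *tap = [[UITapGestureRecognizer alloc] initWithTarget:self
                                                                           action:@selector(handleTap:)];
     [self.previewContainer addGestureRecognizer:tap];

     - (void)handleTap:(UITapGestureRecognizer *)tap {
         CGPoint point = [tap locationInView:self.previewContainer];
         [self.cameraManager focusInPoint:point];
     }
*/
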
- (void)focusImageAnimateWithCenterPoint:(CGPoint)point
{
    [self.focusView setCenter:point];
    self.focusView.transform = CGAffineTransformMakeScale(2.0, 2.0);
    __weak typeof(self) weak = self;
    [UIView animateWithDuration:0.3f delay:0.f options:UIViewAnimationOptionAllowUserInteraction animations:^{
        weak.focusView.alpha = 1.f;
        weak.focusView.transform = CGAffineTransformMakeScale(1.0, 1.0);
    } completion:^(BOOL finished) {
        [UIView animateWithDuration:0.5f delay:0.5f options:UIViewAnimationOptionAllowUserInteraction animations:^{
            weak.focusView.alpha = 0.f;
        } completion:^(BOOL finished) {
            weak.isManualFocus = NO;
        }];
    }];
}

- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange
{
    dispatch_async(_sessionQueue, ^{
        AVCaptureDevice *device = [self.inputDevice device];
        NSError *error = nil;
        if ([device lockForConfiguration:&error]) {
            if ([device isFocusPointOfInterestSupported] && [device isFocusModeSupported:focusMode]) {
                [device setFocusMode:focusMode];
                [device setFocusPointOfInterest:point];
            }
            if ([device isExposurePointOfInterestSupported] && [device isExposureModeSupported:exposureMode]) {
                [device setExposureMode:exposureMode];
                [device setExposurePointOfInterest:point];
            }
            [device setSubjectAreaChangeMonitoringEnabled:monitorSubjectAreaChange];
            [device unlockForConfiguration];
        } else {
            NSLog(@"%@", error);
        }
    });
}

/**
 * Convert a point in view coordinates to the camera's point of interest
 * (normalized to [0,1] relative to the sensor's clean aperture)
 *
 * @param viewCoordinates point in the preview layer's coordinate space
 *
 * @return normalized point of interest
 */
- (CGPoint)convertToPointOfInterestFromViewCoordinates:(CGPoint)viewCoordinates {
    CGPoint pointOfInterest = CGPointMake(.5f, .5f);
    CGSize frameSize = _previewLayer.bounds.size;
    AVCaptureVideoPreviewLayer *videoPreviewLayer = self.previewLayer;
    if ([[videoPreviewLayer videoGravity] isEqualToString:AVLayerVideoGravityResize]) {
        pointOfInterest = CGPointMake(viewCoordinates.y / frameSize.height, 1.f - (viewCoordinates.x / frameSize.width));
    } else {
        CGRect cleanAperture;
        for (AVCaptureInputPort *port in [[self.session.inputs lastObject] ports]) {
            if ([[port mediaType] isEqualToString:AVMediaTypeVideo]) {
                cleanAperture = CMVideoFormatDescriptionGetCleanAperture([port formatDescription], YES);
                CGSize apertureSize = cleanAperture.size;
                CGPoint point = viewCoordinates;
                CGFloat apertureRatio = apertureSize.height / apertureSize.width;
                CGFloat viewRatio = frameSize.width / frameSize.height;
                CGFloat xc = .5f;
                CGFloat yc = .5f;
                if ([[videoPreviewLayer videoGravity] isEqualToString:AVLayerVideoGravityResizeAspect]) {
                    if (viewRatio > apertureRatio) {
                        CGFloat y2 = frameSize.height;
                        CGFloat x2 = frameSize.height * apertureRatio;
                        CGFloat x1 = frameSize.width;
                        CGFloat blackBar = (x1 - x2) / 2;
                        if (point.x >= blackBar && point.x <= blackBar + x2) {
                            xc = point.y / y2;
                            yc = 1.f - ((point.x - blackBar) / x2);
                        }
                    } else {
                        CGFloat y2 = frameSize.width / apertureRatio;
                        CGFloat y1 = frameSize.height;
                        CGFloat x2 = frameSize.width;
                        CGFloat blackBar = (y1 - y2) / 2;
                        if (point.y >= blackBar && point.y <= blackBar + y2) {
                            xc = ((point.y - blackBar) / y2);
                            yc = 1.f - (point.x / x2);
                        }
                    }
                } else if ([[videoPreviewLayer videoGravity] isEqualToString:AVLayerVideoGravityResizeAspectFill]) {
                    if (viewRatio > apertureRatio) {
                        CGFloat y2 = apertureSize.width * (frameSize.width / apertureSize.height);
                        xc = (point.y + ((y2 - frameSize.height) / 2.f)) / y2;
                        yc = (frameSize.width - point.x) / frameSize.width;
                    } else {
                        CGFloat x2 = apertureSize.height * (frameSize.height / apertureSize.width);
                        yc = 1.f - ((point.x + ((x2 - frameSize.width) / 2)) / x2);
                        xc = point.y / frameSize.height;
                    }
                }
                pointOfInterest = CGPointMake(xc, yc);
                break;
            }
        }
    }
    return pointOfInterest;
}

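/*
 Worked example (assumed numbers): with a 375x667 portrait preview using
 AVLayerVideoGravityResizeAspectFill and a 1280x720 clean aperture,
 viewRatio = 375/667 ≈ 0.562 and apertureRatio = 720/1280 = 0.5625, so the
 else branch applies: x2 = 720 * (667/1280) ≈ 375.2. A tap at the view center
 (187.5, 333.5) yields xc = 333.5/667 = 0.5 and
 yc = 1 - ((187.5 + 0.1)/375.2) ≈ 0.5, i.e. the sensor's center (0.5, 0.5).
 The x/y swap reflects the sensor's landscape-oriented coordinate space.
*/
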
/** Animate the face recognition frame */
- (void)showFaceImageWithFrame:(CGRect)rect
{
    if (self.isStartFaceRecognition) {
        self.isStartFaceRecognition = NO;
        self.faceView.frame = CGRectMake(rect.origin.y * self.previewLayer.frame.size.width - 10, rect.origin.x * self.previewLayer.frame.size.height - 20, rect.size.width * self.previewLayer.frame.size.width * 2, rect.size.height * self.previewLayer.frame.size.height);
        self.faceView.transform = CGAffineTransformMakeScale(1.5, 1.5);
        __weak typeof(self) weak = self;
        [UIView animateWithDuration:0.3f animations:^{
            weak.faceView.alpha = 1.f;
            weak.faceView.transform = CGAffineTransformMakeScale(1.0, 1.0);
        } completion:^(BOOL finished) {
            [UIView animateWithDuration:2.f animations:^{
                weak.faceView.alpha = 0.f;
            } completion:^(BOOL finished) {
                if (weak.faceRecognitonCallBack) {
                    weak.faceRecognitonCallBack(weak.faceView.frame);
                }
                weak.isStartFaceRecognition = YES;
            }];
        }];
    }
}

/** Find the video capture connection */
- (AVCaptureConnection *)findVideoConnection
{
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in _stillImageOutput.connections) {
        for (AVCaptureInputPort *port in connection.inputPorts) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) {
            break;
        }
    }
    return videoConnection;
}

/** Check whether the app is authorized to use the camera */
+ (BOOL)checkAuthority
{
    NSString *mediaType = AVMediaTypeVideo;
    AVAuthorizationStatus authStatus = [AVCaptureDevice authorizationStatusForMediaType:mediaType];
    if (authStatus == AVAuthorizationStatusRestricted || authStatus == AVAuthorizationStatusDenied) {
        return NO;
    }
    return YES;
}

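/*
 Sketch (illustrative): checkAuthority also returns YES for
 AVAuthorizationStatusNotDetermined, so a caller would typically request
 access first and only then build the camera UI:

     if ([AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo] == AVAuthorizationStatusNotDetermined) {
         [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
             dispatch_async(dispatch_get_main_queue(), ^{
                 if (granted) {
                     // safe to create YMCameraManager here
                 }
             });
         }];
     }
*/
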
#pragma mark - Observer
- (void)setFocusObserver:(BOOL)observe
{
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (device && [device isFocusPointOfInterestSupported]) {
        if (observe) {
            [device addObserver:self forKeyPath:adjustingFocus options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld context:nil];
        } else {
            [device removeObserver:self forKeyPath:adjustingFocus context:nil];
        }
    } else {
        showMessage(@"Notice", @"This device has no camera or access is not authorized", [YMGlobalUtils getCurrentVC]);
    }
}

/** Observe whether focusing has finished */
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
    if ([keyPath isEqualToString:adjustingFocus]) {
        BOOL isAdjustingFocus = [[change objectForKey:NSKeyValueChangeNewKey] boolValue];
        if (isAdjustingFocus) {
            if (self.isManualFocus == NO) {
                [self focusImageAnimateWithCenterPoint:CGPointMake(self.previewLayer.bounds.size.width / 2, self.previewLayer.bounds.size.height / 2)];
            }
            if ([self.delegate respondsToSelector:@selector(cameraDidStareFocus)]) {
                [self.delegate cameraDidStareFocus];
            }
        } else {
            if ([self.delegate respondsToSelector:@selector(cameraDidFinishFocus)]) {
                [self.delegate cameraDidFinishFocus];
            }
        }
    }
}

#pragma mark - AVCaptureMetadataOutputObjectsDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
    if (self.canFaceRecognition) {
        for (AVMetadataObject *metadataObject in metadataObjects) {
            if ([metadataObject.type isEqualToString:AVMetadataObjectTypeFace]) {
                [self showFaceImageWithFrame:metadataObject.bounds];
            }
        }
    }
}

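/*
 Note (alternative approach): showFaceImageWithFrame: maps the normalized
 metadata bounds onto the preview by hand. AVCaptureVideoPreviewLayer can do
 this conversion directly, e.g.:

     AVMetadataObject *transformed =
         [self.previewLayer transformedMetadataObjectForMetadataObject:metadataObject];
     // transformed.bounds is now in the preview layer's coordinate space
*/
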
/*
#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
//    if (self.isStartFaceRecognition) {
//        UIImage *curImage = [self getSampleBufferImageWithSampleBuffer:sampleBuffer];
//        CIContext *context = [CIContext contextWithOptions:@{kCIContextUseSoftwareRenderer : @YES}];
//        CIDetector *detector = [CIDetector detectorOfType:CIDetectorTypeFace context:context options:@{CIDetectorAccuracy : CIDetectorAccuracyHigh}];
//    }
}

- (UIImage *)getSampleBufferImageWithSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    CVImageBufferRef buffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(buffer, 0);
    // Extract the image details from the CVImageBufferRef
    uint8_t *base = CVPixelBufferGetBaseAddress(buffer);
    size_t width = CVPixelBufferGetWidth(buffer);
    size_t height = CVPixelBufferGetHeight(buffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(buffer);
    // Build a CGContextRef from those details
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef cgContext = CGBitmapContextCreate(base, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGColorSpaceRelease(colorSpace);
    // Convert the CGContextRef to a UIImage via a CGImageRef
    CGImageRef cgImage = CGBitmapContextCreateImage(cgContext);
    UIImage *image = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);
    CGContextRelease(cgContext);
    CVPixelBufferUnlockBaseAddress(buffer, 0);
    return image;
}
*/

@end