
//
//  FUCamera.m
//  FULiveDemo
//
//  Created by liuyang on 2016/12/26.
//  Copyright © 2016 liuyang. All rights reserved.
//

#import "FUCamera.h"
#import <UIKit/UIKit.h>
#import "FURecordEncoder.h"
#import "SVProgressHUD.h"

typedef enum : NSUInteger {
    CommonMode,
    PhotoTakeMode,
    VideoRecordMode,
    VideoRecordEndMode,
} RunMode;

typedef void (^FUCameraRecordVidepCompleted)(NSString *videoPath);

@interface FUCamera () <AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate>
{
    RunMode runMode;
    BOOL hasStarted;
    BOOL videoHDREnabled;
}

@property (nonatomic, strong) AVCaptureSession *captureSession;
@property (strong, nonatomic) AVCaptureDeviceInput *backCameraInput;    // Back camera input
@property (strong, nonatomic) AVCaptureDeviceInput *frontCameraInput;   // Front camera input
@property (nonatomic, strong) AVCaptureVideoDataOutput *videoOutput;    // Video data output
@property (nonatomic, strong) AVCaptureConnection *videoConnection;
@property (nonatomic, strong) AVCaptureDevice *camera;
@property (assign, nonatomic) AVCaptureDevicePosition cameraPosition;
@property (strong, nonatomic) FURecordEncoder *recordEncoder;           // Recording encoder
@property (nonatomic, strong) AVCaptureDeviceInput *audioMicInput;      // Microphone input
@property (nonatomic, strong) AVCaptureAudioDataOutput *audioOutput;    // Audio data output
@property (copy, nonatomic) FUCameraRecordVidepCompleted recordVidepCompleted;
@property (copy, nonatomic) AVCaptureSessionPreset mSessionPreset;      // NSString-based type, so copy rather than assign
@property (nonatomic) FUCameraFocusModel cameraFocusModel;

@end

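/*
 Usage sketch (for reference only; the delegate, dataSource and the captureFormat /
 videoCaptureQueue / audioCaptureQueue properties used below are declared in
 FUCamera.h, which is not shown here):

     FUCamera *camera = [[FUCamera alloc] init];   // front camera, 32BGRA frames
     camera.delegate = self;                       // receives -didOutputVideoSampleBuffer:
     [camera startCapture];

     [camera startRecord];
     [camera stopRecordWithCompletionHandler:^(NSString *videoPath) {
         // videoPath points to an .mp4 file in NSTemporaryDirectory()
     }];
 */
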
@implementation FUCamera

- (instancetype)initWithCameraPosition:(AVCaptureDevicePosition)cameraPosition captureFormat:(int)captureFormat
{
    if (self = [super init]) {
        self.cameraPosition = cameraPosition;
        self.captureFormat = captureFormat;
    }
    return self;
}

- (instancetype)init
{
    if (self = [super init]) {
        self.cameraPosition = AVCaptureDevicePositionFront;
        self.captureFormat = kCVPixelFormatType_32BGRA;
        videoHDREnabled = YES;
    }
    return self;
}

- (void)startCapture {
    _cameraFocusModel = FUCameraModelAutoFace;
    if (![self.captureSession isRunning] && !hasStarted) {
        hasStarted = YES;
        // [self addAudio];
        [self.captureSession startRunning];
        /* Focus and expose at the center point */
        [self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:CGPointMake(0.5, 0.5) monitorSubjectAreaChange:YES];
    }
}

- (void)stopCapture {
    hasStarted = NO;
    // [self removeAudio];
    if ([self.captureSession isRunning]) {
        [self.captureSession stopRunning];
    }
    NSLog(@"Video capture stopped");
}

- (void)addAudio {
    if ([_captureSession canAddOutput:self.audioOutput]) {
        [_captureSession addOutput:self.audioOutput];
    }
}

- (void)removeAudio {
    [_captureSession removeOutput:self.audioOutput];
}

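// Lazily builds the capture session: 1280x720 preset, camera and microphone inputs,
// a video data output in the requested pixel format, portrait orientation,
// mirroring for the front camera, and a frame rate capped at 30 fps.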
- (AVCaptureSession *)captureSession
{
    if (!_captureSession) {
        _captureSession = [[AVCaptureSession alloc] init];
        _captureSession.sessionPreset = AVCaptureSessionPreset1280x720;
        AVCaptureDeviceInput *deviceInput = self.isFrontCamera ? self.frontCameraInput : self.backCameraInput;
        [_captureSession beginConfiguration];
        if ([_captureSession canAddInput:deviceInput]) {
            [_captureSession addInput:deviceInput];
        }
        if ([_captureSession canAddOutput:self.videoOutput]) {
            [_captureSession addOutput:self.videoOutput];
        }
        if ([_captureSession canAddInput:self.audioMicInput]) {
            [_captureSession addInput:self.audioMicInput];
        }
        [self addAudio];
        [self.videoConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];
        if (self.videoConnection.supportsVideoMirroring && self.isFrontCamera) {
            self.videoConnection.videoMirrored = YES;
        }
        if ([deviceInput.device lockForConfiguration:NULL]) {
            [deviceInput.device setActiveVideoMinFrameDuration:CMTimeMake(1, 30)];
            [deviceInput.device unlockForConfiguration];
        }
        [_captureSession commitConfiguration];
    }
    return _captureSession;
}

// Back camera input
- (AVCaptureDeviceInput *)backCameraInput {
    if (_backCameraInput == nil) {
        NSError *error;
        _backCameraInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self backCamera] error:&error];
        if (error) {
            NSLog(@"Failed to get the back camera");
        }
    }
    self.camera = _backCameraInput.device;
    // if ([self.camera lockForConfiguration:NULL]) {
    //     self.camera.automaticallyAdjustsVideoHDREnabled = NO;
    //     self.camera.videoHDREnabled = videoHDREnabled;
    //     [self.camera unlockForConfiguration];
    // }
    return _backCameraInput;
}

// Front camera input
- (AVCaptureDeviceInput *)frontCameraInput {
    if (_frontCameraInput == nil) {
        NSError *error;
        _frontCameraInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self frontCamera] error:&error];
        if (error) {
            NSLog(@"Failed to get the front camera");
        }
    }
    self.camera = _frontCameraInput.device;
    // if ([self.camera lockForConfiguration:NULL]) {
    //     self.camera.automaticallyAdjustsVideoHDREnabled = NO;
    //     self.camera.videoHDREnabled = videoHDREnabled;
    //     [self.camera unlockForConfiguration];
    // }
    return _frontCameraInput;
}

- (AVCaptureDeviceInput *)audioMicInput
{
    if (!_audioMicInput) {
        // Microphone input
        AVCaptureDevice *mic = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        NSError *error;
        _audioMicInput = [AVCaptureDeviceInput deviceInputWithDevice:mic error:&error];
        if (error) {
            NSLog(@"Failed to get the microphone");
        }
    }
    return _audioMicInput;
}

- (AVCaptureAudioDataOutput *)audioOutput
{
    if (!_audioOutput) {
        // Audio data output
        _audioOutput = [[AVCaptureAudioDataOutput alloc] init];
        [_audioOutput setSampleBufferDelegate:self queue:self.audioCaptureQueue];
    }
    return _audioOutput;
}

// Front camera device
- (AVCaptureDevice *)frontCamera {
    return [self cameraWithPosition:AVCaptureDevicePositionFront];
}

// Back camera device
- (AVCaptureDevice *)backCamera {
    return [self cameraWithPosition:AVCaptureDevicePositionBack];
}

- (BOOL)supportsAVCaptureSessionPreset:(BOOL)isFront {
    if (isFront) {
        return [self.frontCameraInput.device supportsAVCaptureSessionPreset:_mSessionPreset];
    } else {
        return [self.backCameraInput.device supportsAVCaptureSessionPreset:_mSessionPreset];
    }
}

// Switch between the front and back cameras
- (void)changeCameraInputDeviceisFront:(BOOL)isFront {
    [self.captureSession stopRunning];
    if (isFront) {
        [self.captureSession removeInput:self.backCameraInput];
        if ([self.captureSession canAddInput:self.frontCameraInput]) {
            [self.captureSession addInput:self.frontCameraInput];
            [[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:self.camera];
            [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:_camera];
            NSLog(@"Subject-area observer added for the front camera");
        }
        self.cameraPosition = AVCaptureDevicePositionFront;
    } else {
        [self.captureSession removeInput:self.frontCameraInput];
        if ([self.captureSession canAddInput:self.backCameraInput]) {
            [self.captureSession addInput:self.backCameraInput];
            [[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:self.camera];
            [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:_camera];
            NSLog(@"Subject-area observer added for the back camera");
        }
        self.cameraPosition = AVCaptureDevicePositionBack;
    }

    AVCaptureDeviceInput *deviceInput = isFront ? self.frontCameraInput : self.backCameraInput;
    [self.captureSession beginConfiguration];
    if ([deviceInput.device lockForConfiguration:NULL]) {
        [deviceInput.device setActiveVideoMinFrameDuration:CMTimeMake(1, 30)];
        [deviceInput.device unlockForConfiguration];
    }
    [self.captureSession commitConfiguration];

    self.videoConnection.videoOrientation = AVCaptureVideoOrientationPortrait;
    if (self.videoConnection.supportsVideoMirroring) {
        self.videoConnection.videoMirrored = isFront;
    }
    /* Compared with standard video stabilization, this mode narrows the camera's field of view,
       adds more latency to the capture pipeline, and uses more system memory. */
    if (self.videoConnection.supportsVideoStabilization && !isFront) { // Keep the full field of view on the front camera: stabilization stays off
        self.videoConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeStandard;
        NSLog(@"activeVideoStabilizationMode = %ld", (long)self.videoConnection.activeVideoStabilizationMode);
    } else {
        NSLog(@"connection doesn't support video stabilization");
        self.videoConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeOff;
    }
    [self.captureSession startRunning];
}

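// Device lookup: on iOS 10.2+ the built-in dual camera is preferred, falling back to the
// wide-angle camera; on older systems all video devices are enumerated by position.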
// Returns the capture device for the given position (front or back)
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position {
    if (@available(iOS 10.2, *)) {
        AVCaptureDevice *newDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInDualCamera mediaType:AVMediaTypeVideo position:position];
        if (!newDevice) {
            newDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera mediaType:AVMediaTypeVideo position:position];
        }
        return newDevice;
    } else {
        // Return all default devices capable of video capture
        NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
        // Walk the devices and return the one matching the requested position
        for (AVCaptureDevice *device in devices) {
            if ([device position] == position) {
                return device;
            }
        }
        return nil;
    }
}

- (AVCaptureDevice *)camera
{
    if (!_camera) {
        NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
        for (AVCaptureDevice *device in devices) {
            if ([device position] == self.cameraPosition) {
                _camera = device;
                [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:_camera];
            }
        }
    }
    return _camera;
}

- (AVCaptureVideoDataOutput *)videoOutput
{
    if (!_videoOutput) {
        // Video data output
        _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
        [_videoOutput setAlwaysDiscardsLateVideoFrames:YES];
        [_videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:_captureFormat] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
        [_videoOutput setSampleBufferDelegate:self queue:self.videoCaptureQueue];
    }
    return _videoOutput;
}

// Video capture queue
- (dispatch_queue_t)videoCaptureQueue {
    if (_videoCaptureQueue == nil) {
        // _videoCaptureQueue = dispatch_queue_create("com.faceunity.videoCaptureQueue", NULL);
        _videoCaptureQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0);
    }
    return _videoCaptureQueue;
}

// Audio capture queue
- (dispatch_queue_t)audioCaptureQueue {
    if (_audioCaptureQueue == nil) {
        _audioCaptureQueue = dispatch_queue_create("com.faceunity.audioCaptureQueue", NULL);
    }
    return _audioCaptureQueue;
}

// Video connection
- (AVCaptureConnection *)videoConnection {
    _videoConnection = [self.videoOutput connectionWithMediaType:AVMediaTypeVideo];
    _videoConnection.automaticallyAdjustsVideoMirroring = NO;
    return _videoConnection;
}

// Set the capture pixel format
- (void)setCaptureFormat:(int)captureFormat
{
    if (_captureFormat == captureFormat) {
        return;
    }
    _captureFormat = captureFormat;
    if (((NSNumber *)[[_videoOutput videoSettings] objectForKey:(id)kCVPixelBufferPixelFormatTypeKey]).intValue != captureFormat) {
        [_videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:_captureFormat] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
        if ([self.camera lockForConfiguration:nil]) {
            [self.camera setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
            [self.camera unlockForConfiguration];
        }
    }
}

/**
 *  Reset to continuous auto focus and continuous auto exposure,
 *  focused and metered at the center point.
 */
- (void)resetFocusAndExposureModes {
    AVCaptureFocusMode focusMode = AVCaptureFocusModeContinuousAutoFocus;
    BOOL canResetFocus = [self.camera isFocusPointOfInterestSupported] && [self.camera isFocusModeSupported:focusMode];
    AVCaptureExposureMode exposureMode = AVCaptureExposureModeContinuousAutoExposure;
    BOOL canResetExposure = [self.camera isExposurePointOfInterestSupported] && [self.camera isExposureModeSupported:exposureMode];
    CGPoint centerPoint = CGPointMake(0.5f, 0.5f);
    NSError *error;
    if ([self.camera lockForConfiguration:&error]) {
        if (canResetFocus) {
            self.camera.focusMode = focusMode;
            self.camera.focusPointOfInterest = centerPoint;
        }
        if (canResetExposure) {
            self.camera.exposureMode = exposureMode;
            self.camera.exposurePointOfInterest = centerPoint;
        }
        [self.camera unlockForConfiguration];
    } else {
        NSLog(@"%@", error);
    }
}

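// Fired when the subject area changes (monitoring is enabled in -focusWithMode:...):
// re-centers focus/exposure and switches back to face-driven auto focus.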
- (void)subjectAreaDidChange:(NSNotification *)notification
{
    dispatch_async(self.videoCaptureQueue, ^{
        CGPoint devicePoint = CGPointMake(0.5, 0.5);
        [self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:devicePoint monitorSubjectAreaChange:YES];
        [self cameraChangeModle:FUCameraModelAutoFace];
    });
}

#pragma mark - Exposure compensation

- (void)setExposureValue:(float)value {
    // NSLog(@"camera -- exposure value -- %lf", value);
    NSError *error;
    if ([self.camera lockForConfiguration:&error]) {
        [self.camera setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
        [self.camera setExposureTargetBias:value completionHandler:nil];
        [self.camera unlockForConfiguration];
    }
}

#pragma mark - Resolution

- (BOOL)changeSessionPreset:(AVCaptureSessionPreset)sessionPreset {
    if ([self.captureSession canSetSessionPreset:sessionPreset]) {
        if ([self.captureSession isRunning]) {
            [self.captureSession stopRunning];
        }
        _captureSession.sessionPreset = sessionPreset;
        _mSessionPreset = sessionPreset;
        [self.captureSession startRunning];
        return YES;
    }
    return NO;
}

#pragma mark - Mirroring

- (void)changeVideoMirrored:(BOOL)videoMirrored {
    if (self.videoConnection.supportsVideoMirroring) {
        self.videoConnection.videoMirrored = videoMirrored;
    }
}

#pragma mark - Frame rate

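// For rates of 30 fps or less, only the min/max frame durations of the current format
// are changed; for higher rates a device format whose maximum supported frame rate is
// high enough (with a 420f media subtype) is selected via activeFormat, which bypasses
// the session preset.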
- (void)changeVideoFrameRate:(int)frameRate {
    if (frameRate <= 30) { // This path only supports frame rates of 30 fps or lower
        AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        [videoDevice lockForConfiguration:NULL];
        [videoDevice setActiveVideoMinFrameDuration:CMTimeMake(10, frameRate * 10)];
        [videoDevice setActiveVideoMaxFrameDuration:CMTimeMake(10, frameRate * 10)];
        [videoDevice unlockForConfiguration];
        return;
    }

    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDeviceFormat *vFormat in [videoDevice formats]) {
        CMFormatDescriptionRef description = vFormat.formatDescription;
        float maxRate = ((AVFrameRateRange *)[vFormat.videoSupportedFrameRateRanges objectAtIndex:0]).maxFrameRate;
        if (maxRate > frameRate - 1 &&
            CMFormatDescriptionGetMediaSubType(description) == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
            if ([videoDevice lockForConfiguration:nil]) {
                /* Setting activeFormat and setting sessionPreset are mutually exclusive ways of configuring the resolution */
                videoDevice.activeFormat = vFormat;
                [videoDevice setActiveVideoMinFrameDuration:CMTimeMake(10, frameRate * 10)];
                [videoDevice setActiveVideoMaxFrameDuration:CMTimeMake(10, frameRate * 10)];
                [videoDevice unlockForConfiguration];
                break;
            }
        }
    }
}

- (BOOL)focusPointSupported
{
    return self.camera.focusPointOfInterestSupported;
}

- (BOOL)exposurePointSupported
{
    return self.camera.exposurePointOfInterestSupported;
}

- (BOOL)isFrontCamera
{
    return self.cameraPosition == AVCaptureDevicePositionFront;
}

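// Sample-buffer callback for both the audio and the video data outputs. Audio buffers
// are only forwarded to the encoder while recording. Video buffers are handed to the
// delegate first, then handled according to runMode: capturing a photo, feeding the
// recorder (created lazily from the first frame's dimensions), or finishing a recording.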
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    if (captureOutput == self.audioOutput) {
        if (runMode == VideoRecordMode) {
            if (self.recordEncoder == nil) {
                return;
            }
            CFRetain(sampleBuffer);
            // Encode the audio data
            [self.recordEncoder encodeFrame:sampleBuffer isVideo:NO];
            CFRelease(sampleBuffer);
        }
        return;
    }

    if ([self.delegate respondsToSelector:@selector(didOutputVideoSampleBuffer:)]) {
        [self.delegate didOutputVideoSampleBuffer:sampleBuffer];
    }

    /* Face focus check */
    [self cameraFocusAndExpose];

    switch (runMode) {
        case CommonMode:
            break;
        case PhotoTakeMode:
        {
            runMode = CommonMode;
            CVPixelBufferRef buffer = CMSampleBufferGetImageBuffer(sampleBuffer);
            UIImage *image = [self imageFromPixelBuffer:buffer];
            if (image) {
                UIImageWriteToSavedPhotosAlbum(image, self, @selector(image:didFinishSavingWithError:contextInfo:), NULL);
            }
        }
            break;
        case VideoRecordMode:
            if (self.recordEncoder == nil) {
                NSDate *currentDate = [NSDate date]; // Current date and time, used for the video file name
                NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
                [dateFormatter setDateFormat:@"yyyyMMddHHmmssSS"];
                NSString *dateString = [dateFormatter stringFromDate:currentDate];
                NSString *videoPath = [NSTemporaryDirectory() stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mp4", dateString]];
                CVPixelBufferRef buffer = CMSampleBufferGetImageBuffer(sampleBuffer);
                float frameWidth = CVPixelBufferGetWidth(buffer);
                float frameHeight = CVPixelBufferGetHeight(buffer);
                if (frameWidth != 0 && frameHeight != 0) {
                    self.recordEncoder = [FURecordEncoder encoderForPath:videoPath Height:frameHeight width:frameWidth channels:1 samples:44100];
                    return;
                }
            }
            CFRetain(sampleBuffer);
            // Encode the video data
            [self.recordEncoder encodeFrame:sampleBuffer isVideo:YES];
            CFRelease(sampleBuffer);
            break;
        case VideoRecordEndMode:
        {
            runMode = CommonMode;
            // if (self.recordEncoder.writer.status == AVAssetWriterStatusUnknown) {
            //     self.recordEncoder = nil;
            // } else {
            __weak typeof(self) weakSelf = self;
            [self.recordEncoder finishWithCompletionHandler:^{
                [weakSelf videpCompleted];
            }];
        }
            break;
        default:
            break;
    }
}

#pragma mark - Face focus and exposure

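// Face-driven focus/exposure: the data source is expected to return the face center in
// the camera's normalized point-of-interest coordinates; a negative y is treated as
// "no face detected" and focus/exposure fall back to the frame center.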
- (void)cameraFocusAndExpose {
    if (_cameraFocusModel == FUCameraModelAutoFace) {
        if ([self.dataSource respondsToSelector:@selector(faceCenterInImage:)]) {
            CGPoint center = [self.dataSource faceCenterInImage:self];
            if (center.y >= 0) {
                [self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:center monitorSubjectAreaChange:YES];
            } else {
                [self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:CGPointMake(0.5, 0.5) monitorSubjectAreaChange:YES];
            }
        }
    }
}

- (void)videpCompleted {
    NSString *path = self.recordEncoder.path;
    self.recordEncoder = nil;
    if (self.recordVidepCompleted) {
        self.recordVidepCompleted(path);
    }
}

- (void)takePhotoAndSave
{
    runMode = PhotoTakeMode;
}

// Start recording
- (void)startRecord
{
    runMode = VideoRecordMode;
}

// Stop recording
- (void)stopRecordWithCompletionHandler:(void (^)(NSString *videoPath))handler
{
    self.recordVidepCompleted = handler;
    runMode = VideoRecordEndMode;
}

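// Converts a pixel buffer to a UIImage, center-cropping it so that its aspect ratio
// matches the screen before rendering through Core Image.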
- (UIImage *)imageFromPixelBuffer:(CVPixelBufferRef)pixelBufferRef {
    CVPixelBufferLockBaseAddress(pixelBufferRef, 0);

    CGFloat SW = [UIScreen mainScreen].bounds.size.width;
    CGFloat SH = [UIScreen mainScreen].bounds.size.height;
    float width = CVPixelBufferGetWidth(pixelBufferRef);
    float height = CVPixelBufferGetHeight(pixelBufferRef);

    float dw = width / SW;
    float dh = height / SH;
    float cropW = width;
    float cropH = height;
    if (dw > dh) {
        cropW = SW * dh;
    } else {
        cropH = SH * dw;
    }
    CGFloat cropX = (width - cropW) * 0.5;
    CGFloat cropY = (height - cropH) * 0.5;

    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBufferRef];
    CIContext *temporaryContext = [CIContext contextWithOptions:nil];
    CGImageRef videoImage = [temporaryContext createCGImage:ciImage
                                                   fromRect:CGRectMake(cropX, cropY, cropW, cropH)];
    UIImage *image = [UIImage imageWithCGImage:videoImage];
    CGImageRelease(videoImage);

    CVPixelBufferUnlockBaseAddress(pixelBufferRef, 0);
    return image;
}

- (void)image:(UIImage *)image didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo
{
    if (error != NULL) {
        [SVProgressHUD showErrorWithStatus:@"Failed to save the photo"];
    } else {
        [SVProgressHUD showSuccessWithStatus:@"Photo saved to the photo library"];
    }
}

- (void)video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo
{
    if (error != NULL) {
        [SVProgressHUD showErrorWithStatus:@"Failed to save the video"];
    } else {
        [SVProgressHUD showSuccessWithStatus:@"Video saved to the photo library"];
    }
}

- (void)setCaptureVideoOrientation:(AVCaptureVideoOrientation)orientation {
    [self.videoConnection setVideoOrientation:orientation];
}

- (void)getCurrentExposureValue:(float *)current max:(float *)max min:(float *)min {
    *min = self.camera.minExposureTargetBias;
    *max = self.camera.maxExposureTargetBias;
    *current = self.camera.exposureTargetBias;
}

- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange
{
    dispatch_async(self.videoCaptureQueue, ^{
        AVCaptureDevice *device = self.camera;
        NSError *error = nil;
        if ([device lockForConfiguration:&error]) {
            // Setting (focus/exposure)PointOfInterest alone does not initiate a (focus/exposure) operation.
            // Call -set(Focus/Exposure)Mode: to apply the new point of interest.
            if (device.isFocusPointOfInterestSupported && [device isFocusModeSupported:focusMode]) {
                device.focusPointOfInterest = point;
                device.focusMode = focusMode;
            }
            if (device.isExposurePointOfInterestSupported && [device isExposureModeSupported:exposureMode]) {
                device.exposurePointOfInterest = point;
                device.exposureMode = exposureMode;
            }
            // NSLog(@"---point --%@", NSStringFromCGPoint(point));
            device.subjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange;
            [device unlockForConfiguration];
        } else {
            NSLog(@"Could not lock device for configuration: %@", error);
        }
    });
}

- (void)cameraChangeModle:(FUCameraFocusModel)modle
{
    _cameraFocusModel = modle;
}

// Zoom
- (CGFloat)maxZoomFactor
{
    return MIN(self.camera.activeFormat.videoMaxZoomFactor, 4.0f);
}

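// Maps a zoom value (expected to be in [0, 1]) to a zoom factor in [1, maxZoomFactor]
// along an exponential curve: pow(maxZoomFactor, zoomValue).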
- (void)setZoomValue:(CGFloat)zoomValue
{
    if (!self.camera.isRampingVideoZoom) {
        NSError *error;
        if ([self.camera lockForConfiguration:&error]) {
            CGFloat zoomFactor = pow([self maxZoomFactor], zoomValue);
            self.camera.videoZoomFactor = zoomFactor;
            [self.camera unlockForConfiguration];
        }
    }
}

- (void)dealloc {
    [[NSNotificationCenter defaultCenter] removeObserver:self];
    NSLog(@"camera dealloc");
}

@end