FUVideoReader.m 17 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543
  1. //
  2. // FUVideoReader.m
  3. // AVAssetReader2
  4. //
  5. // Created by L on 2018/6/13.
  6. // Copyright © 2018年 千山暮雪. All rights reserved.
  7. //
  8. #import "FUVideoReader.h"
  9. #import <UIKit/UIKit.h>
@interface FUVideoReader ()
{
    // YES while the display link is only re-serving the cached first frame.
    BOOL isReadFirstFrame ;
    // YES while the display link is only re-serving the last decoded frame.
    BOOL isReadLastFrame ;
    // +1-retained first video sample, captured by -startReadForFirstFrame;
    // released in -dealloc.
    CMSampleBufferRef firstFrame ;
    // BGRA scratch buffer mirroring the most recent decoded frame; created
    // lazily by -createPixelBufferWithSize:, released in -dealloc.
    CVPixelBufferRef renderTarget ;
}
// Output file path of the current write pass.
@property (nonatomic, copy) NSString *destinationPath ;
// Reader (demux/decode side).
@property (nonatomic, strong) AVAssetReader *assetReader ;
// Writer (encode/mux side).
@property (nonatomic, strong) AVAssetWriter *assetWriter ;
// Audio writer input.
@property (nonatomic, strong) AVAssetWriterInput *audioInput;
// Audio reader output.
@property (nonatomic, strong) AVAssetReaderTrackOutput *audioOutput;
// Video writer input.
@property (nonatomic, strong) AVAssetWriterInput *videoInput;
// Video reader output.
@property (nonatomic, strong) AVAssetReaderTrackOutput *videoOutput;
// Source video track.
@property (nonatomic, strong) AVAssetTrack *videoTrack ;
// Orientation derived from the video track's preferredTransform.
@property (nonatomic, assign, readwrite) FUVideoReaderOrientation videoOrientation ;
// Drives the per-frame read/write loop (~30fps).
@property (nonatomic, strong) CADisplayLink *displayLink;
// Created with value 1: the second of {audio, video} to finish finds the
// token taken and triggers -readVideoFinished.
@property (nonatomic, strong) dispatch_semaphore_t finishSemaphore ;
@end
  38. @implementation FUVideoReader
  39. -(instancetype)initWithVideoURL:(NSURL *)videoRUL {
  40. self = [super init];
  41. if (self) {
  42. _displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(displayLinkCallback:)];
  43. [_displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSRunLoopCommonModes];
  44. if (@available(iOS 10.0, *)) {
  45. [_displayLink setPreferredFramesPerSecond:30];
  46. } else {
  47. [_displayLink setFrameInterval:2];
  48. }
  49. _displayLink.paused = YES;
  50. _videoURL = videoRUL ;
  51. isReadFirstFrame = NO ;
  52. isReadLastFrame = NO ;
  53. }
  54. return self ;
  55. }
  56. -(void)setVideoURL:(NSURL *)videoURL {
  57. _videoURL = videoURL ;
  58. }
  59. -(void)configAssetReader {
  60. AVAsset *asset = [AVAsset assetWithURL:_videoURL];
  61. self.assetReader = [[AVAssetReader alloc] initWithAsset:asset error:nil];
  62. // 视频通道
  63. NSMutableDictionary *outputSettings = [NSMutableDictionary dictionary];
  64. [outputSettings setObject: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey: (NSString*)kCVPixelBufferPixelFormatTypeKey];
  65. _videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
  66. self.videoOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:_videoTrack outputSettings:outputSettings];
  67. self.videoOutput.alwaysCopiesSampleData = NO;
  68. CGAffineTransform transform = self.videoTrack.preferredTransform ;
  69. if(transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0){
  70. self.videoOrientation = FUVideoReaderOrientationLandscapeRight ;
  71. }else if(transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0){
  72. self.videoOrientation = FUVideoReaderOrientationLandscapeLeft ;
  73. }else if(transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0){
  74. self.videoOrientation = FUVideoReaderOrientationPortrait ;
  75. }else if(transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0){
  76. self.videoOrientation = FUVideoReaderOrientationUpsideDown ;
  77. }
  78. if ([self.assetReader canAddOutput:self.videoOutput]) {
  79. [self.assetReader addOutput:self.videoOutput];
  80. }else{
  81. NSLog(@"配置视频输出失败 ~") ;
  82. }
  83. // 音频通道
  84. NSArray *audioTracks = [asset tracksWithMediaType:AVMediaTypeAudio];
  85. if (audioTracks.count > 0) {
  86. AVAssetTrack *audioTrack = [audioTracks objectAtIndex:0];
  87. NSMutableDictionary *audioSettings = [NSMutableDictionary dictionary];
  88. [audioSettings setObject: [NSNumber numberWithInt:kAudioFormatLinearPCM] forKey: (NSString*)AVFormatIDKey];
  89. self.audioOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:audioTrack outputSettings:audioSettings];
  90. self.audioOutput.alwaysCopiesSampleData = NO;
  91. if ([self.assetReader canAddOutput:self.audioOutput]) {
  92. [self.assetReader addOutput:self.audioOutput];
  93. }else {
  94. NSLog(@"配置音频输出失败 ~");
  95. }
  96. }
  97. }
  98. -(void)configAssetWriterWithPath:(NSString *)destinationPath {
  99. _destinationPath = destinationPath ;
  100. self.assetWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath: destinationPath] fileType:AVFileTypeQuickTimeMovie error:nil];
  101. //音频编码
  102. NSDictionary *audioInputSetting = [self configAudioInput];
  103. self.audioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioInputSetting];
  104. if ([self.assetWriter canAddInput:self.audioInput]) {
  105. [self.assetWriter addInput:self.audioInput];
  106. } else {
  107. NSLog(@"配置音频输入失败 ~") ;
  108. }
  109. //视频编码
  110. NSDictionary *videoInputSetting = [self configVideoInput];
  111. self.videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoInputSetting];
  112. self.videoInput.expectsMediaDataInRealTime = YES;
  113. CGAffineTransform transform ;
  114. switch (self.videoOrientation) {
  115. case FUVideoReaderOrientationPortrait:
  116. transform = CGAffineTransformIdentity ;
  117. break;
  118. case FUVideoReaderOrientationLandscapeRight:
  119. transform = CGAffineTransformMakeRotation(M_PI_2) ;
  120. break;
  121. case FUVideoReaderOrientationLandscapeLeft:
  122. transform = CGAffineTransformMakeRotation(-M_PI_2) ;
  123. break ;
  124. case FUVideoReaderOrientationUpsideDown:
  125. transform = CGAffineTransformMakeRotation(M_PI) ;
  126. break ;
  127. }
  128. self.videoInput.transform = transform ;
  129. if ([self.assetWriter canAddInput:self.videoInput]) {
  130. [self.assetWriter addInput:self.videoInput];
  131. } else {
  132. NSLog(@"配置视频输入失败 ~") ;
  133. }
  134. }
  135. // 开始读
  136. - (void)startReadWithDestinationPath:(NSString *)destinationPath {
  137. if (self.finishSemaphore == nil) {
  138. self.finishSemaphore = dispatch_semaphore_create(1) ;
  139. }
  140. if ([[NSFileManager defaultManager] fileExistsAtPath:destinationPath]) {
  141. [[NSFileManager defaultManager] removeItemAtPath:destinationPath error:nil] ;
  142. }
  143. [self configAssetReader];
  144. [self configAssetWriterWithPath:destinationPath];
  145. if (self.assetReader.status != AVAssetReaderStatusReading) {
  146. [self.assetReader startReading];
  147. }
  148. if (self.assetWriter.status != AVAssetWriterStatusWriting) {
  149. [self.assetWriter startWriting];
  150. }
  151. [self.assetWriter startSessionAtSourceTime:kCMTimeZero];
  152. isReadFirstFrame = NO ;
  153. isReadLastFrame = NO ;
  154. _displayLink.paused = NO ;
  155. }
  156. - (void)displayLinkCallback:(CADisplayLink *)displatLink {
  157. if (isReadFirstFrame) {
  158. CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(firstFrame) ;
  159. CVPixelBufferLockBaseAddress(pixelBuffer, 0) ;
  160. int w0 = (int)CVPixelBufferGetWidth(pixelBuffer) ;
  161. int h0 = (int)CVPixelBufferGetHeight(pixelBuffer) ;
  162. void *byte0 = CVPixelBufferGetBaseAddress(pixelBuffer) ;
  163. if (!renderTarget) {
  164. [self createPixelBufferWithSize:CGSizeMake(w0, h0)];
  165. }
  166. CVPixelBufferLockBaseAddress(renderTarget, 0) ;
  167. int w1 = (int)CVPixelBufferGetWidth(renderTarget) ;
  168. int h1 = (int)CVPixelBufferGetHeight(renderTarget) ;
  169. if (w0 != w1 || h0 != h1) {
  170. [self createPixelBufferWithSize:CGSizeMake(w0, h0)];
  171. }
  172. void *byte1 = CVPixelBufferGetBaseAddress(renderTarget) ;
  173. memcpy(byte1, byte0, w0 * h0 * 4) ;
  174. CVPixelBufferUnlockBaseAddress(renderTarget, 0);
  175. CVPixelBufferUnlockBaseAddress(pixelBuffer, 0) ;
  176. if (self.delegate && [self.delegate respondsToSelector:@selector(videoReaderDidReadVideoBuffer:)] && !self.displayLink.paused) {
  177. dispatch_async(dispatch_get_global_queue(0, 0), ^{
  178. [self.delegate videoReaderDidReadVideoBuffer:renderTarget];
  179. });
  180. }
  181. [self.assetReader cancelReading];
  182. self.assetReader = nil;
  183. return ;
  184. }
  185. if (isReadLastFrame) {
  186. void *bytes = [self getCopyDataFromPixelBuffer:renderTarget];
  187. if (self.delegate && [self.delegate respondsToSelector:@selector(videoReaderDidReadVideoBuffer:)] && !_displayLink.isPaused) {
  188. [self.delegate videoReaderDidReadVideoBuffer:renderTarget];
  189. }
  190. [self copyDataBackToPixelBuffer:renderTarget copyData:bytes];
  191. free(bytes) ;
  192. return ;
  193. }
  194. [self readAudioBuffer];
  195. [self readVideoBuffer];
  196. }
// First-tick guard for the audio path. NOTE(review): this is a file-scope
// static — shared by every FUVideoReader instance and never reset after the
// first pass — so the skip happens only once per process; confirm intentional.
static BOOL isAudioFirst = YES ;
// Pulls one audio sample per display-link tick from the reader output and
// appends it to the writer input.
- (void)readAudioBuffer {
    if ([self.audioInput isReadyForMoreMediaData] && self.assetReader.status == AVAssetReaderStatusReading) {
        // Skip the very first tick — presumably to let the writer session
        // spin up before samples arrive; TODO confirm.
        if (isAudioFirst) {
            isAudioFirst = NO;
            return ;
        }
        CMSampleBufferRef nextSampleBuffer = [self.audioOutput copyNextSampleBuffer];
        if (nextSampleBuffer) {
            [self.audioInput appendSampleBuffer:nextSampleBuffer];
            CMSampleBufferInvalidate(nextSampleBuffer);
            CFRelease(nextSampleBuffer);
        } else {
            // Audio track exhausted. finishSemaphore was created with value 1:
            // the first track to finish takes the token (wait returns 0) and
            // does nothing; when the wait FAILS (nonzero) the other track has
            // already finished, so finalize the output file.
            [self.audioInput markAsFinished];
            if (dispatch_semaphore_wait(self.finishSemaphore, DISPATCH_TIME_NOW) != 0) {
                [self readVideoFinished];
            }
        }
    }
}
  217. static BOOL isVideoFirst = YES ;
  218. - (void)readVideoBuffer {
  219. if ([self.videoInput isReadyForMoreMediaData] && self.assetReader.status == AVAssetReaderStatusReading) {
  220. if (isVideoFirst) {
  221. isVideoFirst = NO;
  222. return ;
  223. }
  224. CMSampleBufferRef nextSampleBuffer = [self.videoOutput copyNextSampleBuffer];
  225. if (nextSampleBuffer) {
  226. CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(nextSampleBuffer) ;
  227. // 数据保存到 renderTarget
  228. CVPixelBufferLockBaseAddress(pixelBuffer, 0) ;
  229. int w0 = (int)CVPixelBufferGetWidth(pixelBuffer) ;
  230. int h0 = (int)CVPixelBufferGetHeight(pixelBuffer) ;
  231. void *byte0 = CVPixelBufferGetBaseAddress(pixelBuffer) ;
  232. if (!renderTarget) {
  233. [self createPixelBufferWithSize:CGSizeMake(w0, h0)];
  234. }
  235. CVPixelBufferLockBaseAddress(renderTarget, 0) ;
  236. int w1 = (int)CVPixelBufferGetWidth(renderTarget) ;
  237. int h1 = (int)CVPixelBufferGetHeight(renderTarget) ;
  238. if (w0 != w1 || h0 != h1) {
  239. [self createPixelBufferWithSize:CGSizeMake(w0, h0)];
  240. }
  241. void *byte1 = CVPixelBufferGetBaseAddress(renderTarget) ;
  242. memcpy(byte1, byte0, w0 * h0 * 4) ;
  243. CVPixelBufferUnlockBaseAddress(renderTarget, 0);
  244. CVPixelBufferUnlockBaseAddress(pixelBuffer, 0) ;
  245. if (self.delegate && [self.delegate respondsToSelector:@selector(videoReaderDidReadVideoBuffer:)] && !self.displayLink.paused) {
  246. CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(nextSampleBuffer) ;
  247. [self.delegate videoReaderDidReadVideoBuffer:pixelBuffer];
  248. }
  249. [self.videoInput appendSampleBuffer:nextSampleBuffer];
  250. CMSampleBufferInvalidate(nextSampleBuffer);
  251. CFRelease(nextSampleBuffer);
  252. }else {
  253. [self.videoInput markAsFinished];
  254. if (dispatch_semaphore_wait(self.finishSemaphore, DISPATCH_TIME_NOW) != 0) {
  255. [self readVideoFinished];
  256. }
  257. }
  258. }
  259. }
  260. - (void)readVideoFinished {
  261. dispatch_semaphore_signal(self.finishSemaphore) ;
  262. self.finishSemaphore = nil ;
  263. if (self.assetWriter.status == AVAssetWriterStatusWriting) {
  264. [self.assetWriter finishWritingWithCompletionHandler:^{
  265. AVAssetWriterStatus status = self.assetWriter.status;
  266. BOOL success ;
  267. if (status == AVAssetWriterStatusCompleted) {
  268. success = YES ;
  269. NSLog(@"finsished");
  270. } else {
  271. success = NO ;
  272. NSLog(@"failure %ld",(long)status);
  273. }
  274. self.displayLink.paused = YES ;
  275. if (self.delegate && [self.delegate respondsToSelector:@selector(videoReaderDidFinishReadSuccess:)]) {
  276. [self.delegate videoReaderDidFinishReadSuccess:success];
  277. }
  278. }];
  279. }
  280. isReadFirstFrame = NO ;
  281. isReadLastFrame = NO ;
  282. _displayLink.paused = YES;
  283. }
  284. // 只读 第一帧
  285. - (void)startReadForFirstFrame {
  286. self.assetReader = nil ;
  287. [self configAssetReader];
  288. [self.assetReader startReading];
  289. isReadFirstFrame = YES ;
  290. isReadLastFrame = NO ;
  291. firstFrame = [self.videoOutput copyNextSampleBuffer];
  292. self.displayLink.paused = NO ;
  293. }
  294. // 只读 最后一帧
  295. - (void)startReadForLastFrame {
  296. isReadFirstFrame = NO ;
  297. isReadLastFrame = YES ;
  298. self.displayLink.paused = NO ;
  299. }
  300. // 停止
  301. - (void)stopReading {
  302. isReadFirstFrame = NO ;
  303. isReadLastFrame = NO ;
  304. _displayLink.paused = YES;
  305. [self.assetReader cancelReading];
  306. [self.assetWriter cancelWriting];
  307. self.assetWriter = nil;
  308. self.assetReader = nil;
  309. [self destorySemaphore];
  310. }
  311. -(void)continueReading{
  312. if (_displayLink.paused) {
  313. _displayLink.paused = NO;
  314. }
  315. }
  316. - (void)destory {
  317. _displayLink.paused = YES;
  318. [_displayLink invalidate];
  319. _displayLink = nil ;
  320. [self.assetReader cancelReading];
  321. [self.assetWriter cancelWriting];
  322. self.assetWriter = nil;
  323. self.assetReader = nil;
  324. [self destorySemaphore];
  325. }
/// Restores the finish semaphore to its creation value before dropping it.
/// libdispatch crashes on semaphore deallocation when the current value is
/// lower than the creation value, so this loop drains any outstanding token:
/// a successful wait (returns 0) consumes a token and loops again; once a
/// wait fails, one signal restores the value to 1 (its creation value) and
/// the reference is released safely.
- (void)destorySemaphore {
    if (self.finishSemaphore) {
        do {
            if (dispatch_semaphore_wait(self.finishSemaphore, DISPATCH_TIME_NOW) != 0) {
                dispatch_semaphore_signal(self.finishSemaphore) ;
                self.finishSemaphore = nil ;
            }
        } while (self.finishSemaphore);
    }
}
  336. /** 编码音频 */
  337. - (NSDictionary *)configAudioInput {
  338. AudioChannelLayout channelLayout = {
  339. .mChannelLayoutTag = kAudioChannelLayoutTag_Stereo,
  340. .mChannelBitmap = kAudioChannelBit_Left,
  341. .mNumberChannelDescriptions = 0
  342. };
  343. NSData *channelLayoutData = [NSData dataWithBytes:&channelLayout length:offsetof(AudioChannelLayout, mChannelDescriptions)];
  344. NSDictionary *audioInputSetting = @{
  345. AVFormatIDKey: @(kAudioFormatMPEG4AAC),
  346. AVSampleRateKey: @(44100),
  347. AVNumberOfChannelsKey: @(2),
  348. AVChannelLayoutKey:channelLayoutData
  349. };
  350. return audioInputSetting;
  351. }
  352. /** 编码视频 */
  353. - (NSDictionary *)configVideoInput {
  354. CGSize videoSize = self.videoTrack.naturalSize ;
  355. NSDictionary *videoInputSetting = @{
  356. AVVideoCodecKey:AVVideoCodecH264,
  357. AVVideoWidthKey: @(videoSize.width),
  358. AVVideoHeightKey: @(videoSize.height),
  359. };
  360. return videoInputSetting;
  361. }
/// Lazily allocates the shared BGRA scratch buffer (renderTarget) used to
/// mirror decoded frames for the delegate and for last-frame replay.
/// NOTE(review): this is a no-op whenever renderTarget already exists — it
/// does NOT resize an existing buffer even when `size` differs. Callers that
/// need a different size must release renderTarget (and set it to NULL)
/// before calling; confirm every call site handles this.
- (void)createPixelBufferWithSize:(CGSize)size {
    if (!renderTarget) {
        // IOSurface-backed and OpenGLES-compatible so the buffer can back a
        // GL texture downstream.
        NSDictionary* pixelBufferOptions = @{ (NSString*) kCVPixelBufferPixelFormatTypeKey :
        @(kCVPixelFormatType_32BGRA),
        (NSString*) kCVPixelBufferWidthKey : @(size.width),
        (NSString*) kCVPixelBufferHeightKey : @(size.height),
        (NSString*) kCVPixelBufferOpenGLESCompatibilityKey : @YES,
        (NSString*) kCVPixelBufferIOSurfacePropertiesKey : @{}};
        CVPixelBufferCreate(kCFAllocatorDefault,
        size.width, size.height,
        kCVPixelFormatType_32BGRA,
        (__bridge CFDictionaryRef)pixelBufferOptions,
        &renderTarget);
    }
}
  377. - (void *)getCopyDataFromPixelBuffer:(CVPixelBufferRef)pixelBuffer {
  378. CVPixelBufferLockBaseAddress(pixelBuffer, 0);
  379. size_t size = CVPixelBufferGetDataSize(pixelBuffer);
  380. void *bytes = (void *)CVPixelBufferGetBaseAddress(pixelBuffer);
  381. void *copyData = malloc(size);
  382. memcpy(copyData, bytes, size);
  383. CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
  384. return copyData;
  385. }
  386. - (void)copyDataBackToPixelBuffer:(CVPixelBufferRef)pixelBuffer copyData:(void *)copyData {
  387. CVPixelBufferLockBaseAddress(pixelBuffer, 0);
  388. size_t size = CVPixelBufferGetDataSize(pixelBuffer);
  389. void *bytes = (void *)CVPixelBufferGetBaseAddress(pixelBuffer);
  390. memcpy(bytes, copyData, size);
  391. CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
  392. }
  393. - (void)dealloc{
  394. NSLog(@"FUVideoReader dealloc");
  395. if (renderTarget) {
  396. CVPixelBufferRelease(renderTarget);
  397. }
  398. if (firstFrame) {
  399. CFRelease(firstFrame);
  400. }
  401. }
  402. @end