/*
 * This file is part of the SDWebImage package.
 * (c) Olivier Poitrey <rs@dailymotion.com>
 *
 * For the full copyright and license information, please view the LICENSE
 * file that was distributed with this source code.
 */

#ifdef SD_WEBP

#import "SDWebImageWebPCoder.h"
#import "SDWebImageCoderHelper.h"
#import "NSImage+WebCache.h"
#import "UIImage+MultiFormat.h"
#import "SDWebImageImageIOCoder.h"
#if __has_include(<webp/decode.h>) && __has_include(<webp/encode.h>) && __has_include(<webp/demux.h>) && __has_include(<webp/mux.h>)
#import <webp/decode.h>
#import <webp/encode.h>
#import <webp/demux.h>
#import <webp/mux.h>
#else
#import "webp/decode.h"
#import "webp/encode.h"
#import "webp/demux.h"
#import "webp/mux.h"
#endif
#import <Accelerate/Accelerate.h>

@implementation SDWebImageWebPCoder {
    WebPIDecoder *_idec;
}

- (void)dealloc {
    if (_idec) {
        WebPIDelete(_idec);
        _idec = NULL;
    }
}

+ (instancetype)sharedCoder {
    static SDWebImageWebPCoder *coder;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        coder = [[SDWebImageWebPCoder alloc] init];
    });
    return coder;
}

#pragma mark - Decode

- (BOOL)canDecodeFromData:(nullable NSData *)data {
    return ([NSData sd_imageFormatForImageData:data] == SDImageFormatWebP);
}

- (BOOL)canIncrementallyDecodeFromData:(NSData *)data {
    return ([NSData sd_imageFormatForImageData:data] == SDImageFormatWebP);
}

- (UIImage *)decodedImageWithData:(NSData *)data {
    if (!data) {
        return nil;
    }
    
    WebPData webpData;
    WebPDataInit(&webpData);
    webpData.bytes = data.bytes;
    webpData.size = data.length;
    WebPDemuxer *demuxer = WebPDemux(&webpData);
    if (!demuxer) {
        return nil;
    }
    
    uint32_t flags = WebPDemuxGetI(demuxer, WEBP_FF_FORMAT_FLAGS);
    CGColorSpaceRef colorSpace = [self sd_colorSpaceWithDemuxer:demuxer];
    
    if (!(flags & ANIMATION_FLAG)) {
        // for a static, single-frame webp image
        UIImage *staticImage = [self sd_rawWebpImageWithData:webpData colorSpace:colorSpace];
        WebPDemuxDelete(demuxer);
        CGColorSpaceRelease(colorSpace);
        staticImage.sd_imageFormat = SDImageFormatWebP;
        return staticImage;
    }
    
    int loopCount = WebPDemuxGetI(demuxer, WEBP_FF_LOOP_COUNT);
    int canvasWidth = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH);
    int canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
    CGBitmapInfo bitmapInfo;
    // `CGBitmapContextCreate` does not support RGB888 on iOS, while `CGImageCreate` does.
    if (!(flags & ALPHA_FLAG)) {
        // RGBX8888
        bitmapInfo = kCGBitmapByteOrder32Big | kCGImageAlphaNoneSkipLast;
    } else {
        // RGBA8888
        bitmapInfo = kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast;
    }
    CGContextRef canvas = CGBitmapContextCreate(NULL, canvasWidth, canvasHeight, 8, 0, SDCGColorSpaceGetDeviceRGB(), bitmapInfo);
    if (!canvas) {
        WebPDemuxDelete(demuxer);
        CGColorSpaceRelease(colorSpace);
        return nil;
    }
    
    // for an animated webp image
    WebPIterator iter;
    if (!WebPDemuxGetFrame(demuxer, 1, &iter)) {
        WebPDemuxReleaseIterator(&iter);
        WebPDemuxDelete(demuxer);
        CGContextRelease(canvas);
        CGColorSpaceRelease(colorSpace);
        return nil;
    }
    
    NSMutableArray<SDWebImageFrame *> *frames = [NSMutableArray array];
    
    do {
        @autoreleasepool {
            UIImage *image = [self sd_drawnWebpImageWithCanvas:canvas iterator:iter colorSpace:colorSpace];
            if (!image) {
                continue;
            }
            
            int duration = iter.duration;
            if (duration <= 10) {
                // The WebP standard uses a 0 duration for frames that only update the canvas without being shown,
                // but Chrome and other implementations set durations of 10ms or less to 100ms.
                // Some animated WebP images are also created without a duration; keep compatibility with them.
                duration = 100;
            }
            SDWebImageFrame *frame = [SDWebImageFrame frameWithImage:image duration:duration / 1000.f];
            [frames addObject:frame];
        }
    } while (WebPDemuxNextFrame(&iter));
    
    WebPDemuxReleaseIterator(&iter);
    WebPDemuxDelete(demuxer);
    CGContextRelease(canvas);
    CGColorSpaceRelease(colorSpace);
    
    UIImage *animatedImage = [SDWebImageCoderHelper animatedImageWithFrames:frames];
    animatedImage.sd_imageLoopCount = loopCount;
    animatedImage.sd_imageFormat = SDImageFormatWebP;
    
    return animatedImage;
}

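// Progressive (incremental) decoding. A WebPIDecoder is created lazily on the first call and
// updated with the data received so far; the rows decoded up to this point are drawn onto a
// full-size transparent canvas, so the returned image fills in top-down as more data arrives.
// The decoder is released once decoding is finished.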
- (UIImage *)incrementallyDecodedImageWithData:(NSData *)data finished:(BOOL)finished {
    if (!_idec) {
        // Progressive images need transparency, so always use premultiplied RGBA
        _idec = WebPINewRGB(MODE_rgbA, NULL, 0, 0);
        if (!_idec) {
            return nil;
        }
    }
    
    UIImage *image;
    
    VP8StatusCode status = WebPIUpdate(_idec, data.bytes, data.length);
    if (status != VP8_STATUS_OK && status != VP8_STATUS_SUSPENDED) {
        return nil;
    }
    
    int width = 0;
    int height = 0;
    int last_y = 0;
    int stride = 0;
    uint8_t *rgba = WebPIDecGetRGB(_idec, &last_y, &width, &height, &stride);
    // last_y may be 0, which means there is not enough bitmap data to decode yet; ignore this update
    if (width + height > 0 && last_y > 0 && height >= last_y) {
        // Construct a UIImage from the decoded RGBA value array
        size_t rgbaSize = last_y * stride;
        CGDataProviderRef provider =
            CGDataProviderCreateWithData(NULL, rgba, rgbaSize, NULL);
        CGColorSpaceRef colorSpaceRef = SDCGColorSpaceGetDeviceRGB();
        
        CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast;
        size_t components = 4;
        CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
        // We use last_y for the image height because of a libwebp bug (https://bugs.chromium.org/p/webp/issues/detail?id=362):
        // the decoder is not memory-barrier safe on the x86 architecture (macOS & iPhone simulator), although it works fine on ARM (iPhone, iPad, tv, watch).
        // If different threads use WebPIDecGetRGB to grab the rgba bitmap, it may still contain the previously decoded bitmap data,
        // which makes the drawn image look strange (the upper part is current but the lower part is from the previous pass).
        // So we only grab and draw the first last_y rows instead of the total image height.
        // Besides fixing the issue, this also improves performance since we do not need to create an extra bitmap.
        CGImageRef imageRef = CGImageCreate(width, last_y, 8, components * 8, components * width, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);
        
        CGDataProviderRelease(provider);
        
        if (!imageRef) {
            return nil;
        }
        CGContextRef canvas = CGBitmapContextCreate(NULL, width, height, 8, 0, SDCGColorSpaceGetDeviceRGB(), bitmapInfo);
        if (!canvas) {
            CGImageRelease(imageRef);
            return nil;
        }
        // Only draw the first last_y rows of the image and keep the remaining area transparent, in the Core Graphics coordinate system
        CGContextDrawImage(canvas, CGRectMake(0, height - last_y, width, last_y), imageRef);
        CGImageRef newImageRef = CGBitmapContextCreateImage(canvas);
        CGImageRelease(imageRef);
        if (!newImageRef) {
            CGContextRelease(canvas);
            return nil;
        }
        
#if SD_UIKIT || SD_WATCH
        image = [[UIImage alloc] initWithCGImage:newImageRef];
#else
        image = [[UIImage alloc] initWithCGImage:newImageRef size:NSZeroSize];
#endif
        image.sd_imageFormat = SDImageFormatWebP;
        CGImageRelease(newImageRef);
        CGContextRelease(canvas);
    }
    
    if (finished) {
        if (_idec) {
            WebPIDelete(_idec);
            _idec = NULL;
        }
    }
    
    return image;
}

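// Decompression is delegated to the shared SDWebImageImageIOCoder. If the image was scaled down
// during decompression, the WebP data is re-encoded from the scaled image so the data pointer
// stays in sync with the returned image.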
- (UIImage *)decompressedImageWithImage:(UIImage *)image
                                   data:(NSData *__autoreleasing _Nullable *)data
                                options:(nullable NSDictionary<NSString*, NSObject*>*)optionsDict {
    UIImage *decompressedImage = [[SDWebImageImageIOCoder sharedCoder] decompressedImageWithImage:image data:data options:optionsDict];
    // if the image is scaled down, need to modify the data pointer as well
    if (decompressedImage && !CGSizeEqualToSize(decompressedImage.size, image.size) && [NSData sd_imageFormatForImageData:*data] == SDImageFormatWebP) {
        NSData *imageData = [self encodedDataWithImage:decompressedImage format:SDImageFormatWebP];
        if (imageData) {
            *data = imageData;
        }
    }
    return decompressedImage;
}

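// Draw one animation frame onto the shared canvas at the frame's offset, honoring its blend and
// dispose methods, and return a snapshot of the whole canvas as the composited frame image.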
- (nullable UIImage *)sd_drawnWebpImageWithCanvas:(CGContextRef)canvas iterator:(WebPIterator)iter colorSpace:(nonnull CGColorSpaceRef)colorSpaceRef {
    UIImage *image = [self sd_rawWebpImageWithData:iter.fragment colorSpace:colorSpaceRef];
    if (!image) {
        return nil;
    }
    
    size_t canvasWidth = CGBitmapContextGetWidth(canvas);
    size_t canvasHeight = CGBitmapContextGetHeight(canvas);
    CGSize size = CGSizeMake(canvasWidth, canvasHeight);
    CGFloat tmpX = iter.x_offset;
    CGFloat tmpY = size.height - iter.height - iter.y_offset;
    CGRect imageRect = CGRectMake(tmpX, tmpY, iter.width, iter.height);
    BOOL shouldBlend = iter.blend_method == WEBP_MUX_BLEND;
    
    // If the frame should not blend, it covers the target image rect (clear first, then draw)
    if (!shouldBlend) {
        CGContextClearRect(canvas, imageRect);
    }
    CGContextDrawImage(canvas, imageRect, image.CGImage);
    CGImageRef newImageRef = CGBitmapContextCreateImage(canvas);
    
#if SD_UIKIT || SD_WATCH
    image = [[UIImage alloc] initWithCGImage:newImageRef];
#elif SD_MAC
    image = [[UIImage alloc] initWithCGImage:newImageRef size:NSZeroSize];
#endif
    
    CGImageRelease(newImageRef);
    
    if (iter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
        CGContextClearRect(canvas, imageRect);
    }
    
    return image;
}

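// Decode a single WebP bitstream into an RGB888 or premultiplied RGBA8888 buffer and wrap it in a
// CGImage without copying; FreeImageData releases the buffer when the data provider is destroyed.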
- (nullable UIImage *)sd_rawWebpImageWithData:(WebPData)webpData colorSpace:(nonnull CGColorSpaceRef)colorSpaceRef {
    WebPDecoderConfig config;
    if (!WebPInitDecoderConfig(&config)) {
        return nil;
    }
    
    if (WebPGetFeatures(webpData.bytes, webpData.size, &config.input) != VP8_STATUS_OK) {
        return nil;
    }
    
    config.output.colorspace = config.input.has_alpha ? MODE_rgbA : MODE_RGB;
    config.options.use_threads = 1;
    
    // Decode the WebP image data into an RGBA value array
    if (WebPDecode(webpData.bytes, webpData.size, &config) != VP8_STATUS_OK) {
        return nil;
    }
    
    int width = config.input.width;
    int height = config.input.height;
    if (config.options.use_scaling) {
        width = config.options.scaled_width;
        height = config.options.scaled_height;
    }
    
    // Construct a UIImage from the decoded RGBA value array
    CGDataProviderRef provider =
        CGDataProviderCreateWithData(NULL, config.output.u.RGBA.rgba, config.output.u.RGBA.size, FreeImageData);
    CGBitmapInfo bitmapInfo;
    // `CGBitmapContextCreate` does not support RGB888 on iOS, while `CGImageCreate` does.
    if (!config.input.has_alpha) {
        // RGB888
        bitmapInfo = kCGBitmapByteOrder32Big | kCGImageAlphaNone;
    } else {
        // RGBA8888
        bitmapInfo = kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast;
    }
    size_t components = config.input.has_alpha ? 4 : 3;
    CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
    CGImageRef imageRef = CGImageCreate(width, height, 8, components * 8, components * width, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);
    
    CGDataProviderRelease(provider);
    
#if SD_UIKIT || SD_WATCH
    UIImage *image = [[UIImage alloc] initWithCGImage:imageRef];
#else
    UIImage *image = [[UIImage alloc] initWithCGImage:imageRef size:NSZeroSize];
#endif
    CGImageRelease(imageRef);
    
    return image;
}

// Create and return the correct colorspace by checking the ICC Profile
- (nonnull CGColorSpaceRef)sd_colorSpaceWithDemuxer:(nonnull WebPDemuxer *)demuxer CF_RETURNS_RETAINED {
    // A WebP that contains an ICC Profile should use the embedded colorspace instead of the default device colorspace
    // See: https://developers.google.com/speed/webp/docs/riff_container#color_profile
    
    CGColorSpaceRef colorSpaceRef = NULL;
    uint32_t flags = WebPDemuxGetI(demuxer, WEBP_FF_FORMAT_FLAGS);
    
    if (flags & ICCP_FLAG) {
        WebPChunkIterator chunk_iter;
        int result = WebPDemuxGetChunk(demuxer, "ICCP", 1, &chunk_iter);
        if (result) {
            // See #2618: `CGColorSpaceCreateWithICCProfile` does not copy the ICC Profile data, it only retains the `CFDataRef`.
            // When the libwebp `WebPDemuxer` is deallocated, all chunks will be freed, so we must copy the ICC data (really cheap, less than 10KB).
            NSData *profileData = [NSData dataWithBytes:chunk_iter.chunk.bytes length:chunk_iter.chunk.size];
            colorSpaceRef = CGColorSpaceCreateWithICCProfile((__bridge CFDataRef)profileData);
            WebPDemuxReleaseChunkIterator(&chunk_iter);
            if (colorSpaceRef) {
                // We currently use the RGB color model to decode WebP images, so we must filter out any other colorspace
                CGColorSpaceModel model = CGColorSpaceGetModel(colorSpaceRef);
                if (model != kCGColorSpaceModelRGB) {
                    CGColorSpaceRelease(colorSpaceRef);
                    colorSpaceRef = NULL;
                }
            }
        }
    }
    
    if (!colorSpaceRef) {
        colorSpaceRef = SDCGColorSpaceGetDeviceRGB();
        CGColorSpaceRetain(colorSpaceRef);
    }
    
    return colorSpaceRef;
}

#pragma mark - Encode

- (BOOL)canEncodeToFormat:(SDImageFormat)format {
    return (format == SDImageFormatWebP);
}

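// Encode a UIImage to WebP data. A still image is encoded directly; an animated image is encoded
// frame by frame and assembled into an animated WebP container with WebPMux, carrying the frame
// durations and the image's loop count.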
- (NSData *)encodedDataWithImage:(UIImage *)image format:(SDImageFormat)format {
    if (!image) {
        return nil;
    }
    
    NSData *data;
    
    NSArray<SDWebImageFrame *> *frames = [SDWebImageCoderHelper framesFromAnimatedImage:image];
    if (frames.count == 0) {
        // for static single webp image
        data = [self sd_encodedWebpDataWithImage:image];
    } else {
        // for animated webp image
        WebPMux *mux = WebPMuxNew();
        if (!mux) {
            return nil;
        }
        for (size_t i = 0; i < frames.count; i++) {
            SDWebImageFrame *currentFrame = frames[i];
            NSData *webpData = [self sd_encodedWebpDataWithImage:currentFrame.image];
            int duration = currentFrame.duration * 1000;
            WebPMuxFrameInfo frame = { .bitstream.bytes = webpData.bytes,
                .bitstream.size = webpData.length,
                .duration = duration,
                .id = WEBP_CHUNK_ANMF,
                .dispose_method = WEBP_MUX_DISPOSE_BACKGROUND, // each frame will clear canvas
                .blend_method = WEBP_MUX_NO_BLEND
            };
            if (WebPMuxPushFrame(mux, &frame, 0) != WEBP_MUX_OK) {
                WebPMuxDelete(mux);
                return nil;
            }
        }
        
        int loopCount = (int)image.sd_imageLoopCount;
        WebPMuxAnimParams params = { .bgcolor = 0,
            .loop_count = loopCount
        };
        if (WebPMuxSetAnimationParams(mux, &params) != WEBP_MUX_OK) {
            WebPMuxDelete(mux);
            return nil;
        }
        
        WebPData outputData;
        WebPMuxError error = WebPMuxAssemble(mux, &outputData);
        WebPMuxDelete(mux);
        if (error != WEBP_MUX_OK) {
            return nil;
        }
        data = [NSData dataWithBytes:outputData.bytes length:outputData.size];
        WebPDataClear(&outputData);
    }
    
    return data;
}

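// Encode a single still image. The CGImage backing store is converted to non-premultiplied
// RGB888/RGBA8888 (via vImage when the source pixel format differs) and then passed to
// WebPEncodeRGB/WebPEncodeRGBA at the maximum quality factor (100).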
- (nullable NSData *)sd_encodedWebpDataWithImage:(nullable UIImage *)image {
    if (!image) {
        return nil;
    }
    
    NSData *webpData;
    CGImageRef imageRef = image.CGImage;
    
    size_t width = CGImageGetWidth(imageRef);
    size_t height = CGImageGetHeight(imageRef);
    if (width == 0 || width > WEBP_MAX_DIMENSION) {
        return nil;
    }
    if (height == 0 || height > WEBP_MAX_DIMENSION) {
        return nil;
    }
    
    size_t bytesPerRow = CGImageGetBytesPerRow(imageRef);
    CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(imageRef);
    CGImageAlphaInfo alphaInfo = bitmapInfo & kCGBitmapAlphaInfoMask;
    CGBitmapInfo byteOrderInfo = bitmapInfo & kCGBitmapByteOrderMask;
    BOOL hasAlpha = !(alphaInfo == kCGImageAlphaNone ||
                      alphaInfo == kCGImageAlphaNoneSkipFirst ||
                      alphaInfo == kCGImageAlphaNoneSkipLast);
    BOOL byteOrderNormal = NO;
    switch (byteOrderInfo) {
        case kCGBitmapByteOrderDefault: {
            byteOrderNormal = YES;
        } break;
        case kCGBitmapByteOrder32Little: {
        } break;
        case kCGBitmapByteOrder32Big: {
            byteOrderNormal = YES;
        } break;
        default: break;
    }
    // If we cannot get the bitmap buffer, return early
    CGDataProviderRef dataProvider = CGImageGetDataProvider(imageRef);
    if (!dataProvider) {
        return nil;
    }
    CFDataRef dataRef = CGDataProviderCopyData(dataProvider);
    if (!dataRef) {
        return nil;
    }
    
    uint8_t *rgba = NULL;
    // We cannot assume that the input CGImage's color mode is always RGB888/RGBA8888. Convert all other cases to the target color mode using vImage.
    if (byteOrderNormal && ((alphaInfo == kCGImageAlphaNone) || (alphaInfo == kCGImageAlphaLast))) {
        // The input CGImage is already RGB888/RGBA8888
        rgba = (uint8_t *)CFDataGetBytePtr(dataRef);
    } else {
        // Convert all other cases to the target color mode using vImage
        vImageConverterRef convertor = NULL;
        vImage_Error error = kvImageNoError;
        
        vImage_CGImageFormat srcFormat = {
            .bitsPerComponent = (uint32_t)CGImageGetBitsPerComponent(imageRef),
            .bitsPerPixel = (uint32_t)CGImageGetBitsPerPixel(imageRef),
            .colorSpace = CGImageGetColorSpace(imageRef),
            .bitmapInfo = bitmapInfo
        };
        vImage_CGImageFormat destFormat = {
            .bitsPerComponent = 8,
            .bitsPerPixel = hasAlpha ? 32 : 24,
            .colorSpace = SDCGColorSpaceGetDeviceRGB(),
            .bitmapInfo = hasAlpha ? kCGImageAlphaLast | kCGBitmapByteOrderDefault : kCGImageAlphaNone | kCGBitmapByteOrderDefault // RGB888/RGBA8888 (non-premultiplied, which is what libwebp expects)
        };
        
        convertor = vImageConverter_CreateWithCGImageFormat(&srcFormat, &destFormat, NULL, kvImageNoFlags, &error);
        if (error != kvImageNoError) {
            CFRelease(dataRef);
            return nil;
        }
        
        vImage_Buffer src = {
            .data = (uint8_t *)CFDataGetBytePtr(dataRef),
            .width = width,
            .height = height,
            .rowBytes = bytesPerRow
        };
        vImage_Buffer dest;
        
        error = vImageBuffer_Init(&dest, height, width, destFormat.bitsPerPixel, kvImageNoFlags);
        if (error != kvImageNoError) {
            vImageConverter_Release(convertor); // release the converter before bailing out
            CFRelease(dataRef);
            return nil;
        }
        
        // Convert the input color mode to RGB888/RGBA8888
        error = vImageConvert_AnyToAny(convertor, &src, &dest, NULL, kvImageNoFlags);
        vImageConverter_Release(convertor); // the converter is no longer needed once the conversion is done
        if (error != kvImageNoError) {
            free(dest.data); // free the destination buffer allocated by vImageBuffer_Init
            CFRelease(dataRef);
            return nil;
        }
        
        rgba = dest.data; // Converted buffer
        bytesPerRow = dest.rowBytes; // Converted bytesPerRow
        CFRelease(dataRef);
        dataRef = NULL;
    }
    
    uint8_t *data = NULL; // Output WebP data
    float qualityFactor = 100; // WebP quality is 0-100
    // Encode the RGB888/RGBA8888 buffer to WebP data
    size_t size;
    if (hasAlpha) {
        size = WebPEncodeRGBA(rgba, (int)width, (int)height, (int)bytesPerRow, qualityFactor, &data);
    } else {
        size = WebPEncodeRGB(rgba, (int)width, (int)height, (int)bytesPerRow, qualityFactor, &data);
    }
    if (dataRef) {
        CFRelease(dataRef); // free the non-converted rgba buffer
        dataRef = NULL;
    } else {
        free(rgba); // free the converted rgba buffer
        rgba = NULL;
    }
    
    if (size) {
        // success
        webpData = [NSData dataWithBytes:data length:size];
    }
    if (data) {
        WebPFree(data);
    }
    
    return webpData;
}

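// Release callback for CGDataProviderCreateWithData: frees the pixel buffer decoded by libwebp
// once Core Graphics no longer needs it.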
static void FreeImageData(void *info, const void *data, size_t size) {
    free((void *)data);
}

@end

#endif