SDWebImageWebPCoder/Classes/SDImageWebPCoder.m — 75 changes: 46 additions & 29 deletions
@@ -64,6 +64,7 @@ @implementation SDImageWebPCoder {
     NSUInteger _frameCount;
     NSArray<SDWebPCoderFrame *> *_frames;
     CGContextRef _canvas;
+    CGColorSpaceRef _colorSpace;
     BOOL _hasAnimation;
     BOOL _hasAlpha;
     BOOL _finished;
@@ -86,6 +87,10 @@ - (void)dealloc {
         CGContextRelease(_canvas);
         _canvas = NULL;
     }
+    if (_colorSpace) {
+        CGColorSpaceRelease(_colorSpace);
+        _colorSpace = NULL;
+    }
 }

 + (instancetype)sharedCoder {
@@ -131,21 +136,6 @@ - (UIImage *)decodedImageWithData:(NSData *)data options:(nullable SDImageCoderO
             scale = 1;
         }
     }
-    if (!hasAnimation) {
-        // for static single webp image
-        CGImageRef imageRef = [self sd_createWebpImageWithData:webpData];
-        if (!imageRef) {
-            return nil;
-        }
-#if SD_UIKIT || SD_WATCH
-        UIImage *staticImage = [[UIImage alloc] initWithCGImage:imageRef scale:scale orientation:UIImageOrientationUp];
-#else
-        UIImage *staticImage = [[UIImage alloc] initWithCGImage:imageRef scale:scale orientation:kCGImagePropertyOrientationUp];
-#endif
-        CGImageRelease(imageRef);
-        WebPDemuxDelete(demuxer);
-        return staticImage;
-    }

     // for animated webp image
     WebPIterator iter;
@@ -155,10 +145,12 @@ - (UIImage *)decodedImageWithData:(NSData *)data options:(nullable SDImageCoderO
         WebPDemuxDelete(demuxer);
         return nil;
     }
+    CGColorSpaceRef colorSpace = [self sd_colorSpaceWithDemuxer:demuxer];

-    if (decodeFirstFrame) {
+    if (!hasAnimation || decodeFirstFrame) {
         // first frame for animated webp image
-        CGImageRef imageRef = [self sd_createWebpImageWithData:iter.fragment];
+        CGImageRef imageRef = [self sd_createWebpImageWithData:iter.fragment colorSpace:colorSpace];
+        CGColorSpaceRelease(colorSpace);
 #if SD_UIKIT || SD_WATCH
         UIImage *firstFrameImage = [[UIImage alloc] initWithCGImage:imageRef scale:scale orientation:UIImageOrientationUp];
 #else
@@ -180,14 +172,15 @@ - (UIImage *)decodedImageWithData:(NSData *)data options:(nullable SDImageCoderO
     CGContextRef canvas = CGBitmapContextCreate(NULL, canvasWidth, canvasHeight, 8, 0, [SDImageCoderHelper colorSpaceGetDeviceRGB], bitmapInfo);
     if (!canvas) {
         WebPDemuxDelete(demuxer);
+        CGColorSpaceRelease(colorSpace);
         return nil;
     }

     NSMutableArray<SDImageFrame *> *frames = [NSMutableArray array];

     do {
         @autoreleasepool {
-            CGImageRef imageRef = [self sd_drawnWebpImageWithCanvas:canvas iterator:iter];
+            CGImageRef imageRef = [self sd_drawnWebpImageWithCanvas:canvas iterator:iter colorSpace:colorSpace];
             if (!imageRef) {
                 continue;
             }
@@ -208,6 +201,7 @@ - (UIImage *)decodedImageWithData:(NSData *)data options:(nullable SDImageCoderO
     WebPDemuxReleaseIterator(&iter);
     WebPDemuxDelete(demuxer);
     CGContextRelease(canvas);
+    CGColorSpaceRelease(colorSpace);

     UIImage *animatedImage = [SDImageCoderHelper animatedImageWithFrames:frames];
     animatedImage.sd_imageLoopCount = loopCount;
@@ -318,7 +312,7 @@ - (UIImage *)incrementalDecodedImageWithOptions:(SDImageCoderOptions *)options {
     return image;
 }

-- (void)sd_blendWebpImageWithCanvas:(CGContextRef)canvas iterator:(WebPIterator)iter {
+- (void)sd_blendWebpImageWithCanvas:(CGContextRef)canvas iterator:(WebPIterator)iter colorSpace:(nonnull CGColorSpaceRef)colorSpaceRef {
     size_t canvasHeight = CGBitmapContextGetHeight(canvas);
     CGFloat tmpX = iter.x_offset;
     CGFloat tmpY = canvasHeight - iter.height - iter.y_offset;
@@ -327,7 +321,7 @@ - (void)sd_blendWebpImageWithCanvas:(CGContextRef)canvas iterator:(WebPIterator)
     if (iter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
         CGContextClearRect(canvas, imageRect);
     } else {
-        CGImageRef imageRef = [self sd_createWebpImageWithData:iter.fragment];
+        CGImageRef imageRef = [self sd_createWebpImageWithData:iter.fragment colorSpace:colorSpaceRef];
         if (!imageRef) {
             return;
         }
@@ -341,8 +335,8 @@ - (void)sd_blendWebpImageWithCanvas:(CGContextRef)canvas iterator:(WebPIterator)
     }
 }

-- (nullable CGImageRef)sd_drawnWebpImageWithCanvas:(CGContextRef)canvas iterator:(WebPIterator)iter CF_RETURNS_RETAINED {
-    CGImageRef imageRef = [self sd_createWebpImageWithData:iter.fragment];
+- (nullable CGImageRef)sd_drawnWebpImageWithCanvas:(CGContextRef)canvas iterator:(WebPIterator)iter colorSpace:(nonnull CGColorSpaceRef)colorSpaceRef CF_RETURNS_RETAINED {
+    CGImageRef imageRef = [self sd_createWebpImageWithData:iter.fragment colorSpace:colorSpaceRef];
     if (!imageRef) {
         return nil;
     }
@@ -369,7 +363,7 @@ - (nullable CGImageRef)sd_drawnWebpImageWithCanvas:(CGContextRef)canvas iterator
     return newImageRef;
 }

-- (nullable CGImageRef)sd_createWebpImageWithData:(WebPData)webpData CF_RETURNS_RETAINED {
+- (nullable CGImageRef)sd_createWebpImageWithData:(WebPData)webpData colorSpace:(nonnull CGColorSpaceRef)colorSpaceRef CF_RETURNS_RETAINED {
     WebPDecoderConfig config;
     if (!WebPInitDecoderConfig(&config)) {
         return nil;
@@ -382,11 +376,10 @@ - (nullable CGImageRef)sd_createWebpImageWithData:(WebPData)webpData CF_RETURNS_
     BOOL hasAlpha = config.input.has_alpha;
     // iOS prefer BGRA8888 (premultiplied) or BGRX8888 bitmapInfo for screen rendering, which is same as `UIGraphicsBeginImageContext()` or `- [CALayer drawInContext:]`
     // use this bitmapInfo, combined with right colorspace, even without decode, can still avoid extra CA::Render::copy_image(which marked `Color Copied Images` from Instruments)
-    WEBP_CSP_MODE colorspace = MODE_bgrA;
     CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
     bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
     config.options.use_threads = 1;
-    config.output.colorspace = colorspace;
+    config.output.colorspace = MODE_bgrA;

     // Decode the WebP image data into a RGBA value array
     if (WebPDecode(webpData.bytes, webpData.size, &config) != VP8_STATUS_OK) {
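
Aside on the two comments above: libwebp's MODE_bgrA emits premultiplied pixels in B, G, R, A byte order, and on little-endian Apple hardware that memory layout is exactly what kCGBitmapByteOrder32Host combined with kCGImageAlphaPremultipliedFirst describes, so Core Animation can consume the decoded buffer without an extra copy. A tiny self-contained check of that byte-order claim (plain C sketch, illustrative only, assumes a little-endian host):

    #include <assert.h>
    #include <stdint.h>
    #include <string.h>

    int main(void) {
        // One premultiplied pixel as MODE_bgrA lays it out in memory: B, G, R, A.
        uint8_t bgra[4] = {0x10, 0x20, 0x30, 0xFF};
        uint32_t word;
        memcpy(&word, bgra, sizeof word);   // read back as a host-endian 32-bit value
        // On a little-endian host the word reads A,R,G,B from high byte to low byte,
        // i.e. "alpha first" in host byte order -- the layout the CGBitmapInfo above names.
        assert(word == 0xFF302010u);
        return 0;
    }
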
@@ -406,7 +399,6 @@ - (nullable CGImageRef)sd_createWebpImageWithData:(WebPData)webpData CF_RETURNS_
     size_t bitsPerComponent = 8;
     size_t bitsPerPixel = 32;
     size_t bytesPerRow = config.output.u.RGBA.stride;
-    CGColorSpaceRef colorSpaceRef = [SDImageCoderHelper colorSpaceGetDeviceRGB];
     CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
     CGImageRef imageRef = CGImageCreate(width, height, bitsPerComponent, bitsPerPixel, bytesPerRow, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);

@@ -425,6 +417,28 @@ - (NSTimeInterval)sd_frameDurationWithIterator:(WebPIterator)iter {
     return duration / 1000.0;
 }

+// Create and return the correct colorspace by checking the ICC Profile
+- (nonnull CGColorSpaceRef)sd_colorSpaceWithDemuxer:(nonnull WebPDemuxer *)demuxer CF_RETURNS_RETAINED {
+    // WebP contains ICC Profile should use the desired colorspace, instead of default device colorspace
+    // See: https://developers.google.com/speed/webp/docs/riff_container#color_profile
+
+    WebPChunkIterator chunk_iter;
+    CGColorSpaceRef colorSpaceRef = NULL;
+
+    int result = WebPDemuxGetChunk(demuxer, "ICCP", 1, &chunk_iter);
+    if (result) {
+        NSData *profileData = [NSData dataWithBytes:chunk_iter.chunk.bytes length:chunk_iter.chunk.size];
+        colorSpaceRef = CGColorSpaceCreateWithICCProfile((__bridge CFDataRef)profileData);
+    }
+
+    if (!colorSpaceRef) {
+        colorSpaceRef = [SDImageCoderHelper colorSpaceGetDeviceRGB];
+        CGColorSpaceRetain(colorSpaceRef);
+    }
+
+    return colorSpaceRef;
+}
+
 #pragma mark - Encode
 - (BOOL)canEncodeToFormat:(SDImageFormat)format {
     return (format == SDImageFormatWebP);
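
The new sd_colorSpaceWithDemuxer: helper follows the Core Foundation Create rule (CF_RETURNS_RETAINED), which is why every call site above pairs it with a CGColorSpaceRelease. A minimal standalone sketch of the same ICCP lookup, assuming a valid WebPDemuxer and substituting CGColorSpaceCreateDeviceRGB() for the SDImageCoderHelper fallback; the function name is illustrative, and the WebPDemuxReleaseChunkIterator call is extra hygiene rather than part of the diff:

    #import <Foundation/Foundation.h>
    #import <CoreGraphics/CoreGraphics.h>
    #import "webp/demux.h" // header path may differ depending on how libwebp is integrated

    static CGColorSpaceRef CreateColorSpaceForWebPDemuxer(WebPDemuxer *demuxer) {
        CGColorSpaceRef colorSpace = NULL;
        WebPChunkIterator chunkIter;
        if (WebPDemuxGetChunk(demuxer, "ICCP", 1, &chunkIter)) {
            // Wrap the embedded ICC payload and let CoreGraphics build the matching colorspace.
            NSData *profile = [NSData dataWithBytes:chunkIter.chunk.bytes length:chunkIter.chunk.size];
            colorSpace = CGColorSpaceCreateWithICCProfile((__bridge CFDataRef)profile);
            WebPDemuxReleaseChunkIterator(&chunkIter);
        }
        if (!colorSpace) {
            // No usable profile: fall back to device RGB, already +1 from the Create function.
            colorSpace = CGColorSpaceCreateDeviceRGB();
        }
        return colorSpace; // caller owns the reference and must CGColorSpaceRelease() it
    }
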
@@ -770,6 +784,9 @@ - (UIImage *)safeAnimatedImageFrameAtIndex:(NSUInteger)index {
         }
         _canvas = canvas;
     }
+    if (!_colorSpace) {
+        _colorSpace = [self sd_colorSpaceWithDemuxer:_demux];
+    }

     SDWebPCoderFrame *frame = _frames[index];
     UIImage *image;
@@ -782,7 +799,7 @@ - (UIImage *)safeAnimatedImageFrameAtIndex:(NSUInteger)index {
             WebPDemuxReleaseIterator(&iter);
             return nil;
         }
-        CGImageRef imageRef = [self sd_drawnWebpImageWithCanvas:_canvas iterator:iter];
+        CGImageRef imageRef = [self sd_drawnWebpImageWithCanvas:_canvas iterator:iter colorSpace:_colorSpace];
         if (!imageRef) {
             return nil;
         }
@@ -810,9 +827,9 @@ - (UIImage *)safeAnimatedImageFrameAtIndex:(NSUInteger)index {
     do {
         @autoreleasepool {
             if ((size_t)iter.frame_num == endIndex) {
-                [self sd_blendWebpImageWithCanvas:_canvas iterator:iter];
+                [self sd_blendWebpImageWithCanvas:_canvas iterator:iter colorSpace:_colorSpace];
             } else {
-                CGImageRef imageRef = [self sd_drawnWebpImageWithCanvas:_canvas iterator:iter];
+                CGImageRef imageRef = [self sd_drawnWebpImageWithCanvas:_canvas iterator:iter colorSpace:_colorSpace];
                 if (!imageRef) {
                     return nil;
                 }
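
A quick end-to-end spot check for the whole change (hypothetical snippet, not part of this PR; the file path is illustrative) is to decode a WebP that embeds an ICC profile on iOS and inspect the colorspace attached to the resulting CGImage:

    NSData *webpData = [NSData dataWithContentsOfFile:@"/tmp/wide-gamut.webp"]; // illustrative path
    UIImage *image = [[SDImageWebPCoder sharedCoder] decodedImageWithData:webpData options:nil];
    CGColorSpaceRef colorSpace = CGImageGetColorSpace(image.CGImage);
    // With an ICCP chunk present, this should describe the embedded profile
    // rather than the plain device RGB colorspace used before this change.
    NSLog(@"colorspace = %@", (__bridge id)colorSpace);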