...
@@ -14,6 +14,8 @@
 #import "webp/demux.h"
 
 #import "NSImage+WebCache.h"
+#import "objc/runtime.h"
+
 // Callback for CGDataProviderRelease
 static void FreeImageData(void *info, const void *data, size_t size) {
     free((void *)data);
...
@@ -21,6 +23,12 @@ static void FreeImageData(void *info, const void *data, size_t size) {
 
 @implementation UIImage (WebP)
 
+- (NSInteger)sd_webpLoopCount
+{
+    NSNumber *value = objc_getAssociatedObject(self, @selector(sd_webpLoopCount));
+    return value.integerValue;
+}
+
 + (nullable UIImage *)sd_imageWithWebPData:(nullable NSData *)data {
     if (!data) {
         return nil;
...
@@ -38,7 +46,7 @@ static void FreeImageData(void *info, const void *data, size_t size) {
     uint32_t flags = WebPDemuxGetI(demuxer, WEBP_FF_FORMAT_FLAGS);
     if (!(flags & ANIMATION_FLAG)) {
         // for static single webp image
-        UIImage *staticImage = [self sd_rawWepImageWithData:webpData];
+        UIImage *staticImage = [self sd_rawWebpImageWithData:webpData];
         WebPDemuxDelete(demuxer);
         return staticImage;
     }
...
@@ -50,15 +58,35 @@ static void FreeImageData(void *info, const void *data, size_t size) {
         return nil;
     }
 
-    NSMutableArray *images = [NSMutableArray array];
-    NSTimeInterval duration = 0;
-#if SD_UIKIT || SD_WATCH
     int loopCount = WebPDemuxGetI(demuxer, WEBP_FF_LOOP_COUNT);
-#endif
+    int frameCount = WebPDemuxGetI(demuxer, WEBP_FF_FRAME_COUNT);
+    int canvasWidth = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH);
+    int canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
+    CGBitmapInfo bitmapInfo;
+    if (!(flags & ALPHA_FLAG)) {
+        bitmapInfo = kCGBitmapByteOrder32Big | kCGImageAlphaNoneSkipLast;
+    } else {
+        bitmapInfo = kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast;
+    }
+    CGContextRef canvas = CGBitmapContextCreate(NULL, canvasWidth, canvasHeight, 8, 0, SDCGColorSpaceGetDeviceRGB(), bitmapInfo);
+    if (!canvas) {
+        WebPDemuxReleaseIterator(&iter);
+        WebPDemuxDelete(demuxer);
+        return nil;
+    }
+
+    NSMutableArray<UIImage *> *images = [NSMutableArray array];
+    NSTimeInterval totalDuration = 0;
+    int durations[frameCount];
 
     do {
         UIImage *image;
         if (iter.blend_method == WEBP_MUX_BLEND) {
-            image = [self sd_blendWebpImageWithOriginImage:[images lastObject] iterator:iter];
+            image = [self sd_blendWebpImageWithCanvas:canvas iterator:iter];
         } else {
-            image = [self sd_rawWepImageWithData:iter.fragment];
+            image = [self sd_nonblendWebpImageWithCanvas:canvas iterator:iter];
         }
 
         if (!image) {
...
@@ -66,46 +94,88 @@ static void FreeImageData(void *info, const void *data, size_t size) {
         }
 
         [images addObject:image];
-        duration += iter.duration / 1000.0f;
 
-#if SD_MAC
-        break;
-#endif
+        int duration = iter.duration;
+        if (duration <= 10) {
+            // WebP standard says 0 duration is used for canvas updating but not showing image, but actually Chrome and other implementations set it to 100ms if duration is lower or equal than 10ms
+            // Some animated WebP images also created without duration, we should keep compatibility
+            duration = 100;
+        }
+        totalDuration += duration;
+        size_t count = images.count;
+        durations[count - 1] = duration;
 
     } while (WebPDemuxNextFrame(&iter));
 
     WebPDemuxReleaseIterator(&iter);
     WebPDemuxDelete(demuxer);
+    CGContextRelease(canvas);
 
     UIImage *finalImage = nil;
 #if SD_UIKIT || SD_WATCH
-    finalImage = [UIImage animatedImageWithImages:images duration:duration];
-#elif SD_MAC
-    if ([images count] > 0) {
-        finalImage = images[0];
+    NSArray<UIImage *> *animatedImages = [self sd_animatedImagesWithImages:images durations:durations totalDuration:totalDuration];
+    finalImage = [UIImage animatedImageWithImages:animatedImages duration:totalDuration / 1000.0];
+    if (finalImage) {
+        objc_setAssociatedObject(finalImage, @selector(sd_webpLoopCount), @(loopCount), OBJC_ASSOCIATION_RETAIN_NONATOMIC);
     }
+#elif SD_MAC
+    finalImage = images.firstObject;
 #endif
     return finalImage;
 }
 
-+ (nullable UIImage *)sd_blendWebpImageWithOriginImage:(nullable UIImage *)originImage iterator:(WebPIterator)iter {
-    if (!originImage) {
++ (nullable UIImage *)sd_blendWebpImageWithCanvas:(CGContextRef)canvas iterator:(WebPIterator)iter {
+    UIImage *image = [self sd_rawWebpImageWithData:iter.fragment];
+    if (!image) {
         return nil;
     }
 
-    CGSize size = originImage.size;
+    size_t canvasWidth = CGBitmapContextGetWidth(canvas);
+    size_t canvasHeight = CGBitmapContextGetHeight(canvas);
+    CGSize size = CGSizeMake(canvasWidth, canvasHeight);
     CGFloat tmpX = iter.x_offset;
     CGFloat tmpY = size.height - iter.height - iter.y_offset;
     CGRect imageRect = CGRectMake(tmpX, tmpY, iter.width, iter.height);
 
-    UIImage *image = [self sd_rawWepImageWithData:iter.fragment];
+    CGContextDrawImage(canvas, imageRect, image.CGImage);
+    CGImageRef newImageRef = CGBitmapContextCreateImage(canvas);
+
+#if SD_UIKIT || SD_WATCH
+    image = [UIImage imageWithCGImage:newImageRef];
+#elif SD_MAC
+    image = [[UIImage alloc] initWithCGImage:newImageRef size:NSZeroSize];
+#endif
+
+    CGImageRelease(newImageRef);
+
+    if (iter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
+        CGContextClearRect(canvas, imageRect);
+    }
+
+    return image;
+}
+
++ (nullable UIImage *)sd_nonblendWebpImageWithCanvas:(CGContextRef)canvas iterator:(WebPIterator)iter {
+    UIImage *image = [self sd_rawWebpImageWithData:iter.fragment];
+    if (!image) {
+        return nil;
+    }
+
-    CGColorSpaceRef colorSpaceRef = CGColorSpaceCreateDeviceRGB();
-    uint32_t bitmapInfo = iter.has_alpha ? kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast : 0;
-    CGContextRef blendCanvas = CGBitmapContextCreate(NULL, size.width, size.height, 8, 0, colorSpaceRef, bitmapInfo);
-    CGContextDrawImage(blendCanvas, CGRectMake(0, 0, size.width, size.height), originImage.CGImage);
-    CGContextDrawImage(blendCanvas, imageRect, image.CGImage);
-    CGImageRef newImageRef = CGBitmapContextCreateImage(blendCanvas);
+    size_t canvasWidth = CGBitmapContextGetWidth(canvas);
+    size_t canvasHeight = CGBitmapContextGetHeight(canvas);
+    CGSize size = CGSizeMake(canvasWidth, canvasHeight);
+    CGFloat tmpX = iter.x_offset;
+    CGFloat tmpY = size.height - iter.height - iter.y_offset;
+    CGRect imageRect = CGRectMake(tmpX, tmpY, iter.width, iter.height);
+
+    CGContextClearRect(canvas, imageRect);
+    CGContextDrawImage(canvas, imageRect, image.CGImage);
+    CGImageRef newImageRef = CGBitmapContextCreateImage(canvas);
 
 #if SD_UIKIT || SD_WATCH
     image = [UIImage imageWithCGImage:newImageRef];
...
@@ -114,13 +184,15 @@ static void FreeImageData(void *info, const void *data, size_t size) {
 #endif
 
     CGImageRelease(newImageRef);
-    CGContextRelease(blendCanvas);
-    CGColorSpaceRelease(colorSpaceRef);
+
+    if (iter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
+        CGContextClearRect(canvas, imageRect);
+    }
 
     return image;
 }
 
-+ (nullable UIImage *)sd_rawWepImageWithData:(WebPData)webpData {
++ (nullable UIImage *)sd_rawWebpImageWithData:(WebPData)webpData {
     WebPDecoderConfig config;
     if (!WebPInitDecoderConfig(&config)) {
         return nil;
...
@@ -133,7 +205,7 @@ static void FreeImageData(void *info, const void *data, size_t size) {
     config.output.colorspace = config.input.has_alpha ? MODE_rgbA : MODE_RGB;
     config.options.use_threads = 1;
 
-    // Decode the WebP image data into a RGBA value array.
+    // Decode the WebP image data into a RGBA value array
     if (WebPDecode(webpData.bytes, webpData.size, &config) != VP8_STATUS_OK) {
         return nil;
     }
...
@@ -145,16 +217,15 @@ static void FreeImageData(void *info, const void *data, size_t size) {
         height = config.options.scaled_height;
     }
 
-    // Construct a UIImage from the decoded RGBA value array.
+    // Construct a UIImage from the decoded RGBA value array
     CGDataProviderRef provider =
     CGDataProviderCreateWithData(NULL, config.output.u.RGBA.rgba, config.output.u.RGBA.size, FreeImageData);
-    CGColorSpaceRef colorSpaceRef = CGColorSpaceCreateDeviceRGB();
-    CGBitmapInfo bitmapInfo = config.input.has_alpha ? kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast : 0;
+    CGColorSpaceRef colorSpaceRef = SDCGColorSpaceGetDeviceRGB();
+    CGBitmapInfo bitmapInfo = config.input.has_alpha ? kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast : kCGBitmapByteOrder32Big | kCGImageAlphaNoneSkipLast;
     size_t components = config.input.has_alpha ? 4 : 3;
     CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
     CGImageRef imageRef = CGImageCreate(width, height, 8, components * 8, components * width, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);
 
-    CGColorSpaceRelease(colorSpaceRef);
     CGDataProviderRelease(provider);
 
 #if SD_UIKIT || SD_WATCH
...
@@ -167,6 +238,63 @@ static void FreeImageData(void *info, const void *data, size_t size) {
     return image;
 }
 
++ (NSArray<UIImage *> *)sd_animatedImagesWithImages:(NSArray<UIImage *> *)images durations:(int const * const)durations totalDuration:(NSTimeInterval)totalDuration
+{
+    // [UIImage animatedImageWithImages:duration:] only use the average duration for per frame
+    // divide the total duration to implement per frame duration for animated WebP
+    NSUInteger count = images.count;
+    if (!count) {
+        return nil;
+    }
+    if (count == 1) {
+        return images;
+    }
+
+    int const gcd = gcdArray(count, durations);
+    NSMutableArray<UIImage *> *animatedImages = [NSMutableArray arrayWithCapacity:count];
+    [images enumerateObjectsUsingBlock:^(UIImage * _Nonnull image, NSUInteger idx, BOOL * _Nonnull stop) {
+        int duration = durations[idx];
+        int repeatCount;
+        if (gcd) {
+            repeatCount = duration / gcd;
+        } else {
+            repeatCount = 1;
+        }
+        for (int i = 0; i < repeatCount; ++i) {
+            [animatedImages addObject:image];
+        }
+    }];
+
+    return animatedImages;
+}
+
+static CGColorSpaceRef SDCGColorSpaceGetDeviceRGB() {
+    static CGColorSpaceRef space;
+    static dispatch_once_t onceToken;
+    dispatch_once(&onceToken, ^{
+        space = CGColorSpaceCreateDeviceRGB();
+    });
+    return space;
+}
+
+static int gcdArray(size_t const count, int const * const values) {
+    int result = values[0];
+    for (size_t i = 1; i < count; ++i) {
+        result = gcd(values[i], result);
+    }
+    return result;
+}
+
+static int gcd(int a,int b) {
+    int c;
+    while (a != 0) {
+        c = a;
+        a = b % a;
+        b = c;
+    }
+    return b;
+}
+
 @end
 
 #endif
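
For reference, a minimal caller-side sketch of how the decoder touched by this patch is typically driven; it is not part of the diff, and the helper name and logging are illustrative assumptions:

#import "UIImage+WebP.h"

// Hypothetical helper, for illustration only: decode WebP data and log the
// animation metadata produced by the category above.
static UIImage *DecodeAnimatedWebP(NSData *webpData) {
    UIImage *image = [UIImage sd_imageWithWebPData:webpData]; // nil if the data is not valid WebP
    if (image.images.count > 1) {
        // Frames are repeated internally so that -duration reflects the real per-frame timings.
        NSLog(@"frames: %lu, total duration: %.2fs, loop count: %ld",
              (unsigned long)image.images.count, image.duration, (long)[image sd_webpLoopCount]);
    }
    return image;
}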