From 6e7eaa9189b52bfa2a2f3d1a51079011f17509b2 Mon Sep 17 00:00:00 2001 From: psi Date: Sun, 15 Mar 2020 13:28:37 +0900 Subject: [PATCH 1/9] Separate sources --- SDWebImageAVIFCoder/Classes/ColorSpace.h | 19 + SDWebImageAVIFCoder/Classes/ColorSpace.m | 526 ++++++ SDWebImageAVIFCoder/Classes/Conversion.h | 9 + SDWebImageAVIFCoder/Classes/Conversion.m | 1063 +++++++++++ .../Classes/SDImageAVIFCoder.m | 1567 +---------------- 5 files changed, 1620 insertions(+), 1564 deletions(-) create mode 100644 SDWebImageAVIFCoder/Classes/ColorSpace.h create mode 100644 SDWebImageAVIFCoder/Classes/ColorSpace.m create mode 100644 SDWebImageAVIFCoder/Classes/Conversion.h create mode 100644 SDWebImageAVIFCoder/Classes/Conversion.m diff --git a/SDWebImageAVIFCoder/Classes/ColorSpace.h b/SDWebImageAVIFCoder/Classes/ColorSpace.h new file mode 100644 index 0000000..61295a1 --- /dev/null +++ b/SDWebImageAVIFCoder/Classes/ColorSpace.h @@ -0,0 +1,19 @@ +// +// ColorSpace.h +// SDWebImageAVIFCoder +// +// Created by psi on 2020/03/15. +// + +#pragma once +#if __has_include() +#import +#else +#import "avif/avif.h" +#endif + +CGColorSpaceRef CreateColorSpaceMono(avifNclxColourPrimaries const colorPrimaries, avifNclxTransferCharacteristics const transferCharacteristics); +CGColorSpaceRef CreateColorSpaceRGB(avifNclxColourPrimaries const colorPrimaries, avifNclxTransferCharacteristics const transferCharacteristics); + +void CalcColorSpaceMono(avifImage * avif, CGColorSpaceRef* ref, BOOL* shouldRelease); +void CalcColorSpaceRGB(avifImage * avif, CGColorSpaceRef* ref, BOOL* shouldRelease); diff --git a/SDWebImageAVIFCoder/Classes/ColorSpace.m b/SDWebImageAVIFCoder/Classes/ColorSpace.m new file mode 100644 index 0000000..ccf2be4 --- /dev/null +++ b/SDWebImageAVIFCoder/Classes/ColorSpace.m @@ -0,0 +1,526 @@ +// +// ColorSpace.m +// SDWebImageAVIFCoder +// +// Created by psi on 2020/03/15. 
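+//  Maps the AVIF file's ICC profile or NCLX colour information onto CGColorSpace objects.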
+// + +#import "SDImageAVIFCoder.h" +#import +#if __has_include() +#import +#else +#import "avif/avif.h" +#endif + +static void CalcWhitePoint(uint16_t const colorPrimaries, vImageWhitePoint* const white) { + float primaries[8]; + avifNclxColourPrimariesGetValues(colorPrimaries, primaries); + white->white_x = primaries[6]; + white->white_y = primaries[7]; +} + +static void CalcRGBPrimaries(uint16_t const colorPrimaries, vImageRGBPrimaries* const prim) { + float primaries[8]; + avifNclxColourPrimariesGetValues(colorPrimaries, primaries); + prim->red_x = primaries[0]; + prim->red_y = primaries[1]; + prim->green_x = primaries[2]; + prim->green_y = primaries[3]; + prim->blue_x = primaries[4]; + prim->blue_y = primaries[5]; + prim->white_x = primaries[6]; + prim->white_y = primaries[7]; +} + +static void CalcTransferFunction(uint16_t const transferCharacteristics, vImageTransferFunction* const tf) { + // See: https://www.itu.int/rec/T-REC-H.273/en + static const float alpha = 1.099296826809442f; + static const float beta = 0.018053968510807f; + /* + // R' = c0 * pow( c1 * R + c2, gamma ) + c3, (R >= cutoff) + // R' = c4 * R + c5 (R < cutoff) + */ + + switch(transferCharacteristics) { + case AVIF_NCLX_TRANSFER_CHARACTERISTICS_GAMMA28: // 5 + tf->cutoff = -INFINITY; + tf->c0 = 1.0f; + tf->c1 = 1.0f; + tf->c2 = 0.0f; + tf->c3 = 0.0f; + tf->c4 = 0.0f; + tf->c5 = 0.0f; + tf->gamma = 1.0f/2.8f; + break; + case AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT709: // 1 + case AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT601: // 6 + case AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT2020_10BIT: // 14 + case AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT2020_12BIT: // 15 + tf->cutoff = beta; + // + tf->c0 = alpha; + tf->c1 = 1.0f; + tf->c2 = 0.0f; + tf->gamma = 0.45f; + tf->c3 = -(alpha - 1); + // + tf->c4 = 4.5f; + tf->c5 = 0.0f; + break; + case AVIF_NCLX_TRANSFER_CHARACTERISTICS_ST240: // 7 + tf->cutoff = beta; + // + tf->c0 = alpha; + tf->c1 = 1.0f; + tf->c2 = 0.0f; + tf->gamma = 0.45f; + tf->c3 = -(alpha - 1); + // + tf->c4 = 4.0f; + tf->c5 = 0.0f; + break; + case AVIF_NCLX_TRANSFER_CHARACTERISTICS_LINEAR: // 8 + tf->cutoff = INFINITY; + // + tf->c0 = 1.0f; + tf->c1 = 1.0f; + tf->c2 = 0.0f; + tf->gamma = 1.0f; + tf->c3 = 0.0f; + // + tf->c4 = 4.0f; + tf->c5 = 0.0f; + break; + case AVIF_NCLX_TRANSFER_CHARACTERISTICS_IEC61966: // 11 + tf->cutoff = beta; + // + tf->c0 = alpha; + tf->c1 = 1.0f; + tf->c2 = 0.0f; + tf->gamma = 0.45f; + tf->c3 = -(alpha - 1); + // + tf->c4 = 4.5f; + tf->c5 = 0.0f; + break; + case AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT1361_EXTENDED: // 12 + tf->cutoff = beta; + // + tf->c0 = alpha; + tf->c1 = 1.0f; + tf->c2 = 0.0f; + tf->gamma = 0.45f; + tf->c3 = -(alpha - 1); + // + tf->c4 = 4.5f; + tf->c5 = 0.0f; + break; + case AVIF_NCLX_TRANSFER_CHARACTERISTICS_SRGB: // 13 + tf->cutoff = beta; + // + tf->c0 = alpha; + tf->c1 = 1.0f; + tf->c2 = 0.0f; + tf->gamma = 1.0f/2.4f; + tf->c3 = -(alpha - 1); + // + tf->c4 = 12.92f; + tf->c5 = 0.0f; + break; + case AVIF_NCLX_TRANSFER_CHARACTERISTICS_ST428: // 17 + tf->cutoff = -INFINITY; + // + tf->c0 = 1.0f; + tf->c1 = 48.0f / 52.37f; + tf->c2 = 0.0f; + tf->gamma = 1.0f/2.6f; + tf->c3 = 0.0f; + // + tf->c4 = 1.0f; + tf->c5 = 0.0f; + break; + // Can't be represented by vImageTransferFunction. Use gamma 2.2 as a fallback. 
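+        // (ST 2084 PQ, HLG and the logarithmic curves are not piecewise power functions, so they
+        // do not fit the c0..c5/gamma/cutoff model above; a plain 2.2 gamma keeps the image at
+        // least displayable, albeit without correct HDR tone mapping.)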
+ case AVIF_NCLX_TRANSFER_CHARACTERISTICS_ST2084: // 16 + case AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT2100_HLG: // 18 + case AVIF_NCLX_TRANSFER_CHARACTERISTICS_LOG_100_1: // 9 + case AVIF_NCLX_TRANSFER_CHARACTERISTICS_LOG_100_SQRT: // 10 + // + case AVIF_NCLX_TRANSFER_CHARACTERISTICS_UNKNOWN: // 0 + case AVIF_NCLX_TRANSFER_CHARACTERISTICS_UNSPECIFIED: // 2 + case AVIF_NCLX_TRANSFER_CHARACTERISTICS_GAMMA22: // 4 + default: + tf->cutoff = -INFINITY; + tf->c0 = 1.0f; + tf->c1 = 1.0f; + tf->c2 = 0.0f; + tf->c3 = 0.0f; + tf->c4 = 0.0f; + tf->c5 = 0.0f; + tf->gamma = 1.0f/2.2f; + break; + } +} +CGColorSpaceRef CreateColorSpaceMono(avifNclxColourPrimaries const colorPrimaries, avifNclxTransferCharacteristics const transferCharacteristics) { + if (@available(macOS 10.10, iOS 8.0, tvOS 8.0, *)) { + vImage_Error err; + vImageWhitePoint white; + vImageTransferFunction transfer; + CalcWhitePoint(colorPrimaries, &white); + CalcTransferFunction(transferCharacteristics, &transfer); + CGColorSpaceRef colorSpace = vImageCreateMonochromeColorSpaceWithWhitePointAndTransferFunction(&white, &transfer, kCGRenderingIntentDefault, kvImagePrintDiagnosticsToConsole, &err); + if(err != kvImageNoError) { + NSLog(@"[BUG] Failed to create monochrome color space: %ld", err); + if(colorSpace != NULL) { + CGColorSpaceRelease(colorSpace); + } + return NULL; + } + return colorSpace; + }else{ + return NULL; + } +} + +CGColorSpaceRef CreateColorSpaceRGB(avifNclxColourPrimaries const colorPrimaries, avifNclxTransferCharacteristics const transferCharacteristics) { + if (@available(macOS 10.10, iOS 8.0, tvOS 8.0, *)) { + vImage_Error err; + vImageRGBPrimaries primaries; + vImageTransferFunction transfer; + CalcRGBPrimaries(colorPrimaries, &primaries); + CalcTransferFunction(transferCharacteristics, &transfer); + CGColorSpaceRef colorSpace = vImageCreateRGBColorSpaceWithPrimariesAndTransferFunction(&primaries, &transfer, kCGRenderingIntentDefault, kvImagePrintDiagnosticsToConsole, &err); + if(err != kvImageNoError) { + NSLog(@"[BUG] Failed to create monochrome color space: %ld", err); + if(colorSpace != NULL) { + CGColorSpaceRelease(colorSpace); + } + return NULL; + } + return colorSpace; + }else{ + return NULL; + } +} + +void CalcColorSpaceMono(avifImage * avif, CGColorSpaceRef* ref, BOOL* shouldRelease) { + static CGColorSpaceRef defaultColorSpace; + { + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + defaultColorSpace = CGColorSpaceCreateDeviceGray(); + }); + } + if(avif->profileFormat == AVIF_PROFILE_FORMAT_NONE) { + *ref = defaultColorSpace; + *shouldRelease = FALSE; + return; + } + if(avif->profileFormat == AVIF_PROFILE_FORMAT_ICC) { + if(avif->icc.data && avif->icc.size) { + if(@available(macOS 10.12, iOS 10.0, tvOS 10.0, *)) { + *ref = CGColorSpaceCreateWithICCData(avif->icc.data); + *shouldRelease = TRUE; + }else{ + NSData* iccData = [NSData dataWithBytes:avif->icc.data length:avif->icc.size]; + *ref = CGColorSpaceCreateWithICCProfile((__bridge CFDataRef)iccData); + *shouldRelease = TRUE; + } + return; + } + *ref = defaultColorSpace; + *shouldRelease = FALSE; + return; + } + avifNclxColourPrimaries const colorPrimaries = avif->nclx.colourPrimaries; + avifNclxTransferCharacteristics const transferCharacteristics = avif->nclx.transferCharacteristics; + if((colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_UNKNOWN || + colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_UNSPECIFIED) && + (transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_UNKNOWN || + transferCharacteristics == 
AVIF_NCLX_TRANSFER_CHARACTERISTICS_UNSPECIFIED)) { + *ref = defaultColorSpace; + *shouldRelease = FALSE; + return; + } + if(colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_SRGB && + transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_SRGB) { + static CGColorSpaceRef sRGB = NULL; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + sRGB = CreateColorSpaceMono(colorPrimaries, transferCharacteristics); + if(sRGB == NULL) { + sRGB = defaultColorSpace; + } + }); + *ref = sRGB; + *shouldRelease = FALSE; + return; + } + if(colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_BT709 && + transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT709) { + static CGColorSpaceRef bt709 = NULL; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + bt709 = CreateColorSpaceMono(colorPrimaries, transferCharacteristics); + if(bt709 == NULL) { + bt709 = defaultColorSpace; + } + }); + *ref = bt709; + *shouldRelease = FALSE; + return; + } + if(colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_BT2020 && + (transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT2020_10BIT || + transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT2020_12BIT)) { + static CGColorSpaceRef bt2020 = NULL; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + bt2020 = CreateColorSpaceMono(colorPrimaries, transferCharacteristics); + if(bt2020 == NULL) { + bt2020 = defaultColorSpace; + } + }); + *ref = bt2020; + *shouldRelease = FALSE; + return; + } + if(colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_P3 && + transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_SRGB) { + static CGColorSpaceRef p3 = NULL; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + p3 = CreateColorSpaceMono(colorPrimaries, transferCharacteristics); + if(p3 == NULL) { + p3 = defaultColorSpace; + } + }); + *ref = p3; + *shouldRelease = FALSE; + return; + } + + *ref = CreateColorSpaceMono(colorPrimaries, transferCharacteristics); + if(*ref != NULL) { + *shouldRelease = TRUE; + } else { + *ref = defaultColorSpace; + *shouldRelease = FALSE; + } +} + +void CalcColorSpaceRGB(avifImage * avif, CGColorSpaceRef* ref, BOOL* shouldRelease) { + static CGColorSpaceRef defaultColorSpace = NULL; + { + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + defaultColorSpace = CGColorSpaceCreateDeviceRGB(); + }); + } + if(avif->profileFormat == AVIF_PROFILE_FORMAT_NONE) { + *ref = defaultColorSpace; + *shouldRelease = FALSE; + return; + } + if(avif->profileFormat == AVIF_PROFILE_FORMAT_ICC) { + if(avif->icc.data && avif->icc.size) { + if(@available(macOS 10.12, iOS 10.0, tvOS 10.0, *)) { + *ref = CGColorSpaceCreateWithICCData(avif->icc.data); + *shouldRelease = TRUE; + }else{ + NSData* iccData = [NSData dataWithBytes:avif->icc.data length:avif->icc.size]; + *ref = CGColorSpaceCreateWithICCProfile((__bridge CFDataRef)iccData); + *shouldRelease = TRUE; + } + return; + } + *ref = defaultColorSpace; + *shouldRelease = FALSE; + return; + } + avifNclxColourPrimaries const colorPrimaries = avif->nclx.colourPrimaries; + avifNclxTransferCharacteristics const transferCharacteristics = avif->nclx.transferCharacteristics; + if((colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_UNKNOWN || + colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_UNSPECIFIED) && + (transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_UNKNOWN || + transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_UNSPECIFIED)) { + *ref = defaultColorSpace; + *shouldRelease = FALSE; + return; + } + if(colorPrimaries == 
AVIF_NCLX_COLOUR_PRIMARIES_BT709 && + transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT709) { + static CGColorSpaceRef bt709 = NULL; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + if (@available(macOS 10.11, iOS 9.0, tvOS 9.0, *)) { + bt709 = CGColorSpaceCreateWithName(kCGColorSpaceITUR_709); + } else { + bt709 = defaultColorSpace; + } + }); + *ref = bt709; + *shouldRelease = FALSE; + return; + } + if(colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_SRGB && + transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_SRGB) { + static CGColorSpaceRef sRGB = NULL; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + if (@available(macOS 10.5, iOS 9.0, tvOS 9.0, *)) { + sRGB = CGColorSpaceCreateWithName(kCGColorSpaceSRGB); + } else { + sRGB = defaultColorSpace; + } + }); + *ref = sRGB; + *shouldRelease = FALSE; + return; + } + if(colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_SRGB && + transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_LINEAR) { + static CGColorSpaceRef sRGBlinear = NULL; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + if (@available(macOS 10.12, iOS 10.0, tvOS 10.0, *)) { + sRGBlinear = CGColorSpaceCreateWithName(kCGColorSpaceLinearSRGB); + } else { + sRGBlinear = defaultColorSpace; + } + }); + *ref = sRGBlinear; + *shouldRelease = FALSE; + return; + } + if(colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_BT2020 && + (transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT2020_10BIT || + transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT2020_12BIT)) { + static CGColorSpaceRef bt2020 = NULL; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + if (@available(macOS 10.11, iOS 9.0, tvOS 9.0, *)) { + bt2020 = CGColorSpaceCreateWithName(kCGColorSpaceITUR_2020); + } else { + bt2020 = defaultColorSpace; + } + }); + *ref = bt2020; + *shouldRelease = FALSE; + return; + } + if(colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_BT2020 && + transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_LINEAR) { + static CGColorSpaceRef bt2020linear = NULL; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + if (@available(macOS 10.14.3, iOS 12.3, tvOS 12.3, *)) { + bt2020linear = CGColorSpaceCreateWithName(kCGColorSpaceExtendedLinearITUR_2020); + } else { + bt2020linear = defaultColorSpace; + } + }); + *ref = bt2020linear; + *shouldRelease = FALSE; + return; + } + if(colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_BT2100 && + transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT2100_HLG) { + static CGColorSpaceRef bt2020hlg = NULL; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + if (@available(macOS 10.14.6, iOS 13.0, tvOS 13.0, *)) { + bt2020hlg = CGColorSpaceCreateWithName(kCGColorSpaceITUR_2020_HLG); + } else { + bt2020hlg = defaultColorSpace; + } + }); + *ref = bt2020hlg; + *shouldRelease = FALSE; + return; + } + if(colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_BT2100 && + transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT2100_PQ) { + static CGColorSpaceRef bt2020pq = NULL; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + if (@available(macOS 10.14.6, iOS 13.0, tvOS 13.0, *)) { + bt2020pq = CGColorSpaceCreateWithName(kCGColorSpaceITUR_2020_PQ_EOTF); + } else { + bt2020pq = defaultColorSpace; + } + }); + *ref = bt2020pq; + *shouldRelease = FALSE; + return; + } + if(colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_P3 && + transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_SRGB) { + static 
CGColorSpaceRef p3 = NULL; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + if (@available(macOS 10.11.2, iOS 9.3, tvOS 9.3, *)) { + p3 = CGColorSpaceCreateWithName(kCGColorSpaceDisplayP3); + } else { + p3 = defaultColorSpace; + } + }); + *ref = p3; + *shouldRelease = FALSE; + return; + } + if(colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_P3 && + transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT2100_HLG) { + static CGColorSpaceRef p3hlg = NULL; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + if (@available(macOS 10.14.6, iOS 13.0, tvOS 13.0, *)) { + p3hlg = CGColorSpaceCreateWithName(kCGColorSpaceDisplayP3_HLG); + } else { + p3hlg = defaultColorSpace; + } + }); + + *ref = p3hlg; + *shouldRelease = FALSE; + return; + } + if(colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_P3 && + transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT2100_PQ) { + static CGColorSpaceRef p3pq = NULL; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + if (@available(macOS 10.14.6, iOS 13.0, tvOS 13.0, *)) { + p3pq = CGColorSpaceCreateWithName(kCGColorSpaceDisplayP3_PQ_EOTF); + } else { + p3pq = defaultColorSpace; + } + }); + *ref = p3pq; + *shouldRelease = FALSE; + return; + } + if(colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_P3 && + transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_LINEAR) { + static CGColorSpaceRef p3linear = NULL; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + if (@available(macOS 10.14.3, iOS 12.3, tvOS 12.3, *)) { + p3linear = CGColorSpaceCreateWithName(kCGColorSpaceExtendedLinearDisplayP3); + } else { + p3linear = defaultColorSpace; + } + }); + *ref = p3linear; + *shouldRelease = FALSE; + return; + } + + *ref = CreateColorSpaceRGB(colorPrimaries, transferCharacteristics); + if(*ref != NULL) { + *shouldRelease = TRUE; + } else { + *ref = defaultColorSpace; + *shouldRelease = FALSE; + } +} diff --git a/SDWebImageAVIFCoder/Classes/Conversion.h b/SDWebImageAVIFCoder/Classes/Conversion.h new file mode 100644 index 0000000..1e55d96 --- /dev/null +++ b/SDWebImageAVIFCoder/Classes/Conversion.h @@ -0,0 +1,9 @@ +// +// Conversion.h +// SDWebImageAVIFCoder +// +// Created by psi on 2020/03/15. +// + +#pragma once +CGImageRef CreateCGImageFromAVIF(avifImage * avif); diff --git a/SDWebImageAVIFCoder/Classes/Conversion.m b/SDWebImageAVIFCoder/Classes/Conversion.m new file mode 100644 index 0000000..ff431c8 --- /dev/null +++ b/SDWebImageAVIFCoder/Classes/Conversion.m @@ -0,0 +1,1063 @@ +// +// Conversion.m +// SDWebImageAVIFCoder +// +// Created by lizhuoli on 2018/5/8. +// + +#import "SDImageAVIFCoder.h" +#import +#if __has_include() +#import +#else +#import "avif/avif.h" +#endif +#import "ColorSpace.h" + +static void FreeImageData(void *info, const void *data, size_t size) { + free((void *)data); +} + +static CGImageRef CreateImageFromBuffer(avifImage * avif, vImage_Buffer* result) { + BOOL monochrome = avif->yuvPlanes[1] == NULL || avif->yuvPlanes[2] == NULL; + BOOL hasAlpha = avif->alphaPlane != NULL; + BOOL usesU16 = avifImageUsesU16(avif); + size_t components = (monochrome ? 1 : 3) + (hasAlpha ? 1 : 0); + + CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, result->data, result->rowBytes * result->height, FreeImageData); + CGBitmapInfo bitmapInfo = usesU16 ? kCGBitmapByteOrder16Host : kCGBitmapByteOrderDefault; + bitmapInfo |= hasAlpha ? 
kCGImageAlphaFirst : kCGImageAlphaNone; + + // Calc color space + CGColorSpaceRef colorSpace = NULL; + BOOL shouldReleaseColorSpace = FALSE; + if(monochrome){ + CalcColorSpaceMono(avif, &colorSpace, &shouldReleaseColorSpace); + }else{ + CalcColorSpaceRGB(avif, &colorSpace, &shouldReleaseColorSpace); + } + + CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault; + size_t bitsPerComponent = usesU16 ? 16 : 8; + size_t bitsPerPixel = components * bitsPerComponent; + size_t rowBytes = result->width * components * (usesU16 ? sizeof(uint16_t) : sizeof(uint8_t)); + + CGImageRef imageRef = CGImageCreate(result->width, result->height, bitsPerComponent, bitsPerPixel, rowBytes, colorSpace, bitmapInfo, provider, NULL, NO, renderingIntent); + + // clean up + if(shouldReleaseColorSpace) { + CGColorSpaceRelease(colorSpace); + } + CGDataProviderRelease(provider); + + return imageRef; +} + +static void SetupConversionInfo(avifImage * avif, + avifReformatState* state, + vImage_YpCbCrToARGBMatrix* matrix, + vImage_YpCbCrPixelRange* pixelRange) { + avifRGBImage emptyRGBImage = { + .width = avif->width, + .height = avif->height, + .depth = avif->depth, + .format = AVIF_RGB_FORMAT_ARGB, + + .pixels = NULL, + .rowBytes = 0, + }; + avifPrepareReformatState(avif, &emptyRGBImage, state); + + // Setup Matrix + matrix->Yp = 1.0f; + + matrix->Cb_B = 2.0f * (1.0f - state->kb); + matrix->Cb_G = -2.0f * (1.0f - state->kb) * state->kb / state->kg; + + matrix->Cr_R = 2.0f * (1.0f - state->kr); + matrix->Cr_G = -2.0f * (1.0f - state->kr) * state->kr / state->kg; + + // Setup Pixel Range + switch (avif->depth) { + case 8: + if (avif->yuvRange == AVIF_RANGE_LIMITED) { + pixelRange->Yp_bias = 16; + pixelRange->YpRangeMax = 235; + pixelRange->YpMax = 255; + pixelRange->YpMin = 0; + pixelRange->CbCr_bias = 128; + pixelRange->CbCrRangeMax = 240; + pixelRange->CbCrMax = 255; + pixelRange->CbCrMin = 0; + }else{ + pixelRange->Yp_bias = 0; + pixelRange->YpRangeMax = 255; + pixelRange->YpMax = 255; + pixelRange->YpMin = 0; + pixelRange->CbCr_bias = 128; + pixelRange->CbCrRangeMax = 255; + pixelRange->CbCrMax = 255; + pixelRange->CbCrMin = 0; + } + break; + case 10: + if (avif->yuvRange == AVIF_RANGE_LIMITED) { + pixelRange->Yp_bias = 64; + pixelRange->YpRangeMax = 940; + pixelRange->YpMax = 1023; + pixelRange->YpMin = 0; + pixelRange->CbCr_bias = 512; + pixelRange->CbCrRangeMax = 960; + pixelRange->CbCrMax = 1023; + pixelRange->CbCrMin = 0; + }else{ + pixelRange->Yp_bias = 0; + pixelRange->YpRangeMax = 1023; + pixelRange->YpMax = 1023; + pixelRange->YpMin = 0; + pixelRange->CbCr_bias = 512; + pixelRange->CbCrRangeMax = 1023; + pixelRange->CbCrMax = 1023; + pixelRange->CbCrMin = 0; + } + break; + case 12: + if (avif->yuvRange == AVIF_RANGE_LIMITED) { + pixelRange->Yp_bias = 256; + pixelRange->YpRangeMax = 3760; + pixelRange->YpMax = 4095; + pixelRange->YpMin = 0; + pixelRange->CbCr_bias = 2048; + pixelRange->CbCrRangeMax = 3840; + pixelRange->CbCrMax = 4095; + pixelRange->CbCrMin = 0; + }else{ + pixelRange->Yp_bias = 0; + pixelRange->YpRangeMax = 4095; + pixelRange->YpMax = 4095; + pixelRange->YpMin = 0; + pixelRange->CbCr_bias = 2048; + pixelRange->CbCrRangeMax = 4095; + pixelRange->CbCrMax = 4095; + pixelRange->CbCrMin = 0; + } + break; + default: + NSLog(@"Unknown bit depth: %d", avif->depth); + return; + } + +} + + +// Convert 8bit AVIF image into RGB888/ARGB8888/Mono/MonoA using vImage Acceralation Framework. 
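+// Pipeline: SetupConversionInfo derives the YpCbCr->ARGB matrix and pixel range from the AVIF
+// reformat state, the Y/Cb/Cr planes (with neutral dummy chroma planes synthesized for
+// monochrome input) are expanded into an interleaved ARGB8888 buffer, and the result is then
+// repacked into the final RGB888 / ARGB8888 / Mono / MonoA layout before being wrapped in a
+// CGImage by CreateImageFromBuffer.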
+static CGImageRef CreateCGImage8(avifImage * avif) { + CGImageRef result = NULL; + uint8_t* resultBufferData = NULL; + uint8_t* argbBufferData = NULL; + uint8_t* dummyCbData = NULL; + uint8_t* dummyCrData = NULL; + uint8_t* scaledAlphaBufferData = NULL; + + vImage_Error err = kvImageNoError; + + // image properties + BOOL const monochrome = avif->yuvPlanes[1] == NULL || avif->yuvPlanes[2] == NULL; + BOOL const hasAlpha = avif->alphaPlane != NULL; + size_t const components = (monochrome ? 1 : 3) + (hasAlpha ? 1 : 0); + size_t const rowBytes = components * sizeof(uint8_t) * avif->width; + + // setup conversion info + avifReformatState state = {0}; + vImage_YpCbCrToARGBMatrix matrix = {0}; + vImage_YpCbCrPixelRange pixelRange = {0}; + SetupConversionInfo(avif, &state, &matrix, &pixelRange); + + vImage_YpCbCrToARGB convInfo = {0}; + + resultBufferData = calloc(components * rowBytes * avif->height, sizeof(uint8_t)); + if(resultBufferData == NULL) { + goto end_all; + } + + BOOL const useTempBuffer = monochrome || !hasAlpha; // if and only if the image is not ARGB + + if(useTempBuffer) { + argbBufferData = calloc(avif->width * avif->height * 4, sizeof(uint8_t)); + if(argbBufferData == NULL) { + goto end_all; + } + } + + vImage_Buffer resultBuffer = { + .data = resultBufferData, + .width = avif->width, + .height = avif->height, + .rowBytes = avif->width * components, + }; + vImage_Buffer argbBuffer = { + .data = useTempBuffer ? argbBufferData : resultBufferData, + .width = avif->width, + .height = avif->height, + .rowBytes = avif->width * 4, + }; + vImage_Buffer origY = { + .data = avif->yuvPlanes[AVIF_CHAN_Y], + .rowBytes = avif->yuvRowBytes[AVIF_CHAN_Y], + .width = avif->width, + .height = avif->height, + }; + + vImage_Buffer origCb = { + .data = avif->yuvPlanes[AVIF_CHAN_U], + .rowBytes = avif->yuvRowBytes[AVIF_CHAN_U], + .width = (avif->width+state.formatInfo.chromaShiftX) >> state.formatInfo.chromaShiftX, + .height = (avif->height+state.formatInfo.chromaShiftY) >> state.formatInfo.chromaShiftY, + }; + + if(origCb.data == NULL) { // allocate dummy data to convert monochrome images. + dummyCbData = calloc(origCb.width, sizeof(uint8_t)); + if(dummyCbData == NULL) { + goto end_all; + } + origCb.data = dummyCbData; + origCb.rowBytes = 0; + memset(origCb.data, pixelRange.CbCr_bias, origCb.width); + } + + vImage_Buffer origCr = { + .data = avif->yuvPlanes[AVIF_CHAN_V], + .rowBytes = avif->yuvRowBytes[AVIF_CHAN_V], + .width = (avif->width+state.formatInfo.chromaShiftX) >> state.formatInfo.chromaShiftX, + .height = (avif->height+state.formatInfo.chromaShiftY) >> state.formatInfo.chromaShiftY, + }; + if(origCr.data == NULL) { // allocate dummy data to convert monochrome images. 
+ dummyCrData = calloc(origCr.width, sizeof(uint8_t)); + if(dummyCrData == NULL) { + goto end_all; + } + origCr.data = dummyCrData; + origCr.rowBytes = 0; + memset(origCr.data, pixelRange.CbCr_bias, origCr.width); + } + + uint8_t const permuteMap[4] = {0, 1, 2, 3}; + switch(avif->yuvFormat) { + case AVIF_PIXEL_FORMAT_NONE: + NSLog(@"Invalid pixel format."); + goto end_all; + case AVIF_PIXEL_FORMAT_YUV420: + case AVIF_PIXEL_FORMAT_YV12: + { + err = + vImageConvert_YpCbCrToARGB_GenerateConversion(&matrix, + &pixelRange, + &convInfo, + kvImage420Yp8_Cb8_Cr8, + kvImageARGB8888, + kvImageNoFlags); + if(err != kvImageNoError) { + NSLog(@"Failed to setup conversion: %ld", err); + goto end_420; + } + + err = vImageConvert_420Yp8_Cb8_Cr8ToARGB8888(&origY, + &origCb, + &origCr, + &argbBuffer, + &convInfo, + permuteMap, + 255, + kvImageNoFlags); + if(err != kvImageNoError) { + NSLog(@"Failed to convert to ARGB8888: %ld", err); + goto end_420; + } + end_420: + // We didn't allocate any heaps. + if(err == kvImageNoError) { + break; + } else { + goto end_all; + } + } + case AVIF_PIXEL_FORMAT_YUV444: + { + uint8_t* yuvBufferData = NULL; + err = + vImageConvert_YpCbCrToARGB_GenerateConversion(&matrix, + &pixelRange, + &convInfo, + kvImage444CrYpCb8, + kvImageARGB8888, + kvImageNoFlags); + if(err != kvImageNoError) { + NSLog(@"Failed to setup conversion: %ld", err); + goto end_444; + } + + yuvBufferData = calloc(avif->width * avif->height * 3, sizeof(uint8_t)); + if(yuvBufferData == NULL) { + err = kvImageMemoryAllocationError; + goto end_444; + } + vImage_Buffer yuvBuffer = { + .data = yuvBufferData, + .width = avif->width, + .height = avif->height, + .rowBytes = avif->width * 3, + }; + err = vImageConvert_Planar8toRGB888(&origCr, &origY, &origCb, &yuvBuffer, kvImageNoFlags); + if(err != kvImageNoError) { + NSLog(@"Failed to composite kvImage444CrYpCb8: %ld", err); + goto end_444; + } + vImageConvert_444CrYpCb8ToARGB8888(&yuvBuffer, + &argbBuffer, + &convInfo, + permuteMap, + 255, + kvImageNoFlags); + if(err != kvImageNoError) { + NSLog(@"Failed to convert to ARGB8888: %ld", err); + goto end_444; + } + end_444: + free(yuvBufferData); + if(err == kvImageNoError) { + break; + } else { + goto end_all; + } + } + case AVIF_PIXEL_FORMAT_YUV422: + { + uint8_t* y1BufferData = NULL; + uint8_t* y2BufferData = NULL; + uint8_t* yuyvBufferData = NULL; + + err = + vImageConvert_YpCbCrToARGB_GenerateConversion(&matrix, + &pixelRange, + &convInfo, + kvImage422YpCbYpCr8, + kvImageARGB8888, + kvImageNoFlags); + if(err != kvImageNoError) { + NSLog(@"Failed to setup conversion: %ld", err); + goto end_422; + } + + const vImagePixelCount alignedWidth = (origY.width+1) & (~1); + y1BufferData = calloc(alignedWidth/2 * origY.height, sizeof(uint8_t)); + y2BufferData = calloc(alignedWidth/2 * origY.height, sizeof(uint8_t)); + yuyvBufferData = calloc(alignedWidth * avif->height * 2, sizeof(uint8_t)); + if(y1BufferData == NULL || y2BufferData == NULL || yuyvBufferData == NULL) { + err = kvImageMemoryAllocationError; + goto end_422; + } + vImage_Buffer y1Buffer = { + .data = y1BufferData, + .width = alignedWidth/2, + .height = origY.height, + .rowBytes = alignedWidth/2 * sizeof(uint8_t), + }; + vImage_Buffer y2Buffer = { + .data = y2BufferData, + .width = alignedWidth/2, + .height = origY.height, + .rowBytes = alignedWidth/2 * sizeof(uint8_t), + }; + vImage_Buffer yuyvBuffer = { + .data = yuyvBufferData, + .width = alignedWidth/2, // It will be fixed later. 
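+                // ("fixed" by doubling the width after the planes are interleaved, since each
+                //  4-byte YpCbYpCr group in this buffer covers two output pixels)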
+ .height = avif->height, + .rowBytes = alignedWidth / 2 * 4 * sizeof(uint8_t), + }; + err = vImageConvert_ChunkyToPlanar8((const void*[]){origY.data}, + (const vImage_Buffer*[]){&y1Buffer}, + 1 /* channelCount */, 2 /* src srcStrideBytes */, + alignedWidth/2, origY.height, + origY.rowBytes, kvImageNoFlags); + if(err != kvImageNoError) { + NSLog(@"Failed to separate first Y channel: %ld", err); + goto end_422; + } + y2Buffer.width = origY.width/2; + err = vImageConvert_ChunkyToPlanar8((const void*[]){origY.data + 1}, + (const vImage_Buffer*[]){&y2Buffer}, + 1 /* channelCount */, 2 /* src srcStrideBytes */, + origY.width/2, origY.height, + origY.rowBytes, kvImageNoFlags); + y2Buffer.width = alignedWidth/2; + if(err != kvImageNoError) { + NSLog(@"Failed to separate second Y channel: %ld", err); + goto end_422; + } + err = vImageConvert_Planar8toARGB8888(&y1Buffer, &origCb, &y2Buffer, &origCr, + &yuyvBuffer, kvImageNoFlags); + if(err != kvImageNoError) { + NSLog(@"Failed to composite kvImage422YpCbYpCr8: %ld", err); + goto end_422; + } + yuyvBuffer.width *= 2; + + err = vImageConvert_422YpCbYpCr8ToARGB8888(&yuyvBuffer, + &argbBuffer, + &convInfo, + permuteMap, + 255, + kvImageNoFlags); + if(err != kvImageNoError) { + goto end_422; + } + end_422: + free(y1BufferData); + free(y2BufferData); + free(yuyvBufferData); + if(err == kvImageNoError) { + break; + } else { + goto end_all; + } + } + } + + if(hasAlpha) { // alpha + vImage_Buffer alphaBuffer = {0}; + if(avif->alphaRange == AVIF_RANGE_LIMITED) { + float* floatAlphaBufferData = NULL; + floatAlphaBufferData = calloc(avif->width * avif->height, sizeof(float)); + scaledAlphaBufferData = calloc(avif->width * avif->height, sizeof(uint8_t)); + if(floatAlphaBufferData == NULL || scaledAlphaBufferData == NULL) { + err = kvImageMemoryAllocationError; + goto end_prepare_alpha; + } + vImage_Buffer origAlphaBuffer = { + .data = avif->alphaPlane, + .width = avif->width, + .height = avif->height, + .rowBytes = avif->alphaRowBytes, + }; + vImage_Buffer floatAlphaBuffer = { + .data = floatAlphaBufferData, + .width = avif->width, + .height = avif->height, + .rowBytes = avif->width * sizeof(float), + }; + alphaBuffer.width = avif->width; + alphaBuffer.height = avif->height; + alphaBuffer.data = scaledAlphaBufferData; + alphaBuffer.rowBytes = avif->width * sizeof(uint8_t); + err = vImageConvert_Planar8toPlanarF(&origAlphaBuffer, &floatAlphaBuffer, 255.0f, 0.0f, kvImageNoFlags); + if(err != kvImageNoError) { + NSLog(@"Failed to convert alpha planes from uint8 to float: %ld", err); + goto end_prepare_alpha; + } + err = vImageConvert_PlanarFtoPlanar8(&floatAlphaBuffer, &alphaBuffer, 235.0f, 16.0f, kvImageNoFlags); + if(err != kvImageNoError) { + NSLog(@"Failed to convert alpha planes from float to uint8: %ld", err); + goto end_prepare_alpha; + } + end_prepare_alpha: + free(floatAlphaBufferData); + if(err != kvImageNoError) { + goto end_alpha; + } + } else { + alphaBuffer.width = avif->width; + alphaBuffer.height = avif->height; + alphaBuffer.data = avif->alphaPlane; + alphaBuffer.rowBytes = avif->alphaRowBytes; + } + if(monochrome) { // alpha_mono + uint8_t* tmpBufferData = NULL; + uint8_t* monoBufferData = NULL; + tmpBufferData = calloc(avif->width, sizeof(uint8_t)); + monoBufferData = calloc(avif->width * avif->height, sizeof(uint8_t)); + if(tmpBufferData == NULL || monoBufferData == NULL) { + goto end_alpha_mono; + } + vImage_Buffer tmpBuffer = { + .data = tmpBufferData, + .width = avif->width, + .height = avif->height, + .rowBytes = 0, + }; + vImage_Buffer 
monoBuffer = { + .data = monoBufferData, + .width = avif->width, + .height = avif->height, + .rowBytes = avif->width, + }; + err = vImageConvert_ARGB8888toPlanar8(&argbBuffer, &tmpBuffer, &tmpBuffer, &monoBuffer, &tmpBuffer, kvImageNoFlags); + if(err != kvImageNoError) { + NSLog(@"Failed to convert ARGB to A_G_: %ld", err); + goto end_alpha_mono; + } + err = vImageConvert_PlanarToChunky8((const vImage_Buffer*[]){&alphaBuffer, &monoBuffer}, + (void*[]){resultBuffer.data, resultBuffer.data + 1}, + 2 /* channelCount */, 2 /* destStrideBytes */, + resultBuffer.width, resultBuffer.height, + resultBuffer.rowBytes, kvImageNoFlags); + if(err != kvImageNoError) { + NSLog(@"Failed to combine mono and alpha: %ld", err); + goto end_alpha_mono; + } + result = CreateImageFromBuffer(avif, &resultBuffer); + resultBufferData = NULL; + end_alpha_mono: + free(tmpBufferData); + free(monoBufferData); + goto end_alpha; + } else { // alpha_color + err = vImageOverwriteChannels_ARGB8888(&alphaBuffer, &argbBuffer, &argbBuffer, 0x8, kvImageNoFlags); + if(err != kvImageNoError) { + NSLog(@"Failed to overwrite alpha: %ld", err); + goto end_alpha_color; + } + result = CreateImageFromBuffer(avif, &argbBuffer); + resultBufferData = NULL; + end_alpha_color: + goto end_alpha; + } + end_alpha: + goto end_all; + } else { // no_alpha + if(monochrome) { // no_alpha_mono + uint8_t* tmpBufferData = NULL; + tmpBufferData = calloc(avif->width, sizeof(uint8_t)); + if(tmpBufferData == NULL){ + goto end_no_alpha_mono; + } + vImage_Buffer tmpBuffer = { + .data = tmpBufferData, + .width = avif->width, + .height = avif->height, + .rowBytes = 0, + }; + err = vImageConvert_ARGB8888toPlanar8(&argbBuffer, &tmpBuffer, &tmpBuffer, &resultBuffer, &tmpBuffer, kvImageNoFlags); + if(err != kvImageNoError) { + NSLog(@"Failed to convert ARGB to B(Mono): %ld", err); + goto end_no_alpha_mono; + } + result = CreateImageFromBuffer(avif, &resultBuffer); + resultBufferData = NULL; + end_no_alpha_mono: + free(tmpBufferData); + goto end_no_alpha; + } else { // no_alpha_color + err = vImageConvert_ARGB8888toRGB888(&argbBuffer, &resultBuffer, kvImageNoFlags); + if(err != kvImageNoError) { + NSLog(@"Failed to convert ARGB to RGB: %ld", err); + goto end_no_alpha_color; + } + result = CreateImageFromBuffer(avif, &resultBuffer); + resultBufferData = NULL; + end_no_alpha_color: + goto end_no_alpha; + } + end_no_alpha: + goto end_all; + } + +end_all: + free(resultBufferData); + free(argbBufferData); + free(dummyCbData); + free(dummyCrData); + free(scaledAlphaBufferData); + return result; +} + +// Convert 10/12bit AVIF image into RGB16U/ARGB16U/Mono16U/MonoA16U +static CGImageRef CreateCGImage16U(avifImage * avif) { + CGImageRef result = NULL; + uint16_t* resultBufferData = NULL; + uint16_t* argbBufferData = NULL; + uint16_t* ayuvBufferData = NULL; + uint16_t* scaledAlphaBufferData = NULL; + uint16_t* dummyCbData = NULL; + uint16_t* dummyCrData = NULL; + uint16_t* dummyAlphaData = NULL; + + vImage_Error err = kvImageNoError; + + // image properties + BOOL const monochrome = avif->yuvPlanes[1] == NULL || avif->yuvPlanes[2] == NULL; + BOOL const hasAlpha = avif->alphaPlane != NULL; + size_t const components = (monochrome ? 1 : 3) + (hasAlpha ? 
1 : 0); + + // setup conversion info + avifReformatState state = {0}; + vImage_YpCbCrToARGBMatrix matrix = {0}; + vImage_YpCbCrPixelRange pixelRange = {0}; + SetupConversionInfo(avif, &state, &matrix, &pixelRange); + + vImage_YpCbCrToARGB convInfo = {0}; + + resultBufferData = calloc(components * avif->width * avif->height, sizeof(uint16_t)); + ayuvBufferData = calloc(avif->width * avif->height * 4, sizeof(uint16_t)); + if(resultBufferData == NULL || ayuvBufferData == NULL) { + goto end_all; + } + + BOOL const useTempBuffer = monochrome || !hasAlpha; // if and only if the image is not ARGB + + if(useTempBuffer) { + argbBufferData = calloc(avif->width * avif->height * 4, sizeof(uint16_t)); + if(argbBufferData == NULL) { + goto end_all; + } + } + + vImage_Buffer resultBuffer = { + .data = resultBufferData, + .width = avif->width, + .height = avif->height, + .rowBytes = avif->width * components * sizeof(uint16_t), + }; + + vImage_Buffer argbBuffer = { + .data = useTempBuffer ? argbBufferData : resultBufferData, + .width = avif->width, + .height = avif->height, + .rowBytes = avif->width * 4 * sizeof(uint16_t), + }; + + vImage_Buffer ayuvBuffer = { + .data = ayuvBufferData, + .width = avif->width, + .height = avif->height, + .rowBytes = avif->width * 4 * sizeof(uint16_t), + }; + + vImage_Buffer origY = { + .data = avif->yuvPlanes[AVIF_CHAN_Y], + .rowBytes = avif->yuvRowBytes[AVIF_CHAN_Y], + .width = avif->width, + .height = avif->height, + }; + + vImage_Buffer origCb = { + .data = avif->yuvPlanes[AVIF_CHAN_U], + .rowBytes = avif->yuvRowBytes[AVIF_CHAN_U], + .width = (avif->width+state.formatInfo.chromaShiftX) >> state.formatInfo.chromaShiftX, + .height = (avif->height+state.formatInfo.chromaShiftY) >> state.formatInfo.chromaShiftY, + }; + + if(!origCb.data) { // allocate dummy data to convert monochrome images. + vImagePixelCount origHeight = origCb.height; + origCb.rowBytes = origCb.width * sizeof(uint16_t); + dummyCbData = calloc(origCb.width, sizeof(uint16_t)); + if(!dummyCbData) { + goto end_all; + } + origCb.data = dummyCbData; + origCb.height = 1; + // fill zero values. + err = vImageOverwriteChannelsWithScalar_Planar16U(pixelRange.CbCr_bias, &origCb, kvImageNoFlags); + if (err != kvImageNoError) { + NSLog(@"Failed to fill dummy Cr buffer: %ld", err); + goto end_all; + } + origCb.rowBytes = 0; + origCb.height = origHeight; + } + + vImage_Buffer origCr = { + .data = avif->yuvPlanes[AVIF_CHAN_V], + .rowBytes = avif->yuvRowBytes[AVIF_CHAN_V], + .width = (avif->width+state.formatInfo.chromaShiftX) >> state.formatInfo.chromaShiftX, + .height = (avif->height+state.formatInfo.chromaShiftY) >> state.formatInfo.chromaShiftY, + }; + + if(!origCr.data) { // allocate dummy data to convert monochrome images. + vImagePixelCount origHeight = origCr.height; + origCr.rowBytes = origCr.width * sizeof(uint16_t); + dummyCrData = calloc(origCr.width, sizeof(uint16_t)); + if(!dummyCrData) { + goto end_all; + } + origCr.data = dummyCrData; + origCr.height = 1; + // fill zero values. 
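+        // (in fact the single dummy row is filled with pixelRange.CbCr_bias, the neutral chroma
+        // value for this bit depth; rowBytes is then reset to 0 so the row is reused for the
+        // whole plane)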
+ err = vImageOverwriteChannelsWithScalar_Planar16U(pixelRange.CbCr_bias, &origCr, kvImageNoFlags); + if (err != kvImageNoError) { + NSLog(@"Failed to fill dummy Cr buffer: %ld", err); + goto end_all; + } + origCr.rowBytes = 0; + origCr.height = origHeight; + } + + vImage_Buffer origAlpha = {0}; + if(hasAlpha) { + float* floatAlphaBufferData = NULL; + floatAlphaBufferData = calloc(avif->width * avif->height, sizeof(float)); + scaledAlphaBufferData = calloc(avif->width * avif->height, sizeof(uint16_t)); + if(floatAlphaBufferData == NULL || scaledAlphaBufferData == NULL) { + err = kvImageMemoryAllocationError; + goto end_prepare_alpha; + } + origAlpha.data = avif->alphaPlane; + origAlpha.width = avif->width; + origAlpha.height = avif->height; + origAlpha.rowBytes = avif->alphaRowBytes; + + vImage_Buffer floatAlphaBuffer = { + .data = floatAlphaBufferData, + .width = avif->width, + .height = avif->height, + .rowBytes = avif->width * sizeof(float), + }; + vImage_Buffer scaledAlphaBuffer = { + .data = scaledAlphaBufferData, + .width = avif->width, + .height = avif->height, + .rowBytes = avif->width * sizeof(uint16_t), + }; + float offset = 0.0f; + float rangeMax = 0.0f; + if(avif->depth == 10) { + if(avif->alphaRange == AVIF_RANGE_LIMITED) { + offset = 64.0f; + rangeMax = 940.0f; + } else { + offset = 0.0f; + rangeMax = 1023.0f; + } + } else if(avif->depth == 12) { + if(avif->alphaRange == AVIF_RANGE_LIMITED) { + offset = 256.0f; + rangeMax = 3760.0f; + } else { + offset = 0.0f; + rangeMax = 4095.0f; + } + } + float const scale = (float)(rangeMax - offset) / 65535.0f; + err = vImageConvert_16UToF(&origAlpha, &floatAlphaBuffer, 0.0f, 1.0f, kvImageNoFlags); + if(err != kvImageNoError) { + NSLog(@"Failed to convert alpha planes from uint16 to float: %ld", err); + goto end_prepare_alpha; + } + err = vImageConvert_FTo16U(&floatAlphaBuffer, &scaledAlphaBuffer, offset, scale, kvImageNoFlags); + if(err != kvImageNoError) { + NSLog(@"Failed to convert alpha planes from uint16 to float: %ld", err); + goto end_prepare_alpha; + } + origAlpha.data = scaledAlphaBufferData; + origAlpha.rowBytes = avif->width * sizeof(uint16_t); + end_prepare_alpha: + free(floatAlphaBufferData); + if(err != kvImageNoError) { + goto end_all; + } + } else { + // allocate dummy data to convert monochrome images. 
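+        // (this branch covers every image without an alpha plane, not only monochrome ones:
+        // a single fully opaque row (0xffff) is synthesized so the planar inputs can still be
+        // packed via vImageConvert_Planar16UtoARGB16U, and the channel is discarded again in
+        // the no-alpha paths at the end)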
+ origAlpha.rowBytes = avif->width * sizeof(uint16_t); + dummyAlphaData = calloc(avif->width, sizeof(uint16_t)); + if(!dummyAlphaData) { + goto end_all; + } + origAlpha.data = dummyAlphaData; + origAlpha.width = avif->width; + origAlpha.height = 1; + err = vImageOverwriteChannelsWithScalar_Planar16U(0xffff, &origAlpha, kvImageNoFlags); + if (err != kvImageNoError) { + NSLog(@"Failed to fill dummy alpha buffer: %ld", err); + goto end_all; + } + origAlpha.rowBytes = 0; + origAlpha.height = avif->height; + }; + + + uint8_t const permuteMap[4] = {0, 1, 2, 3}; + switch(avif->yuvFormat) { + case AVIF_PIXEL_FORMAT_NONE: + NSLog(@"Invalid pixel format."); + goto end_all; + case AVIF_PIXEL_FORMAT_YUV420: + case AVIF_PIXEL_FORMAT_YUV422: + case AVIF_PIXEL_FORMAT_YV12: + { + uint16_t* scaledCbData = NULL; + uint16_t* scaledCrData = NULL; + void* scaleTempBuff = NULL; + + scaledCbData = calloc(avif->width * avif->height * 4, sizeof(uint16_t)); + scaledCrData = calloc(avif->width * avif->height * 4, sizeof(uint16_t)); + if(scaledCbData == NULL || scaledCrData == NULL) { + err = kvImageMemoryAllocationError; + goto end_420; + } + vImage_Buffer scaledCb = { + .data = scaledCbData, + .width = avif->width, + .height = avif->height, + .rowBytes = avif->width * 4 * sizeof(uint16_t), + }; + vImage_Buffer scaledCr = { + .data = scaledCrData, + .width = avif->width, + .height = avif->height, + .rowBytes = avif->width * 4 * sizeof(uint16_t), + }; + vImage_Error scaleTempBuffSize = vImageScale_Planar16U(&origCb, &scaledCb, NULL, kvImageGetTempBufferSize); + if(scaleTempBuffSize < 0) { + NSLog(@"Failed to get temp buffer size: %ld", scaleTempBuffSize); + goto end_420; + } + scaleTempBuff = malloc(scaleTempBuffSize); + if(scaleTempBuff == NULL) { + err = kvImageMemoryAllocationError; + goto end_420; + } + // upscale Cb + err = vImageScale_Planar16U(&origCb, &scaledCb, scaleTempBuff, kvImageNoFlags); + if(err != kvImageNoError) { + NSLog(@"Failed to scale Cb: %ld", err); + goto end_420; + } + // upscale Cr + err = vImageScale_Planar16U(&origCr, &scaledCr, scaleTempBuff, kvImageNoFlags); + if(err != kvImageNoError) { + NSLog(@"Failed to scale Cb: %ld", err); + goto end_420; + } + err = vImageConvert_Planar16UtoARGB16U(&origAlpha, &origY, &scaledCb, &scaledCr, &ayuvBuffer, kvImageNoFlags); + if(err != kvImageNoError) { + NSLog(@"Failed to composite kvImage444AYpCbCr16: %ld", err); + goto end_420; + } + end_420: + free(scaledCrData); + free(scaledCbData); + free(scaleTempBuff); + if(err == kvImageNoError) { + break; + } else { + goto end_all; + } + } + case AVIF_PIXEL_FORMAT_YUV444: + { + err = vImageConvert_Planar16UtoARGB16U(&origAlpha, &origY, &origCb, &origCr, &ayuvBuffer, kvImageNoFlags); + if(err != kvImageNoError) { + NSLog(@"Failed to composite kvImage444AYpCbCr16: %ld", err); + goto end_444; + } + end_444: + if(err == kvImageNoError) { + break; + } else { + goto end_all; + } + } + } + free(dummyCbData); + dummyCbData = NULL; + free(dummyCrData); + dummyCrData = NULL; + free(dummyAlphaData); + dummyAlphaData = NULL; + free(scaledAlphaBufferData); + scaledAlphaBufferData = NULL; + + err = vImageConvert_YpCbCrToARGB_GenerateConversion(&matrix, + &pixelRange, + &convInfo, + kvImage444AYpCbCr16, + kvImageARGB16U, + kvImageNoFlags); + if(err != kvImageNoError) { + NSLog(@"Failed to setup conversion: %ld", err); + goto end_all; + } + err = vImageConvert_444AYpCbCr16ToARGB16U(&ayuvBuffer, + &argbBuffer, + &convInfo, + permuteMap, + kvImageNoFlags); + if(err != kvImageNoError) { + NSLog(@"Failed to convert to ARGB16U: 
%ld", err); + goto end_all; + } + + if(hasAlpha) { // alpha + if(monochrome){ // alpha_mono + uint16_t* tmpBufferData = NULL; + uint16_t* alphaBufferData = NULL; + uint16_t* monoBufferData = NULL; + uint8_t* alphaBuffer1Data = NULL; + uint8_t* alphaBuffer2Data = NULL; + uint8_t* monoBuffer1Data = NULL; + uint8_t* monoBuffer2Data = NULL; + + tmpBufferData = calloc(avif->width, sizeof(uint16_t)); + alphaBufferData = calloc(avif->width * avif->height, sizeof(uint16_t)); + monoBufferData = calloc(avif->width * avif->height, sizeof(uint16_t)); + + monoBuffer1Data = calloc(avif->width * avif->height, sizeof(uint8_t)); + monoBuffer2Data = calloc(avif->width * avif->height, sizeof(uint8_t)); + + alphaBuffer1Data = calloc(avif->width * avif->height, sizeof(uint8_t)); + alphaBuffer2Data = calloc(avif->width * avif->height, sizeof(uint8_t)); + + if(tmpBufferData == NULL || + alphaBufferData == NULL || + monoBufferData == NULL || + alphaBuffer1Data == NULL || + alphaBuffer2Data == NULL || + monoBuffer1Data == NULL || + monoBuffer2Data == NULL){ + goto end_alpha_mono; + } + + vImage_Buffer tmpBuffer = { + .data = tmpBufferData, + .width = avif->width, + .height = avif->height, + .rowBytes = 0, + }; + vImage_Buffer alphaBuffer = { + .data = alphaBufferData, + .width = avif->width, + .height = avif->height, + .rowBytes = avif->width * sizeof(uint16_t), + }; + vImage_Buffer monoBuffer = { + .data = monoBufferData, + .width = avif->width, + .height = avif->height, + .rowBytes = avif->width * sizeof(uint16_t), + }; + vImage_Buffer monoBuffer1 = { + .data = monoBuffer1Data, + .width = avif->width, + .height = avif->height, + .rowBytes = avif->width * sizeof(uint8_t), + }; + vImage_Buffer monoBuffer2 = { + .data = monoBuffer2Data, + .width = avif->width, + .height = avif->height, + .rowBytes = avif->width * sizeof(uint8_t), + }; + vImage_Buffer alphaBuffer1 = { + .data = alphaBuffer1Data, + .width = avif->width, + .height = avif->height, + .rowBytes = avif->width * sizeof(uint8_t), + }; + vImage_Buffer alphaBuffer2 = { + .data = alphaBuffer2Data, + .width = avif->width, + .height = avif->height, + .rowBytes = avif->width * sizeof(uint8_t), + }; + + err = vImageConvert_ARGB16UtoPlanar16U(&argbBuffer, &alphaBuffer, &tmpBuffer, &monoBuffer, &tmpBuffer, kvImageNoFlags); + if(err != kvImageNoError) { + NSLog(@"Failed to convert ARGB to Mono: %ld", err); + goto end_alpha_mono; + } + err = vImageConvert_ChunkyToPlanar8((const void*[]){monoBuffer.data, monoBuffer.data + 1}, + (const vImage_Buffer*[]){&monoBuffer1, &monoBuffer2}, + 2 /* channelCount */, 2 /* src srcStrideBytes */, + monoBuffer.width, monoBuffer.height, + monoBuffer.rowBytes, kvImageNoFlags); + if(err != kvImageNoError) { + NSLog(@"Failed to split Mono16: %ld", err); + goto end_alpha_mono; + } + + err = vImageConvert_ChunkyToPlanar8((const void*[]){alphaBuffer.data, alphaBuffer.data + 1}, + (const vImage_Buffer*[]){&alphaBuffer1, &alphaBuffer2}, + 2 /* channelCount */, 2 /* src srcStrideBytes */, + alphaBuffer.width, alphaBuffer.height, + alphaBuffer.rowBytes, kvImageNoFlags); + if(err != kvImageNoError) { + NSLog(@"Failed to split Mono16: %ld", err); + goto end_alpha_mono; + } + + err = vImageConvert_Planar8toARGB8888(&alphaBuffer1, &alphaBuffer2, &monoBuffer1, &monoBuffer2, &resultBuffer, kvImageNoFlags); + if(err != kvImageNoError) { + free(resultBufferData); + NSLog(@"Failed to convert Planar Alpha + Mono to MonoA: %ld", err); + goto end_alpha_mono; + } + result = CreateImageFromBuffer(avif, &resultBuffer); + resultBufferData = NULL; + 
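+            // CreateImageFromBuffer hands resultBufferData to a CGDataProvider that frees it when
+            // the image is released, so the local pointer is cleared to keep the shared cleanup at
+            // end_all from double-freeing it.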
end_alpha_mono: + free(tmpBufferData); + free(alphaBufferData); + free(monoBufferData); + free(alphaBuffer1Data); + free(alphaBuffer2Data); + free(monoBuffer1Data); + free(monoBuffer2Data); + goto end_alpha; + }else{ // alpha_color + result = CreateImageFromBuffer(avif, &resultBuffer); + resultBufferData = NULL; + end_alpha_color: + goto end_alpha; + } + end_alpha: + goto end_all; + } else { // no_alpha + if(monochrome) { // no_alpha_mono + uint16_t* tmpBufferData = NULL; + tmpBufferData = calloc(avif->width, sizeof(uint16_t)); + if(tmpBufferData == NULL) { + goto end_no_alpha_mono; + } + vImage_Buffer tmpBuffer = { + .data = tmpBufferData, + .width = avif->width, + .height = avif->height, + .rowBytes = 0, + }; + err = vImageConvert_ARGB16UtoPlanar16U(&argbBuffer, &tmpBuffer, &tmpBuffer, &resultBuffer, &tmpBuffer, kvImageNoFlags); + if(err != kvImageNoError) { + NSLog(@"Failed to convert ARGB to Mono: %ld", err); + goto end_no_alpha_mono; + } + result = CreateImageFromBuffer(avif, &resultBuffer); + resultBufferData = NULL; + end_no_alpha_mono: + free(tmpBufferData); + goto end_no_alpha; + } else { // no_alpha_color + err = vImageConvert_ARGB16UtoRGB16U(&argbBuffer, &resultBuffer, kvImageNoFlags); + if(err != kvImageNoError) { + NSLog(@"Failed to convert ARGB to RGB: %ld", err); + goto end_no_alpha_color; + } + result = CreateImageFromBuffer(avif, &resultBuffer); + resultBufferData = NULL; + end_no_alpha_color: + goto end_no_alpha; + } + end_no_alpha: + goto end_all; + } +end_all: + free(resultBufferData); + free(argbBufferData); + free(ayuvBufferData); + free(scaledAlphaBufferData); + free(dummyCbData); + free(dummyCrData); + free(dummyAlphaData); + return result; +} + +// convert planar to ARGB/RGB +CGImageRef CreateCGImageFromAVIF(avifImage * avif) { + return avifImageUsesU16(avif) ? CreateCGImage16U(avif) : CreateCGImage8(avif); +} diff --git a/SDWebImageAVIFCoder/Classes/SDImageAVIFCoder.m b/SDWebImageAVIFCoder/Classes/SDImageAVIFCoder.m index 056ea8a..36ead8a 100644 --- a/SDWebImageAVIFCoder/Classes/SDImageAVIFCoder.m +++ b/SDWebImageAVIFCoder/Classes/SDImageAVIFCoder.m @@ -1,6 +1,6 @@ // // SDImageAVIFCoder.m -// SDWebImageHEIFCoder +// SDWebImageAVIFCoder // // Created by lizhuoli on 2018/5/8. 
// @@ -13,1560 +13,7 @@ #import "avif/avif.h" #endif -static void FreeImageData(void *info, const void *data, size_t size) { - free((void *)data); -} - -static void CalcWhitePoint(uint16_t const colorPrimaries, vImageWhitePoint* const white) { - float primaries[8]; - avifNclxColourPrimariesGetValues(colorPrimaries, primaries); - white->white_x = primaries[6]; - white->white_y = primaries[7]; -} - -static void CalcRGBPrimaries(uint16_t const colorPrimaries, vImageRGBPrimaries* const prim) { - float primaries[8]; - avifNclxColourPrimariesGetValues(colorPrimaries, primaries); - prim->red_x = primaries[0]; - prim->red_y = primaries[1]; - prim->green_x = primaries[2]; - prim->green_y = primaries[3]; - prim->blue_x = primaries[4]; - prim->blue_y = primaries[5]; - prim->white_x = primaries[6]; - prim->white_y = primaries[7]; -} - -static void CalcTransferFunction(uint16_t const transferCharacteristics, vImageTransferFunction* const tf) { - // See: https://www.itu.int/rec/T-REC-H.273/en - static const float alpha = 1.099296826809442f; - static const float beta = 0.018053968510807f; - /* - // R' = c0 * pow( c1 * R + c2, gamma ) + c3, (R >= cutoff) - // R' = c4 * R + c5 (R < cutoff) - */ - - switch(transferCharacteristics) { - case AVIF_NCLX_TRANSFER_CHARACTERISTICS_GAMMA28: // 5 - tf->cutoff = -INFINITY; - tf->c0 = 1.0f; - tf->c1 = 1.0f; - tf->c2 = 0.0f; - tf->c3 = 0.0f; - tf->c4 = 0.0f; - tf->c5 = 0.0f; - tf->gamma = 1.0f/2.8f; - break; - case AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT709: // 1 - case AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT601: // 6 - case AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT2020_10BIT: // 14 - case AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT2020_12BIT: // 15 - tf->cutoff = beta; - // - tf->c0 = alpha; - tf->c1 = 1.0f; - tf->c2 = 0.0f; - tf->gamma = 0.45f; - tf->c3 = -(alpha - 1); - // - tf->c4 = 4.5f; - tf->c5 = 0.0f; - break; - case AVIF_NCLX_TRANSFER_CHARACTERISTICS_ST240: // 7 - tf->cutoff = beta; - // - tf->c0 = alpha; - tf->c1 = 1.0f; - tf->c2 = 0.0f; - tf->gamma = 0.45f; - tf->c3 = -(alpha - 1); - // - tf->c4 = 4.0f; - tf->c5 = 0.0f; - break; - case AVIF_NCLX_TRANSFER_CHARACTERISTICS_LINEAR: // 8 - tf->cutoff = INFINITY; - // - tf->c0 = 1.0f; - tf->c1 = 1.0f; - tf->c2 = 0.0f; - tf->gamma = 1.0f; - tf->c3 = 0.0f; - // - tf->c4 = 4.0f; - tf->c5 = 0.0f; - break; - case AVIF_NCLX_TRANSFER_CHARACTERISTICS_IEC61966: // 11 - tf->cutoff = beta; - // - tf->c0 = alpha; - tf->c1 = 1.0f; - tf->c2 = 0.0f; - tf->gamma = 0.45f; - tf->c3 = -(alpha - 1); - // - tf->c4 = 4.5f; - tf->c5 = 0.0f; - break; - case AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT1361_EXTENDED: // 12 - tf->cutoff = beta; - // - tf->c0 = alpha; - tf->c1 = 1.0f; - tf->c2 = 0.0f; - tf->gamma = 0.45f; - tf->c3 = -(alpha - 1); - // - tf->c4 = 4.5f; - tf->c5 = 0.0f; - break; - case AVIF_NCLX_TRANSFER_CHARACTERISTICS_SRGB: // 13 - tf->cutoff = beta; - // - tf->c0 = alpha; - tf->c1 = 1.0f; - tf->c2 = 0.0f; - tf->gamma = 1.0f/2.4f; - tf->c3 = -(alpha - 1); - // - tf->c4 = 12.92f; - tf->c5 = 0.0f; - break; - case AVIF_NCLX_TRANSFER_CHARACTERISTICS_ST428: // 17 - tf->cutoff = -INFINITY; - // - tf->c0 = 1.0f; - tf->c1 = 48.0f / 52.37f; - tf->c2 = 0.0f; - tf->gamma = 1.0f/2.6f; - tf->c3 = 0.0f; - // - tf->c4 = 1.0f; - tf->c5 = 0.0f; - break; - // Can't be represented by vImageTransferFunction. Use gamma 2.2 as a fallback. 
- case AVIF_NCLX_TRANSFER_CHARACTERISTICS_ST2084: // 16 - case AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT2100_HLG: // 18 - case AVIF_NCLX_TRANSFER_CHARACTERISTICS_LOG_100_1: // 9 - case AVIF_NCLX_TRANSFER_CHARACTERISTICS_LOG_100_SQRT: // 10 - // - case AVIF_NCLX_TRANSFER_CHARACTERISTICS_UNKNOWN: // 0 - case AVIF_NCLX_TRANSFER_CHARACTERISTICS_UNSPECIFIED: // 2 - case AVIF_NCLX_TRANSFER_CHARACTERISTICS_GAMMA22: // 4 - default: - tf->cutoff = -INFINITY; - tf->c0 = 1.0f; - tf->c1 = 1.0f; - tf->c2 = 0.0f; - tf->c3 = 0.0f; - tf->c4 = 0.0f; - tf->c5 = 0.0f; - tf->gamma = 1.0f/2.2f; - break; - } -} -static CGColorSpaceRef CreateColorSpaceMono(uint16_t const colorPrimaries, uint16_t const transferCharacteristics) { - if (@available(macOS 10.10, iOS 8.0, tvOS 8.0, *)) { - vImage_Error err; - vImageWhitePoint white; - vImageTransferFunction transfer; - CalcWhitePoint(colorPrimaries, &white); - CalcTransferFunction(transferCharacteristics, &transfer); - CGColorSpaceRef colorSpace = vImageCreateMonochromeColorSpaceWithWhitePointAndTransferFunction(&white, &transfer, kCGRenderingIntentDefault, kvImagePrintDiagnosticsToConsole, &err); - if(err != kvImageNoError) { - NSLog(@"[BUG] Failed to create monochrome color space: %ld", err); - if(colorSpace != NULL) { - CGColorSpaceRelease(colorSpace); - } - return NULL; - } - return colorSpace; - }else{ - return NULL; - } -} - -static CGColorSpaceRef CreateColorSpaceRGB(uint16_t const colorPrimaries, uint16_t const transferCharacteristics) { - if (@available(macOS 10.10, iOS 8.0, tvOS 8.0, *)) { - vImage_Error err; - vImageRGBPrimaries primaries; - vImageTransferFunction transfer; - CalcRGBPrimaries(colorPrimaries, &primaries); - CalcTransferFunction(transferCharacteristics, &transfer); - CGColorSpaceRef colorSpace = vImageCreateRGBColorSpaceWithPrimariesAndTransferFunction(&primaries, &transfer, kCGRenderingIntentDefault, kvImagePrintDiagnosticsToConsole, &err); - if(err != kvImageNoError) { - NSLog(@"[BUG] Failed to create monochrome color space: %ld", err); - if(colorSpace != NULL) { - CGColorSpaceRelease(colorSpace); - } - return NULL; - } - return colorSpace; - }else{ - return NULL; - } -} - -static void CalcColorSpaceMono(avifImage * avif, CGColorSpaceRef* ref, BOOL* shouldRelease) { - static CGColorSpaceRef defaultColorSpace; - { - static dispatch_once_t onceToken; - dispatch_once(&onceToken, ^{ - defaultColorSpace = CGColorSpaceCreateDeviceGray(); - }); - } - if(avif->profileFormat == AVIF_PROFILE_FORMAT_NONE) { - *ref = defaultColorSpace; - *shouldRelease = FALSE; - return; - } - if(avif->profileFormat == AVIF_PROFILE_FORMAT_ICC) { - if(avif->icc.data && avif->icc.size) { - if(@available(macOS 10.12, iOS 10.0, tvOS 10.0, *)) { - *ref = CGColorSpaceCreateWithICCData(avif->icc.data); - *shouldRelease = TRUE; - }else{ - NSData* iccData = [NSData dataWithBytes:avif->icc.data length:avif->icc.size]; - *ref = CGColorSpaceCreateWithICCProfile((__bridge CFDataRef)iccData); - *shouldRelease = TRUE; - } - return; - } - *ref = defaultColorSpace; - *shouldRelease = FALSE; - return; - } - uint16_t const colorPrimaries = avif->nclx.colourPrimaries; - uint16_t const transferCharacteristics = avif->nclx.transferCharacteristics; - if((colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_UNKNOWN || - colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_UNSPECIFIED) && - (transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_UNKNOWN || - transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_UNSPECIFIED)) { - *ref = defaultColorSpace; - *shouldRelease = FALSE; - return; - } - 
if(colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_SRGB && - transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_SRGB) { - static CGColorSpaceRef sRGB = NULL; - static dispatch_once_t onceToken; - dispatch_once(&onceToken, ^{ - sRGB = CreateColorSpaceMono(colorPrimaries, transferCharacteristics); - if(sRGB == NULL) { - sRGB = defaultColorSpace; - } - }); - *ref = sRGB; - *shouldRelease = FALSE; - return; - } - if(colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_BT709 && - transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT709) { - static CGColorSpaceRef bt709 = NULL; - static dispatch_once_t onceToken; - dispatch_once(&onceToken, ^{ - bt709 = CreateColorSpaceMono(colorPrimaries, transferCharacteristics); - if(bt709 == NULL) { - bt709 = defaultColorSpace; - } - }); - *ref = bt709; - *shouldRelease = FALSE; - return; - } - if(colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_BT2020 && - (transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT2020_10BIT || - transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT2020_12BIT)) { - static CGColorSpaceRef bt2020 = NULL; - static dispatch_once_t onceToken; - dispatch_once(&onceToken, ^{ - bt2020 = CreateColorSpaceMono(colorPrimaries, transferCharacteristics); - if(bt2020 == NULL) { - bt2020 = defaultColorSpace; - } - }); - *ref = bt2020; - *shouldRelease = FALSE; - return; - } - if(colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_P3 && - transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_SRGB) { - static CGColorSpaceRef p3 = NULL; - static dispatch_once_t onceToken; - dispatch_once(&onceToken, ^{ - p3 = CreateColorSpaceMono(colorPrimaries, transferCharacteristics); - if(p3 == NULL) { - p3 = defaultColorSpace; - } - }); - *ref = p3; - *shouldRelease = FALSE; - return; - } - - *ref = CreateColorSpaceMono(colorPrimaries, transferCharacteristics); - if(*ref != NULL) { - *shouldRelease = TRUE; - } else { - *ref = defaultColorSpace; - *shouldRelease = FALSE; - } -} - -static void CalcColorSpaceRGB(avifImage * avif, CGColorSpaceRef* ref, BOOL* shouldRelease) { - static CGColorSpaceRef defaultColorSpace = NULL; - { - static dispatch_once_t onceToken; - dispatch_once(&onceToken, ^{ - defaultColorSpace = CGColorSpaceCreateDeviceRGB(); - }); - } - if(avif->profileFormat == AVIF_PROFILE_FORMAT_NONE) { - *ref = defaultColorSpace; - *shouldRelease = FALSE; - return; - } - if(avif->profileFormat == AVIF_PROFILE_FORMAT_ICC) { - if(avif->icc.data && avif->icc.size) { - if(@available(macOS 10.12, iOS 10.0, tvOS 10.0, *)) { - *ref = CGColorSpaceCreateWithICCData(avif->icc.data); - *shouldRelease = TRUE; - }else{ - NSData* iccData = [NSData dataWithBytes:avif->icc.data length:avif->icc.size]; - *ref = CGColorSpaceCreateWithICCProfile((__bridge CFDataRef)iccData); - *shouldRelease = TRUE; - } - return; - } - *ref = defaultColorSpace; - *shouldRelease = FALSE; - return; - } - uint16_t const colorPrimaries = avif->nclx.colourPrimaries; - uint16_t const transferCharacteristics = avif->nclx.transferCharacteristics; - if((colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_UNKNOWN || - colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_UNSPECIFIED) && - (transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_UNKNOWN || - transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_UNSPECIFIED)) { - *ref = defaultColorSpace; - *shouldRelease = FALSE; - return; - } - if(colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_BT709 && - transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT709) { - static CGColorSpaceRef bt709 = NULL; - static 
dispatch_once_t onceToken; - dispatch_once(&onceToken, ^{ - if (@available(macOS 10.11, iOS 9.0, tvOS 9.0, *)) { - bt709 = CGColorSpaceCreateWithName(kCGColorSpaceITUR_709); - } else { - bt709 = defaultColorSpace; - } - }); - *ref = bt709; - *shouldRelease = FALSE; - return; - } - if(colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_SRGB && - transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_SRGB) { - static CGColorSpaceRef sRGB = NULL; - static dispatch_once_t onceToken; - dispatch_once(&onceToken, ^{ - if (@available(macOS 10.5, iOS 9.0, tvOS 9.0, *)) { - sRGB = CGColorSpaceCreateWithName(kCGColorSpaceSRGB); - } else { - sRGB = defaultColorSpace; - } - }); - *ref = sRGB; - *shouldRelease = FALSE; - return; - } - if(colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_SRGB && - transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_LINEAR) { - static CGColorSpaceRef sRGBlinear = NULL; - static dispatch_once_t onceToken; - dispatch_once(&onceToken, ^{ - if (@available(macOS 10.12, iOS 10.0, tvOS 10.0, *)) { - sRGBlinear = CGColorSpaceCreateWithName(kCGColorSpaceLinearSRGB); - } else { - sRGBlinear = defaultColorSpace; - } - }); - *ref = sRGBlinear; - *shouldRelease = FALSE; - return; - } - if(colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_BT2020 && - (transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT2020_10BIT || - transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT2020_12BIT)) { - static CGColorSpaceRef bt2020 = NULL; - static dispatch_once_t onceToken; - dispatch_once(&onceToken, ^{ - if (@available(macOS 10.11, iOS 9.0, tvOS 9.0, *)) { - bt2020 = CGColorSpaceCreateWithName(kCGColorSpaceITUR_2020); - } else { - bt2020 = defaultColorSpace; - } - }); - *ref = bt2020; - *shouldRelease = FALSE; - return; - } - if(colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_BT2020 && - transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_LINEAR) { - static CGColorSpaceRef bt2020linear = NULL; - static dispatch_once_t onceToken; - dispatch_once(&onceToken, ^{ - if (@available(macOS 10.14.3, iOS 12.3, tvOS 12.3, *)) { - bt2020linear = CGColorSpaceCreateWithName(kCGColorSpaceExtendedLinearITUR_2020); - } else { - bt2020linear = defaultColorSpace; - } - }); - *ref = bt2020linear; - *shouldRelease = FALSE; - return; - } - if(colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_BT2100 && - transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT2100_HLG) { - static CGColorSpaceRef bt2020hlg = NULL; - static dispatch_once_t onceToken; - dispatch_once(&onceToken, ^{ - if (@available(macOS 10.14.6, iOS 13.0, tvOS 13.0, *)) { - bt2020hlg = CGColorSpaceCreateWithName(kCGColorSpaceITUR_2020_HLG); - } else { - bt2020hlg = defaultColorSpace; - } - }); - *ref = bt2020hlg; - *shouldRelease = FALSE; - return; - } - if(colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_BT2100 && - transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT2100_PQ) { - static CGColorSpaceRef bt2020pq = NULL; - static dispatch_once_t onceToken; - dispatch_once(&onceToken, ^{ - if (@available(macOS 10.14.6, iOS 13.0, tvOS 13.0, *)) { - bt2020pq = CGColorSpaceCreateWithName(kCGColorSpaceITUR_2020_PQ_EOTF); - } else { - bt2020pq = defaultColorSpace; - } - }); - *ref = bt2020pq; - *shouldRelease = FALSE; - return; - } - if(colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_P3 && - transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_SRGB) { - static CGColorSpaceRef p3 = NULL; - static dispatch_once_t onceToken; - dispatch_once(&onceToken, ^{ - if (@available(macOS 10.11.2, iOS 9.3, tvOS 9.3, *)) { - p3 = 
CGColorSpaceCreateWithName(kCGColorSpaceDisplayP3); - } else { - p3 = defaultColorSpace; - } - }); - *ref = p3; - *shouldRelease = FALSE; - return; - } - if(colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_P3 && - transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT2100_HLG) { - static CGColorSpaceRef p3hlg = NULL; - static dispatch_once_t onceToken; - dispatch_once(&onceToken, ^{ - if (@available(macOS 10.14.6, iOS 13.0, tvOS 13.0, *)) { - p3hlg = CGColorSpaceCreateWithName(kCGColorSpaceDisplayP3_HLG); - } else { - p3hlg = defaultColorSpace; - } - }); - - *ref = p3hlg; - *shouldRelease = FALSE; - return; - } - if(colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_P3 && - transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT2100_PQ) { - static CGColorSpaceRef p3pq = NULL; - static dispatch_once_t onceToken; - dispatch_once(&onceToken, ^{ - if (@available(macOS 10.14.6, iOS 13.0, tvOS 13.0, *)) { - p3pq = CGColorSpaceCreateWithName(kCGColorSpaceDisplayP3_PQ_EOTF); - } else { - p3pq = defaultColorSpace; - } - }); - *ref = p3pq; - *shouldRelease = FALSE; - return; - } - if(colorPrimaries == AVIF_NCLX_COLOUR_PRIMARIES_P3 && - transferCharacteristics == AVIF_NCLX_TRANSFER_CHARACTERISTICS_LINEAR) { - static CGColorSpaceRef p3linear = NULL; - static dispatch_once_t onceToken; - dispatch_once(&onceToken, ^{ - if (@available(macOS 10.14.3, iOS 12.3, tvOS 12.3, *)) { - p3linear = CGColorSpaceCreateWithName(kCGColorSpaceExtendedLinearDisplayP3); - } else { - p3linear = defaultColorSpace; - } - }); - *ref = p3linear; - *shouldRelease = FALSE; - return; - } - - *ref = CreateColorSpaceRGB(colorPrimaries, transferCharacteristics); - if(*ref != NULL) { - *shouldRelease = TRUE; - } else { - *ref = defaultColorSpace; - *shouldRelease = FALSE; - } -} - -static CGImageRef CreateImageFromBuffer(avifImage * avif, vImage_Buffer* result) { - BOOL monochrome = avif->yuvPlanes[1] == NULL || avif->yuvPlanes[2] == NULL; - BOOL hasAlpha = avif->alphaPlane != NULL; - BOOL usesU16 = avifImageUsesU16(avif); - size_t components = (monochrome ? 1 : 3) + (hasAlpha ? 1 : 0); - - CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, result->data, result->rowBytes * result->height, FreeImageData); - CGBitmapInfo bitmapInfo = usesU16 ? kCGBitmapByteOrder16Host : kCGBitmapByteOrderDefault; - bitmapInfo |= hasAlpha ? kCGImageAlphaFirst : kCGImageAlphaNone; - - // Calc color space - CGColorSpaceRef colorSpace = NULL; - BOOL shouldReleaseColorSpace = FALSE; - if(monochrome){ - CalcColorSpaceMono(avif, &colorSpace, &shouldReleaseColorSpace); - }else{ - CalcColorSpaceRGB(avif, &colorSpace, &shouldReleaseColorSpace); - } - - CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault; - size_t bitsPerComponent = usesU16 ? 16 : 8; - size_t bitsPerPixel = components * bitsPerComponent; - size_t rowBytes = result->width * components * (usesU16 ? 
sizeof(uint16_t) : sizeof(uint8_t)); - - CGImageRef imageRef = CGImageCreate(result->width, result->height, bitsPerComponent, bitsPerPixel, rowBytes, colorSpace, bitmapInfo, provider, NULL, NO, renderingIntent); - - // clean up - if(shouldReleaseColorSpace) { - CGColorSpaceRelease(colorSpace); - } - CGDataProviderRelease(provider); - - return imageRef; -} - -static void SetupConversionInfo(avifImage * avif, - avifReformatState* state, - vImage_YpCbCrToARGBMatrix* matrix, - vImage_YpCbCrPixelRange* pixelRange) { - avifRGBImage emptyRGBImage = { - .width = avif->width, - .height = avif->height, - .depth = avif->depth, - .format = AVIF_RGB_FORMAT_ARGB, - - .pixels = NULL, - .rowBytes = 0, - }; - avifPrepareReformatState(avif, &emptyRGBImage, state); - - // Setup Matrix - matrix->Yp = 1.0f; - - matrix->Cb_B = 2.0f * (1.0f - state->kb); - matrix->Cb_G = -2.0f * (1.0f - state->kb) * state->kb / state->kg; - - matrix->Cr_R = 2.0f * (1.0f - state->kr); - matrix->Cr_G = -2.0f * (1.0f - state->kr) * state->kr / state->kg; - - // Setup Pixel Range - switch (avif->depth) { - case 8: - if (avif->yuvRange == AVIF_RANGE_LIMITED) { - pixelRange->Yp_bias = 16; - pixelRange->YpRangeMax = 235; - pixelRange->YpMax = 255; - pixelRange->YpMin = 0; - pixelRange->CbCr_bias = 128; - pixelRange->CbCrRangeMax = 240; - pixelRange->CbCrMax = 255; - pixelRange->CbCrMin = 0; - }else{ - pixelRange->Yp_bias = 0; - pixelRange->YpRangeMax = 255; - pixelRange->YpMax = 255; - pixelRange->YpMin = 0; - pixelRange->CbCr_bias = 128; - pixelRange->CbCrRangeMax = 255; - pixelRange->CbCrMax = 255; - pixelRange->CbCrMin = 0; - } - break; - case 10: - if (avif->yuvRange == AVIF_RANGE_LIMITED) { - pixelRange->Yp_bias = 64; - pixelRange->YpRangeMax = 940; - pixelRange->YpMax = 1023; - pixelRange->YpMin = 0; - pixelRange->CbCr_bias = 512; - pixelRange->CbCrRangeMax = 960; - pixelRange->CbCrMax = 1023; - pixelRange->CbCrMin = 0; - }else{ - pixelRange->Yp_bias = 0; - pixelRange->YpRangeMax = 1023; - pixelRange->YpMax = 1023; - pixelRange->YpMin = 0; - pixelRange->CbCr_bias = 512; - pixelRange->CbCrRangeMax = 1023; - pixelRange->CbCrMax = 1023; - pixelRange->CbCrMin = 0; - } - break; - case 12: - if (avif->yuvRange == AVIF_RANGE_LIMITED) { - pixelRange->Yp_bias = 256; - pixelRange->YpRangeMax = 3760; - pixelRange->YpMax = 4095; - pixelRange->YpMin = 0; - pixelRange->CbCr_bias = 2048; - pixelRange->CbCrRangeMax = 3840; - pixelRange->CbCrMax = 4095; - pixelRange->CbCrMin = 0; - }else{ - pixelRange->Yp_bias = 0; - pixelRange->YpRangeMax = 4095; - pixelRange->YpMax = 4095; - pixelRange->YpMin = 0; - pixelRange->CbCr_bias = 2048; - pixelRange->CbCrRangeMax = 4095; - pixelRange->CbCrMax = 4095; - pixelRange->CbCrMin = 0; - } - break; - default: - NSLog(@"Unknown bit depth: %d", avif->depth); - return; - } - -} - - -// Convert 8bit AVIF image into RGB888/ARGB8888/Mono/MonoA using vImage Acceralation Framework. -static CGImageRef CreateImage8(avifImage * avif) { - CGImageRef result = NULL; - uint8_t* resultBufferData = NULL; - uint8_t* argbBufferData = NULL; - uint8_t* dummyCbData = NULL; - uint8_t* dummyCrData = NULL; - uint8_t* scaledAlphaBufferData = NULL; - - vImage_Error err = kvImageNoError; - - // image properties - BOOL const monochrome = avif->yuvPlanes[1] == NULL || avif->yuvPlanes[2] == NULL; - BOOL const hasAlpha = avif->alphaPlane != NULL; - size_t const components = (monochrome ? 1 : 3) + (hasAlpha ? 
1 : 0); - size_t const rowBytes = components * sizeof(uint8_t) * avif->width; - - // setup conversion info - avifReformatState state = {0}; - vImage_YpCbCrToARGBMatrix matrix = {0}; - vImage_YpCbCrPixelRange pixelRange = {0}; - SetupConversionInfo(avif, &state, &matrix, &pixelRange); - - vImage_YpCbCrToARGB convInfo = {0}; - - resultBufferData = calloc(components * rowBytes * avif->height, sizeof(uint8_t)); - if(resultBufferData == NULL) { - goto end_all; - } - - BOOL const useTempBuffer = monochrome || !hasAlpha; // if and only if the image is not ARGB - - if(useTempBuffer) { - argbBufferData = calloc(avif->width * avif->height * 4, sizeof(uint8_t)); - if(argbBufferData == NULL) { - goto end_all; - } - } - - vImage_Buffer resultBuffer = { - .data = resultBufferData, - .width = avif->width, - .height = avif->height, - .rowBytes = avif->width * components, - }; - vImage_Buffer argbBuffer = { - .data = useTempBuffer ? argbBufferData : resultBufferData, - .width = avif->width, - .height = avif->height, - .rowBytes = avif->width * 4, - }; - vImage_Buffer origY = { - .data = avif->yuvPlanes[AVIF_CHAN_Y], - .rowBytes = avif->yuvRowBytes[AVIF_CHAN_Y], - .width = avif->width, - .height = avif->height, - }; - - vImage_Buffer origCb = { - .data = avif->yuvPlanes[AVIF_CHAN_U], - .rowBytes = avif->yuvRowBytes[AVIF_CHAN_U], - .width = (avif->width+state.formatInfo.chromaShiftX) >> state.formatInfo.chromaShiftX, - .height = (avif->height+state.formatInfo.chromaShiftY) >> state.formatInfo.chromaShiftY, - }; - - if(origCb.data == NULL) { // allocate dummy data to convert monochrome images. - dummyCbData = calloc(origCb.width, sizeof(uint8_t)); - if(dummyCbData == NULL) { - goto end_all; - } - origCb.data = dummyCbData; - origCb.rowBytes = 0; - memset(origCb.data, pixelRange.CbCr_bias, origCb.width); - } - - vImage_Buffer origCr = { - .data = avif->yuvPlanes[AVIF_CHAN_V], - .rowBytes = avif->yuvRowBytes[AVIF_CHAN_V], - .width = (avif->width+state.formatInfo.chromaShiftX) >> state.formatInfo.chromaShiftX, - .height = (avif->height+state.formatInfo.chromaShiftY) >> state.formatInfo.chromaShiftY, - }; - if(origCr.data == NULL) { // allocate dummy data to convert monochrome images. - dummyCrData = calloc(origCr.width, sizeof(uint8_t)); - if(dummyCrData == NULL) { - goto end_all; - } - origCr.data = dummyCrData; - origCr.rowBytes = 0; - memset(origCr.data, pixelRange.CbCr_bias, origCr.width); - } - - uint8_t const permuteMap[4] = {0, 1, 2, 3}; - switch(avif->yuvFormat) { - case AVIF_PIXEL_FORMAT_NONE: - NSLog(@"Invalid pixel format."); - goto end_all; - case AVIF_PIXEL_FORMAT_YUV420: - case AVIF_PIXEL_FORMAT_YV12: - { - err = - vImageConvert_YpCbCrToARGB_GenerateConversion(&matrix, - &pixelRange, - &convInfo, - kvImage420Yp8_Cb8_Cr8, - kvImageARGB8888, - kvImageNoFlags); - if(err != kvImageNoError) { - NSLog(@"Failed to setup conversion: %ld", err); - goto end_420; - } - - err = vImageConvert_420Yp8_Cb8_Cr8ToARGB8888(&origY, - &origCb, - &origCr, - &argbBuffer, - &convInfo, - permuteMap, - 255, - kvImageNoFlags); - if(err != kvImageNoError) { - NSLog(@"Failed to convert to ARGB8888: %ld", err); - goto end_420; - } - end_420: - // We didn't allocate any heaps. 
- if(err == kvImageNoError) { - break; - } else { - goto end_all; - } - } - case AVIF_PIXEL_FORMAT_YUV444: - { - uint8_t* yuvBufferData = NULL; - err = - vImageConvert_YpCbCrToARGB_GenerateConversion(&matrix, - &pixelRange, - &convInfo, - kvImage444CrYpCb8, - kvImageARGB8888, - kvImageNoFlags); - if(err != kvImageNoError) { - NSLog(@"Failed to setup conversion: %ld", err); - goto end_444; - } - - yuvBufferData = calloc(avif->width * avif->height * 3, sizeof(uint8_t)); - if(yuvBufferData == NULL) { - err = kvImageMemoryAllocationError; - goto end_444; - } - vImage_Buffer yuvBuffer = { - .data = yuvBufferData, - .width = avif->width, - .height = avif->height, - .rowBytes = avif->width * 3, - }; - err = vImageConvert_Planar8toRGB888(&origCr, &origY, &origCb, &yuvBuffer, kvImageNoFlags); - if(err != kvImageNoError) { - NSLog(@"Failed to composite kvImage444CrYpCb8: %ld", err); - goto end_444; - } - vImageConvert_444CrYpCb8ToARGB8888(&yuvBuffer, - &argbBuffer, - &convInfo, - permuteMap, - 255, - kvImageNoFlags); - if(err != kvImageNoError) { - NSLog(@"Failed to convert to ARGB8888: %ld", err); - goto end_444; - } - end_444: - free(yuvBufferData); - if(err == kvImageNoError) { - break; - } else { - goto end_all; - } - } - case AVIF_PIXEL_FORMAT_YUV422: - { - uint8_t* y1BufferData = NULL; - uint8_t* y2BufferData = NULL; - uint8_t* yuyvBufferData = NULL; - - err = - vImageConvert_YpCbCrToARGB_GenerateConversion(&matrix, - &pixelRange, - &convInfo, - kvImage422YpCbYpCr8, - kvImageARGB8888, - kvImageNoFlags); - if(err != kvImageNoError) { - NSLog(@"Failed to setup conversion: %ld", err); - goto end_422; - } - - const vImagePixelCount alignedWidth = (origY.width+1) & (~1); - y1BufferData = calloc(alignedWidth/2 * origY.height, sizeof(uint8_t)); - y2BufferData = calloc(alignedWidth/2 * origY.height, sizeof(uint8_t)); - yuyvBufferData = calloc(alignedWidth * avif->height * 2, sizeof(uint8_t)); - if(y1BufferData == NULL || y2BufferData == NULL || yuyvBufferData == NULL) { - err = kvImageMemoryAllocationError; - goto end_422; - } - vImage_Buffer y1Buffer = { - .data = y1BufferData, - .width = alignedWidth/2, - .height = origY.height, - .rowBytes = alignedWidth/2 * sizeof(uint8_t), - }; - vImage_Buffer y2Buffer = { - .data = y2BufferData, - .width = alignedWidth/2, - .height = origY.height, - .rowBytes = alignedWidth/2 * sizeof(uint8_t), - }; - vImage_Buffer yuyvBuffer = { - .data = yuyvBufferData, - .width = alignedWidth/2, // It will be fixed later. 
- .height = avif->height, - .rowBytes = alignedWidth / 2 * 4 * sizeof(uint8_t), - }; - err = vImageConvert_ChunkyToPlanar8((const void*[]){origY.data}, - (const vImage_Buffer*[]){&y1Buffer}, - 1 /* channelCount */, 2 /* src srcStrideBytes */, - alignedWidth/2, origY.height, - origY.rowBytes, kvImageNoFlags); - if(err != kvImageNoError) { - NSLog(@"Failed to separate first Y channel: %ld", err); - goto end_422; - } - y2Buffer.width = origY.width/2; - err = vImageConvert_ChunkyToPlanar8((const void*[]){origY.data + 1}, - (const vImage_Buffer*[]){&y2Buffer}, - 1 /* channelCount */, 2 /* src srcStrideBytes */, - origY.width/2, origY.height, - origY.rowBytes, kvImageNoFlags); - y2Buffer.width = alignedWidth/2; - if(err != kvImageNoError) { - NSLog(@"Failed to separate second Y channel: %ld", err); - goto end_422; - } - err = vImageConvert_Planar8toARGB8888(&y1Buffer, &origCb, &y2Buffer, &origCr, - &yuyvBuffer, kvImageNoFlags); - if(err != kvImageNoError) { - NSLog(@"Failed to composite kvImage422YpCbYpCr8: %ld", err); - goto end_422; - } - yuyvBuffer.width *= 2; - - err = vImageConvert_422YpCbYpCr8ToARGB8888(&yuyvBuffer, - &argbBuffer, - &convInfo, - permuteMap, - 255, - kvImageNoFlags); - if(err != kvImageNoError) { - goto end_422; - } - end_422: - free(y1BufferData); - free(y2BufferData); - free(yuyvBufferData); - if(err == kvImageNoError) { - break; - } else { - goto end_all; - } - } - } - - if(hasAlpha) { // alpha - vImage_Buffer alphaBuffer = {0}; - if(avif->alphaRange == AVIF_RANGE_LIMITED) { - float* floatAlphaBufferData = NULL; - floatAlphaBufferData = calloc(avif->width * avif->height, sizeof(float)); - scaledAlphaBufferData = calloc(avif->width * avif->height, sizeof(uint8_t)); - if(floatAlphaBufferData == NULL || scaledAlphaBufferData == NULL) { - err = kvImageMemoryAllocationError; - goto end_prepare_alpha; - } - vImage_Buffer origAlphaBuffer = { - .data = avif->alphaPlane, - .width = avif->width, - .height = avif->height, - .rowBytes = avif->alphaRowBytes, - }; - vImage_Buffer floatAlphaBuffer = { - .data = floatAlphaBufferData, - .width = avif->width, - .height = avif->height, - .rowBytes = avif->width * sizeof(float), - }; - alphaBuffer.width = avif->width; - alphaBuffer.height = avif->height; - alphaBuffer.data = scaledAlphaBufferData; - alphaBuffer.rowBytes = avif->width * sizeof(uint8_t); - err = vImageConvert_Planar8toPlanarF(&origAlphaBuffer, &floatAlphaBuffer, 255.0f, 0.0f, kvImageNoFlags); - if(err != kvImageNoError) { - NSLog(@"Failed to convert alpha planes from uint8 to float: %ld", err); - goto end_prepare_alpha; - } - err = vImageConvert_PlanarFtoPlanar8(&floatAlphaBuffer, &alphaBuffer, 235.0f, 16.0f, kvImageNoFlags); - if(err != kvImageNoError) { - NSLog(@"Failed to convert alpha planes from float to uint8: %ld", err); - goto end_prepare_alpha; - } - end_prepare_alpha: - free(floatAlphaBufferData); - if(err != kvImageNoError) { - goto end_alpha; - } - } else { - alphaBuffer.width = avif->width; - alphaBuffer.height = avif->height; - alphaBuffer.data = avif->alphaPlane; - alphaBuffer.rowBytes = avif->alphaRowBytes; - } - if(monochrome) { // alpha_mono - uint8_t* tmpBufferData = NULL; - uint8_t* monoBufferData = NULL; - tmpBufferData = calloc(avif->width, sizeof(uint8_t)); - monoBufferData = calloc(avif->width * avif->height, sizeof(uint8_t)); - if(tmpBufferData == NULL || monoBufferData == NULL) { - goto end_alpha_mono; - } - vImage_Buffer tmpBuffer = { - .data = tmpBufferData, - .width = avif->width, - .height = avif->height, - .rowBytes = 0, - }; - vImage_Buffer 
monoBuffer = { - .data = monoBufferData, - .width = avif->width, - .height = avif->height, - .rowBytes = avif->width, - }; - err = vImageConvert_ARGB8888toPlanar8(&argbBuffer, &tmpBuffer, &tmpBuffer, &monoBuffer, &tmpBuffer, kvImageNoFlags); - if(err != kvImageNoError) { - NSLog(@"Failed to convert ARGB to A_G_: %ld", err); - goto end_alpha_mono; - } - err = vImageConvert_PlanarToChunky8((const vImage_Buffer*[]){&alphaBuffer, &monoBuffer}, - (void*[]){resultBuffer.data, resultBuffer.data + 1}, - 2 /* channelCount */, 2 /* destStrideBytes */, - resultBuffer.width, resultBuffer.height, - resultBuffer.rowBytes, kvImageNoFlags); - if(err != kvImageNoError) { - NSLog(@"Failed to combine mono and alpha: %ld", err); - goto end_alpha_mono; - } - result = CreateImageFromBuffer(avif, &resultBuffer); - resultBufferData = NULL; - end_alpha_mono: - free(tmpBufferData); - free(monoBufferData); - goto end_alpha; - } else { // alpha_color - err = vImageOverwriteChannels_ARGB8888(&alphaBuffer, &argbBuffer, &argbBuffer, 0x8, kvImageNoFlags); - if(err != kvImageNoError) { - NSLog(@"Failed to overwrite alpha: %ld", err); - goto end_alpha_color; - } - result = CreateImageFromBuffer(avif, &argbBuffer); - resultBufferData = NULL; - end_alpha_color: - goto end_alpha; - } - end_alpha: - goto end_all; - } else { // no_alpha - if(monochrome) { // no_alpha_mono - uint8_t* tmpBufferData = NULL; - tmpBufferData = calloc(avif->width, sizeof(uint8_t)); - if(tmpBufferData == NULL){ - goto end_no_alpha_mono; - } - vImage_Buffer tmpBuffer = { - .data = tmpBufferData, - .width = avif->width, - .height = avif->height, - .rowBytes = 0, - }; - err = vImageConvert_ARGB8888toPlanar8(&argbBuffer, &tmpBuffer, &tmpBuffer, &resultBuffer, &tmpBuffer, kvImageNoFlags); - if(err != kvImageNoError) { - NSLog(@"Failed to convert ARGB to B(Mono): %ld", err); - goto end_no_alpha_mono; - } - result = CreateImageFromBuffer(avif, &resultBuffer); - resultBufferData = NULL; - end_no_alpha_mono: - free(tmpBufferData); - goto end_no_alpha; - } else { // no_alpha_color - err = vImageConvert_ARGB8888toRGB888(&argbBuffer, &resultBuffer, kvImageNoFlags); - if(err != kvImageNoError) { - NSLog(@"Failed to convert ARGB to RGB: %ld", err); - goto end_no_alpha_color; - } - result = CreateImageFromBuffer(avif, &resultBuffer); - resultBufferData = NULL; - end_no_alpha_color: - goto end_no_alpha; - } - end_no_alpha: - goto end_all; - } - -end_all: - free(resultBufferData); - free(argbBufferData); - free(dummyCbData); - free(dummyCrData); - free(scaledAlphaBufferData); - return result; -} - -// Convert 10/12bit AVIF image into RGB16U/ARGB16U/Mono16U/MonoA16U -static CGImageRef CreateImage16U(avifImage * avif) { - CGImageRef result = NULL; - uint16_t* resultBufferData = NULL; - uint16_t* argbBufferData = NULL; - uint16_t* ayuvBufferData = NULL; - uint16_t* scaledAlphaBufferData = NULL; - uint16_t* dummyCbData = NULL; - uint16_t* dummyCrData = NULL; - uint16_t* dummyAlphaData = NULL; - - vImage_Error err = kvImageNoError; - - // image properties - BOOL const monochrome = avif->yuvPlanes[1] == NULL || avif->yuvPlanes[2] == NULL; - BOOL const hasAlpha = avif->alphaPlane != NULL; - size_t const components = (monochrome ? 1 : 3) + (hasAlpha ? 
1 : 0); - - // setup conversion info - avifReformatState state = {0}; - vImage_YpCbCrToARGBMatrix matrix = {0}; - vImage_YpCbCrPixelRange pixelRange = {0}; - SetupConversionInfo(avif, &state, &matrix, &pixelRange); - - vImage_YpCbCrToARGB convInfo = {0}; - - resultBufferData = calloc(components * avif->width * avif->height, sizeof(uint16_t)); - ayuvBufferData = calloc(avif->width * avif->height * 4, sizeof(uint16_t)); - if(resultBufferData == NULL || ayuvBufferData == NULL) { - goto end_all; - } - - BOOL const useTempBuffer = monochrome || !hasAlpha; // if and only if the image is not ARGB - - if(useTempBuffer) { - argbBufferData = calloc(avif->width * avif->height * 4, sizeof(uint16_t)); - if(argbBufferData == NULL) { - goto end_all; - } - } - - vImage_Buffer resultBuffer = { - .data = resultBufferData, - .width = avif->width, - .height = avif->height, - .rowBytes = avif->width * components * sizeof(uint16_t), - }; - - vImage_Buffer argbBuffer = { - .data = useTempBuffer ? argbBufferData : resultBufferData, - .width = avif->width, - .height = avif->height, - .rowBytes = avif->width * 4 * sizeof(uint16_t), - }; - - vImage_Buffer ayuvBuffer = { - .data = ayuvBufferData, - .width = avif->width, - .height = avif->height, - .rowBytes = avif->width * 4 * sizeof(uint16_t), - }; - - vImage_Buffer origY = { - .data = avif->yuvPlanes[AVIF_CHAN_Y], - .rowBytes = avif->yuvRowBytes[AVIF_CHAN_Y], - .width = avif->width, - .height = avif->height, - }; - - vImage_Buffer origCb = { - .data = avif->yuvPlanes[AVIF_CHAN_U], - .rowBytes = avif->yuvRowBytes[AVIF_CHAN_U], - .width = (avif->width+state.formatInfo.chromaShiftX) >> state.formatInfo.chromaShiftX, - .height = (avif->height+state.formatInfo.chromaShiftY) >> state.formatInfo.chromaShiftY, - }; - - if(!origCb.data) { // allocate dummy data to convert monochrome images. - vImagePixelCount origHeight = origCb.height; - origCb.rowBytes = origCb.width * sizeof(uint16_t); - dummyCbData = calloc(origCb.width, sizeof(uint16_t)); - if(!dummyCbData) { - goto end_all; - } - origCb.data = dummyCbData; - origCb.height = 1; - // fill zero values. - err = vImageOverwriteChannelsWithScalar_Planar16U(pixelRange.CbCr_bias, &origCb, kvImageNoFlags); - if (err != kvImageNoError) { - NSLog(@"Failed to fill dummy Cr buffer: %ld", err); - goto end_all; - } - origCb.rowBytes = 0; - origCb.height = origHeight; - } - - vImage_Buffer origCr = { - .data = avif->yuvPlanes[AVIF_CHAN_V], - .rowBytes = avif->yuvRowBytes[AVIF_CHAN_V], - .width = (avif->width+state.formatInfo.chromaShiftX) >> state.formatInfo.chromaShiftX, - .height = (avif->height+state.formatInfo.chromaShiftY) >> state.formatInfo.chromaShiftY, - }; - - if(!origCr.data) { // allocate dummy data to convert monochrome images. - vImagePixelCount origHeight = origCr.height; - origCr.rowBytes = origCr.width * sizeof(uint16_t); - dummyCrData = calloc(origCr.width, sizeof(uint16_t)); - if(!dummyCrData) { - goto end_all; - } - origCr.data = dummyCrData; - origCr.height = 1; - // fill zero values. 
- err = vImageOverwriteChannelsWithScalar_Planar16U(pixelRange.CbCr_bias, &origCr, kvImageNoFlags); - if (err != kvImageNoError) { - NSLog(@"Failed to fill dummy Cr buffer: %ld", err); - goto end_all; - } - origCr.rowBytes = 0; - origCr.height = origHeight; - } - - vImage_Buffer origAlpha = {0}; - if(hasAlpha) { - float* floatAlphaBufferData = NULL; - floatAlphaBufferData = calloc(avif->width * avif->height, sizeof(float)); - scaledAlphaBufferData = calloc(avif->width * avif->height, sizeof(uint16_t)); - if(floatAlphaBufferData == NULL || scaledAlphaBufferData == NULL) { - err = kvImageMemoryAllocationError; - goto end_prepare_alpha; - } - origAlpha.data = avif->alphaPlane; - origAlpha.width = avif->width; - origAlpha.height = avif->height; - origAlpha.rowBytes = avif->alphaRowBytes; - - vImage_Buffer floatAlphaBuffer = { - .data = floatAlphaBufferData, - .width = avif->width, - .height = avif->height, - .rowBytes = avif->width * sizeof(float), - }; - vImage_Buffer scaledAlphaBuffer = { - .data = scaledAlphaBufferData, - .width = avif->width, - .height = avif->height, - .rowBytes = avif->width * sizeof(uint16_t), - }; - float offset = 0.0f; - float rangeMax = 0.0f; - if(avif->depth == 10) { - if(avif->alphaRange == AVIF_RANGE_LIMITED) { - offset = 64.0f; - rangeMax = 940.0f; - } else { - offset = 0.0f; - rangeMax = 1023.0f; - } - } else if(avif->depth == 12) { - if(avif->alphaRange == AVIF_RANGE_LIMITED) { - offset = 256.0f; - rangeMax = 3760.0f; - } else { - offset = 0.0f; - rangeMax = 4095.0f; - } - } - float const scale = (float)(rangeMax - offset) / 65535.0f; - err = vImageConvert_16UToF(&origAlpha, &floatAlphaBuffer, 0.0f, 1.0f, kvImageNoFlags); - if(err != kvImageNoError) { - NSLog(@"Failed to convert alpha planes from uint16 to float: %ld", err); - goto end_prepare_alpha; - } - err = vImageConvert_FTo16U(&floatAlphaBuffer, &scaledAlphaBuffer, offset, scale, kvImageNoFlags); - if(err != kvImageNoError) { - NSLog(@"Failed to convert alpha planes from uint16 to float: %ld", err); - goto end_prepare_alpha; - } - origAlpha.data = scaledAlphaBufferData; - origAlpha.rowBytes = avif->width * sizeof(uint16_t); - end_prepare_alpha: - free(floatAlphaBufferData); - if(err != kvImageNoError) { - goto end_all; - } - } else { - // allocate dummy data to convert monochrome images. 
- origAlpha.rowBytes = avif->width * sizeof(uint16_t); - dummyAlphaData = calloc(avif->width, sizeof(uint16_t)); - if(!dummyAlphaData) { - goto end_all; - } - origAlpha.data = dummyAlphaData; - origAlpha.width = avif->width; - origAlpha.height = 1; - err = vImageOverwriteChannelsWithScalar_Planar16U(0xffff, &origAlpha, kvImageNoFlags); - if (err != kvImageNoError) { - NSLog(@"Failed to fill dummy alpha buffer: %ld", err); - goto end_all; - } - origAlpha.rowBytes = 0; - origAlpha.height = avif->height; - }; - - - uint8_t const permuteMap[4] = {0, 1, 2, 3}; - switch(avif->yuvFormat) { - case AVIF_PIXEL_FORMAT_NONE: - NSLog(@"Invalid pixel format."); - goto end_all; - case AVIF_PIXEL_FORMAT_YUV420: - case AVIF_PIXEL_FORMAT_YUV422: - case AVIF_PIXEL_FORMAT_YV12: - { - uint16_t* scaledCbData = NULL; - uint16_t* scaledCrData = NULL; - void* scaleTempBuff = NULL; - - scaledCbData = calloc(avif->width * avif->height * 4, sizeof(uint16_t)); - scaledCrData = calloc(avif->width * avif->height * 4, sizeof(uint16_t)); - if(scaledCbData == NULL || scaledCrData == NULL) { - err = kvImageMemoryAllocationError; - goto end_420; - } - vImage_Buffer scaledCb = { - .data = scaledCbData, - .width = avif->width, - .height = avif->height, - .rowBytes = avif->width * 4 * sizeof(uint16_t), - }; - vImage_Buffer scaledCr = { - .data = scaledCrData, - .width = avif->width, - .height = avif->height, - .rowBytes = avif->width * 4 * sizeof(uint16_t), - }; - vImage_Error scaleTempBuffSize = vImageScale_Planar16U(&origCb, &scaledCb, NULL, kvImageGetTempBufferSize); - if(scaleTempBuffSize < 0) { - NSLog(@"Failed to get temp buffer size: %ld", scaleTempBuffSize); - goto end_420; - } - scaleTempBuff = malloc(scaleTempBuffSize); - if(scaleTempBuff == NULL) { - err = kvImageMemoryAllocationError; - goto end_420; - } - // upscale Cb - err = vImageScale_Planar16U(&origCb, &scaledCb, scaleTempBuff, kvImageNoFlags); - if(err != kvImageNoError) { - NSLog(@"Failed to scale Cb: %ld", err); - goto end_420; - } - // upscale Cr - err = vImageScale_Planar16U(&origCr, &scaledCr, scaleTempBuff, kvImageNoFlags); - if(err != kvImageNoError) { - NSLog(@"Failed to scale Cb: %ld", err); - goto end_420; - } - err = vImageConvert_Planar16UtoARGB16U(&origAlpha, &origY, &scaledCb, &scaledCr, &ayuvBuffer, kvImageNoFlags); - if(err != kvImageNoError) { - NSLog(@"Failed to composite kvImage444AYpCbCr16: %ld", err); - goto end_420; - } - end_420: - free(scaledCrData); - free(scaledCbData); - free(scaleTempBuff); - if(err == kvImageNoError) { - break; - } else { - goto end_all; - } - } - case AVIF_PIXEL_FORMAT_YUV444: - { - err = vImageConvert_Planar16UtoARGB16U(&origAlpha, &origY, &origCb, &origCr, &ayuvBuffer, kvImageNoFlags); - if(err != kvImageNoError) { - NSLog(@"Failed to composite kvImage444AYpCbCr16: %ld", err); - goto end_444; - } - end_444: - if(err == kvImageNoError) { - break; - } else { - goto end_all; - } - } - } - free(dummyCbData); - dummyCbData = NULL; - free(dummyCrData); - dummyCrData = NULL; - free(dummyAlphaData); - dummyAlphaData = NULL; - free(scaledAlphaBufferData); - scaledAlphaBufferData = NULL; - - err = vImageConvert_YpCbCrToARGB_GenerateConversion(&matrix, - &pixelRange, - &convInfo, - kvImage444AYpCbCr16, - kvImageARGB16U, - kvImageNoFlags); - if(err != kvImageNoError) { - NSLog(@"Failed to setup conversion: %ld", err); - goto end_all; - } - err = vImageConvert_444AYpCbCr16ToARGB16U(&ayuvBuffer, - &argbBuffer, - &convInfo, - permuteMap, - kvImageNoFlags); - if(err != kvImageNoError) { - NSLog(@"Failed to convert to ARGB16U: 
%ld", err); - goto end_all; - } - - if(hasAlpha) { // alpha - if(monochrome){ // alpha_mono - uint16_t* tmpBufferData = NULL; - uint16_t* alphaBufferData = NULL; - uint16_t* monoBufferData = NULL; - uint8_t* alphaBuffer1Data = NULL; - uint8_t* alphaBuffer2Data = NULL; - uint8_t* monoBuffer1Data = NULL; - uint8_t* monoBuffer2Data = NULL; - - tmpBufferData = calloc(avif->width, sizeof(uint16_t)); - alphaBufferData = calloc(avif->width * avif->height, sizeof(uint16_t)); - monoBufferData = calloc(avif->width * avif->height, sizeof(uint16_t)); - - monoBuffer1Data = calloc(avif->width * avif->height, sizeof(uint8_t)); - monoBuffer2Data = calloc(avif->width * avif->height, sizeof(uint8_t)); - - alphaBuffer1Data = calloc(avif->width * avif->height, sizeof(uint8_t)); - alphaBuffer2Data = calloc(avif->width * avif->height, sizeof(uint8_t)); - - if(tmpBufferData == NULL || - alphaBufferData == NULL || - monoBufferData == NULL || - alphaBuffer1Data == NULL || - alphaBuffer2Data == NULL || - monoBuffer1Data == NULL || - monoBuffer2Data == NULL){ - goto end_alpha_mono; - } - - vImage_Buffer tmpBuffer = { - .data = tmpBufferData, - .width = avif->width, - .height = avif->height, - .rowBytes = 0, - }; - vImage_Buffer alphaBuffer = { - .data = alphaBufferData, - .width = avif->width, - .height = avif->height, - .rowBytes = avif->width * sizeof(uint16_t), - }; - vImage_Buffer monoBuffer = { - .data = monoBufferData, - .width = avif->width, - .height = avif->height, - .rowBytes = avif->width * sizeof(uint16_t), - }; - vImage_Buffer monoBuffer1 = { - .data = monoBuffer1Data, - .width = avif->width, - .height = avif->height, - .rowBytes = avif->width * sizeof(uint8_t), - }; - vImage_Buffer monoBuffer2 = { - .data = monoBuffer2Data, - .width = avif->width, - .height = avif->height, - .rowBytes = avif->width * sizeof(uint8_t), - }; - vImage_Buffer alphaBuffer1 = { - .data = alphaBuffer1Data, - .width = avif->width, - .height = avif->height, - .rowBytes = avif->width * sizeof(uint8_t), - }; - vImage_Buffer alphaBuffer2 = { - .data = alphaBuffer2Data, - .width = avif->width, - .height = avif->height, - .rowBytes = avif->width * sizeof(uint8_t), - }; - - err = vImageConvert_ARGB16UtoPlanar16U(&argbBuffer, &alphaBuffer, &tmpBuffer, &monoBuffer, &tmpBuffer, kvImageNoFlags); - if(err != kvImageNoError) { - NSLog(@"Failed to convert ARGB to Mono: %ld", err); - goto end_alpha_mono; - } - err = vImageConvert_ChunkyToPlanar8((const void*[]){monoBuffer.data, monoBuffer.data + 1}, - (const vImage_Buffer*[]){&monoBuffer1, &monoBuffer2}, - 2 /* channelCount */, 2 /* src srcStrideBytes */, - monoBuffer.width, monoBuffer.height, - monoBuffer.rowBytes, kvImageNoFlags); - if(err != kvImageNoError) { - NSLog(@"Failed to split Mono16: %ld", err); - goto end_alpha_mono; - } - - err = vImageConvert_ChunkyToPlanar8((const void*[]){alphaBuffer.data, alphaBuffer.data + 1}, - (const vImage_Buffer*[]){&alphaBuffer1, &alphaBuffer2}, - 2 /* channelCount */, 2 /* src srcStrideBytes */, - alphaBuffer.width, alphaBuffer.height, - alphaBuffer.rowBytes, kvImageNoFlags); - if(err != kvImageNoError) { - NSLog(@"Failed to split Mono16: %ld", err); - goto end_alpha_mono; - } - - err = vImageConvert_Planar8toARGB8888(&alphaBuffer1, &alphaBuffer2, &monoBuffer1, &monoBuffer2, &resultBuffer, kvImageNoFlags); - if(err != kvImageNoError) { - free(resultBufferData); - NSLog(@"Failed to convert Planar Alpha + Mono to MonoA: %ld", err); - goto end_alpha_mono; - } - result = CreateImageFromBuffer(avif, &resultBuffer); - resultBufferData = NULL; - 
end_alpha_mono: - free(tmpBufferData); - free(alphaBufferData); - free(monoBufferData); - free(alphaBuffer1Data); - free(alphaBuffer2Data); - free(monoBuffer1Data); - free(monoBuffer2Data); - goto end_alpha; - }else{ // alpha_color - result = CreateImageFromBuffer(avif, &resultBuffer); - resultBufferData = NULL; - end_alpha_color: - goto end_alpha; - } - end_alpha: - goto end_all; - } else { // no_alpha - if(monochrome) { // no_alpha_mono - uint16_t* tmpBufferData = NULL; - tmpBufferData = calloc(avif->width, sizeof(uint16_t)); - if(tmpBufferData == NULL) { - goto end_no_alpha_mono; - } - vImage_Buffer tmpBuffer = { - .data = tmpBufferData, - .width = avif->width, - .height = avif->height, - .rowBytes = 0, - }; - err = vImageConvert_ARGB16UtoPlanar16U(&argbBuffer, &tmpBuffer, &tmpBuffer, &resultBuffer, &tmpBuffer, kvImageNoFlags); - if(err != kvImageNoError) { - NSLog(@"Failed to convert ARGB to Mono: %ld", err); - goto end_no_alpha_mono; - } - result = CreateImageFromBuffer(avif, &resultBuffer); - resultBufferData = NULL; - end_no_alpha_mono: - free(tmpBufferData); - goto end_no_alpha; - } else { // no_alpha_color - err = vImageConvert_ARGB16UtoRGB16U(&argbBuffer, &resultBuffer, kvImageNoFlags); - if(err != kvImageNoError) { - NSLog(@"Failed to convert ARGB to RGB: %ld", err); - goto end_no_alpha_color; - } - result = CreateImageFromBuffer(avif, &resultBuffer); - resultBufferData = NULL; - end_no_alpha_color: - goto end_no_alpha; - } - end_no_alpha: - goto end_all; - } -end_all: - free(resultBufferData); - free(argbBufferData); - free(ayuvBufferData); - free(scaledAlphaBufferData); - free(dummyCbData); - free(dummyCrData); - free(dummyAlphaData); - return result; -} +#import "Conversion.h" @implementation SDImageAVIFCoder @@ -1630,15 +77,7 @@ - (nullable CGImageRef)sd_createAVIFImageWithData:(nonnull NSData *)data CF_RETU avifDecoderDestroy(decoder); return nil; } - avifImage * avif = decoder->image; - - CGImageRef image = NULL; - // convert planar to ARGB/RGB - if(avifImageUsesU16(avif)) { // 10bit or 12bit - image = CreateImage16U(avif); - } else { //8bit - image = CreateImage8(avif); - } + CGImageRef const image = CreateCGImageFromAVIF(decoder->image); avifDecoderDestroy(decoder); return image; } From 5b70bf5a99669c87cc4253a72e25a3ac2cd3b13c Mon Sep 17 00:00:00 2001 From: psi Date: Sun, 15 Mar 2020 14:06:30 +0900 Subject: [PATCH 2/9] Add test that checks all colorspace supports output --- Example/Tests/Tests.m | 21 +++++++++++++++++++++ SDWebImageAVIFCoder/Classes/Conversion.h | 6 ++++++ 2 files changed, 27 insertions(+) diff --git a/Example/Tests/Tests.m b/Example/Tests/Tests.m index 410f291..68b3f41 100644 --- a/Example/Tests/Tests.m +++ b/Example/Tests/Tests.m @@ -8,6 +8,8 @@ @import XCTest; #import +#import +#import static UInt8 kBlack8[] = {0,0,0}; static UInt8 kGray8[] = {0x88,0x88,0x88}; @@ -152,6 +154,25 @@ - (void)testSpecialTest expectedColor16:kSpecial16 expectedNumComponents16:3]; } +-(void)testAllColorSpaceSupportsOutput +{ + static avifNclxColourPrimaries const numPrimaries = AVIF_NCLX_COLOUR_PRIMARIES_EBU3213E; + static avifNclxTransferCharacteristics const numTransfers = AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT2100_HLG; + for(avifNclxColourPrimaries primaries = 0; primaries < numPrimaries; ++primaries) { + for(avifNclxTransferCharacteristics transfer = 0; transfer < numTransfers; ++transfer) { + CGColorSpaceRef space = NULL; + + space = CreateColorSpaceRGB(primaries, transfer); + XCTAssertTrue(CGColorSpaceSupportsOutput(space)); + CGColorSpaceRelease(space); + + space = 
CreateColorSpaceMono(primaries, transfer); + XCTAssertTrue(CGColorSpaceSupportsOutput(space)); + CGColorSpaceRelease(space); + } + + } +} -(void)assertColor8: (NSString*)filename img:(CGImageRef)img expectedColor:(UInt8*)expectedColor { diff --git a/SDWebImageAVIFCoder/Classes/Conversion.h b/SDWebImageAVIFCoder/Classes/Conversion.h index 1e55d96..c44805b 100644 --- a/SDWebImageAVIFCoder/Classes/Conversion.h +++ b/SDWebImageAVIFCoder/Classes/Conversion.h @@ -6,4 +6,10 @@ // #pragma once +#if __has_include() +#import +#else +#import "avif/avif.h" +#endif + CGImageRef CreateCGImageFromAVIF(avifImage * avif); From 5d0ade97292281897dd6b026b7b27d87f90de344 Mon Sep 17 00:00:00 2001 From: psi Date: Sun, 15 Mar 2020 15:07:25 +0900 Subject: [PATCH 3/9] Add tests about color space and fix a bug about ICC profile. --- Example/Tests/Tests.m | 86 ++++++++++++++++++++++-- SDWebImageAVIFCoder/Classes/ColorSpace.m | 8 ++- 2 files changed, 88 insertions(+), 6 deletions(-) diff --git a/Example/Tests/Tests.m b/Example/Tests/Tests.m index 68b3f41..6783919 100644 --- a/Example/Tests/Tests.m +++ b/Example/Tests/Tests.m @@ -27,6 +27,10 @@ static UInt16 kBlue16[] = {0,0,65535}; static UInt16 kSpecial16[] = {0xe4 << 8,0x7a << 8,0x8c << 8}; +static avifNclxColourPrimaries const kNumPrimaries = AVIF_NCLX_COLOUR_PRIMARIES_EBU3213E; +static avifNclxTransferCharacteristics const kNumTransfers = AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT2100_HLG; + + // FIXME(ledyba-z): libavif does not respect MatrixCoefficients in AV1 Sequence Header. // Instead, it uses ColorPrimaries to calculate MatrixCoefficients. // This threashold can be less if libavif respects MatrixCoefficients... @@ -156,10 +160,8 @@ - (void)testSpecialTest -(void)testAllColorSpaceSupportsOutput { - static avifNclxColourPrimaries const numPrimaries = AVIF_NCLX_COLOUR_PRIMARIES_EBU3213E; - static avifNclxTransferCharacteristics const numTransfers = AVIF_NCLX_TRANSFER_CHARACTERISTICS_BT2100_HLG; - for(avifNclxColourPrimaries primaries = 0; primaries < numPrimaries; ++primaries) { - for(avifNclxTransferCharacteristics transfer = 0; transfer < numTransfers; ++transfer) { + for(avifNclxColourPrimaries primaries = 0; primaries < kNumPrimaries; ++primaries) { + for(avifNclxTransferCharacteristics transfer = 0; transfer < kNumTransfers; ++transfer) { CGColorSpaceRef space = NULL; space = CreateColorSpaceRGB(primaries, transfer); @@ -174,6 +176,82 @@ -(void)testAllColorSpaceSupportsOutput } } +-(void)testCalcNCLXColorSpaceFromAVIFImage +{ + avifImage* img = avifImageCreate(100, 100, 8, AVIF_PIXEL_FORMAT_YUV420); + for(avifNclxColourPrimaries primaries = 0; primaries < kNumPrimaries; ++primaries) { + for(avifNclxTransferCharacteristics transfer = 0; transfer < kNumTransfers; ++transfer) { + avifNclxColorProfile nclx; + nclx.colourPrimaries = primaries; + nclx.transferCharacteristics = transfer; + avifImageSetProfileNCLX(img, &nclx); + avifImageAllocatePlanes(img, AVIF_PLANES_YUV); + + CGColorSpaceRef space = NULL; + BOOL shouldRelease = FALSE; + + CalcColorSpaceRGB(img, &space, &shouldRelease); + XCTAssertTrue(CGColorSpaceSupportsOutput(space)); + if(shouldRelease) { + CGColorSpaceRelease(space); + } + + // monochrome + free(img->yuvPlanes[AVIF_CHAN_U]); + img->yuvPlanes[AVIF_CHAN_U] = NULL; + img->yuvRowBytes[AVIF_CHAN_U] = 0; + free(img->yuvPlanes[AVIF_CHAN_V]); + img->yuvPlanes[AVIF_CHAN_V] = NULL; + img->yuvRowBytes[AVIF_CHAN_V] = 0; + + CalcColorSpaceMono(img, &space, &shouldRelease); + XCTAssertTrue(CGColorSpaceSupportsOutput(space)); + if(shouldRelease) { + 
CGColorSpaceRelease(space); + } + + avifImageFreePlanes(img, AVIF_PLANES_ALL); + } + } + avifImageDestroy(img); +} + +-(void)testCalcICCColorSpaceFromAVIFImage +{ + NSData *iccProfile = (__bridge_transfer NSData *)CGColorSpaceCopyICCProfile([SDImageCoderHelper colorSpaceGetDeviceRGB]); + avifImage* img = avifImageCreate(100, 100, 8, AVIF_PIXEL_FORMAT_YUV420); + avifImageSetProfileICC(img, (uint8_t *)iccProfile.bytes, iccProfile.length); + + avifImageAllocatePlanes(img, AVIF_PLANES_YUV); + + CGColorSpaceRef space = NULL; + BOOL shouldRelease = FALSE; + + CalcColorSpaceRGB(img, &space, &shouldRelease); + XCTAssertTrue(CGColorSpaceSupportsOutput(space)); + if(shouldRelease) { + CGColorSpaceRelease(space); + } + + // monochrome + free(img->yuvPlanes[AVIF_CHAN_U]); + img->yuvPlanes[AVIF_CHAN_U] = NULL; + img->yuvRowBytes[AVIF_CHAN_U] = 0; + free(img->yuvPlanes[AVIF_CHAN_V]); + img->yuvPlanes[AVIF_CHAN_V] = NULL; + img->yuvRowBytes[AVIF_CHAN_V] = 0; + + CalcColorSpaceMono(img, &space, &shouldRelease); + XCTAssertTrue(CGColorSpaceSupportsOutput(space)); + if(shouldRelease) { + CGColorSpaceRelease(space); + } + + avifImageFreePlanes(img, AVIF_PLANES_ALL); + + avifImageDestroy(img); +} + -(void)assertColor8: (NSString*)filename img:(CGImageRef)img expectedColor:(UInt8*)expectedColor { CFDataRef rawData = CGDataProviderCopyData(CGImageGetDataProvider(img)); diff --git a/SDWebImageAVIFCoder/Classes/ColorSpace.m b/SDWebImageAVIFCoder/Classes/ColorSpace.m index ccf2be4..016aa00 100644 --- a/SDWebImageAVIFCoder/Classes/ColorSpace.m +++ b/SDWebImageAVIFCoder/Classes/ColorSpace.m @@ -219,7 +219,9 @@ void CalcColorSpaceMono(avifImage * avif, CGColorSpaceRef* ref, BOOL* shouldRele if(avif->profileFormat == AVIF_PROFILE_FORMAT_ICC) { if(avif->icc.data && avif->icc.size) { if(@available(macOS 10.12, iOS 10.0, tvOS 10.0, *)) { - *ref = CGColorSpaceCreateWithICCData(avif->icc.data); + CFDataRef iccData = CFDataCreateWithBytesNoCopy(kCFAllocatorDefault, avif->icc.data, avif->icc.size,kCFAllocatorNull); + *ref = CGColorSpaceCreateWithICCData(iccData); + CFRelease(iccData); *shouldRelease = TRUE; }else{ NSData* iccData = [NSData dataWithBytes:avif->icc.data length:avif->icc.size]; @@ -325,7 +327,9 @@ void CalcColorSpaceRGB(avifImage * avif, CGColorSpaceRef* ref, BOOL* shouldRelea if(avif->profileFormat == AVIF_PROFILE_FORMAT_ICC) { if(avif->icc.data && avif->icc.size) { if(@available(macOS 10.12, iOS 10.0, tvOS 10.0, *)) { - *ref = CGColorSpaceCreateWithICCData(avif->icc.data); + CFDataRef iccData = CFDataCreateWithBytesNoCopy(kCFAllocatorDefault, avif->icc.data, avif->icc.size,kCFAllocatorNull); + *ref = CGColorSpaceCreateWithICCData(iccData); + CFRelease(iccData); *shouldRelease = TRUE; }else{ NSData* iccData = [NSData dataWithBytes:avif->icc.data length:avif->icc.size]; From c2d89d6c7b26c713a164113a72686454f40525ec Mon Sep 17 00:00:00 2001 From: psi Date: Sun, 15 Mar 2020 15:41:55 +0900 Subject: [PATCH 4/9] Add encoding test --- Example/Tests/Tests.m | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/Example/Tests/Tests.m b/Example/Tests/Tests.m index 6783919..9f181ae 100644 --- a/Example/Tests/Tests.m +++ b/Example/Tests/Tests.m @@ -252,6 +252,23 @@ -(void)testCalcICCColorSpaceFromAVIFImage avifImageDestroy(img); } +-(void)testEncodingAndDecoding +{ + CGSize size = CGSizeMake(100, 100); + UIGraphicsBeginImageContextWithOptions(size, YES, 0); + [[UIColor redColor] setFill]; + UIRectFill(CGRectMake(0, 0, size.width, size.height)); + UIImage *image = UIGraphicsGetImageFromCurrentImageContext(); 
+ UIGraphicsEndImageContext(); + NSData* encoded = [self->coder encodedDataWithImage:image format:SDImageFormatAVIF options:nil]; + image = nil; + + XCTAssertTrue([self->coder canDecodeFromData:encoded]); + + image = [self->coder decodedImageWithData:encoded options:nil]; + [self assertColor8:@"" img:image.CGImage expectedColor: kRed8]; +} + -(void)assertColor8: (NSString*)filename img:(CGImageRef)img expectedColor:(UInt8*)expectedColor { CFDataRef rawData = CGDataProviderCopyData(CGImageGetDataProvider(img)); From 03a4b9ba1c222a715e6ae7e462e6e1d55349245f Mon Sep 17 00:00:00 2001 From: psi Date: Sun, 15 Mar 2020 21:23:24 +0900 Subject: [PATCH 5/9] Make headers private or public in SwiftPM and Cocoapods --- Package.swift | 2 +- SDWebImageAVIFCoder.podspec | 2 ++ SDWebImageAVIFCoder/Classes/{ => Private}/ColorSpace.h | 0 SDWebImageAVIFCoder/Classes/{ => Private}/Conversion.h | 0 SDWebImageAVIFCoder/Classes/{ => Public}/SDImageAVIFCoder.h | 0 5 files changed, 3 insertions(+), 1 deletion(-) rename SDWebImageAVIFCoder/Classes/{ => Private}/ColorSpace.h (100%) rename SDWebImageAVIFCoder/Classes/{ => Private}/Conversion.h (100%) rename SDWebImageAVIFCoder/Classes/{ => Public}/SDImageAVIFCoder.h (100%) diff --git a/Package.swift b/Package.swift index c4f7b44..9200a06 100644 --- a/Package.swift +++ b/Package.swift @@ -28,7 +28,7 @@ let package = Package( dependencies: ["SDWebImage", "libavif"], path: ".", sources: ["SDWebImageAVIFCoder/Classes"], - publicHeadersPath: "SDWebImageAVIFCoder/Classes" + publicHeadersPath: "SDWebImageAVIFCoder/Classes/Public" ) ] ) diff --git a/SDWebImageAVIFCoder.podspec b/SDWebImageAVIFCoder.podspec index 2ef839d..5cac27f 100644 --- a/SDWebImageAVIFCoder.podspec +++ b/SDWebImageAVIFCoder.podspec @@ -34,6 +34,8 @@ Which is built based on the open-sourced libavif codec. 
s.watchos.deployment_target = '2.0' s.source_files = 'SDWebImageAVIFCoder/Classes/**/*', 'SDWebImageAVIFCoder/Module/SDWebImageAVIFCoder.h' + s.public_header_files = 'SDWebImageAVIFCoder/Classes/Public/*.{h,m}' + s.private_header_files = 'SDWebImageAVIFCoder/Classes/Private/*.{h,m}' s.dependency 'SDWebImage', '~> 5.0' s.dependency 'libavif', '~> 0.6' diff --git a/SDWebImageAVIFCoder/Classes/ColorSpace.h b/SDWebImageAVIFCoder/Classes/Private/ColorSpace.h similarity index 100% rename from SDWebImageAVIFCoder/Classes/ColorSpace.h rename to SDWebImageAVIFCoder/Classes/Private/ColorSpace.h diff --git a/SDWebImageAVIFCoder/Classes/Conversion.h b/SDWebImageAVIFCoder/Classes/Private/Conversion.h similarity index 100% rename from SDWebImageAVIFCoder/Classes/Conversion.h rename to SDWebImageAVIFCoder/Classes/Private/Conversion.h diff --git a/SDWebImageAVIFCoder/Classes/SDImageAVIFCoder.h b/SDWebImageAVIFCoder/Classes/Public/SDImageAVIFCoder.h similarity index 100% rename from SDWebImageAVIFCoder/Classes/SDImageAVIFCoder.h rename to SDWebImageAVIFCoder/Classes/Public/SDImageAVIFCoder.h From 3d7d5f759b5d2f50cd24984e1cba37e2e85a68d3 Mon Sep 17 00:00:00 2001 From: psi Date: Sun, 15 Mar 2020 21:44:27 +0900 Subject: [PATCH 6/9] Make headers private or public in Carthage --- SDWebImageAVIFCoder.xcodeproj/project.pbxproj | 76 ++++++++++++++++--- 1 file changed, 66 insertions(+), 10 deletions(-) diff --git a/SDWebImageAVIFCoder.xcodeproj/project.pbxproj b/SDWebImageAVIFCoder.xcodeproj/project.pbxproj index 76be692..f0c8bc8 100644 --- a/SDWebImageAVIFCoder.xcodeproj/project.pbxproj +++ b/SDWebImageAVIFCoder.xcodeproj/project.pbxproj @@ -12,17 +12,33 @@ 3237D42422639480001D069D /* SDWebImageAVIFCoder.h in Headers */ = {isa = PBXBuildFile; fileRef = 3237D42122639480001D069D /* SDWebImageAVIFCoder.h */; settings = {ATTRIBUTES = (Public, ); }; }; 3237D42522639480001D069D /* SDWebImageAVIFCoder.h in Headers */ = {isa = PBXBuildFile; fileRef = 3237D42122639480001D069D /* SDWebImageAVIFCoder.h */; settings = {ATTRIBUTES = (Public, ); }; }; 3237D42622639480001D069D /* SDWebImageAVIFCoder.h in Headers */ = {isa = PBXBuildFile; fileRef = 3237D42122639480001D069D /* SDWebImageAVIFCoder.h */; settings = {ATTRIBUTES = (Public, ); }; }; - 3237D427226394D1001D069D /* SDImageAVIFCoder.h in Headers */ = {isa = PBXBuildFile; fileRef = 32C2B19C22638A7100EA889C /* SDImageAVIFCoder.h */; settings = {ATTRIBUTES = (Public, ); }; }; 3237D428226394D1001D069D /* SDImageAVIFCoder.m in Sources */ = {isa = PBXBuildFile; fileRef = 32C2B19A22638A7100EA889C /* SDImageAVIFCoder.m */; }; - 3237D429226394D1001D069D /* SDImageAVIFCoder.h in Headers */ = {isa = PBXBuildFile; fileRef = 32C2B19C22638A7100EA889C /* SDImageAVIFCoder.h */; settings = {ATTRIBUTES = (Public, ); }; }; 3237D42A226394D1001D069D /* SDImageAVIFCoder.m in Sources */ = {isa = PBXBuildFile; fileRef = 32C2B19A22638A7100EA889C /* SDImageAVIFCoder.m */; }; - 3237D42B226394D2001D069D /* SDImageAVIFCoder.h in Headers */ = {isa = PBXBuildFile; fileRef = 32C2B19C22638A7100EA889C /* SDImageAVIFCoder.h */; settings = {ATTRIBUTES = (Public, ); }; }; 3237D42C226394D2001D069D /* SDImageAVIFCoder.m in Sources */ = {isa = PBXBuildFile; fileRef = 32C2B19A22638A7100EA889C /* SDImageAVIFCoder.m */; }; 3237D42E22639517001D069D /* libavif.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 3237D42D22639516001D069D /* libavif.framework */; }; 3237D43022639525001D069D /* libavif.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 3237D42F22639525001D069D /* libavif.framework */; }; 
3237D43222639530001D069D /* libavif.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 3237D43122639530001D069D /* libavif.framework */; }; 32C2B19D22638A7100EA889C /* SDImageAVIFCoder.m in Sources */ = {isa = PBXBuildFile; fileRef = 32C2B19A22638A7100EA889C /* SDImageAVIFCoder.m */; }; - 32C2B19F22638A7100EA889C /* SDImageAVIFCoder.h in Headers */ = {isa = PBXBuildFile; fileRef = 32C2B19C22638A7100EA889C /* SDImageAVIFCoder.h */; settings = {ATTRIBUTES = (Public, ); }; }; + 6DD0A1BB241E594E0066E0EE /* Conversion.h in Headers */ = {isa = PBXBuildFile; fileRef = 6DD0A1B5241E594E0066E0EE /* Conversion.h */; settings = {ATTRIBUTES = (Private, ); }; }; + 6DD0A1BC241E594E0066E0EE /* Conversion.h in Headers */ = {isa = PBXBuildFile; fileRef = 6DD0A1B5241E594E0066E0EE /* Conversion.h */; settings = {ATTRIBUTES = (Private, ); }; }; + 6DD0A1BD241E594E0066E0EE /* Conversion.h in Headers */ = {isa = PBXBuildFile; fileRef = 6DD0A1B5241E594E0066E0EE /* Conversion.h */; settings = {ATTRIBUTES = (Private, ); }; }; + 6DD0A1BE241E594E0066E0EE /* Conversion.h in Headers */ = {isa = PBXBuildFile; fileRef = 6DD0A1B5241E594E0066E0EE /* Conversion.h */; settings = {ATTRIBUTES = (Private, ); }; }; + 6DD0A1BF241E594E0066E0EE /* ColorSpace.h in Headers */ = {isa = PBXBuildFile; fileRef = 6DD0A1B6241E594E0066E0EE /* ColorSpace.h */; settings = {ATTRIBUTES = (Private, ); }; }; + 6DD0A1C0241E594E0066E0EE /* ColorSpace.h in Headers */ = {isa = PBXBuildFile; fileRef = 6DD0A1B6241E594E0066E0EE /* ColorSpace.h */; settings = {ATTRIBUTES = (Private, ); }; }; + 6DD0A1C1241E594E0066E0EE /* ColorSpace.h in Headers */ = {isa = PBXBuildFile; fileRef = 6DD0A1B6241E594E0066E0EE /* ColorSpace.h */; settings = {ATTRIBUTES = (Private, ); }; }; + 6DD0A1C2241E594E0066E0EE /* ColorSpace.h in Headers */ = {isa = PBXBuildFile; fileRef = 6DD0A1B6241E594E0066E0EE /* ColorSpace.h */; settings = {ATTRIBUTES = (Private, ); }; }; + 6DD0A1C3241E594E0066E0EE /* ColorSpace.m in Sources */ = {isa = PBXBuildFile; fileRef = 6DD0A1B7241E594E0066E0EE /* ColorSpace.m */; }; + 6DD0A1C4241E594E0066E0EE /* ColorSpace.m in Sources */ = {isa = PBXBuildFile; fileRef = 6DD0A1B7241E594E0066E0EE /* ColorSpace.m */; }; + 6DD0A1C5241E594E0066E0EE /* ColorSpace.m in Sources */ = {isa = PBXBuildFile; fileRef = 6DD0A1B7241E594E0066E0EE /* ColorSpace.m */; }; + 6DD0A1C6241E594E0066E0EE /* ColorSpace.m in Sources */ = {isa = PBXBuildFile; fileRef = 6DD0A1B7241E594E0066E0EE /* ColorSpace.m */; }; + 6DD0A1C7241E594E0066E0EE /* SDImageAVIFCoder.h in Headers */ = {isa = PBXBuildFile; fileRef = 6DD0A1B9241E594E0066E0EE /* SDImageAVIFCoder.h */; settings = {ATTRIBUTES = (Public, ); }; }; + 6DD0A1C8241E594E0066E0EE /* SDImageAVIFCoder.h in Headers */ = {isa = PBXBuildFile; fileRef = 6DD0A1B9241E594E0066E0EE /* SDImageAVIFCoder.h */; settings = {ATTRIBUTES = (Public, ); }; }; + 6DD0A1C9241E594E0066E0EE /* SDImageAVIFCoder.h in Headers */ = {isa = PBXBuildFile; fileRef = 6DD0A1B9241E594E0066E0EE /* SDImageAVIFCoder.h */; settings = {ATTRIBUTES = (Public, ); }; }; + 6DD0A1CA241E594E0066E0EE /* SDImageAVIFCoder.h in Headers */ = {isa = PBXBuildFile; fileRef = 6DD0A1B9241E594E0066E0EE /* SDImageAVIFCoder.h */; settings = {ATTRIBUTES = (Public, ); }; }; + 6DD0A1CB241E594E0066E0EE /* Conversion.m in Sources */ = {isa = PBXBuildFile; fileRef = 6DD0A1BA241E594E0066E0EE /* Conversion.m */; }; + 6DD0A1CC241E594E0066E0EE /* Conversion.m in Sources */ = {isa = PBXBuildFile; fileRef = 6DD0A1BA241E594E0066E0EE /* Conversion.m */; }; + 6DD0A1CD241E594E0066E0EE /* Conversion.m in 
Sources */ = {isa = PBXBuildFile; fileRef = 6DD0A1BA241E594E0066E0EE /* Conversion.m */; }; + 6DD0A1CE241E594E0066E0EE /* Conversion.m in Sources */ = {isa = PBXBuildFile; fileRef = 6DD0A1BA241E594E0066E0EE /* Conversion.m */; }; /* End PBXBuildFile section */ /* Begin PBXFileReference section */ @@ -38,7 +54,11 @@ 3237D43322639BA5001D069D /* Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = Info.plist; path = Module/Info.plist; sourceTree = ""; }; 32C2B18E2263879300EA889C /* SDWebImageAVIFCoder.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = SDWebImageAVIFCoder.framework; sourceTree = BUILT_PRODUCTS_DIR; }; 32C2B19A22638A7100EA889C /* SDImageAVIFCoder.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = SDImageAVIFCoder.m; sourceTree = ""; }; - 32C2B19C22638A7100EA889C /* SDImageAVIFCoder.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = SDImageAVIFCoder.h; sourceTree = ""; }; + 6DD0A1B5241E594E0066E0EE /* Conversion.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = Conversion.h; sourceTree = ""; }; + 6DD0A1B6241E594E0066E0EE /* ColorSpace.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ColorSpace.h; sourceTree = ""; }; + 6DD0A1B7241E594E0066E0EE /* ColorSpace.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ColorSpace.m; sourceTree = ""; }; + 6DD0A1B9241E594E0066E0EE /* SDImageAVIFCoder.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = SDImageAVIFCoder.h; sourceTree = ""; }; + 6DD0A1BA241E594E0066E0EE /* Conversion.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = Conversion.m; sourceTree = ""; }; /* End PBXFileReference section */ /* Begin PBXFrameworksBuildPhase section */ @@ -122,12 +142,32 @@ 32C2B19922638A7100EA889C /* Classes */ = { isa = PBXGroup; children = ( - 32C2B19C22638A7100EA889C /* SDImageAVIFCoder.h */, + 6DD0A1CF241E5A0E0066E0EE /* Public */, + 6DD0A1B4241E594E0066E0EE /* Private */, + 6DD0A1B7241E594E0066E0EE /* ColorSpace.m */, + 6DD0A1BA241E594E0066E0EE /* Conversion.m */, 32C2B19A22638A7100EA889C /* SDImageAVIFCoder.m */, ); path = Classes; sourceTree = ""; }; + 6DD0A1B4241E594E0066E0EE /* Private */ = { + isa = PBXGroup; + children = ( + 6DD0A1B5241E594E0066E0EE /* Conversion.h */, + 6DD0A1B6241E594E0066E0EE /* ColorSpace.h */, + ); + path = Private; + sourceTree = ""; + }; + 6DD0A1CF241E5A0E0066E0EE /* Public */ = { + isa = PBXGroup; + children = ( + 6DD0A1B9241E594E0066E0EE /* SDImageAVIFCoder.h */, + ); + path = Public; + sourceTree = ""; + }; /* End PBXGroup section */ /* Begin PBXHeadersBuildPhase section */ @@ -136,7 +176,9 @@ buildActionMask = 2147483647; files = ( 3237D42422639480001D069D /* SDWebImageAVIFCoder.h in Headers */, - 3237D427226394D1001D069D /* SDImageAVIFCoder.h in Headers */, + 6DD0A1C0241E594E0066E0EE /* ColorSpace.h in Headers */, + 6DD0A1C8241E594E0066E0EE /* SDImageAVIFCoder.h in Headers */, + 6DD0A1BC241E594E0066E0EE /* Conversion.h in Headers */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -145,7 +187,9 @@ buildActionMask = 2147483647; files = ( 3237D42522639480001D069D /* SDWebImageAVIFCoder.h in Headers */, - 3237D429226394D1001D069D /* SDImageAVIFCoder.h in Headers */, + 6DD0A1C1241E594E0066E0EE /* ColorSpace.h in 
Headers */, + 6DD0A1C9241E594E0066E0EE /* SDImageAVIFCoder.h in Headers */, + 6DD0A1BD241E594E0066E0EE /* Conversion.h in Headers */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -154,7 +198,9 @@ buildActionMask = 2147483647; files = ( 3237D42622639480001D069D /* SDWebImageAVIFCoder.h in Headers */, - 3237D42B226394D2001D069D /* SDImageAVIFCoder.h in Headers */, + 6DD0A1C2241E594E0066E0EE /* ColorSpace.h in Headers */, + 6DD0A1CA241E594E0066E0EE /* SDImageAVIFCoder.h in Headers */, + 6DD0A1BE241E594E0066E0EE /* Conversion.h in Headers */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -163,7 +209,9 @@ buildActionMask = 2147483647; files = ( 3237D42322639480001D069D /* SDWebImageAVIFCoder.h in Headers */, - 32C2B19F22638A7100EA889C /* SDImageAVIFCoder.h in Headers */, + 6DD0A1BF241E594E0066E0EE /* ColorSpace.h in Headers */, + 6DD0A1C7241E594E0066E0EE /* SDImageAVIFCoder.h in Headers */, + 6DD0A1BB241E594E0066E0EE /* Conversion.h in Headers */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -321,6 +369,8 @@ isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( + 6DD0A1CC241E594E0066E0EE /* Conversion.m in Sources */, + 6DD0A1C4241E594E0066E0EE /* ColorSpace.m in Sources */, 3237D428226394D1001D069D /* SDImageAVIFCoder.m in Sources */, ); runOnlyForDeploymentPostprocessing = 0; @@ -329,6 +379,8 @@ isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( + 6DD0A1CD241E594E0066E0EE /* Conversion.m in Sources */, + 6DD0A1C5241E594E0066E0EE /* ColorSpace.m in Sources */, 3237D42A226394D1001D069D /* SDImageAVIFCoder.m in Sources */, ); runOnlyForDeploymentPostprocessing = 0; @@ -337,6 +389,8 @@ isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( + 6DD0A1CE241E594E0066E0EE /* Conversion.m in Sources */, + 6DD0A1C6241E594E0066E0EE /* ColorSpace.m in Sources */, 3237D42C226394D2001D069D /* SDImageAVIFCoder.m in Sources */, ); runOnlyForDeploymentPostprocessing = 0; @@ -345,6 +399,8 @@ isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( + 6DD0A1CB241E594E0066E0EE /* Conversion.m in Sources */, + 6DD0A1C3241E594E0066E0EE /* ColorSpace.m in Sources */, 32C2B19D22638A7100EA889C /* SDImageAVIFCoder.m in Sources */, ); runOnlyForDeploymentPostprocessing = 0; From 882f2c8b3fd33c5b6d1b7d56401b4ddc5a8229d5 Mon Sep 17 00:00:00 2001 From: psi Date: Sun, 15 Mar 2020 21:45:47 +0900 Subject: [PATCH 7/9] upgrade Package.resolved --- Package.resolved | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Package.resolved b/Package.resolved index f54d12f..64cb719 100644 --- a/Package.resolved +++ b/Package.resolved @@ -15,8 +15,8 @@ "repositoryURL": "https://github.com/SDWebImage/libavif-Xcode.git", "state": { "branch": null, - "revision": "d41b384e5465f8e705af3613b671dbe4750007e6", - "version": "0.5.3" + "revision": "9eac0474b8a1f69332865c3a85c101e953cc3a59", + "version": "0.6.0" } }, { @@ -24,8 +24,8 @@ "repositoryURL": "https://github.com/SDWebImage/SDWebImage.git", "state": { "branch": null, - "revision": "443bf50b58c0161efe7a6da841524dde1815792d", - "version": "5.5.1" + "revision": "2dcf1b65994d92ef8806f52dc41aa6fdd677489c", + "version": "5.6.1" } } ] From dba7a0d89868a3bf7f7baa03b8bc01c5c758841b Mon Sep 17 00:00:00 2001 From: psi Date: Tue, 17 Mar 2020 17:45:13 +0900 Subject: [PATCH 8/9] Rename functions and make them visible to tests. 
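
The Conversion and ColorSpace helpers gain an SD prefix and hidden visibility so they stay out of the framework's exported symbols, while the example test target gains a private header search path so the unit tests can call them directly.

A minimal sketch of a test call site inside Example/Tests/Tests.m after this patch (the import path is an assumption about where CocoaPods copies the private pod headers; the value 1 for the primaries argument is BT.709 per ITU-T H.273):

    #import "ColorSpace.h" // resolved via HEADER_SEARCH_PATHS = "${PODS_ROOT}/Headers/Private" (assumed layout)

    CGColorSpaceRef space = SDCreateAVIFCreateColorSpaceRGB(1 /* BT.709 primaries */,
                                                            AVIF_NCLX_TRANSFER_CHARACTERISTICS_SRGB);
    XCTAssertTrue(CGColorSpaceSupportsOutput(space));
    CGColorSpaceRelease(space);
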
--- .../project.pbxproj | 3 +++ Example/Tests/Tests.m | 12 +++++----- SDWebImageAVIFCoder/Classes/ColorSpace.m | 22 +++++++++---------- SDWebImageAVIFCoder/Classes/Conversion.m | 6 ++--- .../Classes/Private/ColorSpace.h | 10 ++++----- .../Classes/Private/Conversion.h | 4 ++-- .../Classes/SDImageAVIFCoder.m | 2 +- .../Module/SDWebImageAVIFCoder.h | 1 - 8 files changed, 31 insertions(+), 29 deletions(-) diff --git a/Example/SDWebImageAVIFCoder.xcodeproj/project.pbxproj b/Example/SDWebImageAVIFCoder.xcodeproj/project.pbxproj index 872c16f..cfdfa4c 100644 --- a/Example/SDWebImageAVIFCoder.xcodeproj/project.pbxproj +++ b/Example/SDWebImageAVIFCoder.xcodeproj/project.pbxproj @@ -8052,6 +8052,7 @@ INFOPLIST_FILE = "Tests/Tests-Info.plist"; PRODUCT_BUNDLE_IDENTIFIER = "org.cocoapods.demo.${PRODUCT_NAME:rfc1034identifier}"; PRODUCT_NAME = "$(TARGET_NAME)"; + HEADER_SEARCH_PATHS = "${PODS_ROOT}/Headers/Private"; TEST_HOST = "$(BUILT_PRODUCTS_DIR)/SDWebImageAVIFCoder_Example.app/SDWebImageAVIFCoder_Example"; WRAPPER_EXTENSION = xctest; }; @@ -8072,7 +8073,9 @@ INFOPLIST_FILE = "Tests/Tests-Info.plist"; PRODUCT_BUNDLE_IDENTIFIER = "org.cocoapods.demo.${PRODUCT_NAME:rfc1034identifier}"; PRODUCT_NAME = "$(TARGET_NAME)"; + SYSTEM_HEADER_SEARCH_PATHS = "${PODS_ROOT}/Headers/Private"; TEST_HOST = "$(BUILT_PRODUCTS_DIR)/SDWebImageAVIFCoder_Example.app/SDWebImageAVIFCoder_Example"; + USER_HEADER_SEARCH_PATHS = ""; WRAPPER_EXTENSION = xctest; }; name = Release; diff --git a/Example/Tests/Tests.m b/Example/Tests/Tests.m index 9f181ae..5650fc1 100644 --- a/Example/Tests/Tests.m +++ b/Example/Tests/Tests.m @@ -164,11 +164,11 @@ -(void)testAllColorSpaceSupportsOutput for(avifNclxTransferCharacteristics transfer = 0; transfer < kNumTransfers; ++transfer) { CGColorSpaceRef space = NULL; - space = CreateColorSpaceRGB(primaries, transfer); + space = SDCreateAVIFCreateColorSpaceRGB(primaries, transfer); XCTAssertTrue(CGColorSpaceSupportsOutput(space)); CGColorSpaceRelease(space); - space = CreateColorSpaceMono(primaries, transfer); + space = SDCreateAVIFCreateColorSpaceMono(primaries, transfer); XCTAssertTrue(CGColorSpaceSupportsOutput(space)); CGColorSpaceRelease(space); } @@ -190,7 +190,7 @@ -(void)testCalcNCLXColorSpaceFromAVIFImage CGColorSpaceRef space = NULL; BOOL shouldRelease = FALSE; - CalcColorSpaceRGB(img, &space, &shouldRelease); + SDCalcColorSpaceRGBAVIF(img, &space, &shouldRelease); XCTAssertTrue(CGColorSpaceSupportsOutput(space)); if(shouldRelease) { CGColorSpaceRelease(space); @@ -204,7 +204,7 @@ -(void)testCalcNCLXColorSpaceFromAVIFImage img->yuvPlanes[AVIF_CHAN_V] = NULL; img->yuvRowBytes[AVIF_CHAN_V] = 0; - CalcColorSpaceMono(img, &space, &shouldRelease); + SDCalcColorSpaceMonoAVIF(img, &space, &shouldRelease); XCTAssertTrue(CGColorSpaceSupportsOutput(space)); if(shouldRelease) { CGColorSpaceRelease(space); @@ -227,7 +227,7 @@ -(void)testCalcICCColorSpaceFromAVIFImage CGColorSpaceRef space = NULL; BOOL shouldRelease = FALSE; - CalcColorSpaceRGB(img, &space, &shouldRelease); + SDCalcColorSpaceRGBAVIF(img, &space, &shouldRelease); XCTAssertTrue(CGColorSpaceSupportsOutput(space)); if(shouldRelease) { CGColorSpaceRelease(space); @@ -241,7 +241,7 @@ -(void)testCalcICCColorSpaceFromAVIFImage img->yuvPlanes[AVIF_CHAN_V] = NULL; img->yuvRowBytes[AVIF_CHAN_V] = 0; - CalcColorSpaceMono(img, &space, &shouldRelease); + SDCalcColorSpaceMonoAVIF(img, &space, &shouldRelease); XCTAssertTrue(CGColorSpaceSupportsOutput(space)); if(shouldRelease) { CGColorSpaceRelease(space); diff --git 
a/SDWebImageAVIFCoder/Classes/ColorSpace.m b/SDWebImageAVIFCoder/Classes/ColorSpace.m index 016aa00..2072c02 100644 --- a/SDWebImageAVIFCoder/Classes/ColorSpace.m +++ b/SDWebImageAVIFCoder/Classes/ColorSpace.m @@ -2,7 +2,7 @@ // ColorSpace.m // SDWebImageAVIFCoder // -// Created by psi on 2020/03/15. +// Created by Ryo Hirafuji on 2020/03/15. // #import "SDImageAVIFCoder.h" @@ -161,7 +161,7 @@ static void CalcTransferFunction(uint16_t const transferCharacteristics, vImageT break; } } -CGColorSpaceRef CreateColorSpaceMono(avifNclxColourPrimaries const colorPrimaries, avifNclxTransferCharacteristics const transferCharacteristics) { +CGColorSpaceRef SDCreateAVIFCreateColorSpaceMono(avifNclxColourPrimaries const colorPrimaries, avifNclxTransferCharacteristics const transferCharacteristics) { if (@available(macOS 10.10, iOS 8.0, tvOS 8.0, *)) { vImage_Error err; vImageWhitePoint white; @@ -182,7 +182,7 @@ CGColorSpaceRef CreateColorSpaceMono(avifNclxColourPrimaries const colorPrimarie } } -CGColorSpaceRef CreateColorSpaceRGB(avifNclxColourPrimaries const colorPrimaries, avifNclxTransferCharacteristics const transferCharacteristics) { +CGColorSpaceRef SDCreateAVIFCreateColorSpaceRGB(avifNclxColourPrimaries const colorPrimaries, avifNclxTransferCharacteristics const transferCharacteristics) { if (@available(macOS 10.10, iOS 8.0, tvOS 8.0, *)) { vImage_Error err; vImageRGBPrimaries primaries; @@ -203,7 +203,7 @@ CGColorSpaceRef CreateColorSpaceRGB(avifNclxColourPrimaries const colorPrimaries } } -void CalcColorSpaceMono(avifImage * avif, CGColorSpaceRef* ref, BOOL* shouldRelease) { +void SDCalcColorSpaceMonoAVIF(avifImage * avif, CGColorSpaceRef* ref, BOOL* shouldRelease) { static CGColorSpaceRef defaultColorSpace; { static dispatch_once_t onceToken; @@ -249,7 +249,7 @@ void CalcColorSpaceMono(avifImage * avif, CGColorSpaceRef* ref, BOOL* shouldRele static CGColorSpaceRef sRGB = NULL; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ - sRGB = CreateColorSpaceMono(colorPrimaries, transferCharacteristics); + sRGB = SDCreateAVIFCreateColorSpaceMono(colorPrimaries, transferCharacteristics); if(sRGB == NULL) { sRGB = defaultColorSpace; } @@ -263,7 +263,7 @@ void CalcColorSpaceMono(avifImage * avif, CGColorSpaceRef* ref, BOOL* shouldRele static CGColorSpaceRef bt709 = NULL; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ - bt709 = CreateColorSpaceMono(colorPrimaries, transferCharacteristics); + bt709 = SDCreateAVIFCreateColorSpaceMono(colorPrimaries, transferCharacteristics); if(bt709 == NULL) { bt709 = defaultColorSpace; } @@ -278,7 +278,7 @@ void CalcColorSpaceMono(avifImage * avif, CGColorSpaceRef* ref, BOOL* shouldRele static CGColorSpaceRef bt2020 = NULL; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ - bt2020 = CreateColorSpaceMono(colorPrimaries, transferCharacteristics); + bt2020 = SDCreateAVIFCreateColorSpaceMono(colorPrimaries, transferCharacteristics); if(bt2020 == NULL) { bt2020 = defaultColorSpace; } @@ -292,7 +292,7 @@ void CalcColorSpaceMono(avifImage * avif, CGColorSpaceRef* ref, BOOL* shouldRele static CGColorSpaceRef p3 = NULL; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ - p3 = CreateColorSpaceMono(colorPrimaries, transferCharacteristics); + p3 = SDCreateAVIFCreateColorSpaceMono(colorPrimaries, transferCharacteristics); if(p3 == NULL) { p3 = defaultColorSpace; } @@ -302,7 +302,7 @@ void CalcColorSpaceMono(avifImage * avif, CGColorSpaceRef* ref, BOOL* shouldRele return; } - *ref = CreateColorSpaceMono(colorPrimaries, 
transferCharacteristics); + *ref = SDCreateAVIFCreateColorSpaceMono(colorPrimaries, transferCharacteristics); if(*ref != NULL) { *shouldRelease = TRUE; } else { @@ -311,7 +311,7 @@ void CalcColorSpaceMono(avifImage * avif, CGColorSpaceRef* ref, BOOL* shouldRele } } -void CalcColorSpaceRGB(avifImage * avif, CGColorSpaceRef* ref, BOOL* shouldRelease) { +void SDCalcColorSpaceRGBAVIF(avifImage * avif, CGColorSpaceRef* ref, BOOL* shouldRelease) { static CGColorSpaceRef defaultColorSpace = NULL; { static dispatch_once_t onceToken; @@ -520,7 +520,7 @@ void CalcColorSpaceRGB(avifImage * avif, CGColorSpaceRef* ref, BOOL* shouldRelea return; } - *ref = CreateColorSpaceRGB(colorPrimaries, transferCharacteristics); + *ref = SDCreateAVIFCreateColorSpaceRGB(colorPrimaries, transferCharacteristics); if(*ref != NULL) { *shouldRelease = TRUE; } else { diff --git a/SDWebImageAVIFCoder/Classes/Conversion.m b/SDWebImageAVIFCoder/Classes/Conversion.m index ff431c8..adbc2a1 100644 --- a/SDWebImageAVIFCoder/Classes/Conversion.m +++ b/SDWebImageAVIFCoder/Classes/Conversion.m @@ -32,9 +32,9 @@ static CGImageRef CreateImageFromBuffer(avifImage * avif, vImage_Buffer* result) CGColorSpaceRef colorSpace = NULL; BOOL shouldReleaseColorSpace = FALSE; if(monochrome){ - CalcColorSpaceMono(avif, &colorSpace, &shouldReleaseColorSpace); + SDCalcColorSpaceMonoAVIF(avif, &colorSpace, &shouldReleaseColorSpace); }else{ - CalcColorSpaceRGB(avif, &colorSpace, &shouldReleaseColorSpace); + SDCalcColorSpaceRGBAVIF(avif, &colorSpace, &shouldReleaseColorSpace); } CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault; @@ -1058,6 +1058,6 @@ static CGImageRef CreateCGImage16U(avifImage * avif) { } // convert planar to ARGB/RGB -CGImageRef CreateCGImageFromAVIF(avifImage * avif) { +CGImageRef SDCreateCGImageFromAVIF(avifImage * avif) { return avifImageUsesU16(avif) ? CreateCGImage16U(avif) : CreateCGImage8(avif); } diff --git a/SDWebImageAVIFCoder/Classes/Private/ColorSpace.h b/SDWebImageAVIFCoder/Classes/Private/ColorSpace.h index 61295a1..b5af2c3 100644 --- a/SDWebImageAVIFCoder/Classes/Private/ColorSpace.h +++ b/SDWebImageAVIFCoder/Classes/Private/ColorSpace.h @@ -2,7 +2,7 @@ // ColorSpace.h // SDWebImageAVIFCoder // -// Created by psi on 2020/03/15. +// Created by Ryo Hirafuji on 2020/03/15. 
// #pragma once @@ -12,8 +12,8 @@ #import "avif/avif.h" #endif -CGColorSpaceRef CreateColorSpaceMono(avifNclxColourPrimaries const colorPrimaries, avifNclxTransferCharacteristics const transferCharacteristics); -CGColorSpaceRef CreateColorSpaceRGB(avifNclxColourPrimaries const colorPrimaries, avifNclxTransferCharacteristics const transferCharacteristics); +extern CGColorSpaceRef _Nullable SDCreateAVIFCreateColorSpaceMono(avifNclxColourPrimaries const colorPrimaries, avifNclxTransferCharacteristics const transferCharacteristics) __attribute__((visibility("hidden"))); +extern CGColorSpaceRef _Nullable SDCreateAVIFCreateColorSpaceRGB(avifNclxColourPrimaries const colorPrimaries, avifNclxTransferCharacteristics const transferCharacteristics) __attribute__((visibility("hidden"))); -void CalcColorSpaceMono(avifImage * avif, CGColorSpaceRef* ref, BOOL* shouldRelease); -void CalcColorSpaceRGB(avifImage * avif, CGColorSpaceRef* ref, BOOL* shouldRelease); +void SDCalcColorSpaceMonoAVIF(avifImage * _Nonnull avif, CGColorSpaceRef _Nullable * _Nonnull ref, BOOL* _Nonnull shouldRelease); +void SDCalcColorSpaceRGBAVIF(avifImage * _Nonnull avif, CGColorSpaceRef _Nullable * _Nonnull ref, BOOL* _Nonnull shouldRelease); diff --git a/SDWebImageAVIFCoder/Classes/Private/Conversion.h b/SDWebImageAVIFCoder/Classes/Private/Conversion.h index c44805b..8f26867 100644 --- a/SDWebImageAVIFCoder/Classes/Private/Conversion.h +++ b/SDWebImageAVIFCoder/Classes/Private/Conversion.h @@ -2,7 +2,7 @@ // Conversion.h // SDWebImageAVIFCoder // -// Created by psi on 2020/03/15. +// Created by Ryo Hirafuji on 2020/03/15. // #pragma once @@ -12,4 +12,4 @@ #import "avif/avif.h" #endif -CGImageRef CreateCGImageFromAVIF(avifImage * avif); +extern CGImageRef _Nullable SDCreateCGImageFromAVIF(avifImage * _Nonnull avif) __attribute__((visibility("hidden"))); diff --git a/SDWebImageAVIFCoder/Classes/SDImageAVIFCoder.m b/SDWebImageAVIFCoder/Classes/SDImageAVIFCoder.m index 36ead8a..95e9413 100644 --- a/SDWebImageAVIFCoder/Classes/SDImageAVIFCoder.m +++ b/SDWebImageAVIFCoder/Classes/SDImageAVIFCoder.m @@ -77,7 +77,7 @@ - (nullable CGImageRef)sd_createAVIFImageWithData:(nonnull NSData *)data CF_RETU avifDecoderDestroy(decoder); return nil; } - CGImageRef const image = CreateCGImageFromAVIF(decoder->image); + CGImageRef const image = SDCreateCGImageFromAVIF(decoder->image); avifDecoderDestroy(decoder); return image; } diff --git a/SDWebImageAVIFCoder/Module/SDWebImageAVIFCoder.h b/SDWebImageAVIFCoder/Module/SDWebImageAVIFCoder.h index 2b916bf..2b4271d 100644 --- a/SDWebImageAVIFCoder/Module/SDWebImageAVIFCoder.h +++ b/SDWebImageAVIFCoder/Module/SDWebImageAVIFCoder.h @@ -13,4 +13,3 @@ FOUNDATION_EXPORT double SDWebImageAVIFCoderVersionNumber; FOUNDATION_EXPORT const unsigned char SDWebImageAVIFCoderVersionString[]; - From 27372a5e2a1b88289ea8bd5ab0ec7efd05dde2de Mon Sep 17 00:00:00 2001 From: psi Date: Tue, 17 Mar 2020 18:14:58 +0900 Subject: [PATCH 9/9] Rename functions, because there are two "Create"! 
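
The previous prefix produced names like SDCreateAVIFCreateColorSpaceRGB, which read as two "Create"s. This patch settles on an SDAVIF prefix with the verb appearing once:

    SDCreateAVIFCreateColorSpaceMono -> SDAVIFCreateColorSpaceMono
    SDCreateAVIFCreateColorSpaceRGB  -> SDAVIFCreateColorSpaceRGB
    SDCalcColorSpaceMonoAVIF         -> SDAVIFCalcColorSpaceMono
    SDCalcColorSpaceRGBAVIF          -> SDAVIFCalcColorSpaceRGB

A single "Create" also keeps the Core Foundation naming convention readable: the Create functions return a +1 CGColorSpaceRef that the caller releases, while the Calc functions report through shouldRelease whether the returned color space is a cached singleton or a freshly created object.
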
--- Example/Tests/Tests.m | 12 +++++------ SDWebImageAVIFCoder/Classes/ColorSpace.m | 20 +++++++++---------- SDWebImageAVIFCoder/Classes/Conversion.m | 4 ++-- .../Classes/Private/ColorSpace.h | 8 ++++---- 4 files changed, 22 insertions(+), 22 deletions(-) diff --git a/Example/Tests/Tests.m b/Example/Tests/Tests.m index 5650fc1..6c4da30 100644 --- a/Example/Tests/Tests.m +++ b/Example/Tests/Tests.m @@ -164,11 +164,11 @@ -(void)testAllColorSpaceSupportsOutput for(avifNclxTransferCharacteristics transfer = 0; transfer < kNumTransfers; ++transfer) { CGColorSpaceRef space = NULL; - space = SDCreateAVIFCreateColorSpaceRGB(primaries, transfer); + space = SDAVIFCreateColorSpaceRGB(primaries, transfer); XCTAssertTrue(CGColorSpaceSupportsOutput(space)); CGColorSpaceRelease(space); - space = SDCreateAVIFCreateColorSpaceMono(primaries, transfer); + space = SDAVIFCreateColorSpaceMono(primaries, transfer); XCTAssertTrue(CGColorSpaceSupportsOutput(space)); CGColorSpaceRelease(space); } @@ -190,7 +190,7 @@ -(void)testCalcNCLXColorSpaceFromAVIFImage CGColorSpaceRef space = NULL; BOOL shouldRelease = FALSE; - SDCalcColorSpaceRGBAVIF(img, &space, &shouldRelease); + SDAVIFCalcColorSpaceRGB(img, &space, &shouldRelease); XCTAssertTrue(CGColorSpaceSupportsOutput(space)); if(shouldRelease) { CGColorSpaceRelease(space); @@ -204,7 +204,7 @@ -(void)testCalcNCLXColorSpaceFromAVIFImage img->yuvPlanes[AVIF_CHAN_V] = NULL; img->yuvRowBytes[AVIF_CHAN_V] = 0; - SDCalcColorSpaceMonoAVIF(img, &space, &shouldRelease); + SDAVIFCalcColorSpaceMono(img, &space, &shouldRelease); XCTAssertTrue(CGColorSpaceSupportsOutput(space)); if(shouldRelease) { CGColorSpaceRelease(space); @@ -227,7 +227,7 @@ -(void)testCalcICCColorSpaceFromAVIFImage CGColorSpaceRef space = NULL; BOOL shouldRelease = FALSE; - SDCalcColorSpaceRGBAVIF(img, &space, &shouldRelease); + SDAVIFCalcColorSpaceRGB(img, &space, &shouldRelease); XCTAssertTrue(CGColorSpaceSupportsOutput(space)); if(shouldRelease) { CGColorSpaceRelease(space); @@ -241,7 +241,7 @@ -(void)testCalcICCColorSpaceFromAVIFImage img->yuvPlanes[AVIF_CHAN_V] = NULL; img->yuvRowBytes[AVIF_CHAN_V] = 0; - SDCalcColorSpaceMonoAVIF(img, &space, &shouldRelease); + SDAVIFCalcColorSpaceMono(img, &space, &shouldRelease); XCTAssertTrue(CGColorSpaceSupportsOutput(space)); if(shouldRelease) { CGColorSpaceRelease(space); diff --git a/SDWebImageAVIFCoder/Classes/ColorSpace.m b/SDWebImageAVIFCoder/Classes/ColorSpace.m index 2072c02..ff5dc57 100644 --- a/SDWebImageAVIFCoder/Classes/ColorSpace.m +++ b/SDWebImageAVIFCoder/Classes/ColorSpace.m @@ -161,7 +161,7 @@ static void CalcTransferFunction(uint16_t const transferCharacteristics, vImageT break; } } -CGColorSpaceRef SDCreateAVIFCreateColorSpaceMono(avifNclxColourPrimaries const colorPrimaries, avifNclxTransferCharacteristics const transferCharacteristics) { +CGColorSpaceRef SDAVIFCreateColorSpaceMono(avifNclxColourPrimaries const colorPrimaries, avifNclxTransferCharacteristics const transferCharacteristics) { if (@available(macOS 10.10, iOS 8.0, tvOS 8.0, *)) { vImage_Error err; vImageWhitePoint white; @@ -182,7 +182,7 @@ CGColorSpaceRef SDCreateAVIFCreateColorSpaceMono(avifNclxColourPrimaries const c } } -CGColorSpaceRef SDCreateAVIFCreateColorSpaceRGB(avifNclxColourPrimaries const colorPrimaries, avifNclxTransferCharacteristics const transferCharacteristics) { +CGColorSpaceRef SDAVIFCreateColorSpaceRGB(avifNclxColourPrimaries const colorPrimaries, avifNclxTransferCharacteristics const transferCharacteristics) { if (@available(macOS 10.10, iOS 8.0, tvOS 8.0, *)) { 
vImage_Error err; vImageRGBPrimaries primaries; @@ -203,7 +203,7 @@ CGColorSpaceRef SDCreateAVIFCreateColorSpaceRGB(avifNclxColourPrimaries const co } } -void SDCalcColorSpaceMonoAVIF(avifImage * avif, CGColorSpaceRef* ref, BOOL* shouldRelease) { +void SDAVIFCalcColorSpaceMono(avifImage * avif, CGColorSpaceRef* ref, BOOL* shouldRelease) { static CGColorSpaceRef defaultColorSpace; { static dispatch_once_t onceToken; @@ -249,7 +249,7 @@ void SDCalcColorSpaceMonoAVIF(avifImage * avif, CGColorSpaceRef* ref, BOOL* shou static CGColorSpaceRef sRGB = NULL; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ - sRGB = SDCreateAVIFCreateColorSpaceMono(colorPrimaries, transferCharacteristics); + sRGB = SDAVIFCreateColorSpaceMono(colorPrimaries, transferCharacteristics); if(sRGB == NULL) { sRGB = defaultColorSpace; } @@ -263,7 +263,7 @@ void SDCalcColorSpaceMonoAVIF(avifImage * avif, CGColorSpaceRef* ref, BOOL* shou static CGColorSpaceRef bt709 = NULL; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ - bt709 = SDCreateAVIFCreateColorSpaceMono(colorPrimaries, transferCharacteristics); + bt709 = SDAVIFCreateColorSpaceMono(colorPrimaries, transferCharacteristics); if(bt709 == NULL) { bt709 = defaultColorSpace; } @@ -278,7 +278,7 @@ void SDCalcColorSpaceMonoAVIF(avifImage * avif, CGColorSpaceRef* ref, BOOL* shou static CGColorSpaceRef bt2020 = NULL; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ - bt2020 = SDCreateAVIFCreateColorSpaceMono(colorPrimaries, transferCharacteristics); + bt2020 = SDAVIFCreateColorSpaceMono(colorPrimaries, transferCharacteristics); if(bt2020 == NULL) { bt2020 = defaultColorSpace; } @@ -292,7 +292,7 @@ void SDCalcColorSpaceMonoAVIF(avifImage * avif, CGColorSpaceRef* ref, BOOL* shou static CGColorSpaceRef p3 = NULL; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ - p3 = SDCreateAVIFCreateColorSpaceMono(colorPrimaries, transferCharacteristics); + p3 = SDAVIFCreateColorSpaceMono(colorPrimaries, transferCharacteristics); if(p3 == NULL) { p3 = defaultColorSpace; } @@ -302,7 +302,7 @@ void SDCalcColorSpaceMonoAVIF(avifImage * avif, CGColorSpaceRef* ref, BOOL* shou return; } - *ref = SDCreateAVIFCreateColorSpaceMono(colorPrimaries, transferCharacteristics); + *ref = SDAVIFCreateColorSpaceMono(colorPrimaries, transferCharacteristics); if(*ref != NULL) { *shouldRelease = TRUE; } else { @@ -311,7 +311,7 @@ void SDCalcColorSpaceMonoAVIF(avifImage * avif, CGColorSpaceRef* ref, BOOL* shou } } -void SDCalcColorSpaceRGBAVIF(avifImage * avif, CGColorSpaceRef* ref, BOOL* shouldRelease) { +void SDAVIFCalcColorSpaceRGB(avifImage * avif, CGColorSpaceRef* ref, BOOL* shouldRelease) { static CGColorSpaceRef defaultColorSpace = NULL; { static dispatch_once_t onceToken; @@ -520,7 +520,7 @@ void SDCalcColorSpaceRGBAVIF(avifImage * avif, CGColorSpaceRef* ref, BOOL* shoul return; } - *ref = SDCreateAVIFCreateColorSpaceRGB(colorPrimaries, transferCharacteristics); + *ref = SDAVIFCreateColorSpaceRGB(colorPrimaries, transferCharacteristics); if(*ref != NULL) { *shouldRelease = TRUE; } else { diff --git a/SDWebImageAVIFCoder/Classes/Conversion.m b/SDWebImageAVIFCoder/Classes/Conversion.m index adbc2a1..b43835b 100644 --- a/SDWebImageAVIFCoder/Classes/Conversion.m +++ b/SDWebImageAVIFCoder/Classes/Conversion.m @@ -32,9 +32,9 @@ static CGImageRef CreateImageFromBuffer(avifImage * avif, vImage_Buffer* result) CGColorSpaceRef colorSpace = NULL; BOOL shouldReleaseColorSpace = FALSE; if(monochrome){ - SDCalcColorSpaceMonoAVIF(avif, &colorSpace, 
&shouldReleaseColorSpace); + SDAVIFCalcColorSpaceMono(avif, &colorSpace, &shouldReleaseColorSpace); }else{ - SDCalcColorSpaceRGBAVIF(avif, &colorSpace, &shouldReleaseColorSpace); + SDAVIFCalcColorSpaceRGB(avif, &colorSpace, &shouldReleaseColorSpace); } CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault; diff --git a/SDWebImageAVIFCoder/Classes/Private/ColorSpace.h b/SDWebImageAVIFCoder/Classes/Private/ColorSpace.h index b5af2c3..b6c4a30 100644 --- a/SDWebImageAVIFCoder/Classes/Private/ColorSpace.h +++ b/SDWebImageAVIFCoder/Classes/Private/ColorSpace.h @@ -12,8 +12,8 @@ #import "avif/avif.h" #endif -extern CGColorSpaceRef _Nullable SDCreateAVIFCreateColorSpaceMono(avifNclxColourPrimaries const colorPrimaries, avifNclxTransferCharacteristics const transferCharacteristics) __attribute__((visibility("hidden"))); -extern CGColorSpaceRef _Nullable SDCreateAVIFCreateColorSpaceRGB(avifNclxColourPrimaries const colorPrimaries, avifNclxTransferCharacteristics const transferCharacteristics) __attribute__((visibility("hidden"))); +extern CGColorSpaceRef _Nullable SDAVIFCreateColorSpaceMono(avifNclxColourPrimaries const colorPrimaries, avifNclxTransferCharacteristics const transferCharacteristics) __attribute__((visibility("hidden"))); +extern CGColorSpaceRef _Nullable SDAVIFCreateColorSpaceRGB(avifNclxColourPrimaries const colorPrimaries, avifNclxTransferCharacteristics const transferCharacteristics) __attribute__((visibility("hidden"))); -void SDCalcColorSpaceMonoAVIF(avifImage * _Nonnull avif, CGColorSpaceRef _Nullable * _Nonnull ref, BOOL* _Nonnull shouldRelease); -void SDCalcColorSpaceRGBAVIF(avifImage * _Nonnull avif, CGColorSpaceRef _Nullable * _Nonnull ref, BOOL* _Nonnull shouldRelease); +void SDAVIFCalcColorSpaceMono(avifImage * _Nonnull avif, CGColorSpaceRef _Nullable * _Nonnull ref, BOOL* _Nonnull shouldRelease); +void SDAVIFCalcColorSpaceRGB(avifImage * _Nonnull avif, CGColorSpaceRef _Nullable * _Nonnull ref, BOOL* _Nonnull shouldRelease);
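
After this series, SDImageAVIFCoder remains the only public entry point and the private helpers carry the SDAVIF prefix with hidden visibility. For reference, a minimal consumption sketch; the registration calls (SDImageCodersManager, addCoder:, +sharedCoder) are the usual SDWebImage 5.x API and are assumed here rather than shown in the patches:

    // App setup (sketch, assumed registration API):
    #import <SDWebImage/SDWebImage.h>
    #import <SDWebImageAVIFCoder/SDImageAVIFCoder.h>

    [[SDImageCodersManager sharedManager] addCoder:[SDImageAVIFCoder sharedCoder]];

    // Decoding then flows through the renamed, hidden helpers:
    // SDCreateCGImageFromAVIF -> SDAVIFCalcColorSpaceRGB / SDAVIFCalcColorSpaceMono
    //                         -> SDAVIFCreateColorSpaceRGB / SDAVIFCreateColorSpaceMono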