Merge pull request #9 from link-u/fix/odd-dimensions-and-color-matrix
Handle images with odd dimensions, fix the color matrix, and add CI to check the decoding functions
dreampiggy authored Feb 7, 2020
2 parents f73ed4d + a21ca87 commit f8570a2
Showing 7 changed files with 417 additions and 26 deletions.
59 changes: 59 additions & 0 deletions .github/workflows/check-image-decoding.yml
@@ -0,0 +1,59 @@
name: Check the decoded images.

on: [push]

jobs:
build:
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Build CLI tool
shell: bash
run: |
set -ex
cd Example
pod install --repo-update
xcrun xcodebuild \
-workspace SDWebImageAVIFCoder.xcworkspace \
-scheme "SDWebImageAVIFCoder_Example CLI" \
-archivePath ./CLI \
archive
- name: Clone test images
shell: bash
run: |
set -ex
git clone https://github.com/link-u/avif-sample-images.git
- name: Decode all AVIF images
shell: bash
run: |
set -ex
cd avif-sample-images
mkdir decoded
CMD="../Example/CLI.xcarchive/Products/usr/local/bin/SDWebImageAVIFCoder_Example CLI"
for file in $(find . -name \*.avif); do
file=$(basename ${file})
"${CMD}" "${file}" "./decoded/${file}.png"
done
- name: Install imagemagick to compare images.
shell: bash
run: brew install imagemagick
- name: Compare images
shell: bash
run: |
set -ex
cd avif-sample-images
for file in $(find . -name \*.avif); do
file=$(basename ${file})
if (echo ${file} | grep "\(monochrome\|crop\|rotate\|mirror\)"); then
# FIXME(ledyba-z): Check them.
echo "Ignore: ${file}"
continue
else
orig=$(cat Makefile | grep "^${file}" | sed "s/^${file}: \(.*\)$/\1/")
score=$(compare -metric PSNR "${orig}" "decoded/${file}.png" NULL: 2>&1 || true)
echo " * ${file}: ${score}"
if test $(echo "${score} >= 35.0" | bc -l) -eq 0; then
exit -1
fi
fi
done
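
The comparison step decodes every sample AVIF, looks up the source image it was encoded from in the avif-sample-images Makefile, and fails the job unless ImageMagick's compare -metric PSNR reports at least 35 dB (monochrome, crop, rotate and mirror samples are skipped for now). As a reminder, for 8-bit channels the metric is

    \mathrm{PSNR} = 10\,\log_{10}\!\left(\frac{255^{2}}{\mathrm{MSE}}\right)\ \mathrm{dB}, \qquad \mathrm{MSE} = \frac{1}{N}\sum_{i=1}^{N}\left(x_i - y_i\right)^{2},

so higher is better; a 35 dB floor catches gross decoding errors while tolerating ordinary lossy-compression noise.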
6 changes: 6 additions & 0 deletions Example/Podfile
@@ -17,3 +17,9 @@ target 'SDWebImageAVIFCoder_Example macOS' do
pod 'SDWebImageAVIFCoder', :path => '../'
pod 'libavif', :subspecs => ['librav1e', 'libdav1d']
end

target 'SDWebImageAVIFCoder_Example CLI' do
platform :osx, '10.10'
pod 'SDWebImageAVIFCoder', :path => '../'
pod 'libavif', :subspecs => ['librav1e', 'libdav1d']
end
187 changes: 187 additions & 0 deletions Example/SDWebImageAVIFCoder.xcodeproj/project.pbxproj

Large diffs are not rendered by default.

@@ -0,0 +1,78 @@
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1130"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "6D37313D23E88F6B007F654B"
BuildableName = "SDWebImageAVIFCoder_Example CLI"
BlueprintName = "SDWebImageAVIFCoder_Example CLI"
ReferencedContainer = "container:SDWebImageAVIFCoder.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
</Testables>
</TestAction>
<LaunchAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "6D37313D23E88F6B007F654B"
BuildableName = "SDWebImageAVIFCoder_Example CLI"
BlueprintName = "SDWebImageAVIFCoder_Example CLI"
ReferencedContainer = "container:SDWebImageAVIFCoder.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
</LaunchAction>
<ProfileAction
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "6D37313D23E88F6B007F654B"
BuildableName = "SDWebImageAVIFCoder_Example CLI"
BlueprintName = "SDWebImageAVIFCoder_Example CLI"
ReferencedContainer = "container:SDWebImageAVIFCoder.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>
@@ -0,0 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict/>
</plist>
33 changes: 33 additions & 0 deletions Example/SDWebImageAVIFCoder_Example CLI/main.m
@@ -0,0 +1,33 @@
//
// main.m
// SDWebImageAVIFCoder_Example CLI
//
// Created by psi on 2020/02/04.
// Copyright © 2020 [email protected]. All rights reserved.
//

#import <Foundation/Foundation.h>
#import <SDWebImage/SDWebImage.h>
#import <SDWebImageAVIFCoder/SDImageAVIFCoder.h>

int main(int argc, const char * argv[]) {
if(argc != 3) {
fprintf(stderr, "usage: %s <inputPath> <outputPath>\n", argv[0]);
return -1;
}
@autoreleasepool {
NSString* inputPath = [NSString stringWithUTF8String: argv[1]];
NSString* outputPath = [NSString stringWithUTF8String: argv[2]];
NSData* data = [[NSData alloc] initWithContentsOfFile: inputPath];
SDImageAVIFCoder* const coder = [SDImageAVIFCoder sharedCoder];
UIImage* img = [coder decodedImageWithData: data options:nil];

CGImageRef cgRef = [img CGImageForProposedRect:nil context:nil hints:nil];
NSBitmapImageRep *newRep = [[NSBitmapImageRep alloc] initWithCGImage:cgRef];
[newRep setSize:[img size]]; // if you want the same resolution
NSDictionary *prop = [[NSDictionary alloc] init];
NSData* pngData = [newRep representationUsingType:NSBitmapImageFileTypePNG properties: prop];
[pngData writeToFile:outputPath atomically:YES];
}
return 0;
}
75 changes: 49 additions & 26 deletions SDWebImageAVIFCoder/Classes/SDImageAVIFCoder.m
@@ -21,11 +21,13 @@ static void SetupConversionInfo(avifImage * avif,

// Setup Matrix
matrix->Yp = 1.0f;
matrix->Cr_R = 2.0f * (1.0f - state->kr);
matrix->Cb_B = 2.0f * (1.0f - state->kb);
matrix->Cb_G = -2.0f * (1.0f - state->kr) * state->kr / state->kg;
matrix->Cr_G = -2.0f * (1.0f - state->kb) * state->kb / state->kg;


matrix->Cb_B = 2.0f * (1.0f - state->kb);
matrix->Cb_G = -2.0f * (1.0f - state->kb) * state->kb / state->kg;

matrix->Cr_R = 2.0f * (1.0f - state->kr);
matrix->Cr_G = -2.0f * (1.0f - state->kr) * state->kr / state->kg;

// Setup Pixel Range
switch (avif->depth) {
case 8:
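
For reference, the corrected Cb_G/Cr_G terms above follow from the standard Y'CbCr definition: Y' = k_r R' + k_g G' + k_b B' with k_g = 1 - k_r - k_b, Cb proportional to (B' - Y') and Cr proportional to (R' - Y'). Inverting that system gives

    \begin{aligned}
    R' &= Y' + 2(1-k_r)\,C_r,\\
    G' &= Y' - \frac{2\,k_b(1-k_b)}{k_g}\,C_b - \frac{2\,k_r(1-k_r)}{k_g}\,C_r,\\
    B' &= Y' + 2(1-k_b)\,C_b,
    \end{aligned}

so the Cb contribution to green is weighted by k_b and the Cr contribution by k_r. The previous code had those two factors crossed (Cb_G used k_r, Cr_G used k_b), which is what this hunk corrects.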
@@ -141,8 +143,8 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
vImage_Buffer origCb = {
.data = avif->yuvPlanes[AVIF_CHAN_U],
.rowBytes = avif->yuvRowBytes[AVIF_CHAN_U],
.width = avif->width >> state.formatInfo.chromaShiftX,
.height = avif->height >> state.formatInfo.chromaShiftY,
.width = (avif->width+state.formatInfo.chromaShiftX) >> state.formatInfo.chromaShiftX,
.height = (avif->height+state.formatInfo.chromaShiftY) >> state.formatInfo.chromaShiftY,
};

if(!origCb.data) { // allocate dummy data to convert monochrome images.
@@ -159,8 +161,8 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
vImage_Buffer origCr = {
.data = avif->yuvPlanes[AVIF_CHAN_V],
.rowBytes = avif->yuvRowBytes[AVIF_CHAN_V],
.width = avif->width >> state.formatInfo.chromaShiftX,
.height = avif->height >> state.formatInfo.chromaShiftY,
.width = (avif->width+state.formatInfo.chromaShiftX) >> state.formatInfo.chromaShiftX,
.height = (avif->height+state.formatInfo.chromaShiftY) >> state.formatInfo.chromaShiftY,
};
if(!origCr.data) { // allocate dummy data to convert monochrome images.
dummyCr = calloc(origCr.width, sizeof(uint8_t));
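
In both chroma buffers the width and height now round up instead of truncating, so images with an odd width or height keep their last chroma column and row. A minimal C sketch of the arithmetic (the helper name is illustrative; the chroma shifts here are only 0 or 1, and for those values (dim + shift) >> shift is exactly ceil(dim / 2^shift)):

    #include <inttypes.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative helper: chroma plane dimension with rounding up. */
    static uint32_t chromaPlaneDim(uint32_t dim, uint32_t shift) {
        return (dim + shift) >> shift;
    }

    int main(void) {
        /* A 5x3 4:2:0 image: plain ">> 1" gave 2x1 chroma planes, dropping the
         * last column and row; rounding up yields the expected 3x2. */
        printf("chroma width:  %" PRIu32 "\n", chromaPlaneDim(5, 1)); /* 3 */
        printf("chroma height: %" PRIu32 "\n", chromaPlaneDim(3, 1)); /* 2 */
        return 0;
    }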
@@ -282,23 +284,38 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
return;
}

((uint8_t*)origY.data)[origY.rowBytes * (origY.height-1) + origY.width ] = 255;
const vImagePixelCount alignedWidth = (origY.width+1) & (~1);
vImage_Buffer tmpY1 = {
.data = calloc(origY.width/2 * origY.height, sizeof(uint8_t)),
.width = origY.width/2,
.data = calloc(alignedWidth/2 * origY.height, sizeof(uint8_t)),
.width = alignedWidth/2,
.height = origY.height,
.rowBytes = origY.width/2 * sizeof(uint8_t),
.rowBytes = alignedWidth/2 * sizeof(uint8_t),
};
if(!tmpY1.data) {
free(argbPixels);
free(dummyCb);
free(dummyCr);
return;
}
err = vImageConvert_ChunkyToPlanar8((const void*[]){origY.data},
(const vImage_Buffer*[]){&tmpY1},
1 /* channelCount */, 2 /* src srcStrideBytes */,
alignedWidth/2, origY.height,
origY.rowBytes, kvImageNoFlags);
if(err != kvImageNoError) {
NSLog(@"Failed to separate first Y channel: %ld", err);
free(argbPixels);
free(dummyCb);
free(dummyCr);
free(tmpY1.data);
return;
}
vImage_Buffer tmpY2 = {
.data = calloc(origY.width/2 * origY.height, sizeof(uint8_t)),
.width = origY.width/2,
.data = calloc(alignedWidth/2 * origY.height, sizeof(uint8_t)),
.width = alignedWidth/2,
.height = origY.height,
.rowBytes = origY.width/2 * sizeof(uint8_t),
.rowBytes = alignedWidth/2 * sizeof(uint8_t),
};
if(!tmpY2.data) {
free(argbPixels);
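
alignedWidth rounds an odd luma width up to the next even value so the Y plane can still be split into a half-width even-column plane and a half-width odd-column plane; when the width is odd, the extra sample lands in the even-column plane. A small standalone C illustration of that rounding (the sample widths are assumed values, not from the diff):

    #include <stdint.h>
    #include <stdio.h>

    int main(void) {
        for (uint32_t width = 4; width <= 7; ++width) {
            /* Same expression as alignedWidth above: round up to a multiple of 2. */
            uint32_t alignedWidth = (width + 1u) & ~1u;
            uint32_t evenColumns  = alignedWidth / 2;  /* columns 0, 2, 4, ... */
            uint32_t oddColumns   = width / 2;         /* columns 1, 3, 5, ... */
            printf("width %u -> aligned %u (even columns %u, odd columns %u)\n",
                   (unsigned)width, (unsigned)alignedWidth,
                   (unsigned)evenColumns, (unsigned)oddColumns);
        }
        return 0;
    }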
@@ -307,13 +324,15 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
free(tmpY1.data);
return;
}
err= vImageConvert_ChunkyToPlanar8((const void*[]){origY.data, origY.data+1},
(const vImage_Buffer*[]){&tmpY1, &tmpY2},
2 /* channelCount */,2 /* src srcStrideBytes */,
tmpY2.width = origY.width/2;
err = vImageConvert_ChunkyToPlanar8((const void*[]){origY.data + 1},
(const vImage_Buffer*[]){&tmpY2},
1 /* channelCount */, 2 /* src srcStrideBytes */,
origY.width/2, origY.height,
origY.rowBytes, kvImageNoFlags);
tmpY2.width = alignedWidth/2;
if(err != kvImageNoError) {
NSLog(@"Failed to separate Y channel: %ld", err);
NSLog(@"Failed to separate second Y channel: %ld", err);
free(argbPixels);
free(dummyCb);
free(dummyCr);
@@ -322,10 +341,10 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
return;
}
vImage_Buffer tmpBuffer = {
.data = calloc(avif->width * avif->height * 2, sizeof(uint8_t)),
.width = avif->width/2,
.data = calloc(alignedWidth * avif->height * 2, sizeof(uint8_t)),
.width = alignedWidth/2,
.height = avif->height,
.rowBytes = avif->width / 2 * 4 * sizeof(uint8_t),
.rowBytes = alignedWidth / 2 * 4 * sizeof(uint8_t),
};
if(!tmpBuffer.data) {
free(argbPixels);
@@ -437,8 +456,8 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
vImage_Buffer origCb = {
.data = avif->yuvPlanes[AVIF_CHAN_U],
.rowBytes = avif->yuvRowBytes[AVIF_CHAN_U],
.width = avif->width >> state.formatInfo.chromaShiftX,
.height = avif->height >> state.formatInfo.chromaShiftY,
.width = (avif->width+state.formatInfo.chromaShiftX) >> state.formatInfo.chromaShiftX,
.height = (avif->height+state.formatInfo.chromaShiftY) >> state.formatInfo.chromaShiftY,
};

if(!origCb.data) { // allocate dummy data to convert monochrome images.
@@ -465,8 +484,8 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
vImage_Buffer origCr = {
.data = avif->yuvPlanes[AVIF_CHAN_V],
.rowBytes = avif->yuvRowBytes[AVIF_CHAN_V],
.width = avif->width >> state.formatInfo.chromaShiftX,
.height = avif->height >> state.formatInfo.chromaShiftY,
.width = (avif->width+state.formatInfo.chromaShiftX) >> state.formatInfo.chromaShiftX,
.height = (avif->height+state.formatInfo.chromaShiftY) >> state.formatInfo.chromaShiftY,
};

if(!origCr.data) { // allocate dummy data to convert monochrome images.
@@ -823,6 +842,10 @@ - (nullable CGImageRef)sd_createAVIFImageWithData:(nonnull NSData *)data CF_RETU
CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, dest, rowBytes * height, FreeImageData);
CGBitmapInfo bitmapInfo = usesU16 ? kCGBitmapByteOrder16Host : kCGBitmapByteOrderDefault;
bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNone;
// FIXME: (ledyba-z): Set appropriate color space.
// Currently, there is no way to get MatrixCoefficients, TransferCharacteristics and ColourPrimaries values
// in Sequence Header OBU.
// https://github.com/AOMediaCodec/libavif/blob/7d36984b2994210b/include/avif/avif.h#L149-L236
CGColorSpaceRef colorSpaceRef = [SDImageCoderHelper colorSpaceGetDeviceRGB];
CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
CGImageRef imageRef = CGImageCreate(width, height, bitsPerComponent, bitsPerPixel, rowBytes, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);
