Skip to content

Add support for more modern image formats like JPEG XL #899

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 13 commits into from
12 changes: 11 additions & 1 deletion Package.resolved
Original file line number Diff line number Diff line change
@@ -1,5 +1,15 @@
{
"originHash" : "a16ed5d1f17dce3ee0b7cce7a04802759ee4b2faa99485261811c56f1adff67d",
"pins" : [
{
"identity" : "jxl-coder-swift",
"kind" : "remoteSourceControl",
"location" : "https://github.com/awxkee/jxl-coder-swift.git",
"state" : {
"revision" : "179264567c7dc0dd489859d5572773222358a7f5",
"version" : "1.7.3"
}
},
{
"identity" : "swift-syntax",
"kind" : "remoteSourceControl",
Expand All @@ -10,5 +20,5 @@
}
}
],
"version" : 2
"version" : 3
}
14 changes: 9 additions & 5 deletions Package.swift
Original file line number Diff line number Diff line change
@@ -1,14 +1,14 @@
// swift-tools-version:5.7
// swift-tools-version:5.10

import PackageDescription

let package = Package(
name: "swift-snapshot-testing",
platforms: [
.iOS(.v13),
.macOS(.v10_15),
.macOS(.v12),
.tvOS(.v13),
.watchOS(.v6),
.watchOS(.v8),
],
products: [
.library(
Expand All @@ -21,11 +21,15 @@ let package = Package(
),
],
dependencies: [
.package(url: "https://github.com/swiftlang/swift-syntax", "509.0.0"..<"601.0.0-prerelease")
.package(url: "https://github.com/swiftlang/swift-syntax", "509.0.0"..<"601.0.0-prerelease"),
.package(url: "https://github.com/awxkee/jxl-coder-swift.git", from: "1.7.3")
],
targets: [
.target(
name: "SnapshotTesting"
name: "SnapshotTesting",
dependencies: [
.product(name: "JxlCoder", package: "jxl-coder-swift")
]
),
.target(
name: "InlineSnapshotTesting",
Expand Down
3 changes: 3 additions & 0 deletions Sources/SnapshotTesting/AssertSnapshot.swift
Original file line number Diff line number Diff line change
Expand Up @@ -81,6 +81,9 @@ public var __record: SnapshotTestingConfiguration.Record = {
return .missing
}()

/// The image format used when serializing snapshot references. Set this global to switch formats library-wide (e.g. to `.jxl` when testing JPEG XL support).
public var imageFormat = ImageFormat.defaultValue

/// Asserts that a given value matches a reference on disk.
///
/// - Parameters:
Expand Down
12 changes: 6 additions & 6 deletions Sources/SnapshotTesting/Snapshotting/CALayer.swift
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
/// assertSnapshot(of: layer, as: .image(precision: 0.99))
/// ```
public static var image: Snapshotting {
return .image(precision: 1)
return .image(precision: 1, format: imageFormat)
}

/// A snapshot strategy for comparing layers based on pixel equality.
Expand All @@ -25,9 +25,9 @@
/// match. 98-99% mimics
/// [the precision](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e) of the
/// human eye.
public static func image(precision: Float, perceptualPrecision: Float = 1) -> Snapshotting {
public static func image(precision: Float, perceptualPrecision: Float = 1, format: ImageFormat) -> Snapshotting {
return SimplySnapshotting.image(
precision: precision, perceptualPrecision: perceptualPrecision
precision: precision, perceptualPrecision: perceptualPrecision, format: format
).pullback { layer in
let image = NSImage(size: layer.bounds.size)
image.lockFocus()
Expand All @@ -46,7 +46,7 @@
extension Snapshotting where Value == CALayer, Format == UIImage {
/// A snapshot strategy for comparing layers based on pixel equality.
public static var image: Snapshotting {
return .image()
return .image(format: imageFormat)
}

/// A snapshot strategy for comparing layers based on pixel equality.
Expand All @@ -59,12 +59,12 @@
/// human eye.
/// - traits: A trait collection override.
public static func image(
precision: Float = 1, perceptualPrecision: Float = 1, traits: UITraitCollection = .init()
precision: Float = 1, perceptualPrecision: Float = 1, traits: UITraitCollection = .init(), format: ImageFormat
)
-> Snapshotting
{
return SimplySnapshotting.image(
precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale
precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale, format: format
).pullback { layer in
renderer(bounds: layer.bounds, for: traits).image { ctx in
layer.setNeedsLayout()
Expand Down
15 changes: 9 additions & 6 deletions Sources/SnapshotTesting/Snapshotting/CGPath.swift
Original file line number Diff line number Diff line change
@@ -1,12 +1,13 @@
#if os(macOS)

import AppKit
import Cocoa
import CoreGraphics

extension Snapshotting where Value == CGPath, Format == NSImage {
/// A snapshot strategy for comparing bezier paths based on pixel equality.
public static var image: Snapshotting {
return .image()
return .image(format: imageFormat)
}

/// A snapshot strategy for comparing bezier paths based on pixel equality.
Expand All @@ -29,10 +30,11 @@
public static func image(
precision: Float = 1,
perceptualPrecision: Float = 1,
drawingMode: CGPathDrawingMode = .eoFill
drawingMode: CGPathDrawingMode = .eoFill,
format: ImageFormat
) -> Snapshotting {
return SimplySnapshotting.image(
precision: precision, perceptualPrecision: perceptualPrecision
precision: precision, perceptualPrecision: perceptualPrecision, format: format
).pullback { path in
let bounds = path.boundingBoxOfPath
var transform = CGAffineTransform(translationX: -bounds.origin.x, y: -bounds.origin.y)
Expand All @@ -52,10 +54,11 @@
#elseif os(iOS) || os(tvOS)
import UIKit


extension Snapshotting where Value == CGPath, Format == UIImage {
/// A snapshot strategy for comparing bezier paths based on pixel equality.
public static var image: Snapshotting {
return .image()
return .image(format: imageFormat)
}

/// A snapshot strategy for comparing bezier paths based on pixel equality.
Expand All @@ -68,10 +71,10 @@
/// human eye.
public static func image(
precision: Float = 1, perceptualPrecision: Float = 1, scale: CGFloat = 1,
drawingMode: CGPathDrawingMode = .eoFill
drawingMode: CGPathDrawingMode = .eoFill, format: ImageFormat
) -> Snapshotting {
return SimplySnapshotting.image(
precision: precision, perceptualPrecision: perceptualPrecision, scale: scale
precision: precision, perceptualPrecision: perceptualPrecision, scale: scale, format: format
).pullback { path in
let bounds = path.boundingBoxOfPath
let format: UIGraphicsImageRendererFormat
Expand Down
59 changes: 59 additions & 0 deletions Sources/SnapshotTesting/Snapshotting/ImageSerializer.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
import Foundation
import JxlCoder

#if !os(macOS)
import UIKit.UIImage

/// The platform image type used for snapshots (UIKit).
public typealias SnapImage = UIImage

/// Serializes `image` as PNG bytes via `UIImage.pngData()`.
/// - Returns: PNG data, or `nil` if the image has no CGImage/CIImage backing.
private func EncodePNGImage(_ image: SnapImage) -> Data? {
  image.pngData()
}

/// Rebuilds an image from PNG (or any `UIImage`-supported) data.
/// - Returns: The decoded image, or `nil` if the data is unreadable.
private func DecodePNGImage(_ data: Data) -> SnapImage? {
  UIImage(data: data)
}

#else
import AppKit.NSImage

/// The platform image type used for snapshots (AppKit).
public typealias SnapImage = NSImage

/// Serializes `image` as PNG bytes by rendering through `NSBitmapImageRep`.
/// - Returns: PNG data, or `nil` if no CGImage can be produced.
private func EncodePNGImage(_ image: SnapImage) -> Data? {
  guard let cgImage = image.cgImage(forProposedRect: nil, context: nil, hints: nil) else {
    return nil
  }
  let bitmap = NSBitmapImageRep(cgImage: cgImage)
  // Preserve the logical (point) size so Retina images round-trip correctly.
  bitmap.size = image.size
  return bitmap.representation(using: .png, properties: [:])
}

/// Rebuilds an image from PNG (or any `NSImage`-supported) data.
/// - Returns: The decoded image, or `nil` if the data is unreadable.
private func DecodePNGImage(_ data: Data) -> SnapImage? {
  NSImage(data: data)
}

#endif

/// A type that can supply a library-wide default instance of its
/// associated `Value`.
///
/// Conformers expose the fallback used when a caller does not specify a
/// value explicitly (e.g. `ImageFormat.defaultValue`).
package protocol DefaultValueProvider<Value> {
  /// The kind of value this provider supplies a default for.
  associatedtype Value

  /// The default instance to use when none is given explicitly.
  static var defaultValue: Value { get }
}

/// The serialization formats supported for snapshot images.
///
/// The raw value doubles as the snapshot file's path extension
/// (e.g. `"png"`, `"jxl"`).
public enum ImageFormat: String, DefaultValueProvider {
  case jxl
  case png

  /// The format used when none is specified explicitly.
  ///
  /// Declared `let` rather than `var`: the protocol only requires `{ get }`,
  /// and the intended runtime override point is the mutable `imageFormat`
  /// global, not this constant.
  public static let defaultValue: ImageFormat = .png
}

/// Serializes `image` into the on-disk representation for `format`.
///
/// JPEG XL encoding errors from `JXLCoder` are mapped to `nil` via `try?`.
/// - Returns: The encoded bytes, or `nil` if encoding fails.
package func EncodeImage(image: SnapImage, _ format: ImageFormat) -> Data? {
  format == .jxl
    ? (try? JXLCoder.encode(image: image))
    : EncodePNGImage(image)
}

/// Reconstructs an image from bytes previously produced by `EncodeImage`.
///
/// JPEG XL decoding errors from `JXLCoder` are mapped to `nil` via `try?`.
/// - Returns: The decoded image, or `nil` if `data` is not valid for `format`.
package func DecodeImage(data: Data, _ format: ImageFormat) -> SnapImage? {
  format == .jxl
    ? (try? JXLCoder.decode(data: data))
    : DecodePNGImage(data)
}

6 changes: 3 additions & 3 deletions Sources/SnapshotTesting/Snapshotting/NSBezierPath.swift
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
extension Snapshotting where Value == NSBezierPath, Format == NSImage {
/// A snapshot strategy for comparing bezier paths based on pixel equality.
public static var image: Snapshotting {
return .image()
return .image(format: imageFormat)
}

/// A snapshot strategy for comparing bezier paths based on pixel equality.
Expand All @@ -24,9 +24,9 @@
/// match. 98-99% mimics
/// [the precision](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e) of the
/// human eye.
public static func image(precision: Float = 1, perceptualPrecision: Float = 1) -> Snapshotting {
public static func image(precision: Float = 1, perceptualPrecision: Float = 1, format: ImageFormat) -> Snapshotting {
return SimplySnapshotting.image(
precision: precision, perceptualPrecision: perceptualPrecision
precision: precision, perceptualPrecision: perceptualPrecision, format: format
).pullback { path in
// Move path info frame:
let bounds = path.bounds
Expand Down
33 changes: 12 additions & 21 deletions Sources/SnapshotTesting/Snapshotting/NSImage.swift
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

extension Diffing where Value == NSImage {
/// A pixel-diffing strategy for NSImage's which requires a 100% match.
public static let image = Diffing.image()
public static let image = Diffing.image(format: imageFormat)

/// A pixel-diffing strategy for NSImage that allows customizing how precise the matching must be.
///
Expand All @@ -15,14 +15,14 @@
/// [the precision](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e) of the
/// human eye.
/// - Returns: A new diffing strategy.
public static func image(precision: Float = 1, perceptualPrecision: Float = 1) -> Diffing {
public static func image(precision: Float = 1, perceptualPrecision: Float = 1, format: ImageFormat) -> Diffing {
return .init(
toData: { NSImagePNGRepresentation($0)! },
fromData: { NSImage(data: $0)! }
toData: { EncodeImage(image: $0, format)! },
fromData: { DecodeImage(data: $0, format)! }
) { old, new in
guard
let message = compare(
old, new, precision: precision, perceptualPrecision: perceptualPrecision)
old, new, precision: precision, perceptualPrecision: perceptualPrecision, format: format)
else { return nil }
let difference = SnapshotTesting.diff(old, new)
let oldAttachment = XCTAttachment(image: old)
Expand All @@ -42,7 +42,7 @@
extension Snapshotting where Value == NSImage, Format == NSImage {
/// A snapshot strategy for comparing images based on pixel equality.
public static var image: Snapshotting {
return .image()
return .image(format: imageFormat)
}

/// A snapshot strategy for comparing images based on pixel equality.
Expand All @@ -53,24 +53,15 @@
/// match. 98-99% mimics
/// [the precision](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e) of the
/// human eye.
public static func image(precision: Float = 1, perceptualPrecision: Float = 1) -> Snapshotting {
public static func image(precision: Float = 1, perceptualPrecision: Float = 1, format: ImageFormat) -> Snapshotting {
return .init(
pathExtension: "png",
diffing: .image(precision: precision, perceptualPrecision: perceptualPrecision)
pathExtension: format.rawValue,
diffing: .image(precision: precision, perceptualPrecision: perceptualPrecision, format: format)
)
}
}

private func NSImagePNGRepresentation(_ image: NSImage) -> Data? {
guard let cgImage = image.cgImage(forProposedRect: nil, context: nil, hints: nil) else {
return nil
}
let rep = NSBitmapImageRep(cgImage: cgImage)
rep.size = image.size
return rep.representation(using: .png, properties: [:])
}

private func compare(_ old: NSImage, _ new: NSImage, precision: Float, perceptualPrecision: Float)
private func compare(_ old: NSImage, _ new: NSImage, precision: Float, perceptualPrecision: Float, format: ImageFormat)
-> String?
{
guard let oldCgImage = old.cgImage(forProposedRect: nil, context: nil, hints: nil) else {
Expand All @@ -94,8 +85,8 @@
let byteCount = oldContext.height * oldContext.bytesPerRow
if memcmp(oldData, newData, byteCount) == 0 { return nil }
guard
let pngData = NSImagePNGRepresentation(new),
let newerCgImage = NSImage(data: pngData)?.cgImage(
let imageData = EncodeImage(image: new, format),
let newerCgImage = NSImage(data: imageData)?.cgImage(
forProposedRect: nil, context: nil, hints: nil),
let newerContext = context(for: newerCgImage),
let newerData = newerContext.data
Expand Down
6 changes: 3 additions & 3 deletions Sources/SnapshotTesting/Snapshotting/NSView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
extension Snapshotting where Value == NSView, Format == NSImage {
/// A snapshot strategy for comparing views based on pixel equality.
public static var image: Snapshotting {
return .image()
return .image(format: imageFormat)
}

/// A snapshot strategy for comparing views based on pixel equality.
Expand All @@ -21,10 +21,10 @@
/// human eye.
/// - size: A view size override.
public static func image(
precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize? = nil
precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize? = nil, format: ImageFormat = imageFormat
) -> Snapshotting {
return SimplySnapshotting.image(
precision: precision, perceptualPrecision: perceptualPrecision
precision: precision, perceptualPrecision: perceptualPrecision, format: imageFormat
).asyncPullback { view in
let initialSize = view.frame.size
if let size = size { view.frame.size = size }
Expand Down
6 changes: 3 additions & 3 deletions Sources/SnapshotTesting/Snapshotting/NSViewController.swift
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
extension Snapshotting where Value == NSViewController, Format == NSImage {
/// A snapshot strategy for comparing view controller views based on pixel equality.
public static var image: Snapshotting {
return .image()
return .image(format: imageFormat)
}

/// A snapshot strategy for comparing view controller views based on pixel equality.
Expand All @@ -18,10 +18,10 @@
/// human eye.
/// - size: A view size override.
public static func image(
precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize? = nil
precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize? = nil, format: ImageFormat
) -> Snapshotting {
return Snapshotting<NSView, NSImage>.image(
precision: precision, perceptualPrecision: perceptualPrecision, size: size
precision: precision, perceptualPrecision: perceptualPrecision, size: size, format: format
).pullback { $0.view }
}
}
Expand Down
Loading
Loading