diff --git a/Sources/Kitsunebi/AnimationView.swift b/Sources/Kitsunebi/AnimationView.swift index 5c75857..d5d2aa4 100644 --- a/Sources/Kitsunebi/AnimationView.swift +++ b/Sources/Kitsunebi/AnimationView.swift @@ -22,7 +22,8 @@ open class PlayerView: UIView { private let renderQueue: DispatchQueue = .global(qos: .userInitiated) private let commandQueue: MTLCommandQueue private let textureCache: CVMetalTextureCache - private let pipelineState: MTLRenderPipelineState + private let mp4PipelineState: MTLRenderPipelineState + private let hevcPipelineState: MTLRenderPipelineState private var applicationHandler = ApplicationHandler() public weak var delegate: PlayerViewDelegate? = nil @@ -51,12 +52,16 @@ open class PlayerView: UIView { guard let metalLib = try? device.makeLibrary(URL: Bundle.module.defaultMetalLibraryURL) else { return nil } - guard let pipelineState = try? device.makeRenderPipelineState(metalLib: metalLib) else { + guard let mp4PipelineState = try? device.makeRenderPipelineState(metalLib: metalLib, fragmentFunctionName: "mp4FragmentShader") else { + return nil + } + guard let hevcPipelineState = try? device.makeRenderPipelineState(metalLib: metalLib, fragmentFunctionName: "hevcFragmentShader") else { return nil } self.commandQueue = commandQueue self.textureCache = textureCache - self.pipelineState = pipelineState + self.mp4PipelineState = mp4PipelineState + self.hevcPipelineState = hevcPipelineState super.init(frame: frame) applicationHandler.delegate = self backgroundColor = .clear @@ -75,12 +80,16 @@ open class PlayerView: UIView { guard let metalLib = try? device.makeLibrary(URL: Bundle.module.defaultMetalLibraryURL) else { return nil } - guard let pipelineState = try? device.makeRenderPipelineState(metalLib: metalLib) else { + guard let mp4PipelineState = try? device.makeRenderPipelineState(metalLib: metalLib, fragmentFunctionName: "mp4FragmentShader") else { + return nil + } + guard let hevcPipelineState = try? 
device.makeRenderPipelineState(metalLib: metalLib, fragmentFunctionName: "hevcFragmentShader") else { return nil } self.commandQueue = commandQueue self.textureCache = textureCache - self.pipelineState = pipelineState + self.mp4PipelineState = mp4PipelineState + self.hevcPipelineState = hevcPipelineState super.init(coder: aDecoder) applicationHandler.delegate = self backgroundColor = .clear @@ -98,7 +107,16 @@ open class PlayerView: UIView { private func renderImage(with frame: Frame, to nextDrawable: CAMetalDrawable) throws { let (baseYTexture, baseCbCrTexture, alphaYTexture) = try makeTexturesFrom(frame) + + let pipelineState: MTLRenderPipelineState + switch frame { + case .yCbCrWithA(_, _): + pipelineState = mp4PipelineState + case .yCbCrA(_): + pipelineState = hevcPipelineState + } + let renderDesc = MTLRenderPassDescriptor() renderDesc.colorAttachments[0].texture = nextDrawable.texture renderDesc.colorAttachments[0].loadAction = .clear diff --git a/Sources/Kitsunebi/Asset.swift b/Sources/Kitsunebi/Asset.swift index 95c669b..59112a1 100644 --- a/Sources/Kitsunebi/Asset.swift +++ b/Sources/Kitsunebi/Asset.swift @@ -8,16 +8,25 @@ import AVFoundation final class Asset { - private let outputSettings: [String: Any] = [ - kCVPixelBufferMetalCompatibilityKey as String: true - ] + private var outputSettings: [String: Any] { + if let pixelFormatType = pixelFormatType { + return [ + kCVPixelBufferPixelFormatTypeKey as String: pixelFormatType, + kCVPixelBufferMetalCompatibilityKey as String: true + ] + } + + return [kCVPixelBufferMetalCompatibilityKey as String: true] + } let asset: AVURLAsset + private let pixelFormatType: OSType? private var reader: AVAssetReader? = nil private var output: AVAssetReaderTrackOutput? = nil var status: AVAssetReader.Status? { reader?.status } - init(url: URL) { - asset = AVURLAsset(url: url) + init(url: URL, pixelFormatType: OSType? 
= nil) { + self.asset = AVURLAsset(url: url) + self.pixelFormatType = pixelFormatType } func reset() throws { diff --git a/Sources/Kitsunebi/MTLDevice+.swift b/Sources/Kitsunebi/MTLDevice+.swift index 8ecc1c7..08f745f 100644 --- a/Sources/Kitsunebi/MTLDevice+.swift +++ b/Sources/Kitsunebi/MTLDevice+.swift @@ -51,7 +51,7 @@ extension MTLDevice { metalLib: MTLLibrary, pixelFormat: MTLPixelFormat = .bgra8Unorm, vertexFunctionName: String = "vertexShader", - fragmentFunctionName: String = "fragmentShader" + fragmentFunctionName: String ) throws -> MTLRenderPipelineState { let pipelineDesc = MTLRenderPipelineDescriptor() pipelineDesc.vertexFunction = metalLib.makeFunction(name: vertexFunctionName) diff --git a/Sources/Kitsunebi/VideoEngine.swift b/Sources/Kitsunebi/VideoEngine.swift index fd4cbd6..1fe1f20 100644 --- a/Sources/Kitsunebi/VideoEngine.swift +++ b/Sources/Kitsunebi/VideoEngine.swift @@ -37,8 +37,9 @@ internal class VideoEngine: NSObject { private lazy var currentFrameIndex: Int = 0 public init(base baseVideoURL: URL, alpha alphaVideoURL: URL, fps: Int) { - let baseAsset = Asset(url: baseVideoURL) - let alphaAsset = Asset(url: alphaVideoURL) + // video range, full range両方くる可能性があるので、video rangeに統一 + let baseAsset = Asset(url: baseVideoURL, pixelFormatType: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) + let alphaAsset = Asset(url: alphaVideoURL, pixelFormatType: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) asset = .yCbCrWithA(yCbCr: baseAsset, a: alphaAsset) fpsKeeper = FPSKeeper(fps: fps) super.init() @@ -46,7 +47,8 @@ internal class VideoEngine: NSObject { } public init(hevcWithAlpha hevcWithAlphaVideoURL: URL, fps: Int) { - let hevcWithAlphaAsset = Asset(url: hevcWithAlphaVideoURL) + // video range, full range両方くる可能性があるので、video rangeに統一 + let hevcWithAlphaAsset = Asset(url: hevcWithAlphaVideoURL, pixelFormatType: kCVPixelFormatType_420YpCbCr8VideoRange_8A_TriPlanar) asset = .yCbCrA(yCbCrA: hevcWithAlphaAsset) fpsKeeper = FPSKeeper(fps: fps) 
super.init() diff --git a/Sources/Kitsunebi/default.metal b/Sources/Kitsunebi/default.metal index 3059d73..c00e545 100644 --- a/Sources/Kitsunebi/default.metal +++ b/Sources/Kitsunebi/default.metal @@ -23,7 +23,39 @@ vertex ColorInOut vertexShader(uint vid [[ vertex_id ]]) { return vertices[vid]; } -fragment float4 fragmentShader(ColorInOut in [[ stage_in ]], +fragment float4 mp4FragmentShader(ColorInOut in [[ stage_in ]], + texture2d<float> baseYTexture [[ texture(0) ]], + texture2d<float> alphaYTexture [[ texture(1) ]], + texture2d<float> baseCbCrTexture [[ texture(2) ]]) { + constexpr sampler colorSampler; + const float4x4 ycbcrToRGBTransform = float4x4( + float4(+1.0000f, +1.0000f, +1.0000f, +0.0000f), + float4(+0.0000f, -0.3441f, +1.7720f, +0.0000f), + float4(+1.4020f, -0.7141f, +0.0000f, +0.0000f), + float4(-0.7010f, +0.5291f, -0.8860f, +1.0000f) + ); + float4 baseYUVColor = float4(baseYTexture.sample(colorSampler, in.texCoords).r, + baseCbCrTexture.sample(colorSampler, in.texCoords).rg, + 1.0f); + // yuv video range to full range + baseYUVColor.r = (baseYUVColor.r - (16.0f/255.0f)) * (255.0f/(235.0f-16.0f)); + baseYUVColor.g = (baseYUVColor.g - (16.0f/255.0f)) * (255.0f/(240.0f-16.0f)); + baseYUVColor.b = (baseYUVColor.b - (16.0f/255.0f)) * (255.0f/(240.0f-16.0f)); + + // yuv to rgb + float4 baseColor = ycbcrToRGBTransform * baseYUVColor; + + + // get alpha value + float alphaColor = alphaYTexture.sample(colorSampler, in.texCoords).r; + // video range to full range + alphaColor = (alphaColor - (16.0f/255.0f)) * (255.0f/(235.0f-16.0f)); + + return float4(baseColor.r, baseColor.g, baseColor.b, alphaColor); +} + + +fragment float4 hevcFragmentShader(ColorInOut in [[ stage_in ]], texture2d<float> baseYTexture [[ texture(0) ]], texture2d<float> alphaYTexture [[ texture(1) ]], texture2d<float> baseCbCrTexture [[ texture(2) ]]) { @@ -35,13 +67,22 @@ fragment float4 fragmentShader(ColorInOut in [[ stage_in ]], float4(-0.7010f, +0.5291f, -0.8860f, +1.0000f) ); - float4 baseColor = ycbcrToRGBTransform * 
float4(baseYTexture.sample(colorSampler, in.texCoords).r, - baseCbCrTexture.sample(colorSampler, in.texCoords).rg, - 1.0); + float4 baseYUVColor = float4(baseYTexture.sample(colorSampler, in.texCoords).r, + baseCbCrTexture.sample(colorSampler, in.texCoords).rg, + 1.0f); - float4 alphaColor = alphaYTexture.sample(colorSampler, in.texCoords).r; + // yuv video range to full range + baseYUVColor.r = (baseYUVColor.r - (16.0f/255.0f)) * (255.0f/(235.0f-16.0f)); + baseYUVColor.g = (baseYUVColor.g - (16.0f/255.0f)) * (255.0f/(240.0f-16.0f)); + baseYUVColor.b = (baseYUVColor.b - (16.0f/255.0f)) * (255.0f/(240.0f-16.0f)); - return float4(baseColor.r, baseColor.g, baseColor.b, alphaColor.r); -} + // yuv to rgb + float4 baseColor = ycbcrToRGBTransform * baseYUVColor; + + // kCVPixelFormatType_420YpCbCr8VideoRange_8A_TriPlanar + // alphaはfull rangeのため、変更必要ない + float alphaColor = alphaYTexture.sample(colorSampler, in.texCoords).r; + return float4(baseColor.r, baseColor.g, baseColor.b, alphaColor); +}