Fix video range being treated as full range during video playback
* Fixes the issue where mp4 playback appears semi-transparent
huiping_guo committed Feb 3, 2022
1 parent c5fd4ee commit 530c11f
Showing 5 changed files with 91 additions and 21 deletions.
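For context: the fragment shaders convert YCbCr to RGB with a matrix whose constants (1.402, 1.772, 0.3441, 0.7141) assume full-range input, i.e. luma and chroma normalized to 0...1 with chroma centered at 0.5. When an 8-bit video-range buffer (luma limited to 16...235, chroma to 16...240) is pushed through that matrix unscaled, colors wash out and the alpha plane never reaches 1.0, which shows up as the semi-transparent mp4 playback mentioned above. The standalone Swift sketch below (illustrative only, not part of this diff; function names are not from the repository) restates the expansion the new shader code performs.

// Standalone sketch of the 8-bit video-range -> full-range expansion applied in the
// shaders below, before the YCbCr -> RGB matrix. Sampled values are already divided
// by 255, so offsets and scales are expressed in 0...1 units.
func expandVideoRangeLuma(_ y: Float) -> Float {
  (y - 16.0 / 255.0) * (255.0 / (235.0 - 16.0))
}

func expandVideoRangeChroma(_ c: Float) -> Float {
  (c - 16.0 / 255.0) * (255.0 / (240.0 - 16.0))
}

// Spot checks: video-range black (16/255) maps to 0.0, peak luma (235/255) maps to 1.0,
// and the chroma midpoint (128/255) maps to exactly 0.5, which is what the matrix expects.
assert(abs(expandVideoRangeLuma(16.0 / 255.0)) < 1e-4)
assert(abs(expandVideoRangeLuma(235.0 / 255.0) - 1.0) < 1e-4)
assert(abs(expandVideoRangeChroma(128.0 / 255.0) - 0.5) < 1e-4)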
28 changes: 23 additions & 5 deletions Sources/Kitsunebi/AnimationView.swift
@@ -22,7 +22,8 @@ open class PlayerView: UIView {
private let renderQueue: DispatchQueue = .global(qos: .userInitiated)
private let commandQueue: MTLCommandQueue
private let textureCache: CVMetalTextureCache
private let pipelineState: MTLRenderPipelineState
private let mp4PipelineState: MTLRenderPipelineState
private let hevcPipelineState: MTLRenderPipelineState
private var applicationHandler = ApplicationHandler()

public weak var delegate: PlayerViewDelegate? = nil
@@ -51,12 +52,16 @@ open class PlayerView: UIView {
guard let metalLib = try? device.makeLibrary(URL: Bundle.module.defaultMetalLibraryURL) else {
return nil
}
guard let pipelineState = try? device.makeRenderPipelineState(metalLib: metalLib) else {
guard let mp4PipelineState = try? device.makeRenderPipelineState(metalLib: metalLib, fragmentFunctionName: "mp4FragmentShader") else {
return nil
}
guard let hevcPipelineState = try? device.makeRenderPipelineState(metalLib: metalLib, fragmentFunctionName: "hevcFragmentShader") else {
return nil
}
self.commandQueue = commandQueue
self.textureCache = textureCache
self.pipelineState = pipelineState
self.mp4PipelineState = mp4PipelineState
self.hevcPipelineState = hevcPipelineState
super.init(frame: frame)
applicationHandler.delegate = self
backgroundColor = .clear
@@ -75,12 +80,16 @@ open class PlayerView: UIView {
guard let metalLib = try? device.makeLibrary(URL: Bundle.module.defaultMetalLibraryURL) else {
return nil
}
guard let pipelineState = try? device.makeRenderPipelineState(metalLib: metalLib) else {
guard let mp4PipelineState = try? device.makeRenderPipelineState(metalLib: metalLib, fragmentFunctionName: "mp4FragmentShader") else {
return nil
}
guard let hevcPipelineState = try? device.makeRenderPipelineState(metalLib: metalLib, fragmentFunctionName: "hevcFragmentShader") else {
return nil
}
self.commandQueue = commandQueue
self.textureCache = textureCache
self.pipelineState = pipelineState
self.mp4PipelineState = mp4PipelineState
self.hevcPipelineState = hevcPipelineState
super.init(coder: aDecoder)
applicationHandler.delegate = self
backgroundColor = .clear
@@ -98,7 +107,16 @@ open class PlayerView: UIView {

private func renderImage(with frame: Frame, to nextDrawable: CAMetalDrawable) throws {
let (baseYTexture, baseCbCrTexture, alphaYTexture) = try makeTexturesFrom(frame)

let pipelineState: MTLRenderPipelineState

switch frame {
case .yCbCrWithA(_, _):
pipelineState = mp4PipelineState
case .yCbCrA(_):
pipelineState = hevcPipelineState
}

let renderDesc = MTLRenderPassDescriptor()
renderDesc.colorAttachments[0].texture = nextDrawable.texture
renderDesc.colorAttachments[0].loadAction = .clear
19 changes: 14 additions & 5 deletions Sources/Kitsunebi/Asset.swift
@@ -8,16 +8,25 @@
import AVFoundation

final class Asset {
private let outputSettings: [String: Any] = [
kCVPixelBufferMetalCompatibilityKey as String: true
]
private var outputSettings: [String: Any] {
if let pixelFormatType = pixelFormatType {
return [
kCVPixelBufferPixelFormatTypeKey as String: pixelFormatType,
kCVPixelBufferMetalCompatibilityKey as String: true
]
}

return [kCVPixelBufferMetalCompatibilityKey as String: true]
}
let asset: AVURLAsset
private let pixelFormatType: OSType?
private var reader: AVAssetReader? = nil
private var output: AVAssetReaderTrackOutput? = nil
var status: AVAssetReader.Status? { reader?.status }

init(url: URL) {
asset = AVURLAsset(url: url)
init(url: URL, pixelFormatType: OSType? = nil) {
self.asset = AVURLAsset(url: url)
self.pixelFormatType = pixelFormatType
}

func reset() throws {
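For reference, output settings shaped like Asset's computed property are the dictionaries AVAssetReaderTrackOutput accepts; the reader wiring itself is not shown in this hunk, so the sketch below is an assumed, minimal version of that setup (the helper name and structure are illustrative, not from the repository).

import AVFoundation

// Minimal sketch of feeding outputSettings to an AVAssetReaderTrackOutput so that
// Core Video delivers buffers in the requested pixel format. Assumed usage, not
// taken from this diff.
func makeVideoOutput(for asset: AVURLAsset,
                     pixelFormatType: OSType?) throws -> (AVAssetReader, AVAssetReaderTrackOutput)? {
  guard let track = asset.tracks(withMediaType: .video).first else { return nil }
  var settings: [String: Any] = [kCVPixelBufferMetalCompatibilityKey as String: true]
  if let pixelFormatType = pixelFormatType {
    settings[kCVPixelBufferPixelFormatTypeKey as String] = pixelFormatType
  }
  let reader = try AVAssetReader(asset: asset)
  let output = AVAssetReaderTrackOutput(track: track, outputSettings: settings)
  if reader.canAdd(output) { reader.add(output) }
  return (reader, output)
}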
2 changes: 1 addition & 1 deletion Sources/Kitsunebi/MTLDevice+.swift
@@ -51,7 +51,7 @@ extension MTLDevice {
metalLib: MTLLibrary,
pixelFormat: MTLPixelFormat = .bgra8Unorm,
vertexFunctionName: String = "vertexShader",
fragmentFunctionName: String = "fragmentShader"
fragmentFunctionName: String
) throws -> MTLRenderPipelineState {
let pipelineDesc = MTLRenderPipelineDescriptor()
pipelineDesc.vertexFunction = metalLib.makeFunction(name: vertexFunctionName)
8 changes: 5 additions & 3 deletions Sources/Kitsunebi/VideoEngine.swift
@@ -37,16 +37,18 @@ internal class VideoEngine: NSObject {
private lazy var currentFrameIndex: Int = 0

public init(base baseVideoURL: URL, alpha alphaVideoURL: URL, fps: Int) {
let baseAsset = Asset(url: baseVideoURL)
let alphaAsset = Asset(url: alphaVideoURL)
// Input may be either video range or full range, so unify on video range
let baseAsset = Asset(url: baseVideoURL, pixelFormatType: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)
let alphaAsset = Asset(url: alphaVideoURL, pixelFormatType: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)
asset = .yCbCrWithA(yCbCr: baseAsset, a: alphaAsset)
fpsKeeper = FPSKeeper(fps: fps)
super.init()
renderThread.start()
}

public init(hevcWithAlpha hevcWithAlphaVideoURL: URL, fps: Int) {
let hevcWithAlphaAsset = Asset(url: hevcWithAlphaVideoURL)
// Input may be either video range or full range, so unify on video range
let hevcWithAlphaAsset = Asset(url: hevcWithAlphaVideoURL, pixelFormatType: kCVPixelFormatType_420YpCbCr8VideoRange_8A_TriPlanar)
asset = .yCbCrA(yCbCrA: hevcWithAlphaAsset)
fpsKeeper = FPSKeeper(fps: fps)
super.init()
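Since both initializers now pin the reader output to a video-range pixel format, a quick runtime check can confirm the assumption the shaders rely on. The helper below is an assumed debugging sketch, not code from this repository.

import AVFoundation

// Assumed debugging sketch: verify that decoded buffers really arrive in one of the
// video-range formats requested above, so the shaders' range expansion stays valid.
func assertVideoRangeFormat(_ sampleBuffer: CMSampleBuffer) {
  guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
  let format = CVPixelBufferGetPixelFormatType(pixelBuffer)
  assert(format == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
      || format == kCVPixelFormatType_420YpCbCr8VideoRange_8A_TriPlanar,
         "unexpected pixel format: \(format)")
}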
55 changes: 48 additions & 7 deletions Sources/Kitsunebi/default.metal
@@ -23,7 +23,39 @@ vertex ColorInOut vertexShader(uint vid [[ vertex_id ]]) {
return vertices[vid];
}

fragment float4 fragmentShader(ColorInOut in [[ stage_in ]],
fragment float4 mp4FragmentShader(ColorInOut in [[ stage_in ]],
texture2d<float> baseYTexture [[ texture(0) ]],
texture2d<float> alphaYTexture [[ texture(1) ]],
texture2d<float> baseCbCrTexture [[ texture(2) ]]) {
constexpr sampler colorSampler;
const float4x4 ycbcrToRGBTransform = float4x4(
float4(+1.0000f, +1.0000f, +1.0000f, +0.0000f),
float4(+0.0000f, -0.3441f, +1.7720f, +0.0000f),
float4(+1.4020f, -0.7141f, +0.0000f, +0.0000f),
float4(-0.7010f, +0.5291f, -0.8860f, +1.0000f)
);
float4 baseYUVColor = float4(baseYTexture.sample(colorSampler, in.texCoords).r,
baseCbCrTexture.sample(colorSampler, in.texCoords).rg,
1.0f);
// yuv video range to full range
baseYUVColor.r = (baseYUVColor.r - (16.0f/255.0f)) * (255.0f/(235.0f-16.0f));
baseYUVColor.g = (baseYUVColor.g - (16.0f/255.0f)) * (255.0f/(240.0f-16.0f));
baseYUVColor.b = (baseYUVColor.b - (16.0f/255.0f)) * (255.0f/(240.0f-16.0f));

// yuv to rgb
float4 baseColor = ycbcrToRGBTransform * baseYUVColor;


// get alpha value
float alphaColor = alphaYTexture.sample(colorSampler, in.texCoords).r;
// video range to full range
alphaColor = (alphaColor - (16.0f/255.0f)) * (255.0f/(235.0f-16.0f));

return float4(baseColor.r, baseColor.g, baseColor.b, alphaColor);
}


fragment float4 hevcFragmentShader(ColorInOut in [[ stage_in ]],
texture2d<float> baseYTexture [[ texture(0) ]],
texture2d<float> alphaYTexture [[ texture(1) ]],
texture2d<float> baseCbCrTexture [[ texture(2) ]]) {
@@ -35,13 +67,22 @@ fragment float4 fragmentShader(ColorInOut in [[ stage_in ]],
float4(-0.7010f, +0.5291f, -0.8860f, +1.0000f)
);

float4 baseColor = ycbcrToRGBTransform * float4(baseYTexture.sample(colorSampler, in.texCoords).r,
baseCbCrTexture.sample(colorSampler, in.texCoords).rg,
1.0);
float4 baseYUVColor = float4(baseYTexture.sample(colorSampler, in.texCoords).r,
baseCbCrTexture.sample(colorSampler, in.texCoords).rg,
1.0f);

float4 alphaColor = alphaYTexture.sample(colorSampler, in.texCoords).r;
// yuv video range to full range
baseYUVColor.r = (baseYUVColor.r - (16.0f/255.0f)) * (255.0f/(235.0f-16.0f));
baseYUVColor.g = (baseYUVColor.g - (16.0f/255.0f)) * (255.0f/(240.0f-16.0f));
baseYUVColor.b = (baseYUVColor.b - (16.0f/255.0f)) * (255.0f/(240.0f-16.0f));

return float4(baseColor.r, baseColor.g, baseColor.b, alphaColor.r);
}
// yuv to rgb
float4 baseColor = ycbcrToRGBTransform * baseYUVColor;

// kCVPixelFormatType_420YpCbCr8VideoRange_8A_TriPlanar
// alpha is already full range, so no conversion is needed
float alphaColor = alphaYTexture.sample(colorSampler, in.texCoords).r;

return float4(baseColor.r, baseColor.g, baseColor.b, alphaColor);
}
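A quick numeric check of the symptom the commit message describes: with an 8-bit video-range decode, a fully opaque pixel in the alpha plane comes back as 235/255 ≈ 0.92, so using it directly as alpha renders everything at roughly 92% opacity. The standalone Swift lines below (illustrative only) show the expansion restoring full opacity.

// Fully opaque alpha decoded as 8-bit video range -- reads as ~0.92 if used directly.
let decodedAlpha: Float = 235.0 / 255.0
// After the video-range expansion applied in mp4FragmentShader, it becomes 1.0.
let expandedAlpha = (decodedAlpha - 16.0 / 255.0) * (255.0 / (235.0 - 16.0))
print(decodedAlpha, expandedAlpha)  // ≈ 0.922  1.0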
