Cherry-pick various fixes

laktyushin committed Jan 17, 2024
1 parent 2d23d6c commit 3fc919e
Showing 7 changed files with 103 additions and 70 deletions.
1 change: 1 addition & 0 deletions Telegram/Telegram-iOS/en.lproj/Localizable.strings
@@ -10886,6 +10886,7 @@ Sorry for the inconvenience.";
 "Call.StatusWeakSignal" = "Weak network signal";
 
 "Conversation.ContactAddContact" = "ADD";
+"Conversation.ContactAddContactLong" = "ADD CONTACT";
 "Conversation.ContactMessage" = "MESSAGE";
 
 "Chat.PlayOnceVideoMessageTooltip" = "This video message can only be played once.";
50 changes: 27 additions & 23 deletions submodules/Camera/Sources/CameraOutput.swift
@@ -304,31 +304,35 @@ final class CameraOutput: NSObject {
         self.currentMode = mode
         self.lastSampleTimestamp = nil
 
         let codecType: AVVideoCodecType
-        if hasHEVCHardwareEncoder {
-            codecType = .hevc
-        } else {
-            codecType = .h264
-        }
-
-        guard var videoSettings = self.videoOutput.recommendedVideoSettings(forVideoCodecType: codecType, assetWriterOutputFileType: .mp4) else {
-            return .complete()
-        }
-
-        var dimensions: CGSize = CGSize(width: 1080, height: 1920)
-        if orientation == .landscapeLeft || orientation == .landscapeRight {
-            dimensions = CGSize(width: 1920, height: 1080)
-        }
-        var orientation = orientation
-        if case .roundVideo = mode {
-            videoSettings[AVVideoWidthKey] = 400
-            videoSettings[AVVideoHeightKey] = 400
-            dimensions = CGSize(width: 400, height: 400)
-            orientation = .landscapeRight
-        }
+        var orientation = orientation
+        let dimensions: CGSize
+        let videoSettings: [String: Any]
+        if case .roundVideo = mode {
+            codecType = .h264
+            dimensions = videoMessageDimensions.cgSize
+            orientation = .landscapeRight
+
+            let compressionProperties: [String: Any] = [
+                AVVideoAverageBitRateKey: 1000 * 1000,
+                AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel,
+                AVVideoH264EntropyModeKey: AVVideoH264EntropyModeCABAC
+            ]
+            videoSettings = [
+                AVVideoCodecKey: AVVideoCodecType.h264,
+                AVVideoCompressionPropertiesKey: compressionProperties,
+                AVVideoWidthKey: Int(dimensions.width),
+                AVVideoHeightKey: Int(dimensions.height)
+            ]
+        } else {
+            let codecType: AVVideoCodecType = hasHEVCHardwareEncoder ? .hevc : .h264
+            if orientation == .landscapeLeft || orientation == .landscapeRight {
+                dimensions = CGSize(width: 1920, height: 1080)
+            } else {
+                dimensions = CGSize(width: 1080, height: 1920)
+            }
+            guard let settings = self.videoOutput.recommendedVideoSettings(forVideoCodecType: codecType, assetWriterOutputFileType: .mp4) else {
+                return .complete()
+            }
+            videoSettings = settings
+        }
 
         let audioSettings = self.audioOutput.recommendedAudioSettingsForAssetWriter(writingTo: .mp4) ?? [:]
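
Note: as a minimal standalone sketch (hypothetical helper names, not code from this repository), an H.264 settings dictionary like the one added above would be consumed by an AVAssetWriterInput roughly as follows:

import AVFoundation

// Hypothetical illustration only: handing round-video H.264 settings to an
// AVAssetWriterInput. The helper and its standalone form are assumptions.
func makeRoundVideoWriterInput() -> AVAssetWriterInput {
    let compressionProperties: [String: Any] = [
        AVVideoAverageBitRateKey: 1000 * 1000, // ~1 Mbit/s, matching the diff
        AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel,
        AVVideoH264EntropyModeKey: AVVideoH264EntropyModeCABAC
    ]
    let videoSettings: [String: Any] = [
        AVVideoCodecKey: AVVideoCodecType.h264,
        AVVideoCompressionPropertiesKey: compressionProperties,
        AVVideoWidthKey: 400, // square round-message frame
        AVVideoHeightKey: 400
    ]
    let input = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
    input.expectsMediaDataInRealTime = true // live camera capture
    return input
}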
@@ -514,10 +518,10 @@ final class CameraOutput: NSObject {
         let extensions = CMFormatDescriptionGetExtensions(formatDescription) as! [String: Any]
 
         var updatedExtensions = extensions
-        updatedExtensions["CVBytesPerRow"] = 400 * 4
+        updatedExtensions["CVBytesPerRow"] = videoMessageDimensions.width * 4
 
         var newFormatDescription: CMFormatDescription?
-        var status = CMVideoFormatDescriptionCreate(allocator: nil, codecType: mediaSubType, width: 400, height: 400, extensions: updatedExtensions as CFDictionary, formatDescriptionOut: &newFormatDescription)
+        var status = CMVideoFormatDescriptionCreate(allocator: nil, codecType: mediaSubType, width: videoMessageDimensions.width, height: videoMessageDimensions.height, extensions: updatedExtensions as CFDictionary, formatDescriptionOut: &newFormatDescription)
         guard status == noErr, let newFormatDescription else {
             return nil
         }
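
Note: the recreated format description can be sanity-checked with the companion CoreMedia getter (a hypothetical check, assuming the 400x400 round-video constant; not repository code):

import CoreMedia

// Hypothetical check: the rebuilt description should now report the
// round-video size instead of the sensor size.
func isRoundVideoFormat(_ description: CMVideoFormatDescription) -> Bool {
    let size = CMVideoFormatDescriptionGetDimensions(description)
    return size.width == 400 && size.height == 400
}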
16 changes: 7 additions & 9 deletions submodules/Camera/Sources/CameraRoundVideoFilter.swift
@@ -5,6 +5,9 @@ import CoreMedia
 import CoreVideo
 import Metal
 import Display
+import TelegramCore
+
+let videoMessageDimensions = PixelDimensions(width: 400, height: 400)
 
 func allocateOutputBufferPool(with inputFormatDescription: CMFormatDescription, outputRetainedBufferCountHint: Int) -> (
     outputBufferPool: CVPixelBufferPool?,
@@ -114,8 +117,7 @@ class CameraRoundVideoFilter {
         }
         self.inputFormatDescription = formatDescription
 
-        let diameter: CGFloat = 400.0
-        let circleImage = generateImage(CGSize(width: diameter, height: diameter), opaque: false, scale: 1.0, rotatedContext: { size, context in
+        let circleImage = generateImage(videoMessageDimensions.cgSize, opaque: false, scale: 1.0, rotatedContext: { size, context in
             let bounds = CGRect(origin: .zero, size: size)
             context.clear(bounds)
             context.setFillColor(UIColor.white.cgColor)
@@ -158,7 +160,7 @@ class CameraRoundVideoFilter {
 
         var sourceImage = CIImage(cvImageBuffer: pixelBuffer)
         sourceImage = sourceImage.oriented(additional ? .leftMirrored : .right)
-        let scale = 400.0 / min(sourceImage.extent.width, sourceImage.extent.height)
+        let scale = CGFloat(videoMessageDimensions.width) / min(sourceImage.extent.width, sourceImage.extent.height)
 
         resizeFilter.setValue(sourceImage, forKey: kCIInputImageKey)
         resizeFilter.setValue(scale, forKey: kCIInputScaleKey)
@@ -203,18 +205,14 @@ class CameraRoundVideoFilter {
         guard let finalImage else {
             return nil
         }
-
-        if finalImage.extent.width != 400 {
-            print("wtf: \(finalImage)")
-        }
-
+
         var pbuf: CVPixelBuffer?
         CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, outputPixelBufferPool!, &pbuf)
         guard let outputPixelBuffer = pbuf else {
             return nil
         }
 
-        self.ciContext.render(finalImage, to: outputPixelBuffer, bounds: CGRect(origin: .zero, size: CGSize(width: 400, height: 400)), colorSpace: outputColorSpace)
+        self.ciContext.render(finalImage, to: outputPixelBuffer, bounds: CGRect(origin: .zero, size: videoMessageDimensions.cgSize), colorSpace: outputColorSpace)
 
         return outputPixelBuffer
     }
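
Note: the render path the filter relies on can be summarized in isolation (a sketch under the assumption of a 400x400 BGRA pool; names are hypothetical, not repository code):

import CoreImage
import CoreVideo

// Hypothetical distillation of the render step above: draw a CIImage into a
// pixel buffer taken from a pool, as CameraRoundVideoFilter does.
func renderToPool(image: CIImage, context: CIContext, pool: CVPixelBufferPool) -> CVPixelBuffer? {
    var buffer: CVPixelBuffer?
    CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pool, &buffer)
    guard let output = buffer else {
        return nil
    }
    context.render(image, to: output, bounds: CGRect(x: 0, y: 0, width: 400, height: 400), colorSpace: CGColorSpaceCreateDeviceRGB())
    return output
}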
@@ -307,7 +307,18 @@ public class ChatMessageContactBubbleContentNode: ChatMessageBubbleContentNode {
         }
 
         let (messageButtonWidth, messageContinueLayout) = makeMessageButtonLayout(constrainedSize.width, nil, false, item.presentationData.strings.Conversation_ContactMessage.uppercased(), mainColor, false, false)
-        let (addButtonWidth, addContinueLayout) = makeAddButtonLayout(constrainedSize.width, nil, false, !canMessage && !canAdd ? item.presentationData.strings.Conversation_ViewContactDetails.uppercased() : item.presentationData.strings.Conversation_ContactAddContact.uppercased(), mainColor, false, false)
+
+        let addTitle: String
+        if !canMessage && !canAdd {
+            addTitle = item.presentationData.strings.Conversation_ViewContactDetails
+        } else {
+            if canMessage {
+                addTitle = item.presentationData.strings.Conversation_ContactAddContact
+            } else {
+                addTitle = item.presentationData.strings.Conversation_ContactAddContactLong
+            }
+        }
+        let (addButtonWidth, addContinueLayout) = makeAddButtonLayout(constrainedSize.width, nil, false, addTitle.uppercased(), mainColor, false, false)
 
         let maxButtonWidth = max(messageButtonWidth, addButtonWidth)
         var maxContentWidth: CGFloat = avatarSize.width + 7.0
@@ -327,7 +338,7 @@ public class ChatMessageContactBubbleContentNode: ChatMessageBubbleContentNode {
         let lineWidth: CGFloat = 3.0
 
         var buttonCount = 1
-        if canMessage {
+        if canMessage && canAdd {
             buttonCount += 1
         }
         var buttonWidth = floor((boundingWidth - layoutConstants.text.bubbleInsets.right * 2.0 - lineWidth))
@@ -387,7 +398,7 @@ public class ChatMessageContactBubbleContentNode: ChatMessageBubbleContentNode {
         strongSelf.messageButtonNode.isHidden = !canMessage
 
         let backgroundInsets = layoutConstants.text.bubbleInsets
-        let backgroundFrame = CGRect(origin: CGPoint(x: backgroundInsets.left, y: backgroundInsets.top + 5.0), size: CGSize(width: contentWidth - layoutConstants.text.bubbleInsets.right * 2.0, height: layoutSize.height - 34.0))
+        let backgroundFrame = CGRect(origin: CGPoint(x: backgroundInsets.left, y: backgroundInsets.top + 5.0), size: CGSize(width: boundingWidth - layoutConstants.text.bubbleInsets.right * 2.0, height: layoutSize.height - 34.0))
 
         if let statusSizeAndApply = statusSizeAndApply {
             strongSelf.dateAndStatusNode.frame = CGRect(origin: CGPoint(x: layoutConstants.text.bubbleInsets.left, y: backgroundFrame.maxY + 3.0), size: statusSizeAndApply.0)
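
Note: the new title and button-count logic reduces to a small decision table. A hypothetical condensed restatement follows; the "VIEW CONTACT" literal stands in for Conversation.ViewContactDetails, whose value is not shown in this diff, and the function itself is not repository code:

// Hypothetical restatement of the layout decision above.
// (canMessage, canAdd) -> add-button title and visible button count.
func contactButtonLayout(canMessage: Bool, canAdd: Bool) -> (addTitle: String, buttonCount: Int) {
    let addTitle: String
    if !canMessage && !canAdd {
        addTitle = "VIEW CONTACT" // assumed value of Conversation.ViewContactDetails
    } else if canMessage {
        addTitle = "ADD" // Conversation.ContactAddContact
    } else {
        addTitle = "ADD CONTACT" // the new Conversation.ContactAddContactLong
    }
    return (addTitle, (canMessage && canAdd) ? 2 : 1)
}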
@@ -532,7 +532,7 @@ public class VideoMessageCameraScreen: ViewController {
     fileprivate var liveUploadInterface: LegacyLiveUploadInterface?
     private var currentLiveUploadPath: String?
     fileprivate var currentLiveUploadData: LegacyLiveUploadInterfaceResult?
-
+
     fileprivate let backgroundView: UIVisualEffectView
     fileprivate let containerView: UIView
     fileprivate let componentHost: ComponentView<ViewControllerComponentContainer.Environment>
@@ -689,16 +689,27 @@ public class VideoMessageCameraScreen: ViewController {
         }
 
         func withReadyCamera(isFirstTime: Bool = false, _ f: @escaping () -> Void) {
+            guard let controller = self.controller else {
+                return
+            }
             if #available(iOS 13.0, *) {
-                let _ = ((self.cameraState.isDualCameraEnabled ? self.additionalPreviewView.isPreviewing : self.mainPreviewView.isPreviewing)
-                |> filter { $0 }
-                |> take(1)).startStandalone(next: { _ in
+                let _ = (combineLatest(queue: Queue.mainQueue(),
+                    self.cameraState.isDualCameraEnabled ? self.additionalPreviewView.isPreviewing : self.mainPreviewView.isPreviewing,
+                    controller.audioSessionReady.get()
+                )
+                |> filter { $0 && $1 }
+                |> take(1)).startStandalone(next: { _, _ in
                     f()
                 })
             } else {
-                Queue.mainQueue().after(0.35) {
+                let _ = (combineLatest(queue: Queue.mainQueue(),
+                    .single(true) |> delay(0.35, queue: Queue.mainQueue()),
+                    controller.audioSessionReady.get()
+                )
+                |> filter { $0 && $1 }
+                |> take(1)).startStandalone(next: { _, _ in
                     f()
-                }
+                })
             }
         }
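
Note: the gating pattern introduced here, distilled into a hypothetical helper that uses the same SwiftSignalKit operators as the code above:

import SwiftSignalKit

// Hypothetical sketch: invoke a closure exactly once, only after both the
// camera preview and the audio session report ready.
func runWhenReady(isPreviewing: Signal<Bool, NoError>, audioSessionReady: Signal<Bool, NoError>, _ f: @escaping () -> Void) {
    let _ = (combineLatest(queue: Queue.mainQueue(), isPreviewing, audioSessionReady)
    |> filter { $0 && $1 }
    |> take(1)).startStandalone(next: { _, _ in
        f()
    })
}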

@@ -1241,6 +1252,7 @@ public class VideoMessageCameraScreen: ViewController {
     fileprivate let completion: (EnqueueMessage?, Bool?, Int32?) -> Void
 
     private var audioSessionDisposable: Disposable?
+    fileprivate let audioSessionReady = ValuePromise<Bool>(false)
 
     private let hapticFeedback = HapticFeedback()
 
@@ -1484,19 +1496,21 @@ public class VideoMessageCameraScreen: ViewController {
             finalDuration = duration
         }
 
+        let dimensions = PixelDimensions(width: 400, height: 400)
+
         var thumbnailImage = video.thumbnail
         if startTime > 0.0 {
             let composition = composition(with: results)
             let imageGenerator = AVAssetImageGenerator(asset: composition)
-            imageGenerator.maximumSize = CGSize(width: 400, height: 400)
+            imageGenerator.maximumSize = dimensions.cgSize
             imageGenerator.appliesPreferredTrackTransform = true
 
             if let cgImage = try? imageGenerator.copyCGImage(at: CMTime(seconds: startTime, preferredTimescale: composition.duration.timescale), actualTime: nil) {
                 thumbnailImage = UIImage(cgImage: cgImage)
             }
         }
 
-        let values = MediaEditorValues(peerId: self.context.account.peerId, originalDimensions: PixelDimensions(width: 400, height: 400), cropOffset: .zero, cropRect: CGRect(origin: .zero, size: CGSize(width: 400.0, height: 400.0)), cropScale: 1.0, cropRotation: 0.0, cropMirroring: false, cropOrientation: nil, gradientColors: nil, videoTrimRange: self.node.previewState?.trimRange, videoIsMuted: false, videoIsFullHd: false, videoIsMirrored: false, videoVolume: nil, additionalVideoPath: nil, additionalVideoIsDual: false, additionalVideoPosition: nil, additionalVideoScale: nil, additionalVideoRotation: nil, additionalVideoPositionChanges: [], additionalVideoTrimRange: nil, additionalVideoOffset: nil, additionalVideoVolume: nil, nightTheme: false, drawing: nil, entities: [], toolValues: [:], audioTrack: nil, audioTrackTrimRange: nil, audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, qualityPreset: .videoMessage)
+        let values = MediaEditorValues(peerId: self.context.account.peerId, originalDimensions: dimensions, cropOffset: .zero, cropRect: CGRect(origin: .zero, size: dimensions.cgSize), cropScale: 1.0, cropRotation: 0.0, cropMirroring: false, cropOrientation: nil, gradientColors: nil, videoTrimRange: self.node.previewState?.trimRange, videoIsMuted: false, videoIsFullHd: false, videoIsMirrored: false, videoVolume: nil, additionalVideoPath: nil, additionalVideoIsDual: false, additionalVideoPosition: nil, additionalVideoScale: nil, additionalVideoRotation: nil, additionalVideoPositionChanges: [], additionalVideoTrimRange: nil, additionalVideoOffset: nil, additionalVideoVolume: nil, nightTheme: false, drawing: nil, entities: [], toolValues: [:], audioTrack: nil, audioTrackTrimRange: nil, audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, qualityPreset: .videoMessage)
 
         var resourceAdjustments: VideoMediaResourceAdjustments? = nil
         if let valuesData = try? JSONEncoder().encode(values) {
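
Note: the thumbnail step, as a standalone sketch (a hypothetical function; the repository code operates on its own trimmed composition):

import AVFoundation
import UIKit

// Hypothetical standalone version of the thumbnail extraction above: one
// square frame at a given offset, honoring the track's transform.
func roundVideoThumbnail(for asset: AVAsset, at seconds: Double) -> UIImage? {
    let generator = AVAssetImageGenerator(asset: asset)
    generator.maximumSize = CGSize(width: 400, height: 400)
    generator.appliesPreferredTrackTransform = true
    let time = CMTime(seconds: seconds, preferredTimescale: 600)
    guard let cgImage = try? generator.copyCGImage(at: time, actualTime: nil) else {
        return nil
    }
    return UIImage(cgImage: cgImage)
}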
@@ -1614,10 +1628,13 @@ public class VideoMessageCameraScreen: ViewController {
         }
 
         private func requestAudioSession() {
-            self.audioSessionDisposable = self.context.sharedContext.mediaManager.audioSession.push(audioSessionType: .recordWithOthers, activate: { _ in
+            self.audioSessionDisposable = self.context.sharedContext.mediaManager.audioSession.push(audioSessionType: .recordWithOthers, activate: { [weak self] _ in
                 if #available(iOS 13.0, *) {
                     try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(true)
                 }
+                if let self {
+                    self.audioSessionReady.set(true)
+                }
             }, deactivate: { _ in
                 return .single(Void())
            })
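
Note: together with the audioSessionReady promise added above, the readiness handshake reduces to the following (hypothetical distillation, using only SwiftSignalKit calls that appear in this diff):

import SwiftSignalKit

// Hypothetical sketch: the audio session's activate callback flips a promise
// that withReadyCamera waits on before starting recording-related work.
let audioSessionReady = ValuePromise<Bool>(false)

func onAudioSessionActivated() {
    audioSessionReady.set(true) // mirrors the [weak self] activate closure above
}

func whenAudioSessionReady(_ f: @escaping () -> Void) {
    let _ = (audioSessionReady.get()
    |> filter { $0 }
    |> take(1)).startStandalone(next: { _ in
        f()
    })
}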
@@ -350,11 +350,7 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
                 }
             }
         }
-
-        if isFirstTime, !self.viewOnceButton.isHidden {
-            self.maybePresentViewOnceTooltip()
-        }
-
+
         let panelHeight = defaultHeight(metrics: metrics)
 
         transition.updateFrame(node: self.deleteButton, frame: CGRect(origin: CGPoint(x: leftInset + 2.0 - UIScreenPixel, y: 1), size: CGSize(width: 40.0, height: 40)))
@@ -488,6 +484,10 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
             }
         }
 
+        if isFirstTime, !self.viewOnceButton.isHidden {
+            self.maybePresentViewOnceTooltip()
+        }
+
         return panelHeight
     }
44 changes: 23 additions & 21 deletions submodules/UrlHandling/Sources/UrlHandling.swift
@@ -126,6 +126,29 @@ public func parseInternalUrl(query: String) -> ParsedInternalUrl? {
     }
     if !pathComponents.isEmpty && !pathComponents[0].isEmpty {
         let peerName: String = pathComponents[0]
+
+        if pathComponents[0].hasPrefix("+") || pathComponents[0].hasPrefix("%20") {
+            let component = pathComponents[0].replacingOccurrences(of: "%20", with: "+")
+            if component.rangeOfCharacter(from: CharacterSet(charactersIn: "0123456789+").inverted) == nil {
+                var attach: String?
+                var startAttach: String?
+                if let queryItems = components.queryItems {
+                    for queryItem in queryItems {
+                        if let value = queryItem.value {
+                            if queryItem.name == "attach" {
+                                attach = value
+                            } else if queryItem.name == "startattach" {
+                                startAttach = value
+                            }
+                        }
+                    }
+                }
+
+                return .phone(component.replacingOccurrences(of: "+", with: ""), attach, startAttach)
+            } else {
+                return .join(String(component.dropFirst()))
+            }
+        }
         if pathComponents.count == 1 {
             if let queryItems = components.queryItems {
                 if peerName == "socks" || peerName == "proxy" {
@@ -288,27 +311,6 @@ public func parseInternalUrl(query: String) -> ParsedInternalUrl? {
             }
         } else if pathComponents[0].hasPrefix(phonebookUsernamePathPrefix), let idValue = Int64(String(pathComponents[0][pathComponents[0].index(pathComponents[0].startIndex, offsetBy: phonebookUsernamePathPrefix.count)...])) {
             return .peerId(PeerId(namespace: Namespaces.Peer.CloudUser, id: PeerId.Id._internalFromInt64Value(idValue)))
-        } else if pathComponents[0].hasPrefix("+") || pathComponents[0].hasPrefix("%20") {
-            let component = pathComponents[0].replacingOccurrences(of: "%20", with: "+")
-            if component.rangeOfCharacter(from: CharacterSet(charactersIn: "0123456789+").inverted) == nil {
-                var attach: String?
-                var startAttach: String?
-                if let queryItems = components.queryItems {
-                    for queryItem in queryItems {
-                        if let value = queryItem.value {
-                            if queryItem.name == "attach" {
-                                attach = value
-                            } else if queryItem.name == "startattach" {
-                                startAttach = value
-                            }
-                        }
-                    }
-                }
-
-                return .phone(component.replacingOccurrences(of: "+", with: ""), attach, startAttach)
-            } else {
-                return .join(String(component.dropFirst()))
-            }
         } else if pathComponents[0].hasPrefix("$") || pathComponents[0].hasPrefix("%24") {
             var component = pathComponents[0].replacingOccurrences(of: "%24", with: "$")
             if component.hasPrefix("$") {
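
Note: expected behavior of the relocated branch, sketched as usage (hypothetical examples, not tests from the repository):

// A "+"-prefixed path component that is purely numeric resolves to a phone
// contact; any other "+"-prefixed component is treated as an invite hash.
let phone = parseInternalUrl(query: "+15551234567")
// expected: .phone("15551234567", nil, nil)
let invite = parseInternalUrl(query: "+AbCdEfGh")
// expected: .join("AbCdEfGh")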
