Commit
1.9.0 (380)
denis15yo committed Nov 26, 2024
1 parent a980c65 commit d2205e9
Showing 21 changed files with 1,030 additions and 266 deletions.
25 changes: 7 additions & 18 deletions MODULE.bazel.lock

Some generated files are not rendered by default.

26 changes: 19 additions & 7 deletions Nicegram/NGData/Sources/NGSettings.swift
@@ -57,6 +57,9 @@ public struct NGSettings {

@NGStorage(key: "rememberFolderOnExit", defaultValue: false)
public static var rememberFolderOnExit: Bool

@NGStorage(key: "useOpenAI", defaultValue: false)
public static var useOpenAI: Bool

@NGStorage(key: "lastFolder", defaultValue: -1)
public static var lastFolder: Int32
@@ -124,6 +127,9 @@ public struct NGSettings {

@NGStorage(key: "hideMentionNotification", defaultValue: false)
public static var hideMentionNotification: Bool

@NGStorage(key: "appleSpeechToTextLocale", defaultValue: [:])
public static var appleSpeechToTextLocale: [Int64: Locale]
}

public struct NGWebSettings {
@@ -167,14 +173,11 @@ public func isPremium() -> Bool {
}

public func usetrButton() -> [(Bool, [String])] {
-    if isPremium() {
-        var ignoredLangs = NGSettings.ignoreTranslate
-        if !NGSettings.useIgnoreLanguages {
-            ignoredLangs = []
-        }
-        return [(NGSettings.oneTapTr, ignoredLangs)]
+    var ignoredLangs = NGSettings.ignoreTranslate
+    if !NGSettings.useIgnoreLanguages {
+        ignoredLangs = []
     }
-    return [(false, [])]
+    return [(NGSettings.oneTapTr, ignoredLangs)]
}

public class SystemNGSettings {
@@ -217,6 +220,15 @@ public class SystemNGSettings {
UD.set(newValue, forKey: "inDoubleBottom")
}
}

public var hideReactionsToYourMessages: Bool {
get {
return UD.bool(forKey: "hideReactionsToYourMessages")
}
set {
UD.set(newValue, forKey: "hideReactionsToYourMessages")
}
}
}

public var VarSystemNGSettings = SystemNGSettings()
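
Editor's note, not part of the commit: the two new settings above (useOpenAI and the per-peer appleSpeechToTextLocale dictionary) drive recognizer selection in ConvertSpeechToText.swift further down. The following is a minimal illustrative sketch of how they can be read and updated, assuming the @NGStorage property wrapper exposes them as plain static properties the way the existing NGSettings entries are used; the helper names and peer-id handling here are hypothetical.

import Foundation
import NGData

// Illustrative helper (not from the commit): pick a recognizer description for a peer.
func preferredRecognizer(for peerId: Int64) -> String {
    if NGSettings.useOpenAI {
        return "OpenAI"
    }
    if let locale = NGSettings.appleSpeechToTextLocale[peerId] {
        return "Apple (\(locale.identifier))"
    }
    return "Apple (no locale chosen yet)"
}

// Illustrative helper: remember the Apple recognizer locale chosen for a peer,
// mirroring the read-modify-write pattern used in ConvertSpeechToText.swift.
func rememberAppleLocale(_ locale: Locale, for peerId: Int64) {
    var locales = NGSettings.appleSpeechToTextLocale
    locales[peerId] = locale
    NGSettings.appleSpeechToTextLocale = locales
}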
26 changes: 26 additions & 0 deletions Nicegram/NGSpeechToText/BUILD
@@ -0,0 +1,26 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")

swift_library(
name = "NGSpeechToText",
module_name = "NGSpeechToText",
srcs = glob([
"Sources/**/*.swift",
]),
deps = [
"//submodules/AccountContext:AccountContext",
"//submodules/Display:Display",
"//submodules/ItemListUI:ItemListUI",
"//submodules/SSignalKit/SwiftSignalKit:SwiftSignalKit",
"//submodules/TelegramCore:TelegramCore",
"//submodules/TelegramPresentationData:TelegramPresentationData",
"//submodules/PresentationDataUtils:PresentationDataUtils",
"//submodules/TelegramUI/Components/ChatControllerInteraction",
"//submodules/TranslateUI:TranslateUI",
"//submodules/Media/ConvertOpusToAAC",
"//Nicegram/NGUI:NGUI",
"@swiftpkg_nicegram_assistant_ios//:FeatPremiumUI",
],
visibility = [
"//visibility:public",
],
)
224 changes: 224 additions & 0 deletions Nicegram/NGSpeechToText/Sources/ConvertSpeechToText.swift
@@ -0,0 +1,224 @@
import Foundation
import TelegramCore
import ChatControllerInteraction
import Postbox
import FeatPremiumUI
import AccountContext
import TelegramPresentationData
import NGData
import NGUI

public enum SpeechToTextMessageSource {
case chat, contextMenu
}

public func convertSpeechToText(
from source: SpeechToTextMessageSource = .chat,
languageStyle: RecognitionLanguagesControllerStyle = .normal,
context: AccountContext,
mediaFile: TelegramMediaFile,
message: Message?,
presentationData: PresentationData,
controllerInteraction: ChatControllerInteraction,
completion: (() -> Void)? = nil,
closeWithoutSelect: (() -> Void)? = nil
) {
var id: Int64?
if let peer = message?.peers.toDict().first?.value,
languageStyle == .normal {
switch EnginePeer(peer) {
case let .channel(channel):
id = channel.id.toInt64()
case let .legacyGroup(group):
id = group.id.toInt64()
case let .user(user):
id = user.id.toInt64()
default:
return
}
}

if NGSettings.useOpenAI {
startConvertSpeechToTextTask(
from: source,
context: context,
mediaFile: mediaFile,
source: .openAI,
message: message,
presentationData: presentationData,
controllerInteraction: controllerInteraction,
completion: completion
)
} else {
if let id,
let locale = NGSettings.appleSpeechToTextLocale[id] {
startConvertSpeechToTextTask(
from: source,
context: context,
mediaFile: mediaFile,
source: .apple(locale),
message: message,
presentationData: presentationData,
controllerInteraction: controllerInteraction,
completion: completion
)
} else {
showLanguages(
with: context,
controllerInteraction: controllerInteraction,
style: languageStyle
) { locale in
if let id {
var appleSpeechToTextLocale = NGSettings.appleSpeechToTextLocale
appleSpeechToTextLocale[id] = locale
NGSettings.appleSpeechToTextLocale = appleSpeechToTextLocale
}
_ = controllerInteraction.navigationController()?.popViewController(animated: true)
startConvertSpeechToTextTask(
from: source,
context: context,
mediaFile: mediaFile,
source: .apple(locale),
message: message,
presentationData: presentationData,
controllerInteraction: controllerInteraction,
completion: completion
)
} selectWhisper: {
_ = controllerInteraction.navigationController()?.popViewController(animated: true)

PremiumUITgHelper.routeToPremium(
source: .speechToText
)
} closeWithoutSelect: {
closeWithoutSelect?()
}
}
}
}

private func showLanguages(
with context: AccountContext,
controllerInteraction: ChatControllerInteraction,
style: RecognitionLanguagesControllerStyle = .normal,
selectLocale: @escaping (Locale) -> Void,
selectWhisper: @escaping () -> Void,
closeWithoutSelect: @escaping () -> Void
) {
let controller = recognitionLanguagesController(
context: context,
style: style,
selectLocale: selectLocale,
selectWhisper: selectWhisper,
closeWithoutSelect: closeWithoutSelect
)
controller.navigationPresentation = .modal

controllerInteraction.navigationController()?.pushViewController(controller, animated: true)
}

private func startConvertSpeechToTextTask(
from messageSource: SpeechToTextMessageSource,
context: AccountContext,
mediaFile: TelegramMediaFile,
source: TgSpeechToTextManager.Source,
message: Message?,
presentationData: PresentationData,
controllerInteraction: ChatControllerInteraction,
completion: (() -> Void)? = nil
) {
Task { @MainActor in
let manager = TgSpeechToTextManager(
accountContext: context
)

if messageSource == .contextMenu {
message?.setSpeechToTextLoading(context: context)
}

let result = await manager.convertSpeechToText(
mediaFile: mediaFile,
source: source
)

switch result {
case .success(let text):
switch messageSource {
case .chat:
message?.updateAudioTranscriptionAttribute(text: text, error: nil, context: context)
case .contextMenu:
message?.setSpeechToTextTranslation(text, context: context)
}
case .needsPremium:
PremiumUITgHelper.routeToPremium(
source: .speechToText
)
case .error(let error):
switch error {
case .recognition(_):
if messageSource == .contextMenu {
message?.removeSpeechToTextMeta(context: context)
}
convertSpeechToText(
from: messageSource,
languageStyle: .whisper,
context: context,
mediaFile: mediaFile,
message: message,
presentationData: presentationData,
controllerInteraction: controllerInteraction
)
case .notAvailable:
if messageSource == .contextMenu {
message?.removeSpeechToTextMeta(context: context)
}
let c = getIAPErrorController(
context: context,
"Speech to text recognizer not available.",
presentationData
)
controllerInteraction.presentGlobalOverlayController(c, nil)
case .authorizationStatus:
if messageSource == .contextMenu {
message?.removeSpeechToTextMeta(context: context)
}
let c = getIAPErrorController(
context: context,
"Speech to text recognizer autorization status error.",
presentationData
)
controllerInteraction.presentGlobalOverlayController(c, nil)
case let .api(error):
switch messageSource {
case .chat:
message?.updateAudioTranscriptionAttribute(text: "", error: error, context: context)
case .contextMenu:
message?.removeSpeechToTextMeta(context: context)
}

let c = getIAPErrorController(
context: context,
error.localizedDescription,
presentationData
)
controllerInteraction.presentGlobalOverlayController(c, nil)
case let .other(error):
switch messageSource {
case .chat:
message?.updateAudioTranscriptionAttribute(text: "", error: error, context: context)
case .contextMenu:
message?.removeSpeechToTextMeta(context: context)
}

let c = getIAPErrorController(
context: context,
error.localizedDescription,
presentationData
)
controllerInteraction.presentGlobalOverlayController(c, nil)
}
}

completion?()
}
}
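
Editor's note, not part of the commit: a hypothetical call site for the new convertSpeechToText entry point, sketched from the signature introduced above. It assumes the caller already holds the chat controller's AccountContext, PresentationData, and ChatControllerInteraction; the wrapper function and variable names are illustrative only, and languageStyle is left at its .normal default.

import AccountContext
import ChatControllerInteraction
import NGSpeechToText
import Postbox
import TelegramCore
import TelegramPresentationData

// Illustrative wiring from a context-menu action on a voice message.
func handleSpeechToTextAction(
    context: AccountContext,
    message: Message,
    voiceFile: TelegramMediaFile,
    presentationData: PresentationData,
    controllerInteraction: ChatControllerInteraction
) {
    convertSpeechToText(
        from: .contextMenu,
        context: context,
        mediaFile: voiceFile,
        message: message,
        presentationData: presentationData,
        controllerInteraction: controllerInteraction,
        completion: {
            // Transcription finished; success or error is already reflected on the message.
        },
        closeWithoutSelect: {
            // The user dismissed the language picker without choosing a locale.
        }
    )
}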