Skip to content

Commit

Permalink
Merge pull request #22 from FrancescoCaracciolo/nyarch-sync-0.4.4
Browse files Browse the repository at this point in the history
Minor fixes
  • Loading branch information
FrancescoCaracciolo authored Nov 2, 2024
2 parents 0c4ff24 + 0e7fff6 commit 001b2da
Show file tree
Hide file tree
Showing 8 changed files with 53 additions and 27 deletions.
2 changes: 1 addition & 1 deletion src/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@
"bing": {
"key": "bing",
"title": _("Microsoft Copilot"),
"description": _("Microsoft Copilot API"),
"description": _("Microsoft Copilot model using GPT4 - Requires a cookies json file"),
"class": BingHandler,
"secondary": True
},
Expand Down
34 changes: 32 additions & 2 deletions src/extra.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ def get_distribution() -> str:
"""
if ReplaceHelper.DISTRO is None:
try:
ReplaceHelper.DISTRO = subprocess.check_output(['flatpak-spawn', '--host', 'bash', '-c', 'lsb_release -ds']).decode('utf-8').strip()
ReplaceHelper.DISTRO = subprocess.check_output(get_spawn_command() + ['bash', '-c', 'lsb_release -ds']).decode('utf-8').strip()
except subprocess.CalledProcessError:
ReplaceHelper.DISTRO = "Unknown"

Expand All @@ -102,6 +102,17 @@ def get_desktop_environment() -> str:
desktop = "Unknown"
return desktop

def get_spawn_command() -> list:
    """
    Get the command prefix needed to run commands on the host system.

    Returns:
        list: ["flatpak-spawn", "--host"] when running inside a Flatpak
        sandbox (so commands escape the sandbox and run on the host),
        otherwise an empty list (commands run directly).
    """
    if is_flatpak():
        return ["flatpak-spawn", "--host"]
    else:
        return []
def get_image_base64(image_str: str):
"""
Get image string as base64 string, starting with data:/image/jpeg;base64,
Expand Down Expand Up @@ -287,7 +298,26 @@ def install_module(module, path):
r = subprocess.run([sys.executable, "-m", "pip", "install", "--target", path, module], capture_output=False)
return r

def can_escape_sandbox():
def is_flatpak() -> bool:
    """
    Detect whether the application is running inside a Flatpak sandbox.

    Returns:
        bool: True if the ``container`` environment variable is set to a
        non-empty value (Flatpak exports it inside the sandbox),
        False otherwise.
    """
    # A truthy "container" env var marks a Flatpak sandbox.
    return bool(os.getenv("container"))

def can_escape_sandbox() -> bool:
"""
Check if we can escape the sandbox
Returns:
bool: True if we can escape the sandbox
"""
if not is_flatpak():
return True
try:
r = subprocess.check_output(["flatpak-spawn", "--host", "echo", "test"])
except subprocess.CalledProcessError as _:
Expand Down
6 changes: 3 additions & 3 deletions src/gtkobj.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
from gi.repository import Gtk, Pango, Gio, Gdk, GtkSource, GObject, Adw, GLib
import threading

from .extra import quote_string
from .extra import get_spawn_command, quote_string

def apply_css_to_widget(widget, css_string):
provider = Gtk.CssProvider()
Expand Down Expand Up @@ -310,15 +310,15 @@ def run_console_terminal(self, widget,multithreading=False):
command = "cd " + quote_string(os.getcwd()) +"; " + self.txt + "; exec bash"
cmd = self.parent.external_terminal.split()
arguments = [s.replace("{0}", command) for s in cmd]
subprocess.Popen(["flatpak-spawn", "--host"] + arguments)
subprocess.Popen(get_spawn_command() + arguments)


def run_python(self, widget):
self.text_expander.set_visible(True)
t = self.txt.replace("'", '"""')
console_permissions = ""
if not self.parent.virtualization:
console_permissions = "flatpak-spawn --host "
console_permissions = " ".join(get_spawn_command()) + " "
process = subprocess.Popen(f"""{console_permissions}python3 -c '{t}'""", stdout=subprocess.PIPE,
stderr=subprocess.PIPE, shell=True)
stdout, stderr = process.communicate()
Expand Down
13 changes: 5 additions & 8 deletions src/llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
from openai import NOT_GIVEN
from g4f.Provider import RetryProvider
import base64
from .extra import convert_history_openai, extract_image, find_module, get_image_base64, get_image_path, quote_string, encode_image_base64
from .extra import convert_history_openai, extract_image, find_module, get_image_base64, get_image_path, get_spawn_command, quote_string, encode_image_base64
from .handler import Handler

class LLMHandler(Handler):
Expand Down Expand Up @@ -250,8 +250,7 @@ def __init__(self, settings, path):
good_providers = [g4f.Provider.DDG, g4f.Provider.Pizzagpt, g4f.Provider.DarkAI, g4f.Provider.Koala, g4f.Provider.NexraChatGPT, g4f.Provider.AmigoChat]
good_nongpt_providers = [g4f.Provider.ReplicateHome,g4f.Provider.RubiksAI, g4f.Provider.TeachAnything, g4f.Provider.ChatGot, g4f.Provider.FreeChatgpt, g4f.Provider.Free2GPT, g4f.Provider.DeepInfraChat, g4f.Provider.PerplexityLabs]
acceptable_providers = [g4f.Provider.ChatifyAI, g4f.Provider.Allyfy, g4f.Provider.Blackbox, g4f.Provider.Upstage, g4f.Provider.ChatHub, g4f.Provider.Upstage]
good_providers = [g4f.Provider.Bing]
self.client = g4f.client.Client(provider=RetryProvider([RetryProvider(good_providers)], shuffle=False))
self.client = g4f.client.Client(provider=RetryProvider([RetryProvider(good_providers), RetryProvider(good_nongpt_providers), RetryProvider(acceptable_providers)], shuffle=False))
self.n = 0

def generate_text(self, prompt: str, history: list[dict[str, str]] = [], system_prompt: list[str] = []) -> str:
Expand All @@ -262,7 +261,6 @@ def generate_text(self, prompt: str, history: list[dict[str, str]] = [], system_
response = self.client.chat.completions.create(
model="",
messages=history,
image=open(img, "rb") if img is not None else None,
)
return response.choices[0].message.content

Expand All @@ -275,7 +273,6 @@ def generate_text_stream(self, prompt: str, history: list[dict[str, str]] = [],
model="",
messages=history,
stream=True,
image=open(img, "rb") if img is not None else None,
)
full_message = ""
prev_message = ""
Expand Down Expand Up @@ -552,7 +549,7 @@ def generate_text(self, prompt: str, history: list[dict[str, str]] = [], system_
history.append({"User": "User", "Message": prompt})
command = command.replace("{0}", quote_string(json.dumps(history)))
command = command.replace("{1}", quote_string(json.dumps(system_prompt)))
out = check_output(["flatpak-spawn", "--host", "bash", "-c", command])
out = check_output(get_spawn_command() + ["bash", "-c", command])
return out.decode("utf-8")

def get_suggestions(self, request_prompt: str = "", amount: int = 1) -> list[str]:
Expand All @@ -563,15 +560,15 @@ def get_suggestions(self, request_prompt: str = "", amount: int = 1) -> list[str
command = command.replace("{0}", quote_string(json.dumps(self.history)))
command = command.replace("{1}", quote_string(json.dumps(self.prompts)))
command = command.replace("{2}", str(amount))
out = check_output(["flatpak-spawn", "--host", "bash", "-c", command])
out = check_output(get_spawn_command() + ["bash", "-c", command])
return json.loads(out.decode("utf-8"))

def generate_text_stream(self, prompt: str, history: list[dict[str, str]] = [], system_prompt: list[str] = [], on_update: Callable[[str], Any] = lambda _: None, extra_args: list = []) -> str:
command = self.get_setting("command")
history.append({"User": "User", "Message": prompt})
command = command.replace("{0}", quote_string(json.dumps(history)))
command = command.replace("{1}", quote_string(json.dumps(system_prompt)))
process = Popen(["flatpak-spawn", "--host", "bash", "-c", command], stdout=PIPE)
process = Popen(get_spawn_command() + ["bash", "-c", command], stdout=PIPE)
full_message = ""
prev_message = ""
while True:
Expand Down
4 changes: 2 additions & 2 deletions src/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
from gpt4all import GPT4All
from .llm import GPT4AllHandler, LLMHandler
from .gtkobj import ComboRowHelper, CopyBox, MultilineEntry
from .extra import can_escape_sandbox, override_prompts, human_readable_size
from .extra import can_escape_sandbox, get_spawn_command, override_prompts, human_readable_size

from .extensions import ExtensionLoader, NewelleExtension

Expand Down Expand Up @@ -538,7 +538,7 @@ def toggle_virtualization(self, toggle, status):
self.settings.set_boolean("virtualization", status)

def open_website(self, button):
Popen(["flatpak-spawn", "--host", "xdg-open", button.get_name()])
Popen(get_spawn_command() + ["xdg-open", button.get_name()])

def on_setting_change(self, constants: dict[str, Any], handler: Handler, key: str, force_change : bool = False):

Expand Down
4 changes: 2 additions & 2 deletions src/stt.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
import pyaudio
import wave
import speech_recognition as sr
from .extra import find_module, install_module
from .extra import find_module, get_spawn_command, install_module
from .handler import Handler

class AudioRecorder:
Expand Down Expand Up @@ -351,7 +351,7 @@ def requires_sandbox_escape() -> bool:
def recognize_file(self, path):
command = self.get_setting("command")
if command is not None:
res = check_output(["flatpak-spawn", "--host", "bash", "-c", command.replace("{0}", path)]).decode("utf-8")
res = check_output(get_spawn_command() + ["bash", "-c", command.replace("{0}", path)]).decode("utf-8")
return str(res)
return None

Expand Down
13 changes: 6 additions & 7 deletions src/tts.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,11 @@
from typing import Any, Callable
from gtts import gTTS, lang
from subprocess import check_output
import os
os.environ['PYGAME_HIDE_SUPPORT_PROMPT'] = "hide"
from pygame import mixer
import threading, time, requests
import os, json, pyaudio
from .extra import can_escape_sandbox, force_sync
from .extra import can_escape_sandbox, get_spawn_command, force_sync, human_readable_size
from pydub import AudioSegment
import asyncio, random, string
from requests_toolbelt.multipart.encoder import MultipartEncoder
Expand Down Expand Up @@ -154,7 +153,7 @@ def get_voices(self) -> tuple:
return self.voices
if not self.is_installed():
return self.voices
output = check_output(["flatpak-spawn", "--host", "espeak", "--voices"]).decode("utf-8")
output = check_output(get_spawn_command() + ["espeak", "--voices"]).decode("utf-8")
# Extract the voice names from the output
lines = output.strip().split("\n")[1:]
voices = tuple()
Expand All @@ -166,18 +165,18 @@ def get_voices(self) -> tuple:

def play_audio(self, message):
self._play_lock.acquire()
check_output(["flatpak-spawn", "--host", "espeak", "-v" + str(self.get_current_voice()), message])
check_output(get_spawn_command() + ["espeak", "-v" + str(self.get_current_voice()), message])
self._play_lock.release()

def save_audio(self, message, file):
r = check_output(["flatpak-spawn", "--host", "espeak", "-f", "-v" + str(self.get_current_voice()), message, "--stdout"])
r = check_output(get_spawn_command() + ["espeak", "-f", "-v" + str(self.get_current_voice()), message, "--stdout"])
f = open(file, "wb")
f.write(r)

def is_installed(self) -> bool:
if not can_escape_sandbox():
return False
output = check_output(["flatpak-spawn", "--host", "whereis", "espeak"]).decode("utf-8")
output = check_output(get_spawn_command() + ["whereis", "espeak"]).decode("utf-8")
paths = []
if ":" in output:
paths = output.split(":")[1].split()
Expand Down Expand Up @@ -214,7 +213,7 @@ def play_audio(self, message):
command = self.get_setting("command")
if command is not None:
self._play_lock.acquire()
check_output(["flatpak-spawn", "--host", "bash", "-c", command.replace("{0}", message)])
check_output(get_spawn_command() + ["bash", "-c", command.replace("{0}", message)])
self._play_lock.release()


Expand Down
4 changes: 2 additions & 2 deletions src/window.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
from .constants import AVAILABLE_LLMS, AVAILABLE_SMART_PROMPTS, AVAILABLE_TRANSLATORS, EXTRA_PROMPTS, PROMPTS, AVAILABLE_TTS, AVAILABLE_STT, AVAILABLE_AVATARS, AVAILABLE_PROMPTS
from gi.repository import Gtk, Adw, Pango, Gio, Gdk, GObject, GLib, GdkPixbuf
from .stt import AudioRecorder
from .extra import ReplaceHelper, markwon_to_pango, override_prompts, replace_variables
from .extra import ReplaceHelper, get_spawn_command, markwon_to_pango, override_prompts, replace_variables
import threading
import posixpath
import shlex,json, base64
Expand Down Expand Up @@ -998,7 +998,7 @@ def execute_terminal_command(self, command):
os.chdir(os.path.expanduser(self.main_path))
console_permissions = ""
if not self.virtualization:
console_permissions = "flatpak-spawn --host"
console_permissions = " ".join(get_spawn_command())
commands = ('\n'.join(command)).split(" && ")
txt = ""
path=self.main_path
Expand Down

0 comments on commit 001b2da

Please sign in to comment.