forked from Significant-Gravitas/AutoGPT
Merge pull request Significant-Gravitas#2373 from 0xArty/click-arg-parsing: Use click to parse arguments
Showing 6 changed files with 192 additions and 132 deletions.
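
For context on the change itself: click builds a command-line interface declaratively, with each @click.option decorator turning a flag into a parsed parameter that is passed into the decorated function by name. That is the pattern this commit adopts in place of the hand-rolled parse_arguments() removed below. A minimal sketch of the pattern, assuming nothing beyond click itself (the flags shown are illustrative, not the full Auto-GPT set):

import click


@click.command()
@click.option("--debug", is_flag=True, help="Enable Debug Mode")
@click.option("-l", "--limit", type=int, help="Illustrative integer option")
def demo(debug: bool, limit: int) -> None:
    """click parses sys.argv and fills in these parameters automatically."""
    click.echo(f"debug={debug}, limit={limit}")


if __name__ == "__main__":
    demo()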
autogpt/__main__.py
@@ -1,53 +1,5 @@
-"""Main script for the autogpt package."""
-import logging
-
-from colorama import Fore
-
-from autogpt.agent.agent import Agent
-from autogpt.args import parse_arguments
-from autogpt.config import Config, check_openai_api_key
-from autogpt.logs import logger
-from autogpt.memory import get_memory
-from autogpt.prompt import construct_prompt
-
-# Load environment variables from .env file
-
-
-def main() -> None:
-    """Main function for the script"""
-    cfg = Config()
-    # TODO: fill in llm values here
-    check_openai_api_key()
-    parse_arguments()
-    logger.set_level(logging.DEBUG if cfg.debug_mode else logging.INFO)
-    ai_name = ""
-    system_prompt = construct_prompt()
-    # print(prompt)
-    # Initialize variables
-    full_message_history = []
-    next_action_count = 0
-    # Make a constant:
-    triggering_prompt = (
-        "Determine which next command to use, and respond using the"
-        " format specified above:"
-    )
-    # Initialize memory and make sure it is empty.
-    # this is particularly important for indexing and referencing pinecone memory
-    memory = get_memory(cfg, init=True)
-    logger.typewriter_log(
-        f"Using memory of type:", Fore.GREEN, f"{memory.__class__.__name__}"
-    )
-    logger.typewriter_log(f"Using Browser:", Fore.GREEN, cfg.selenium_web_browser)
-    agent = Agent(
-        ai_name=ai_name,
-        memory=memory,
-        full_message_history=full_message_history,
-        next_action_count=next_action_count,
-        system_prompt=system_prompt,
-        triggering_prompt=triggering_prompt,
-    )
-    agent.start_interaction_loop()
-
+"""Auto-GPT: A GPT powered AI Assistant"""
+import autogpt.cli
 
 if __name__ == "__main__":
-    main()
+    autogpt.cli.main()
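
One practical effect of moving the logic into a dedicated module is that the command becomes importable and testable without spawning a process. A minimal sketch using click's bundled test runner, assuming the new command group is importable as autogpt.cli.main (the --help invocation exercises only option parsing, not the agent itself):

from click.testing import CliRunner

from autogpt.cli import main

# Invoke the command group in-process; click renders --help and exits cleanly.
runner = CliRunner()
result = runner.invoke(main, ["--help"])
assert result.exit_code == 0
print(result.output)  # lists the options declared with @click.option in the new CLI module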
autogpt/cli.py
@@ -0,0 +1,125 @@
+"""Main script for the autogpt package."""
+import click
+
+
+@click.group(invoke_without_command=True)
+@click.option("-c", "--continuous", is_flag=True, help="Enable Continuous Mode")
+@click.option(
+    "--skip-reprompt",
+    "-y",
+    is_flag=True,
+    help="Skips the re-prompting messages at the beginning of the script",
+)
+@click.option(
+    "--ai-settings",
+    "-C",
+    help="Specifies which ai_settings.yaml file to use, will also automatically skip the re-prompt.",
+)
+@click.option(
+    "-l",
+    "--continuous-limit",
+    type=int,
+    help="Defines the number of times to run in continuous mode",
+)
+@click.option("--speak", is_flag=True, help="Enable Speak Mode")
+@click.option("--debug", is_flag=True, help="Enable Debug Mode")
+@click.option("--gpt3only", is_flag=True, help="Enable GPT3.5 Only Mode")
+@click.option("--gpt4only", is_flag=True, help="Enable GPT4 Only Mode")
+@click.option(
+    "--use-memory",
+    "-m",
+    "memory_type",
+    type=str,
+    help="Defines which Memory backend to use",
+)
+@click.option(
+    "-b",
+    "--browser-name",
+    help="Specifies which web-browser to use when using selenium to scrape the web.",
+)
+@click.option(
+    "--allow-downloads",
+    is_flag=True,
+    help="Dangerous: Allows Auto-GPT to download files natively.",
+)
+@click.pass_context
+def main(
+    ctx: click.Context,
+    continuous: bool,
+    continuous_limit: int,
+    ai_settings: str,
+    skip_reprompt: bool,
+    speak: bool,
+    debug: bool,
+    gpt3only: bool,
+    gpt4only: bool,
+    memory_type: str,
+    browser_name: str,
+    allow_downloads: bool,
+) -> None:
+    """
+    Welcome to AutoGPT an experimental open-source application showcasing the capabilities of the GPT-4 pushing the boundaries of AI.
+    Start an Auto-GPT assistant.
+    """
+    # Put imports inside function to avoid importing everything when starting the CLI
+    import logging
+
+    from colorama import Fore
+
+    from autogpt.agent.agent import Agent
+    from autogpt.config import Config, check_openai_api_key
+    from autogpt.configurator import create_config
+    from autogpt.logs import logger
+    from autogpt.memory import get_memory
+    from autogpt.prompt import construct_prompt
+
+    if ctx.invoked_subcommand is None:
+        cfg = Config()
+        # TODO: fill in llm values here
+        check_openai_api_key()
+        create_config(
+            continuous,
+            continuous_limit,
+            ai_settings,
+            skip_reprompt,
+            speak,
+            debug,
+            gpt3only,
+            gpt4only,
+            memory_type,
+            browser_name,
+            allow_downloads,
+        )
+        logger.set_level(logging.DEBUG if cfg.debug_mode else logging.INFO)
+        ai_name = ""
+        system_prompt = construct_prompt()
+        # print(prompt)
+        # Initialize variables
+        full_message_history = []
+        next_action_count = 0
+        # Make a constant:
+        triggering_prompt = (
+            "Determine which next command to use, and respond using the"
+            " format specified above:"
+        )
+        # Initialize memory and make sure it is empty.
+        # this is particularly important for indexing and referencing pinecone memory
+        memory = get_memory(cfg, init=True)
+        logger.typewriter_log(
+            "Using memory of type:", Fore.GREEN, f"{memory.__class__.__name__}"
+        )
+        logger.typewriter_log("Using Browser:", Fore.GREEN, cfg.selenium_web_browser)
+        agent = Agent(
+            ai_name=ai_name,
+            memory=memory,
+            full_message_history=full_message_history,
+            next_action_count=next_action_count,
+            system_prompt=system_prompt,
+            triggering_prompt=triggering_prompt,
+        )
+        agent.start_interaction_loop()
+
+
+if __name__ == "__main__":
+    main()
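
Because main is declared as @click.group(invoke_without_command=True) and only starts the agent when ctx.invoked_subcommand is None, further subcommands can later be attached to the same entry point without changing the default behaviour. A minimal, self-contained sketch of that mechanism (the version subcommand is purely illustrative and not part of this commit):

import click


@click.group(invoke_without_command=True)
@click.pass_context
def main(ctx: click.Context) -> None:
    """Run the default behaviour only when no subcommand was given."""
    if ctx.invoked_subcommand is None:
        click.echo("starting the default interaction loop...")


@main.command()
def version() -> None:
    """Illustrative subcommand; real code would report the package version."""
    click.echo("Auto-GPT (version not shown)")


if __name__ == "__main__":
    main()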
Diffs for the remaining changed files did not load and are not shown in this view.