diff --git a/packages/jhehemann/customs/prediction_sum_url_content/component.yaml b/packages/jhehemann/customs/prediction_sum_url_content/component.yaml
index 9d2e460f..d963c2e4 100644
--- a/packages/jhehemann/customs/prediction_sum_url_content/component.yaml
+++ b/packages/jhehemann/customs/prediction_sum_url_content/component.yaml
@@ -12,4 +12,34 @@ fingerprint:
 fingerprint_ignore_patterns: []
 entry_point: prediction_sum_url_content.py
 callable: run
-dependencies: {}
+dependencies:
+  tqdm:
+    version: ==4.56.0
+  google-api-python-client:
+    version: ==2.95.0
+  googlesearch-python:
+    version: ==1.2.3
+  requests: {}
+  pydantic:
+    version: '>=1.9.0,<3'
+  faiss-cpu:
+    version: ==1.7.4
+  tiktoken:
+    version: ==0.5.1
+  markdownify:
+    version: ==0.11.6
+  openai:
+    version: ==1.11.0
+  docstring-parser:
+    version: ==0.15
+  readability-lxml:
+    version: ==0.8.1
+  pypdf2:
+    version: ==3.0.1
+  pandas: {}
+  python-dateutil:
+    version: ==2.8.2
+  beautifulsoup4:
+    version: ==4.12.2
+  spacy:
+    version: ==3.7.2
diff --git a/packages/napthaai/customs/prediction_request_rag/component.yaml b/packages/napthaai/customs/prediction_request_rag/component.yaml
index f296d08f..3ce01a07 100644
--- a/packages/napthaai/customs/prediction_request_rag/component.yaml
+++ b/packages/napthaai/customs/prediction_request_rag/component.yaml
@@ -7,8 +7,31 @@ license: Apache-2.0
 aea_version: '>=1.0.0, <2.0.0'
 fingerprint:
   __init__.py: bafybeibt7f7crtwvmkg7spy3jhscmlqltvyblzp32g6gj44v7tlo5lycuq
-  prediction_request_rag.py: bafybeifxoo6pcmmwved7ffwra7saku5hosfrhsmyrwhqiy7nxcblh4luce
+  prediction_request_rag.py: bafybeihpyddw5tctvvsr6fbwlbenkgkdz4n5ieyngt246idtxjgfrwn2ke
 fingerprint_ignore_patterns: []
 entry_point: prediction_request_rag.py
 callable: run
-dependencies: {}
+dependencies:
+  google-api-python-client:
+    version: ==2.95.0
+  googlesearch-python:
+    version: ==1.2.3
+  requests: {}
+  pydantic:
+    version: '>=1.9.0,<3'
+  faiss-cpu:
+    version: ==1.7.4
+  tiktoken:
+    version: ==0.5.1
+  markdownify:
+    version: ==0.11.6
+  openai:
+    version: ==1.11.0
+  docstring-parser:
+    version: ==0.15
+  readability-lxml:
+    version: ==0.8.1
+  pypdf2:
+    version: ==3.0.1
+  numpy:
+    version: '>=1.19.0'
diff --git a/packages/napthaai/customs/prediction_request_rag/prediction_request_rag.py b/packages/napthaai/customs/prediction_request_rag/prediction_request_rag.py
index a9d3c970..2e5c7377 100644
--- a/packages/napthaai/customs/prediction_request_rag/prediction_request_rag.py
+++ b/packages/napthaai/customs/prediction_request_rag/prediction_request_rag.py
@@ -19,6 +19,7 @@
 """This module implements a Mech tool for binary predictions."""
 
+import re
 from collections import defaultdict
 from concurrent.futures import Future, ThreadPoolExecutor
 from docstring_parser import parse
@@ -35,7 +36,6 @@
 from readability import Document as ReadabilityDocument
 import requests
 from requests.exceptions import RequestException, TooManyRedirects
-from requests.packages.urllib3.util.retry import Retry
 from markdownify import markdownify as md
 from typing import Any, Dict, Generator, List, Optional, Tuple, Callable
 from tiktoken import encoding_for_model
@@ -65,13 +65,13 @@ def __exit__(self, exc_type, exc_value, traceback) -> None:
     "temperature": 0,
 }
 MAX_TOKENS = {
-    "gpt-3.5-turbo": 4096,
+    "gpt-3.5-turbo-0125": 16385,
     "gpt-4": 8192,
 }
 ALLOWED_TOOLS = [
     "prediction-request-rag",
 ]
-TOOL_TO_ENGINE = {tool: "gpt-3.5-turbo" for tool in ALLOWED_TOOLS}
+TOOL_TO_ENGINE = {tool: "gpt-3.5-turbo-0125" for tool in ALLOWED_TOOLS}
 DEFAULT_NUM_URLS = defaultdict(lambda: 3)
 DEFAULT_NUM_QUERIES = defaultdict(lambda: 3)
 NUM_URLS_PER_QUERY = 5
@@ -106,15 +106,15 @@ def __exit__(self, exc_type, exc_value, traceback) -> None:
 """
 
 URL_QUERY_PROMPT = """
- You are an expert fact checker in a team tasked with determining whether an event will happen before a given date in the past.
-* Your role in the team to come up with search queries to be used to find relevant news articles that may help in determining whether the event occured.
+ You are an expert fact checker in a team tasked with determining whether an event will happen before a given date.
+* Your role in the team to come up with search queries to be used to find relevant news articles that may help in determining whether the event will occur.
 * You are provided with the input question about the event under the label "USER_PROMPT".
 * You must follow the instructions under the label "INSTRUCTIONS".
 
 INSTRUCTIONS
 * Read the input under the label "USER_PROMPT" delimited by three backticks.
 * The "USER_PROMPT" is a question about whether an event will happen before a given date.
-* The event will only have has two possible outcomes: either the event will happen or the event will not happen.
+* The event will only have two possible outcomes: either the event will happen or the event will not happen.
 * If the event has more than two possible outcomes, you must ignore the rest of the instructions and output the response "Error".
 * You should come up with {num_queries} diverse queries to search for relevant news articles that may help in determining whether the event will occur.
 * Focus on capturing different aspects and interpretations of the question to ensure comprehensive coverage of the topic.
@@ -208,6 +208,8 @@ def multi_queries(
     engine: str,
     num_queries: int,
     counter_callback: Optional[Callable[[int, int, str], None]] = None,
+    temperature: Optional[float] = DEFAULT_OPENAI_SETTINGS["temperature"],
+    max_tokens: Optional[int] = DEFAULT_OPENAI_SETTINGS["max_tokens"],
 ) -> List[str]:
     """Generate multiple queries for fetching information from the web."""
 
@@ -223,8 +225,8 @@ def multi_queries(
     response = client.chat.completions.create(
         model=engine,
         messages=messages,
-        temperature=DEFAULT_OPENAI_SETTINGS["temperature"],
-        max_tokens=DEFAULT_OPENAI_SETTINGS["max_tokens"],
+        temperature=temperature,
+        max_tokens=max_tokens,
         n=1,
         timeout=150,
         stop=None,
@@ -245,7 +247,13 @@ def multi_queries(
         return queries.queries, counter_callback
     return queries.queries, None
 
-def search_google(query: str, api_key: str, engine: str, num: int) -> List[str]:
+def search_google(
+    query: str,
+    api_key: str,
+    engine: str,
+    num: int
+) -> List[str]:
+    """Search Google for the given query."""
     service = build("customsearch", "v1", developerKey=api_key)
     search = (
         service.cse()
@@ -286,7 +294,9 @@ def get_urls_from_queries(
 
 
 def find_similar_chunks(
-    query: str, docs_with_embeddings: List[Document], k: int = 4
+    query: str,
+    docs_with_embeddings: List[Document],
+    k: int = 4
 ) -> List:
     """Similarity search to find similar chunks to a query"""
 
@@ -439,6 +449,8 @@ def fetch_additional_information(
     num_words: Optional[int] = None,
     num_urls: Optional[int] = None,
     num_queries: Optional[int] = DEFAULT_NUM_QUERIES,
+    temperature: Optional[float] = DEFAULT_OPENAI_SETTINGS["temperature"],
+    max_tokens: Optional[int] = DEFAULT_OPENAI_SETTINGS["max_tokens"],
 ) -> Tuple:
     """Fetch additional information from the web."""
 
@@ -449,6 +461,8 @@ def fetch_additional_information(
         engine=engine,
         num_queries=num_queries,
         counter_callback=counter_callback,
+        temperature=temperature,
+        max_tokens=max_tokens,
     )
     print(f"Queries: {queries}")
 
@@ -469,11 +483,11 @@ def fetch_additional_information(
             urls=urls,
         )
     else:
-        texts = []
+        docs = []
         for url, content in islice(source_links.items(), num_urls or len(source_links)):
-            doc = {}
-            doc['text'], doc['url'] = extract_text(html=content, num_words=num_words), url
-            texts.append(doc)
+            doc = extract_text(html=content, num_words=num_words)
+            doc.url = url
+            docs.append(doc)
 
     # Remove None values from the list
     docs = [doc for doc in docs if doc]
 
@@ -553,12 +567,23 @@ def adjust_additional_information(
     return additional_information
 
 
-def run(**kwargs) -> Tuple[str, Optional[Dict[str, Any]]]:
+def extract_question(prompt: str) -> str:
+    pattern = r'\"(.*?)\"'
+    try:
+        question = re.findall(pattern, prompt)[0]
+    except Exception as e:
+        print(f"Error extracting question: {e}")
+        question = prompt
+
+    return question
+
+
+def run(**kwargs) -> Tuple[str, Optional[str], Optional[Dict[str, Any]], Any]:
     """Run the task"""
     with OpenAIClientManager(kwargs["api_keys"]["openai"]):
         tool = kwargs["tool"]
-        prompt = kwargs["prompt"]
+        prompt = extract_question(kwargs["prompt"])
         max_tokens = kwargs.get("max_tokens", DEFAULT_OPENAI_SETTINGS["max_tokens"])
         temperature = kwargs.get("temperature", DEFAULT_OPENAI_SETTINGS["temperature"])
         num_words = kwargs.get("num_words", None)
@@ -572,7 +597,6 @@ def run(**kwargs) -> Tuple[str, Optional[Dict[str, Any]]]:
             raise ValueError(f"Tool {tool} is not supported.")
 
         engine = TOOL_TO_ENGINE[tool]
-
         additional_information, counter_callback = fetch_additional_information(
             client=client,
             prompt=prompt,
@@ -584,6 +608,8 @@ def run(**kwargs) -> Tuple[str, Optional[Dict[str, Any]]]:
             num_words=num_words,
             num_urls=num_urls,
             num_queries=num_queries,
+            temperature=temperature,
+            max_tokens=max_tokens,
         )
         additional_information = adjust_additional_information(
             prompt,
@@ -592,7 +618,8 @@ def run(**kwargs) -> Tuple[str, Optional[Dict[str, Any]]]:
             engine
         )
         prediction_prompt = PREDICTION_PROMPT.format(
-            user_prompt=prompt, additional_information=additional_information
+            user_prompt=prompt,
+            additional_information=additional_information
         )
         messages = [
             {"role": "system", "content": SYSTEM_PROMPT},
@@ -607,6 +634,7 @@ def run(**kwargs) -> Tuple[str, Optional[Dict[str, Any]]]:
             timeout=150,
             stop=None,
             functions=[Results.openai_schema],
+            function_call={'name': 'Results'}
         )
         results = str(Results.from_response(response))
 
@@ -623,7 +651,6 @@ def run(**kwargs) -> Tuple[str, Optional[Dict[str, Any]]]:
                 output_tokens=response.usage.completion_tokens,
                 model=engine,
                 token_counter=count_tokens,
-            )
-
-        return results, prediction_prompt, None, counter_callback
\ No newline at end of file
+            )
+
+        return results, prediction_prompt, None, counter_callback
diff --git a/packages/napthaai/customs/prediction_request_reasoning/__init__.py b/packages/napthaai/customs/prediction_request_reasoning/__init__.py
new file mode 100644
index 00000000..9f4fbd55
--- /dev/null
+++ b/packages/napthaai/customs/prediction_request_reasoning/__init__.py
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+# ------------------------------------------------------------------------------
+#
+#   Copyright 2024 Valory AG
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# ------------------------------------------------------------------------------
+
+"""This module contains the resolve market reasoning tool."""
diff --git a/packages/napthaai/customs/prediction_request_reasoning/component.yaml b/packages/napthaai/customs/prediction_request_reasoning/component.yaml
new file mode 100644
index 00000000..4819851c
--- /dev/null
+++ b/packages/napthaai/customs/prediction_request_reasoning/component.yaml
@@ -0,0 +1,37 @@
+name: prediction_request_reasoning
+author: napthaai
+version: 0.1.0
+type: custom
+description: A tool that reasons over extracted information.
+license: Apache-2.0
+aea_version: '>=1.0.0, <2.0.0'
+fingerprint:
+  __init__.py: bafybeib36ew6vbztldut5xayk5553rylrq7yv4cpqyhwc5ktvd4cx67vwu
+  prediction_request_reasoning.py: bafybeien7m2b5ejuvxtrkl32ws5tkrek2jfcksxxf7tawo2hh4lhbmagee
+fingerprint_ignore_patterns: []
+entry_point: prediction_request_reasoning.py
+callable: run
+dependencies:
+  google-api-python-client:
+    version: ==2.95.0
+  googlesearch-python:
+    version: ==1.2.3
+  requests: {}
+  pydantic:
+    version: '>=1.9.0,<3'
+  faiss-cpu:
+    version: ==1.7.4
+  tiktoken:
+    version: ==0.5.1
+  markdownify:
+    version: ==0.11.6
+  openai:
+    version: ==1.11.0
+  docstring-parser:
+    version: ==0.15
+  readability-lxml:
+    version: ==0.8.1
+  pypdf2:
+    version: ==3.0.1
+  numpy:
+    version: '>=1.19.0'
diff --git a/packages/napthaai/customs/prediction_request_reasoning/prediction_request_reasoning.py b/packages/napthaai/customs/prediction_request_reasoning/prediction_request_reasoning.py
new file mode 100644
index 00000000..67f803f1
--- /dev/null
+++ b/packages/napthaai/customs/prediction_request_reasoning/prediction_request_reasoning.py
@@ -0,0 +1,794 @@
+# -*- coding: utf-8 -*-
+# ------------------------------------------------------------------------------
+#
+#   Copyright 2024 Valory AG
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# ------------------------------------------------------------------------------
+
+"""This module implements a Mech tool for binary predictions."""
+
+import PyPDF2
+from collections import defaultdict
+from concurrent.futures import Future, ThreadPoolExecutor
+from typing import Any, Dict, Generator, List, Optional, Tuple, Callable
+from docstring_parser import parse
+from googleapiclient.discovery import build
+from io import BytesIO
+from itertools import islice
+import re
+import json
+from openai import OpenAI
+from pydantic import BaseModel, Field
+import numpy as np
+import faiss
+import requests
+from readability import Document as ReadabilityDocument
+from markdownify import markdownify as md
+import tiktoken
+from tiktoken import encoding_for_model
+
+client: Optional[OpenAI] = None
+
+
+class OpenAIClientManager:
+    """Client context manager for OpenAI."""
+
+    def __init__(self, api_key: str):
+        self.api_key = api_key
+
+    def __enter__(self) -> OpenAI:
+        global client
+        if client is None:
+            client = OpenAI(api_key=self.api_key)
+        return client
+
+    def __exit__(self, exc_type, exc_value, traceback) -> None:
+        global client
+        if client is not None:
+            client.close()
+            client = None
+
+
+DEFAULT_OPENAI_SETTINGS = {
+    "max_tokens": 500,
+    "temperature": 0,
+}
+MAX_TOKENS = {
+    "gpt-3.5-turbo-0125": 16385,
+    "gpt-4-0125-preview": 8192,
+}
+ALLOWED_TOOLS = [
+    "prediction-request-reasoning",
+]
+TOOL_TO_ENGINE = {tool: "gpt-4-0125-preview" for tool in ALLOWED_TOOLS}
+DEFAULT_NUM_WORDS: Dict[str, Optional[int]] = defaultdict(lambda: 300)
+DEFAULT_NUM_URLS = defaultdict(lambda: 3)
+NUM_QUERIES = 3
+NUM_URLS_PER_QUERY = 3
+SPLITTER_CHUNK_SIZE = 300
+SPLITTER_OVERLAP = 50
+EMBEDDING_MODEL = "text-embedding-3-large"
+EMBEDDING_BATCH_SIZE = 1000
+EMBEDDING_SIZE = 3072
+NUM_NEIGHBORS = 3
+BUFFER_TOKENS = 250
+
+
+class OpenAISchema(BaseModel):  # type: ignore[misc]
+    @classmethod  # type: ignore[misc]
+    @property
+    def openai_schema(cls) -> Dict[str, Any]:
+        """
+        Return the schema in the format of OpenAI's schema as jsonschema
+        Note:
+            Its important to add a docstring to describe how to best use this class, it will be included in the description attribute and be part of the prompt.
+        Returns:
+            model_json_schema (dict): A dictionary in the format of OpenAI's schema as jsonschema
+        """
+        schema = cls.model_json_schema()
+        docstring = parse(cls.__doc__ or "")
+        parameters = {
+            k: v for k, v in schema.items() if k not in ("title", "description")
+        }
+        for param in docstring.params:
+            if (name := param.arg_name) in parameters["properties"] and (
+                description := param.description
+            ):
+                if "description" not in parameters["properties"][name]:
+                    parameters["properties"][name]["description"] = description
+
+        parameters["required"] = sorted(
+            k for k, v in parameters["properties"].items() if "default" not in v
+        )
+
+        if "description" not in schema:
+            if docstring.short_description:
+                schema["description"] = docstring.short_description
+            else:
+                schema["description"] = (
+                    f"Correctly extracted `{cls.__name__}` with all "
+                    f"the required parameters with correct types"
+                )
+
+        return {
+            "name": schema["title"],
+            "description": schema["description"],
+            "parameters": parameters,
+        }
+
+    @classmethod
+    def from_response(cls, completion: Dict[str, Any]) -> "OpenAISchema":
+        """
+        Convert the response from OpenAI into the class instance
+        Args:
+            completion (dict): The response from OpenAI
+        Returns:
+            OpenAISchema: The instance of the class
+        """
+
+        message = completion.choices[0].message
+
+        return cls.model_validate_json(
+            message.function_call.arguments,
+        )
+
+
+class Queries(OpenAISchema):
+    queries: List[str]
+
+
+class MultiQuestions(OpenAISchema):
+    questions: List[str]
+
+
+class Results(OpenAISchema):
+    p_yes: float = Field(description="Estimated probability that the event in the USER_QUESTION occurs.")
+    p_no: float = Field(description="Estimated probability that the event in the USER_QUESTION does not occur.")
+    confidence: float = Field(description="A value between 0 and 1 indicating the confidence in the prediction. 0 indicates lowest confidence value; 1 maximum confidence value.")
+    info_utility: float = Field(description="Utility of the information provided in ADDITIONAL_INFORMATION to help you make the prediction. 0 indicates lowest utility; 1 maximum utility.")
+
+class Valid(OpenAISchema):
+    is_valid: bool = Field(..., description="Whether the question is valid.")
+    reason: Optional[str] = Field(..., description="Reason that the question is invalid.")
+
+class Determinable(OpenAISchema):
+    is_determinable: bool = Field(..., description="Whether it is possible to answer the question based on the information provided and reasoning.")
+
+class Document(BaseModel):
+    text: str
+    url: str
+    embedding: Optional[List[float]] = None
+
+
+URL_QUERY_PROMPT = """
+ You are an expert fact checker in a team tasked with determining whether an event will happen before a given date.
+* Your role in the team to come up with search queries to be used to find relevant news articles that may help in determining whether the event will occur.
+
+INSTRUCTIONS
+* You are provided with the input question about the event under the label "USER_PROMPT" delimited by three backticks, which is a question about whether an event will happen before a given date.
+* The event will only have two possible outcomes: either the event will happen or the event will not happen.
+* You should come up with {num_queries} diverse queries to search for relevant news articles that may help in determining whether the event will occur.
+* Focus on capturing different aspects and interpretations of the question to ensure comprehensive coverage of the topic.
+* ONLY function calls are allowed in the response.
+
+USER_PROMPT:
+```
+{user_prompt}
+```
+"""
+
+
+PREDICTION_PROMPT = """
+INSTRUCTIONS
+* You are an expert data analyst.
+* You are provided with the input question about the event under the label "USER_PROMPT".
+* You are provided with a colleague's reasoning as to whether the event will occur based on online research under the label "REASONING" delimited by three backticks.
+* Your task is to predict the probability of the event in the USER_PROMPT occurring.
+* The answer that you give should match the answer that you come to in the reasoning field
+* ONLY function calls are allowed in the response.
+
+USER_PROMPT:
+```
+{user_prompt}
+```
+
+REASONING:
+```
+{reasoning}
+```
+"""
+
+REASONING_PROMPT = """
+You are an expert fact checker that takes in a question asking whether an event will happen before a given date.
+Your role is to determine whether the event will happen before the date.
+
+INSTRUCTIONS
+* You are provided with the input question about the event under the label "USER_PROMPT" delimited by three backticks, which is a question about whether an event will happen before a certain date.
+* You need to determine whether the event will or will not happen. There are only two possible answers: either the event will happen or it will not happen.
+* You are provided an itemized list of information under the label "ADDITIONAL_INFORMATION" delimited by three backticks, with format "ARTICLE (N), URL: (URL), CONTENT: (CONTENT)"
+* Ideally, these will be news articles about the event in question.
+* If an item in "ADDITIONAL_INFORMATION" is not relevant, you must ignore that item for the estimation.
+* You should show your process of thinking through the problem step by step, taking the information of the various articles into consideration, and explain your reasoning for your decision as to whether an event will occur by the specified date.
+* The articles will not contain all the information needed to determine the answer. In this case, you may need to make an educated guess based on certain assumptions. If you need to do this, please provide your assumptions in your explanation.
+* Try to be concise in your reasoning, providing only information that is important for making a decision (aim for a response of about 100 words)
+* Do not repeat the task or instructions in the response
+
+USER_PROMPT:
+```
+{user_prompt}
+```
+
+ADDITIONAL_INFORMATION:
+```
+{formatted_docs}
+```
+"""
+
+
+MULTI_QUESTIONS_PROMPT = """
+You are an AI language model assistant. Your task is to generate 3
+different versions of the given user question to retrieve relevant documents from a vector
+database. By generating multiple perspectives on the user question, your goal is to help
+the user overcome some of the limitations of the distance-based similarity search.
+Provide these alternative questions separated by newlines.
+Original question: {question}"""
+
+
+SYSTEM_PROMPT = """You are a world class algorithm for generating structured output from a given input."""
+
+
+def multi_queries(
+    client: OpenAI,
+    prompt: str,
+    engine: str,
+    num_queries: int,
+    counter_callback: Optional[Callable[[int, int, str], None]] = None,
+    temperature: float = DEFAULT_OPENAI_SETTINGS["temperature"],
+    max_tokens: int = DEFAULT_OPENAI_SETTINGS["max_tokens"],
+) -> List[str]:
+    """Generate multiple queries for fetching information from the web."""
+
+    url_query_prompt = URL_QUERY_PROMPT.format(
+        user_prompt=prompt, num_queries=num_queries
+    )
+
+    messages = [
+        {"role": "system", "content": SYSTEM_PROMPT},
+        {"role": "user", "content": url_query_prompt},
+    ]
+
+    response = client.chat.completions.create(
+        model=engine,
+        messages=messages,
+        temperature=temperature,
+        max_tokens=max_tokens,
+        n=1,
+        timeout=150,
+        stop=None,
+        functions=[Queries.openai_schema],
+        function_call={'name':'Queries'}
+    )
+    queries = Queries.from_response(response)
+
+    # append the user's question to the list of queries
+    queries.queries.append(prompt)
+
+    if counter_callback:
+        counter_callback(
+            input_tokens=response.usage.prompt_tokens,
+            output_tokens=response.usage.completion_tokens,
+            model=engine,
+            token_counter=count_tokens,
+        )
+        return queries.queries, counter_callback
+    return queries.queries, None
+
+
+def search_google(query: str, api_key: str, engine: str, num: int) -> List[str]:
+    service = build("customsearch", "v1", developerKey=api_key)
+    search = (
+        service.cse()
+        .list(
+            q=query,
+            cx=engine,
+            num=num,
+        )
+        .execute()
+    )
+    return [result["link"] for result in search["items"]]
+
+
+def get_urls_from_queries(
+    queries: List[str], api_key: str, engine: str, num: int
+) -> List[str]:
+    """Get URLs from search engine queries"""
+    results = []
+    for query in queries:
+        for url in search_google(
+            query=query,
+            api_key=api_key,
+            engine=engine,
+            num=num,
+        ):
+            results.append(url)
+    unique_results = list(set(results))
+    return unique_results
+
+
+def extract_text_from_pdf(url: str, num_words: Optional[int] = None) -> str:
+    """Extract text from a PDF document at the given URL."""
+    try:
+        response = requests.get(url, timeout=20)
+        response.raise_for_status()
+
+        if "application/pdf" not in response.headers.get("Content-Type", ""):
+            return ValueError("URL does not point to a PDF document")
+
+        with BytesIO(response.content) as pdf_file:
+            reader = PyPDF2.PdfReader(pdf_file)
+            text = ""
+            for page in reader.pages:
+                text += page.extract_text()
+
+        doc = Document(text=text[:num_words] if num_words else text, url=url)
+
+        return doc
+    except Exception as e:
+        print(f"An error occurred: {e}")
+        return None
+
+
+def extract_text(
+    client: OpenAI,
+    engine: str,
+    html: str,
+    num_words: Optional[int] = None,
+    counter_callback: Optional[Callable[[int, int, str], None]] = None,
+) -> str:
+    """Extract text from a single HTML document"""
+    text = ReadabilityDocument(html).summary()
+    text = text = md(text, heading_style="ATX")
+    doc = Document(text=text[:num_words] if num_words else text, url="")
+    return doc, counter_callback
+
+
+def extract_texts(
+    urls: List[str],
+    client: OpenAI,
+    engine: str,
+    counter_callback: Optional[Callable[[int, int, str], None]] = None,
+) -> Tuple[List[str], Dict[str, str]]:
+    """Extract texts from URLs"""
+    extracted_texts = []
+    for batch in process_in_batches(urls=urls):
+        for future, url in batch:
+            try:
+                if url.lower().endswith(".pdf"):
+                    result = extract_text_from_pdf(url)
+                    if result:
+                        extracted_texts.append(result)
+                    continue
+                result = future.result()
+                if result.status_code != 200:
+                    continue
+                # first 4 bytes is pdf
+                if result.content[:4] == b"%PDF":
+                    result = extract_text_from_pdf(url)
+                    if result:
+                        extracted_texts.append(result)
+                    continue
+                doc, counter_callback = extract_text(
+                    html=result.text,
+                    client=client,
+                    counter_callback=counter_callback,
+                    engine=engine
+                )
+                doc.url = url
+                extracted_texts.append(doc)
+            except requests.exceptions.ReadTimeout:
+                print(f"Request timed out: {url}.")
+            except Exception as e:
+                print(f"An error occurred: {e}")
+    return extracted_texts, counter_callback
+
+
+def process_in_batches(
+    urls: List[str], window: int = 5, timeout: int = 50
+) -> Generator[None, None, List[Tuple[Future, str]]]:
+    """Iter URLs in batches."""
+    with ThreadPoolExecutor() as executor:
+        for i in range(0, len(urls), window):
+            batch = urls[i : i + window]
+            futures = [
+                (executor.submit(requests.get, url, timeout=timeout), url)
+                for url in batch
+            ]
+            yield futures
+
+
+def recursive_character_text_splitter(text, max_tokens, overlap):
+    if len(text) <= max_tokens:
+        return [text]
+    else:
+        return [
+            text[i : i + max_tokens] for i in range(0, len(text), max_tokens - overlap)
+        ]
+
+
+def get_embeddings(split_docs: List[Document]) -> List[Document]:
+    """Get embeddings for the split documents."""
+    for batch_start in range(0, len(split_docs), EMBEDDING_BATCH_SIZE):
+        batch_end = batch_start + EMBEDDING_BATCH_SIZE
+        batch = [doc.text for doc in split_docs[batch_start:batch_end]]
+        response = client.embeddings.create(
+            model=EMBEDDING_MODEL,
+            input=batch,
+        )
+        for i, be in enumerate(response.data):
+            assert i == be.index
+        batch_embeddings = [e.embedding for e in response.data]
+        for i, doc in enumerate(split_docs[batch_start:batch_end]):
+            doc.embedding = batch_embeddings[i]
+    return split_docs
+
+
+def find_similar_chunks(
+    query: str, docs_with_embeddings: List[Document], k: int = 4
+) -> List:
+    """Similarity search to find similar chunks to a query"""
+
+    query_embedding = (
+        client.embeddings.create(
+            model=EMBEDDING_MODEL,
+            input=query,
+        )
+        .data[0]
+        .embedding
+    )
+
+    index = faiss.IndexFlatIP(EMBEDDING_SIZE)
+    index.add(np.array([doc.embedding for doc in docs_with_embeddings]))
+    D, I = index.search(np.array([query_embedding]), k)
+
+    return [docs_with_embeddings[i] for i in I[0]]
+
+
+def multi_questions_response(
+    prompt:str,
+    engine:str,
+    temperature:float = DEFAULT_OPENAI_SETTINGS["temperature"],
+    max_tokens:int = DEFAULT_OPENAI_SETTINGS["max_tokens"],
+    counter_callback: Optional[Callable[[int, int, str], None]] = None,
+) -> List[str]:
+    """Generate multiple questions for fetching information from the web."""
+    try:
+        multi_questions_prompt = MULTI_QUESTIONS_PROMPT.format(question=prompt)
+        messages = [
+            {"role": "system", "content": SYSTEM_PROMPT},
+            {"role": "user", "content": multi_questions_prompt},
+        ]
+
+        response = client.chat.completions.create(
+            model=engine,
+            messages=messages,
+            temperature=temperature,
+            max_tokens=max_tokens,
+            n=1,
+            timeout=150,
+            stop=None,
+            functions=[MultiQuestions.openai_schema],
+            function_call={"name": "MultiQuestions"}
+        )
+        multi_questions = MultiQuestions.from_response(response)
+
+        if counter_callback:
+            counter_callback(
+                input_tokens=response.usage.prompt_tokens,
+                output_tokens=response.usage.completion_tokens,
+                model=engine,
+                token_counter=count_tokens,
+            )
+
+        # append the user's question to the list of questions
+        multi_questions.questions.append(prompt)
+
+        return multi_questions.questions, counter_callback
+
+    except Exception as e:
+        return [prompt], counter_callback
+
+
+def reciprocal_rank_refusion(similar_chunks: List[Document], k: int) -> List[Document]:
+    """Reciprocal rank refusion to re-rank the similar chunks based on the text."""
+    fused_chunks = {}
+    for rank, doc in enumerate(similar_chunks):
+        doc_text = doc.text
+        if doc_text not in fused_chunks:
+            fused_chunks[doc_text] = (doc, 0)
+        fused_chunks[doc_text] = (doc, fused_chunks[doc_text][1] + 1 / (rank + 60))
+
+    sorted_fused_chunks = sorted(fused_chunks.values(), key=lambda x: x[1], reverse=True)
+
+    return [doc for doc, _ in sorted_fused_chunks[:k]]
+
+
+def count_tokens(text: str, model: str) -> int:
+    """Count the number of tokens in a text."""
+    enc = encoding_for_model(model)
+    return len(enc.encode(text))
+
+
+def fetch_additional_information(
+    client: OpenAI,
+    prompt: str,
+    engine: str,
+    google_api_key: Optional[str],
+    google_engine_id: Optional[str],
+    counter_callback: Optional[Callable[[int, int, str], None]] = None,
+    source_links: Optional[List[str]] = None,
+    num_urls: Optional[int] = None,
+    temperature: float = DEFAULT_OPENAI_SETTINGS["temperature"],
+    max_tokens: int = DEFAULT_OPENAI_SETTINGS["max_tokens"],
+) -> Tuple:
+    """Fetch additional information from the web."""
+
+    # generate multiple queries for fetching information from the web
+    queries, counter_callback = multi_queries(
+        client=client,
+        prompt=prompt,
+        engine=engine,
+        num_queries=NUM_QUERIES,
+        counter_callback=counter_callback,
+        temperature=temperature,
+        max_tokens=max_tokens,
+    )
+    print(f"Queries: {queries}")
+
+    # get the top URLs for the queries
+    if not source_links:
+        urls = get_urls_from_queries(
+            queries=queries,
+            api_key=google_api_key,
+            engine=google_engine_id,
+            num=NUM_URLS_PER_QUERY,
+        )
+        print(f"URLs: {urls}")
+
+        # Extract text from the URLs
+        docs, counter_callback = extract_texts(
+            urls=urls,
+            client=client,
+            counter_callback=counter_callback,
+            engine=engine
+        )
+    else:
+        docs = []
+        for url, content in islice(source_links.items(), num_urls or len(source_links)):
+            doc, counter_callback = extract_text(
+                html=content,
+                client=client,
+                counter_callback=counter_callback,
+                engine=engine
+            )
+            doc.url = url
+            docs.append(doc)
+
+    # Remove None values from the list
+    docs = [doc for doc in docs if doc]
+
+    # remove empty documents with ""
+    docs = [doc for doc in docs if hasattr(doc, "text") and doc.text != ""]
+
+    # Chunk the documents
+    split_docs = []
+    for doc in docs:
+        t = recursive_character_text_splitter(
+            doc.text,
+            SPLITTER_CHUNK_SIZE,
+            SPLITTER_OVERLAP
+        )
+        split_docs.extend(
+            [Document(text=chunk, url=doc.url) for chunk in t]
+        )
+    print(f"Split Docs: {len(split_docs)}")
+
+    # Remove None values from the list
+    split_docs = [doc for doc in split_docs if doc]
+
+    # Embed the documents
+    docs_with_embeddings = get_embeddings(split_docs)
+    print(f"Docs with embeddings: {len(docs_with_embeddings)}")
+
+    # multi questions prompt
+    questions, counter_callback = multi_questions_response(
+        prompt=prompt,
+        engine=engine,
+        counter_callback=counter_callback,
+        temperature=temperature,
+        max_tokens=max_tokens,
+    )
+    print(f"Questions: {questions}")
+
+    similar_chunks = []
+    for question in questions:
+        similar_chunks.extend(find_similar_chunks(question, docs_with_embeddings, k=NUM_NEIGHBORS))
+    print(f"Similar Chunks before refusion: {len(similar_chunks)}")
+
+    # Reciprocal rank refusion
+    similar_chunks = reciprocal_rank_refusion(similar_chunks, NUM_NEIGHBORS)
+    print(f"Similar Chunks after refusion: {len(similar_chunks)}")
+
+    # Format the additional information
+    additional_information = "\n".join(
+        [
+            f"ARTICLE {i}, URL: {doc.url}, CONTENT: {doc.text}\n"
+            for i, doc in enumerate(similar_chunks)
+        ]
+    )
+
+    return additional_information, queries, counter_callback
+
+
+def adjust_additional_information(
+    prompt: str, additional_information: str, model: str
+) -> str:
+    """Adjust the additional_information to fit within the token budget"""
+
+    # Initialize tiktoken encoder for the specified model
+    enc = tiktoken.encoding_for_model(model)
+
+    # Encode the user prompt to calculate its token count
+    prompt_tokens = len(enc.encode(prompt))
+
+    # Calculate available tokens for additional_information
+    MAX_PREDICTION_PROMPT_TOKENS = (
+        MAX_TOKENS[model] - DEFAULT_OPENAI_SETTINGS["max_tokens"]
+    )
+    available_tokens = MAX_PREDICTION_PROMPT_TOKENS - prompt_tokens - BUFFER_TOKENS
+
+    # Encode the additional_information
+    additional_info_tokens = enc.encode(additional_information)
+
+    # If additional_information exceeds available tokens, truncate it
+    if len(additional_info_tokens) > available_tokens:
+        truncated_info_tokens = additional_info_tokens[:available_tokens]
+        # Decode tokens back to text, ensuring the output fits within the budget
+        additional_information = enc.decode(truncated_info_tokens)
+
+    return additional_information
+
+
+def extract_question(prompt: str) -> str:
+    pattern = r'\"(.*?)\"'
+    try:
+        question = re.findall(pattern, prompt)[0]
+    except Exception as e:
+        question = prompt
+
+    return question
+
+
+def run(**kwargs) -> Tuple[str, Optional[str], Optional[Dict[str, Any]], Any]:
+    """Run the task"""
+    with OpenAIClientManager(kwargs["api_keys"]["openai"]):
+        tool = kwargs["tool"]
+        prompt = extract_question(kwargs["prompt"])
+        num_urls = kwargs.get("num_urls", DEFAULT_NUM_URLS[tool])
+        counter_callback = kwargs.get("counter_callback", None)
+        api_keys = kwargs.get("api_keys", {})
+        google_api_key = api_keys.get("google_api_key", None)
+        google_engine_id = api_keys.get("google_engine_id", None)
+        temperature = kwargs.get("temperature", DEFAULT_OPENAI_SETTINGS["temperature"])
+        max_tokens = kwargs.get("max_tokens", DEFAULT_OPENAI_SETTINGS["max_tokens"])
+        engine = TOOL_TO_ENGINE[tool]
+
+        if tool not in ALLOWED_TOOLS:
+            raise ValueError(f"Tool {tool} is not supported.")
+
+        (
+            additional_information,
+            queries,
+            counter_callback,
+        ) = fetch_additional_information(
+            client=client,
+            prompt=prompt,
+            engine=engine,
+            google_api_key=google_api_key,
+            google_engine_id=google_engine_id,
+            counter_callback=counter_callback,
+            source_links=kwargs.get("source_links", None),
+            num_urls=num_urls,
+            temperature=temperature,
+            max_tokens=max_tokens,
+        )
+
+        # Adjust the additional_information to fit within the token budget
+        adjusted_info = adjust_additional_information(
+            prompt=PREDICTION_PROMPT,
+            additional_information=additional_information,
+            model=engine,
+        )
+
+        # Reasoning prompt
+        reasoning_prompt = REASONING_PROMPT.format(
+            user_prompt=prompt, formatted_docs=adjusted_info
+        )
+
+        # Do reasoning
+        messages = [
+            {"role": "system", "content": SYSTEM_PROMPT},
+            {
+                "role": "user",
+                "content": reasoning_prompt,
+            },
+        ]
+
+        # Reasoning
+        response_reasoning = client.chat.completions.create(
+            model=engine,
+            messages=messages,
+            temperature=temperature,
+            max_tokens=max_tokens,
+            n=1,
+            timeout=150,
+            stop=None,
+        )
+
+        # Extract the reasoning
+        reasoning = response_reasoning.choices[0].message.content
+
+        # Prediction prompt
+        prediction_prompt = PREDICTION_PROMPT.format(
+            user_prompt=prompt, reasoning=reasoning
+        )
+
+        # Make the prediction
+        messages = [
+            {"role": "system", "content": SYSTEM_PROMPT},
+            {
+                "role": "user",
+                "content": prediction_prompt,
+            },
+        ]
+
+        response = client.chat.completions.create(
+            model=engine,
+            messages=messages,
+            temperature=temperature,
+            max_tokens=max_tokens,
+            n=1,
+            timeout=150,
+            stop=None,
+            functions=[Results.openai_schema],
+            function_call={'name':'Results'}
+        )
+        results = str(Results.from_response(response))
+
+        pairs = str(results).split()
+        result_dict = {}
+        for pair in pairs:
+            key, value = pair.split("=")
+            result_dict[key] = float(value)  # Convert value to float
+        results = result_dict
+        results = json.dumps(results)
+        if counter_callback is not None:
+            counter_callback(
+                input_tokens=response_reasoning.usage.prompt_tokens
+                + response.usage.prompt_tokens,
+                output_tokens=response_reasoning.usage.completion_tokens
+                + response.usage.completion_tokens,
+                model=engine,
+                token_counter=count_tokens,
+            )
+        return results, reasoning_prompt + "////" + prediction_prompt, None, counter_callback
diff --git a/packages/napthaai/customs/prediction_url_cot/__init__.py b/packages/napthaai/customs/prediction_url_cot/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/packages/napthaai/customs/prediction_url_cot/prediction_url_cot.py b/packages/napthaai/customs/prediction_url_cot/prediction_url_cot.py
new file mode 100644
index 00000000..10dbd468
--- /dev/null
+++ b/packages/napthaai/customs/prediction_url_cot/prediction_url_cot.py
@@ -0,0 +1,574 @@
+from collections import defaultdict
+from concurrent.futures import Future, ThreadPoolExecutor
+from docstring_parser import parse
+from googleapiclient.discovery import build
+from itertools import islice
+import json
+import re
+from io import BytesIO
+import PyPDF2
+from openai import OpenAI
+from pydantic import BaseModel, Field
+from readability import Document as ReadabilityDocument
+import requests
+from requests.exceptions import RequestException, TooManyRedirects
+from markdownify import markdownify as md
+from typing import Any, Dict, Generator, List, Optional, Tuple, Callable
+from tiktoken import encoding_for_model
+
+client: Optional[OpenAI] = None
+
+
+class OpenAIClientManager:
+    """Client context manager for OpenAI."""
+    def __init__(self, api_key: str):
+        self.api_key = api_key
+
+    def __enter__(self) -> OpenAI:
+        global client
+        if client is None:
+            client = OpenAI(api_key=self.api_key)
+        return client
+
+    def __exit__(self, exc_type, exc_value, traceback) -> None:
+        global client
+        if client is not None:
+            client.close()
+            client = None
+
+
+DEFAULT_OPENAI_SETTINGS = {
+    "max_tokens": 300,
+    "temperature": 0,
+}
+MAX_TOKENS = {
+    "gpt-3.5-turbo-0125": 16385,
+    "gpt-4": 8192,
+}
+ALLOWED_TOOLS = [
+    "prediction-url-cot",
+]
+TOOL_TO_ENGINE = {tool: "gpt-3.5-turbo-0125" for tool in ALLOWED_TOOLS}
+DEFAULT_NUM_URLS = defaultdict(lambda: 3)
+DEFAULT_NUM_QUERIES = defaultdict(lambda: 3)
+NUM_URLS_PER_QUERY = 5
+SPLITTER_CHUNK_SIZE = 1800*2
+SPLITTER_OVERLAP = 50*4
+EMBEDDING_MODEL = "text-embedding-3-large"
+EMBEDDING_BATCH_SIZE = 1000
+EMBEDDING_SIZE = 3072
+SPLITTER_MAX_TOKENS = 1800
+SPLITTER_OVERLAP = 50
+NUM_NEIGHBORS = 4
+HTTP_TIMEOUT = 20
+HTTP_MAX_REDIRECTS = 5
+HTTP_MAX_RETIES = 2
+MAX_DOC_TOKENS = 10000
+
+
+class OpenAISchema(BaseModel):  # type: ignore[misc]
+    @classmethod  # type: ignore[misc]
+    @property
+    def openai_schema(cls) -> Dict[str, Any]:
+        """
+        Return the schema in the format of OpenAI's schema as jsonschema
+        Note:
+            Its important to add a docstring to describe how to best use this class, it will be included in the description attribute and be part of the prompt.
+        Returns:
+            model_json_schema (dict): A dictionary in the format of OpenAI's schema as jsonschema
+        """
+        schema = cls.model_json_schema()
+        docstring = parse(cls.__doc__ or "")
+        parameters = {
+            k: v for k, v in schema.items() if k not in ("title", "description")
+        }
+        for param in docstring.params:
+            if (name := param.arg_name) in parameters["properties"] and (
+                description := param.description
+            ):
+                if "description" not in parameters["properties"][name]:
+                    parameters["properties"][name]["description"] = description
+
+        parameters["required"] = sorted(
+            k for k, v in parameters["properties"].items() if "default" not in v
+        )
+
+        if "description" not in schema:
+            if docstring.short_description:
+                schema["description"] = docstring.short_description
+            else:
+                schema["description"] = (
+                    f"Correctly extracted `{cls.__name__}` with all "
+                    f"the required parameters with correct types"
+                )
+
+        return {
+            "name": schema["title"],
+            "description": schema["description"],
+            "parameters": parameters,
+        }
+
+    @classmethod
+    def from_response(cls, completion: Dict[str, Any]) -> "OpenAISchema":
+        """
+        Convert the response from OpenAI into the class instance
+        Args:
+            completion (dict): The response from OpenAI
+        Returns:
+            OpenAISchema: The instance of the class
+        """
+
+        message = completion.choices[0].message
+
+        return cls.model_validate_json(
+            message.function_call.arguments,
+        )
+
+
+class Document(BaseModel):
+    text: str
+    url: str
+    embedding: Optional[List[float]] = None
+
+
+class Queries(OpenAISchema):
+    queries: List[str]
+
+class Results(OpenAISchema):
+    p_yes: float = Field(description="Estimated probability that the event in the USER_QUESTION occurs.")
+    p_no: float = Field(description="Estimated probability that the event in the USER_QUESTION does not occur.")
+    confidence: float = Field(description="A value between 0 and 1 indicating the confidence in the prediction. 0 indicates lowest confidence value; 1 maximum confidence value.")
+    info_utility: float = Field(description="Utility of the information provided in ADDITIONAL_INFORMATION to help you make the prediction. 0 indicates lowest utility; 1 maximum utility.")
+    prediction: Optional[str] = Field(description="The predicted outcome of the event in the USER_QUESTION. Can be 'yes', 'no', or 'I don't know'.")
+
+
+PREDICTION_PROMPT = """
+You are an AI expert in predicting events.
+You are given a question and a document.
+Your task is to predict whether the event in the question occurs based on the information in the document.
+Only use the information in the document to make your prediction.
+If you are not confident in your prediction, you can say "I don't know" in the prediction.
+Please think step by step before your response.
+
+USER_QUESTION:
+```
+{user_question}
+```
+
+DOCUMENT:
+```
+{document}
+```
+"""
+
+URL_QUERY_PROMPT = """
+You are an AI language model assistant.
+Your task is to generate {num_queries} different queries to retrieve relevant documents from the web.
+Your response will be used to fetch information from the web to help you make a prediction about the event in the USER_PROMPT.
+Please think step by step before your response.
+
+USER_PROMPT:
+```
+{user_prompt}
+```
+"""
+
+SYSTEM_PROMPT = """You are a world class algorithm for generating structured output from a given input."""
+
+
+def search_google(query: str, api_key: str, engine: str, num: int) -> List[str]:
+    service = build("customsearch", "v1", developerKey=api_key)
+    search = (
+        service.cse()
+        .list(
+            q=query,
+            cx=engine,
+            num=num,
+        )
+        .execute()
+    )
+    return [result["link"] for result in search["items"]]
+
+
+def count_tokens(text: str, model: str) -> int:
+    """Count the number of tokens in a text."""
+    enc = encoding_for_model(model)
+    return len(enc.encode(text))
+
+
+def get_urls_from_queries(
+    queries: List[str], api_key: str, engine: str, num: int
+) -> List[str]:
+    """Get URLs from search engine queries"""
+    results = []
+    for query in queries:
+        try:
+            for url in search_google(
+                query=query,
+                api_key=api_key,
+                engine=engine,
+                num=num,
+            ):
+                results.append(url)
+        except Exception:
+            pass
+    unique_results = list(set(results))
+    return unique_results
+
+
+def extract_question(prompt: str) -> str:
+    pattern = r'\"(.*?)\"'
+    try:
+        question = re.findall(pattern, prompt)[0]
+    except Exception as e:
+        question = prompt
+
+    return question
+
+
+def extract_text(
+    html: str,
+    num_words: Optional[int] = None,
+) -> str:
+    """Extract text from a single HTML document"""
+    text = ReadabilityDocument(html).summary()
+
+    # use html2text to convert HTML to markdown
+    text = md(text, heading_style="ATX")
+
+    if text is None:
+        return None
+
+    if num_words:
+        text = " ".join(text.split()[:num_words])
+    else:
+        text = " ".join(text.split())
+
+    doc = Document(text=text, url="")
+    return doc
+
+
+def extract_text_from_pdf(url: str, num_words: Optional[int] = None) -> str:
+    """Extract text from a PDF document at the given URL."""
+    try:
+        response = requests.get(url, timeout=HTTP_TIMEOUT)
+        response.raise_for_status()
+
+        if "application/pdf" not in response.headers.get("Content-Type", ""):
+            return ValueError("URL does not point to a PDF document")
+
+        with BytesIO(response.content) as pdf_file:
+            reader = PyPDF2.PdfReader(pdf_file)
+            text = ""
+            for page in reader.pages:
+                text += page.extract_text()
+
+        doc = Document(text=text[:num_words] if num_words else text, date="", url=url)
+        print(f"Using PDF: {url}: {doc.text[:300]}...")
+        return doc
+
+    except Exception as e:
+        print(f"An error occurred: {e}")
+        return None
+
+
+def process_in_batches(
+    urls: List[str],
+    window: int = 5,
+    timeout: int = HTTP_TIMEOUT,
+    max_redirects: int = HTTP_MAX_REDIRECTS,
+    retries: int = HTTP_MAX_RETIES,
+) -> Generator[None, None, List[Tuple[Optional[Future], str]]]:
+    """Iter URLs in batches with improved error handling and retry mechanism."""
+    with ThreadPoolExecutor() as executor, requests.Session() as session:
+        session.max_redirects = max_redirects
+        for i in range(0, len(urls), window):
+            batch = urls[i : i + window]
+            futures = []
+            for url in batch:
+                future = None
+                attempt = 0
+                while attempt < retries:
+                    try:
+                        future = executor.submit(session.get, url, timeout=timeout)
+                        break
+                    except (TooManyRedirects, RequestException) as e:
+                        print(f"Attempt {attempt + 1} failed for {url}: {e}")
+                        attempt += 1
+                        if attempt == retries:
+                            print(f"Max retries reached for {url}. Moving to next URL.")
+                futures.append((future, url))
+            yield futures
+
+
+def extract_texts(urls: List[str], num_words: Optional[int] = None) -> List[Document]:
+    """Extract texts from URLs with improved error handling, excluding failed URLs."""
+    extracted_texts = []
+    for batch in process_in_batches(urls=urls):
+        for future, url in batch:
+            if future is None:
+                continue
+            try:
+                result = future.result()
+                if result.status_code == 200:
+                    # Check if URL ends with .pdf or content starts with %PDF
+                    if url.endswith('.pdf') or result.content[:4] == b'%PDF':
+                        doc = extract_text_from_pdf(url, num_words=num_words)
+                    else:
+                        doc = extract_text(html=result.text, num_words=num_words)
+                        doc.url = url
+                    extracted_texts.append(doc)
+            except Exception as e:
+                print(f"Error processing {url}: {e}")
+                continue
+    return extracted_texts
+
+
+def multi_queries(
+    client: OpenAI,
+    prompt: str,
+    engine: str,
+    num_queries: int,
+    counter_callback: Optional[Callable[[int, int, str], None]] = None,
+    temperature: int = DEFAULT_OPENAI_SETTINGS["temperature"],
+    max_tokens: int = DEFAULT_OPENAI_SETTINGS["max_tokens"],
+) -> List[str]:
+    """Generate multiple queries for fetching information from the web."""
+
+    url_query_prompt = URL_QUERY_PROMPT.format(
+        user_prompt=prompt, num_queries=num_queries
+    )
+
+    messages = [
+        {"role": "system", "content": SYSTEM_PROMPT},
+        {"role": "user", "content": url_query_prompt},
+    ]
+
+    response = client.chat.completions.create(
+        model=engine,
+        messages=messages,
+        temperature=temperature,
+        max_tokens=max_tokens,
+        n=1,
+        timeout=150,
+        stop=None,
+        functions=[Queries.openai_schema],
+    )
+    queries = Queries.from_response(response)
+
+    # append the user's question to the list of queries
+    queries.queries.append(prompt)
+
+    if counter_callback:
+        counter_callback(
+            input_tokens=response.usage.prompt_tokens,
+            output_tokens=response.usage.completion_tokens,
+            model=engine,
+            token_counter=count_tokens,
+        )
+        return queries.queries, counter_callback
+    return queries.queries, None
+
+
+def fetch_additional_information(
+    client: OpenAI,
+    prompt: str,
+    engine: str,
+    google_api_key: Optional[str],
+    google_engine_id: Optional[str],
+    counter_callback: Optional[Callable[[int, int, str], None]] = None,
+    source_links: Optional[List[str]] = None,
+    num_words: Optional[int] = None,
+    num_urls: Optional[int] = None,
+    num_queries: Optional[int] = DEFAULT_NUM_QUERIES,
+    temperature: int = DEFAULT_OPENAI_SETTINGS["temperature"],
+    max_tokens: int = DEFAULT_OPENAI_SETTINGS["max_tokens"],
+) -> Tuple:
+    """Fetch additional information from the web."""
+
+    # generate multiple queries for fetching information from the web
+    queries, counter_callback = multi_queries(
+        client=client,
+        prompt=prompt,
+        engine=engine,
+        num_queries=num_queries,
+        counter_callback=counter_callback,
+        temperature=temperature,
+        max_tokens=max_tokens,
+    )
+    print(f"Queries: {queries}")
+
+    # get the top URLs for the queries
+    if not source_links:
+        urls = get_urls_from_queries(
+            queries=queries,
+            api_key=google_api_key,
+            engine=google_engine_id,
+            num=NUM_URLS_PER_QUERY,
+        )
+        print(f"URLs: {urls}")
+
+        urls = list(set(urls))
+
+        # Extract text and dates from the URLs
+        docs = extract_texts(
+            urls=urls,
+        )
+    else:
+        docs = []
+        for url, content in islice(source_links.items(), num_urls or len(source_links)):
+            doc = extract_text(html=content, num_words=num_words)
+            doc.url = url
+            docs.append(doc)
+
+    # Remove None values from the list
+    docs = [doc for doc in docs if doc]
+
+    # remove empty documents ""
"" + filtered_docs = [doc for doc in docs if hasattr(doc, 'text') and doc.text != ""] + + return filtered_docs, counter_callback + + +def adjust_doc_tokens( + doc: Document, + max_tokens: int, + engine: str = "gpt-3.5-turbo" +) -> Document: + """Adjust the number of tokens in the document.""" + if count_tokens(doc.text, engine) > max_tokens: + doc.text = " ".join(doc.text.split()[:max_tokens]) + return doc + + +def get_answer_from_doc( + client: OpenAI, + prompt: str, + engine: str, + doc: Document, + counter_callback: Optional[Callable[[int, int, str], None]] = None, + max_tokens: int = DEFAULT_OPENAI_SETTINGS["max_tokens"], + temperature: int = DEFAULT_OPENAI_SETTINGS["temperature"], +): + """Get an answer from the document.""" + # length of the document + print(f"Length of the document before: {count_tokens(doc.text, engine)}") + # Make sure each doc is with the max tokens + doc = adjust_doc_tokens( + doc=doc, + max_tokens=MAX_DOC_TOKENS, + engine=engine, + ) + print(f"Length of the document after: {count_tokens(doc.text, engine)}") + + #Get the answer from the document + messages = [ + {"role": "system", "content": SYSTEM_PROMPT}, + {"role": "user", "content": PREDICTION_PROMPT.format(user_question=prompt, document=doc.text)}, + ] + + response = client.chat.completions.create( + model=engine, + messages=messages, + temperature=temperature, + max_tokens=max_tokens, + n=1, + timeout=150, + stop=None, + functions=[Results.openai_schema], + function_call={'name': 'Results'} + ) + + if counter_callback: + counter_callback( + input_tokens=response.usage.prompt_tokens, + output_tokens=response.usage.completion_tokens, + model=engine, + token_counter=count_tokens, + ) + + return Results.from_response(response), counter_callback, PREDICTION_PROMPT.format(user_question=prompt, document=doc.text) + + +def get_answer( + client: OpenAI, + prompt: str, + engine: str, + additional_information: List[Document], + counter_callback: Optional[Callable[[int, int, str], None]] = None, + max_tokens: int = DEFAULT_OPENAI_SETTINGS["max_tokens"], + temperature: int = DEFAULT_OPENAI_SETTINGS["temperature"], +): + """Get an answer from the document.""" + + for doc in additional_information: + answer, counter_callback, prediction_prompt = get_answer_from_doc( + client=client, + prompt=prompt, + engine=engine, + doc=doc, + counter_callback=counter_callback, + max_tokens=max_tokens, + temperature=temperature, + ) + + if answer.prediction in ["yes", "no"]: + return answer, counter_callback, prediction_prompt + + return Results(p_yes=0.5, p_no=0.5, confidence=0.5, info_utility=0.5, prediction="I don't know"), counter_callback, prediction_prompt + +def run(**kwargs) -> Tuple[str, Optional[str], Optional[Dict[str, Any]], Any]: + """Run the task""" + with OpenAIClientManager(kwargs["api_keys"]["openai"]): + + tool = kwargs["tool"] + prompt = extract_question(kwargs["prompt"]) + max_tokens = kwargs.get("max_tokens", DEFAULT_OPENAI_SETTINGS["max_tokens"]) + temperature = kwargs.get("temperature", DEFAULT_OPENAI_SETTINGS["temperature"]) + num_words = kwargs.get("num_words", None) + num_urls = kwargs.get("num_urls", DEFAULT_NUM_URLS[tool]) + num_queries = kwargs.get("num_queries", DEFAULT_NUM_QUERIES[tool]) + counter_callback = kwargs.get("counter_callback", None) + api_keys = kwargs.get("api_keys", {}) + google_api_key = api_keys.get("google_api_key", None) + google_engine_id = api_keys.get("google_engine_id", None) + + if tool not in ALLOWED_TOOLS: + raise ValueError(f"Tool {tool} is not supported.") + + engine = 
TOOL_TO_ENGINE[tool] + + # fetch additional information from the web + additional_information, counter_callback = fetch_additional_information( + client=client, + prompt=prompt, + engine=engine, + google_api_key=google_api_key, + google_engine_id=google_engine_id, + counter_callback=counter_callback, + source_links=kwargs.get("source_links", None), + num_words=num_words, + num_urls=num_urls, + num_queries=num_queries, + temperature=temperature, + max_tokens=max_tokens, + ) + + # get answer from the doc + results, counter_callback, prediction_prompt = get_answer( + client=client, + prompt=prompt, + engine=engine, + additional_information=additional_information, + counter_callback=counter_callback, + max_tokens=max_tokens, + temperature=temperature, + ) + + # convert the results to a dictionary + pairs = str(results).split() + result_dict = {} + for pair in pairs: + key, value = pair.split("=") + if key != "prediction": + result_dict[key] = float(value) + + results = result_dict + results = json.dumps(results) + return results, prediction_prompt, None, counter_callback diff --git a/packages/napthaai/customs/resolve_market_reasoning/component.yaml b/packages/napthaai/customs/resolve_market_reasoning/component.yaml index da3fc655..9595157b 100644 --- a/packages/napthaai/customs/resolve_market_reasoning/component.yaml +++ b/packages/napthaai/customs/resolve_market_reasoning/component.yaml @@ -7,8 +7,31 @@ license: Apache-2.0 aea_version: '>=1.0.0, <2.0.0' fingerprint: __init__.py: bafybeib36ew6vbztldut5xayk5553rylrq7yv4cpqyhwc5ktvd4cx67vwu - resolve_market_reasoning.py: bafybeibeeyl2arpzqpwvt3acxopz5k56rzvayhvgndwsvkjfdy3gv7nznm + resolve_market_reasoning.py: bafybeiejchzo5ty37nuhxczxmabo3jl52a4wewtnqlty6253yxmzeyzhjy fingerprint_ignore_patterns: [] entry_point: resolve_market_reasoning.py callable: run -dependencies: {} +dependencies: + google-api-python-client: + version: ==2.95.0 + googlesearch-python: + version: ==1.2.3 + requests: {} + pydantic: + version: '>=1.9.0,<3' + faiss-cpu: + version: ==1.7.4 + tiktoken: + version: ==0.5.1 + markdownify: + version: ==0.11.6 + openai: + version: ==1.11.0 + docstring-parser: + version: ==0.15 + readability-lxml: + version: ==0.8.1 + pypdf2: + version: ==3.0.1 + numpy: + version: '>=1.19.0' diff --git a/packages/napthaai/customs/resolve_market_reasoning/resolve_market_reasoning.py b/packages/napthaai/customs/resolve_market_reasoning/resolve_market_reasoning.py index f2f9a594..53159074 100644 --- a/packages/napthaai/customs/resolve_market_reasoning/resolve_market_reasoning.py +++ b/packages/napthaai/customs/resolve_market_reasoning/resolve_market_reasoning.py @@ -27,7 +27,6 @@ from pydantic import BaseModel, Field from docstring_parser import parse import tiktoken -import json from openai import OpenAI import numpy as np import faiss @@ -346,6 +345,7 @@ def multi_queries( timeout=150, stop=None, functions=[Queries.openai_schema], + function_call={'name': 'Queries'} ) queries = Queries.from_response(response) @@ -359,8 +359,7 @@ def multi_queries( model=engine, token_counter=count_tokens, ) - return queries.queries, counter_callback - return queries.queries, None + return queries.queries, counter_callback def search_google(query: str, api_key: str, engine: str, num: int) -> List[str]: @@ -383,13 +382,16 @@ def get_urls_from_queries( """Get URLs from search engine queries""" results = [] for query in queries: - for url in search_google( + try: + for url in search_google( query=query, api_key=api_key, engine=engine, num=num, - ): - results.append(url) + 
): + results.append(url) + except Exception as e: + print(f"An error occurred: {e}") unique_results = list(set(results)) return unique_results @@ -416,6 +418,7 @@ def get_dates( timeout=90, stop=None, functions=[Date.openai_schema], + function_call={'name': 'Date'} ) date = Date.from_response(response) if date.date_available: @@ -604,6 +607,9 @@ def fetch_additional_information( # Remove None values from the list docs = [doc for doc in docs if doc] + # remove doc with "" + docs = [doc for doc in docs if hasattr(doc, "text") and doc.text != ""] + # Chunk the documents split_docs = [] for doc in docs: @@ -670,7 +676,7 @@ def adjust_additional_information( return additional_information -def run(**kwargs) -> Tuple[Optional[str], Any, Optional[Dict[str, Any]], Any]: +def run(**kwargs) -> Tuple[str, Optional[str], Optional[Dict[str, Any]], Any]: """Run the task""" with OpenAIClientManager(kwargs["api_keys"]["openai"]): tool = kwargs["tool"] @@ -705,6 +711,7 @@ def run(**kwargs) -> Tuple[Optional[str], Any, Optional[Dict[str, Any]], Any]: timeout=150, stop=None, functions=[Valid.openai_schema], + function_call={'name': 'Valid'} ) valid_results = Valid.from_response(response_valid) @@ -776,6 +783,7 @@ def run(**kwargs) -> Tuple[Optional[str], Any, Optional[Dict[str, Any]], Any]: timeout=150, stop=None, functions=[Determinable.openai_schema], + function_call={'name': 'Determinable'} ) determinable_results = Determinable.from_response(response_determinable) @@ -795,6 +803,32 @@ def run(**kwargs) -> Tuple[Optional[str], Any, Optional[Dict[str, Any]], Any]: }, ] + response_prediction = client.chat.completions.create( + model=engine, + messages=messages, + temperature=DEFAULT_OPENAI_SETTINGS["temperature"], + max_tokens=DEFAULT_OPENAI_SETTINGS["max_tokens"], + n=1, + timeout=150, + stop=None, + functions=[Results.openai_schema], + function_call={'name': 'Results'} + ) + + results = Results.from_response(response_prediction) + print(f"Results: {results}") + + # Make the prediction + messages = [ + {"role": "system", "content": SYSTEM_PROMPT}, + { + "role": "user", + "content": PREDICTION_PROMPT.format( + user_prompt=prompt, reasoning=reasoning + ), + }, + ] + response_prediction = client.chat.completions.create( model=engine, messages=messages, @@ -818,10 +852,5 @@ def run(**kwargs) -> Tuple[Optional[str], Any, Optional[Dict[str, Any]], Any]: model=engine, token_counter=count_tokens, ) - return ( - results.json(), - reasoning, - None, - counter_callback, - ) - return results.json(), reasoning, None, None + return results.json(), reasoning, None, counter_callback + diff --git a/packages/nickcom007/customs/prediction_request_sme/component.yaml b/packages/nickcom007/customs/prediction_request_sme/component.yaml index 78af6bdd..3cab1d17 100644 --- a/packages/nickcom007/customs/prediction_request_sme/component.yaml +++ b/packages/nickcom007/customs/prediction_request_sme/component.yaml @@ -8,8 +8,21 @@ license: Apache-2.0 aea_version: '>=1.0.0, <2.0.0' fingerprint: __init__.py: bafybeibbn67pnrrm4qm3n3kbelvbs3v7fjlrjniywmw2vbizarippidtvi - prediction_request_sme.py: bafybeidvc5cgdjt4d52ntowj2gv7o5hh6egc6yzpgk2mzdfn77eeafqady + prediction_request_sme.py: bafybeibxbcgzwscsvjg3c32aplsbggqho4iel5nwpyhe7iuu2wjcmvsaf4 fingerprint_ignore_patterns: [] entry_point: prediction_request_sme.py callable: run -dependencies: {} +dependencies: + requests: {} + google-api-python-client: + version: ==2.95.0 + googlesearch-python: + version: ==1.2.3 + tiktoken: + version: ==0.5.1 + readability-lxml: + version: ==0.8.1 + 
markdownify: + version: ==0.11.6 + openai: + version: ==1.11.0 diff --git a/packages/nickcom007/customs/prediction_request_sme/prediction_request_sme.py b/packages/nickcom007/customs/prediction_request_sme/prediction_request_sme.py index e1636ba5..8dd87109 100644 --- a/packages/nickcom007/customs/prediction_request_sme/prediction_request_sme.py +++ b/packages/nickcom007/customs/prediction_request_sme/prediction_request_sme.py @@ -65,7 +65,7 @@ def count_tokens(text: str, model: str) -> int: DEFAULT_NUM_WORDS: Dict[str, Optional[int]] = defaultdict(lambda: 300) DEFAULT_OPENAI_SETTINGS = { "max_tokens": 500, - "temperature": 0.7, + "temperature": 0., } MAX_TOKENS = { "gpt-3.5-turbo": 4096, @@ -504,3 +504,4 @@ def run(**kwargs) -> Tuple[str, Optional[str], Optional[Dict[str, Any]], Any]: ) return response.choices[0].message.content, prediction_prompt, None, counter_callback + diff --git a/packages/nickcom007/customs/sme_generation_request/component.yaml b/packages/nickcom007/customs/sme_generation_request/component.yaml index 86fc7445..827c4b0e 100644 --- a/packages/nickcom007/customs/sme_generation_request/component.yaml +++ b/packages/nickcom007/customs/sme_generation_request/component.yaml @@ -12,4 +12,9 @@ fingerprint: fingerprint_ignore_patterns: [] entry_point: sme_generation_request.py callable: run -dependencies: {} +dependencies: + openai: + version: ==1.11.0 + requests: {} + tiktoken: + version: ==0.5.1 diff --git a/packages/packages.json b/packages/packages.json index 2c466670..0daee355 100644 --- a/packages/packages.json +++ b/packages/packages.json @@ -1,33 +1,34 @@ { "dev": { - "custom/valory/native_transfer_request/0.1.0": "bafybeihugaylajwh2fgypxatcw5qrw5qxadtrsi2h2s2246442wlvjirtm", - "custom/valory/prediction_request_claude/0.1.0": "bafybeicctwuliyqrjlnqw7x3zkltnwpmv2dfuc6drebtbhxznyklewk5km", - "custom/valory/openai_request/0.1.0": "bafybeibdcttrlgp5udygntka5fofi566pitkxhquke37ng7csvndhy4s2i", - "custom/valory/prediction_request_embedding/0.1.0": "bafybeieyebiw4vedicpkyibj4zqtcnqp2i3kvk7vpn35psud4ujmrknux4", - "custom/valory/resolve_market/0.1.0": "bafybeih3a4n3fnx4qmy3lpp4paat4wqvpgjyd5emot37cs4i5p6ii672bm", - "custom/valory/prediction_request/0.1.0": "bafybeidzexi5sowgolnzvwgyaxvl2f6bb2d22gk6er7co2gld6p5zeuseq", - "custom/valory/stability_ai_request/0.1.0": "bafybeiegbsq2ajxyipajac4mmxyvbt22ctwyuypuid6ziavqpndns6fsjy", - "custom/polywrap/prediction_with_research_report/0.1.0": "bafybeifqithnwk4cfmzkvx2pls5jdbivn3g2em667doh4f2q3guyq34ahy", - "custom/jhehemann/prediction_sum_url_content/0.1.0": "bafybeia2coxsrc3ruodypqvxqyweaqphneyh6o3j7n7gqg3ipe5n3ec6zu", - "custom/psouranis/optimization_by_prompting/0.1.0": "bafybeibgfovquvu3gg4o7y6ud2sr536v45o2dj4shqobc7bqzzdyjbinei", - "custom/nickcom007/sme_generation_request/0.1.0": "bafybeihfl4663yjkxltidavnxou7rumld5wzcr43uw23bpvb3ivefsjhv4", - "custom/nickcom007/prediction_request_sme/0.1.0": "bafybeicfenfjer2kvmnstul4bzxf7xfiupbipkjzattjsieaapdflnsyni", - "custom/napthaai/resolve_market_reasoning/0.1.0": "bafybeieu7nkhke5lfilazexaqy22xkpo6jyrbwte5soppfds3nx5xwzixq", - "custom/napthaai/prediction_request_rag/0.1.0": "bafybeihdywbiaywvia32mpdos7e4pvcnmsq4ij65otctidbd6rjf3j6tqu", + "custom/valory/native_transfer_request/0.1.0": "bafybeid22vi5xtavqhq5ir2kq6nakckm3tl72wcgftsq35ak3cboyn6eea", + "custom/valory/prediction_request_claude/0.1.0": "bafybeidmtovzewf3be6wzdsoozdyin2hvq2efw233arohv243f52jzapli", + "custom/valory/openai_request/0.1.0": "bafybeigew6ukd53n3z352wmr5xu6or3id7nsqn7vb47bxs4pg4qtkmbdiu", + 
"custom/valory/prediction_request_embedding/0.1.0": "bafybeibcabouespjmjoro4wzu4klwljf2jkzimy3f4roqbmk44awfq7zxi", + "custom/valory/resolve_market/0.1.0": "bafybeicv7uxujwffvojynw36ot2sr2qsyx2je4jf57v3loy47l2ln6rmje", + "custom/valory/prediction_request/0.1.0": "bafybeialwueu46qz6kummpei5alyl7hnnt7lhmmgwyhakfnytyhcftkzri", + "custom/valory/stability_ai_request/0.1.0": "bafybeieexvfgpyr5f33dvho2ajvddovln27ujp6qcix3j4l2fvsdotkq3e", + "custom/polywrap/prediction_with_research_report/0.1.0": "bafybeia3xhd2ycqo6ygojnsdlxw2yhkhxa4fbfwybusvizpcmsv2mjrjly", + "custom/jhehemann/prediction_sum_url_content/0.1.0": "bafybeibospfwywoc6ktmqy4vg3x7olcxy22zhj5zjtvgapvuyl4iexqlx4", + "custom/psouranis/optimization_by_prompting/0.1.0": "bafybeifxcara62s2xz6e3z6ozgi7hmmvk5huoypkamgdbtker3zt2yc4qy", + "custom/nickcom007/sme_generation_request/0.1.0": "bafybeib65cb3yqxnwncyjy3uawixe4vaz6awj3qrya3kgngdu6oxhftc5i", + "custom/nickcom007/prediction_request_sme/0.1.0": "bafybeicd5b5q7kjtu4i3bqz2p73jaj5oayttbrwtnk54bnigw2yqbyxt4u", + "custom/napthaai/resolve_market_reasoning/0.1.0": "bafybeiht2yl3mqbkqalcja6vbzuqr5464wm2edjkkvorqivi5xsnhudc6i", + "custom/napthaai/prediction_request_rag/0.1.0": "bafybeia7eh5qkory5i3wwfkbwzumosuwfitdf42epsgnv4chgbqeimrmpm", + "custom/napthaai/prediction_request_reasoning/0.1.0": "bafybeieazvsmsjslg2hflqbt5b3akcbkoybgrogwvesraq7jo5p3dlvh4e", "protocol/valory/acn_data_share/0.1.0": "bafybeih5ydonnvrwvy2ygfqgfabkr47s4yw3uqxztmwyfprulwfsoe7ipq", "protocol/valory/websocket_client/0.1.0": "bafybeih43mnztdv3v2hetr2k3gezg7d3yj4ur7cxdvcyaqhg65e52s5sf4", - "contract/valory/agent_mech/0.1.0": "bafybeicbhvrlug56qvwlh6b4y35xg6fytvtahhaae7xm7jssecbie576mu", + "contract/valory/agent_mech/0.1.0": "bafybeidsau5x2vjofpcdzxkg7airwkrdag65ohtxcby2ut27tfjizgnrnm", "contract/valory/agent_registry/0.1.0": "bafybeiargayav6yiztdnwzejoejstcx4idssch2h4f5arlgtzj3tgsgfmu", - "contract/valory/hash_checkpoint/0.1.0": "bafybeianr3zy3bb464jwlwwxtk2daeeh4cefy7ihw5bfn7zrn7vjs3fyga", + "contract/valory/hash_checkpoint/0.1.0": "bafybeigv2bceirhy72yajxzibi4a5wrcfptfbkjbzzko6pqdq2f4dzr3xa", "connection/valory/websocket_client/0.1.0": "bafybeiflmystocxaqblhpzqlcop2vkhsknpzjx2jomohomaxamwskeokzm", "skill/valory/contract_subscription/0.1.0": "bafybeicyugrkx5glat4p4ezwf6i7oduh26eycfie6ftd4uxrknztzl3ik4", - "skill/valory/mech_abci/0.1.0": "bafybeiagit2v4swvgwsulrqpr75qgkxhzjjkyc7yevqkggb3jblcjymytq", - "skill/valory/task_submission_abci/0.1.0": "bafybeib6yijhh5ss7mufpjghjwobxjynuaz2mf52jo54dtpyer4ovowei4", - "skill/valory/task_execution/0.1.0": "bafybeidp6ae2noa7wpklelyu5y3yuxpmw2q4e62rfdgohvuqalln74pmce", + "skill/valory/mech_abci/0.1.0": "bafybeicpuwyiiivolhfs3w6iwcx5gpcxtsf273ay7nch7f2ocxm7fd6bqm", + "skill/valory/task_submission_abci/0.1.0": "bafybeidmkzpqpvyol3636eeprkloy3z3t2nipmwvu6da3dtk2q4tjlab6u", + "skill/valory/task_execution/0.1.0": "bafybeifb5ic5lczqiekojahagyw73ylp7me4nrn7ltgycpbryvzorrqbwa", "skill/valory/websocket_client/0.1.0": "bafybeidwntmkk4b2ixq5454ycbkknclqx7a6vpn7aqpm2nw3duszqrxvta", - "skill/valory/subscription_abci/0.1.0": "bafybeiclwa2u24nv4yzke6s24qxz2uxc6tpniyjxzprlb7oinkiijdhlgy", - "agent/valory/mech/0.1.0": "bafybeifrlwdaqfnychkmnpirix43zfbamgukkpziyuy7erze7owtbiuqc4", - "service/valory/mech/0.1.0": "bafybeifmcgdmynv7ww6x334ygt6eyljqepoddtaqhlywfi2jlqzhkecsu4" + "skill/valory/subscription_abci/0.1.0": "bafybeidoqeznyhbh3znaqbfdnftzq6fdh77m35qgftdwz46nz2iwda4yam", + "agent/valory/mech/0.1.0": "bafybeia6suywbe4ptwtrmccbgiotf5rmzz6xusqbmqclzebk6sbkgtwku4", + "service/valory/mech/0.1.0": 
"bafybeicpthl4524tdqcek5qfuof552ibbagz4jipczimi6c2osmyfxmnm4" }, "third_party": { "protocol/valory/default/1.0.0": "bafybeifqcqy5hfbnd7fjv4mqdjrtujh2vx3p2xhe33y67zoxa6ph7wdpaq", @@ -39,21 +40,21 @@ "protocol/valory/acn/1.1.0": "bafybeidluaoeakae3exseupaea4i3yvvk5vivyt227xshjlffywwxzcxqe", "protocol/valory/ipfs/0.1.0": "bafybeiftxi2qhreewgsc5wevogi7yc5g6hbcbo4uiuaibauhv3nhfcdtvm", "protocol/valory/tendermint/0.1.0": "bafybeig4mi3vmlv5zpbjbfuzcgida6j5f2nhrpedxicmrrfjweqc5r7cra", - "contract/valory/service_registry/0.1.0": "bafybeiby5x4wfdywlenmoudbykdxohpq2nifqxfep5niqgxrjyrekyahzy", - "contract/valory/gnosis_safe_proxy_factory/0.1.0": "bafybeie6ynnoavvk2fpbn426nlp32sxrj7pz5esgebtlezy4tmx5gjretm", - "contract/valory/gnosis_safe/0.1.0": "bafybeictjc7saviboxbsdcey3trvokrgo7uoh76mcrxecxhlvcrp47aqg4", + "contract/valory/service_registry/0.1.0": "bafybeicbxmbzt757lbmyh6762lrkcrp3oeum6dk3z7pvosixasifsk6xlm", + "contract/valory/gnosis_safe_proxy_factory/0.1.0": "bafybeib6podeifufgmawvicm3xyz3uaplbcrsptjzz4unpseh7qtcpar74", + "contract/valory/gnosis_safe/0.1.0": "bafybeibq77mgzhyb23blf2eqmia3kc6io5karedfzhntvpcebeqdzrgyqa", "contract/valory/multisend/0.1.0": "bafybeig5byt5urg2d2bsecufxe5ql7f4mezg3mekfleeh32nmuusx66p4y", "connection/valory/http_client/0.23.0": "bafybeih5vzo22p2umhqo52nzluaanxx7kejvvpcpdsrdymckkyvmsim6gm", - "connection/valory/abci/0.1.0": "bafybeifbnhe4f2bll3a5o3hqji3dqx4soov7hr266rdz5vunxgzo5hggbq", - "connection/valory/ipfs/0.1.0": "bafybeiflaxrnepfn4hcnq5pieuc7ki7d422y3iqb54lv4tpgs7oywnuhhq", + "connection/valory/abci/0.1.0": "bafybeiclexb6cnsog5yjz2qtvqyfnf7x5m7tpp56hblhk3pbocbvgjzhze", + "connection/valory/ipfs/0.1.0": "bafybeihndk6hohj3yncgrye5pw7b7w2kztj3avby5u5mfk2fpjh7hqphii", "connection/valory/ledger/0.19.0": "bafybeic3ft7l7ca3qgnderm4xupsfmyoihgi27ukotnz7b5hdczla2enya", "connection/valory/p2p_libp2p_client/0.1.0": "bafybeid3xg5k2ol5adflqloy75ibgljmol6xsvzvezebsg7oudxeeolz7e", "connection/valory/http_server/0.22.0": "bafybeihpgu56ovmq4npazdbh6y6ru5i7zuv6wvdglpxavsckyih56smu7m", - "skill/valory/transaction_settlement_abci/0.1.0": "bafybeid57tozt5f3kgzmu22nbr3c3oy4p7bi2bu66rqsgnlylq6xgh2ixe", - "skill/valory/termination_abci/0.1.0": "bafybeie6h7j4hyhgj2wte64n3xyudxq4pgqcqjmslxi5tff4mb6vce2tay", - "skill/valory/abstract_round_abci/0.1.0": "bafybeigjrepaqpb3m7zunmt4hryos4vto4yyj3u6iyofdb2fotwho3bqvm", - "skill/valory/reset_pause_abci/0.1.0": "bafybeicm7onl72rfnn33pbvzwjpkl5gafeieyobfcnyresxz7kunjwmqea", - "skill/valory/registration_abci/0.1.0": "bafybeif3ln6eg53ebrfe6uicjew4uqp2ynyrcxkw5wi4jm3ixqv3ykte4a", - "skill/valory/abstract_abci/0.1.0": "bafybeihljirk3d4rgvmx2nmz3p2mp27iwh2o5euce5gccwjwrpawyjzuaq" + "skill/valory/transaction_settlement_abci/0.1.0": "bafybeigtzlk4uakmd54rxnznorcrstsr52kta474lgrnvx5ovr546vj7sq", + "skill/valory/termination_abci/0.1.0": "bafybeihq6qtbwt6i53ayqym63vhjexkcppy26gguzhhjqywfmiuqghvv44", + "skill/valory/abstract_round_abci/0.1.0": "bafybeih3enhagoql7kzpeyzzu2scpkif6y3ubakpralfnwxcvxexdyvy5i", + "skill/valory/reset_pause_abci/0.1.0": "bafybeidw4mbx3os3hmv7ley7b3g3gja7ydpitr7mxbjpwzxin2mzyt5yam", + "skill/valory/registration_abci/0.1.0": "bafybeiek7zcsxbucjwzgqfftafhfrocvc7q4yxllh2q44jeemsjxg3rcfm", + "skill/valory/abstract_abci/0.1.0": "bafybeihat4giyc4bz6zopvahcj4iw53356pbtwfn7p4d5yflwly2qhahum" } } \ No newline at end of file diff --git a/packages/polywrap/customs/prediction_with_research_report/component.yaml b/packages/polywrap/customs/prediction_with_research_report/component.yaml index d78b1d20..31b4992d 100644 --- 
a/packages/polywrap/customs/prediction_with_research_report/component.yaml +++ b/packages/polywrap/customs/prediction_with_research_report/component.yaml @@ -12,4 +12,21 @@ fingerprint: fingerprint_ignore_patterns: [] entry_point: prediction_with_research_report.py callable: run -dependencies: {} +dependencies: + langchain: + version: ==0.0.303 + requests: {} + tiktoken: + version: ==0.5.1 + tavily-python: + version: ==0.3.0 + pydantic: + version: '>=1.9.0,<3' + beautifulsoup4: + version: ==4.12.2 + markdownify: + version: ==0.11.6 + openai: + version: ==1.11.0 + chromadb: + version: ==0.4.13 diff --git a/packages/psouranis/customs/optimization_by_prompting/component.yaml b/packages/psouranis/customs/optimization_by_prompting/component.yaml index 2dcdef33..3429bd96 100644 --- a/packages/psouranis/customs/optimization_by_prompting/component.yaml +++ b/packages/psouranis/customs/optimization_by_prompting/component.yaml @@ -11,4 +11,38 @@ fingerprint: fingerprint_ignore_patterns: [] entry_point: optimization_by_prompting.py callable: run -dependencies: {} +dependencies: + tqdm: + version: ==4.56.0 + google-api-python-client: + version: ==2.95.0 + googlesearch-python: + version: ==1.2.3 + requests: {} + pydantic: + version: '>=1.9.0,<3' + faiss-cpu: + version: ==1.7.4 + tiktoken: + version: ==0.5.1 + markdownify: + version: ==0.11.6 + openai: + version: ==1.11.0 + docstring-parser: + version: ==0.15 + readability-lxml: + version: ==0.8.1 + pypdf2: + version: ==3.0.1 + pandas: {} + python-dateutil: + version: ==2.8.2 + beautifulsoup4: + version: ==4.12.2 + spacy: + version: ==3.7.2 + langchain: + version: ==0.0.303 + scikit-learn: + version: ==1.3.1 diff --git a/packages/valory/agents/mech/aea-config.yaml b/packages/valory/agents/mech/aea-config.yaml index 69ce2dd6..be535b2b 100644 --- a/packages/valory/agents/mech/aea-config.yaml +++ b/packages/valory/agents/mech/aea-config.yaml @@ -7,21 +7,21 @@ aea_version: '>=1.37.0, <2.0.0' fingerprint: {} fingerprint_ignore_patterns: [] connections: -- valory/abci:0.1.0:bafybeifbnhe4f2bll3a5o3hqji3dqx4soov7hr266rdz5vunxgzo5hggbq +- valory/abci:0.1.0:bafybeiclexb6cnsog5yjz2qtvqyfnf7x5m7tpp56hblhk3pbocbvgjzhze - valory/http_client:0.23.0:bafybeih5vzo22p2umhqo52nzluaanxx7kejvvpcpdsrdymckkyvmsim6gm - valory/http_server:0.22.0:bafybeihpgu56ovmq4npazdbh6y6ru5i7zuv6wvdglpxavsckyih56smu7m -- valory/ipfs:0.1.0:bafybeiflaxrnepfn4hcnq5pieuc7ki7d422y3iqb54lv4tpgs7oywnuhhq +- valory/ipfs:0.1.0:bafybeihndk6hohj3yncgrye5pw7b7w2kztj3avby5u5mfk2fpjh7hqphii - valory/ledger:0.19.0:bafybeic3ft7l7ca3qgnderm4xupsfmyoihgi27ukotnz7b5hdczla2enya - valory/p2p_libp2p_client:0.1.0:bafybeid3xg5k2ol5adflqloy75ibgljmol6xsvzvezebsg7oudxeeolz7e - valory/websocket_client:0.1.0:bafybeiflmystocxaqblhpzqlcop2vkhsknpzjx2jomohomaxamwskeokzm contracts: -- valory/agent_mech:0.1.0:bafybeicbhvrlug56qvwlh6b4y35xg6fytvtahhaae7xm7jssecbie576mu +- valory/agent_mech:0.1.0:bafybeidsau5x2vjofpcdzxkg7airwkrdag65ohtxcby2ut27tfjizgnrnm - valory/agent_registry:0.1.0:bafybeiargayav6yiztdnwzejoejstcx4idssch2h4f5arlgtzj3tgsgfmu -- valory/gnosis_safe:0.1.0:bafybeictjc7saviboxbsdcey3trvokrgo7uoh76mcrxecxhlvcrp47aqg4 -- valory/gnosis_safe_proxy_factory:0.1.0:bafybeie6ynnoavvk2fpbn426nlp32sxrj7pz5esgebtlezy4tmx5gjretm -- valory/hash_checkpoint:0.1.0:bafybeianr3zy3bb464jwlwwxtk2daeeh4cefy7ihw5bfn7zrn7vjs3fyga +- valory/gnosis_safe:0.1.0:bafybeibq77mgzhyb23blf2eqmia3kc6io5karedfzhntvpcebeqdzrgyqa +- valory/gnosis_safe_proxy_factory:0.1.0:bafybeib6podeifufgmawvicm3xyz3uaplbcrsptjzz4unpseh7qtcpar74 +- 
valory/hash_checkpoint:0.1.0:bafybeigv2bceirhy72yajxzibi4a5wrcfptfbkjbzzko6pqdq2f4dzr3xa - valory/multisend:0.1.0:bafybeig5byt5urg2d2bsecufxe5ql7f4mezg3mekfleeh32nmuusx66p4y -- valory/service_registry:0.1.0:bafybeiby5x4wfdywlenmoudbykdxohpq2nifqxfep5niqgxrjyrekyahzy +- valory/service_registry:0.1.0:bafybeicbxmbzt757lbmyh6762lrkcrp3oeum6dk3z7pvosixasifsk6xlm protocols: - open_aea/signing:1.0.0:bafybeihv62fim3wl2bayavfcg3u5e5cxu3b7brtu4cn5xoxd6lqwachasi - valory/abci:0.1.0:bafybeiaqmp7kocbfdboksayeqhkbrynvlfzsx4uy4x6nohywnmaig4an7u @@ -35,17 +35,17 @@ protocols: - valory/tendermint:0.1.0:bafybeig4mi3vmlv5zpbjbfuzcgida6j5f2nhrpedxicmrrfjweqc5r7cra - valory/websocket_client:0.1.0:bafybeih43mnztdv3v2hetr2k3gezg7d3yj4ur7cxdvcyaqhg65e52s5sf4 skills: -- valory/abstract_abci:0.1.0:bafybeihljirk3d4rgvmx2nmz3p2mp27iwh2o5euce5gccwjwrpawyjzuaq -- valory/abstract_round_abci:0.1.0:bafybeigjrepaqpb3m7zunmt4hryos4vto4yyj3u6iyofdb2fotwho3bqvm +- valory/abstract_abci:0.1.0:bafybeihat4giyc4bz6zopvahcj4iw53356pbtwfn7p4d5yflwly2qhahum +- valory/abstract_round_abci:0.1.0:bafybeih3enhagoql7kzpeyzzu2scpkif6y3ubakpralfnwxcvxexdyvy5i - valory/contract_subscription:0.1.0:bafybeicyugrkx5glat4p4ezwf6i7oduh26eycfie6ftd4uxrknztzl3ik4 -- valory/mech_abci:0.1.0:bafybeiagit2v4swvgwsulrqpr75qgkxhzjjkyc7yevqkggb3jblcjymytq -- valory/registration_abci:0.1.0:bafybeif3ln6eg53ebrfe6uicjew4uqp2ynyrcxkw5wi4jm3ixqv3ykte4a -- valory/reset_pause_abci:0.1.0:bafybeicm7onl72rfnn33pbvzwjpkl5gafeieyobfcnyresxz7kunjwmqea -- valory/subscription_abci:0.1.0:bafybeiclwa2u24nv4yzke6s24qxz2uxc6tpniyjxzprlb7oinkiijdhlgy -- valory/task_execution:0.1.0:bafybeidp6ae2noa7wpklelyu5y3yuxpmw2q4e62rfdgohvuqalln74pmce -- valory/task_submission_abci:0.1.0:bafybeib6yijhh5ss7mufpjghjwobxjynuaz2mf52jo54dtpyer4ovowei4 -- valory/termination_abci:0.1.0:bafybeie6h7j4hyhgj2wte64n3xyudxq4pgqcqjmslxi5tff4mb6vce2tay -- valory/transaction_settlement_abci:0.1.0:bafybeid57tozt5f3kgzmu22nbr3c3oy4p7bi2bu66rqsgnlylq6xgh2ixe +- valory/mech_abci:0.1.0:bafybeicpuwyiiivolhfs3w6iwcx5gpcxtsf273ay7nch7f2ocxm7fd6bqm +- valory/registration_abci:0.1.0:bafybeiek7zcsxbucjwzgqfftafhfrocvc7q4yxllh2q44jeemsjxg3rcfm +- valory/reset_pause_abci:0.1.0:bafybeidw4mbx3os3hmv7ley7b3g3gja7ydpitr7mxbjpwzxin2mzyt5yam +- valory/subscription_abci:0.1.0:bafybeidoqeznyhbh3znaqbfdnftzq6fdh77m35qgftdwz46nz2iwda4yam +- valory/task_execution:0.1.0:bafybeifb5ic5lczqiekojahagyw73ylp7me4nrn7ltgycpbryvzorrqbwa +- valory/task_submission_abci:0.1.0:bafybeidmkzpqpvyol3636eeprkloy3z3t2nipmwvu6da3dtk2q4tjlab6u +- valory/termination_abci:0.1.0:bafybeihq6qtbwt6i53ayqym63vhjexkcppy26gguzhhjqywfmiuqghvv44 +- valory/transaction_settlement_abci:0.1.0:bafybeigtzlk4uakmd54rxnznorcrstsr52kta474lgrnvx5ovr546vj7sq - valory/websocket_client:0.1.0:bafybeidwntmkk4b2ixq5454ycbkknclqx7a6vpn7aqpm2nw3duszqrxvta default_ledger: ethereum required_ledgers: @@ -92,7 +92,7 @@ dependencies: langchain: version: ==0.0.303 open-aea-ledger-ethereum: - version: ==1.48.0 + version: ==1.50.0 pandas: version: ==2.1.1 python-dateutil: @@ -190,6 +190,10 @@ models: multisend_address: ${str:0xA238CBeb142c10Ef7Ad8442C6D1f9E89e07e7761} service_registry_address: ${str:0x9338b5153AE39BB89f50468E608eD9d764B755fD} service_endpoint_base: ${str:https://dummy_service.autonolas.tech/} + gas_params: + gas_price: ${int:null} + max_fee_per_gas: ${int:null} + max_priority_fee_per_gas: ${int:null} init_fallback_gas: ${int:500000} manual_gas_limit: ${int:1000000} service_owner_share: ${float:0.1} diff --git a/packages/valory/contracts/agent_mech/contract.yaml 
b/packages/valory/contracts/agent_mech/contract.yaml index fcb20c3c..479de973 100644 --- a/packages/valory/contracts/agent_mech/contract.yaml +++ b/packages/valory/contracts/agent_mech/contract.yaml @@ -15,7 +15,7 @@ contract_interface_paths: ethereum: build/AgentMech.json dependencies: open-aea-ledger-ethereum: - version: ==1.48.0 + version: ==1.50.0 web3: version: <7,>=6.0.0 contracts: [] diff --git a/packages/valory/contracts/hash_checkpoint/contract.yaml b/packages/valory/contracts/hash_checkpoint/contract.yaml index ca231454..816c2f93 100644 --- a/packages/valory/contracts/hash_checkpoint/contract.yaml +++ b/packages/valory/contracts/hash_checkpoint/contract.yaml @@ -15,7 +15,7 @@ contract_interface_paths: ethereum: build/HashCheckpoint.json dependencies: open-aea-ledger-ethereum: - version: ==1.48.0 + version: ==1.50.0 web3: version: <7,>=6.0.0 contracts: [] diff --git a/packages/valory/customs/native_transfer_request/component.yaml b/packages/valory/customs/native_transfer_request/component.yaml index af5bfb01..5715869b 100644 --- a/packages/valory/customs/native_transfer_request/component.yaml +++ b/packages/valory/customs/native_transfer_request/component.yaml @@ -11,4 +11,8 @@ fingerprint: fingerprint_ignore_patterns: [] entry_point: native_transfer_request.py callable: run -dependencies: {} +dependencies: + tiktoken: + version: ==0.5.1 + openai: + version: ==1.11.0 diff --git a/packages/valory/customs/openai_request/component.yaml b/packages/valory/customs/openai_request/component.yaml index 61456b71..2748162d 100644 --- a/packages/valory/customs/openai_request/component.yaml +++ b/packages/valory/customs/openai_request/component.yaml @@ -11,4 +11,8 @@ fingerprint: fingerprint_ignore_patterns: [] entry_point: openai_request.py callable: run -dependencies: {} +dependencies: + openai: + version: ==1.11.0 + tiktoken: + version: ==0.5.1 diff --git a/packages/valory/customs/prediction_request/component.yaml b/packages/valory/customs/prediction_request/component.yaml index e1bdaaf3..7686d7ef 100644 --- a/packages/valory/customs/prediction_request/component.yaml +++ b/packages/valory/customs/prediction_request/component.yaml @@ -7,8 +7,33 @@ license: Apache-2.0 aea_version: '>=1.0.0, <2.0.0' fingerprint: __init__.py: bafybeibbn67pnrrm4qm3n3kbelvbs3v7fjlrjniywmw2vbizarippidtvi - prediction_request.py: bafybeia6babivxv7ikxddza6rvnl5imwkjrlnxd7vmej3ktclycoebrxj4 + prediction_request.py: bafybeiheac6lttjjmklbyjpbflx5whk7jqqs6vyvyfpomi6cwb3udnt3dm fingerprint_ignore_patterns: [] entry_point: prediction_request.py callable: run -dependencies: {} +dependencies: + google-api-python-client: + version: ==2.95.0 + googlesearch-python: + version: ==1.2.3 + requests: {} + pydantic: + version: '>=1.9.0,<3' + faiss-cpu: + version: ==1.7.4 + tiktoken: + version: ==0.5.1 + markdownify: + version: ==0.11.6 + openai: + version: ==1.11.0 + docstring-parser: + version: ==0.15 + readability-lxml: + version: ==0.8.1 + pypdf2: + version: ==3.0.1 + numpy: + version: '>=1.19.0' + spacy: + version: ==3.7.2 diff --git a/packages/valory/customs/prediction_request/prediction_request.py b/packages/valory/customs/prediction_request/prediction_request.py index 97b9d45a..53eed143 100644 --- a/packages/valory/customs/prediction_request/prediction_request.py +++ b/packages/valory/customs/prediction_request/prediction_request.py @@ -73,7 +73,7 @@ def count_tokens(text: str, model: str) -> int: DEFAULT_OPENAI_SETTINGS = { "max_tokens": 500, - "temperature": 0.7, + "temperature": 0., } ALLOWED_TOOLS = [ 
"prediction-offline", @@ -417,7 +417,7 @@ def adjust_additional_information( return additional_information -def run(**kwargs) -> Tuple[Optional[str], Any, Optional[Dict[str, Any]], Any]: +def run(**kwargs) -> Tuple[str, Optional[str], Optional[Dict[str, Any]], Any]: """Run the task""" with OpenAIClientManager(kwargs["api_keys"]["openai"]): tool = kwargs["tool"] diff --git a/packages/valory/customs/prediction_request_claude/component.yaml b/packages/valory/customs/prediction_request_claude/component.yaml index 59d7b90f..beef306f 100644 --- a/packages/valory/customs/prediction_request_claude/component.yaml +++ b/packages/valory/customs/prediction_request_claude/component.yaml @@ -7,8 +7,19 @@ license: Apache-2.0 aea_version: '>=1.0.0, <2.0.0' fingerprint: __init__.py: bafybeibbn67pnrrm4qm3n3kbelvbs3v7fjlrjniywmw2vbizarippidtvi - prediction_request_claude.py: bafybeienihjropcwp6ctjhmw4mh64p5mpscwwopz5hhcxx7w4en7k3md7q + prediction_request_claude.py: bafybeihwoeu7vhwqibzif46mtgw4jgbiphp3qfo3da4aeqe4nnbrzs3bqu fingerprint_ignore_patterns: [] entry_point: prediction_request_claude.py callable: run -dependencies: {} +dependencies: + google-api-python-client: + version: ==2.95.0 + googlesearch-python: + version: ==1.2.3 + requests: {} + markdownify: + version: ==0.11.6 + readability-lxml: + version: ==0.8.1 + anthropic: + version: ==0.3.11 diff --git a/packages/valory/customs/prediction_request_claude/prediction_request_claude.py b/packages/valory/customs/prediction_request_claude/prediction_request_claude.py index 550cd216..6e76050d 100644 --- a/packages/valory/customs/prediction_request_claude/prediction_request_claude.py +++ b/packages/valory/customs/prediction_request_claude/prediction_request_claude.py @@ -35,7 +35,7 @@ DEFAULT_NUM_WORDS = 300 DEFAULT_OPENAI_SETTINGS = { "max_tokens": 500, - "temperature": 0.7, + "temperature": 0., } ALLOWED_TOOLS = [ "claude-prediction-offline", @@ -278,7 +278,7 @@ def fetch_additional_information( return additional_information, counter_callback -def run(**kwargs) -> Tuple[Optional[str], Any, Optional[Dict[str, Any]], Any]: +def run(**kwargs) -> Tuple[str, Optional[str], Optional[Dict[str, Any]], Any]: """Run the task""" tool = kwargs["tool"] prompt = kwargs["prompt"] diff --git a/packages/valory/customs/prediction_request_embedding/component.yaml b/packages/valory/customs/prediction_request_embedding/component.yaml index 64112333..46fbdd85 100644 --- a/packages/valory/customs/prediction_request_embedding/component.yaml +++ b/packages/valory/customs/prediction_request_embedding/component.yaml @@ -12,4 +12,19 @@ fingerprint: fingerprint_ignore_patterns: [] entry_point: prediction_sentence_embedding.py callable: run -dependencies: {} +dependencies: + openai: + version: ==1.11.0 + tiktoken: + version: ==0.5.1 + requests: {} + google-api-python-client: + version: ==2.95.0 + googlesearch-python: + version: ==1.2.3 + python-dateutil: + version: ==2.8.2 + beautifulsoup4: + version: ==4.12.2 + spacy: + version: ==3.7.2 diff --git a/packages/valory/customs/resolve_market/component.yaml b/packages/valory/customs/resolve_market/component.yaml index 95fcaca1..4d8b37c9 100644 --- a/packages/valory/customs/resolve_market/component.yaml +++ b/packages/valory/customs/resolve_market/component.yaml @@ -11,4 +11,7 @@ fingerprint: fingerprint_ignore_patterns: [] entry_point: resolve_market.py callable: run -dependencies: {} +dependencies: + openai: + version: ==1.11.0 + requests: {} diff --git a/packages/valory/customs/stability_ai_request/component.yaml 
b/packages/valory/customs/stability_ai_request/component.yaml index a0f0a62b..e1acf75f 100644 --- a/packages/valory/customs/stability_ai_request/component.yaml +++ b/packages/valory/customs/stability_ai_request/component.yaml @@ -11,4 +11,7 @@ fingerprint: fingerprint_ignore_patterns: [] entry_point: stabilityai_request.py callable: run -dependencies: {} +dependencies: + requests: {} + tiktoken: + version: ==0.5.1 diff --git a/packages/valory/services/mech/service.yaml b/packages/valory/services/mech/service.yaml index 4137c400..fbaf547b 100644 --- a/packages/valory/services/mech/service.yaml +++ b/packages/valory/services/mech/service.yaml @@ -7,7 +7,7 @@ license: Apache-2.0 fingerprint: README.md: bafybeif7ia4jdlazy6745ke2k2x5yoqlwsgwr6sbztbgqtwvs3ndm2p7ba fingerprint_ignore_patterns: [] -agent: valory/mech:0.1.0:bafybeifrlwdaqfnychkmnpirix43zfbamgukkpziyuy7erze7owtbiuqc4 +agent: valory/mech:0.1.0:bafybeia6suywbe4ptwtrmccbgiotf5rmzz6xusqbmqclzebk6sbkgtwku4 number_of_agents: 4 deployment: agent: @@ -27,13 +27,17 @@ type: skill models: params: args: + gas_params: &id001 + gas_price: ${GAS_PRICE:int:null} + max_fee_per_gas: ${MAX_FEE_PER_GAS:int:null} + max_priority_fee_per_gas: ${MAX_PRIORITY_FEE_PER_GAS:int:null} multisend_address: ${MULTISEND_ADDRESS:str:0xA238CBeb142c10Ef7Ad8442C6D1f9E89e07e7761} on_chain_service_id: ${ON_CHAIN_SERVICE_ID:int:null} reset_pause_duration: ${RESET_PAUSE_DURATION:int:10} round_timeout_seconds: ${ROUND_TIMEOUT:float:150.0} use_polling: ${USE_POLLING:bool:false} service_registry_address: ${SERVICE_REGISTRY_ADDRESS:str:0x0000000000000000000000000000000000000000} - setup: &id001 + setup: &id002 all_participants: ${ALL_PARTICIPANTS:list:[]} safe_contract_address: ${SAFE_CONTRACT_ADDRESS:str:0x0000000000000000000000000000000000000000} consensus_threshold: ${CONSENSUS_THRESHOLD:int:null} @@ -65,6 +69,7 @@ type: skill models: params: args: + gas_params: *id001 multisend_address: ${MULTISEND_ADDRESS:str:0xA238CBeb142c10Ef7Ad8442C6D1f9E89e07e7761} on_chain_service_id: ${ON_CHAIN_SERVICE_ID:int:null} reset_pause_duration: ${RESET_PAUSE_DURATION:int:10} @@ -72,7 +77,7 @@ type: skill round_timeout_seconds: ${ROUND_TIMEOUT:float:150.0} use_polling: ${USE_POLLING:bool:false} service_registry_address: ${SERVICE_REGISTRY_ADDRESS:str:0x0000000000000000000000000000000000000000} - setup: *id001 + setup: *id002 share_tm_config_on_startup: ${USE_ACN:bool:false} tendermint_com_url: ${TENDERMINT_COM_URL:str:http://localhost:8080} tendermint_url: ${TENDERMINT_URL:str:http://localhost:26657} @@ -100,6 +105,7 @@ type: skill models: params: args: + gas_params: *id001 multisend_address: ${MULTISEND_ADDRESS:str:0xA238CBeb142c10Ef7Ad8442C6D1f9E89e07e7761} on_chain_service_id: ${ON_CHAIN_SERVICE_ID:int:null} reset_pause_duration: ${RESET_PAUSE_DURATION:int:10} @@ -107,7 +113,7 @@ type: skill use_polling: ${USE_POLLING:bool:false} manual_gas_limit: ${MANUAL_GAS_LIMIT:int:1000000} service_registry_address: ${SERVICE_REGISTRY_ADDRESS:str:0x0000000000000000000000000000000000000000} - setup: *id001 + setup: *id002 share_tm_config_on_startup: ${USE_ACN:bool:false} tendermint_com_url: ${TENDERMINT_COM_URL:str:http://localhost:8080} termination_from_block: ${TERMINATION_FROM_BLOCK:int:0} @@ -135,13 +141,14 @@ type: skill models: params: args: + gas_params: *id001 multisend_address: ${MULTISEND_ADDRESS:str:0xA238CBeb142c10Ef7Ad8442C6D1f9E89e07e7761} on_chain_service_id: ${ON_CHAIN_SERVICE_ID:int:null} reset_pause_duration: ${RESET_PAUSE_DURATION:int:10} round_timeout_seconds: 
${ROUND_TIMEOUT:float:150.0} use_polling: ${USE_POLLING:bool:false} service_registry_address: ${SERVICE_REGISTRY_ADDRESS:str:0x0000000000000000000000000000000000000000} - setup: *id001 + setup: *id002 share_tm_config_on_startup: ${USE_ACN:bool:false} manual_gas_limit: ${MANUAL_GAS_LIMIT:int:1000000} tendermint_com_url: ${TENDERMINT_COM_URL:str:http://localhost:8080} diff --git a/packages/valory/skills/mech_abci/skill.yaml b/packages/valory/skills/mech_abci/skill.yaml index e11706fa..b2ebb954 100644 --- a/packages/valory/skills/mech_abci/skill.yaml +++ b/packages/valory/skills/mech_abci/skill.yaml @@ -20,13 +20,13 @@ contracts: [] protocols: - valory/http:1.0.0:bafybeifugzl63kfdmwrxwphrnrhj7bn6iruxieme3a4ntzejf6kmtuwmae skills: -- valory/abstract_round_abci:0.1.0:bafybeigjrepaqpb3m7zunmt4hryos4vto4yyj3u6iyofdb2fotwho3bqvm -- valory/registration_abci:0.1.0:bafybeif3ln6eg53ebrfe6uicjew4uqp2ynyrcxkw5wi4jm3ixqv3ykte4a -- valory/reset_pause_abci:0.1.0:bafybeicm7onl72rfnn33pbvzwjpkl5gafeieyobfcnyresxz7kunjwmqea -- valory/task_submission_abci:0.1.0:bafybeib6yijhh5ss7mufpjghjwobxjynuaz2mf52jo54dtpyer4ovowei4 -- valory/termination_abci:0.1.0:bafybeie6h7j4hyhgj2wte64n3xyudxq4pgqcqjmslxi5tff4mb6vce2tay -- valory/transaction_settlement_abci:0.1.0:bafybeid57tozt5f3kgzmu22nbr3c3oy4p7bi2bu66rqsgnlylq6xgh2ixe -- valory/subscription_abci:0.1.0:bafybeiclwa2u24nv4yzke6s24qxz2uxc6tpniyjxzprlb7oinkiijdhlgy +- valory/abstract_round_abci:0.1.0:bafybeih3enhagoql7kzpeyzzu2scpkif6y3ubakpralfnwxcvxexdyvy5i +- valory/registration_abci:0.1.0:bafybeiek7zcsxbucjwzgqfftafhfrocvc7q4yxllh2q44jeemsjxg3rcfm +- valory/reset_pause_abci:0.1.0:bafybeidw4mbx3os3hmv7ley7b3g3gja7ydpitr7mxbjpwzxin2mzyt5yam +- valory/task_submission_abci:0.1.0:bafybeidmkzpqpvyol3636eeprkloy3z3t2nipmwvu6da3dtk2q4tjlab6u +- valory/termination_abci:0.1.0:bafybeihq6qtbwt6i53ayqym63vhjexkcppy26gguzhhjqywfmiuqghvv44 +- valory/transaction_settlement_abci:0.1.0:bafybeigtzlk4uakmd54rxnznorcrstsr52kta474lgrnvx5ovr546vj7sq +- valory/subscription_abci:0.1.0:bafybeidoqeznyhbh3znaqbfdnftzq6fdh77m35qgftdwz46nz2iwda4yam behaviours: main: args: {} @@ -129,6 +129,10 @@ models: request_timeout: 10.0 reset_pause_duration: 10 reset_period_count: 100 + gas_params: + gas_price: null + max_fee_per_gas: null + max_priority_fee_per_gas: null reset_tendermint_after: 10 retry_attempts: 400 retry_timeout: 3 @@ -199,5 +203,5 @@ models: class_name: TendermintDialogues dependencies: open-aea-cli-ipfs: - version: ==1.48.0 + version: ==1.50.0 is_abstract: false diff --git a/packages/valory/skills/subscription_abci/skill.yaml b/packages/valory/skills/subscription_abci/skill.yaml index dea8a257..499db604 100644 --- a/packages/valory/skills/subscription_abci/skill.yaml +++ b/packages/valory/skills/subscription_abci/skill.yaml @@ -18,15 +18,15 @@ fingerprint: fingerprint_ignore_patterns: [] connections: [] contracts: -- valory/agent_mech:0.1.0:bafybeicbhvrlug56qvwlh6b4y35xg6fytvtahhaae7xm7jssecbie576mu -- valory/gnosis_safe:0.1.0:bafybeictjc7saviboxbsdcey3trvokrgo7uoh76mcrxecxhlvcrp47aqg4 +- valory/agent_mech:0.1.0:bafybeidsau5x2vjofpcdzxkg7airwkrdag65ohtxcby2ut27tfjizgnrnm +- valory/gnosis_safe:0.1.0:bafybeibq77mgzhyb23blf2eqmia3kc6io5karedfzhntvpcebeqdzrgyqa - valory/multisend:0.1.0:bafybeig5byt5urg2d2bsecufxe5ql7f4mezg3mekfleeh32nmuusx66p4y protocols: - valory/acn_data_share:0.1.0:bafybeih5ydonnvrwvy2ygfqgfabkr47s4yw3uqxztmwyfprulwfsoe7ipq - valory/contract_api:1.0.0:bafybeidgu7o5llh26xp3u3ebq3yluull5lupiyeu6iooi2xyymdrgnzq5i skills: -- 
valory/abstract_round_abci:0.1.0:bafybeigjrepaqpb3m7zunmt4hryos4vto4yyj3u6iyofdb2fotwho3bqvm -- valory/transaction_settlement_abci:0.1.0:bafybeid57tozt5f3kgzmu22nbr3c3oy4p7bi2bu66rqsgnlylq6xgh2ixe +- valory/abstract_round_abci:0.1.0:bafybeih3enhagoql7kzpeyzzu2scpkif6y3ubakpralfnwxcvxexdyvy5i +- valory/transaction_settlement_abci:0.1.0:bafybeigtzlk4uakmd54rxnznorcrstsr52kta474lgrnvx5ovr546vj7sq behaviours: main: args: {} diff --git a/packages/valory/skills/task_execution/skill.yaml b/packages/valory/skills/task_execution/skill.yaml index b6e1b86e..91045a6f 100644 --- a/packages/valory/skills/task_execution/skill.yaml +++ b/packages/valory/skills/task_execution/skill.yaml @@ -12,17 +12,17 @@ fingerprint: handlers.py: bafybeidbt5ezj74cgfogk3w4uw4si2grlnk5g54veyumw7g5yh6gdscywu models.py: bafybeid6befxrrbiaw7nduz4zgbm5nfc246fn2eb6rfmja6v5hmq4wtcwe utils/__init__.py: bafybeiccdijaigu6e5p2iruwo5mkk224o7ywedc7nr6xeu5fpmhjqgk24e - utils/benchmarks.py: bafybeibdwt4svz24ahok4x4h2rpeotlmlmvifccd27oizsz5bjwj6dqree + utils/benchmarks.py: bafybeidxh3zw4ac3xwt2kwsqwc5bwzkzuovvxin2wyzqwzi2apcpv4ed2i utils/cost_calculation.py: bafybeighafxied73w3mcmgziwfp3u2x6t4qlztw4kyekyq2ddgyhdge74q utils/ipfs.py: bafybeic7cbuv3tomi2xv7h2qowrqnpoufpanngzlgzljl4ptimpss3meqm utils/task.py: bafybeicb6nqd475ul6mz4hcexpva33ivkn4fygicgmlb4clu5cuzr34diy fingerprint_ignore_patterns: [] connections: - valory/ledger:0.19.0:bafybeic3ft7l7ca3qgnderm4xupsfmyoihgi27ukotnz7b5hdczla2enya -- valory/ipfs:0.1.0:bafybeiflaxrnepfn4hcnq5pieuc7ki7d422y3iqb54lv4tpgs7oywnuhhq +- valory/ipfs:0.1.0:bafybeihndk6hohj3yncgrye5pw7b7w2kztj3avby5u5mfk2fpjh7hqphii - valory/p2p_libp2p_client:0.1.0:bafybeid3xg5k2ol5adflqloy75ibgljmol6xsvzvezebsg7oudxeeolz7e contracts: -- valory/agent_mech:0.1.0:bafybeicbhvrlug56qvwlh6b4y35xg6fytvtahhaae7xm7jssecbie576mu +- valory/agent_mech:0.1.0:bafybeidsau5x2vjofpcdzxkg7airwkrdag65ohtxcby2ut27tfjizgnrnm protocols: - valory/acn_data_share:0.1.0:bafybeih5ydonnvrwvy2ygfqgfabkr47s4yw3uqxztmwyfprulwfsoe7ipq - valory/contract_api:1.0.0:bafybeidgu7o5llh26xp3u3ebq3yluull5lupiyeu6iooi2xyymdrgnzq5i diff --git a/packages/valory/skills/task_execution/utils/benchmarks.py b/packages/valory/skills/task_execution/utils/benchmarks.py index 26c26b85..58ddd1f2 100644 --- a/packages/valory/skills/task_execution/utils/benchmarks.py +++ b/packages/valory/skills/task_execution/utils/benchmarks.py @@ -29,9 +29,11 @@ class TokenCounterCallback: """Callback to count the number of tokens used in a generation.""" TOKEN_PRICES = { - "gpt-3.5-turbo": {"input": 0.001, "output": 0.002}, + "gpt-3.5-turbo": {"input": 0.0005, "output": 0.0015}, + "gpt-3.5-turbo-0125": {"input": 0.0005, "output": 0.0015}, "gpt-4": {"input": 0.03, "output": 0.06}, - "gpt-4-turbo": {"input": 0.01, "output": 0.03}, + "gpt-4-turbo-preview": {"input": 0.01, "output": 0.03}, + "gpt-4-0125-preview": {"input": 0.01, "output": 0.03}, "claude-2": {"input": 0.008, "output": 0.024}, } diff --git a/packages/valory/skills/task_submission_abci/skill.yaml b/packages/valory/skills/task_submission_abci/skill.yaml index 14b7a4b8..e93fc33d 100644 --- a/packages/valory/skills/task_submission_abci/skill.yaml +++ b/packages/valory/skills/task_submission_abci/skill.yaml @@ -19,19 +19,19 @@ fingerprint: fingerprint_ignore_patterns: [] connections: [] contracts: -- valory/agent_mech:0.1.0:bafybeicbhvrlug56qvwlh6b4y35xg6fytvtahhaae7xm7jssecbie576mu +- valory/agent_mech:0.1.0:bafybeidsau5x2vjofpcdzxkg7airwkrdag65ohtxcby2ut27tfjizgnrnm - 
valory/agent_registry:0.1.0:bafybeiargayav6yiztdnwzejoejstcx4idssch2h4f5arlgtzj3tgsgfmu -- valory/gnosis_safe:0.1.0:bafybeictjc7saviboxbsdcey3trvokrgo7uoh76mcrxecxhlvcrp47aqg4 +- valory/gnosis_safe:0.1.0:bafybeibq77mgzhyb23blf2eqmia3kc6io5karedfzhntvpcebeqdzrgyqa - valory/multisend:0.1.0:bafybeig5byt5urg2d2bsecufxe5ql7f4mezg3mekfleeh32nmuusx66p4y -- valory/service_registry:0.1.0:bafybeiby5x4wfdywlenmoudbykdxohpq2nifqxfep5niqgxrjyrekyahzy -- valory/hash_checkpoint:0.1.0:bafybeianr3zy3bb464jwlwwxtk2daeeh4cefy7ihw5bfn7zrn7vjs3fyga +- valory/service_registry:0.1.0:bafybeicbxmbzt757lbmyh6762lrkcrp3oeum6dk3z7pvosixasifsk6xlm +- valory/hash_checkpoint:0.1.0:bafybeigv2bceirhy72yajxzibi4a5wrcfptfbkjbzzko6pqdq2f4dzr3xa protocols: - valory/acn_data_share:0.1.0:bafybeih5ydonnvrwvy2ygfqgfabkr47s4yw3uqxztmwyfprulwfsoe7ipq - valory/contract_api:1.0.0:bafybeidgu7o5llh26xp3u3ebq3yluull5lupiyeu6iooi2xyymdrgnzq5i - valory/ledger_api:1.0.0:bafybeihdk6psr4guxmbcrc26jr2cbgzpd5aljkqvpwo64bvaz7tdti2oni skills: -- valory/abstract_round_abci:0.1.0:bafybeigjrepaqpb3m7zunmt4hryos4vto4yyj3u6iyofdb2fotwho3bqvm -- valory/transaction_settlement_abci:0.1.0:bafybeid57tozt5f3kgzmu22nbr3c3oy4p7bi2bu66rqsgnlylq6xgh2ixe +- valory/abstract_round_abci:0.1.0:bafybeih3enhagoql7kzpeyzzu2scpkif6y3ubakpralfnwxcvxexdyvy5i +- valory/transaction_settlement_abci:0.1.0:bafybeigtzlk4uakmd54rxnznorcrstsr52kta474lgrnvx5ovr546vj7sq behaviours: main: args: {} diff --git a/poetry.lock b/poetry.lock index 167404f9..968b25f5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -502,14 +502,14 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -804,64 +804,64 @@ requests = "*" [[package]] name = "coverage" -version = "7.4.2" +version = "7.4.4" description = "Code coverage measurement for Python" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bf54c3e089179d9d23900e3efc86d46e4431188d9a657f345410eecdd0151f50"}, - {file = "coverage-7.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fe6e43c8b510719b48af7db9631b5fbac910ade4bd90e6378c85ac5ac706382c"}, - {file = "coverage-7.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b98c89db1b150d851a7840142d60d01d07677a18f0f46836e691c38134ed18b"}, - {file = "coverage-7.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5f9683be6a5b19cd776ee4e2f2ffb411424819c69afab6b2db3a0a364ec6642"}, - {file = "coverage-7.4.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78cdcbf7b9cb83fe047ee09298e25b1cd1636824067166dc97ad0543b079d22f"}, - {file = "coverage-7.4.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2599972b21911111114100d362aea9e70a88b258400672626efa2b9e2179609c"}, - {file = 
"coverage-7.4.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ef00d31b7569ed3cb2036f26565f1984b9fc08541731ce01012b02a4c238bf03"}, - {file = "coverage-7.4.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:20a875bfd8c282985c4720c32aa05056f77a68e6d8bbc5fe8632c5860ee0b49b"}, - {file = "coverage-7.4.2-cp310-cp310-win32.whl", hash = "sha256:b3f2b1eb229f23c82898eedfc3296137cf1f16bb145ceab3edfd17cbde273fb7"}, - {file = "coverage-7.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7df95fdd1432a5d2675ce630fef5f239939e2b3610fe2f2b5bf21fa505256fa3"}, - {file = "coverage-7.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8ddbd158e069dded57738ea69b9744525181e99974c899b39f75b2b29a624e2"}, - {file = "coverage-7.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81a5fb41b0d24447a47543b749adc34d45a2cf77b48ca74e5bf3de60a7bd9edc"}, - {file = "coverage-7.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2412e98e70f16243be41d20836abd5f3f32edef07cbf8f407f1b6e1ceae783ac"}, - {file = "coverage-7.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb79414c15c6f03f56cc68fa06994f047cf20207c31b5dad3f6bab54a0f66ef"}, - {file = "coverage-7.4.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf89ab85027427d351f1de918aff4b43f4eb5f33aff6835ed30322a86ac29c9e"}, - {file = "coverage-7.4.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a178b7b1ac0f1530bb28d2e51f88c0bab3e5949835851a60dda80bff6052510c"}, - {file = "coverage-7.4.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:06fe398145a2e91edaf1ab4eee66149c6776c6b25b136f4a86fcbbb09512fd10"}, - {file = "coverage-7.4.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:18cac867950943fe93d6cd56a67eb7dcd2d4a781a40f4c1e25d6f1ed98721a55"}, - {file = "coverage-7.4.2-cp311-cp311-win32.whl", hash = "sha256:f72cdd2586f9a769570d4b5714a3837b3a59a53b096bb954f1811f6a0afad305"}, - {file = "coverage-7.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:d779a48fac416387dd5673fc5b2d6bd903ed903faaa3247dc1865c65eaa5a93e"}, - {file = "coverage-7.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:adbdfcda2469d188d79771d5696dc54fab98a16d2ef7e0875013b5f56a251047"}, - {file = "coverage-7.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ac4bab32f396b03ebecfcf2971668da9275b3bb5f81b3b6ba96622f4ef3f6e17"}, - {file = "coverage-7.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:006d220ba2e1a45f1de083d5022d4955abb0aedd78904cd5a779b955b019ec73"}, - {file = "coverage-7.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3733545eb294e5ad274abe131d1e7e7de4ba17a144505c12feca48803fea5f64"}, - {file = "coverage-7.4.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42a9e754aa250fe61f0f99986399cec086d7e7a01dd82fd863a20af34cbce962"}, - {file = "coverage-7.4.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:2ed37e16cf35c8d6e0b430254574b8edd242a367a1b1531bd1adc99c6a5e00fe"}, - {file = "coverage-7.4.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:b953275d4edfab6cc0ed7139fa773dfb89e81fee1569a932f6020ce7c6da0e8f"}, - {file = "coverage-7.4.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32b4ab7e6c924f945cbae5392832e93e4ceb81483fd6dc4aa8fb1a97b9d3e0e1"}, - {file = "coverage-7.4.2-cp312-cp312-win32.whl", hash = 
"sha256:f5df76c58977bc35a49515b2fbba84a1d952ff0ec784a4070334dfbec28a2def"}, - {file = "coverage-7.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:34423abbaad70fea9d0164add189eabaea679068ebdf693baa5c02d03e7db244"}, - {file = "coverage-7.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5b11f9c6587668e495cc7365f85c93bed34c3a81f9f08b0920b87a89acc13469"}, - {file = "coverage-7.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:51593a1f05c39332f623d64d910445fdec3d2ac2d96b37ce7f331882d5678ddf"}, - {file = "coverage-7.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69f1665165ba2fe7614e2f0c1aed71e14d83510bf67e2ee13df467d1c08bf1e8"}, - {file = "coverage-7.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3c8bbb95a699c80a167478478efe5e09ad31680931ec280bf2087905e3b95ec"}, - {file = "coverage-7.4.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:175f56572f25e1e1201d2b3e07b71ca4d201bf0b9cb8fad3f1dfae6a4188de86"}, - {file = "coverage-7.4.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8562ca91e8c40864942615b1d0b12289d3e745e6b2da901d133f52f2d510a1e3"}, - {file = "coverage-7.4.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d9a1ef0f173e1a19738f154fb3644f90d0ada56fe6c9b422f992b04266c55d5a"}, - {file = "coverage-7.4.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f40ac873045db4fd98a6f40387d242bde2708a3f8167bd967ccd43ad46394ba2"}, - {file = "coverage-7.4.2-cp38-cp38-win32.whl", hash = "sha256:d1b750a8409bec61caa7824bfd64a8074b6d2d420433f64c161a8335796c7c6b"}, - {file = "coverage-7.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b4ae777bebaed89e3a7e80c4a03fac434a98a8abb5251b2a957d38fe3fd30088"}, - {file = "coverage-7.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ff7f92ae5a456101ca8f48387fd3c56eb96353588e686286f50633a611afc95"}, - {file = "coverage-7.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:861d75402269ffda0b33af94694b8e0703563116b04c681b1832903fac8fd647"}, - {file = "coverage-7.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3507427d83fa961cbd73f11140f4a5ce84208d31756f7238d6257b2d3d868405"}, - {file = "coverage-7.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf711d517e21fb5bc429f5c4308fbc430a8585ff2a43e88540264ae87871e36a"}, - {file = "coverage-7.4.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c00e54f0bd258ab25e7f731ca1d5144b0bf7bec0051abccd2bdcff65fa3262c9"}, - {file = "coverage-7.4.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f8e845d894e39fb53834da826078f6dc1a933b32b1478cf437007367efaf6f6a"}, - {file = "coverage-7.4.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:840456cb1067dc350af9080298c7c2cfdddcedc1cb1e0b30dceecdaf7be1a2d3"}, - {file = "coverage-7.4.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c11ca2df2206a4e3e4c4567f52594637392ed05d7c7fb73b4ea1c658ba560265"}, - {file = "coverage-7.4.2-cp39-cp39-win32.whl", hash = "sha256:3ff5bdb08d8938d336ce4088ca1a1e4b6c8cd3bef8bb3a4c0eb2f37406e49643"}, - {file = "coverage-7.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:ac9e95cefcf044c98d4e2c829cd0669918585755dd9a92e28a1a7012322d0a95"}, - {file = "coverage-7.4.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:f593a4a90118d99014517c2679e04a4ef5aee2d81aa05c26c734d271065efcb6"}, - {file = "coverage-7.4.2.tar.gz", hash = 
"sha256:1a5ee18e3a8d766075ce9314ed1cb695414bae67df6a4b0805f5137d93d6f1cb"}, + {file = "coverage-7.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0be5efd5127542ef31f165de269f77560d6cdef525fffa446de6f7e9186cfb2"}, + {file = "coverage-7.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ccd341521be3d1b3daeb41960ae94a5e87abe2f46f17224ba5d6f2b8398016cf"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fa497a8ab37784fbb20ab699c246053ac294d13fc7eb40ec007a5043ec91f8"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1a93009cb80730c9bca5d6d4665494b725b6e8e157c1cb7f2db5b4b122ea562"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:690db6517f09336559dc0b5f55342df62370a48f5469fabf502db2c6d1cffcd2"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:09c3255458533cb76ef55da8cc49ffab9e33f083739c8bd4f58e79fecfe288f7"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ce1415194b4a6bd0cdcc3a1dfbf58b63f910dcb7330fe15bdff542c56949f87"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b91cbc4b195444e7e258ba27ac33769c41b94967919f10037e6355e998af255c"}, + {file = "coverage-7.4.4-cp310-cp310-win32.whl", hash = "sha256:598825b51b81c808cb6f078dcb972f96af96b078faa47af7dfcdf282835baa8d"}, + {file = "coverage-7.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:09ef9199ed6653989ebbcaacc9b62b514bb63ea2f90256e71fea3ed74bd8ff6f"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f9f50e7ef2a71e2fae92774c99170eb8304e3fdf9c8c3c7ae9bab3e7229c5cf"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:623512f8ba53c422fcfb2ce68362c97945095b864cda94a92edbaf5994201083"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0513b9508b93da4e1716744ef6ebc507aff016ba115ffe8ecff744d1322a7b63"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40209e141059b9370a2657c9b15607815359ab3ef9918f0196b6fccce8d3230f"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a2b2b78c78293782fd3767d53e6474582f62443d0504b1554370bde86cc8227"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:73bfb9c09951125d06ee473bed216e2c3742f530fc5acc1383883125de76d9cd"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f384c3cc76aeedce208643697fb3e8437604b512255de6d18dae3f27655a384"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54eb8d1bf7cacfbf2a3186019bcf01d11c666bd495ed18717162f7eb1e9dd00b"}, + {file = "coverage-7.4.4-cp311-cp311-win32.whl", hash = "sha256:cac99918c7bba15302a2d81f0312c08054a3359eaa1929c7e4b26ebe41e9b286"}, + {file = "coverage-7.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:b14706df8b2de49869ae03a5ccbc211f4041750cd4a66f698df89d44f4bd30ec"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:201bef2eea65e0e9c56343115ba3814e896afe6d36ffd37bab783261db430f76"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41c9c5f3de16b903b610d09650e5e27adbfa7f500302718c9ffd1c12cf9d6818"}, + {file = 
"coverage-7.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d898fe162d26929b5960e4e138651f7427048e72c853607f2b200909794ed978"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ea79bb50e805cd6ac058dfa3b5c8f6c040cb87fe83de10845857f5535d1db70"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b94265ca988c3f8e479e741693d143026632672e3ff924f25fab50518dd51"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00838a35b882694afda09f85e469c96367daa3f3f2b097d846a7216993d37f4c"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fdfafb32984684eb03c2d83e1e51f64f0906b11e64482df3c5db936ce3839d48"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:69eb372f7e2ece89f14751fbcbe470295d73ed41ecd37ca36ed2eb47512a6ab9"}, + {file = "coverage-7.4.4-cp312-cp312-win32.whl", hash = "sha256:137eb07173141545e07403cca94ab625cc1cc6bc4c1e97b6e3846270e7e1fea0"}, + {file = "coverage-7.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d71eec7d83298f1af3326ce0ff1d0ea83c7cb98f72b577097f9083b20bdaf05e"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ae728ff3b5401cc320d792866987e7e7e880e6ebd24433b70a33b643bb0384"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc4f1358cb0c78edef3ed237ef2c86056206bb8d9140e73b6b89fbcfcbdd40e1"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8130a2aa2acb8788e0b56938786c33c7c98562697bf9f4c7d6e8e5e3a0501e4a"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf271892d13e43bc2b51e6908ec9a6a5094a4df1d8af0bfc360088ee6c684409"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4cdc86d54b5da0df6d3d3a2f0b710949286094c3a6700c21e9015932b81447e"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae71e7ddb7a413dd60052e90528f2f65270aad4b509563af6d03d53e979feafd"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:38dd60d7bf242c4ed5b38e094baf6401faa114fc09e9e6632374388a404f98e7"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa5b1c1bfc28384f1f53b69a023d789f72b2e0ab1b3787aae16992a7ca21056c"}, + {file = "coverage-7.4.4-cp38-cp38-win32.whl", hash = "sha256:dfa8fe35a0bb90382837b238fff375de15f0dcdb9ae68ff85f7a63649c98527e"}, + {file = "coverage-7.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:b2991665420a803495e0b90a79233c1433d6ed77ef282e8e152a324bbbc5e0c8"}, + {file = "coverage-7.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b799445b9f7ee8bf299cfaed6f5b226c0037b74886a4e11515e569b36fe310d"}, + {file = "coverage-7.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b4d33f418f46362995f1e9d4f3a35a1b6322cb959c31d88ae56b0298e1c22357"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aadacf9a2f407a4688d700e4ebab33a7e2e408f2ca04dbf4aef17585389eff3e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c95949560050d04d46b919301826525597f07b33beba6187d04fa64d47ac82e"}, + {file = 
"coverage-7.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff7687ca3d7028d8a5f0ebae95a6e4827c5616b31a4ee1192bdfde697db110d4"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fc1de20b2d4a061b3df27ab9b7c7111e9a710f10dc2b84d33a4ab25065994ec"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c74880fc64d4958159fbd537a091d2a585448a8f8508bf248d72112723974cbd"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:742a76a12aa45b44d236815d282b03cfb1de3b4323f3e4ec933acfae08e54ade"}, + {file = "coverage-7.4.4-cp39-cp39-win32.whl", hash = "sha256:d89d7b2974cae412400e88f35d86af72208e1ede1a541954af5d944a8ba46c57"}, + {file = "coverage-7.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:9ca28a302acb19b6af89e90f33ee3e1906961f94b54ea37de6737b7ca9d8827c"}, + {file = "coverage-7.4.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b2c5edc4ac10a7ef6605a966c58929ec6c1bd0917fb8c15cb3363f65aa40e677"}, + {file = "coverage-7.4.4.tar.gz", hash = "sha256:c901df83d097649e257e803be22592aedfd5182f07b3cc87d640bbb9afd50f49"}, ] [package.dependencies] @@ -872,44 +872,44 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "42.0.4" +version = "42.0.5" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.4-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449"}, - {file = "cryptography-42.0.4-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18"}, - {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad9c385ba8ee025bb0d856714f71d7840020fe176ae0229de618f14dae7a6e2"}, - {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69b22ab6506a3fe483d67d1ed878e1602bdd5912a134e6202c1ec672233241c1"}, - {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e09469a2cec88fb7b078e16d4adec594414397e8879a4341c6ace96013463d5b"}, - {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3e970a2119507d0b104f0a8e281521ad28fc26f2820687b3436b8c9a5fcf20d1"}, - {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:e53dc41cda40b248ebc40b83b31516487f7db95ab8ceac1f042626bc43a2f992"}, - {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c3a5cbc620e1e17009f30dd34cb0d85c987afd21c41a74352d1719be33380885"}, - {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6bfadd884e7280df24d26f2186e4e07556a05d37393b0f220a840b083dc6a824"}, - {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:01911714117642a3f1792c7f376db572aadadbafcd8d75bb527166009c9f1d1b"}, - {file = "cryptography-42.0.4-cp37-abi3-win32.whl", hash = "sha256:fb0cef872d8193e487fc6bdb08559c3aa41b659a7d9be48b2e10747f47863925"}, - {file = "cryptography-42.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:c1f25b252d2c87088abc8bbc4f1ecbf7c919e05508a7e8628e6875c40bc70923"}, - {file = "cryptography-42.0.4-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:15a1fb843c48b4a604663fa30af60818cd28f895572386e5f9b8a665874c26e7"}, - {file = 
"cryptography-42.0.4-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1327f280c824ff7885bdeef8578f74690e9079267c1c8bd7dc5cc5aa065ae52"}, - {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ffb03d419edcab93b4b19c22ee80c007fb2d708429cecebf1dd3258956a563a"}, - {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:1df6fcbf60560d2113b5ed90f072dc0b108d64750d4cbd46a21ec882c7aefce9"}, - {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:44a64043f743485925d3bcac548d05df0f9bb445c5fcca6681889c7c3ab12764"}, - {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:3c6048f217533d89f2f8f4f0fe3044bf0b2090453b7b73d0b77db47b80af8dff"}, - {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6d0fbe73728c44ca3a241eff9aefe6496ab2656d6e7a4ea2459865f2e8613257"}, - {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:887623fe0d70f48ab3f5e4dbf234986b1329a64c066d719432d0698522749929"}, - {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ce8613beaffc7c14f091497346ef117c1798c202b01153a8cc7b8e2ebaaf41c0"}, - {file = "cryptography-42.0.4-cp39-abi3-win32.whl", hash = "sha256:810bcf151caefc03e51a3d61e53335cd5c7316c0a105cc695f0959f2c638b129"}, - {file = "cryptography-42.0.4-cp39-abi3-win_amd64.whl", hash = "sha256:a0298bdc6e98ca21382afe914c642620370ce0470a01e1bef6dd9b5354c36854"}, - {file = "cryptography-42.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f8907fcf57392cd917892ae83708761c6ff3c37a8e835d7246ff0ad251d9298"}, - {file = "cryptography-42.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:12d341bd42cdb7d4937b0cabbdf2a94f949413ac4504904d0cdbdce4a22cbf88"}, - {file = "cryptography-42.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1cdcdbd117681c88d717437ada72bdd5be9de117f96e3f4d50dab3f59fd9ab20"}, - {file = "cryptography-42.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0e89f7b84f421c56e7ff69f11c441ebda73b8a8e6488d322ef71746224c20fce"}, - {file = "cryptography-42.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f1e85a178384bf19e36779d91ff35c7617c885da487d689b05c1366f9933ad74"}, - {file = "cryptography-42.0.4-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d2a27aca5597c8a71abbe10209184e1a8e91c1fd470b5070a2ea60cafec35bcd"}, - {file = "cryptography-42.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4e36685cb634af55e0677d435d425043967ac2f3790ec652b2b88ad03b85c27b"}, - {file = "cryptography-42.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660"}, - {file = "cryptography-42.0.4.tar.gz", hash = "sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, 
+ {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, + {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, + {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, + {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, + {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, + {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, + {file = 
"cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, + {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, ] [package.dependencies] @@ -1273,14 +1273,14 @@ test = ["coverage", "hypothesis (>=4.18.0,<5)", "pytest (>=6.2.5,<7)", "pytest-x [[package]] name = "eth-hash" -version = "0.6.0" +version = "0.7.0" description = "eth-hash: The Ethereum hashing function, keccak256, sometimes (erroneously) called sha3" category = "main" optional = false python-versions = ">=3.8, <4" files = [ - {file = "eth-hash-0.6.0.tar.gz", hash = "sha256:ae72889e60db6acbb3872c288cfa02ed157f4c27630fcd7f9c8442302c31e478"}, - {file = "eth_hash-0.6.0-py3-none-any.whl", hash = "sha256:9f8daaa345764f8871dc461855049ac54ae4291d780279bce6fce7f24e3f17d3"}, + {file = "eth-hash-0.7.0.tar.gz", hash = "sha256:bacdc705bfd85dadd055ecd35fd1b4f846b671add101427e089a4ca2e8db310a"}, + {file = "eth_hash-0.7.0-py3-none-any.whl", hash = "sha256:b8d5a230a2b251f4a291e3164a23a14057c4a6de4b0aa4a16fa4dc9161b57e2f"}, ] [package.dependencies] @@ -1459,14 +1459,14 @@ files = [ [[package]] name = "fastapi" -version = "0.109.2" +version = "0.110.0" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "fastapi-0.109.2-py3-none-any.whl", hash = "sha256:2c9bab24667293b501cad8dd388c05240c850b58ec5876ee3283c47d6e1e3a4d"}, - {file = "fastapi-0.109.2.tar.gz", hash = "sha256:f3817eac96fe4f65a2ebb4baa000f394e55f5fccdaf7f75250804bc58f354f73"}, + {file = "fastapi-0.110.0-py3-none-any.whl", hash = "sha256:87a1f6fb632a218222c5984be540055346a8f5d8a68e8f6fb647b1dc9934de4b"}, + {file = "fastapi-0.110.0.tar.gz", hash = "sha256:266775f0dcc95af9d3ef39bad55cff525329a931d5fd51930aadd4f428bf7ff3"}, ] [package.dependencies] @@ -1479,19 +1479,19 @@ all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)" [[package]] name = "filelock" -version = "3.13.1" +version = "3.13.2" description = "A platform independent file lock." 
category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, - {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, + {file = "filelock-3.13.2-py3-none-any.whl", hash = "sha256:e4c33bc026ace328551af557d4d34f59566c98acd4ed66c13b4335f114f04f7a"}, + {file = "filelock-3.13.2.tar.gz", hash = "sha256:9e2106260b5f65600a31bc503721e3db7e64598bb406ebc5921aeaafe441ba34"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] typing = ["typing-extensions (>=4.8)"] [[package]] @@ -1518,14 +1518,14 @@ dotenv = ["python-dotenv"] [[package]] name = "flatbuffers" -version = "23.5.26" +version = "24.3.7" description = "The FlatBuffers serialization format for Python" category = "main" optional = false python-versions = "*" files = [ - {file = "flatbuffers-23.5.26-py2.py3-none-any.whl", hash = "sha256:c0ff356da363087b915fde4b8b45bdda73432fc17cddb3c8157472eab1422ad1"}, - {file = "flatbuffers-23.5.26.tar.gz", hash = "sha256:9ea1144cac05ce5d86e2859f431c6cd5e66cd9c78c558317c7955fb8d4c78d89"}, + {file = "flatbuffers-24.3.7-py2.py3-none-any.whl", hash = "sha256:80c4f5dcad0ee76b7e349671a0d657f2fbba927a0244f88dd3f5ed6a3694e1fc"}, + {file = "flatbuffers-24.3.7.tar.gz", hash = "sha256:0895c22b9a6019ff2f4de2e5e2f7cd15914043e6e7033a94c0c6369422690f22"}, ] [[package]] @@ -1617,14 +1617,14 @@ files = [ [[package]] name = "fsspec" -version = "2024.2.0" +version = "2024.3.1" description = "File-system specification" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "fsspec-2024.2.0-py3-none-any.whl", hash = "sha256:817f969556fa5916bc682e02ca2045f96ff7f586d45110fcb76022063ad2c7d8"}, - {file = "fsspec-2024.2.0.tar.gz", hash = "sha256:b6ad1a679f760dda52b1168c859d01b7b80648ea6f7f7c7f5a8a91dc3f3ecb84"}, + {file = "fsspec-2024.3.1-py3-none-any.whl", hash = "sha256:918d18d41bf73f0e2b261824baeb1b124bcf771767e3a26425cd7dec3332f512"}, + {file = "fsspec-2024.3.1.tar.gz", hash = "sha256:f39780e282d7d117ffb42bb96992f8a90795e4d0fb0f661a70ca39fe9c43ded9"}, ] [package.extras] @@ -1653,19 +1653,20 @@ tqdm = ["tqdm"] [[package]] name = "google-api-core" -version = "2.17.1" +version = "2.18.0" description = "Google API client core library" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-core-2.17.1.tar.gz", hash = "sha256:9df18a1f87ee0df0bc4eea2770ebc4228392d8cc4066655b320e2cfccb15db95"}, - {file = "google_api_core-2.17.1-py3-none-any.whl", hash = "sha256:610c5b90092c360736baccf17bd3efbcb30dd380e7a6dc28a71059edb8bd0d8e"}, + {file = "google-api-core-2.18.0.tar.gz", hash = "sha256:62d97417bfc674d6cef251e5c4d639a9655e00c45528c4364fbfebb478ce72a9"}, + {file = "google_api_core-2.18.0-py3-none-any.whl", hash = "sha256:5a63aa102e0049abe85b5b88cb9409234c1f70afcda21ce1e40b285b9629c1d6"}, ] [package.dependencies] google-auth = ">=2.14.1,<3.0.dev0" googleapis-common-protos = ">=1.56.2,<2.0.dev0" 
+proto-plus = ">=1.22.3,<2.0.0dev" protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" requests = ">=2.18.0,<3.0.0.dev0" @@ -1695,14 +1696,14 @@ uritemplate = ">=3.0.1,<5" [[package]] name = "google-auth" -version = "2.28.1" +version = "2.29.0" description = "Google Authentication Library" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "google-auth-2.28.1.tar.gz", hash = "sha256:34fc3046c257cedcf1622fc4b31fc2be7923d9b4d44973d481125ecc50d83885"}, - {file = "google_auth-2.28.1-py2.py3-none-any.whl", hash = "sha256:25141e2d7a14bfcba945f5e9827f98092716e99482562f15306e5b026e21aa72"}, + {file = "google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360"}, + {file = "google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415"}, ] [package.dependencies] @@ -1735,14 +1736,14 @@ httplib2 = ">=0.19.0" [[package]] name = "googleapis-common-protos" -version = "1.62.0" +version = "1.63.0" description = "Common protobufs used in Google APIs" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis-common-protos-1.62.0.tar.gz", hash = "sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, - {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, + {file = "googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e"}, + {file = "googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632"}, ] [package.dependencies] @@ -2080,14 +2081,14 @@ socks = ["socksio (>=1.0.0,<2.0.0)"] [[package]] name = "huggingface-hub" -version = "0.20.3" +version = "0.22.0" description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" category = "main" optional = false python-versions = ">=3.8.0" files = [ - {file = "huggingface_hub-0.20.3-py3-none-any.whl", hash = "sha256:d988ae4f00d3e307b0c80c6a05ca6dbb7edba8bba3079f74cda7d9c2e562a7b6"}, - {file = "huggingface_hub-0.20.3.tar.gz", hash = "sha256:94e7f8e074475fbc67d6a71957b678e1b4a74ff1b64a644fd6cbb83da962d05d"}, + {file = "huggingface_hub-0.22.0-py3-none-any.whl", hash = "sha256:72dea96299751699180184c06a4689e54cbfacecb1a3d08ac7a269c884bb17c3"}, + {file = "huggingface_hub-0.22.0.tar.gz", hash = "sha256:304f1e235c68c0a9f58bced47f13d6df241a5b4e3678f4981aa1e4f4bce63f6d"}, ] [package.dependencies] @@ -2100,15 +2101,17 @@ tqdm = ">=4.42.1" typing-extensions = ">=3.7.4.3" [package.extras] -all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.3.0)", 
"soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] cli = ["InquirerPy (==0.3.4)"] -dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.3.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] -inference = ["aiohttp", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)"] -quality = ["mypy (==1.5.1)", "ruff (>=0.1.3)"] +hf-transfer = ["hf-transfer (>=0.1.4)"] +inference = ["aiohttp", "minijinja (>=1.0)"] +quality = ["mypy (==1.5.1)", "ruff (>=0.3.0)"] tensorflow = ["graphviz", "pydot", "tensorflow"] -testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] -torch = ["torch"] +tensorflow-testing = ["keras (<3.0)", "tensorflow"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "minijinja (>=1.0)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] +torch = ["safetensors", "torch"] typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] [[package]] @@ -2172,19 +2175,19 @@ files = [ [[package]] name = "importlib-resources" -version = "6.1.1" +version = "6.4.0" description = "Read resources from Python packages" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_resources-6.1.1-py3-none-any.whl", hash = "sha256:e8bf90d8213b486f428c9c39714b920041cb02c184686a3dee24905aaa8105d6"}, - {file = "importlib_resources-6.1.1.tar.gz", hash = "sha256:3893a00122eafde6894c59914446a512f728a0c1a45f9bb9b63721b6bacf0b4a"}, + {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"}, + {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff", "zipp (>=3.17)"] +testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] [[package]] name = "iniconfig" @@ -2652,23 
+2655,22 @@ files = [ [[package]] name = "marshmallow" -version = "3.20.2" +version = "3.21.1" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "marshmallow-3.20.2-py3-none-any.whl", hash = "sha256:c21d4b98fee747c130e6bc8f45c4b3199ea66bc00c12ee1f639f0aeca034d5e9"}, - {file = "marshmallow-3.20.2.tar.gz", hash = "sha256:4c1daff273513dc5eb24b219a8035559dc573c8f322558ef85f5438ddd1236dd"}, + {file = "marshmallow-3.21.1-py3-none-any.whl", hash = "sha256:f085493f79efb0644f270a9bf2892843142d80d7174bbbd2f3713f2a589dc633"}, + {file = "marshmallow-3.21.1.tar.gz", hash = "sha256:4e65e9e0d80fc9e609574b9983cf32579f305c718afb30d7233ab818571768c3"}, ] [package.dependencies] packaging = ">=17.0" [package.extras] -dev = ["pre-commit (>=2.4,<4.0)", "pytest", "pytz", "simplejson", "tox"] -docs = ["alabaster (==0.7.15)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"] -lint = ["pre-commit (>=2.4,<4.0)"] +dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] +docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==4.0.0)", "sphinx-version-warning (==1.1.2)"] tests = ["pytest", "pytz", "simplejson"] [[package]] @@ -3011,37 +3013,37 @@ files = [ [[package]] name = "onnxruntime" -version = "1.17.0" +version = "1.17.1" description = "ONNX Runtime is a runtime accelerator for Machine Learning models" category = "main" optional = false python-versions = "*" files = [ - {file = "onnxruntime-1.17.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:d2b22a25a94109cc983443116da8d9805ced0256eb215c5e6bc6dcbabefeab96"}, - {file = "onnxruntime-1.17.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4c87d83c6f58d1af2675fc99e3dc810f2dbdb844bcefd0c1b7573632661f6fc"}, - {file = "onnxruntime-1.17.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dba55723bf9b835e358f48c98a814b41692c393eb11f51e02ece0625c756b797"}, - {file = "onnxruntime-1.17.0-cp310-cp310-win32.whl", hash = "sha256:ee48422349cc500273beea7607e33c2237909f58468ae1d6cccfc4aecd158565"}, - {file = "onnxruntime-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:f34cc46553359293854e38bdae2ab1be59543aad78a6317e7746d30e311110c3"}, - {file = "onnxruntime-1.17.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:16d26badd092c8c257fa57c458bb600d96dc15282c647ccad0ed7b2732e6c03b"}, - {file = "onnxruntime-1.17.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6f1273bebcdb47ed932d076c85eb9488bc4768fcea16d5f2747ca692fad4f9d3"}, - {file = "onnxruntime-1.17.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cb60fd3c2c1acd684752eb9680e89ae223e9801a9b0e0dc7b28adabe45a2e380"}, - {file = "onnxruntime-1.17.0-cp311-cp311-win32.whl", hash = "sha256:4b038324586bc905299e435f7c00007e6242389c856b82fe9357fdc3b1ef2bdc"}, - {file = "onnxruntime-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:93d39b3fa1ee01f034f098e1c7769a811a21365b4883f05f96c14a2b60c6028b"}, - {file = "onnxruntime-1.17.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:90c0890e36f880281c6c698d9bc3de2afbeee2f76512725ec043665c25c67d21"}, - {file = "onnxruntime-1.17.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7466724e809a40e986b1637cba156ad9fc0d1952468bc00f79ef340bc0199552"}, - {file = 
"onnxruntime-1.17.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d47bee7557a8b99c8681b6882657a515a4199778d6d5e24e924d2aafcef55b0a"}, - {file = "onnxruntime-1.17.0-cp312-cp312-win32.whl", hash = "sha256:bb1bf1ee575c665b8bbc3813ab906e091a645a24ccc210be7932154b8260eca1"}, - {file = "onnxruntime-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:ac2f286da3494b29b4186ca193c7d4e6a2c1f770c4184c7192c5da142c3dec28"}, - {file = "onnxruntime-1.17.0-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1ec485643b93e0a3896c655eb2426decd63e18a278bb7ccebc133b340723624f"}, - {file = "onnxruntime-1.17.0-cp38-cp38-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83c35809cda898c5a11911c69ceac8a2ac3925911854c526f73bad884582f911"}, - {file = "onnxruntime-1.17.0-cp38-cp38-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fa464aa4d81df818375239e481887b656e261377d5b6b9a4692466f5f3261edc"}, - {file = "onnxruntime-1.17.0-cp38-cp38-win32.whl", hash = "sha256:b7b337cd0586f7836601623cbd30a443df9528ef23965860d11c753ceeb009f2"}, - {file = "onnxruntime-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:fbb9faaf51d01aa2c147ef52524d9326744c852116d8005b9041809a71838878"}, - {file = "onnxruntime-1.17.0-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:5a06ab84eaa350bf64b1d747b33ccf10da64221ed1f38f7287f15eccbec81603"}, - {file = "onnxruntime-1.17.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d3d11db2c8242766212a68d0b139745157da7ce53bd96ba349a5c65e5a02357"}, - {file = "onnxruntime-1.17.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5632077c3ab8b0cd4f74b0af9c4e924be012b1a7bcd7daa845763c6c6bf14b7d"}, - {file = "onnxruntime-1.17.0-cp39-cp39-win32.whl", hash = "sha256:61a12732cba869b3ad2d4e29ab6cb62c7a96f61b8c213f7fcb961ba412b70b37"}, - {file = "onnxruntime-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:461fa0fc7d9c392c352b6cccdedf44d818430f3d6eacd924bb804fdea2dcfd02"}, + {file = "onnxruntime-1.17.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:d43ac17ac4fa3c9096ad3c0e5255bb41fd134560212dc124e7f52c3159af5d21"}, + {file = "onnxruntime-1.17.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:55b5e92a4c76a23981c998078b9bf6145e4fb0b016321a8274b1607bd3c6bd35"}, + {file = "onnxruntime-1.17.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ebbcd2bc3a066cf54e6f18c75708eb4d309ef42be54606d22e5bdd78afc5b0d7"}, + {file = "onnxruntime-1.17.1-cp310-cp310-win32.whl", hash = "sha256:5e3716b5eec9092e29a8d17aab55e737480487deabfca7eac3cd3ed952b6ada9"}, + {file = "onnxruntime-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:fbb98cced6782ae1bb799cc74ddcbbeeae8819f3ad1d942a74d88e72b6511337"}, + {file = "onnxruntime-1.17.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:36fd6f87a1ecad87e9c652e42407a50fb305374f9a31d71293eb231caae18784"}, + {file = "onnxruntime-1.17.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:99a8bddeb538edabc524d468edb60ad4722cff8a49d66f4e280c39eace70500b"}, + {file = "onnxruntime-1.17.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd7fddb4311deb5a7d3390cd8e9b3912d4d963efbe4dfe075edbaf18d01c024e"}, + {file = "onnxruntime-1.17.1-cp311-cp311-win32.whl", hash = "sha256:606a7cbfb6680202b0e4f1890881041ffc3ac6e41760a25763bd9fe146f0b335"}, + {file = "onnxruntime-1.17.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:53e4e06c0a541696ebdf96085fd9390304b7b04b748a19e02cf3b35c869a1e76"}, + {file = "onnxruntime-1.17.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:40f08e378e0f85929712a2b2c9b9a9cc400a90c8a8ca741d1d92c00abec60843"}, + {file = "onnxruntime-1.17.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ac79da6d3e1bb4590f1dad4bb3c2979d7228555f92bb39820889af8b8e6bd472"}, + {file = "onnxruntime-1.17.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ae9ba47dc099004e3781f2d0814ad710a13c868c739ab086fc697524061695ea"}, + {file = "onnxruntime-1.17.1-cp312-cp312-win32.whl", hash = "sha256:2dff1a24354220ac30e4a4ce2fb1df38cb1ea59f7dac2c116238d63fe7f4c5ff"}, + {file = "onnxruntime-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:6226a5201ab8cafb15e12e72ff2a4fc8f50654e8fa5737c6f0bd57c5ff66827e"}, + {file = "onnxruntime-1.17.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:cd0c07c0d1dfb8629e820b05fda5739e4835b3b82faf43753d2998edf2cf00aa"}, + {file = "onnxruntime-1.17.1-cp38-cp38-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:617ebdf49184efa1ba6e4467e602fbfa029ed52c92f13ce3c9f417d303006381"}, + {file = "onnxruntime-1.17.1-cp38-cp38-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9dae9071e3facdf2920769dceee03b71c684b6439021defa45b830d05e148924"}, + {file = "onnxruntime-1.17.1-cp38-cp38-win32.whl", hash = "sha256:835d38fa1064841679433b1aa8138b5e1218ddf0cfa7a3ae0d056d8fd9cec713"}, + {file = "onnxruntime-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:96621e0c555c2453bf607606d08af3f70fbf6f315230c28ddea91754e17ad4e6"}, + {file = "onnxruntime-1.17.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:7a9539935fb2d78ebf2cf2693cad02d9930b0fb23cdd5cf37a7df813e977674d"}, + {file = "onnxruntime-1.17.1-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:45c6a384e9d9a29c78afff62032a46a993c477b280247a7e335df09372aedbe9"}, + {file = "onnxruntime-1.17.1-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4e19f966450f16863a1d6182a685ca33ae04d7772a76132303852d05b95411ea"}, + {file = "onnxruntime-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e2ae712d64a42aac29ed7a40a426cb1e624a08cfe9273dcfe681614aa65b07dc"}, + {file = "onnxruntime-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:f7e9f7fb049825cdddf4a923cfc7c649d84d63c0134315f8e0aa9e0c3004672c"}, ] [package.dependencies] @@ -3054,19 +3056,19 @@ sympy = "*" [[package]] name = "open-aea" -version = "1.48.0" +version = "1.50.0" description = "Open Autonomous Economic Agent framework (without vendor lock-in)" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "open-aea-1.48.0.tar.gz", hash = "sha256:68d4e2e30e2ba744e5671e3bba6e0d19f952d6f3ebe81dc9e875ab4b83d47fb8"}, - {file = "open_aea-1.48.0-py3-none-any.whl", hash = "sha256:4ada92470bf5cdf1e36137853e21962feb69c298569fe6e1b8cb0b4ce43ab762"}, - {file = "open_aea-1.48.0-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:f3403fb0262c404458aeb37195111c5dd64c4746c7a00f0aa2c2e5f9a29413f3"}, - {file = "open_aea-1.48.0-py3-none-manylinux1_x86_64.whl", hash = "sha256:d7e3d84dd4bddf63f5f71e2feb6cf12c7885e2e095991fe7dd3b778000008d0b"}, - {file = "open_aea-1.48.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:5ef42189f2abf9929e7cf5ca88249a85ff02f02a6dd27442cbacedd77286faff"}, - {file = "open_aea-1.48.0-py3-none-win32.whl", hash = "sha256:649a1b44878e303ed97bfac2e6e117d96df2aa71cdd95ec917df09dd562e0b37"}, - {file = 
"open_aea-1.48.0-py3-none-win_amd64.whl", hash = "sha256:87aa15517ffce1abc5b1806ac35d51436237e214ccc0d3edffe0132d03d99f60"}, + {file = "open-aea-1.50.0.tar.gz", hash = "sha256:7354fbdfc3246cce2a69d50b5b3e6441b28cbe3ed689b7186f9afa6d035a153f"}, + {file = "open_aea-1.50.0-py3-none-any.whl", hash = "sha256:0768e13dc2844e519bb3af93f71795db17724b2ad47f68ce483496757b2be4ba"}, + {file = "open_aea-1.50.0-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:b6824c9c1290fa2f9fc03f286ca80efd75c3a7e1f0affeb962d9396ef266870a"}, + {file = "open_aea-1.50.0-py3-none-manylinux1_x86_64.whl", hash = "sha256:59c899b073686cbffc9a1ec7086a7d7869597467e66028a01e9d7f8ac2a69d42"}, + {file = "open_aea-1.50.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0284f8fa6bfea5516b880cf89ef03fb671c61773be70dfa1abc4d8063c7726e8"}, + {file = "open_aea-1.50.0-py3-none-win32.whl", hash = "sha256:57d0e79690e76b774056d64632065de2779d199ae2801132ed89c11f4d797077"}, + {file = "open_aea-1.50.0-py3-none-win_amd64.whl", hash = "sha256:fc552754e9696e8df35598f5dab2d795119dbc0fc0ecbcec1f9cb75718f1db7e"}, ] [package.dependencies] @@ -3082,7 +3084,7 @@ py-multibase = ">=1.0.0" py-multicodec = ">=0.2.0" pymultihash = "0.8.2" pytest = {version = ">=7.0.0,<7.3.0", optional = true, markers = "extra == \"all\""} -python-dotenv = ">=0.14.0,<0.18.0" +python-dotenv = ">=0.14.0,<0.22.0" pyyaml = "6.0.1" requests = "2.28.1" semver = ">=2.9.1,<3.0.0" @@ -3094,14 +3096,14 @@ test-tools = ["click (==8.0.2)", "coverage (>=6.4.4,<8.0.0)", "jsonschema (>=4.3 [[package]] name = "open-aea-cli-ipfs" -version = "1.48.0" +version = "1.50.0" description = "CLI extension for open AEA framework wrapping IPFS functionality." category = "main" optional = false python-versions = "*" files = [ - {file = "open-aea-cli-ipfs-1.48.0.tar.gz", hash = "sha256:3b6dec90af92461744cfd2df7c1012261d8478a830bc06f9c2d6019e544fa173"}, - {file = "open_aea_cli_ipfs-1.48.0-py3-none-any.whl", hash = "sha256:ef99b0569325fd526629ec4aa0fb82cad61f8dcf087833e8bfffaa4c1672a3df"}, + {file = "open-aea-cli-ipfs-1.50.0.tar.gz", hash = "sha256:03c43d69a602f2cb82e6408fc145c15a63c3c30187daa0e8ceb70b77bdd48295"}, + {file = "open_aea_cli_ipfs-1.50.0-py3-none-any.whl", hash = "sha256:30060c6e317838a83b10e0460ab35400b205e827916290964606661bcf250f99"}, ] [package.dependencies] @@ -3111,14 +3113,14 @@ pytest = ">=7.0.0,<7.3.0" [[package]] name = "open-aea-ledger-cosmos" -version = "1.48.0" +version = "1.50.0" description = "Python package wrapping the public and private key cryptography and ledger api of Cosmos." category = "main" optional = false python-versions = "*" files = [ - {file = "open-aea-ledger-cosmos-1.48.0.tar.gz", hash = "sha256:8ad359133f7a6210822d348e4f21fe44c9a939375e772bef15ace5cefdd3662b"}, - {file = "open_aea_ledger_cosmos-1.48.0-py3-none-any.whl", hash = "sha256:1e1fe10fe3cf5595af4b41bf71aa0ad33edd9f57d09d63a61c2d2514767faee9"}, + {file = "open-aea-ledger-cosmos-1.50.0.tar.gz", hash = "sha256:62f55b88947cbd645b2c621f378305f614b13f25b67bc7fe9d6dccba67a28557"}, + {file = "open_aea_ledger_cosmos-1.50.0-py3-none-any.whl", hash = "sha256:b5c437c2f8d2a64e510dd40c47d7493c0049fa4e9cb9342308baa80cdea3e8c6"}, ] [package.dependencies] @@ -3130,14 +3132,14 @@ pycryptodome = ">=3.10.1,<4.0.0" [[package]] name = "open-aea-ledger-ethereum" -version = "1.48.0" +version = "1.50.0" description = "Python package wrapping the public and private key cryptography and ledger api of Ethereum." 
category = "main" optional = false python-versions = "*" files = [ - {file = "open-aea-ledger-ethereum-1.48.0.tar.gz", hash = "sha256:0d93065cc609ce1c885ecc4e259556315157fa6496f4eb56136c2bbac933dc5f"}, - {file = "open_aea_ledger_ethereum-1.48.0-py3-none-any.whl", hash = "sha256:402fa79d2a5824705bc8f389d0c7d54ccd7d2135fe554bf127a447b276b786a9"}, + {file = "open-aea-ledger-ethereum-1.50.0.tar.gz", hash = "sha256:d6784d604450d0fe9aeb008b2eb12bfc87d630391f8b82b591d67c1781adaaab"}, + {file = "open_aea_ledger_ethereum-1.50.0-py3-none-any.whl", hash = "sha256:619105843718d0b8270d9039749101eec3aead5c38d7041c541832e3d8e4826c"}, ] [package.dependencies] @@ -3148,32 +3150,32 @@ web3 = ">=6.0.0,<7" [[package]] name = "open-aea-test-autonomy" -version = "0.14.6" +version = "0.14.10" description = "Plugin containing test tools for open-autonomy packages." category = "main" optional = false python-versions = "*" files = [ - {file = "open-aea-test-autonomy-0.14.6.tar.gz", hash = "sha256:3d4acc87854003b0445140a83c58c00ccc5a9c2fc9b496bbcef81aa1bf8fce99"}, - {file = "open_aea_test_autonomy-0.14.6-py3-none-any.whl", hash = "sha256:b89eabb59ee4b5de97ef10e0a8c5ca0b9a30ee15b2fa7913d10be8c675a34101"}, + {file = "open-aea-test-autonomy-0.14.10.tar.gz", hash = "sha256:099018d48480f4b959f5c4c4daa0d2263d3a3ebb2d4db7a6787a68f50c61485a"}, + {file = "open_aea_test_autonomy-0.14.10-py3-none-any.whl", hash = "sha256:2ee08824bb1855f9f8365ae3803feb19da625d0fda3e3bec380c34b1380c8c7f"}, ] [package.dependencies] docker = "6.1.2" -open-aea = {version = ">=1.48.0,<2.0.0", extras = ["all"]} -open-aea-ledger-ethereum = ">=1.48.0,<2.0.0" +open-aea = {version = ">=1.50.0,<2.0.0", extras = ["all"]} +open-aea-ledger-ethereum = ">=1.50.0,<2.0.0" pytest = "7.2.1" [[package]] name = "open-autonomy" -version = "0.14.6" +version = "0.14.10" description = "A framework for the creation of autonomous agent services." 
category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "open-autonomy-0.14.6.tar.gz", hash = "sha256:c6ac014ec723af4f6342cf6f908a59d785dfd22679b4c3a3330de81c33dc8872"}, - {file = "open_autonomy-0.14.6-py3-none-any.whl", hash = "sha256:52e6613aad898631820a44b829bed08640f038303d037e7068f38e2df0d11cfa"}, + {file = "open-autonomy-0.14.10.tar.gz", hash = "sha256:346ecc4f0ccfea4c90e4b26f5e1bb13ab7c98351a57806b5f2430c011c4d8a5a"}, + {file = "open_autonomy-0.14.10-py3-none-any.whl", hash = "sha256:867d89bd2bb263143783e2f55202e27a0ffdc1e109909c4c64a7a087d5331f7a"}, ] [package.dependencies] @@ -3185,20 +3187,21 @@ Flask = ">=2.0.2,<3.0.0" gql = "3.5.0" hexbytes = "*" jsonschema = ">=4.3.0,<4.4.0" -open-aea = {version = "1.48.0", extras = ["all"]} -open-aea-cli-ipfs = "1.48.0" +open-aea = {version = "1.50.0", extras = ["all"]} +open-aea-cli-ipfs = "1.50.0" protobuf = ">=4.21.6,<4.25.0" pytest = "7.2.1" -python-dotenv = ">=0.14.5,<0.18.0" +python-dotenv = ">=0.14.5,<0.22.0" requests-toolbelt = "1.0.0" texttable = "1.6.7" +typing-extensions = ">=3.10.0.2" valory-docker-compose = "1.29.3" watchdog = ">=2.1.6" werkzeug = "2.0.3" [package.extras] -all = ["click (==8.0.2)", "coverage (>=6.4.4,<8.0.0)", "open-aea-cli-ipfs (==1.48.0)", "pytest (>=7.0.0,<7.3.0)", "python-dotenv (>=0.14.5,<0.18.0)", "texttable (==1.6.7)"] -cli = ["click (==8.0.2)", "coverage (>=6.4.4,<8.0.0)", "open-aea-cli-ipfs (==1.48.0)", "pytest (>=7.0.0,<7.3.0)", "python-dotenv (>=0.14.5,<0.18.0)", "texttable (==1.6.7)"] +all = ["click (==8.0.2)", "coverage (>=6.4.4,<8.0.0)", "open-aea-cli-ipfs (==1.50.0)", "pytest (>=7.0.0,<7.3.0)", "python-dotenv (>=0.14.5,<0.22.0)", "texttable (==1.6.7)"] +cli = ["click (==8.0.2)", "coverage (>=6.4.4,<8.0.0)", "open-aea-cli-ipfs (==1.50.0)", "pytest (>=7.0.0,<7.3.0)", "python-dotenv (>=0.14.5,<0.22.0)", "texttable (==1.6.7)"] [[package]] name = "openai" @@ -3387,14 +3390,14 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "posthog" -version = "3.4.2" +version = "3.5.0" description = "Integrate PostHog into any python application." category = "main" optional = false python-versions = "*" files = [ - {file = "posthog-3.4.2-py2.py3-none-any.whl", hash = "sha256:c7e79b2e585d16e93749874bcbcdad78d857037398ce0d8d6c474a04d0bd3bbe"}, - {file = "posthog-3.4.2.tar.gz", hash = "sha256:f0eafa663fbc4a942b49b6168a62a890635407044bbc7593051dcb9cc1208873"}, + {file = "posthog-3.5.0-py2.py3-none-any.whl", hash = "sha256:3c672be7ba6f95d555ea207d4486c171d06657eb34b3ce25eb043bfe7b6b5b76"}, + {file = "posthog-3.5.0.tar.gz", hash = "sha256:8f7e3b2c6e8714d0c0c542a2109b83a7549f63b7113a133ab2763a89245ef2ef"}, ] [package.dependencies] @@ -3456,6 +3459,24 @@ files = [ cymem = ">=2.0.2,<2.1.0" murmurhash = ">=0.28.0,<1.1.0" +[[package]] +name = "proto-plus" +version = "1.23.0" +description = "Beautiful, Pythonic protocol buffers." 
+category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "proto-plus-1.23.0.tar.gz", hash = "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2"}, + {file = "proto_plus-1.23.0-py3-none-any.whl", hash = "sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c"}, +] + +[package.dependencies] +protobuf = ">=3.19.0,<5.0.0dev" + +[package.extras] +testing = ["google-api-core[grpc] (>=1.31.5)"] + [[package]] name = "protobuf" version = "4.24.4" @@ -3679,19 +3700,19 @@ files = [ [[package]] name = "pydantic" -version = "2.6.1" +version = "2.6.4" description = "Data validation using Python type hints" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.6.1-py3-none-any.whl", hash = "sha256:0b6a909df3192245cb736509a92ff69e4fef76116feffec68e93a567347bae6f"}, - {file = "pydantic-2.6.1.tar.gz", hash = "sha256:4fd5c182a2488dc63e6d32737ff19937888001e2a6d86e94b3f233104a5d1fa9"}, + {file = "pydantic-2.6.4-py3-none-any.whl", hash = "sha256:cc46fce86607580867bdc3361ad462bab9c222ef042d3da86f2fb333e1d916c5"}, + {file = "pydantic-2.6.4.tar.gz", hash = "sha256:b1704e0847db01817624a6b86766967f552dd9dbf3afba4004409f908dcc84e6"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.16.2" +pydantic-core = "2.16.3" typing-extensions = ">=4.6.1" [package.extras] @@ -3699,91 +3720,91 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.16.2" +version = "2.16.3" description = "" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3fab4e75b8c525a4776e7630b9ee48aea50107fea6ca9f593c98da3f4d11bf7c"}, - {file = "pydantic_core-2.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8bde5b48c65b8e807409e6f20baee5d2cd880e0fad00b1a811ebc43e39a00ab2"}, - {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2924b89b16420712e9bb8192396026a8fbd6d8726224f918353ac19c4c043d2a"}, - {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16aa02e7a0f539098e215fc193c8926c897175d64c7926d00a36188917717a05"}, - {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:936a787f83db1f2115ee829dd615c4f684ee48ac4de5779ab4300994d8af325b"}, - {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:459d6be6134ce3b38e0ef76f8a672924460c455d45f1ad8fdade36796df1ddc8"}, - {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9ee4febb249c591d07b2d4dd36ebcad0ccd128962aaa1801508320896575ef"}, - {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40a0bd0bed96dae5712dab2aba7d334a6c67cbcac2ddfca7dbcc4a8176445990"}, - {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:870dbfa94de9b8866b37b867a2cb37a60c401d9deb4a9ea392abf11a1f98037b"}, - {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:308974fdf98046db28440eb3377abba274808bf66262e042c412eb2adf852731"}, - {file = "pydantic_core-2.16.2-cp310-none-win32.whl", hash = "sha256:a477932664d9611d7a0816cc3c0eb1f8856f8a42435488280dfbf4395e141485"}, - {file = "pydantic_core-2.16.2-cp310-none-win_amd64.whl", hash = "sha256:8f9142a6ed83d90c94a3efd7af8873bf7cefed2d3d44387bf848888482e2d25f"}, - 
{file = "pydantic_core-2.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:406fac1d09edc613020ce9cf3f2ccf1a1b2f57ab00552b4c18e3d5276c67eb11"}, - {file = "pydantic_core-2.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce232a6170dd6532096cadbf6185271e4e8c70fc9217ebe105923ac105da9978"}, - {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90fec23b4b05a09ad988e7a4f4e081711a90eb2a55b9c984d8b74597599180f"}, - {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8aafeedb6597a163a9c9727d8a8bd363a93277701b7bfd2749fbefee2396469e"}, - {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9957433c3a1b67bdd4c63717eaf174ebb749510d5ea612cd4e83f2d9142f3fc8"}, - {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0d7a9165167269758145756db43a133608a531b1e5bb6a626b9ee24bc38a8f7"}, - {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dffaf740fe2e147fedcb6b561353a16243e654f7fe8e701b1b9db148242e1272"}, - {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8ed79883b4328b7f0bd142733d99c8e6b22703e908ec63d930b06be3a0e7113"}, - {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cf903310a34e14651c9de056fcc12ce090560864d5a2bb0174b971685684e1d8"}, - {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46b0d5520dbcafea9a8645a8164658777686c5c524d381d983317d29687cce97"}, - {file = "pydantic_core-2.16.2-cp311-none-win32.whl", hash = "sha256:70651ff6e663428cea902dac297066d5c6e5423fda345a4ca62430575364d62b"}, - {file = "pydantic_core-2.16.2-cp311-none-win_amd64.whl", hash = "sha256:98dc6f4f2095fc7ad277782a7c2c88296badcad92316b5a6e530930b1d475ebc"}, - {file = "pydantic_core-2.16.2-cp311-none-win_arm64.whl", hash = "sha256:ef6113cd31411eaf9b39fc5a8848e71c72656fd418882488598758b2c8c6dfa0"}, - {file = "pydantic_core-2.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:88646cae28eb1dd5cd1e09605680c2b043b64d7481cdad7f5003ebef401a3039"}, - {file = "pydantic_core-2.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7b883af50eaa6bb3299780651e5be921e88050ccf00e3e583b1e92020333304b"}, - {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bf26c2e2ea59d32807081ad51968133af3025c4ba5753e6a794683d2c91bf6e"}, - {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99af961d72ac731aae2a1b55ccbdae0733d816f8bfb97b41909e143de735f522"}, - {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02906e7306cb8c5901a1feb61f9ab5e5c690dbbeaa04d84c1b9ae2a01ebe9379"}, - {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5362d099c244a2d2f9659fb3c9db7c735f0004765bbe06b99be69fbd87c3f15"}, - {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ac426704840877a285d03a445e162eb258924f014e2f074e209d9b4ff7bf380"}, - {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b94cbda27267423411c928208e89adddf2ea5dd5f74b9528513f0358bba019cb"}, - {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:6db58c22ac6c81aeac33912fb1af0e930bc9774166cdd56eade913d5f2fff35e"}, - {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396fdf88b1b503c9c59c84a08b6833ec0c3b5ad1a83230252a9e17b7dfb4cffc"}, - {file = "pydantic_core-2.16.2-cp312-none-win32.whl", hash = "sha256:7c31669e0c8cc68400ef0c730c3a1e11317ba76b892deeefaf52dcb41d56ed5d"}, - {file = "pydantic_core-2.16.2-cp312-none-win_amd64.whl", hash = "sha256:a3b7352b48fbc8b446b75f3069124e87f599d25afb8baa96a550256c031bb890"}, - {file = "pydantic_core-2.16.2-cp312-none-win_arm64.whl", hash = "sha256:a9e523474998fb33f7c1a4d55f5504c908d57add624599e095c20fa575b8d943"}, - {file = "pydantic_core-2.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ae34418b6b389d601b31153b84dce480351a352e0bb763684a1b993d6be30f17"}, - {file = "pydantic_core-2.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:732bd062c9e5d9582a30e8751461c1917dd1ccbdd6cafb032f02c86b20d2e7ec"}, - {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b52776a2e3230f4854907a1e0946eec04d41b1fc64069ee774876bbe0eab55"}, - {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef551c053692b1e39e3f7950ce2296536728871110e7d75c4e7753fb30ca87f4"}, - {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ebb892ed8599b23fa8f1799e13a12c87a97a6c9d0f497525ce9858564c4575a4"}, - {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa6c8c582036275997a733427b88031a32ffa5dfc3124dc25a730658c47a572f"}, - {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ba0884a91f1aecce75202473ab138724aa4fb26d7707f2e1fa6c3e68c84fbf"}, - {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7924e54f7ce5d253d6160090ddc6df25ed2feea25bfb3339b424a9dd591688bc"}, - {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69a7b96b59322a81c2203be537957313b07dd333105b73db0b69212c7d867b4b"}, - {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7e6231aa5bdacda78e96ad7b07d0c312f34ba35d717115f4b4bff6cb87224f0f"}, - {file = "pydantic_core-2.16.2-cp38-none-win32.whl", hash = "sha256:41dac3b9fce187a25c6253ec79a3f9e2a7e761eb08690e90415069ea4a68ff7a"}, - {file = "pydantic_core-2.16.2-cp38-none-win_amd64.whl", hash = "sha256:f685dbc1fdadb1dcd5b5e51e0a378d4685a891b2ddaf8e2bba89bd3a7144e44a"}, - {file = "pydantic_core-2.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:55749f745ebf154c0d63d46c8c58594d8894b161928aa41adbb0709c1fe78b77"}, - {file = "pydantic_core-2.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b30b0dd58a4509c3bd7eefddf6338565c4905406aee0c6e4a5293841411a1286"}, - {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18de31781cdc7e7b28678df7c2d7882f9692ad060bc6ee3c94eb15a5d733f8f7"}, - {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5864b0242f74b9dd0b78fd39db1768bc3f00d1ffc14e596fd3e3f2ce43436a33"}, - {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8f9186ca45aee030dc8234118b9c0784ad91a0bb27fc4e7d9d6608a5e3d386c"}, - {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:cc6f6c9be0ab6da37bc77c2dda5f14b1d532d5dbef00311ee6e13357a418e646"}, - {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa057095f621dad24a1e906747179a69780ef45cc8f69e97463692adbcdae878"}, - {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ad84731a26bcfb299f9eab56c7932d46f9cad51c52768cace09e92a19e4cf55"}, - {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3b052c753c4babf2d1edc034c97851f867c87d6f3ea63a12e2700f159f5c41c3"}, - {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e0f686549e32ccdb02ae6f25eee40cc33900910085de6aa3790effd391ae10c2"}, - {file = "pydantic_core-2.16.2-cp39-none-win32.whl", hash = "sha256:7afb844041e707ac9ad9acad2188a90bffce2c770e6dc2318be0c9916aef1469"}, - {file = "pydantic_core-2.16.2-cp39-none-win_amd64.whl", hash = "sha256:9da90d393a8227d717c19f5397688a38635afec89f2e2d7af0df037f3249c39a"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f60f920691a620b03082692c378661947d09415743e437a7478c309eb0e4f82"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:47924039e785a04d4a4fa49455e51b4eb3422d6eaacfde9fc9abf8fdef164e8a"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6294e76b0380bb7a61eb8a39273c40b20beb35e8c87ee101062834ced19c545"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe56851c3f1d6f5384b3051c536cc81b3a93a73faf931f404fef95217cf1e10d"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d776d30cde7e541b8180103c3f294ef7c1862fd45d81738d156d00551005784"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:72f7919af5de5ecfaf1eba47bf9a5d8aa089a3340277276e5636d16ee97614d7"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:4bfcbde6e06c56b30668a0c872d75a7ef3025dc3c1823a13cf29a0e9b33f67e8"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ff7c97eb7a29aba230389a2661edf2e9e06ce616c7e35aa764879b6894a44b25"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9b5f13857da99325dcabe1cc4e9e6a3d7b2e2c726248ba5dd4be3e8e4a0b6d0e"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a7e41e3ada4cca5f22b478c08e973c930e5e6c7ba3588fb8e35f2398cdcc1545"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60eb8ceaa40a41540b9acae6ae7c1f0a67d233c40dc4359c256ad2ad85bdf5e5"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7beec26729d496a12fd23cf8da9944ee338c8b8a17035a560b585c36fe81af20"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22c5f022799f3cd6741e24f0443ead92ef42be93ffda0d29b2597208c94c3753"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:eca58e319f4fd6df004762419612122b2c7e7d95ffafc37e890252f869f3fb2a"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed957db4c33bc99895f3a1672eca7e80e8cda8bd1e29a80536b4ec2153fa9804"}, - {file = 
"pydantic_core-2.16.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:459c0d338cc55d099798618f714b21b7ece17eb1a87879f2da20a3ff4c7628e2"}, - {file = "pydantic_core-2.16.2.tar.gz", hash = "sha256:0ba503850d8b8dcc18391f10de896ae51d37fe5fe43dbfb6a35c5c5cad271a06"}, + {file = "pydantic_core-2.16.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:75b81e678d1c1ede0785c7f46690621e4c6e63ccd9192af1f0bd9d504bbb6bf4"}, + {file = "pydantic_core-2.16.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c865a7ee6f93783bd5d781af5a4c43dadc37053a5b42f7d18dc019f8c9d2bd1"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:162e498303d2b1c036b957a1278fa0899d02b2842f1ff901b6395104c5554a45"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f583bd01bbfbff4eaee0868e6fc607efdfcc2b03c1c766b06a707abbc856187"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b926dd38db1519ed3043a4de50214e0d600d404099c3392f098a7f9d75029ff8"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:716b542728d4c742353448765aa7cdaa519a7b82f9564130e2b3f6766018c9ec"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4ad7f7ee1a13d9cb49d8198cd7d7e3aa93e425f371a68235f784e99741561f"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd87f48924f360e5d1c5f770d6155ce0e7d83f7b4e10c2f9ec001c73cf475c99"}, + {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0df446663464884297c793874573549229f9eca73b59360878f382a0fc085979"}, + {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4df8a199d9f6afc5ae9a65f8f95ee52cae389a8c6b20163762bde0426275b7db"}, + {file = "pydantic_core-2.16.3-cp310-none-win32.whl", hash = "sha256:456855f57b413f077dff513a5a28ed838dbbb15082ba00f80750377eed23d132"}, + {file = "pydantic_core-2.16.3-cp310-none-win_amd64.whl", hash = "sha256:732da3243e1b8d3eab8c6ae23ae6a58548849d2e4a4e03a1924c8ddf71a387cb"}, + {file = "pydantic_core-2.16.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:519ae0312616026bf4cedc0fe459e982734f3ca82ee8c7246c19b650b60a5ee4"}, + {file = "pydantic_core-2.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b3992a322a5617ded0a9f23fd06dbc1e4bd7cf39bc4ccf344b10f80af58beacd"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d62da299c6ecb04df729e4b5c52dc0d53f4f8430b4492b93aa8de1f541c4aac"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2acca2be4bb2f2147ada8cac612f8a98fc09f41c89f87add7256ad27332c2fda"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b662180108c55dfbf1280d865b2d116633d436cfc0bba82323554873967b340"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7c6ed0dc9d8e65f24f5824291550139fe6f37fac03788d4580da0d33bc00c97"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1bb0827f56654b4437955555dc3aeeebeddc47c2d7ed575477f082622c49e"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:e56f8186d6210ac7ece503193ec84104da7ceb98f68ce18c07282fcc2452e76f"}, + {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:936e5db01dd49476fa8f4383c259b8b1303d5dd5fb34c97de194560698cc2c5e"}, + {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33809aebac276089b78db106ee692bdc9044710e26f24a9a2eaa35a0f9fa70ba"}, + {file = "pydantic_core-2.16.3-cp311-none-win32.whl", hash = "sha256:ded1c35f15c9dea16ead9bffcde9bb5c7c031bff076355dc58dcb1cb436c4721"}, + {file = "pydantic_core-2.16.3-cp311-none-win_amd64.whl", hash = "sha256:d89ca19cdd0dd5f31606a9329e309d4fcbb3df860960acec32630297d61820df"}, + {file = "pydantic_core-2.16.3-cp311-none-win_arm64.whl", hash = "sha256:6162f8d2dc27ba21027f261e4fa26f8bcb3cf9784b7f9499466a311ac284b5b9"}, + {file = "pydantic_core-2.16.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f56ae86b60ea987ae8bcd6654a887238fd53d1384f9b222ac457070b7ac4cff"}, + {file = "pydantic_core-2.16.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9bd22a2a639e26171068f8ebb5400ce2c1bc7d17959f60a3b753ae13c632975"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4204e773b4b408062960e65468d5346bdfe139247ee5f1ca2a378983e11388a2"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f651dd19363c632f4abe3480a7c87a9773be27cfe1341aef06e8759599454120"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf09e615a0bf98d406657e0008e4a8701b11481840be7d31755dc9f97c44053"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e47755d8152c1ab5b55928ab422a76e2e7b22b5ed8e90a7d584268dd49e9c6b"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:500960cb3a0543a724a81ba859da816e8cf01b0e6aaeedf2c3775d12ee49cade"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf6204fe865da605285c34cf1172879d0314ff267b1c35ff59de7154f35fdc2e"}, + {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d33dd21f572545649f90c38c227cc8631268ba25c460b5569abebdd0ec5974ca"}, + {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49d5d58abd4b83fb8ce763be7794d09b2f50f10aa65c0f0c1696c677edeb7cbf"}, + {file = "pydantic_core-2.16.3-cp312-none-win32.whl", hash = "sha256:f53aace168a2a10582e570b7736cc5bef12cae9cf21775e3eafac597e8551fbe"}, + {file = "pydantic_core-2.16.3-cp312-none-win_amd64.whl", hash = "sha256:0d32576b1de5a30d9a97f300cc6a3f4694c428d956adbc7e6e2f9cad279e45ed"}, + {file = "pydantic_core-2.16.3-cp312-none-win_arm64.whl", hash = "sha256:ec08be75bb268473677edb83ba71e7e74b43c008e4a7b1907c6d57e940bf34b6"}, + {file = "pydantic_core-2.16.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b1f6f5938d63c6139860f044e2538baeee6f0b251a1816e7adb6cbce106a1f01"}, + {file = "pydantic_core-2.16.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a1ef6a36fdbf71538142ed604ad19b82f67b05749512e47f247a6ddd06afdc7"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:704d35ecc7e9c31d48926150afada60401c55efa3b46cd1ded5a01bdffaf1d48"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d937653a696465677ed583124b94a4b2d79f5e30b2c46115a68e482c6a591c8a"}, + {file = 
"pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9803edf8e29bd825f43481f19c37f50d2b01899448273b3a7758441b512acf8"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72282ad4892a9fb2da25defeac8c2e84352c108705c972db82ab121d15f14e6d"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f752826b5b8361193df55afcdf8ca6a57d0232653494ba473630a83ba50d8c9"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4384a8f68ddb31a0b0c3deae88765f5868a1b9148939c3f4121233314ad5532c"}, + {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4b2bf78342c40b3dc830880106f54328928ff03e357935ad26c7128bbd66ce8"}, + {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:13dcc4802961b5f843a9385fc821a0b0135e8c07fc3d9949fd49627c1a5e6ae5"}, + {file = "pydantic_core-2.16.3-cp38-none-win32.whl", hash = "sha256:e3e70c94a0c3841e6aa831edab1619ad5c511199be94d0c11ba75fe06efe107a"}, + {file = "pydantic_core-2.16.3-cp38-none-win_amd64.whl", hash = "sha256:ecdf6bf5f578615f2e985a5e1f6572e23aa632c4bd1dc67f8f406d445ac115ed"}, + {file = "pydantic_core-2.16.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bda1ee3e08252b8d41fa5537413ffdddd58fa73107171a126d3b9ff001b9b820"}, + {file = "pydantic_core-2.16.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:21b888c973e4f26b7a96491c0965a8a312e13be108022ee510248fe379a5fa23"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be0ec334369316fa73448cc8c982c01e5d2a81c95969d58b8f6e272884df0074"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5b6079cc452a7c53dd378c6f881ac528246b3ac9aae0f8eef98498a75657805"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee8d5f878dccb6d499ba4d30d757111847b6849ae07acdd1205fffa1fc1253c"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7233d65d9d651242a68801159763d09e9ec96e8a158dbf118dc090cd77a104c9"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6119dc90483a5cb50a1306adb8d52c66e447da88ea44f323e0ae1a5fcb14256"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:578114bc803a4c1ff9946d977c221e4376620a46cf78da267d946397dc9514a8"}, + {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8f99b147ff3fcf6b3cc60cb0c39ea443884d5559a30b1481e92495f2310ff2b"}, + {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4ac6b4ce1e7283d715c4b729d8f9dab9627586dafce81d9eaa009dd7f25dd972"}, + {file = "pydantic_core-2.16.3-cp39-none-win32.whl", hash = "sha256:e7774b570e61cb998490c5235740d475413a1f6de823169b4cf94e2fe9e9f6b2"}, + {file = "pydantic_core-2.16.3-cp39-none-win_amd64.whl", hash = "sha256:9091632a25b8b87b9a605ec0e61f241c456e9248bfdcf7abdf344fdb169c81cf"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:36fa178aacbc277bc6b62a2c3da95226520da4f4e9e206fdf076484363895d2c"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:dcca5d2bf65c6fb591fff92da03f94cd4f315972f97c21975398bd4bd046854a"}, + {file = 
"pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a72fb9963cba4cd5793854fd12f4cfee731e86df140f59ff52a49b3552db241"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60cc1a081f80a2105a59385b92d82278b15d80ebb3adb200542ae165cd7d183"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cbcc558401de90a746d02ef330c528f2e668c83350f045833543cd57ecead1ad"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fee427241c2d9fb7192b658190f9f5fd6dfe41e02f3c1489d2ec1e6a5ab1e04a"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4cb85f693044e0f71f394ff76c98ddc1bc0953e48c061725e540396d5c8a2e1"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b29eeb887aa931c2fcef5aa515d9d176d25006794610c264ddc114c053bf96fe"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a425479ee40ff021f8216c9d07a6a3b54b31c8267c6e17aa88b70d7ebd0e5e5b"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5c5cbc703168d1b7a838668998308018a2718c2130595e8e190220238addc96f"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b6add4c0b39a513d323d3b93bc173dac663c27b99860dd5bf491b240d26137"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f76ee558751746d6a38f89d60b6228fa174e5172d143886af0f85aa306fd89"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:00ee1c97b5364b84cb0bd82e9bbf645d5e2871fb8c58059d158412fee2d33d8a"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:287073c66748f624be4cef893ef9174e3eb88fe0b8a78dc22e88eca4bc357ca6"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed25e1835c00a332cb10c683cd39da96a719ab1dfc08427d476bce41b92531fc"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86b3d0033580bd6bbe07590152007275bd7af95f98eaa5bd36f3da219dcd93da"}, + {file = "pydantic_core-2.16.3.tar.gz", hash = "sha256:1cac689f80a3abab2d3c0048b29eea5751114054f032a941a32de4c852c59cad"}, ] [package.dependencies] @@ -3834,14 +3855,14 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] [[package]] name = "pyparsing" -version = "3.1.1" +version = "3.1.2" description = "pyparsing module - Classes and methods to define and execute parsing grammars" category = "main" optional = false python-versions = ">=3.6.8" files = [ - {file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"}, - {file = "pyparsing-3.1.1.tar.gz", hash = "sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"}, + {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, + {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, ] [package.extras] @@ -4052,14 +4073,14 @@ six = ">=1.5" [[package]] name = "python-dotenv" -version = "0.17.1" +version = "0.21.1" description = "Read key-value pairs from a .env file and set them as environment variables" category = "main" optional = false 
-python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "python-dotenv-0.17.1.tar.gz", hash = "sha256:b1ae5e9643d5ed987fc57cc2583021e38db531946518130777734f9589b3141f"}, - {file = "python_dotenv-0.17.1-py2.py3-none-any.whl", hash = "sha256:00aa34e92d992e9f8383730816359647f358f4a3be1ba45e5a5cefd27ee91544"}, + {file = "python-dotenv-0.21.1.tar.gz", hash = "sha256:1c93de8f636cde3ce377292818d0e440b6e45a82f215c3744979151fa8151c49"}, + {file = "python_dotenv-0.21.1-py3-none-any.whl", hash = "sha256:41e12e0318bebc859fcc4d97d4db8d20ad21721a6aa5047dd59f090391cb549a"}, ] [package.extras] @@ -4475,20 +4496,20 @@ files = [ [[package]] name = "setuptools" -version = "69.1.0" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -4526,14 +4547,14 @@ webhdfs = ["requests"] [[package]] name = "sniffio" -version = "1.3.0" +version = "1.3.1" description = "Sniff out which async library your code is running under" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, - {file = 
"sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] [[package]] @@ -4675,61 +4696,61 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.27" +version = "2.0.29" description = "Database Abstraction Library" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.27-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d04e579e911562f1055d26dab1868d3e0bb905db3bccf664ee8ad109f035618a"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fa67d821c1fd268a5a87922ef4940442513b4e6c377553506b9db3b83beebbd8"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c7a596d0be71b7baa037f4ac10d5e057d276f65a9a611c46970f012752ebf2d"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:954d9735ee9c3fa74874c830d089a815b7b48df6f6b6e357a74130e478dbd951"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5cd20f58c29bbf2680039ff9f569fa6d21453fbd2fa84dbdb4092f006424c2e6"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:03f448ffb731b48323bda68bcc93152f751436ad6037f18a42b7e16af9e91c07"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-win32.whl", hash = "sha256:d997c5938a08b5e172c30583ba6b8aad657ed9901fc24caf3a7152eeccb2f1b4"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-win_amd64.whl", hash = "sha256:eb15ef40b833f5b2f19eeae65d65e191f039e71790dd565c2af2a3783f72262f"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6c5bad7c60a392850d2f0fee8f355953abaec878c483dd7c3836e0089f046bf6"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3012ab65ea42de1be81fff5fb28d6db893ef978950afc8130ba707179b4284a"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbcd77c4d94b23e0753c5ed8deba8c69f331d4fd83f68bfc9db58bc8983f49cd"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d177b7e82f6dd5e1aebd24d9c3297c70ce09cd1d5d37b43e53f39514379c029c"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:680b9a36029b30cf063698755d277885d4a0eab70a2c7c6e71aab601323cba45"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1306102f6d9e625cebaca3d4c9c8f10588735ef877f0360b5cdb4fdfd3fd7131"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-win32.whl", hash = "sha256:5b78aa9f4f68212248aaf8943d84c0ff0f74efc65a661c2fc68b82d498311fd5"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-win_amd64.whl", hash = "sha256:15e19a84b84528f52a68143439d0c7a3a69befcd4f50b8ef9b7b69d2628ae7c4"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0de1263aac858f288a80b2071990f02082c51d88335a1db0d589237a3435fe71"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce850db091bf7d2a1f2fdb615220b968aeff3849007b1204bf6e3e50a57b3d32"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dfc936870507da96aebb43e664ae3a71a7b96278382bcfe84d277b88e379b18"}, - {file = 
"SQLAlchemy-2.0.27-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4fbe6a766301f2e8a4519f4500fe74ef0a8509a59e07a4085458f26228cd7cc"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4535c49d961fe9a77392e3a630a626af5baa967172d42732b7a43496c8b28876"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0fb3bffc0ced37e5aa4ac2416f56d6d858f46d4da70c09bb731a246e70bff4d5"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-win32.whl", hash = "sha256:7f470327d06400a0aa7926b375b8e8c3c31d335e0884f509fe272b3c700a7254"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-win_amd64.whl", hash = "sha256:f9374e270e2553653d710ece397df67db9d19c60d2647bcd35bfc616f1622dcd"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e97cf143d74a7a5a0f143aa34039b4fecf11343eed66538610debc438685db4a"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7b5a3e2120982b8b6bd1d5d99e3025339f7fb8b8267551c679afb39e9c7c7f1"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e36aa62b765cf9f43a003233a8c2d7ffdeb55bc62eaa0a0380475b228663a38f"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5ada0438f5b74c3952d916c199367c29ee4d6858edff18eab783b3978d0db16d"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b1d9d1bfd96eef3c3faedb73f486c89e44e64e40e5bfec304ee163de01cf996f"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-win32.whl", hash = "sha256:ca891af9f3289d24a490a5fde664ea04fe2f4984cd97e26de7442a4251bd4b7c"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-win_amd64.whl", hash = "sha256:fd8aafda7cdff03b905d4426b714601c0978725a19efc39f5f207b86d188ba01"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ec1f5a328464daf7a1e4e385e4f5652dd9b1d12405075ccba1df842f7774b4fc"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ad862295ad3f644e3c2c0d8b10a988e1600d3123ecb48702d2c0f26771f1c396"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48217be1de7d29a5600b5c513f3f7664b21d32e596d69582be0a94e36b8309cb"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e56afce6431450442f3ab5973156289bd5ec33dd618941283847c9fd5ff06bf"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:611068511b5531304137bcd7fe8117c985d1b828eb86043bd944cebb7fae3910"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b86abba762ecfeea359112b2bb4490802b340850bbee1948f785141a5e020de8"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-win32.whl", hash = "sha256:30d81cc1192dc693d49d5671cd40cdec596b885b0ce3b72f323888ab1c3863d5"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-win_amd64.whl", hash = "sha256:120af1e49d614d2525ac247f6123841589b029c318b9afbfc9e2b70e22e1827d"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d07ee7793f2aeb9b80ec8ceb96bc8cc08a2aec8a1b152da1955d64e4825fcbac"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cb0845e934647232b6ff5150df37ceffd0b67b754b9fdbb095233deebcddbd4a"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fc19ae2e07a067663dd24fca55f8ed06a288384f0e6e3910420bf4b1270cc51"}, - {file = 
"SQLAlchemy-2.0.27-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b90053be91973a6fb6020a6e44382c97739736a5a9d74e08cc29b196639eb979"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2f5c9dfb0b9ab5e3a8a00249534bdd838d943ec4cfb9abe176a6c33408430230"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33e8bde8fff203de50399b9039c4e14e42d4d227759155c21f8da4a47fc8053c"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-win32.whl", hash = "sha256:d873c21b356bfaf1589b89090a4011e6532582b3a8ea568a00e0c3aab09399dd"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-win_amd64.whl", hash = "sha256:ff2f1b7c963961d41403b650842dc2039175b906ab2093635d8319bef0b7d620"}, - {file = "SQLAlchemy-2.0.27-py3-none-any.whl", hash = "sha256:1ab4e0448018d01b142c916cc7119ca573803a4745cfe341b8f95657812700ac"}, - {file = "SQLAlchemy-2.0.27.tar.gz", hash = "sha256:86a6ed69a71fe6b88bf9331594fa390a2adda4a49b5c06f98e47bf0d392534f8"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c142852ae192e9fe5aad5c350ea6befe9db14370b34047e1f0f7cf99e63c63b"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:99a1e69d4e26f71e750e9ad6fdc8614fbddb67cfe2173a3628a2566034e223c7"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ef3fbccb4058355053c51b82fd3501a6e13dd808c8d8cd2561e610c5456013c"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d6753305936eddc8ed190e006b7bb33a8f50b9854823485eed3a886857ab8d1"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0f3ca96af060a5250a8ad5a63699180bc780c2edf8abf96c58af175921df847a"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c4520047006b1d3f0d89e0532978c0688219857eb2fee7c48052560ae76aca1e"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-win32.whl", hash = "sha256:b2a0e3cf0caac2085ff172c3faacd1e00c376e6884b5bc4dd5b6b84623e29e4f"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-win_amd64.whl", hash = "sha256:01d10638a37460616708062a40c7b55f73e4d35eaa146781c683e0fa7f6c43fb"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:308ef9cb41d099099fffc9d35781638986870b29f744382904bf9c7dadd08513"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:296195df68326a48385e7a96e877bc19aa210e485fa381c5246bc0234c36c78e"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a13b917b4ffe5a0a31b83d051d60477819ddf18276852ea68037a144a506efb9"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f6d971255d9ddbd3189e2e79d743ff4845c07f0633adfd1de3f63d930dbe673"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:61405ea2d563407d316c63a7b5271ae5d274a2a9fbcd01b0aa5503635699fa1e"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:de7202ffe4d4a8c1e3cde1c03e01c1a3772c92858837e8f3879b497158e4cb44"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-win32.whl", hash = "sha256:b5d7ed79df55a731749ce65ec20d666d82b185fa4898430b17cb90c892741520"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-win_amd64.whl", hash = "sha256:205f5a2b39d7c380cbc3b5dcc8f2762fb5bcb716838e2d26ccbc54330775b003"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:d96710d834a6fb31e21381c6d7b76ec729bd08c75a25a5184b1089141356171f"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:52de4736404e53c5c6a91ef2698c01e52333988ebdc218f14c833237a0804f1b"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c7b02525ede2a164c5fa5014915ba3591730f2cc831f5be9ff3b7fd3e30958e"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dfefdb3e54cd15f5d56fd5ae32f1da2d95d78319c1f6dfb9bcd0eb15d603d5d"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a88913000da9205b13f6f195f0813b6ffd8a0c0c2bd58d499e00a30eb508870c"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fecd5089c4be1bcc37c35e9aa678938d2888845a134dd016de457b942cf5a758"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-win32.whl", hash = "sha256:8197d6f7a3d2b468861ebb4c9f998b9df9e358d6e1cf9c2a01061cb9b6cf4e41"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-win_amd64.whl", hash = "sha256:9b19836ccca0d321e237560e475fd99c3d8655d03da80c845c4da20dda31b6e1"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:87a1d53a5382cdbbf4b7619f107cc862c1b0a4feb29000922db72e5a66a5ffc0"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a0732dffe32333211801b28339d2a0babc1971bc90a983e3035e7b0d6f06b93"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90453597a753322d6aa770c5935887ab1fc49cc4c4fdd436901308383d698b4b"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ea311d4ee9a8fa67f139c088ae9f905fcf0277d6cd75c310a21a88bf85e130f5"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5f20cb0a63a3e0ec4e169aa8890e32b949c8145983afa13a708bc4b0a1f30e03"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-win32.whl", hash = "sha256:e5bbe55e8552019c6463709b39634a5fc55e080d0827e2a3a11e18eb73f5cdbd"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-win_amd64.whl", hash = "sha256:c2f9c762a2735600654c654bf48dad388b888f8ce387b095806480e6e4ff6907"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e614d7a25a43a9f54fcce4675c12761b248547f3d41b195e8010ca7297c369c"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:471fcb39c6adf37f820350c28aac4a7df9d3940c6548b624a642852e727ea586"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:988569c8732f54ad3234cf9c561364221a9e943b78dc7a4aaf35ccc2265f1930"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dddaae9b81c88083e6437de95c41e86823d150f4ee94bf24e158a4526cbead01"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:334184d1ab8f4c87f9652b048af3f7abea1c809dfe526fb0435348a6fef3d380"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:38b624e5cf02a69b113c8047cf7f66b5dfe4a2ca07ff8b8716da4f1b3ae81567"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-win32.whl", hash = "sha256:bab41acf151cd68bc2b466deae5deeb9e8ae9c50ad113444151ad965d5bf685b"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-win_amd64.whl", hash = "sha256:52c8011088305476691b8750c60e03b87910a123cfd9ad48576d6414b6ec2a1d"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:3071ad498896907a5ef756206b9dc750f8e57352113c19272bdfdc429c7bd7de"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dba622396a3170974f81bad49aacebd243455ec3cc70615aeaef9e9613b5bca5"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b184e3de58009cc0bf32e20f137f1ec75a32470f5fede06c58f6c355ed42a72"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c37f1050feb91f3d6c32f864d8e114ff5545a4a7afe56778d76a9aec62638ba"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bda7ce59b06d0f09afe22c56714c65c957b1068dee3d5e74d743edec7daba552"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:25664e18bef6dc45015b08f99c63952a53a0a61f61f2e48a9e70cec27e55f699"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-win32.whl", hash = "sha256:77d29cb6c34b14af8a484e831ab530c0f7188f8efed1c6a833a2c674bf3c26ec"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-win_amd64.whl", hash = "sha256:04c487305ab035a9548f573763915189fc0fe0824d9ba28433196f8436f1449c"}, + {file = "SQLAlchemy-2.0.29-py3-none-any.whl", hash = "sha256:dc4ee2d4ee43251905f88637d5281a8d52e916a021384ec10758826f5cbae305"}, + {file = "SQLAlchemy-2.0.29.tar.gz", hash = "sha256:bd9566b8e58cabd700bc367b60e90d9349cd16f0984973f98a9a09f9c64e86f0"}, ] [package.dependencies] @@ -4969,14 +4990,14 @@ torch = ["torch (>=1.6.0)"] [[package]] name = "threadpoolctl" -version = "3.3.0" +version = "3.4.0" description = "threadpoolctl" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "threadpoolctl-3.3.0-py3-none-any.whl", hash = "sha256:6155be1f4a39f31a18ea70f94a77e0ccd57dced08122ea61109e7da89883781e"}, - {file = "threadpoolctl-3.3.0.tar.gz", hash = "sha256:5dac632b4fa2d43f42130267929af3ba01399ef4bd1882918e92dbc30365d30c"}, + {file = "threadpoolctl-3.4.0-py3-none-any.whl", hash = "sha256:8f4c689a65b23e5ed825c8436a92b818aac005e0f3715f6a1664d7c7ee29d262"}, + {file = "threadpoolctl-3.4.0.tar.gz", hash = "sha256:f11b491a03661d6dd7ef692dd422ab34185d982466c49c8f98c8f716b5c93196"}, ] [[package]] @@ -5276,14 +5297,14 @@ telegram = ["requests"] [[package]] name = "typer" -version = "0.9.0" +version = "0.9.4" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "typer-0.9.0-py3-none-any.whl", hash = "sha256:5d96d986a21493606a358cae4461bd8cdf83cbf33a5aa950ae629ca3b51467ee"}, - {file = "typer-0.9.0.tar.gz", hash = "sha256:50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2"}, + {file = "typer-0.9.4-py3-none-any.whl", hash = "sha256:aa6c4a4e2329d868b80ecbaf16f807f2b54e192209d7ac9dd42691d63f7a54eb"}, + {file = "typer-0.9.4.tar.gz", hash = "sha256:f714c2d90afae3a7929fcd72a3abb08df305e1ff61719381384211c4070af57f"}, ] [package.dependencies] @@ -5294,18 +5315,18 @@ typing-extensions = ">=3.7.4.3" all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"] doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"] -test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] +test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.971)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -5367,14 +5388,14 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "uvicorn" -version = "0.27.1" +version = "0.29.0" description = "The lightning-fast ASGI server." 
category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "uvicorn-0.27.1-py3-none-any.whl", hash = "sha256:5c89da2f3895767472a35556e539fd59f7edbe9b1e9c0e1c99eebeadc61838e4"}, - {file = "uvicorn-0.27.1.tar.gz", hash = "sha256:3d9a267296243532db80c83a959a3400502165ade2c1338dea4e67915fd4745a"}, + {file = "uvicorn-0.29.0-py3-none-any.whl", hash = "sha256:2c2aac7ff4f4365c206fd773a39bf4ebd1047c238f8b8268ad996829323473de"}, + {file = "uvicorn-0.29.0.tar.gz", hash = "sha256:6a69214c0b6a087462412670b3ef21224fa48cae0e452b5883e8e8bdfdd11dd0"}, ] [package.dependencies] @@ -5921,4 +5942,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "dcd312b0c03cd0461269039d6cdb50bacb2a1272cb2979e00b3015c5d49f534c" +content-hash = "c0dac08c63e95c1b87889415bac0a70cd8ae248595f35aa1fe008b730c6097e1" diff --git a/pyproject.toml b/pyproject.toml index 89569cd8..e87868dc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ include = "packages" [tool.poetry.dependencies] python = "^3.10" -open-autonomy = "==0.14.6" +open-autonomy = "==0.14.10" openai = "==1.11.0" requests = "==2.28.1" mech-client = "==0.2.5" @@ -24,14 +24,14 @@ py-multibase = "==1.0.3" py-multicodec = "==0.2.1" grpcio = "==1.53.0" asn1crypto = "<1.5.0,>=1.4.0" -open-aea-ledger-ethereum = "==1.48.0" -open-aea-ledger-cosmos = "==1.48.0" +open-aea-ledger-ethereum = "==1.50.0" +open-aea-ledger-cosmos = "==1.50.0" protobuf = "<4.25.0,>=4.21.6" hypothesis = "==6.21.6" -open-aea-test-autonomy = "==0.14.6" +open-aea-test-autonomy = "==0.14.10" web3 = "<7,>=6.0.0" ipfshttpclient = "==0.8.0a2" -open-aea-cli-ipfs = "==1.48.0" +open-aea-cli-ipfs = "==1.50.0" pytest-asyncio = "*" aiohttp = "<4.0.0,>=3.8.5" certifi = "*" diff --git a/tox.ini b/tox.ini index 43013a2b..6ef61786 100644 --- a/tox.ini +++ b/tox.ini @@ -18,7 +18,7 @@ deps = [deps-packages] deps = {[deps-tests]deps} - open-autonomy==0.14.6 + open-autonomy==0.14.10 openai==0.27.2 requests==2.28.1 mech-client==0.2.5 @@ -26,14 +26,14 @@ deps = py-multicodec==0.2.1 grpcio==1.53.0 asn1crypto<1.5.0,>=1.4.0 - open-aea-ledger-ethereum==1.48.0 - open-aea-ledger-cosmos==1.48.0 + open-aea-ledger-ethereum==1.50.0 + open-aea-ledger-cosmos==1.50.0 protobuf<4.25.0,>=4.21.6 hypothesis==6.21.6 - open-aea-test-autonomy==0.14.6 + open-aea-test-autonomy==0.14.10 web3<7,>=6.0.0 ipfshttpclient==0.8.0a2 - open-aea-cli-ipfs==1.48.0 + open-aea-cli-ipfs==1.50.0 pytest-asyncio aiohttp<4.0.0,>=3.8.5 certifi @@ -127,7 +127,7 @@ skipsdist = True usedevelop = True deps = protobuf<4.25.0,>=4.21.6 - open-autonomy[all]==0.14.6 + open-autonomy[all]==0.14.10 commands = autonomy init --reset --author ci --remote --ipfs --ipfs-node "/dns/registry.autonolas.tech/tcp/443/https" autonomy packages sync