From 7eda6bc06bc4c32850029f54b9b4c22f3124296e Mon Sep 17 00:00:00 2001 From: Marco Vinciguerra Date: Sun, 22 Sep 2024 18:55:05 +0200 Subject: [PATCH 1/4] fix: issue about parser --- scrapegraphai/nodes/generate_answer_node.py | 10 ++++------ scrapegraphai/utils/research_web.py | 4 +++- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/scrapegraphai/nodes/generate_answer_node.py b/scrapegraphai/nodes/generate_answer_node.py index 96ca0238..d380d7b8 100644 --- a/scrapegraphai/nodes/generate_answer_node.py +++ b/scrapegraphai/nodes/generate_answer_node.py @@ -96,14 +96,12 @@ def execute(self, state: dict) -> dict: output_parser = get_structured_output_parser(self.node_config["schema"]) format_instructions = "NA" else: - if not isinstance(self.llm_model, ChatBedrock): - output_parser = get_pydantic_output_parser(self.node_config["schema"]) - format_instructions = output_parser.get_format_instructions() + output_parser = get_pydantic_output_parser(self.node_config["schema"]) + format_instructions = output_parser.get_format_instructions() else: - if not isinstance(self.llm_model, ChatBedrock): - output_parser = JsonOutputParser() - format_instructions = output_parser.get_format_instructions() + output_parser = JsonOutputParser() + format_instructions = output_parser.get_format_instructions() if isinstance(self.llm_model, (ChatOpenAI, AzureChatOpenAI)) \ and not self.script_creator \ diff --git a/scrapegraphai/utils/research_web.py b/scrapegraphai/utils/research_web.py index 0a10c8f2..7e978ffd 100644 --- a/scrapegraphai/utils/research_web.py +++ b/scrapegraphai/utils/research_web.py @@ -60,7 +60,9 @@ def search_on_web(query: str, search_engine: str = "Google", elif search_engine.lower() == "searxng": url = f"http://localhost:{port}" - params = {"q": query, "format": "json"} + params = {"q": query, + "format": "json", + "engines": "google,duckduckgo,brave,qwant,bing"} response = requests.get(url, params=params) From 65b8675586186c2227c1cc824327e4864b66ce2f Mon Sep 17 00:00:00 2001 From: Marco Vinciguerra Date: Sun, 22 Sep 2024 21:10:57 +0200 Subject: [PATCH 2/4] Update smart_scraper_multi_concat_graph.py --- .../graphs/smart_scraper_multi_concat_graph.py | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/scrapegraphai/graphs/smart_scraper_multi_concat_graph.py b/scrapegraphai/graphs/smart_scraper_multi_concat_graph.py index 1adda1a8..1ee2c56e 100644 --- a/scrapegraphai/graphs/smart_scraper_multi_concat_graph.py +++ b/scrapegraphai/graphs/smart_scraper_multi_concat_graph.py @@ -60,18 +60,13 @@ def _create_graph(self) -> BaseGraph: BaseGraph: A graph instance representing the web scraping and searching workflow. 
""" - smart_scraper_instance = SmartScraperGraph( - prompt="", - source="", - config=self.copy_config, - schema=self.copy_schema - ) - graph_iterator_node = GraphIteratorNode( input="user_prompt & urls", output=["results"], node_config={ - "graph_instance": smart_scraper_instance, + "graph_instance": SmartScraperGraph, + "scraper_config": self.copy_config, + "scraper_schema": self.copy_schema, } ) From 69880b680a5d1a2aee48502a8f3a3948b26c1371 Mon Sep 17 00:00:00 2001 From: Marco Vinciguerra Date: Sun, 22 Sep 2024 21:16:16 +0200 Subject: [PATCH 3/4] Update generate_answer_node.py --- scrapegraphai/nodes/generate_answer_node.py | 143 ++++++++------------ 1 file changed, 53 insertions(+), 90 deletions(-) diff --git a/scrapegraphai/nodes/generate_answer_node.py b/scrapegraphai/nodes/generate_answer_node.py index d380d7b8..15686ec1 100644 --- a/scrapegraphai/nodes/generate_answer_node.py +++ b/scrapegraphai/nodes/generate_answer_node.py @@ -1,6 +1,3 @@ -""" -GenerateAnswerNode Module -""" from typing import List, Optional from langchain.prompts import PromptTemplate from langchain_core.output_parsers import JsonOutputParser @@ -12,29 +9,12 @@ from tqdm import tqdm from .base_node import BaseNode from ..utils.output_parser import get_structured_output_parser, get_pydantic_output_parser -from ..prompts import (TEMPLATE_CHUNKS, - TEMPLATE_NO_CHUNKS, TEMPLATE_MERGE, - TEMPLATE_CHUNKS_MD, TEMPLATE_NO_CHUNKS_MD, - TEMPLATE_MERGE_MD) +from ..prompts import ( + TEMPLATE_CHUNKS, TEMPLATE_NO_CHUNKS, TEMPLATE_MERGE, + TEMPLATE_CHUNKS_MD, TEMPLATE_NO_CHUNKS_MD, TEMPLATE_MERGE_MD +) class GenerateAnswerNode(BaseNode): - """ - A node that generates an answer using a large language model (LLM) based on the user's input - and the content extracted from a webpage. It constructs a prompt from the user's input - and the scraped content, feeds it to the LLM, and parses the LLM's response to produce - an answer. - - Attributes: - llm_model: An instance of a language model client, configured for generating answers. - verbose (bool): A flag indicating whether to show print statements during execution. - - Args: - input (str): Boolean expression defining the input keys needed from the state. - output (List[str]): List of output keys to be updated in the state. - node_config (dict): Additional configuration for the node. - node_name (str): The unique identifier name for the node, defaulting to "GenerateAnswer". 
- """ - def __init__( self, input: str, @@ -43,91 +23,73 @@ def __init__( node_name: str = "GenerateAnswer", ): super().__init__(node_name, "node", input, output, 2, node_config) - self.llm_model = node_config["llm_model"] if isinstance(node_config["llm_model"], ChatOllama): - self.llm_model.format="json" - - self.verbose = ( - True if node_config is None else node_config.get("verbose", False) - ) - self.force = ( - False if node_config is None else node_config.get("force", False) - ) - self.script_creator = ( - False if node_config is None else node_config.get("script_creator", False) - ) - self.is_md_scraper = ( - False if node_config is None else node_config.get("is_md_scraper", False) - ) + self.llm_model.format = "json" + self.verbose = node_config.get("verbose", False) + self.force = node_config.get("force", False) + self.script_creator = node_config.get("script_creator", False) + self.is_md_scraper = node_config.get("is_md_scraper", False) self.additional_info = node_config.get("additional_info") def execute(self, state: dict) -> dict: - """ - Generates an answer by constructing a prompt from the user's input and the scraped - content, querying the language model, and parsing its response. - - Args: - state (dict): The current state of the graph. The input keys will be used - to fetch the correct data from the state. - - Returns: - dict: The updated state with the output key containing the generated answer. - - Raises: - KeyError: If the input keys are not found in the state, indicating - that the necessary information for generating an answer is missing. - """ - self.logger.info(f"--- Executing {self.node_name} Node ---") - input_keys = self.get_input_keys(state) + input_keys = self.get_input_keys(state) input_data = [state[key] for key in input_keys] user_prompt = input_data[0] doc = input_data[1] if self.node_config.get("schema", None) is not None: - if isinstance(self.llm_model, (ChatOpenAI, ChatMistralAI)): self.llm_model = self.llm_model.with_structured_output( - schema = self.node_config["schema"]) + schema=self.node_config["schema"] + ) output_parser = get_structured_output_parser(self.node_config["schema"]) format_instructions = "NA" else: - output_parser = get_pydantic_output_parser(self.node_config["schema"]) - format_instructions = output_parser.get_format_instructions() - + if not isinstance(self.llm_model, ChatBedrock): + output_parser = get_pydantic_output_parser(self.node_config["schema"]) + format_instructions = output_parser.get_format_instructions() + else: + output_parser = None + format_instructions = "" else: - output_parser = JsonOutputParser() - format_instructions = output_parser.get_format_instructions() + if not isinstance(self.llm_model, ChatBedrock): + output_parser = JsonOutputParser() + format_instructions = output_parser.get_format_instructions() + else: + output_parser = None + format_instructions = "" if isinstance(self.llm_model, (ChatOpenAI, AzureChatOpenAI)) \ and not self.script_creator \ or self.force \ and not self.script_creator or self.is_md_scraper: - - template_no_chunks_prompt = TEMPLATE_NO_CHUNKS_MD - template_chunks_prompt = TEMPLATE_CHUNKS_MD - template_merge_prompt = TEMPLATE_MERGE_MD + template_no_chunks_prompt = TEMPLATE_NO_CHUNKS_MD + template_chunks_prompt = TEMPLATE_CHUNKS_MD + template_merge_prompt = TEMPLATE_MERGE_MD else: - template_no_chunks_prompt = TEMPLATE_NO_CHUNKS - template_chunks_prompt = TEMPLATE_CHUNKS - template_merge_prompt = TEMPLATE_MERGE + template_no_chunks_prompt = TEMPLATE_NO_CHUNKS + template_chunks_prompt = 
TEMPLATE_CHUNKS + template_merge_prompt = TEMPLATE_MERGE if self.additional_info is not None: - template_no_chunks_prompt = self.additional_info + template_no_chunks_prompt - template_chunks_prompt = self.additional_info + template_chunks_prompt - template_merge_prompt = self.additional_info + template_merge_prompt + template_no_chunks_prompt = self.additional_info + template_no_chunks_prompt + template_chunks_prompt = self.additional_info + template_chunks_prompt + template_merge_prompt = self.additional_info + template_merge_prompt if len(doc) == 1: prompt = PromptTemplate( - template=template_no_chunks_prompt , + template=template_no_chunks_prompt, input_variables=["question"], - partial_variables={"context": doc, - "format_instructions": format_instructions}) - chain = prompt | self.llm_model | output_parser + partial_variables={"context": doc, "format_instructions": format_instructions} + ) + chain = prompt | self.llm_model + if output_parser: + chain = chain | output_parser answer = chain.invoke({"question": user_prompt}) state.update({self.output[0]: answer}) @@ -135,27 +97,28 @@ def execute(self, state: dict) -> dict: chains_dict = {} for i, chunk in enumerate(tqdm(doc, desc="Processing chunks", disable=not self.verbose)): - prompt = PromptTemplate( - template=TEMPLATE_CHUNKS, + template=template_chunks_prompt, input_variables=["question"], - partial_variables={"context": chunk, - "chunk_id": i + 1, - "format_instructions": format_instructions}) + partial_variables={"context": chunk, "chunk_id": i + 1, "format_instructions": format_instructions} + ) chain_name = f"chunk{i+1}" - chains_dict[chain_name] = prompt | self.llm_model | output_parser + chains_dict[chain_name] = prompt | self.llm_model + if output_parser: + chains_dict[chain_name] = chains_dict[chain_name] | output_parser async_runner = RunnableParallel(**chains_dict) - - batch_results = async_runner.invoke({"question": user_prompt}) + batch_results = async_runner.invoke({"question": user_prompt}) merge_prompt = PromptTemplate( - template = template_merge_prompt , - input_variables=["context", "question"], - partial_variables={"format_instructions": format_instructions}, - ) + template=template_merge_prompt, + input_variables=["context", "question"], + partial_variables={"format_instructions": format_instructions} + ) - merge_chain = merge_prompt | self.llm_model | output_parser + merge_chain = merge_prompt | self.llm_model + if output_parser: + merge_chain = merge_chain | output_parser answer = merge_chain.invoke({"context": batch_results, "question": user_prompt}) state.update({self.output[0]: answer}) From 8ce08baf01d7757c6fdcab0333405787c67d2dbc Mon Sep 17 00:00:00 2001 From: Marco Vinciguerra Date: Sun, 22 Sep 2024 22:25:01 +0200 Subject: [PATCH 4/4] fix: graph Iterator node --- scrapegraphai/graphs/smart_scraper_multi_concat_graph.py | 4 ++-- scrapegraphai/nodes/graph_iterator_node.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/scrapegraphai/graphs/smart_scraper_multi_concat_graph.py b/scrapegraphai/graphs/smart_scraper_multi_concat_graph.py index 1ee2c56e..2097e1ca 100644 --- a/scrapegraphai/graphs/smart_scraper_multi_concat_graph.py +++ b/scrapegraphai/graphs/smart_scraper_multi_concat_graph.py @@ -66,8 +66,8 @@ def _create_graph(self) -> BaseGraph: node_config={ "graph_instance": SmartScraperGraph, "scraper_config": self.copy_config, - "scraper_schema": self.copy_schema, - } + }, + schema=self.copy_schema, ) concat_answers_node = ConcatAnswersNode( diff --git 
a/scrapegraphai/nodes/graph_iterator_node.py b/scrapegraphai/nodes/graph_iterator_node.py index e38461f1..f7fd944f 100644 --- a/scrapegraphai/nodes/graph_iterator_node.py +++ b/scrapegraphai/nodes/graph_iterator_node.py @@ -130,7 +130,7 @@ async def _async_run(graph): if url.startswith("http"): graph.input_key = "url" participants.append(graph) - + futures = [_async_run(graph) for graph in participants] answers = await tqdm.gather(
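
Illustrative notes on the patches above (sketches, not part of the series):

The SearXNG change in PATCH 1/4 pins an explicit engine list instead of relying on the instance's defaults, which keeps results stable across deployments whose default engine sets differ. Below is a minimal sketch of the resulting request, assuming a local SearXNG instance with its JSON output format enabled; the localhost URL, port variable, and engine list come straight from the diff, while the timeout, error handling, and result extraction are illustrative additions.

    import requests

    def searxng_search(query: str, port: int = 8080) -> list:
        """Query a local SearXNG instance the way PATCH 1/4's diff does (sketch)."""
        url = f"http://localhost:{port}"
        params = {
            "q": query,
            "format": "json",
            # Pinning the engines, as the patch does, keeps results consistent
            # across SearXNG deployments with different default engine sets.
            "engines": "google,duckduckgo,brave,qwant,bing",
        }
        response = requests.get(url, params=params, timeout=10)
        response.raise_for_status()
        # SearXNG's JSON format lists hits under the "results" key.
        return [hit["url"] for hit in response.json().get("results", [])]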
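
PATCH 2/4 and PATCH 4/4 together change GraphIteratorNode's contract: instead of receiving one prebuilt SmartScraperGraph created with empty prompt and source, the node now receives the class itself plus a copied scraper config, and (after PATCH 4/4) the schema as a node-level keyword argument. This lets the iterator build a fresh, independently configured graph per URL. The loop below is a hedged sketch of what that enables, not GraphIteratorNode's actual implementation; `urls` and `user_prompt` stand in for values read from the state.

    # Illustrative only: not GraphIteratorNode's real loop.
    def run_per_url(node_config, urls, user_prompt, schema):
        graph_class = node_config["graph_instance"]      # here: SmartScraperGraph
        scraper_config = node_config["scraper_config"]   # the copied config

        results = []
        for url in urls:
            # One fresh graph per URL, built from the class rather than
            # reusing a single instance created with prompt="" and source="".
            graph = graph_class(
                prompt=user_prompt,
                source=url,
                config=scraper_config,
                schema=schema,  # after PATCH 4/4, passed as a node kwarg
            )
            results.append(graph.run())
        return results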
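
PATCH 3/4 reworks how the output parser attaches to the chain: for ChatBedrock models no parser is constructed, so the parser stage is appended conditionally rather than hard-wired into `prompt | llm | parser`. Below is a standalone sketch of that pattern under the same assumptions as the patch; `prompt` and `llm_model` are placeholders for a LangChain PromptTemplate and chat model, and the ChatBedrock import path is an assumption that may vary by LangChain version.

    from langchain_core.output_parsers import JsonOutputParser
    from langchain_aws import ChatBedrock  # assumed import path; varies by version

    def build_chain(prompt, llm_model):
        # Mirror of PATCH 3/4's pattern: Bedrock models skip the parser
        # stage, so the parser is appended only when one was constructed.
        if not isinstance(llm_model, ChatBedrock):
            output_parser = JsonOutputParser()
            format_instructions = output_parser.get_format_instructions()
        else:
            output_parser = None
            format_instructions = ""

        chain = prompt | llm_model
        if output_parser:
            chain = chain | output_parser
        return chain, format_instructions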