"answer": "\"\"\"\nExample of custom graph using existing nodes\n\"\"\"\n\nimport os\nfrom dotenv import load_dotenv\nfrom langchain_openai import OpenAIEmbeddings\nfrom langchain_openai import AzureChatOpenAI\nfrom langchain_openai import AzureOpenAIEmbeddings\nfrom scrapegraphai.graphs import BaseGraph\nfrom scrapegraphai.nodes import FetchNode, ParseNode, RAGNode, GenerateAnswerNode, RobotsNode\nload_dotenv()\n\n# ************************************************\n# Define the configuration for the graph\n# ************************************************\n\n# ************************************************\n# Define the configuration for the graph\n# ************************************************\n\nllm_model_instance = AzureChatOpenAI(\n openai_api_version=os.environ[\"AZURE_OPENAI_API_VERSION\"],\n azure_deployment=os.environ[\"AZURE_OPENAI_CHAT_DEPLOYMENT_NAME\"]\n)\n\nembedder_model_instance = AzureOpenAIEmbeddings(\n azure_deployment=os.environ[\"AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME\"],\n openai_api_version=os.environ[\"AZURE_OPENAI_API_VERSION\"],\n)\n\n# ************************************************\n# Create the JSONScraperGraph instance and run it\n# ************************************************\n\ngraph_config = {\n \"llm\": {\"model_instance\": llm_model_instance},\n \"embeddings\": {\"model_instance\": embedder_model_instance}\n}\n# define the nodes for the graph\nrobot_node = RobotsNode(\n input=\"url\",\n output=[\"is_scrapable\"],\n node_config={\n \"llm_model\": llm_model_instance,\n \"force_scraping\": True,\n \"verbose\": True,\n }\n)\n\nfetch_node = FetchNode(\n input=\"url | local_dir\",\n output=[\"doc\", \"link_urls\", \"img_urls\"],\n node_config={\n \"verbose\": True,\n \"headless\": True,\n }\n)\nparse_node = ParseNode(\n input=\"doc\",\n output=[\"parsed_doc\"],\n node_config={\n \"chunk_size\": 4096,\n \"verbose\": True,\n }\n)\nrag_node = RAGNode(\n input=\"user_prompt & (parsed_doc | doc)\",\n output=[\"relevant_chunks\"],\n node_config={\n \"llm_model\": llm_model_instance,\n \"embedder_model\": embedder_model_instance,\n \"verbose\": True,\n }\n)\ngenerate_answer_node = GenerateAnswerNode(\n input=\"user_prompt & (relevant_chunks | parsed_doc | doc)\",\n output=[\"answer\"],\n node_config={\n \"llm_model\": llm_model_instance,\n \"verbose\": True,\n }\n)\n\n# ************************************************\n# Create the graph by defining the connections\n# ************************************************\n\ngraph = BaseGraph(\n nodes=[\n robot_node,\n fetch_node,\n parse_node,\n rag_node,\n generate_answer_node,\n ],\n edges=[\n (robot_node, fetch_node),\n (fetch_node, parse_node),\n (parse_node, rag_node),\n (rag_node, generate_answer_node)\n ],\n entry_point=robot_node\n)\n\n# ************************************************\n# Execute the graph\n# ************************************************\n\nresult, execution_info = graph.execute({\n \"user_prompt\": \"Describe the content\",\n \"url\": \"https://example.com/\"\n})\n\n# get the answer from the result\nresult = result.get(\"answer\", \"No answer found.\")\nprint(result)\n"