@@ -6,7 +6,7 @@
 from dotenv import load_dotenv
 from scrapegraphai.models import OpenAI
 from scrapegraphai.graphs import BaseGraph
-from scrapegraphai.nodes import FetchTextNode, ParseTextNode, GenerateAnswerNode
+from scrapegraphai.nodes import FetchTextNode, ParseNode, RAGNode, GenerateAnswerNode
 
 load_dotenv()
 
@@ -20,25 +20,31 @@
 }
 model = OpenAI(llm_config)
 
-with open("text_example.txt", "r", encoding="utf-8") as file:
+curr_dir = os.path.dirname(__file__)
+file_path = os.path.join(curr_dir, "text_example.txt")
+
+with open(file_path, "r", encoding="utf-8") as file:
     text = file.read()
 
 
 # define the nodes for the graph
-fetch_html_node = FetchTextNode("load_html")
-parse_document_node = ParseTextNode("parse_document")
+fetch_html_node = FetchTextNode("load_html_from_text")
+parse_document_node = ParseNode(doc_type="text", chunks_size=4000, node_name="parse_document")
+rag_node = RAGNode(model, "rag")
 generate_answer_node = GenerateAnswerNode(model, "generate_answer")
 
 # create the graph
 graph = BaseGraph(
     nodes={
         fetch_html_node,
         parse_document_node,
+        rag_node,
         generate_answer_node
     },
     edges={
         (fetch_html_node, parse_document_node),
-        (parse_document_node, generate_answer_node)
+        (parse_document_node, rag_node),
+        (rag_node, generate_answer_node)
     },
     entry_point=fetch_html_node
 )
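With this change the pipeline becomes fetch → parse → RAG → answer, inserting a retrieval step between chunking and answer generation (the new path handling also relies on `import os`, presumably already present in the unshown first lines of the file). As a rough usage sketch, the script might be driven as below, assuming `BaseGraph.execute` threads an initial state dict through the nodes and returns the final state; the `user_input`, `text`, and `answer` keys are guesses based on the node names, not confirmed API.

# Hypothetical driver, appended after the graph construction above.
# Assumptions: BaseGraph.execute(state_dict) runs the nodes in edge order
# and returns the final state; the key names below are illustrative guesses.
inputs = {
    "user_input": "What is this text about?",  # question consumed by GenerateAnswerNode (assumed key)
    "text": text,                              # raw text consumed by FetchTextNode (assumed key)
}
final_state = graph.execute(inputs)
# Assumption: GenerateAnswerNode writes its result under an "answer" key.
print(final_state.get("answer", "No answer produced."))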