vikramvasudevan committed on
Commit c18d7a8 · verified · 1 Parent(s): c6893be

Upload folder using huggingface_hub
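For context, a folder upload like this one is typically driven by huggingface_hub's upload_folder API. A minimal sketch of that kind of call follows; the folder path and repo_id are placeholders for illustration, not values taken from this commit.

    # Minimal sketch of the kind of upload that produces a commit like this one.
    # folder_path and repo_id are hypothetical placeholders, not the actual values used here.
    from huggingface_hub import HfApi

    api = HfApi()
    api.upload_folder(
        folder_path="./my_app",                   # local folder to push
        repo_id="vikramvasudevan/example-space",  # hypothetical target repo
        repo_type="space",                        # assuming a Space; could be "model" or "dataset"
        commit_message="Upload folder using huggingface_hub",
    )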

Files changed (1)
  1. graph_helper.py +4 -38
graph_helper.py CHANGED
@@ -72,47 +72,13 @@ def generate_graph() -> CompiledStateGraph:
 
         return {"messages": messages}
 
-    # def chatNode(state: ChatState) -> ChatState:
-    #     messages = state["messages"]
-    #     system_prompt = None
-    #     new_messages = []
-
-    #     for m in messages:
-    #         if isinstance(m, ToolMessage):
-    #             print("m.name = ", m.name)
-    #             if m.name == "format_scripture_answer":
-    #                 system_prompt = m.content
-    #             else:
-    #                 new_messages.append(m)
-
-    #     if system_prompt:
-    #         full_history = [
-    #             SystemMessage(content=system_prompt),
-    #             SystemMessage(
-    #                 content="⚠️ Do NOT summarize or compress the context from the query tool. It will be passed directly to another tool that formats the answer."
-    #             ),
-    #             SystemMessage(
-    #                 content="You MUST call the `format_scripture_answer` tool if the user question is about scripture content and the query tool has returned a result."
-    #             ),
-    #         ] + new_messages
-    #     else:
-    #         full_history = messages
-
-    #     # 🔍 Debug log (optional)
-    #     # print("\n🧠 LLM Full History:")
-    #     # for m in full_history:
-    #     #     print(f"- {m.type.upper()}: {m.content[:100]}...\n")
-
-    #     ai_response = llm.invoke(full_history)
-    #     return {"messages": messages + [ai_response]}
-
     graph = StateGraph(ChatState)
     graph.add_node("init", init_system_prompt_node)
-    graph.add_node("chat", chatNode)
+    graph.add_node("llm", chatNode)
     graph.add_node("tools", ToolNode(tools))
     graph.add_edge(START, "init")
-    graph.add_edge("init", "chat")
-    graph.add_conditional_edges("chat", tools_condition, "tools")
-    graph.add_edge("tools", "chat")
+    graph.add_edge("init", "llm")
+    graph.add_conditional_edges("llm", tools_condition, "tools")
+    graph.add_edge("tools", "llm")
 
     return graph.compile(checkpointer=memory)
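Since the graph is compiled with a checkpointer, callers drive it with a thread_id in the config so conversation state persists between turns. A minimal usage sketch follows; the thread id and the user question are made up for illustration and are not part of this commit.

    # Minimal driver sketch (assumed usage, not code from this repository).
    from langchain_core.messages import HumanMessage

    graph = generate_graph()
    config = {"configurable": {"thread_id": "demo-thread"}}  # hypothetical thread id
    result = graph.invoke(
        {"messages": [HumanMessage(content="What does the text say about duty?")]},
        config=config,
    )
    print(result["messages"][-1].content)  # last message is the model's reply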