79578176

Date: 2025-04-16 21:56:56
Score: 0.5
Natty:
Report link

The other workaround I found is shown below.
I am not sure if this is the correct approach?
~~~

# Chat model shared by the graph nodes; temperature=1.0 yields highly varied replies.
llm = ChatOpenAI(temperature=1.0)

class State(TypedDict):
  """Graph state: a list of messages merged via the add_messages reducer."""
  messages : Annotated[list, add_messages]
  
graph_builder = StateGraph(State) 

# NOTE(review): debug print of the builder object — presumably leftover; confirm.
print(graph_builder)
# Global conversation log; past_message_node appends each turn's messages here.
chat_history = []

def chatbot(state: State):
  """LLM node: feed the accumulated messages to the model and return its reply.

  The reply is placed under the "messages" key so the add_messages reducer
  merges it into the conversation state.
  """
  reply = llm.invoke(state['messages'])
  return {"messages": reply}
  

  



def past_message_node(state: State):
  """Logging node: record the current messages in the global chat_history,
  print the full history so far, and pass the state through unchanged."""
  chat_history.append(state['messages'])
  print("Conversation history")
  history_so_far = chat_history
  print("Message History:", history_so_far)
  return state
  

# Wire the graph: START -> chatbot -> past_message_node -> END.
graph_builder.add_node("chatbot", chatbot)
graph_builder.add_edge(START, "chatbot")
graph_builder.add_node("past_message_node", past_message_node)
graph_builder.add_edge("chatbot", "past_message_node")
graph_builder.add_edge("past_message_node", END)
# Compile the builder into a runnable graph.
graph = graph_builder.compile()
# Simple REPL: stream each user turn through the graph and print the reply.
# (The redundant `chat_history = []` that used to sit here was removed —
# the list is already created empty right after the State definition, and
# rebinding it here would orphan the list other code holds a reference to.)
while True:
  user_input = input("User: ")

  # 'quit' or 'q' in any case ends the session.
  if user_input.lower() in ['quit', 'q']:
    print("Chat Ended")
    break
  for event in graph.stream({"messages": ("user", user_input)}):
    for value in event.values():
      messages = value["messages"]
      # A node may return either a single message or a list of messages.
      if isinstance(messages, list):
        last_message = messages[-1]
      else:
        last_message = messages
      # Fixed user-facing typo: "Assistent" -> "Assistant".
      print("Assistant: ", last_message.content)

~~~

Reasons:
  • Long answer (-1):
  • Has code block (-0.5):
  • Contains question mark (0.5):
  • Self-answer (0.5):
  • Low reputation (1):
Posted by: Hari Umesh