Reputation: 1
async def get_response(self, messages, model):
try:
memory = self.memory
llm = ChatOllama(model=model)
config = {"configurable": {"thread_id": "abc123"}}
prompt = ChatPromptTemplate.from_messages(
[
("system", "You are a helpful assistant"),
MessagesPlaceholder("chat_history", optional=True),
("user", "{input}"),
MessagesPlaceholder("agent_scratchpad"),
]
)
agent = create_openai_functions_agent(llm=llm, tools=self.tools, prompt=prompt)
agent_executor = AgentExecutor(agent=agent, tools=self.tools)
# Extract the user input from the messages
user_input = next((message["content"] for message in messages if message["role"] == "user"), "")
# Wrap the list of messages in a dictionary and include the user input
formatted_input = {
"messages": [{"role": message["role"], "content": message["content"]} for message in messages],
"input": user_input
}
response = await agent_executor.ainvoke(formatted_input)
if response is None:
logging.error("Received None response from ainvoke")
return "Error: No response received from the agent."
# Check if the response includes intermediate steps
if isinstance(response, dict):
# Attempt to get intermediate steps if they exist
intermediate_steps = response.get('intermediate_steps', None)
final_answer = response.get('final_answer', 'No final answer found.')
if intermediate_steps is not None:
# Log or process intermediate steps if needed
for step in intermediate_steps:
logging.debug(f"Intermediate Step: {step}")
return final_answer
Whenever this function is called, it will return: 'NoneType' object is not iterable.
I expect the output to be the result of the query. Because I want to use function calling with Ollama, only ChatOllama(model=model) can be used (just that you are informed) because ChatOllama is the only class that has .bind_tools() method. All other implementations of Ollama don't implement .bind_tools().
I'm clueless at this point.
Upvotes: 0
Views: 76