Reputation: 759
I can't figure out how to use a custom function as a tool with langgraph + ChatOpenAI and Ollama. The "TavilySearchResults" and "DuckDuckGoSearchResults" community tools work fine, but even the simplest custom function produces an error Assistant: Error: <function_name> is not a valid tool, try one of [tool].
I'm ultimately trying to use a custom tool (defined in another script), but I can't seem to get anything but the example's tavily to work.
How do I get custom functions to work in this situation?
Here's my script, taken mostly from https://langchain-ai.github.io/langgraph/tutorials/introduction/#requirements
import os
import sys
from typing import Annotated

from dotenv import load_dotenv
from typing_extensions import TypedDict
from langchain_openai import ChatOpenAI
from langchain_community.tools.tavily_search import TavilySearchResults
from langchain_community.tools import DuckDuckGoSearchResults
from langchain_community.utilities import DuckDuckGoSearchAPIWrapper
from langchain_core.messages import BaseMessage
from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import StateGraph
from langgraph.graph.message import add_messages
from langgraph.prebuilt import ToolNode, tools_condition
from langchain.pydantic_v1 import BaseModel, Field
from langchain.tools import BaseTool, StructuredTool, tool
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
from web_search import ddg_search, get_top_search_results_pages
load_dotenv()
LANGCHAIN_API_KEY = os.getenv("LANGCHAIN_API_KEY")
TAVILY_API_KEY = os.getenv("TAVILY_API_KEY")
OLLAMA_BASE_URL = "http://192.168.76.10:11434"
# go to http://192.168.76.10:3000/ to add more models, gear at the top of the chat window
MODEL_NAME = "llama3.2:3b"
#### Define the tools and add them to the tools list/array
# This works by itself
# tool = TavilySearchResults(max_results=2)
# tools = [tool]
# This works by itself
# tool = DuckDuckGoSearchResults(max_results=2)
# tools = [tool]
@tool
def fake_search(query: str) -> str:
"""Fake search function for testing purposes."""
return f"FAKE SEARCH: {query}"
tools = [fake_search]
# @tool
# def ddg_search(query: str, max_results: int = None, source: str = None) -> str:
# """Use DuckDuckGo to do a web search for the top search results for a query.
# Args:
# query (str): The query to search for.
# max_results (int): The maximum number of search results to return.
# source (str): The source to look from.
# """
# wrapper = DuckDuckGoSearchAPIWrapper(max_results=max_results)
# ddgsr = DuckDuckGoSearchResults(api_wrapper=wrapper, source=source)
# results = ddgsr.invoke(query)
# return results
# tools = [ddg_search]
# This is what I ultimately want to work
# @tool
# def ddg_search_custom(query: str) -> str:
# """Use DuckDuckGo to search for the top search results for a query.
# Args:
# query (str): The query to search for.
# """
# # My custom search function
# # results = get_top_search_results_pages(query, max_results=2)
# results = "THIS IS A CUSTOM TOOL CALL OUTPUT"
# return results
# tools = [ddg_search]
#### Define the LLMs and bind the tools to the LLMs
llm = ChatOpenAI(model=MODEL_NAME
,api_key="ollama"
,base_url=OLLAMA_BASE_URL + "/v1"
# ,api_key=LANGCHAIN_API_KEY
)
llm_with_tools = llm.bind_tools(tools)
#### Define the state for the graph
class State(TypedDict):
messages: Annotated[list, add_messages]
#### Define the chatbot function to use as a node in the graph
def chatbot(state: State):
return {"messages": [llm_with_tools.invoke(state["messages"])]}
#### Define the graph
graph_builder = StateGraph(State)
#### Add the chatbot function as a node to the graph
graph_builder.add_node("chatbot", chatbot)
#### Add the tools as a node in the graph
tool_node = ToolNode(tools=[tool])
graph_builder.add_node("tools", tool_node)
#### Add the conditional edges to the graph
graph_builder.add_conditional_edges("chatbot",
tools_condition,)
#### Any time a tool is called, we return to the chatbot to decide the next step
graph_builder.add_edge("tools", "chatbot")
graph_builder.set_entry_point("chatbot")
graph = graph_builder.compile()
############################################################
########## code for running below ##########
############################################################
def stream_graph_updates(user_input: str):
for event in graph.stream({"messages": [{"role": "user", "content": user_input}]}):
for value in event.values():
print("Assistant:", value["messages"][-1].content)
user_input = "What do you know about using custom functions for tools in LangGraph?"
print("User: " + user_input)
stream_graph_updates(user_input)
quit()
Output:
For example, replace imports like: `from langchain.pydantic_v1 import BaseModel`
with: `from pydantic import BaseModel`
or the v1 compatibility namespace if you are working in a code base that has not been fully upgraded to pydantic 2 yet. from pydantic.v1 import BaseModel
exec(code, run_globals)
User: What do you know about using custom functions for tools in LangGraph?
Assistant:
Assistant: Error: fake_search is not a valid tool, try one of [tool].
Assistant: I found that in LangGraph, custom functions can be used within tools to extend their functionality. Here are some key points to consider:
... <omitted lengthy answer>
Upvotes: 0
Views: 16