santiago calvo

Reputation: 103

Circular reference detected in LangGraph (Node.js)

I've been going crazy trying to fix this issue. I'm building a personal project with LangGraph and I keep getting this error:

file:///E:/proyects/NPC%20Agent%20LLM/node_modules/@langchain/langgraph-checkpoint/dist/serde/jsonplus.js:27
            throw new TypeError("Circular reference detected");
                  ^
TypeError: Circular reference detected
    at _serialize (file:///E:/proyects/NPC%20Agent%20LLM/node_modules/@langchain/langgraph-checkpoint/dist/serde/jsonplus.js:27:19)
    at file:///E:/proyects/NPC%20Agent%20LLM/node_modules/@langchain/langgraph-checkpoint/dist/serde/jsonplus.js:32:32
    at Array.map (<anonymous>)
    at _serialize (file:///E:/proyects/NPC%20Agent%20LLM/node_modules/@langchain/langgraph-checkpoint/dist/serde/jsonplus.js:32:18)
    at file:///E:/proyects/NPC%20Agent%20LLM/node_modules/@langchain/langgraph-checkpoint/dist/serde/jsonplus.js:41:98
    at Array.map (<anonymous>)
    at _serialize (file:///E:/proyects/NPC%20Agent%20LLM/node_modules/@langchain/langgraph-checkpoint/dist/serde/jsonplus.js:41:58)
    at file:///E:/proyects/NPC%20Agent%20LLM/node_modules/@langchain/langgraph-checkpoint/dist/serde/jsonplus.js:41:98
    at Array.map (<anonymous>)
    at _serialize (file:///E:/proyects/NPC%20Agent%20LLM/node_modules/@langchain/langgraph-checkpoint/dist/serde/jsonplus.js:41:58)

This is my code so far:

import {
  AIMessage,
  BaseMessage,
  HumanMessage,
  SystemMessage,
} from "@langchain/core/messages";
import { tool } from "@langchain/core/tools";
import { z } from "zod";
import { ChatOpenAI } from "@langchain/openai";
import { START, StateGraph, END } from "@langchain/langgraph";
import { MemorySaver, Annotation } from "@langchain/langgraph";
import { ToolNode } from "@langchain/langgraph/prebuilt";
import { SYSTEM_PROMPT, REFLECT_PROMPT } from "../prompts/prompts.js";
import { InterfaceService } from "./interfaceService.js";
import { logger } from "./logging.js";

export class LangchainGraphService {
  private graphState;
  private workflow;
  private memory: MemorySaver;
  private threadId: string;
  private model: ChatOpenAI;
  private interfaceService: InterfaceService;

  constructor(apiKey: string) {
    // Initialize the model and bind tools
    this.model = new ChatOpenAI({
      model: "gpt-4o",
      temperature: 0,
      apiKey: apiKey, // Pass the OpenAI API key dynamically
    });

    // Initialize the graph state
    this.graphState = Annotation.Root({
      messages: Annotation<BaseMessage[]>({
        reducer: (x, y) => x.concat(y),
      }),
      screenshot_state: Annotation<string>(),
    });

    // Initialize tools
    const pcTool = tool(
      async ({ query }: { query: string }) => {
        logger.log(`Executing tool for query: ${query}`);
        return "COMMAND SUCCESSFUL"; // Placeholder logic for tool functionality
      },
      {
        name: "use_pc",
        description: "Perform computer operations.",
        schema: z.object({
          query: z.string().describe("Describe what to do with the computer."),
        }),
      }
    );

    const tools = [pcTool];
    const toolNode = new ToolNode(tools);

    this.model = this.model.bindTools(tools) as ChatOpenAI;

    // Initialize memory for state persistence
    this.memory = new MemorySaver();

    // Thread ID for persistence of conversation threads
    this.threadId = "default-thread";

    // Initialize the interface service
    this.interfaceService = new InterfaceService();

    // Define the state graph workflow
    this.workflow = new StateGraph(this.graphState)
      .addNode("screenshot" as typeof START, this.screenshotNode.bind(this))
      .addNode("reflect" as typeof START, this.reflectNode.bind(this))
      .addNode("agent" as typeof START, this.callModel.bind(this))
      .addNode("tools" as typeof START, toolNode)
      .addEdge(START, "screenshot" as typeof START)
      .addEdge("screenshot" as typeof START, "reflect" as typeof START)
      .addEdge("reflect" as typeof START, "agent" as typeof START)
      .addConditionalEdges(
        "agent" as typeof START,
        this.shouldContinue.bind(this),
        ["tools" as typeof START, END]
      )
      .addEdge("tools" as typeof START, "agent" as typeof START);
  }

  /**
   * Takes a screenshot and stores it in the graph state.
   */
  private async screenshotNode(_: typeof this.graphState.State) {
    const img = await this.interfaceService.captureScreenshot();
    logger.log(`Screenshot captured: ${img ? "success" : "failure"}`);
    return { screenshot_state: img || "" };
  }

  /**
   * Reflects on the task and appends the reflection to the state.
   */
  private async reflectNode(state: typeof this.graphState.State) {
    const notes = state.messages.filter((msg) => msg instanceof AIMessage);
    const notesAsString = notes.map((msg) => msg.content).join("\n");
    const completePrompt = `${REFLECT_PROMPT}\n${notesAsString}`;

    logger.log("Reflection prompt: " + completePrompt);
    const response = await this.model.invoke([
      new SystemMessage(SYSTEM_PROMPT),
      new HumanMessage(completePrompt),
    ]);

    return {
      messages: [...state.messages, response],
    };
  }

  /**
   * Calls the model to process the state and returns the updated messages.
   */
  private async callModel(state: typeof this.graphState.State) {
    const response = await this.model.invoke(state.messages);
    return {
      messages: [...state.messages, response],
    };
  }

  /**
   * Determines if the workflow should continue to tool calls or end.
   */
  private shouldContinue(state: typeof this.graphState.State) {
    const lastMessage = state.messages[state.messages.length - 1] as AIMessage;
    return lastMessage.tool_calls?.length ? "tools" : END;
  }

  /**
   * Runs the graph workflow with the provided prompt.
   */
  public async run(prompt: string, threadId?: string): Promise<string> {
    logger.log("Starting new run with prompt: " + prompt);

    // Set a custom thread ID if provided (for conversation continuity)
    if (threadId) {
      this.threadId = threadId;
    }

    const newMessage = new HumanMessage(prompt);

    try {
      // Compile the workflow with memory persistence
      const app = this.workflow.compile({ checkpointer: this.memory });

      // Run the workflow
      const finalState = await app.invoke(
        {
          messages: [newMessage],
          screenshot_state: "",
        },
        { configurable: { thread_id: this.threadId } }
      );

      logger.log("Final state: " + JSON.stringify(finalState));
      return finalState.messages[finalState.messages.length - 1].content;
    } catch (err) {
      logger.error("Error running the workflow: " + err);
      throw err;
    }
  }
}

From what I've gathered and the things I've tried, the problem seems to be related to storing messages in the state. I'm honestly not sure what is causing it, but it looks like some sort of circular reference ends up in the state.
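
To try to narrow it down, I put together a small debug helper. This is just a sketch for my own testing: the findCycle name and the idea of calling it on the state before a node returns are mine, not part of LangGraph. It walks an object the same way the serializer quoted below does and reports the path of the first object that turns out to be its own ancestor:

// Debug-only sketch (not wired into the graph): walk an object and return
// the path of the first true cycle, i.e. an object reachable from itself.
// Shared references that are not cycles return null, because the reference
// is removed from `seen` once its subtree has been visited.
function findCycle(
  value: unknown,
  seen = new WeakSet<object>(),
  path = "$"
): string | null {
  if (value === null || typeof value !== "object") return null;
  if (seen.has(value)) return path;
  seen.add(value);
  for (const [key, child] of Object.entries(value as Record<string, unknown>)) {
    const hit = findCycle(child, seen, `${path}.${key}`);
    if (hit) return hit;
  }
  seen.delete(value);
  return null;
}

// How I plan to use it inside a node, right before returning:
// logger.log("cycle at: " + findCycle({ messages: state.messages }));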

For some more context, I found that this is the method that detects the circular reference:

const _serialize = (value, seen = new WeakSet()) => {
    const defaultValue = _default("", value);
    if (defaultValue === null) {
        return "null";
    }
    else if (typeof defaultValue === "string") {
        return JSON.stringify(defaultValue);
    }
    else if (typeof defaultValue === "number" ||
        typeof defaultValue === "boolean") {
        return defaultValue.toString();
    }
    else if (typeof defaultValue === "object") {
        if (seen.has(defaultValue)) {
            throw new TypeError("Circular reference detected");
        }
        seen.add(defaultValue);
        if (Array.isArray(defaultValue)) {
            const result = `[${defaultValue
                .map((item) => _serialize(item, seen))
                .join(",")}]`;
            seen.delete(defaultValue);
            return result;
        }
        else if (isLangChainSerializable(defaultValue)) {
            return JSON.stringify(defaultValue);
        }
        else {
            const entries = Object.entries(defaultValue).map(([k, v]) => `${JSON.stringify(k)}:${_serialize(v, seen)}`);
            const result = `{${entries.join(",")}}`;
            seen.delete(defaultValue);
            return result;
        }
    }
    // Only be reached for functions or symbols
    return JSON.stringify(defaultValue);
};

Which comes from

@langchain/langgraph-checkpoint/dist/serde/jsonplus.js:27
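
If I'm reading that right, it only throws for a true cycle (an object that contains itself somewhere down its own tree), because `seen.delete` runs once an object's children are done, so merely sharing the same reference in two places should be fine. A tiny standalone example (nothing to do with my actual state, just me checking my understanding) behaves the same way with plain JSON.stringify:

// A self-referencing object: this is the shape the serializer rejects.
const selfRef: Record<string, unknown> = { name: "loop" };
selfRef.self = selfRef;

// A shared but non-cyclic reference: the same object appears twice,
// but it is never its own ancestor, so it serializes fine.
const shared = { x: 1 };
const twoBranches = { left: shared, right: shared };

try {
  JSON.stringify(selfRef); // throws "Converting circular structure to JSON"
} catch (e) {
  console.log("cycle:", (e as Error).message);
}
console.log(JSON.stringify(twoBranches)); // {"left":{"x":1},"right":{"x":1}}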

I'm at a loss right now; this might be a bug, or I might be doing something wrong. Any help is much appreciated. Thanks!

Upvotes: 0

Views: 66

Answers (0)