Unable to create ChromaDB embeddings asynchronously

I'd like to create a status_checker API endpoint in FastAPI to track the creation of ChromaDB embeddings, and I'd like to build those embeddings asynchronously. Below is the code we tried, but it raises an error.

Version: llama-index 0.10.12

Code we tried

import os
import asyncio
from fastapi import FastAPI, File, UploadFile
from fastapi.responses import JSONResponse
import chromadb
from llama_index.vector_stores.chroma import ChromaVectorStore
from llama_index.core import VectorStoreIndex, StorageContext, SimpleDirectoryReader
from llama_index.core.retrievers import RecursiveRetriever
from llama_index.core.query_engine import RetrieverQueryEngine
import global_variable  # our module holding shared state (embed_model, llm, templates, status flag)

app = FastAPI()

@app.post("/upload")
async def upload_file(file: UploadFile = File(...)):
    try:
        # Ensure the 'docs' directory exists
        if not os.path.exists("docs"):
            os.makedirs("docs")

        # Write the file to the server with its original filename
        file_path = os.path.join("docs", file.filename)
        with open(file_path, "wb") as f:
            f.write(await file.read())

        # define_rag (shown below) lives in rag_define.py;
        # schedule it in the background and return immediately
        from rag_define import define_rag
        asyncio.create_task(define_rag())
        return JSONResponse(content={"message": "File uploaded successfully"})

    except Exception as e:
        return JSONResponse(content={"error": str(e)}, status_code=500)

@app.post("/status")
async def status_checker():
    return global_variable.upload_in_progress

async def define_rag():
    documents = SimpleDirectoryReader(
        input_dir="./docs", required_exts=[".docx", ".doc", ".pdf", ".txt"]
    ).load_data()
    if os.path.exists("./chroma_db"):
        print("************************* utilizing pre-generated embeddings from chroma_db folder")
        chroma_client = chromadb.PersistentClient(path="./chroma_db")
        chroma_collection = chroma_client.get_or_create_collection("quickstart")
        vector_store = ChromaVectorStore(chroma_collection=chroma_collection)
        vector_index_chunk = VectorStoreIndex.from_vector_store(
            vector_store,
            embed_model=global_variable.embed_model,
            use_async=True,
            show_progress=True,
        )
    else:
        chroma_client = chromadb.PersistentClient(path="./chroma_db")
        chroma_collection = chroma_client.get_or_create_collection("quickstart")
        vector_store = ChromaVectorStore(chroma_collection=chroma_collection)
        storage_context = StorageContext.from_defaults(vector_store=vector_store)
        # index = VectorStoreIndex.from_documents(documents, storage_context=storage_context)
        # all_nodes is built elsewhere (not shown); this is the line that fails
        vector_index_chunk = await VectorStoreIndex(
            all_nodes,
            embed_model=global_variable.embed_model,
            storage_context=storage_context,
            use_async=True,
            show_progress=True,
        )
    vector_retriever_chunk = vector_index_chunk.as_retriever(similarity_top_k=5)
    global_variable.retriever_chunk = RecursiveRetriever(
        "vector",
        retriever_dict={"vector": vector_retriever_chunk},
        node_dict=all_nodes_dict,  # also built elsewhere (not shown)
        verbose=True,
    )
    print("Vector store creation done")
    global_variable.upload_in_progress = 1
    global_variable.query_engine_chunk = RetrieverQueryEngine.from_args(
        global_variable.retriever_chunk,
        llm=global_variable.llm,
        text_qa_template=global_variable.text_qa_template,
    )

Error we got

vector_index_chunk = await VectorStoreIndex(all_nodes, embed_model=global_variable.embed_model, storage_context=storage_context, use_async=True, show_progress=True)
                               ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/lib/python3.11/site-packages/llama_index/core/indices/vector_store/base.py", line 74, in __init__
    super().__init__(
  File "/home/lib/python3.11/site-packages/llama_index/core/indices/base.py", line 91, in __init__
    index_struct = self.build_index_from_nodes(
                   ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/lib/python3.11/site-packages/llama_index/core/indices/vector_store/base.py", line 307, in build_index_from_nodes
    return self._build_index_from_nodes(nodes, **insert_kwargs)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/lib/python3.11/site-packages/llama_index/core/indices/vector_store/base.py", line 277, in _build_index_from_nodes
    run_async_tasks(tasks)
  File "/homelib/python3.11/site-packages/llama_index/core/async_utils.py", line 49, in run_async_tasks
    outputs: List[Any] = asyncio.run(_gather())
                         ^^^^^^^^^^^^^^^^^^^^^^
  File "/homelib/python3.11/asyncio/runners.py", line 186, in run
    raise RuntimeError(
RuntimeError: asyncio.run() cannot be called from a running event loop
/home/miniconda3/envs/lib/python3.11/site-packages/uvicorn/protocols/http/httptools_impl.py:-1: RuntimeWarning: coroutine 'run_async_tasks.<locals>._gather' was never awaited
RuntimeWarning: Enable tracemalloc to get the object allocation traceback
/home/lib/python3.11/site-packages/uvicorn/protocols/http/httptools_impl.py:-1: RuntimeWarning: coroutine 'VectorStoreIndex._async_add_nodes_to_index' was never awaited
RuntimeWarning: Enable tracemalloc to get the object allocation traceback
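
For context, the RuntimeError itself has nothing to do with ChromaDB. The traceback shows that llama-index's run_async_tasks helper calls asyncio.run() on the gathered embedding tasks, and asyncio.run() refuses to start a loop while another one is already running, which is exactly the situation inside a FastAPI endpoint served by uvicorn. A minimal standard-library sketch that reproduces the same error (and the same "coroutine ... was never awaited" warning):

import asyncio

def build_index_sync():
    # Stand-in for llama-index's run_async_tasks, which internally
    # calls asyncio.run() on the gathered embedding tasks.
    asyncio.run(asyncio.sleep(0))

async def handler():
    # We are already inside a running loop here (like an endpoint under
    # uvicorn), so the nested asyncio.run() above raises
    # "RuntimeError: asyncio.run() cannot be called from a running event loop".
    build_index_sync()

asyncio.run(handler())

One documented escape hatch is the nest_asyncio package, which the llama-index notebook examples apply so that nested asyncio.run() calls succeed; whether it behaves well under uvicorn is something we would still need to verify:

import nest_asyncio
nest_asyncio.apply()  # patch asyncio to tolerate nested asyncio.run() calls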

What we expect

This step should execute successfully:

asyncio.create_task(define_rag())

async def define_rag():
    documents = SimpleDirectoryReader(
        input_dir="./docs", required_exts=[".docx", ".doc", ".pdf", ".txt"]
    ).load_data()

And this status checker should return the current status while the create_task coroutine is still running:

@app.post("/status") 
async def status_checker():     
    return global_variable.upload_in_progress
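
To make the expected flow concrete, here is a minimal self-contained sketch of the behaviour we are after. The module-level flag is a stand-in for global_variable.upload_in_progress, the time.sleep is a placeholder for the real index build, and the build runs in a worker thread via asyncio.to_thread so that no second event loop is ever started:

import asyncio
import time
from fastapi import FastAPI

app = FastAPI()
upload_in_progress = 0  # stand-in for global_variable.upload_in_progress

def build_index():
    # Placeholder for the synchronous index build (use_async=False);
    # running it in a worker thread keeps the uvicorn loop free and
    # avoids any nested asyncio.run() call.
    time.sleep(5)

async def define_rag():
    global upload_in_progress
    await asyncio.to_thread(build_index)
    upload_in_progress = 1  # flipped once the build completes, as in our code

@app.post("/upload")
async def upload_file():
    # In production, keep a reference to the task so it is not garbage-collected.
    asyncio.create_task(define_rag())
    return {"message": "File uploaded successfully"}

@app.post("/status")
async def status_checker():
    return upload_in_progress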
