skeitel
skeitel

Reputation: 271

How to add question and GPT Api response variables to Gradio chatbot()?

I have a program that extracts the information from a previously-created and working JSON file, and then uses the chatGPT API to query this document. I need to feed the initial user input, and then the response to that input, into the bot so the conversation scrolls up normally, and I need to use the gr.Chatbot option (gr.Interface is not an option). Unfortunately the gr.Chatbot() docs are not detailed enough, so I can't solve it. I have created the function question_answer_NEW and also tried the "bot" function provided as an example in the Gradio docs, but with no results. Thank you. Here's the code:

import gradio as gr
import random
import time
from llama_index import SimpleDirectoryReader, GPTListIndex, readers, GPTSimpleVectorIndex, LLMPredictor, PromptHelper
from langchain.chat_models import ChatOpenAI
from langchain import OpenAI
import sys
import os
from IPython.display import Markdown, display
import gradio, openai
os.environ["OPENAI_API_KEY"] = 'sk-...' #present in code
# Conversation log shared by the handlers below, seeded with the system role.
# NOTE: the original module-level `global query/question/response` statements
# were removed — `global` is a no-op at module scope.
messages = [{"role": "system", "content": "You are a helpful customer service assistant."}]

def question_answer_NEW(question):
    """Query the on-disk vector index with *question*.

    Returns a ``(question, answer)`` tuple, where ``answer`` is the
    stripped response text, and appends the exchange to the shared
    ``messages`` log.
    """
    index = GPTSimpleVectorIndex.load_from_disk('index.json')
    # The original `for i in range(3)` loop returned on its first
    # iteration, so it never looped — a single query is equivalent.
    response = index.query(question, response_mode="compact")
    answer = response.response.strip()
    messages.append({"User asks": question, "System response": answer})
    print(question, answer)
    return question, answer


with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox() #placeholder = 'test'
    clear = gr.Button("Clear")

    def user(user_message, history):
        """Append the user's turn with an empty bot slot; clear the textbox."""
        return "", history + [[user_message, None]]

    def bot(history):
        """Fill in the bot reply for the most recent user turn.

        The latest user message is ``history[-1][0]`` — no global needed
        (the original read an undefined global ``user_message``).
        """
        index = GPTSimpleVectorIndex.load_from_disk('index.json')
        bot_message = index.query(history[-1][0], response_mode="compact")
        # Store the response *text*, not the Response object itself.
        history[-1][1] = bot_message.response.strip()
        time.sleep(1)
        return history

    # Two-step update: `user` echoes the message into the chat history,
    # then `bot` generates the answer for that last turn.
    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(bot, chatbot, chatbot)
    clear.click(lambda: None, None, chatbot, queue=False)

if __name__ == "__main__":
    demo.launch()


Upvotes: 0

Views: 918

Answers (1)

skeitel
skeitel

Reputation: 271

I made it work. This is the solution:

from IPython.display import Markdown, display
from llama_index import SimpleDirectoryReader, GPTListIndex, readers, GPTSimpleVectorIndex, LLMPredictor, PromptHelper
from langchain.chat_models import ChatOpenAI
from langchain import OpenAI
import gradio as gr
import random, time
import sys
import os

# Supply your key here; note hard-coding secrets in source is unsafe for shared code.
os.environ["OPENAI_API_KEY"] = '' #insert your key there

def construct_index(directory_path):
    """Build a GPTSimpleVectorIndex from the documents in *directory_path*.

    Persists the index to ``index.json`` and also publishes it as the
    module-level ``index`` global. Returns the index.
    """
    global index  # single declaration — the original declared it twice
    # LLM / prompt sizing knobs.
    max_input_size = 4096      # maximum input size
    num_outputs = 500          # number of output tokens
    max_chunk_overlap = 20     # maximum chunk overlap
    chunk_size_limit = 600     # chunk size limit

    llm_predictor = LLMPredictor(llm = ChatOpenAI(temperature=0.1, model_name='gpt-3.5-turbo', max_tokens=num_outputs)) #original temp was .5
    prompt_helper = PromptHelper(max_input_size, num_outputs, max_chunk_overlap, chunk_size_limit=chunk_size_limit)

    documents = SimpleDirectoryReader(directory_path).load_data()
    index = GPTSimpleVectorIndex(
        documents, llm_predictor=llm_predictor, prompt_helper=prompt_helper)
    index.save_to_disk('index.json')
    return index

# Build (or rebuild) the vector index from the source documents at startup.
construct_index("context_data/data/done")

def chat(chat_history, user_input):
    """Answer *user_input* from the index, streaming character by character.

    A generator: yields the chat history with the partial answer appended,
    so gr.Chatbot renders a typing effect.
    """
    index = GPTSimpleVectorIndex.load_from_disk('index.json')
    bot_response = index.query(user_input)
    partial = ""
    # `bot_response.response` is already a string; iterate its characters
    # directly (the original wrapped it in a redundant ''.join and
    # appended a no-op empty string each step).
    for letter in bot_response.response:
        partial += letter
        yield chat_history + [(user_input, partial.strip())]
     
#Build interface
with gr.Blocks() as demo:
    with gr.Tab('Chat with this helpful AI assistant'):
          chatbot = gr.Chatbot()
          message = gr.Textbox (label = 'Write your message here and press "enter"')
          # Stream the generator's partial answers into the chatbot, then
          # clear the textbox once the submission has been handled.
          message.submit(chat, [chatbot, message], chatbot).then(lambda: None, None, message, queue=False)

# queue() is required because `chat` is a generator (streaming output).
demo.queue().launch(debug = True, share = True)
     

Upvotes: 1

Related Questions