Llama Index + Supabase + Gradio
import os

import gradio as gr
from llama_index import StorageContext, SimpleDirectoryReader, VectorStoreIndex
from llama_index.vector_stores import SupabaseVectorStore
Set up environment variable and data
# NOTE(review): hard-coding a key placeholder in source is unsafe — prefer
# reading OPENAI_API_KEY from the environment, like DB_PASSWORD below.
os.environ['OPENAI_API_KEY'] = "[your_openai_api_key]"

# Load every document found under ./data into memory for indexing.
documents = SimpleDirectoryReader('data').load_data()

# Connection for the Supabase (Postgres + pgvector) instance that stores the
# embeddings. DB_PASSWORD must be provided via the environment.
DB_PASSWORD = os.environ['DB_PASSWORD']
DB_CONNECTION = f"postgresql://postgres:{DB_PASSWORD}@localhost:5431/db"
vector_store = SupabaseVectorStore(
    postgres_connection_string=DB_CONNECTION,
    collection_name='base_demo'
)

# BUG FIX: the original built the index first and never used `vector_store`,
# so embeddings went to the default in-memory store (persisted locally) and
# the Supabase collection stayed empty. Wiring the store in through a
# StorageContext makes the embeddings actually land in Supabase.
storage_context = StorageContext.from_defaults(vector_store=vector_store)
index = VectorStoreIndex.from_documents(documents, storage_context=storage_context)
index.storage_context.persist()

# Query engine used by the Gradio callback below.
query_engine = index.as_query_engine()
def main(question: str) -> str:
    """Answer *question* using the document index.

    Gradio hands in the textbox contents as a string. The query engine
    returns a Response object, so coerce it to ``str`` explicitly for the
    output textbox instead of relying on Gradio to stringify it.
    """
    response = query_engine.query(question)
    return str(response)
Gradio interface
# Build and serve the web UI.
# BUG FIX: `gr.inputs.*` / `gr.outputs.*` were deprecated in Gradio 3 and
# removed in Gradio 4 — components now live at the top level of the package.
ui = gr.Interface(
    fn=main,
    inputs=gr.Textbox(placeholder="Enter your question about the author..."),
    outputs=gr.Textbox(),
)

# Starts a local HTTP server (blocking) and opens the interface.
ui.launch()
Top comments (0)