artydev

The smallest chatbot implementation using LangChain, Ollama, and Python

Thanks to: Understand Ollama and LangChain Chat History in 10 minutes
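
To run it you need Ollama installed and serving locally, with the model pulled first (ollama pull llama3), and the LangChain packages installed, for example with pip install langchain-core langchain-community (exact package names can vary slightly depending on your LangChain version).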

from langchain_community.llms import Ollama
from langchain_core.messages import HumanMessage, AIMessage
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder

llm = Ollama(model="llama3")

# Seed the chat history so the model already knows the user's name
chat_history = [
    ("human", "My name is John")
]

prompt_template = ChatPromptTemplate.from_messages([
    ("system", "Your name is HAL. Greet the user and answer questions with simple responses."),
    MessagesPlaceholder(variable_name="chat_history"),
    ("human", "{input}")
])

chain = prompt_template | llm

def main():
    while True:
        question = input("You : ")
        if question == "done":
            return
        response = chain.invoke({"input": question, "chat_history": chat_history})
        # Store both sides of the exchange so the next turn sees the full history
        chat_history.append(HumanMessage(content=question))
        chat_history.append(AIMessage(content=response))

        print("AI  : " + response)

if __name__ == "__main__":
    main()
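
Note: the Ollama class in langchain_community.llms is deprecated in recent LangChain releases in favor of the separate langchain-ollama package. A minimal sketch of the same chain using ChatOllama (assuming pip install langchain-ollama) could look like this; because it is a chat model, invoke returns a message object, so you read response.content instead of a raw string:

from langchain_ollama import ChatOllama

# Same prompt_template and chat_history as above; only the model wrapper changes
llm = ChatOllama(model="llama3")
chain = prompt_template | llm

# A chat model returns an AIMessage instead of a plain string
response = chain.invoke({"input": "What is my name?", "chat_history": chat_history})
print("AI  : " + response.content)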

Example of a discussion:

[screenshot of a sample chat session with the bot]
