MemGPT + AutoGen + Open Source LLM


2 Agents

import autogen
from memgpt.autogen.memgpt_agent import create_memgpt_autogen_agent_from_config

# Point both agents at the local OpenAI-compatible server
config_list = [
    {
        "model": "default",
        "api_base": "http://localhost:5001/v1",
        "api_key": "NULL",
    }
]

llm_config = {"config_list": config_list}

# Human stand-in that executes the code the coder produces
user_proxy = autogen.UserProxyAgent(
    name="user_proxy",
    code_execution_config={
        "work_dir": "coding",
    },
    default_auto_reply="...",  # non-empty auto-reply keeps the chat moving
)

# MemGPT-backed coding agent; the agent name is the first positional argument
coder = create_memgpt_autogen_agent_from_config(
    "MemGPTcoder",
    llm_config=llm_config,
    system_message="You are a python developer",
)

user_proxy.initiate_chat(
    coder,
    message="create a python function to find the first 5 even numbers",
)
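Because the coder is a MemGPT agent, it keeps long-term memory across turns, so you can continue the same conversation and it should recall the earlier request. A minimal sketch of a follow-up turn appended to the script above (send is the standard AutoGen call for continuing an existing chat; the prompt is only illustrative):

# Illustrative follow-up turn: the MemGPT agent should remember the earlier function
user_proxy.send(
    "Now change that function to return the first 5 odd numbers instead",
    coder,
)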

Installing Text Generation Web UI

git clone https://github.com/oobabooga/text-generation-webui
cd text-generation-webui
bash start_macos.sh --api --listen --extensions openai

export OPENAI_API_BASE="http://localhost:5000"
export BACKEND_TYPE="webui"
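Before pointing MemGPT at the server, it is worth checking that the OpenAI-compatible API is actually up. (The repo also ships start_linux.sh and start_windows.bat if you are not on macOS, and note that older webui releases served the API on port 5001, which is why the 2-agent script above targets http://localhost:5001/v1 while newer builds use 5000.) A minimal sketch using requests, assuming the standard /v1/models route:

import os
import requests

# Query the model list from the OpenAI-compatible endpoint (assumed standard route)
base = os.environ.get("OPENAI_API_BASE", "http://localhost:5000")
resp = requests.get(f"{base}/v1/models", timeout=10)
resp.raise_for_status()
print(resp.json())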

Run the code (save the 2-agent script above as app.py):

python app.py

3 Agents

"""Example of how to add MemGPT into an AutoGen groupchat

Begin by doing:
pip install "pyautogen[teachable]"
pip install pymemgpt
or
pip install -e . (inside the MemGPT home directory)
"""
import autogen
from memgpt.autogen.memgpt_agent import create_memgpt_autogen_agent_from_config

import random
random_seed = random.randint(0, 1000)
config_list = [
    {
        "model": "gpt-4",
        "api_base": "http://localhost:8000",
        "api_key": "NULL",
        "seed": random_seed,
    }
]

llm_config = {"config_list": config_list, "seed": random_seed}
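In the pyautogen releases this example targets, the seed in llm_config also keys AutoGen's response cache, so randomizing it per run (as above) keeps the agents from replaying cached replies across runs.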

The user agent

user_proxy = autogen.UserProxyAgent(
    name="User_proxy",
    system_message="I am Mervin Praison, a human admin.",
    code_execution_config={"work_dir": "groupchat"},
    default_auto_reply="...",  # non-empty auto-reply is required for LM Studio
)

The agent playing the role of the product manager (PM)

pm = autogen.AssistantAgent(
    name="Product_manager",
    system_message="Creative in software product ideas.",
    llm_config=llm_config,
    default_auto_reply="...",  # non-empty auto-reply is required for LM Studio
)

coder = create_memgpt_autogen_agent_from_config(
    "MemGPT_coder",
    llm_config=llm_config,
    system_message=f"You are participating in a group chat with a user ({user_proxy.name}) "
    f"and a product manager ({pm.name}).",
)

groupchat = autogen.GroupChat(agents=[user_proxy, pm, coder], messages=[], max_round=12)
manager = autogen.GroupChatManager(groupchat=groupchat, llm_config=llm_config)

user_proxy.initiate_chat(
    manager,
    message="Create python code that prints the first 10 numbers of the fibonacci sequence and save the file",
)
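The GroupChatManager picks the next speaker after every message, so the product manager can shape the idea while the MemGPT coder writes (and remembers) the code; the conversation ends after max_round=12 rounds at the latest.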

Required: LiteLLM running on port 8000 and Text Generation Web UI on port 5000

export OPENAI_API_BASE="http://127.0.0.1:5000"
export BACKEND_TYPE="webui"

LiteLLM

litellm --model ollama/mistral --add_function_to_prompt --drop_params --request_timeout 1000 --debug --max_tokens 5000
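Before launching the agents, you can confirm the proxy is routing to Ollama by calling it directly with the openai client. A minimal sketch (openai 0.x style, matching the api_base and api_key values used in the config above; a LiteLLM proxy started with --model typically serves that model regardless of the name you request):

import openai

openai.api_base = "http://localhost:8000"
openai.api_key = "NULL"  # the local proxy does not validate keys

# One-off chat completion to verify the ollama/mistral route works
resp = openai.ChatCompletion.create(
    model="gpt-4",  # passed through to the proxy's configured model
    messages=[{"role": "user", "content": "Say hello in one word."}],
)
print(resp["choices"][0]["message"]["content"])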

Using LM Studio

export OPENAI_API_BASE="http://127.0.0.1:1234"
export BACKEND_TYPE="lmstudio"
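LM Studio's built-in local server exposes an OpenAI-compatible API on port 1234 by default, which is what both the export above and the config below point at.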

import autogen
from memgpt.autogen.memgpt_agent import create_memgpt_autogen_agent_from_config

import random
random_seed = random.randint(0, 1000)

config_list = [
    {
        "model": "gpt-4",
        "api_base": "http://localhost:1234/v1",
        "api_key": "NULL",  # LM Studio does not check the key, but the client expects one
    }
]

llm_config = {"config_list": config_list, "seed": random_seed}

pm = autogen.AssistantAgent(
    name="Product_manager",
    system_message="Creative in software product ideas.",
    llm_config=llm_config,
    default_auto_reply="...",
)

user_proxy = autogen.UserProxyAgent(
    name="User_proxy",
    system_message="I am Mervin Praison, a human admin.",
    code_execution_config={"work_dir": "coding"},
    default_auto_reply="...",
)

coder = create_memgpt_autogen_agent_from_config(
    "MemGPT_coder",
    llm_config=llm_config,
    system_message=f"You are participating in a group chat with a user ({user_proxy.name}) "
    f"and a product manager ({pm.name}).",
)

groupchat = autogen.GroupChat(agents=[user_proxy, pm, coder], messages=[], max_round=12)
manager = autogen.GroupChatManager(groupchat=groupchat, llm_config=llm_config)

user_proxy.initiate_chat(
    manager,
    message="Create python code that prints the first 10 numbers of the fibonacci sequence and save the file",
)
