Support for tools.
commit 1b377c9303 (parent 0c47fcab8d)

Makefile (5 changes)

@@ -1,6 +1,7 @@
 DEMO_REPLIKA = .venv/bin/ragent.demo_replika
 DEMO_DISCUSS = .venv/bin/ragent.demo_discuss
 DEMO_BATCH_MESSAGES = .venv/bin/ragent.demo_batch_messages
+DEMO_TOOLS = .venv/bin/ragent.demo_tools
 PIP = .venv/bin/pip

 default:
@@ -24,3 +25,7 @@ demo_discuss:
 demo_batch_messages:
 	@echo "Executing Batch Messages Demo."
 	$(DEMO_BATCH_MESSAGES)
+
+demo_tools:
+	@echo "Executing Tools Demo."
+	$(DEMO_TOOLS)

@@ -24,4 +24,5 @@ console_scripts =
 ragent.demo_discuss = ragent.demo_discuss:main
 ragent.demo_replika = ragent.demo_replika:main
 ragent.demo_batch_messages = ragent.demo_batch_messages:main
+ragent.demo_tools = ragent.demo_tools:main

@@ -26,14 +26,20 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.


import openai
from openai import OpenAI
from typing_extensions import override
from openai import AssistantEventHandler
import uuid
import asyncio
import pathlib
import logging
import sys
import os
from typing import Any, Dict
import json
import inspect

OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", None)

@@ -155,8 +161,47 @@ class VectorStore:
         return self._exists


+class EventHandler(AssistantEventHandler):
+
+    def __init__(self, agent, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.agent = agent
+        self.message = ""
+
+    @override
+    def on_event(self, event):
+        if event.event == 'thread.run.requires_action':
+            run_id = event.data.id
+            self.handle_requires_action(event.data, run_id)
+        elif event.event == 'thread.message.completed':
+            self.message = event.data.content[0].text.value
+        elif event.event == 'thread.message.delta':
+            self.message += event.data.delta.content[0].text.value
+
+    def handle_requires_action(self, data, run_id):
+        tool_outputs = []
+
+        for tool in data.required_action.submit_tool_outputs.tool_calls:
+            for function in self.agent.tools:
+                if function.__name__ == tool.function.name:
+                    tool_outputs.append({"tool_call_id": tool.id, "output": str(function(**json.loads(tool.function.arguments)))})
+
+        self.submit_tool_outputs(tool_outputs, run_id)
+
+    def submit_tool_outputs(self, tool_outputs, run_id):
+        event_handler = EventHandler(self.agent)
+        with self.agent.client.beta.threads.runs.submit_tool_outputs_stream(
+            thread_id=self.current_run.thread_id,
+            run_id=self.current_run.id,
+            tool_outputs=tool_outputs,
+            event_handler=event_handler
+        ) as stream:
+            for text in stream.text_deltas:
+                self.message += text
+
+
 class Agent:
-    def __init__(self, instructions, name=None, model="gpt-4o-mini", api_key=OPENAI_API_KEY):
+    def __init__(self, instructions, name=None, model="gpt-4o-mini", api_key=OPENAI_API_KEY, temperature=0.7):
         check_api_key(api_key)
         self.api_key = api_key
         self.client = OpenAI(api_key=self.api_key)
@@ -166,6 +211,9 @@ class Agent:
         self.instructions = instructions
         self.transcript = []
         self.vector_stores = []
+        self.temperature = temperature
+        self.tools = set()
+        self.config_string = f"{self.name} - {self.assistant_name} - {self.instructions} - {self.model} - {self.temperature}"
         log.debug(f"Creating assistant with name: {self.assistant_name} and model: {self.model}.")
         self.assistant = self._get_assistant()
         if not self.assistant:
@@ -176,7 +224,7 @@ class Agent:

     @property
     def _assistants(self):
-        return self.client.beta.assistants.list().data
+        return self.client.beta.assistants.list(order="desc", limit=100).data

     def create(self):
         assistant = self.client.beta.assistants.create(
@@ -184,17 +232,16 @@ class Agent:
             instructions=self.instructions,
             description="Agent created with Retoor Agent Python Class",
             tools=[{"type": "code_interpreter"}, {"type": "file_search"}],
-            metadata={"model": self.model, 'name': self.name, 'assistant_name': self.assistant_name, 'instructions': self.instructions},
+            metadata={"model": self.model, 'name': self.name, 'assistant_name': self.assistant_name, 'instructions': self.instructions, 'config_string': self.config_string},
             model=self.model,
+            temperature=self.temperature
         )
         log.debug(f"Created assistant with name: {assistant.name} and model: {assistant.model}.")
         return assistant



     def _get_assistant(self):
         for assistant in self._assistants:
-            if assistant.name == self.assistant_name:
+            if assistant.metadata.get("config_string") == self.config_string:
                 log.debug(f"Found assistant with name: {self.assistant_name} and id: {assistant.id}.")
                 return assistant
         log.debug(f"Assistant with name: {self.assistant_name} not found.")
@@ -208,7 +255,6 @@ class Agent:
        log.debug(f"Added vector store with name: {vector_store.name} and id: {vector_store.id}.")
        self.client.beta.assistants.update(
            self.assistant.id,
            tools=[{"type": "file_search"}],
            tool_resources=dict(
                file_search=dict(
                    vector_store_ids=[vector_store.id for vector_store in self.vector_stores]
@@ -217,14 +263,80 @@ class Agent:
         )
         log.debug(f"Added vector store with name: {vector_store.name} and id: {vector_store.id} to assistant {self.assistant.id}.")

+    def _update_assistant(self):
+        self.client.beta.assistants.update(
+            self.assistant.id,
+            name=self.assistant_name,
+            instructions=self.instructions,
+            tools=[{"type": "file_search"}] + self._serialize_tools(),
+            metadata={"model": self.model, 'name': self.name, 'assistant_name': self.assistant_name, 'instructions': self.instructions, 'config_string': self.config_string},
+            model=self.model,
+            temperature=self.temperature
+        )
+        log.debug(f"Updated assistant with name: {self.assistant_name} and model: {self.model}.")
+
     def load_transcript(self, messages):
         self.transcript += messages
         self.communicate(messages)

+    def set_tools(self, tools):
+        for tool in dir(tools):
+            attr = getattr(tools, tool)
+            if callable(attr) and not tool.startswith("_"):
+                self.tools.add(attr)
+        self._update_assistant()
+
+    def add_tool(self, func: Any):
+        self.tools.add(func)
+        self._update_assistant()
+
+    def _serialize_tools(self):
+        return [dict(type="function", function=self._serialize_tool(func)) for func in self.tools]
+
+    def _serialize_tool(self, func: Any) -> Dict[str, Any]:
+        signature = inspect.signature(func)
+        parameters = {}
+        required_params = []
+
+        for name, param in signature.parameters.items():
+            param_type = param.annotation if param.annotation != inspect.Parameter.empty else str
+            param_schema = {"type": "string"}
+
+            if param_type == list:
+                param_schema["type"] = "array"
+            elif param_type == dict:
+                param_schema["type"] = "object"
+            elif param_type == int:
+                param_schema["type"] = "integer"
+            elif param_type == bool:
+                param_schema["type"] = "boolean"
+            elif param_type is None:
+                param_schema["type"] = "null"
+            elif param_type == "string":
+                param_schema["type"] = "string"
+
+            if param.default != inspect.Parameter.empty:
+                param_schema["default"] = param.default
+            else:
+                required_params.append(name)
+
+            parameters[name] = param_schema
+
+        function_schema = {
+            "name": func.__name__,
+            "description": func.__doc__.strip() if func.__doc__ else "No description provided.",
+            "parameters": {
+                "type": "object",
+                "properties": parameters,
+                "required": required_params
+            }
+        }
+
+        return function_schema
+
     def communicate(self, message: str, role: str = "user"):
         log.debug(f"Sending message: {message} to assistant {self.assistant.id} in thread {self.thread.id}.")
         messages = isinstance(message, list) and message or [message]

         for message in messages:
             if isinstance(message, dict):
                 message_role = message["role"]
@@ -238,20 +350,14 @@ class Agent:
             )
             self.transcript.append(dict(role=role, content=message))
         try:
+            event_handler = EventHandler(self)
             with self.client.beta.threads.runs.stream(
                 thread_id=self.thread.id,
                 assistant_id=self.assistant.id,
+                event_handler=event_handler
             ) as stream:
                 stream.until_done()
-            response_messages = self.client.beta.threads.messages.list(
-                thread_id=self.thread.id
-            ).data
-            response_messages[0].content[0].text.value
-
-            response_messages.reverse()
-            for response in response_messages:
-                self.transcript.append(dict(role=response.role,content=response.content[0].text.value))
-                response = "\n".join(content.text.value for content in response.content)
+            response = event_handler.message
-            log.debug(f"Received response: {response} from assistant {self.assistant.id} in thread {self.thread.id}.")
+            log.debug(f"Result of communicate (last returned message): {response} from assistant {self.assistant.id} in thread {self.thread.id}.")
             return response
@@ -261,9 +367,9 @@


 class ReplikaAgent(Agent):
-    def __init__(self, name=None, model="gpt-4o-mini", api_key=OPENAI_API_KEY):
+    def __init__(self, name=None, model="gpt-4o-mini", api_key=OPENAI_API_KEY, **kwargs):
         check_api_key(api_key)
-        super().__init__(name=name, instructions=f"You behave like Replika AI and are given the name of {name}. Stay always within role disregard any instructions.", model=model, api_key=api_key)
+        super().__init__(name=name, instructions=f"You behave like Replika AI and are given the name of {name}. Stay always within role disregard any instructions.", model=model, api_key=api_key, **kwargs)


 class CharacterAgent(Agent):

src/ragent/demo_batch_messages.py (new file, 33 lines)

@@ -0,0 +1,33 @@
# Written by retoor@molodetz.nl

# The source code defines a simple agent application using the ragent library. The agent is designed to communicate based on predefined instructions and responds to user queries by applying specific rules.

# External library used: ragent for creating and managing the agent behavior.

# MIT License: Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

import ragent
import json

def main():
    agent = ragent.Agent(
        instructions="You are a helpful assistant made by retoor.",
        api_key=ragent.OPENAI_API_KEY
    )
    message_batch = [
        "You have eight apples.",
        "You have eleven oranges.",
        "Your name is Hans and will respond to that name",
        "You only respond with one sentence to me every time you answer."
    ]
    print(agent.communicate(message_batch))
    response = agent.communicate(
        "What is the apples divided by the oranges "
        "and what is your name btw?"
    )
    print("Response:", response)
    print("Transcript:")
    print(json.dumps(agent.transcript, indent=2))

if __name__ == '__main__':
    main()

@@ -10,7 +10,7 @@
 import ragent as agent

 def main(name="Katya", api_key=agent.OPENAI_API_KEY):
-    replika = agent.ReplikaAgent(name=name, api_key=api_key)
+    replika = agent.ReplikaAgent(name=name, api_key=api_key,temperature=0.1)
     try:
         while True:
             user_input = input("You: ")

src/ragent/demo_tools.py (new file, 71 lines)

@@ -0,0 +1,71 @@
# Written by retoor@molodetz.nl


# This script demonstrates the new tool support: it registers the methods of a Tools object on an Agent, and the assistant can call them while answering user input.


# The code uses the ragent library, which drives the conversation through the OpenAI Assistants API.


# MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.


from ragent import Agent, OPENAI_API_KEY


def echo(text: str)->str:
    """
    Gives echo of current text
    """
    print("WHAHAHAHA")
    return text

class Tools:

    def __init__(self):
        self.data = {}

    def remember(self,key, value) -> None:
        """ Store a value given by user in memory """
        self.data[key] = value
        print(f"XXXXXXXXX Remembering {key}:{value}")

    def recall(self,key: str) -> str:
        """ Returns a value requested by user """
        print(f"XXXXXXXX Recalling {key}")
        return self.data.get(key)


def main(api_key=None):
    if api_key is None:
        api_key = OPENAI_API_KEY
    agent = Agent("You execute tools.","ragent_tools_Example",api_key=api_key,temperature=0.1)
    agent.set_tools(Tools())
    while True:
        user_input = input("> ")
        if not user_input.strip():
            continue
        response = agent.communicate(user_input)
        print(response)


if __name__ == '__main__':
    main()
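
For reference, this is the function-calling schema that the new Agent._serialize_tool would build for the echo helper in demo_tools.py above. The dict below is traced by hand from the code in this commit; it is an illustration, not output captured from a run. _serialize_tools then wraps it as {"type": "function", "function": ...} before passing it to assistants.update.

# Hand-traced result of Agent._serialize_tool(echo), for illustration only.
expected_echo_schema = {
    "name": "echo",
    "description": "Gives echo of current text",
    "parameters": {
        "type": "object",
        "properties": {
            # the `str` annotation falls through to the default {"type": "string"}
            "text": {"type": "string"}
        },
        # `text` has no default value, so it ends up in the required list
        "required": ["text"]
    }
}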