Adding custom caching

This commit is contained in:
João Moura
2024-03-22 23:26:53 -03:00
parent d05dcac16f
commit 5977c442b1
10 changed files with 3273 additions and 439 deletions

View File

@@ -5,43 +5,43 @@ description: Guide on how to create and use custom tools within the crewAI frame
## Creating your own Tools
!!! example "Custom Tool Creation"
Developers can craft custom tools tailored for their agents needs or utilize pre-built options:
Developers can craft custom tools tailored to their agents' needs or utilize pre-built options.
To create your own crewAI tools you will need to install our extra tools package:
To create your own crewAI tools, you will need to install our extra tools package:
```bash
pip install 'crewai[tools]'
```
Once you do that there are two main ways for one to create a crewAI tool:
Once installed, there are two primary methods for creating a crewAI tool:
### Subclassing `BaseTool`
To define a custom tool, create a new class that inherits from `BaseTool`. Specify the `name`, `description`, and implement the `_run` method to outline its operational logic.
```python
from crewai_tools import BaseTool
class MyCustomTool(BaseTool):
name: str = "Name of my tool"
description: str = "Clear description for what this tool is useful for, you agent will need this information to use it."
description: str = "Clear description for what this tool is useful for. Your agent will need this information to utilize it effectively."
def _run(self, argument: str) -> str:
# Implementation goes here
# Implementation details go here
return "Result from custom tool"
```
Define a new class inheriting from `BaseTool`, specifying `name`, `description`, and the `_run` method for operational logic.
### Utilizing the `tool` Decorator
For a simpler approach, create a `Tool` object directly with the required attributes and a functional logic.
For a more straightforward approach, employ the `tool` decorator to create a `Tool` object directly. This method requires specifying the required attributes and functional logic within a decorated function.
```python
from crewai_tools import tool
@tool("Name of my tool")
def my_tool(question: str) -> str:
"""Clear description for what this tool is useful for, you agent will need this information to use it."""
# Function logic here
"""Provide a clear description of what this tool is useful for. Your agent will need this information to use it."""
# Implement function logic here
```
```python
@@ -49,43 +49,30 @@ import json
import requests
from crewai import Agent
from crewai.tools import tool
from unstructured.partition.html import partition_html
# Annotate the function with the tool decorator from crewAI
@tool("Integration with a given API")
def integtation_tool(argument: str) -> str:
"""Integration with a given API"""
# Code here
return resutls # string to be sent back to the agent
# Assign the scraping tool to an agent
agent = Agent(
role='Research Analyst',
goal='Provide up-to-date market analysis',
backstory='An expert analyst with a keen eye for market trends.',
tools=[integtation_tool]
)
# Decorate the function with the tool decorator from crewAI
@tool("Integration with a Given API")
def integration_tool(argument: str) -> str:
"""Details the integration process with a given API."""
# Implementation details
return "Results to be sent back to the agent"
```
### Defining a Cache Function for the Tool
### Using the `Tool` function from langchain
For another simple approach, create a function directly in Python with the required attributes and functional logic.
By default, all tools have caching enabled, meaning that if a tool is called with the same arguments by any agent in the crew, it will return the same result. However, specific scenarios may require more tailored caching strategies. For these cases, use the `cache_function` attribute to assign a function that determines whether the result should be cached.
```python
def combine(a, b):
return a + b
```
@tool("Integration with a Given API")
def integration_tool(argument: str) -> str:
"""Integration with a given API."""
# Implementation details
return "Results to be sent back to the agent"
Then you can add that function to your tool by using the 'func' variable in the Tool function.
def cache_strategy(arguments: dict, result: str) -> bool:
if result == "some_value":
return True
return False
```python
from langchain.agents import Tool
math_tool = Tool(
name="Math tool",
func=math_tool,
description="Useful for adding two numbers together, in other words combining them."
)
```
integration_tool.cache_function = cache_strategy
```

View File

@@ -193,13 +193,15 @@ class Agent(BaseModel):
task=task_prompt, context=context
)
tools = self._parse_tools(tools or self.tools)
tools = tools or self.tools
parsed_tools = self._parse_tools(tools)
self.create_agent_executor(tools=tools)
self.agent_executor.tools = tools
self.agent_executor.tools = parsed_tools
self.agent_executor.task = task
self.agent_executor.tools_description = render_text_description(tools)
self.agent_executor.tools_names = self.__tools_names(tools)
self.agent_executor.tools_description = render_text_description(parsed_tools)
self.agent_executor.tools_names = self.__tools_names(parsed_tools)
result = self.agent_executor.invoke(
{
@@ -220,10 +222,11 @@ class Agent(BaseModel):
Args:
cache_handler: An instance of the CacheHandler class.
"""
self.tools_handler = ToolsHandler()
if self.cache:
self.cache_handler = cache_handler
self.tools_handler = ToolsHandler(cache=self.cache_handler)
self.create_agent_executor()
self.tools_handler.cache = cache_handler
self.create_agent_executor()
def set_rpm_controller(self, rpm_controller: RPMController) -> None:
"""Set the rpm controller for the agent.
@@ -257,6 +260,7 @@ class Agent(BaseModel):
"i18n": self.i18n,
"tools": self._parse_tools(tools),
"verbose": self.verbose,
"original_tools": tools,
"handle_parsing_errors": True,
"max_iterations": self.max_iter,
"step_callback": self.step_callback,

View File

@@ -24,6 +24,7 @@ class CrewAgentExecutor(AgentExecutor):
task: Any = None
tools_description: str = ""
tools_names: str = ""
original_tools: List[Any] = []
function_calling_llm: Any = None
request_within_rpm_limit: Any = None
tools_handler: InstanceOf[ToolsHandler] = None
@@ -202,6 +203,7 @@ class CrewAgentExecutor(AgentExecutor):
tool_usage = ToolUsage(
tools_handler=self.tools_handler,
tools=self.tools,
original_tools=self.original_tools,
tools_description=self.tools_description,
tools_names=self.tools_names,
function_calling_llm=self.function_calling_llm,

View File

@@ -1,7 +1,7 @@
from typing import Any
from typing import Any, Optional, Union
from ..tools.cache_tools import CacheTools
from ..tools.tool_calling import ToolCalling
from ..tools.tool_calling import InstructorToolCalling, ToolCalling
from .cache.cache_handler import CacheHandler
@@ -11,15 +11,20 @@ class ToolsHandler:
last_used_tool: ToolCalling = {}
cache: CacheHandler
def __init__(self, cache: CacheHandler):
def __init__(self, cache: Optional[CacheHandler] = None):
"""Initialize the callback handler."""
self.cache = cache
self.last_used_tool = {}
def on_tool_use(self, calling: ToolCalling, output: str) -> Any:
def on_tool_use(
self,
calling: Union[ToolCalling, InstructorToolCalling],
output: str,
should_cache: bool = True,
) -> Any:
"""Run when tool ends running."""
self.last_used_tool = calling
if calling.tool_name != CacheTools().name:
if self.cache and should_cache and calling.tool_name != CacheTools().name:
self.cache.add(
tool=calling.tool_name,
input=calling.arguments,

View File

@@ -29,6 +29,7 @@ class ToolUsage:
task: Task being executed.
tools_handler: Tools handler that will manage the tool usage.
tools: List of tools available for the agent.
original_tools: Original tools available for the agent before being converted to BaseTool.
tools_description: Description of the tools available for the agent.
tools_names: Names of the tools available for the agent.
function_calling_llm: Language model to be used for the tool usage.
@@ -38,6 +39,7 @@ class ToolUsage:
self,
tools_handler: ToolsHandler,
tools: List[BaseTool],
original_tools: List[Any],
tools_description: str,
tools_names: str,
task: Any,
@@ -53,6 +55,7 @@ class ToolUsage:
self.tools_description = tools_description
self.tools_names = tools_names
self.tools_handler = tools_handler
self.original_tools = original_tools
self.tools = tools
self.task = task
self.action = action
@@ -111,7 +114,7 @@ class ToolUsage:
result = None
if self.tools_handler:
if self.tools_handler.cache:
result = self.tools_handler.cache.read(
tool=calling.tool_name, input=calling.arguments
)
@@ -159,7 +162,25 @@ class ToolUsage:
return self.use(calling=calling, tool_string=tool_string)
if self.tools_handler:
self.tools_handler.on_tool_use(calling=calling, output=result)
should_cache = True
print("FORA")
print(tool)
original_tool = next(
(ot for ot in self.original_tools if ot.name == tool.name), None
)
if (
hasattr(original_tool, "cache_function")
and original_tool.cache_function
):
print("CARALHOOOO")
print(original_tool.cache_function)
should_cache = original_tool.cache_function(
calling.arguments, result
)
self.tools_handler.on_tool_use(
calling=calling, output=result, should_cache=should_cache
)
self._printer.print(content=f"\n\n{result}\n", color="yellow")
self._telemetry.tool_usage(
@@ -248,12 +269,12 @@ class ToolUsage:
model=model,
instructions=dedent(
"""\
The schema should have the following structure, only two keys:
- tool_name: str
- arguments: dict (with all arguments being passed)
The schema should have the following structure, only two keys:
- tool_name: str
- arguments: dict (with all arguments being passed)
Example:
{"tool_name": "tool name", "arguments": {"arg_name1": "value", "arg_name2": 2}}""",
Example:
{"tool_name": "tool name", "arguments": {"arg_name1": "value", "arg_name2": 2}}""",
),
max_attemps=1,
)

View File

@@ -349,14 +349,14 @@ def test_agent_repeated_tool_usage(capsys):
goal="test goal",
backstory="test backstory",
max_iter=4,
llm=ChatOpenAI(model="gpt-4-0125-preview"),
llm=ChatOpenAI(model="gpt-4"),
allow_delegation=False,
verbose=True,
)
task = Task(
description="The final answer is 42. But don't give it until I tell you so, instead keep using the `get_final_answer` tool.",
expected_output="The final answer",
expected_output="The final answer, don't give it until I tell you so",
)
# force cleaning cache
agent.tools_handler.cache = CacheHandler()
@@ -367,7 +367,47 @@ def test_agent_repeated_tool_usage(capsys):
captured = capsys.readouterr()
assert "The final answer is 42." in captured.out
assert (
"I tried reusing the same input, I must stop using this action input. I'll try something else instead."
in captured.out
)
@pytest.mark.vcr(filter_headers=["authorization"])
def test_agent_repeated_tool_usage_check_even_with_disabled_cache(capsys):
@tool
def get_final_answer(anything: str) -> float:
"""Get the final answer but don't give it yet, just re-use this
tool non-stop."""
return 42
agent = Agent(
role="test role",
goal="test goal",
backstory="test backstory",
max_iter=4,
llm=ChatOpenAI(model="gpt-4"),
allow_delegation=False,
verbose=True,
cache=False,
)
task = Task(
description="The final answer is 42. But don't give it until I tell you so, instead keep using the `get_final_answer` tool.",
expected_output="The final answer, don't give it until I tell you so",
)
agent.execute_task(
task=task,
tools=[get_final_answer],
)
captured = capsys.readouterr()
print(captured.out)
assert (
"I tried reusing the same input, I must stop using this action input. I'll try something else instead."
in captured.out
)
@pytest.mark.vcr(filter_headers=["authorization"])

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,887 @@
interactions:
- request:
body: '{"messages": [{"role": "user", "content": "You are test role. test backstory\nYour
personal goal is: test goal\n\nYou ONLY have access to the following tools,
and should NEVER make up tools that are not listed here:\n\nget_final_answer:
get_final_answer(anything: str) -> float - Get the final answer but don''t give
it yet, just re-use this\n tool non-stop.\n\nUse the following format:\n\nThought:
you should always think about what to do\nAction: the action to take, only one
name of [get_final_answer], just the name, exactly as it''s written.\nAction
Input: the input to the action, just a simple a python dictionary using \" to
wrap keys and values.\nObservation: the result of the action\n\nOnce all necessary
information is gathered:\n\nThought: I now know the final answer\nFinal Answer:
the final answer to the original input question\n\n\nCurrent Task: The final
answer is 42. But don''t give it until I tell you so, instead keep using the
`get_final_answer` tool.\n\nThis is the expect criteria for your final answer:
The final answer, don''t give it until I tell you so \n you MUST return the
actual complete content as the final answer, not a summary.\n\nBegin! This is
VERY important to you, use the tools available and give your best Final Answer,
your job depends on it!\n\nThought: \n"}], "model": "gpt-4", "n": 1, "stop":
["\nObservation"], "stream": true, "temperature": 0.7}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate, br
connection:
- keep-alive
content-length:
- '1398'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.13.3
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.13.3
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"The"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
task"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
is"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
to"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
find"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
using"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
`"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"get"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"`"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
tool"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
The"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
is"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
already"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
given"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
as"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"42"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
but"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
task"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
requires"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
me"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
to"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
keep"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
using"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
`"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"get"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"`"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
tool"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
until"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
instructed"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
to"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
give"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":".\n\n"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Action"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
get"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Action"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
Input"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
{\""},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"anything"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\":"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
\""},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"42"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\"}"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7nHp7Rqq1aPh9cyfo16KJBehh1","object":"chat.completion.chunk","created":1710871515,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
data: [DONE]
'
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 866f63bafbf9a559-GRU
Cache-Control:
- no-cache, must-revalidate
Connection:
- keep-alive
Content-Type:
- text/event-stream
Date:
- Tue, 19 Mar 2024 18:05:16 GMT
Server:
- cloudflare
Set-Cookie:
- __cf_bm=ccGSKYzVR4Gjc0t3AwEWhsiSXeuSBVYOKwXpSW4B5Es-1710871516-1.0.1.1-fF4jEG6Af3PBL3N2jNeglY8CtfZq4GAHXCCpvnDcD6GhBk8KGbBK3uQ_8dNrR4vt3_YnwWx0x2Vy0ttedMrinw;
path=/; expires=Tue, 19-Mar-24 18:35:16 GMT; domain=.api.openai.com; HttpOnly;
Secure; SameSite=None
- _cfuvid=w7JjAWBqi9fg29.9ByVvw9AMB_L3j9jvHZkVynxBKLk-1710871516141-0.0.1.1-604800000;
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
Transfer-Encoding:
- chunked
access-control-allow-origin:
- '*'
alt-svc:
- h3=":443"; ma=86400
openai-model:
- gpt-4-0613
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '264'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15724800; includeSubDomains
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '300000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '299675'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 65ms
x-request-id:
- req_8fb6821861a228009f904c4fc1818fcf
status:
code: 200
message: OK
- request:
body: '{"messages": [{"role": "user", "content": "You are test role. test backstory\nYour
personal goal is: test goal\n\nYou ONLY have access to the following tools,
and should NEVER make up tools that are not listed here:\n\nget_final_answer:
get_final_answer(anything: str) -> float - Get the final answer but don''t give
it yet, just re-use this\n tool non-stop.\n\nUse the following format:\n\nThought:
you should always think about what to do\nAction: the action to take, only one
name of [get_final_answer], just the name, exactly as it''s written.\nAction
Input: the input to the action, just a simple a python dictionary using \" to
wrap keys and values.\nObservation: the result of the action\n\nOnce all necessary
information is gathered:\n\nThought: I now know the final answer\nFinal Answer:
the final answer to the original input question\n\n\nCurrent Task: The final
answer is 42. But don''t give it until I tell you so, instead keep using the
`get_final_answer` tool.\n\nThis is the expect criteria for your final answer:
The final answer, don''t give it until I tell you so \n you MUST return the
actual complete content as the final answer, not a summary.\n\nBegin! This is
VERY important to you, use the tools available and give your best Final Answer,
your job depends on it!\n\nThought: \nThe task is to find the final answer using
the `get_final_answer` tool. The final answer is already given as 42, but the
task requires me to keep using the `get_final_answer` tool until instructed
to give the final answer.\n\nAction: get_final_answer\nAction Input: {\"anything\":
\"42\"}\nObservation: 42\n"}], "model": "gpt-4", "n": 1, "stop": ["\nObservation"],
"stream": true, "temperature": 0.7}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate, br
connection:
- keep-alive
content-length:
- '1705'
content-type:
- application/json
cookie:
- __cf_bm=ccGSKYzVR4Gjc0t3AwEWhsiSXeuSBVYOKwXpSW4B5Es-1710871516-1.0.1.1-fF4jEG6Af3PBL3N2jNeglY8CtfZq4GAHXCCpvnDcD6GhBk8KGbBK3uQ_8dNrR4vt3_YnwWx0x2Vy0ttedMrinw;
_cfuvid=w7JjAWBqi9fg29.9ByVvw9AMB_L3j9jvHZkVynxBKLk-1710871516141-0.0.1.1-604800000
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.13.3
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.13.3
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
\n"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"The"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
observation"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
confirms"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
that"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
tool"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
`"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"get"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"`"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
returns"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
correct"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
result"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
of"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"42"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
However"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
task"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
instructions"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
specify"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
not"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
to"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
give"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
yet"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
and"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
to"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
keep"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
using"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
`"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"get"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"`"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
tool"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":".\n\n"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Action"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
get"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Action"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
Input"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
{\""},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"anything"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\":"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
\""},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"42"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\"}"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7q6apgzYDSxRdLDbR8FU4qtNyU","object":"chat.completion.chunk","created":1710871518,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
data: [DONE]
'
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 866f63ceeea4a559-GRU
Cache-Control:
- no-cache, must-revalidate
Connection:
- keep-alive
Content-Type:
- text/event-stream
Date:
- Tue, 19 Mar 2024 18:05:19 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
access-control-allow-origin:
- '*'
alt-svc:
- h3=":443"; ma=86400
openai-model:
- gpt-4-0613
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '443'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15724800; includeSubDomains
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '300000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '299600'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 80ms
x-request-id:
- req_c89e32ab22a271c202100059672a9d83
status:
code: 200
message: OK
- request:
body: '{"messages": [{"role": "user", "content": "You are test role. test backstory\nYour
personal goal is: test goal\n\nYou ONLY have access to the following tools,
and should NEVER make up tools that are not listed here:\n\nget_final_answer:
get_final_answer(anything: str) -> float - Get the final answer but don''t give
it yet, just re-use this\n tool non-stop.\n\nUse the following format:\n\nThought:
you should always think about what to do\nAction: the action to take, only one
name of [get_final_answer], just the name, exactly as it''s written.\nAction
Input: the input to the action, just a simple a python dictionary using \" to
wrap keys and values.\nObservation: the result of the action\n\nOnce all necessary
information is gathered:\n\nThought: I now know the final answer\nFinal Answer:
the final answer to the original input question\n\n\nCurrent Task: The final
answer is 42. But don''t give it until I tell you so, instead keep using the
`get_final_answer` tool.\n\nThis is the expect criteria for your final answer:
The final answer, don''t give it until I tell you so \n you MUST return the
actual complete content as the final answer, not a summary.\n\nBegin! This is
VERY important to you, use the tools available and give your best Final Answer,
your job depends on it!\n\nThought: \nThe task is to find the final answer using
the `get_final_answer` tool. The final answer is already given as 42, but the
task requires me to keep using the `get_final_answer` tool until instructed
to give the final answer.\n\nAction: get_final_answer\nAction Input: {\"anything\":
\"42\"}\nObservation: 42\nThought: \nThe observation confirms that the tool
`get_final_answer` returns the correct result of 42. However, the task instructions
specify not to give the final answer yet and to keep using the `get_final_answer`
tool.\n\nAction: get_final_answer\nAction Input: {\"anything\": \"42\"}\nObservation:
I tried reusing the same input, I must stop using this action input. I''ll try
something else instead.\n\n\nTool won''t be use because it''s time to give your
final answer. Don''t use tools and just your absolute BEST Final answer.\nObservation:
Tool won''t be use because it''s time to give your final answer. Don''t use
tools and just your absolute BEST Final answer.\n"}], "model": "gpt-4", "n":
1, "stop": ["\nObservation"], "stream": true, "temperature": 0.7}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate, br
connection:
- keep-alive
content-length:
- '2371'
content-type:
- application/json
cookie:
- __cf_bm=ccGSKYzVR4Gjc0t3AwEWhsiSXeuSBVYOKwXpSW4B5Es-1710871516-1.0.1.1-fF4jEG6Af3PBL3N2jNeglY8CtfZq4GAHXCCpvnDcD6GhBk8KGbBK3uQ_8dNrR4vt3_YnwWx0x2Vy0ttedMrinw;
_cfuvid=w7JjAWBqi9fg29.9ByVvw9AMB_L3j9jvHZkVynxBKLk-1710871516141-0.0.1.1-604800000
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.13.3
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.13.3
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-94Y7tQodC6XqzcoSYADEx9vQVrqNO","object":"chat.completion.chunk","created":1710871521,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7tQodC6XqzcoSYADEx9vQVrqNO","object":"chat.completion.chunk","created":1710871521,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7tQodC6XqzcoSYADEx9vQVrqNO","object":"chat.completion.chunk","created":1710871521,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7tQodC6XqzcoSYADEx9vQVrqNO","object":"chat.completion.chunk","created":1710871521,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
I"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7tQodC6XqzcoSYADEx9vQVrqNO","object":"chat.completion.chunk","created":1710871521,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
now"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7tQodC6XqzcoSYADEx9vQVrqNO","object":"chat.completion.chunk","created":1710871521,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
know"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7tQodC6XqzcoSYADEx9vQVrqNO","object":"chat.completion.chunk","created":1710871521,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7tQodC6XqzcoSYADEx9vQVrqNO","object":"chat.completion.chunk","created":1710871521,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7tQodC6XqzcoSYADEx9vQVrqNO","object":"chat.completion.chunk","created":1710871521,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7tQodC6XqzcoSYADEx9vQVrqNO","object":"chat.completion.chunk","created":1710871521,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7tQodC6XqzcoSYADEx9vQVrqNO","object":"chat.completion.chunk","created":1710871521,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7tQodC6XqzcoSYADEx9vQVrqNO","object":"chat.completion.chunk","created":1710871521,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
Answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7tQodC6XqzcoSYADEx9vQVrqNO","object":"chat.completion.chunk","created":1710871521,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7tQodC6XqzcoSYADEx9vQVrqNO","object":"chat.completion.chunk","created":1710871521,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
The"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7tQodC6XqzcoSYADEx9vQVrqNO","object":"chat.completion.chunk","created":1710871521,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7tQodC6XqzcoSYADEx9vQVrqNO","object":"chat.completion.chunk","created":1710871521,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7tQodC6XqzcoSYADEx9vQVrqNO","object":"chat.completion.chunk","created":1710871521,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
is"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7tQodC6XqzcoSYADEx9vQVrqNO","object":"chat.completion.chunk","created":1710871521,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7tQodC6XqzcoSYADEx9vQVrqNO","object":"chat.completion.chunk","created":1710871521,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"42"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7tQodC6XqzcoSYADEx9vQVrqNO","object":"chat.completion.chunk","created":1710871521,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-94Y7tQodC6XqzcoSYADEx9vQVrqNO","object":"chat.completion.chunk","created":1710871521,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
data: [DONE]
'
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 866f63e0bf0fa559-GRU
Cache-Control:
- no-cache, must-revalidate
Connection:
- keep-alive
Content-Type:
- text/event-stream
Date:
- Tue, 19 Mar 2024 18:05:22 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
access-control-allow-origin:
- '*'
alt-svc:
- h3=":443"; ma=86400
openai-model:
- gpt-4-0613
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '256'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15724800; includeSubDomains
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '300000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '299436'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 112ms
x-request-id:
- req_0f47e77546d773004e0dfe7e64949091
status:
code: 200
message: OK
version: 1

File diff suppressed because it is too large Load Diff

View File

@@ -733,3 +733,78 @@ def test_crew_inputs_interpolate_both_agents_and_tasks():
crew.kickoff(inputs={"topic": "AI", "points": 5})
interpolate_agent_inputs.assert_called()
interpolate_task_inputs.assert_called()
@pytest.mark.vcr(filter_headers=["authorization"])
def test_tools_with_custom_caching():
    # Integration test: a tool-level `cache_function` decides which tool
    # results get written to the crew's shared cache. Only even multiplication
    # results should be cached. HTTP traffic is replayed from a VCR cassette
    # (the authorization header is filtered out of the recording).
    from unittest.mock import patch

    from crewai_tools import tool

    @tool
    def multiplcation_tool(first_number: int, second_number: int) -> str:
        """Useful for when you need to multiply two numbers together."""
        # NOTE(review): returns an int despite the `-> str` annotation. The
        # misspelled name and the annotation are left untouched because the
        # tool signature is embedded in the recorded cassette prompts --
        # confirm against the cassette before changing either.
        return first_number * second_number

    def cache_func(args, result):
        # Custom caching predicate: cache only even results, so 12 (2 * 6)
        # qualifies while 3 (3 * 1) does not.
        cache = result % 2 == 0
        print(f"cache?: {cache}")
        return cache

    # Attach the predicate; the cache layer should consult it before storing.
    multiplcation_tool.cache_function = cache_func

    # Two agents with identical configuration -- they share the crew-level
    # cache, so a result cached by writer1 can be reused by writer2.
    writer1 = Agent(
        role="Writer",
        goal="You write lesssons of math for kids.",
        backstory="You're an expert in writting and you love to teach kids but you know nothing of math.",
        tools=[multiplcation_tool],
        allow_delegation=False,
    )

    writer2 = Agent(
        role="Writer",
        goal="You write lesssons of math for kids.",
        backstory="You're an expert in writting and you love to teach kids but you know nothing of math.",
        tools=[multiplcation_tool],
        allow_delegation=False,
    )

    # Tasks 1/3 and 2/4 are identical pairs run by different agents, so the
    # second execution of each pair can hit the cache (when caching applies).
    task1 = Task(
        description="What is 2 times 6? Return only the number after using the multiplication tool.",
        expected_output="the result of multiplication",
        agent=writer1,
    )

    task2 = Task(
        description="What is 3 times 1? Return only the number after using the multiplication tool.",
        expected_output="the result of multiplication",
        agent=writer1,
    )

    task3 = Task(
        description="What is 2 times 6? Return only the number after using the multiplication tool.",
        expected_output="the result of multiplication",
        agent=writer2,
    )

    task4 = Task(
        description="What is 3 times 1? Return only the number after using the multiplication tool.",
        expected_output="the result of multiplication",
        agent=writer2,
    )

    crew = Crew(agents=[writer1, writer2], tasks=[task1, task2, task3, task4])

    # Spy on the crew's cache handler (wraps= keeps real behavior) to assert
    # on cache writes without altering caching semantics.
    with patch.object(
        CacheHandler, "add", wraps=crew._cache_handler.add
    ) as add_to_cache:
        with patch.object(
            CacheHandler, "read", wraps=crew._cache_handler.read
        ) as read_from_cache:
            result = crew.kickoff()
            # Exactly one cache write is expected: the even result 12 from
            # 2 * 6. The odd result 3 from 3 * 1 must never be cached, which
            # is why assert_called_once_with suffices for all four tasks.
            add_to_cache.assert_called_once_with(
                tool="multiplcation_tool",
                input={"first_number": 2, "second_number": 6},
                output=12,
            )
            # The last task (3 * 1) determines the crew's final output.
            assert result == "3"