diff --git a/.env.template b/.env.template index 31d8ccc698..7a4a22bb9c 100644 --- a/.env.template +++ b/.env.template @@ -90,30 +90,18 @@ OPENAI_API_KEY=your-openai-api-key ### EMBEDDINGS ## EMBEDDING_MODEL - Model to use for creating embeddings -## EMBEDDING_TOKENIZER - Tokenizer to use for chunking large inputs -## EMBEDDING_TOKEN_LIMIT - Chunk size limit for large inputs # EMBEDDING_MODEL=text-embedding-ada-002 -# EMBEDDING_TOKENIZER=cl100k_base -# EMBEDDING_TOKEN_LIMIT=8191 ################################################################################ ### MEMORY ################################################################################ ### MEMORY_BACKEND - Memory backend type -## local - Default -## pinecone - Pinecone (if configured) +## json_file - Default ## redis - Redis (if configured) -## milvus - Milvus (if configured - also works with Zilliz) ## MEMORY_INDEX - Name of index created in Memory backend (Default: auto-gpt) -# MEMORY_BACKEND=local -# MEMORY_INDEX=auto-gpt - -### PINECONE -## PINECONE_API_KEY - Pinecone API Key (Example: my-pinecone-api-key) -## PINECONE_ENV - Pinecone environment (region) (Example: us-west-2) -# PINECONE_API_KEY=your-pinecone-api-key -# PINECONE_ENV=your-pinecone-region +# MEMORY_BACKEND=json_file +# MEMORY_INDEX=auto-gpt-memory ### REDIS ## REDIS_HOST - Redis host (Default: localhost, use "redis" for docker-compose) @@ -125,38 +113,6 @@ OPENAI_API_KEY=your-openai-api-key # REDIS_PASSWORD= # WIPE_REDIS_ON_START=True -### WEAVIATE -## MEMORY_BACKEND - Use 'weaviate' to use Weaviate vector storage -## WEAVIATE_HOST - Weaviate host IP -## WEAVIATE_PORT - Weaviate host port -## WEAVIATE_PROTOCOL - Weaviate host protocol (e.g. 'http') -## USE_WEAVIATE_EMBEDDED - Whether to use Embedded Weaviate -## WEAVIATE_EMBEDDED_PATH - File system path were to persist data when running Embedded Weaviate -## WEAVIATE_USERNAME - Weaviate username -## WEAVIATE_PASSWORD - Weaviate password -## WEAVIATE_API_KEY - Weaviate API key if using API-key-based authentication -# WEAVIATE_HOST="127.0.0.1" -# WEAVIATE_PORT=8080 -# WEAVIATE_PROTOCOL="http" -# USE_WEAVIATE_EMBEDDED=False -# WEAVIATE_EMBEDDED_PATH="/home/me/.local/share/weaviate" -# WEAVIATE_USERNAME= -# WEAVIATE_PASSWORD= -# WEAVIATE_API_KEY= - -### MILVUS -## MILVUS_ADDR - Milvus remote address (e.g. localhost:19530, https://xxx-xxxx.xxxx.xxxx.zillizcloud.com:443) -## MILVUS_USERNAME - username for your Milvus database -## MILVUS_PASSWORD - password for your Milvus database -## MILVUS_SECURE - True to enable TLS. (Default: False) -## Setting MILVUS_ADDR to a `https://` URL will override this setting. -## MILVUS_COLLECTION - Milvus collection, change it if you want to start a new memory and retain the old memory. -# MILVUS_ADDR=localhost:19530 -# MILVUS_USERNAME= -# MILVUS_PASSWORD= -# MILVUS_SECURE= -# MILVUS_COLLECTION=autogpt - ################################################################################ ### IMAGE GENERATION PROVIDER ################################################################################ diff --git a/BULLETIN.md b/BULLETIN.md index 841dd3ac1b..17c38b8c98 100644 --- a/BULLETIN.md +++ b/BULLETIN.md @@ -43,3 +43,11 @@ Auto-GPT now has support for plugins! With plugins, you can extend Auto-GPT's ab adding support for third-party services and more. See https://github.com/Significant-Gravitas/Auto-GPT-Plugins for instructions and available plugins. Specific plugins can be allowlisted/denylisted in .env. 
+ +## Memory backend deprecation ⚠️ +The Milvus, Pinecone and Weaviate memory backends were rendered incompatible +by work on the memory system, and have been removed in `master`. The Redis +memory store was also temporarily removed but we aim to merge a new implementation +before the next release. +Whether built-in support for the others will be added back in the future is subject to +discussion, feel free to pitch in: https://github.com/Significant-Gravitas/Auto-GPT/discussions/4280 diff --git a/autogpt/agent/agent.py b/autogpt/agent/agent.py index ed9fea6051..8c7a5456d7 100644 --- a/autogpt/agent/agent.py +++ b/autogpt/agent/agent.py @@ -5,11 +5,14 @@ from datetime import datetime from colorama import Fore, Style from autogpt.app import execute_command, get_command +from autogpt.commands.command import CommandRegistry from autogpt.config import Config +from autogpt.config.ai_config import AIConfig from autogpt.json_utils.json_fix_llm import fix_json_using_multiple_techniques from autogpt.json_utils.utilities import LLM_DEFAULT_RESPONSE_FORMAT, validate_json -from autogpt.llm import chat_with_ai, create_chat_completion, create_chat_message -from autogpt.llm.token_counter import count_string_tokens +from autogpt.llm.base import ChatSequence +from autogpt.llm.chat import chat_with_ai, create_chat_completion +from autogpt.llm.utils import count_string_tokens from autogpt.log_cycle.log_cycle import ( FULL_MESSAGE_HISTORY_FILE_NAME, NEXT_ACTION_FILE_NAME, @@ -19,6 +22,8 @@ from autogpt.log_cycle.log_cycle import ( LogCycleHandler, ) from autogpt.logs import logger, print_assistant_thoughts +from autogpt.memory.message_history import MessageHistory +from autogpt.memory.vector import VectorMemory from autogpt.speech import say_text from autogpt.spinner import Spinner from autogpt.utils import clean_input @@ -31,7 +36,6 @@ class Agent: Attributes: ai_name: The name of the agent. memory: The memory object to use. - full_message_history: The full message history. next_action_count: The number of actions to execute. system_prompt: The system prompt is the initial prompt that defines everything the AI needs to know to achieve its task successfully. @@ -56,24 +60,19 @@ class Agent: def __init__( self, - ai_name, - memory, - full_message_history, - next_action_count, - command_registry, - config, - system_prompt, - triggering_prompt, - workspace_directory, + ai_name: str, + memory: VectorMemory, + next_action_count: int, + command_registry: CommandRegistry, + config: AIConfig, + system_prompt: str, + triggering_prompt: str, + workspace_directory: str, ): cfg = Config() self.ai_name = ai_name self.memory = memory - self.summary_memory = ( - "I was created." # Initial memory necessary to avoid hallucination - ) - self.last_memory_index = 0 - self.full_message_history = full_message_history + self.history = MessageHistory(self) self.next_action_count = next_action_count self.command_registry = command_registry self.config = config @@ -114,7 +113,7 @@ class Agent: self.config.ai_name, self.created_at, self.cycle_count, - self.full_message_history, + [m.raw() for m in self.history], FULL_MESSAGE_HISTORY_FILE_NAME, ) if ( @@ -132,8 +131,6 @@ class Agent: self, self.system_prompt, self.triggering_prompt, - self.full_message_history, - self.memory, cfg.fast_token_limit, ) # TODO: This hardcodes the model to use GPT3.5. 
Make this an argument @@ -260,9 +257,7 @@ class Agent: # Execute command if command_name is not None and command_name.lower().startswith("error"): - result = ( - f"Command {command_name} threw the following error: {arguments}" - ) + result = f"Could not execute command: {arguments}" elif command_name == "human_feedback": result = f"Human feedback: {user_input}" elif command_name == "self_feedback": @@ -286,7 +281,7 @@ class Agent: str(command_result), cfg.fast_llm_model ) memory_tlength = count_string_tokens( - str(self.summary_memory), cfg.fast_llm_model + str(self.history.summary_message()), cfg.fast_llm_model ) if result_tlength + memory_tlength + 600 > cfg.fast_token_limit: result = f"Failure: command {command_name} returned too much output. \ @@ -302,12 +297,10 @@ class Agent: # Check if there's a result from the command append it to the message # history if result is not None: - self.full_message_history.append(create_chat_message("system", result)) + self.history.add("system", result, "action_result") logger.typewriter_log("SYSTEM: ", Fore.YELLOW, result) else: - self.full_message_history.append( - create_chat_message("system", "Unable to execute command") - ) + self.history.add("system", "Unable to execute command", "action_result") logger.typewriter_log( "SYSTEM: ", Fore.YELLOW, "Unable to execute command" ) @@ -343,17 +336,18 @@ class Agent: thought = thoughts.get("thoughts", "") feedback_thoughts = thought + reasoning + plan - messages = [{"role": "user", "content": feedback_prompt + feedback_thoughts}] + prompt = ChatSequence.for_model(llm_model) + prompt.add("user", feedback_prompt + feedback_thoughts) self.log_cycle_handler.log_cycle( self.config.ai_name, self.created_at, self.cycle_count, - messages, + prompt.raw(), PROMPT_SUPERVISOR_FEEDBACK_FILE_NAME, ) - feedback = create_chat_completion(messages, model=llm_model) + feedback = create_chat_completion(prompt) self.log_cycle_handler.log_cycle( self.config.ai_name, diff --git a/autogpt/agent/agent_manager.py b/autogpt/agent/agent_manager.py index 17fb35d878..8560b0ec2a 100644 --- a/autogpt/agent/agent_manager.py +++ b/autogpt/agent/agent_manager.py @@ -1,10 +1,9 @@ """Agent manager for managing GPT agents""" from __future__ import annotations -from typing import List - -from autogpt.config.config import Config -from autogpt.llm import Message, create_chat_completion +from autogpt.config import Config +from autogpt.llm.base import ChatSequence +from autogpt.llm.chat import Message, create_chat_completion from autogpt.singleton import Singleton @@ -13,55 +12,55 @@ class AgentManager(metaclass=Singleton): def __init__(self): self.next_key = 0 - self.agents = {} # key, (task, full_message_history, model) + self.agents: dict[ + int, tuple[str, list[Message], str] + ] = {} # key, (task, full_message_history, model) self.cfg = Config() # Create new GPT agent # TODO: Centralise use of create_chat_completion() to globally enforce token limit - def create_agent(self, task: str, prompt: str, model: str) -> tuple[int, str]: + def create_agent( + self, task: str, creation_prompt: str, model: str + ) -> tuple[int, str]: """Create a new agent and return its key Args: task: The task to perform - prompt: The prompt to use - model: The model to use + creation_prompt: Prompt passed to the LLM at creation + model: The model to use to run this agent Returns: The key of the new agent """ - messages: List[Message] = [ - {"role": "user", "content": prompt}, - ] + messages = ChatSequence.for_model(model, [Message("user", creation_prompt)]) + for 
plugin in self.cfg.plugins: if not plugin.can_handle_pre_instruction(): continue - if plugin_messages := plugin.pre_instruction(messages): - messages.extend(iter(plugin_messages)) + if plugin_messages := plugin.pre_instruction(messages.raw()): + messages.extend([Message(**raw_msg) for raw_msg in plugin_messages]) # Start GPT instance - agent_reply = create_chat_completion( - model=model, - messages=messages, - ) + agent_reply = create_chat_completion(prompt=messages) - messages.append({"role": "assistant", "content": agent_reply}) + messages.add("assistant", agent_reply) plugins_reply = "" for i, plugin in enumerate(self.cfg.plugins): if not plugin.can_handle_on_instruction(): continue - if plugin_result := plugin.on_instruction(messages): + if plugin_result := plugin.on_instruction([m.raw() for m in messages]): sep = "\n" if i else "" plugins_reply = f"{plugins_reply}{sep}{plugin_result}" if plugins_reply and plugins_reply != "": - messages.append({"role": "assistant", "content": plugins_reply}) + messages.add("assistant", plugins_reply) key = self.next_key # This is done instead of len(agents) to make keys unique even if agents # are deleted self.next_key += 1 - self.agents[key] = (task, messages, model) + self.agents[key] = (task, list(messages), model) for plugin in self.cfg.plugins: if not plugin.can_handle_post_instruction(): @@ -83,33 +82,30 @@ class AgentManager(metaclass=Singleton): task, messages, model = self.agents[int(key)] # Add user message to message history before sending to agent - messages.append({"role": "user", "content": message}) + messages = ChatSequence.for_model(model, messages) + messages.add("user", message) for plugin in self.cfg.plugins: if not plugin.can_handle_pre_instruction(): continue - if plugin_messages := plugin.pre_instruction(messages): - for plugin_message in plugin_messages: - messages.append(plugin_message) + if plugin_messages := plugin.pre_instruction([m.raw() for m in messages]): + messages.extend([Message(**raw_msg) for raw_msg in plugin_messages]) # Start GPT instance - agent_reply = create_chat_completion( - model=model, - messages=messages, - ) + agent_reply = create_chat_completion(prompt=messages) - messages.append({"role": "assistant", "content": agent_reply}) + messages.add("assistant", agent_reply) plugins_reply = agent_reply for i, plugin in enumerate(self.cfg.plugins): if not plugin.can_handle_on_instruction(): continue - if plugin_result := plugin.on_instruction(messages): + if plugin_result := plugin.on_instruction([m.raw() for m in messages]): sep = "\n" if i else "" plugins_reply = f"{plugins_reply}{sep}{plugin_result}" # Update full message history if plugins_reply and plugins_reply != "": - messages.append({"role": "assistant", "content": plugins_reply}) + messages.add("assistant", plugins_reply) for plugin in self.cfg.plugins: if not plugin.can_handle_post_instruction(): diff --git a/autogpt/app.py b/autogpt/app.py index b001376a6c..da67c35f02 100644 --- a/autogpt/app.py +++ b/autogpt/app.py @@ -7,14 +7,13 @@ from autogpt.commands.command import CommandRegistry, command from autogpt.commands.web_requests import scrape_links, scrape_text from autogpt.config import Config from autogpt.logs import logger -from autogpt.memory import get_memory +from autogpt.memory.vector import get_memory from autogpt.processing.text import summarize_text from autogpt.prompts.generator import PromptGenerator from autogpt.speech import say_text from autogpt.url_utils.validators import validate_url CFG = Config() -AGENT_MANAGER = AgentManager() def 
is_valid_int(value: str) -> bool: @@ -114,24 +113,20 @@ def execute_command( # TODO: Remove commands below after they are moved to the command registry. command_name = map_command_synonyms(command_name.lower()) - if command_name == "memory_add": - return get_memory(CFG).add(arguments["string"]) - # TODO: Change these to take in a file rather than pasted code, if # non-file is given, return instructions "Input should be a python # filepath, write your code to file and try again - else: - for command in prompt.commands: - if ( - command_name == command["label"].lower() - or command_name == command["name"].lower() - ): - return command["function"](**arguments) - return ( - f"Unknown command '{command_name}'. Please refer to the 'COMMANDS'" - " list for available commands and only respond in the specified JSON" - " format." - ) + for command in prompt.commands: + if ( + command_name == command["label"].lower() + or command_name == command["name"].lower() + ): + return command["function"](**arguments) + return ( + f"Unknown command '{command_name}'. Please refer to the 'COMMANDS'" + " list for available commands and only respond in the specified JSON" + " format." + ) except Exception as e: return f"Error: {str(e)}" @@ -141,7 +136,7 @@ def execute_command( ) @validate_url def get_text_summary(url: str, question: str) -> str: - """Return the results of a Google search + """Get the text summary of a webpage Args: url (str): The url to scrape @@ -151,14 +146,15 @@ def get_text_summary(url: str, question: str) -> str: str: The summary of the text """ text = scrape_text(url) - summary = summarize_text(url, text, question) + summary, _ = summarize_text(text, question=question) + return f""" "Result" : {summary}""" @command("get_hyperlinks", "Get hyperlinks", '"url": ""') @validate_url def get_hyperlinks(url: str) -> Union[str, List[str]]: - """Return the results of a Google search + """Get all hyperlinks on a webpage Args: url (str): The url to scrape @@ -186,6 +182,8 @@ def start_agent(name: str, task: str, prompt: str, model=CFG.fast_llm_model) -> Returns: str: The response of the agent """ + agent_manager = AgentManager() + # Remove underscores from name voice_name = name.replace("_", " ") @@ -195,13 +193,13 @@ def start_agent(name: str, task: str, prompt: str, model=CFG.fast_llm_model) -> # Create agent if CFG.speak_mode: say_text(agent_intro, 1) - key, ack = AGENT_MANAGER.create_agent(task, first_message, model) + key, ack = agent_manager.create_agent(task, first_message, model) if CFG.speak_mode: say_text(f"Hello {voice_name}. Your task is as follows. {task}.") # Assign task (prompt), get response - agent_response = AGENT_MANAGER.message_agent(key, prompt) + agent_response = agent_manager.message_agent(key, prompt) return f"Agent {name} created with key {key}. First response: {agent_response}" @@ -211,7 +209,7 @@ def message_agent(key: str, message: str) -> str: """Message an agent with a given key and message""" # Check if the key is a valid integer if is_valid_int(key): - agent_response = AGENT_MANAGER.message_agent(int(key), message) + agent_response = AgentManager().message_agent(int(key), message) else: return "Invalid key, must be an integer." 
@@ -229,7 +227,7 @@ def list_agents() -> str: str: A list of all agents """ return "List of agents:\n" + "\n".join( - [str(x[0]) + ": " + x[1] for x in AGENT_MANAGER.list_agents()] + [str(x[0]) + ": " + x[1] for x in AgentManager().list_agents()] ) @@ -243,5 +241,5 @@ def delete_agent(key: str) -> str: Returns: str: A message indicating whether the agent was deleted or not """ - result = AGENT_MANAGER.delete_agent(key) + result = AgentManager().delete_agent(key) return f"Agent {key} deleted." if result else f"Agent {key} does not exist." diff --git a/autogpt/commands/analyze_code.py b/autogpt/commands/analyze_code.py index cff8a737e1..b2af33e20e 100644 --- a/autogpt/commands/analyze_code.py +++ b/autogpt/commands/analyze_code.py @@ -2,7 +2,7 @@ from __future__ import annotations from autogpt.commands.command import command -from autogpt.llm import call_ai_function +from autogpt.llm.utils import call_ai_function @command( diff --git a/autogpt/commands/file_operations.py b/autogpt/commands/file_operations.py index 0fe8114db5..cf0325e287 100644 --- a/autogpt/commands/file_operations.py +++ b/autogpt/commands/file_operations.py @@ -4,7 +4,7 @@ from __future__ import annotations import hashlib import os import os.path -from typing import Dict, Generator, Literal, Tuple +from typing import Generator, Literal import charset_normalizer import requests @@ -15,6 +15,7 @@ from autogpt.commands.command import command from autogpt.commands.file_operations_utils import read_textual_file from autogpt.config import Config from autogpt.logs import logger +from autogpt.memory.vector import MemoryItem, VectorMemory from autogpt.spinner import Spinner from autogpt.utils import readable_file_size @@ -28,7 +29,9 @@ def text_checksum(text: str) -> str: return hashlib.md5(text.encode("utf-8")).hexdigest() -def operations_from_log(log_path: str) -> Generator[Tuple[Operation, str, str | None]]: +def operations_from_log( + log_path: str, +) -> Generator[tuple[Operation, str, str | None], None, None]: """Parse the file operations log and return a tuple containing the log entries""" try: log = open(log_path, "r", encoding="utf-8") @@ -45,6 +48,7 @@ def operations_from_log(log_path: str) -> Generator[Tuple[Operation, str, str | try: path, checksum = (x.strip() for x in tail.rsplit(" #", maxsplit=1)) except ValueError: + logger.warn(f"File log entry lacks checksum: '{line}'") path, checksum = tail.strip(), None yield (operation, path, checksum) elif operation == "delete": @@ -53,7 +57,7 @@ def operations_from_log(log_path: str) -> Generator[Tuple[Operation, str, str | log.close() -def file_operations_state(log_path: str) -> Dict: +def file_operations_state(log_path: str) -> dict[str, str]: """Iterates over the operations log and returns the expected state. Parses a log file at CFG.file_logger_path to construct a dictionary that maps @@ -156,43 +160,41 @@ def read_file(filename: str) -> str: """ try: content = read_textual_file(filename, logger) + + # TODO: invalidate/update memory when file is edited + file_memory = MemoryItem.from_text_file(content, filename) + if len(file_memory.chunks) > 1: + return file_memory.summary + return content except Exception as e: return f"Error: {str(e)}" def ingest_file( - filename: str, memory, max_length: int = 4000, overlap: int = 200 + filename: str, + memory: VectorMemory, ) -> None: """ Ingest a file by reading its content, splitting it into chunks with a specified maximum length and overlap, and adding the chunks to the memory storage. 
- :param filename: The name of the file to ingest - :param memory: An object with an add() method to store the chunks in memory - :param max_length: The maximum length of each chunk, default is 4000 - :param overlap: The number of overlapping characters between chunks, default is 200 + Args: + filename: The name of the file to ingest + memory: An object with an add() method to store the chunks in memory """ try: - logger.info(f"Working with file {filename}") + logger.info(f"Ingesting file {filename}") content = read_file(filename) - content_length = len(content) - logger.info(f"File length: {content_length} characters") - chunks = list(split_file(content, max_length=max_length, overlap=overlap)) + # TODO: differentiate between different types of files + file_memory = MemoryItem.from_text_file(content, filename) + logger.debug(f"Created memory: {file_memory.dump()}") + memory.add(file_memory) - num_chunks = len(chunks) - for i, chunk in enumerate(chunks): - logger.info(f"Ingesting chunk {i + 1} / {num_chunks} into memory") - memory_to_add = ( - f"Filename: {filename}\n" f"Content part#{i + 1}/{num_chunks}: {chunk}" - ) - - memory.add(memory_to_add) - - logger.info(f"Done ingesting {num_chunks} chunks from {filename}.") + logger.info(f"Ingested {len(file_memory.e_chunks)} chunks from {filename}") except Exception as err: - logger.info(f"Error while ingesting file '{filename}': {err}") + logger.warn(f"Error while ingesting file '{filename}': {err}") @command("write_to_file", "Write to file", '"filename": "", "text": ""') diff --git a/autogpt/commands/file_operations_utils.py b/autogpt/commands/file_operations_utils.py index cf29dc364e..e408c6fa64 100644 --- a/autogpt/commands/file_operations_utils.py +++ b/autogpt/commands/file_operations_utils.py @@ -10,6 +10,7 @@ from bs4 import BeautifulSoup from pylatexenc.latex2text import LatexNodes2Text from autogpt import logs +from autogpt.logs import logger class ParserStrategy: @@ -21,6 +22,7 @@ class ParserStrategy: class TXTParser(ParserStrategy): def read(self, file_path): charset_match = charset_normalizer.from_path(file_path).best() + logger.debug(f"Reading '{file_path}' with encoding '{charset_match.encoding}'") return str(charset_match) @@ -150,9 +152,7 @@ def read_textual_file(file_path: str, logger: logs.Logger): parser = extension_to_parser.get(file_extension) if not parser: if is_binary: - raise ValueError( - "Unsupported binary file format: {}".format(file_extension) - ) + raise ValueError(f"Unsupported binary file format: {file_extension}") # fallback to txt file parser (to support script and code files loading) parser = TXTParser() file_context = FileContext(parser, logger) diff --git a/autogpt/commands/improve_code.py b/autogpt/commands/improve_code.py index ff4cf48c0d..d2665f3ff3 100644 --- a/autogpt/commands/improve_code.py +++ b/autogpt/commands/improve_code.py @@ -3,7 +3,7 @@ from __future__ import annotations import json from autogpt.commands.command import command -from autogpt.llm import call_ai_function +from autogpt.llm.utils import call_ai_function @command( diff --git a/autogpt/commands/web_requests.py b/autogpt/commands/web_requests.py index 60e54252d4..156ba1033c 100644 --- a/autogpt/commands/web_requests.py +++ b/autogpt/commands/web_requests.py @@ -100,13 +100,3 @@ def scrape_links(url: str) -> str | list[str]: hyperlinks = extract_hyperlinks(soup, url) return format_hyperlinks(hyperlinks) - - -def create_message(chunk, question): - """Create a message for the user to summarize a chunk of text""" - return { - "role": 
"user", - "content": f'"""{chunk}""" Using the above text, answer the following' - f' question: "{question}" -- if the question cannot be answered using the' - " text, summarize the text.", - } diff --git a/autogpt/commands/web_selenium.py b/autogpt/commands/web_selenium.py index 4f5ad30ea5..72f849d046 100644 --- a/autogpt/commands/web_selenium.py +++ b/autogpt/commands/web_selenium.py @@ -4,28 +4,39 @@ from __future__ import annotations import logging from pathlib import Path from sys import platform +from typing import Optional, Type from bs4 import BeautifulSoup -from selenium import webdriver from selenium.common.exceptions import WebDriverException from selenium.webdriver.chrome.options import Options as ChromeOptions +from selenium.webdriver.chrome.service import Service as ChromeDriverService +from selenium.webdriver.chrome.webdriver import WebDriver as ChromeDriver from selenium.webdriver.common.by import By from selenium.webdriver.edge.options import Options as EdgeOptions +from selenium.webdriver.edge.service import Service as EdgeDriverService +from selenium.webdriver.edge.webdriver import WebDriver as EdgeDriver from selenium.webdriver.firefox.options import Options as FirefoxOptions +from selenium.webdriver.firefox.service import Service as GeckoDriverService +from selenium.webdriver.firefox.webdriver import WebDriver as FirefoxDriver from selenium.webdriver.remote.webdriver import WebDriver from selenium.webdriver.safari.options import Options as SafariOptions +from selenium.webdriver.safari.webdriver import WebDriver as SafariDriver from selenium.webdriver.support import expected_conditions as EC from selenium.webdriver.support.wait import WebDriverWait from webdriver_manager.chrome import ChromeDriverManager from webdriver_manager.firefox import GeckoDriverManager -from webdriver_manager.microsoft import EdgeChromiumDriverManager +from webdriver_manager.microsoft import EdgeChromiumDriverManager as EdgeDriverManager -import autogpt.processing.text as summary from autogpt.commands.command import command from autogpt.config import Config +from autogpt.logs import logger +from autogpt.memory.vector import MemoryItem, NoMemory, get_memory from autogpt.processing.html import extract_hyperlinks, format_hyperlinks +from autogpt.processing.text import summarize_text from autogpt.url_utils.validators import validate_url +BrowserOptions = ChromeOptions | EdgeOptions | FirefoxOptions | SafariOptions + FILE_DIR = Path(__file__).parent.parent CFG = Config() @@ -55,14 +66,14 @@ def browse_website(url: str, question: str) -> str: return f"Error: {msg}" add_header(driver) - summary_text = summary.summarize_text(url, text, question, driver) + summary = summarize_memorize_webpage(url, text, question, driver) links = scrape_links_with_selenium(driver, url) # Limit links to 5 if len(links) > 5: links = links[:5] close_browser(driver) - return f"Answer gathered from website: {summary_text} \n \n Links: {links}" + return f"Answer gathered from website: {summary}\n\nLinks: {links}" def scrape_text_with_selenium(url: str) -> tuple[WebDriver, str]: @@ -76,14 +87,14 @@ def scrape_text_with_selenium(url: str) -> tuple[WebDriver, str]: """ logging.getLogger("selenium").setLevel(logging.CRITICAL) - options_available = { + options_available: dict[str, Type[BrowserOptions]] = { "chrome": ChromeOptions, - "safari": SafariOptions, - "firefox": FirefoxOptions, "edge": EdgeOptions, + "firefox": FirefoxOptions, + "safari": SafariOptions, } - options = options_available[CFG.selenium_web_browser]() + options: 
BrowserOptions = options_available[CFG.selenium_web_browser]() options.add_argument( "user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.5615.49 Safari/537.36" ) @@ -92,17 +103,17 @@ def scrape_text_with_selenium(url: str) -> tuple[WebDriver, str]: if CFG.selenium_headless: options.headless = True options.add_argument("--disable-gpu") - driver = webdriver.Firefox( - executable_path=GeckoDriverManager().install(), options=options + driver = FirefoxDriver( + service=GeckoDriverService(GeckoDriverManager().install()), options=options + ) + elif CFG.selenium_web_browser == "edge": + driver = EdgeDriver( + service=EdgeDriverService(EdgeDriverManager().install()), options=options ) elif CFG.selenium_web_browser == "safari": # Requires a bit more setup on the users end # See https://developer.apple.com/documentation/webkit/testing_with_webdriver_in_safari - driver = webdriver.Safari(options=options) - elif CFG.selenium_web_browser == "edge": - driver = webdriver.Edge( - executable_path=EdgeChromiumDriverManager().install(), options=options - ) + driver = SafariDriver(options=options) else: if platform == "linux" or platform == "linux2": options.add_argument("--disable-dev-shm-usage") @@ -115,10 +126,10 @@ def scrape_text_with_selenium(url: str) -> tuple[WebDriver, str]: chromium_driver_path = Path("/usr/bin/chromedriver") - driver = webdriver.Chrome( - executable_path=chromium_driver_path + driver = ChromeDriver( + service=ChromeDriverService(str(chromium_driver_path)) if chromium_driver_path.exists() - else ChromeDriverManager().install(), + else ChromeDriverService(ChromeDriverManager().install()), options=options, ) driver.get(url) @@ -188,3 +199,30 @@ def add_header(driver: WebDriver) -> None: driver.execute_script(overlay_script) except Exception as e: print(f"Error executing overlay.js: {e}") + + +def summarize_memorize_webpage( + url: str, text: str, question: str, driver: Optional[WebDriver] = None +) -> str: + """Summarize text using the OpenAI API + + Args: + url (str): The url of the text + text (str): The text to summarize + question (str): The question to ask the model + driver (WebDriver): The webdriver to use to scroll the page + + Returns: + str: The summary of the text + """ + if not text: + return "Error: No text to summarize" + + text_length = len(text) + logger.info(f"Text length: {text_length} characters") + + memory = get_memory(CFG) + + new_memory = MemoryItem.from_webpage(text, url, question=question) + memory.add(new_memory) + return new_memory.summary diff --git a/autogpt/commands/write_tests.py b/autogpt/commands/write_tests.py index c6a0332474..1f4c4dbbc4 100644 --- a/autogpt/commands/write_tests.py +++ b/autogpt/commands/write_tests.py @@ -4,7 +4,7 @@ from __future__ import annotations import json from autogpt.commands.command import command -from autogpt.llm import call_ai_function +from autogpt.llm.utils import call_ai_function @command( diff --git a/autogpt/config/ai_config.py b/autogpt/config/ai_config.py index 0599f10480..3872463f4e 100644 --- a/autogpt/config/ai_config.py +++ b/autogpt/config/ai_config.py @@ -7,11 +7,12 @@ from __future__ import annotations import os import platform from pathlib import Path -from typing import Any, Optional, Type +from typing import Optional import distro import yaml +from autogpt.commands.command import CommandRegistry from autogpt.prompts.generator import PromptGenerator # Soon this will go in a folder where it remembers more stuff about the run(s) @@ -53,8 +54,8 @@ class 
AIConfig: self.ai_role = ai_role self.ai_goals = ai_goals self.api_budget = api_budget - self.prompt_generator = None - self.command_registry = None + self.prompt_generator: PromptGenerator | None = None + self.command_registry: CommandRegistry | None = None @staticmethod def load(config_file: str = SAVE_FILE) -> "AIConfig": diff --git a/autogpt/config/config.py b/autogpt/config/config.py index 7e8c0c37a6..94255333c5 100644 --- a/autogpt/config/config.py +++ b/autogpt/config/config.py @@ -17,8 +17,8 @@ class Config(metaclass=Singleton): def __init__(self) -> None: """Initialize the Config class""" - self.workspace_path = None - self.file_logger_path = None + self.workspace_path: str = None + self.file_logger_path: str = None self.debug_mode = False self.continuous_mode = False @@ -58,9 +58,6 @@ class Config(metaclass=Singleton): self.fast_token_limit = int(os.getenv("FAST_TOKEN_LIMIT", 4000)) self.smart_token_limit = int(os.getenv("SMART_TOKEN_LIMIT", 8000)) self.embedding_model = os.getenv("EMBEDDING_MODEL", "text-embedding-ada-002") - self.embedding_tokenizer = os.getenv("EMBEDDING_TOKENIZER", "cl100k_base") - self.embedding_token_limit = int(os.getenv("EMBEDDING_TOKEN_LIMIT", 8191)) - self.browse_chunk_max_length = int(os.getenv("BROWSE_CHUNK_MAX_LENGTH", 3000)) self.browse_spacy_language_model = os.getenv( "BROWSE_SPACY_LANGUAGE_MODEL", "en_core_web_sm" ) @@ -99,28 +96,6 @@ class Config(metaclass=Singleton): self.google_api_key = os.getenv("GOOGLE_API_KEY") self.custom_search_engine_id = os.getenv("CUSTOM_SEARCH_ENGINE_ID") - self.pinecone_api_key = os.getenv("PINECONE_API_KEY") - self.pinecone_region = os.getenv("PINECONE_ENV") - - self.weaviate_host = os.getenv("WEAVIATE_HOST") - self.weaviate_port = os.getenv("WEAVIATE_PORT") - self.weaviate_protocol = os.getenv("WEAVIATE_PROTOCOL", "http") - self.weaviate_username = os.getenv("WEAVIATE_USERNAME", None) - self.weaviate_password = os.getenv("WEAVIATE_PASSWORD", None) - self.weaviate_scopes = os.getenv("WEAVIATE_SCOPES", None) - self.weaviate_embedded_path = os.getenv("WEAVIATE_EMBEDDED_PATH") - self.weaviate_api_key = os.getenv("WEAVIATE_API_KEY", None) - self.use_weaviate_embedded = ( - os.getenv("USE_WEAVIATE_EMBEDDED", "False") == "True" - ) - - # milvus or zilliz cloud configuration. - self.milvus_addr = os.getenv("MILVUS_ADDR", "localhost:19530") - self.milvus_username = os.getenv("MILVUS_USERNAME") - self.milvus_password = os.getenv("MILVUS_PASSWORD") - self.milvus_collection = os.getenv("MILVUS_COLLECTION", "autogpt") - self.milvus_secure = os.getenv("MILVUS_SECURE") == "True" - self.image_provider = os.getenv("IMAGE_PROVIDER") self.image_size = int(os.getenv("IMAGE_SIZE", 256)) self.huggingface_api_token = os.getenv("HUGGINGFACE_API_TOKEN") @@ -146,14 +121,13 @@ class Config(metaclass=Singleton): " (KHTML, like Gecko) Chrome/83.0.4103.97 Safari/537.36", ) + self.memory_backend = os.getenv("MEMORY_BACKEND", "json_file") + self.memory_index = os.getenv("MEMORY_INDEX", "auto-gpt-memory") + self.redis_host = os.getenv("REDIS_HOST", "localhost") - self.redis_port = os.getenv("REDIS_PORT", "6379") + self.redis_port = int(os.getenv("REDIS_PORT", "6379")) self.redis_password = os.getenv("REDIS_PASSWORD", "") self.wipe_redis_on_start = os.getenv("WIPE_REDIS_ON_START", "True") == "True" - self.memory_index = os.getenv("MEMORY_INDEX", "auto-gpt") - # Note that indexes must be created on db 0 in redis, this is not configurable. 
- - self.memory_backend = os.getenv("MEMORY_BACKEND", "local") self.plugins_dir = os.getenv("PLUGINS_DIR", "plugins") self.plugins: List[AutoGPTPluginTemplate] = [] @@ -250,18 +224,6 @@ class Config(metaclass=Singleton): """Set the model to use for creating embeddings.""" self.embedding_model = value - def set_embedding_tokenizer(self, value: str) -> None: - """Set the tokenizer to use when creating embeddings.""" - self.embedding_tokenizer = value - - def set_embedding_token_limit(self, value: int) -> None: - """Set the token limit for creating embeddings.""" - self.embedding_token_limit = value - - def set_browse_chunk_max_length(self, value: int) -> None: - """Set the browse_website command chunk max length value.""" - self.browse_chunk_max_length = value - def set_openai_api_key(self, value: str) -> None: """Set the OpenAI API key value.""" self.openai_api_key = value @@ -286,14 +248,6 @@ class Config(metaclass=Singleton): """Set the custom search engine id value.""" self.custom_search_engine_id = value - def set_pinecone_api_key(self, value: str) -> None: - """Set the Pinecone API key value.""" - self.pinecone_api_key = value - - def set_pinecone_region(self, value: str) -> None: - """Set the Pinecone region value.""" - self.pinecone_region = value - def set_debug_mode(self, value: bool) -> None: """Set the debug mode value.""" self.debug_mode = value diff --git a/autogpt/configurator.py b/autogpt/configurator.py index 98bd0b26d0..f156f2c719 100644 --- a/autogpt/configurator.py +++ b/autogpt/configurator.py @@ -6,9 +6,9 @@ from colorama import Back, Fore, Style from autogpt import utils from autogpt.config import Config -from autogpt.llm.llm_utils import check_model +from autogpt.llm.utils import check_model from autogpt.logs import logger -from autogpt.memory import get_supported_memory_backends +from autogpt.memory.vector import get_supported_memory_backends CFG = Config() diff --git a/autogpt/json_utils/json_fix_llm.py b/autogpt/json_utils/json_fix_llm.py index 73f766480d..9e9fe5338d 100644 --- a/autogpt/json_utils/json_fix_llm.py +++ b/autogpt/json_utils/json_fix_llm.py @@ -11,7 +11,7 @@ from regex import regex from autogpt.config import Config from autogpt.json_utils.json_fix_general import correct_json -from autogpt.llm import call_ai_function +from autogpt.llm.utils import call_ai_function from autogpt.logs import logger from autogpt.speech import say_text diff --git a/autogpt/llm/__init__.py b/autogpt/llm/__init__.py index 76eaf3b724..22a743c06a 100644 --- a/autogpt/llm/__init__.py +++ b/autogpt/llm/__init__.py @@ -1,4 +1,3 @@ -from autogpt.llm.api_manager import ApiManager from autogpt.llm.base import ( ChatModelInfo, ChatModelResponse, @@ -8,18 +7,8 @@ from autogpt.llm.base import ( Message, ModelInfo, ) -from autogpt.llm.chat import chat_with_ai, create_chat_message, generate_context -from autogpt.llm.llm_utils import ( - call_ai_function, - chunked_tokens, - create_chat_completion, - get_ada_embedding, -) -from autogpt.llm.modelsinfo import COSTS -from autogpt.llm.token_counter import count_message_tokens, count_string_tokens __all__ = [ - "ApiManager", "Message", "ModelInfo", "ChatModelInfo", @@ -27,14 +16,4 @@ __all__ = [ "LLMResponse", "ChatModelResponse", "EmbeddingModelResponse", - "create_chat_message", - "generate_context", - "chat_with_ai", - "call_ai_function", - "create_chat_completion", - "get_ada_embedding", - "chunked_tokens", - "COSTS", - "count_message_tokens", - "count_string_tokens", ] diff --git a/autogpt/llm/api_manager.py b/autogpt/llm/api_manager.py 
index 82cf1c5733..7442579de8 100644 --- a/autogpt/llm/api_manager.py +++ b/autogpt/llm/api_manager.py @@ -6,6 +6,7 @@ import openai from openai import Model from autogpt.config import Config +from autogpt.llm.base import MessageDict from autogpt.llm.modelsinfo import COSTS from autogpt.logs import logger from autogpt.singleton import Singleton @@ -28,7 +29,7 @@ class ApiManager(metaclass=Singleton): def create_chat_completion( self, - messages: list, # type: ignore + messages: list[MessageDict], model: str | None = None, temperature: float = None, max_tokens: int | None = None, @@ -71,7 +72,7 @@ class ApiManager(metaclass=Singleton): self.update_cost(prompt_tokens, completion_tokens, model) return response - def update_cost(self, prompt_tokens, completion_tokens, model): + def update_cost(self, prompt_tokens, completion_tokens, model: str): """ Update the total cost, prompt tokens, and completion tokens. @@ -80,6 +81,9 @@ class ApiManager(metaclass=Singleton): completion_tokens (int): The number of tokens used in the completion. model (str): The model used for the API call. """ + # the .model property in API responses can contain version suffixes like -v2 + model = model[:-3] if model.endswith("-v2") else model + self.total_prompt_tokens += prompt_tokens self.total_completion_tokens += completion_tokens self.total_cost += ( diff --git a/autogpt/llm/base.py b/autogpt/llm/base.py index 722e0f0f1e..5018b70376 100644 --- a/autogpt/llm/base.py +++ b/autogpt/llm/base.py @@ -1,12 +1,28 @@ +from __future__ import annotations + from dataclasses import dataclass, field -from typing import List, TypedDict +from math import ceil, floor +from typing import List, Literal, TypedDict + +MessageRole = Literal["system", "user", "assistant"] +MessageType = Literal["ai_response", "action_result"] -class Message(TypedDict): +class MessageDict(TypedDict): + role: MessageRole + content: str + + +@dataclass +class Message: """OpenAI Message object containing a role and the message content""" - role: str + role: MessageRole content: str + type: MessageType | None = None + + def raw(self) -> MessageDict: + return {"role": self.role, "content": self.content} @dataclass @@ -31,6 +47,13 @@ class ChatModelInfo(ModelInfo): pass +@dataclass +class TextModelInfo(ModelInfo): + """Struct for text completion model information.""" + + pass + + @dataclass class EmbeddingModelInfo(ModelInfo): """Struct for embedding model information.""" @@ -38,6 +61,73 @@ class EmbeddingModelInfo(ModelInfo): embedding_dimensions: int +@dataclass +class ChatSequence: + """Utility container for a chat sequence""" + + model: ChatModelInfo + messages: list[Message] = field(default_factory=list) + + def __getitem__(self, i: int): + return self.messages[i] + + def __iter__(self): + return iter(self.messages) + + def __len__(self): + return len(self.messages) + + def append(self, message: Message): + return self.messages.append(message) + + def extend(self, messages: list[Message] | ChatSequence): + return self.messages.extend(messages) + + def insert(self, index: int, *messages: Message): + for message in reversed(messages): + self.messages.insert(index, message) + + @classmethod + def for_model(cls, model_name: str, messages: list[Message] | ChatSequence = []): + from autogpt.llm.providers.openai import OPEN_AI_CHAT_MODELS + + if not model_name in OPEN_AI_CHAT_MODELS: + raise ValueError(f"Unknown chat model '{model_name}'") + + return ChatSequence( + model=OPEN_AI_CHAT_MODELS[model_name], messages=list(messages) + ) + + def add(self, message_role: 
MessageRole, content: str): + self.messages.append(Message(message_role, content)) + + @property + def token_length(self): + from autogpt.llm.utils import count_message_tokens + + return count_message_tokens(self.messages, self.model.name) + + def raw(self) -> list[MessageDict]: + return [m.raw() for m in self.messages] + + def dump(self) -> str: + SEPARATOR_LENGTH = 42 + + def separator(text: str): + half_sep_len = (SEPARATOR_LENGTH - 2 - len(text)) / 2 + return f"{floor(half_sep_len)*'-'} {text.upper()} {ceil(half_sep_len)*'-'}" + + formatted_messages = "\n".join( + [f"{separator(m.role)}\n{m.content}" for m in self.messages] + ) + return f""" +============== ChatSequence ============== +Length: {self.token_length} tokens; {len(self.messages)} messages +{formatted_messages} +========================================== +""" + + @dataclass class LLMResponse: """Standard response struct for a response from an LLM model.""" diff --git a/autogpt/llm/chat.py b/autogpt/llm/chat.py index a59fe73e9d..c55016f26c 100644 --- a/autogpt/llm/chat.py +++ b/autogpt/llm/chat.py @@ -1,260 +1,200 @@ +from __future__ import annotations + import time from random import shuffle +from typing import TYPE_CHECKING -from openai.error import RateLimitError +if TYPE_CHECKING: + from autogpt.agent.agent import Agent from autogpt.config import Config from autogpt.llm.api_manager import ApiManager -from autogpt.llm.base import Message -from autogpt.llm.llm_utils import create_chat_completion -from autogpt.llm.token_counter import count_message_tokens +from autogpt.llm.base import ChatSequence, Message +from autogpt.llm.utils import count_message_tokens, create_chat_completion from autogpt.log_cycle.log_cycle import CURRENT_CONTEXT_FILE_NAME from autogpt.logs import logger +from autogpt.memory.vector import MemoryItem, get_memory cfg = Config() -def create_chat_message(role, content) -> Message: - """ - Create a chat message with the given role and content. - - Args: - role (str): The role of the message sender, e.g., "system", "user", or "assistant". - content (str): The content of the message. - - Returns: - dict: A dictionary containing the role and content of the message. - """ - return {"role": role, "content": content} - - -def generate_context(prompt, relevant_memory, full_message_history, model): - current_context = [ - create_chat_message("system", prompt), - create_chat_message( - "system", f"The current time and date is {time.strftime('%c')}" - ), - # create_chat_message( - # "system", - # f"This reminds you of these events from your past:\n{relevant_memory}\n\n", - # ), - ] - - # Add messages from the full message history until we reach the token limit - next_message_to_add_index = len(full_message_history) - 1 - insertion_index = len(current_context) - # Count the currently used tokens - current_tokens_used = count_message_tokens(current_context, model) - return ( - next_message_to_add_index, - current_tokens_used, - insertion_index, - current_context, - ) - - # TODO: Change debug from hardcode to argument def chat_with_ai( - agent, prompt, user_input, full_message_history, permanent_memory, token_limit + agent: Agent, + system_prompt: str, + user_input: str, + token_limit: int, ): - """Interact with the OpenAI API, sending the prompt, user input, message history, - and permanent memory.""" - while True: - try: - """ - Interact with the OpenAI API, sending the prompt, user input, - message history, and permanent memory. 
+ """ + Interact with the OpenAI API, sending the prompt, user input, + message history, and permanent memory. - Args: - prompt (str): The prompt explaining the rules to the AI. - user_input (str): The input from the user. - full_message_history (list): The list of all messages sent between the - user and the AI. - permanent_memory (Obj): The memory object containing the permanent - memory. - token_limit (int): The maximum number of tokens allowed in the API call. + Args: + system_prompt (str): The prompt explaining the rules to the AI. + user_input (str): The input from the user. + token_limit (int): The maximum number of tokens allowed in the API call. - Returns: - str: The AI's response. - """ - model = cfg.fast_llm_model # TODO: Change model from hardcode to argument - # Reserve 1000 tokens for the response - logger.debug(f"Token limit: {token_limit}") - send_token_limit = token_limit - 1000 + Returns: + str: The AI's response. + """ + model = cfg.fast_llm_model # TODO: Change model from hardcode to argument + # Reserve 1000 tokens for the response + logger.debug(f"Token limit: {token_limit}") + send_token_limit = token_limit - 1000 - # if len(full_message_history) == 0: - # relevant_memory = "" - # else: - # recent_history = full_message_history[-5:] - # shuffle(recent_history) - # relevant_memories = permanent_memory.get_relevant( - # str(recent_history), 5 - # ) - # if relevant_memories: - # shuffle(relevant_memories) - # relevant_memory = str(relevant_memories) - relevant_memory = "" - logger.debug(f"Memory Stats: {permanent_memory.get_stats()}") + # if len(agent.history) == 0: + # relevant_memory = "" + # else: + # recent_history = agent.history[-5:] + # shuffle(recent_history) + # relevant_memories = agent.memory.get_relevant( + # str(recent_history), 5 + # ) + # if relevant_memories: + # shuffle(relevant_memories) + # relevant_memory = str(relevant_memories) + # logger.debug(f"Memory Stats: {agent.memory.get_stats()}") + relevant_memory = [] - ( - next_message_to_add_index, - current_tokens_used, - insertion_index, - current_context, - ) = generate_context(prompt, relevant_memory, full_message_history, model) + message_sequence = ChatSequence.for_model( + model, + [ + Message("system", system_prompt), + Message("system", f"The current time and date is {time.strftime('%c')}"), + # Message( + # "system", + # f"This reminds you of these events from your past:\n{relevant_memory}\n\n", + # ), + ], + ) - # while current_tokens_used > 2500: - # # remove memories until we are under 2500 tokens - # relevant_memory = relevant_memory[:-1] - # ( - # next_message_to_add_index, - # current_tokens_used, - # insertion_index, - # current_context, - # ) = generate_context( - # prompt, relevant_memory, full_message_history, model - # ) + # Add messages from the full message history until we reach the token limit + next_message_to_add_index = len(agent.history) - 1 + insertion_index = len(message_sequence) + # Count the currently used tokens + current_tokens_used = message_sequence.token_length - current_tokens_used += count_message_tokens( - [create_chat_message("user", user_input)], model - ) # Account for user input (appended later) + # while current_tokens_used > 2500: + # # remove memories until we are under 2500 tokens + # relevant_memory = relevant_memory[:-1] + # ( + # next_message_to_add_index, + # current_tokens_used, + # insertion_index, + # current_context, + # ) = generate_context( + # prompt, relevant_memory, agent.history, model + # ) - current_tokens_used += 500 # Account for memory 
(appended later) TODO: The final memory may be less than 500 tokens + # Account for user input (appended later) + user_input_msg = Message("user", user_input) + current_tokens_used += count_message_tokens([user_input_msg], model) - # Add Messages until the token limit is reached or there are no more messages to add. - while next_message_to_add_index >= 0: - # print (f"CURRENT TOKENS USED: {current_tokens_used}") - message_to_add = full_message_history[next_message_to_add_index] + current_tokens_used += 500 # Reserve space for new_summary_message - tokens_to_add = count_message_tokens([message_to_add], model) - if current_tokens_used + tokens_to_add > send_token_limit: - # save_memory_trimmed_from_context_window( - # full_message_history, - # next_message_to_add_index, - # permanent_memory, - # ) - break + # Add Messages until the token limit is reached or there are no more messages to add. + for cycle in reversed(list(agent.history.per_cycle())): + messages_to_add = [msg for msg in cycle if msg is not None] + tokens_to_add = count_message_tokens(messages_to_add, model) + if current_tokens_used + tokens_to_add > send_token_limit: + break - # Add the most recent message to the start of the current context, - # after the two system prompts. - current_context.insert( - insertion_index, full_message_history[next_message_to_add_index] - ) + # Add the most recent message to the start of the chain, + # after the system prompts. + message_sequence.insert(insertion_index, *messages_to_add) + current_tokens_used += tokens_to_add - # Count the currently used tokens - current_tokens_used += tokens_to_add + # Update & add summary of trimmed messages + if len(agent.history) > 0: + new_summary_message, trimmed_messages = agent.history.trim_messages( + current_message_chain=list(message_sequence), + ) + tokens_to_add = count_message_tokens([new_summary_message], model) + message_sequence.insert(insertion_index, new_summary_message) + current_tokens_used += tokens_to_add - 500 - # Move to the next most recent message in the full message history - next_message_to_add_index -= 1 - from autogpt.memory_management.summary_memory import ( - get_newly_trimmed_messages, - update_running_summary, - ) + # FIXME: uncomment when memory is back in use + # memory_store = get_memory(cfg) + # for _, ai_msg, result_msg in agent.history.per_cycle(trimmed_messages): + # memory_to_add = MemoryItem.from_ai_action(ai_msg, result_msg) + # logger.debug(f"Storing the following memory:\n{memory_to_add.dump()}") + # memory_store.add(memory_to_add) - # Insert Memories - if len(full_message_history) > 0: - ( - newly_trimmed_messages, - agent.last_memory_index, - ) = get_newly_trimmed_messages( - full_message_history=full_message_history, - current_context=current_context, - last_memory_index=agent.last_memory_index, - ) + api_manager = ApiManager() + # inform the AI about its remaining budget (if it has one) + if api_manager.get_total_budget() > 0.0: + remaining_budget = api_manager.get_total_budget() - api_manager.get_total_cost() + if remaining_budget < 0: + remaining_budget = 0 + budget_message = f"Your remaining API budget is ${remaining_budget:.3f}" + ( + " BUDGET EXCEEDED! SHUT DOWN!\n\n" + if remaining_budget == 0 + else " Budget very nearly exceeded! Shut down gracefully!\n\n" + if remaining_budget < 0.005 + else " Budget nearly exceeded. 
Finish up.\n\n" + if remaining_budget < 0.01 + else "\n\n" + ) + logger.debug(budget_message) + message_sequence.add("system", budget_message) + current_tokens_used += count_message_tokens([message_sequence[-1]], model) - agent.summary_memory = update_running_summary( - agent, - current_memory=agent.summary_memory, - new_events=newly_trimmed_messages, - ) - current_context.insert(insertion_index, agent.summary_memory) + # Append user input, the length of this is accounted for above + message_sequence.append(user_input_msg) - api_manager = ApiManager() - # inform the AI about its remaining budget (if it has one) - if api_manager.get_total_budget() > 0.0: - remaining_budget = ( - api_manager.get_total_budget() - api_manager.get_total_cost() - ) - if remaining_budget < 0: - remaining_budget = 0 - system_message = ( - f"Your remaining API budget is ${remaining_budget:.3f}" - + ( - " BUDGET EXCEEDED! SHUT DOWN!\n\n" - if remaining_budget == 0 - else " Budget very nearly exceeded! Shut down gracefully!\n\n" - if remaining_budget < 0.005 - else " Budget nearly exceeded. Finish up.\n\n" - if remaining_budget < 0.01 - else "\n\n" - ) - ) - logger.debug(system_message) - current_context.append(create_chat_message("system", system_message)) + plugin_count = len(cfg.plugins) + for i, plugin in enumerate(cfg.plugins): + if not plugin.can_handle_on_planning(): + continue + plugin_response = plugin.on_planning( + agent.config.prompt_generator, message_sequence.raw() + ) + if not plugin_response or plugin_response == "": + continue + tokens_to_add = count_message_tokens( + [Message("system", plugin_response)], model + ) + if current_tokens_used + tokens_to_add > send_token_limit: + logger.debug(f"Plugin response too long, skipping: {plugin_response}") + logger.debug(f"Plugins remaining at stop: {plugin_count - i}") + break + message_sequence.add("system", plugin_response) - # Append user input, the length of this is accounted for above - current_context.extend([create_chat_message("user", user_input)]) + # Calculate remaining tokens + tokens_remaining = token_limit - current_tokens_used + # assert tokens_remaining >= 0, "Tokens remaining is negative. 
+ # This should never happen, please submit a bug report at + # https://www.github.com/Torantulino/Auto-GPT" - plugin_count = len(cfg.plugins) - for i, plugin in enumerate(cfg.plugins): - if not plugin.can_handle_on_planning(): - continue - plugin_response = plugin.on_planning( - agent.config.prompt_generator, current_context - ) - if not plugin_response or plugin_response == "": - continue - tokens_to_add = count_message_tokens( - [create_chat_message("system", plugin_response)], model - ) - if current_tokens_used + tokens_to_add > send_token_limit: - logger.debug("Plugin response too long, skipping:", plugin_response) - logger.debug("Plugins remaining at stop:", plugin_count - i) - break - current_context.append(create_chat_message("system", plugin_response)) + # Debug print the current context + logger.debug(f"Token limit: {token_limit}") + logger.debug(f"Send Token Count: {current_tokens_used}") + logger.debug(f"Tokens remaining for response: {tokens_remaining}") + logger.debug("------------ CONTEXT SENT TO AI ---------------") + for message in message_sequence: + # Skip printing the prompt + if message.role == "system" and message.content == system_prompt: + continue + logger.debug(f"{message.role.capitalize()}: {message.content}") + logger.debug("") + logger.debug("----------- END OF CONTEXT ----------------") + agent.log_cycle_handler.log_cycle( + agent.config.ai_name, + agent.created_at, + agent.cycle_count, + message_sequence.raw(), + CURRENT_CONTEXT_FILE_NAME, + ) - # Calculate remaining tokens - tokens_remaining = token_limit - current_tokens_used - # assert tokens_remaining >= 0, "Tokens remaining is negative. - # This should never happen, please submit a bug report at - # https://www.github.com/Torantulino/Auto-GPT" + # TODO: use a model defined elsewhere, so that model can contain + # temperature and other settings we care about + assistant_reply = create_chat_completion( + prompt=message_sequence, + max_tokens=tokens_remaining, + ) - # Debug print the current context - logger.debug(f"Token limit: {token_limit}") - logger.debug(f"Send Token Count: {current_tokens_used}") - logger.debug(f"Tokens remaining for response: {tokens_remaining}") - logger.debug("------------ CONTEXT SENT TO AI ---------------") - for message in current_context: - # Skip printing the prompt - if message["role"] == "system" and message["content"] == prompt: - continue - logger.debug(f"{message['role'].capitalize()}: {message['content']}") - logger.debug("") - logger.debug("----------- END OF CONTEXT ----------------") - agent.log_cycle_handler.log_cycle( - agent.config.ai_name, - agent.created_at, - agent.cycle_count, - current_context, - CURRENT_CONTEXT_FILE_NAME, - ) + # Update full message history + agent.history.append(user_input_msg) + agent.history.add("assistant", assistant_reply, "ai_response") - # TODO: use a model defined elsewhere, so that model can contain - # temperature and other settings we care about - assistant_reply = create_chat_completion( - model=model, - messages=current_context, - max_tokens=tokens_remaining, - ) - - # Update full message history - full_message_history.append(create_chat_message("user", user_input)) - full_message_history.append( - create_chat_message("assistant", assistant_reply) - ) - - return assistant_reply - except RateLimitError: - # TODO: When we switch to langchain, this is built in - logger.warn("Error: ", "API Rate Limit Reached. 
Waiting 10 seconds...") - time.sleep(10) + return assistant_reply diff --git a/autogpt/llm/modelsinfo.py b/autogpt/llm/modelsinfo.py index 1bebcbf93f..425472dec4 100644 --- a/autogpt/llm/modelsinfo.py +++ b/autogpt/llm/modelsinfo.py @@ -7,4 +7,5 @@ COSTS = { "gpt-4-32k": {"prompt": 0.06, "completion": 0.12}, "gpt-4-32k-0314": {"prompt": 0.06, "completion": 0.12}, "text-embedding-ada-002": {"prompt": 0.0004, "completion": 0.0}, + "text-davinci-003": {"prompt": 0.02, "completion": 0.02}, } diff --git a/autogpt/llm/providers/openai.py b/autogpt/llm/providers/openai.py index 188d5cf75b..acaf06719f 100644 --- a/autogpt/llm/providers/openai.py +++ b/autogpt/llm/providers/openai.py @@ -1,37 +1,74 @@ -from autogpt.llm.base import ChatModelInfo, EmbeddingModelInfo +from autogpt.llm.base import ChatModelInfo, EmbeddingModelInfo, TextModelInfo OPEN_AI_CHAT_MODELS = { - "gpt-3.5-turbo": ChatModelInfo( - name="gpt-3.5-turbo", - prompt_token_cost=0.002, - completion_token_cost=0.002, - max_tokens=4096, - ), - "gpt-4": ChatModelInfo( - name="gpt-4", - prompt_token_cost=0.03, - completion_token_cost=0.06, - max_tokens=8192, - ), - "gpt-4-32k": ChatModelInfo( - name="gpt-4-32k", - prompt_token_cost=0.06, - completion_token_cost=0.12, - max_tokens=32768, - ), + info.name: info + for info in [ + ChatModelInfo( + name="gpt-3.5-turbo", + prompt_token_cost=0.002, + completion_token_cost=0.002, + max_tokens=4096, + ), + ChatModelInfo( + name="gpt-3.5-turbo-0301", + prompt_token_cost=0.002, + completion_token_cost=0.002, + max_tokens=4096, + ), + ChatModelInfo( + name="gpt-4", + prompt_token_cost=0.03, + completion_token_cost=0.06, + max_tokens=8192, + ), + ChatModelInfo( + name="gpt-4-0314", + prompt_token_cost=0.03, + completion_token_cost=0.06, + max_tokens=8192, + ), + ChatModelInfo( + name="gpt-4-32k", + prompt_token_cost=0.06, + completion_token_cost=0.12, + max_tokens=32768, + ), + ChatModelInfo( + name="gpt-4-32k-0314", + prompt_token_cost=0.06, + completion_token_cost=0.12, + max_tokens=32768, + ), + ] +} + +OPEN_AI_TEXT_MODELS = { + info.name: info + for info in [ + TextModelInfo( + name="text-davinci-003", + prompt_token_cost=0.02, + completion_token_cost=0.02, + max_tokens=4097, + ), + ] } OPEN_AI_EMBEDDING_MODELS = { - "text-embedding-ada-002": EmbeddingModelInfo( - name="text-embedding-ada-002", - prompt_token_cost=0.0004, - completion_token_cost=0.0, - max_tokens=8191, - embedding_dimensions=1536, - ), + info.name: info + for info in [ + EmbeddingModelInfo( + name="text-embedding-ada-002", + prompt_token_cost=0.0004, + completion_token_cost=0.0, + max_tokens=8191, + embedding_dimensions=1536, + ), + ] } -OPEN_AI_MODELS = { +OPEN_AI_MODELS: dict[str, ChatModelInfo | EmbeddingModelInfo | TextModelInfo] = { **OPEN_AI_CHAT_MODELS, + **OPEN_AI_TEXT_MODELS, **OPEN_AI_EMBEDDING_MODELS, } diff --git a/autogpt/llm/llm_utils.py b/autogpt/llm/utils/__init__.py similarity index 50% rename from autogpt/llm/llm_utils.py rename to autogpt/llm/utils/__init__.py index fcd1b2d83a..b6bcc1d39f 100644 --- a/autogpt/llm/llm_utils.py +++ b/autogpt/llm/utils/__init__.py @@ -2,20 +2,58 @@ from __future__ import annotations import functools import time -from itertools import islice from typing import List, Literal, Optional +from unittest.mock import patch -import numpy as np import openai -import tiktoken +import openai.api_resources.abstract.engine_api_resource as engine_api_resource +import openai.util from colorama import Fore, Style -from openai.error import APIError, RateLimitError, Timeout +from openai.error import 
APIError, RateLimitError +from openai.openai_object import OpenAIObject from autogpt.config import Config -from autogpt.llm.api_manager import ApiManager -from autogpt.llm.base import Message from autogpt.logs import logger +from ..api_manager import ApiManager +from ..base import ChatSequence, Message +from .token_counter import * + + +def metered(func): + """Adds ApiManager metering to functions which make OpenAI API calls""" + api_manager = ApiManager() + + openai_obj_processor = openai.util.convert_to_openai_object + + def update_usage_with_response(response: OpenAIObject): + try: + usage = response.usage + logger.debug(f"Reported usage from call to model {response.model}: {usage}") + api_manager.update_cost( + response.usage.prompt_tokens, + response.usage.completion_tokens if "completion_tokens" in usage else 0, + response.model, + ) + except Exception as err: + logger.warn(f"Failed to update API costs: {err.__class__.__name__}: {err}") + + def metering_wrapper(*args, **kwargs): + openai_obj = openai_obj_processor(*args, **kwargs) + if isinstance(openai_obj, OpenAIObject) and "usage" in openai_obj: + update_usage_with_response(openai_obj) + return openai_obj + + def metered_func(*args, **kwargs): + with patch.object( + engine_api_resource.util, + "convert_to_openai_object", + side_effect=metering_wrapper, + ): + return func(*args, **kwargs) + + return metered_func + def retry_openai_api( num_retries: int = 10, @@ -93,23 +131,57 @@ def call_ai_function( # For each arg, if any are None, convert to "None": args = [str(arg) if arg is not None else "None" for arg in args] # parse args to comma separated string - args: str = ", ".join(args) - messages: List[Message] = [ - { - "role": "system", - "content": f"You are now the following python function: ```# {description}" - f"\n{function}```\n\nOnly respond with your `return` value.", - }, - {"role": "user", "content": args}, - ] + arg_str: str = ", ".join(args) - return create_chat_completion(model=model, messages=messages, temperature=0) + prompt = ChatSequence.for_model( + model, + [ + Message( + "system", + f"You are now the following python function: ```# {description}" + f"\n{function}```\n\nOnly respond with your `return` value.", + ), + Message("user", arg_str), + ], + ) + return create_chat_completion(prompt=prompt, temperature=0) + + +@metered +@retry_openai_api() +def create_text_completion( + prompt: str, + model: Optional[str], + temperature: Optional[float], + max_output_tokens: Optional[int], +) -> str: + cfg = Config() + if model is None: + model = cfg.fast_llm_model + if temperature is None: + temperature = cfg.temperature + + if cfg.use_azure: + kwargs = {"deployment_id": cfg.get_azure_deployment_id_for_model(model)} + else: + kwargs = {"model": model} + + response = openai.Completion.create( + **kwargs, + prompt=prompt, + temperature=temperature, + max_tokens=max_output_tokens, + api_key=cfg.openai_api_key, + ) + return response.choices[0].text # Overly simple abstraction until we create something better # simple retry mechanism when getting a rate error or a bad gateway +@metered +@retry_openai_api() def create_chat_completion( - messages: List[Message], # type: ignore + prompt: ChatSequence, model: Optional[str] = None, temperature: float = None, max_tokens: Optional[int] = None, @@ -126,23 +198,23 @@ def create_chat_completion( str: The response from the chat completion """ cfg = Config() + if model is None: + model = prompt.model.name if temperature is None: temperature = cfg.temperature - num_retries = 10 - 
warned_user = False logger.debug( f"{Fore.GREEN}Creating chat completion with model {model}, temperature {temperature}, max_tokens {max_tokens}{Fore.RESET}" ) for plugin in cfg.plugins: if plugin.can_handle_chat_completion( - messages=messages, + messages=prompt.raw(), model=model, temperature=temperature, max_tokens=max_tokens, ): message = plugin.handle_chat_completion( - messages=messages, + messages=prompt.raw(), model=model, temperature=temperature, max_tokens=max_tokens, @@ -151,57 +223,19 @@ def create_chat_completion( return message api_manager = ApiManager() response = None - for attempt in range(num_retries): - backoff = 2 ** (attempt + 2) - try: - if cfg.use_azure: - response = api_manager.create_chat_completion( - deployment_id=cfg.get_azure_deployment_id_for_model(model), - model=model, - messages=messages, - temperature=temperature, - max_tokens=max_tokens, - ) - else: - response = api_manager.create_chat_completion( - model=model, - messages=messages, - temperature=temperature, - max_tokens=max_tokens, - ) - break - except RateLimitError: - logger.debug( - f"{Fore.RED}Error: ", f"Reached rate limit, passing...{Fore.RESET}" - ) - if not warned_user: - logger.double_check( - f"Please double check that you have setup a {Fore.CYAN + Style.BRIGHT}PAID{Style.RESET_ALL} OpenAI API Account. " - + f"You can read more here: {Fore.CYAN}https://docs.agpt.co/setup/#getting-an-api-key{Fore.RESET}" - ) - warned_user = True - except (APIError, Timeout) as e: - if e.http_status != 502: - raise - if attempt == num_retries - 1: - raise - logger.debug( - f"{Fore.RED}Error: ", - f"API Bad gateway. Waiting {backoff} seconds...{Fore.RESET}", - ) - time.sleep(backoff) - if response is None: - logger.typewriter_log( - "FAILED TO GET RESPONSE FROM OPENAI", - Fore.RED, - "Auto-GPT has failed to get a response from OpenAI's services. " - + f"Try running Auto-GPT again, and if the problem the persists try running it with `{Fore.CYAN}--debug{Fore.RESET}`.", - ) - logger.double_check() - if cfg.debug_mode: - raise RuntimeError(f"Failed to get response after {num_retries} retries") - else: - quit(1) + + if cfg.use_azure: + kwargs = {"deployment_id": cfg.get_azure_deployment_id_for_model(model)} + else: + kwargs = {"model": model} + + response = api_manager.create_chat_completion( + **kwargs, + messages=prompt.raw(), + temperature=temperature, + max_tokens=max_tokens, + ) + resp = response.choices[0].message["content"] for plugin in cfg.plugins: if not plugin.can_handle_on_response(): @@ -210,91 +244,6 @@ def create_chat_completion( return resp -def batched(iterable, n): - """Batch data into tuples of length n. The last batch may be shorter.""" - # batched('ABCDEFG', 3) --> ABC DEF G - if n < 1: - raise ValueError("n must be at least one") - it = iter(iterable) - while batch := tuple(islice(it, n)): - yield batch - - -def chunked_tokens(text, tokenizer_name, chunk_length): - tokenizer = tiktoken.get_encoding(tokenizer_name) - tokens = tokenizer.encode(text) - chunks_iterator = batched(tokens, chunk_length) - yield from chunks_iterator - - -def get_ada_embedding(text: str) -> List[float]: - """Get an embedding from the ada model. - - Args: - text (str): The text to embed. - - Returns: - List[float]: The embedding. 
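
For illustration only (not part of this patch): a minimal sketch of how a caller uses the reworked helpers, assuming the signatures defined above and a configured OpenAI API key. The prompt text and model names are examples.

    from autogpt.llm.base import ChatSequence, Message
    from autogpt.llm.utils import create_chat_completion, create_text_completion

    # Build a ChatSequence bound to a model; create_chat_completion reads the model from it.
    prompt = ChatSequence.for_model(
        "gpt-3.5-turbo",
        [
            Message("system", "You are a helpful assistant."),
            Message("user", "Summarize why a running summary is useful."),
        ],
    )
    reply = create_chat_completion(prompt=prompt, temperature=0)

    # The new create_text_completion helper wraps the legacy completions endpoint.
    text = create_text_completion(
        prompt="Say hello.",
        model="text-davinci-003",
        temperature=0,
        max_output_tokens=50,
    )
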
- """ - cfg = Config() - model = cfg.embedding_model - text = text.replace("\n", " ") - - if cfg.use_azure: - kwargs = {"engine": cfg.get_azure_deployment_id_for_model(model)} - else: - kwargs = {"model": model} - - embedding = create_embedding(text, **kwargs) - return embedding - - -@retry_openai_api() -def create_embedding( - text: str, - *_, - **kwargs, -) -> openai.Embedding: - """Create an embedding using the OpenAI API - - Args: - text (str): The text to embed. - kwargs: Other arguments to pass to the OpenAI API embedding creation call. - - Returns: - openai.Embedding: The embedding object. - """ - cfg = Config() - chunk_embeddings = [] - chunk_lengths = [] - for chunk in chunked_tokens( - text, - tokenizer_name=cfg.embedding_tokenizer, - chunk_length=cfg.embedding_token_limit, - ): - embedding = openai.Embedding.create( - input=[chunk], - api_key=cfg.openai_api_key, - **kwargs, - ) - api_manager = ApiManager() - api_manager.update_cost( - prompt_tokens=embedding.usage.prompt_tokens, - completion_tokens=0, - model=cfg.embedding_model, - ) - chunk_embeddings.append(embedding["data"][0]["embedding"]) - chunk_lengths.append(len(chunk)) - - # do weighted avg - chunk_embeddings = np.average(chunk_embeddings, axis=0, weights=chunk_lengths) - chunk_embeddings = chunk_embeddings / np.linalg.norm( - chunk_embeddings - ) # normalize the length to one - chunk_embeddings = chunk_embeddings.tolist() - return chunk_embeddings - - def check_model( model_name: str, model_type: Literal["smart_llm_model", "fast_llm_model"] ) -> str: diff --git a/autogpt/llm/token_counter.py b/autogpt/llm/utils/token_counter.py similarity index 98% rename from autogpt/llm/token_counter.py rename to autogpt/llm/utils/token_counter.py index 5e13920ed8..bd1dcf1b3b 100644 --- a/autogpt/llm/token_counter.py +++ b/autogpt/llm/utils/token_counter.py @@ -53,7 +53,7 @@ def count_message_tokens( num_tokens = 0 for message in messages: num_tokens += tokens_per_message - for key, value in message.items(): + for key, value in message.raw().items(): num_tokens += len(encoding.encode(value)) if key == "name": num_tokens += tokens_per_name diff --git a/autogpt/main.py b/autogpt/main.py index 25d7161ef9..03f685db63 100644 --- a/autogpt/main.py +++ b/autogpt/main.py @@ -5,12 +5,12 @@ from pathlib import Path from colorama import Fore, Style -from autogpt.agent.agent import Agent +from autogpt.agent import Agent from autogpt.commands.command import CommandRegistry from autogpt.config import Config, check_openai_api_key from autogpt.configurator import create_config from autogpt.logs import logger -from autogpt.memory import get_memory +from autogpt.memory.vector import get_memory from autogpt.plugins import scan_plugins from autogpt.prompts.prompt import DEFAULT_TRIGGERING_PROMPT, construct_main_ai_config from autogpt.utils import ( @@ -160,7 +160,6 @@ def run_auto_gpt( ai_name = ai_config.ai_name # print(prompt) # Initialize variables - full_message_history = [] next_action_count = 0 # add chat plugins capable of report to logger @@ -184,7 +183,6 @@ def run_auto_gpt( agent = Agent( ai_name=ai_name, memory=memory, - full_message_history=full_message_history, next_action_count=next_action_count, command_registry=command_registry, config=ai_config, diff --git a/autogpt/memory/__init__.py b/autogpt/memory/__init__.py deleted file mode 100644 index a955a3e7bf..0000000000 --- a/autogpt/memory/__init__.py +++ /dev/null @@ -1,96 +0,0 @@ -from autogpt.logs import logger -from autogpt.memory.local import LocalCache -from autogpt.memory.no_memory 
import NoMemory - -# List of supported memory backends -# Add a backend to this list if the import attempt is successful -supported_memory = ["local", "no_memory"] - -try: - from autogpt.memory.redismem import RedisMemory - - supported_memory.append("redis") -except ImportError: - RedisMemory = None - -try: - from autogpt.memory.pinecone import PineconeMemory - - supported_memory.append("pinecone") -except ImportError: - PineconeMemory = None - -try: - from autogpt.memory.weaviate import WeaviateMemory - - supported_memory.append("weaviate") -except ImportError: - WeaviateMemory = None - -try: - from autogpt.memory.milvus import MilvusMemory - - supported_memory.append("milvus") -except ImportError: - MilvusMemory = None - - -def get_memory(cfg, init=False): - memory = None - if cfg.memory_backend == "pinecone": - if not PineconeMemory: - logger.warn( - "Error: Pinecone is not installed. Please install pinecone" - " to use Pinecone as a memory backend." - ) - else: - memory = PineconeMemory(cfg) - if init: - memory.clear() - elif cfg.memory_backend == "redis": - if not RedisMemory: - logger.warn( - "Error: Redis is not installed. Please install redis-py to" - " use Redis as a memory backend." - ) - else: - memory = RedisMemory(cfg) - elif cfg.memory_backend == "weaviate": - if not WeaviateMemory: - logger.warn( - "Error: Weaviate is not installed. Please install weaviate-client to" - " use Weaviate as a memory backend." - ) - else: - memory = WeaviateMemory(cfg) - elif cfg.memory_backend == "milvus": - if not MilvusMemory: - logger.warn( - "Error: pymilvus sdk is not installed." - "Please install pymilvus to use Milvus or Zilliz Cloud as memory backend." - ) - else: - memory = MilvusMemory(cfg) - elif cfg.memory_backend == "no_memory": - memory = NoMemory(cfg) - - if memory is None: - memory = LocalCache(cfg) - if init: - memory.clear() - return memory - - -def get_supported_memory_backends(): - return supported_memory - - -__all__ = [ - "get_memory", - "LocalCache", - "RedisMemory", - "PineconeMemory", - "NoMemory", - "MilvusMemory", - "WeaviateMemory", -] diff --git a/autogpt/memory/base.py b/autogpt/memory/base.py deleted file mode 100644 index d99cadde8e..0000000000 --- a/autogpt/memory/base.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Base class for memory providers.""" -import abc - -from autogpt.singleton import AbstractSingleton - - -class MemoryProviderSingleton(AbstractSingleton): - @abc.abstractmethod - def add(self, data): - """Adds to memory""" - pass - - @abc.abstractmethod - def get(self, data): - """Gets from memory""" - pass - - @abc.abstractmethod - def clear(self): - """Clears memory""" - pass - - @abc.abstractmethod - def get_relevant(self, data, num_relevant=5): - """Gets relevant memory for""" - pass - - @abc.abstractmethod - def get_stats(self): - """Get stats from memory""" - pass diff --git a/autogpt/memory/local.py b/autogpt/memory/local.py deleted file mode 100644 index 18b7255cb5..0000000000 --- a/autogpt/memory/local.py +++ /dev/null @@ -1,126 +0,0 @@ -from __future__ import annotations - -import dataclasses -from pathlib import Path -from typing import Any, List - -import numpy as np -import orjson - -from autogpt.llm import get_ada_embedding -from autogpt.memory.base import MemoryProviderSingleton - -EMBED_DIM = 1536 -SAVE_OPTIONS = orjson.OPT_SERIALIZE_NUMPY | orjson.OPT_SERIALIZE_DATACLASS - - -def create_default_embeddings(): - return np.zeros((0, EMBED_DIM)).astype(np.float32) - - -@dataclasses.dataclass -class CacheContent: - texts: List[str] = 
dataclasses.field(default_factory=list) - embeddings: np.ndarray = dataclasses.field( - default_factory=create_default_embeddings - ) - - -class LocalCache(MemoryProviderSingleton): - """A class that stores the memory in a local file""" - - def __init__(self, cfg) -> None: - """Initialize a class instance - - Args: - cfg: Config object - - Returns: - None - """ - workspace_path = Path(cfg.workspace_path) - self.filename = workspace_path / f"{cfg.memory_index}.json" - - self.filename.touch(exist_ok=True) - - file_content = b"{}" - with self.filename.open("w+b") as f: - f.write(file_content) - - self.data = CacheContent() - - def add(self, text: str): - """ - Add text to our list of texts, add embedding as row to our - embeddings-matrix - - Args: - text: str - - Returns: None - """ - if "Command Error:" in text: - return "" - self.data.texts.append(text) - - embedding = get_ada_embedding(text) - - vector = np.array(embedding).astype(np.float32) - vector = vector[np.newaxis, :] - self.data.embeddings = np.concatenate( - [ - self.data.embeddings, - vector, - ], - axis=0, - ) - - with open(self.filename, "wb") as f: - out = orjson.dumps(self.data, option=SAVE_OPTIONS) - f.write(out) - return text - - def clear(self) -> str: - """ - Clears the data in memory. - - Returns: A message indicating that the memory has been cleared. - """ - self.data = CacheContent() - return "Obliviated" - - def get(self, data: str) -> list[Any] | None: - """ - Gets the data from the memory that is most relevant to the given data. - - Args: - data: The data to compare to. - - Returns: The most relevant data. - """ - return self.get_relevant(data, 1) - - def get_relevant(self, text: str, k: int) -> list[Any]: - """ " - matrix-vector mult to find score-for-each-row-of-matrix - get indices for top-k winning scores - return texts for those indices - Args: - text: str - k: int - - Returns: List[str] - """ - embedding = get_ada_embedding(text) - - scores = np.dot(self.data.embeddings, embedding) - - top_k_indices = np.argsort(scores)[-k:][::-1] - - return [self.data.texts[i] for i in top_k_indices] - - def get_stats(self) -> tuple[int, tuple[int, ...]]: - """ - Returns: The stats of the local cache. 
-        """
-        return len(self.data.texts), self.data.embeddings.shape
diff --git a/autogpt/memory/message_history.py b/autogpt/memory/message_history.py
new file mode 100644
index 0000000000..fcb96a94d1
--- /dev/null
+++ b/autogpt/memory/message_history.py
@@ -0,0 +1,204 @@
+from __future__ import annotations
+
+import copy
+import json
+from dataclasses import dataclass, field
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from autogpt.agent import Agent
+
+from autogpt.config import Config
+from autogpt.json_utils.utilities import (
+    LLM_DEFAULT_RESPONSE_FORMAT,
+    is_string_valid_json,
+)
+from autogpt.llm.base import ChatSequence, Message, MessageRole, MessageType
+from autogpt.llm.utils import create_chat_completion
+from autogpt.log_cycle.log_cycle import PROMPT_SUMMARY_FILE_NAME, SUMMARY_FILE_NAME
+from autogpt.logs import logger
+
+
+@dataclass
+class MessageHistory:
+    agent: Agent
+
+    messages: list[Message] = field(default_factory=list)
+    summary: str = "I was created"
+
+    last_trimmed_index: int = 0
+
+    def __getitem__(self, i: int):
+        return self.messages[i]
+
+    def __iter__(self):
+        return iter(self.messages)
+
+    def __len__(self):
+        return len(self.messages)
+
+    def add(
+        self,
+        role: MessageRole,
+        content: str,
+        type: MessageType | None = None,
+    ):
+        return self.append(Message(role, content, type))
+
+    def append(self, message: Message):
+        return self.messages.append(message)
+
+    def trim_messages(
+        self,
+        current_message_chain: list[Message],
+    ) -> tuple[Message, list[Message]]:
+        """
+        Returns a list of trimmed messages: messages which are in the message history
+        but not in current_message_chain.
+
+        Args:
+            current_message_chain (list[Message]): The messages currently in the context.
+
+        Returns:
+            Message: A message with the new running summary after adding the trimmed messages.
+            list[Message]: A list of messages that are in full_message_history with an index higher than last_trimmed_index and absent from current_message_chain.
+        """
+        # Select messages in full_message_history with an index higher than last_trimmed_index
+        new_messages = [
+            msg for i, msg in enumerate(self) if i > self.last_trimmed_index
+        ]
+
+        # Remove messages that are already present in current_message_chain
+        new_messages_not_in_chain = [
+            msg for msg in new_messages if msg not in current_message_chain
+        ]
+
+        if not new_messages_not_in_chain:
+            return self.summary_message(), []
+
+        new_summary_message = self.update_running_summary(
+            new_events=new_messages_not_in_chain
+        )
+
+        # Find the index of the last message processed
+        last_message = new_messages_not_in_chain[-1]
+        self.last_trimmed_index = self.messages.index(last_message)
+
+        return new_summary_message, new_messages_not_in_chain
+
+    def per_cycle(self, messages: list[Message] | None = None):
+        """
+        Yields:
+            Message: a message containing user input
+            Message: a message from the AI containing a proposed action
+            Message: the message containing the result of the AI's proposed action
+        """
+        messages = messages or self.messages
+        for i in range(0, len(messages) - 1):
+            ai_message = messages[i]
+            if ai_message.type != "ai_response":
+                continue
+            user_message = (
+                messages[i - 1] if i > 0 and messages[i - 1].role == "user" else None
+            )
+            result_message = messages[i + 1]
+            try:
+                assert is_string_valid_json(
+                    ai_message.content, LLM_DEFAULT_RESPONSE_FORMAT
+                ), "AI response is not a valid JSON object"
+                assert result_message.type == "action_result"
+
+                yield user_message, ai_message, result_message
+            except AssertionError as err:
+                logger.debug(
+                    f"Invalid item in message history: {err}; Messages: {messages[i-1:i+2]}"
+                )
+
+    def summary_message(self) -> Message:
+        return Message(
+            "system",
+            f"This reminds you of these events from your past: \n{self.summary}",
+        )
+
+    def update_running_summary(self, new_events: list[Message]) -> Message:
+        """
+        This function takes a list of dictionaries representing new events and combines them with the current summary,
+        focusing on key and potentially important information to remember. The updated summary is returned in a message
+        formatted in the 1st person past tense.
+
+        Args:
+            new_events (List[Dict]): A list of dictionaries containing the latest events to be added to the summary.
+
+        Returns:
+            str: A message containing the updated summary of actions, formatted in the 1st person past tense.
+
+        Example:
+            new_events = [{"event": "entered the kitchen."}, {"event": "found a scrawled note with the number 7"}]
+            update_running_summary(new_events)
+            # Returns: "This reminds you of these events from your past: \nI entered the kitchen and found a scrawled note saying 7."
+        """
+        cfg = Config()
+
+        if not new_events:
+            return self.summary_message()
+
+        # Create a copy of the new_events list to prevent modifying the original list
+        new_events = copy.deepcopy(new_events)
+
+        # Replace "assistant" with "you". This produces much better first person past tense results.
+        for event in new_events:
+            if event.role.lower() == "assistant":
+                event.role = "you"
+
+                # Remove "thoughts" dictionary from "content"
+                try:
+                    content_dict = json.loads(event.content)
+                    if "thoughts" in content_dict:
+                        del content_dict["thoughts"]
+                    event.content = json.dumps(content_dict)
+                except json.decoder.JSONDecodeError:
+                    if cfg.debug_mode:
+                        logger.error(f"Error: Invalid JSON: {event.content}\n")
+
+            elif event.role.lower() == "system":
+                event.role = "your computer"
+
+            # Delete all user messages
+            elif event.role == "user":
+                new_events.remove(event)
+
+        prompt = f'''Your task is to create a concise running summary of actions and information results in the provided text, focusing on key and potentially important information to remember.
+
+You will receive the current summary and the your latest actions. Combine them, adding relevant key information from the latest development in 1st person past tense and keeping the summary concise.
+
+Summary So Far:
+"""
+{self.summary}
+"""
+
+Latest Development:
+"""
+{new_events or "Nothing new happened."}
+"""
+'''
+
+        prompt = ChatSequence.for_model(cfg.fast_llm_model, [Message("user", prompt)])
+        self.agent.log_cycle_handler.log_cycle(
+            self.agent.config.ai_name,
+            self.agent.created_at,
+            self.agent.cycle_count,
+            prompt.raw(),
+            PROMPT_SUMMARY_FILE_NAME,
+        )
+
+        self.summary = create_chat_completion(prompt)
+
+        self.agent.log_cycle_handler.log_cycle(
+            self.agent.config.ai_name,
+            self.agent.created_at,
+            self.agent.cycle_count,
+            self.summary,
+            SUMMARY_FILE_NAME,
+        )
+
+        return self.summary_message()
diff --git a/autogpt/memory/milvus.py b/autogpt/memory/milvus.py
deleted file mode 100644
index 40694df558..0000000000
--- a/autogpt/memory/milvus.py
+++ /dev/null
@@ -1,162 +0,0 @@
-""" Milvus memory storage provider."""
-import re
-
-from pymilvus import Collection, CollectionSchema, DataType, FieldSchema, connections
-
-from autogpt.config import Config
-from autogpt.llm import get_ada_embedding
-from autogpt.memory.base import MemoryProviderSingleton
-
-
-class MilvusMemory(MemoryProviderSingleton):
-    """Milvus memory storage provider."""
-
-    def __init__(self, cfg: Config) -> None:
-        """Construct a milvus memory storage connection.
-
-        Args:
-            cfg (Config): Auto-GPT global config.
-        """
-        self.configure(cfg)
-
-        connect_kwargs = {}
-        if self.username:
-            connect_kwargs["user"] = self.username
-            connect_kwargs["password"] = self.password
-
-        connections.connect(
-            **connect_kwargs,
-            uri=self.uri or "",
-            address=self.address or "",
-            secure=self.secure,
-        )
-
-        self.init_collection()
-
-    def configure(self, cfg: Config) -> None:
-        # init with configuration.
-        self.uri = None
-        self.address = cfg.milvus_addr
-        self.secure = cfg.milvus_secure
-        self.username = cfg.milvus_username
-        self.password = cfg.milvus_password
-        self.collection_name = cfg.milvus_collection
-        # use HNSW by default.
-        self.index_params = {
-            "metric_type": "IP",
-            "index_type": "HNSW",
-            "params": {"M": 8, "efConstruction": 64},
-        }
-
-        if (self.username is None) != (self.password is None):
-            raise ValueError(
-                "Both username and password must be set to use authentication for Milvus"
-            )
-
-        # configured address may be a full URL.
-        if re.match(r"^(https?|tcp)://", self.address) is not None:
-            self.uri = self.address
-            self.address = None
-
-            if self.uri.startswith("https"):
-                self.secure = True
-
-            # Zilliz Cloud requires AutoIndex.
- if re.match(r"^https://(.*)\.zillizcloud\.(com|cn)", self.uri) is not None: - self.index_params = { - "metric_type": "IP", - "index_type": "AUTOINDEX", - "params": {}, - } - - def init_collection(self) -> None: - """Initialize collection in vector database.""" - fields = [ - FieldSchema(name="pk", dtype=DataType.INT64, is_primary=True, auto_id=True), - FieldSchema(name="embeddings", dtype=DataType.FLOAT_VECTOR, dim=1536), - FieldSchema(name="raw_text", dtype=DataType.VARCHAR, max_length=65535), - ] - - # create collection if not exist and load it. - self.schema = CollectionSchema(fields, "auto-gpt memory storage") - self.collection = Collection(self.collection_name, self.schema) - # create index if not exist. - if not self.collection.has_index(): - self.collection.release() - self.collection.create_index( - "embeddings", - self.index_params, - index_name="embeddings", - ) - self.collection.load() - - def add(self, data) -> str: - """Add an embedding of data into memory. - - Args: - data (str): The raw text to construct embedding index. - - Returns: - str: log. - """ - embedding = get_ada_embedding(data) - result = self.collection.insert([[embedding], [data]]) - _text = ( - "Inserting data into memory at primary key: " - f"{result.primary_keys[0]}:\n data: {data}" - ) - return _text - - def get(self, data): - """Return the most relevant data in memory. - Args: - data: The data to compare to. - """ - return self.get_relevant(data, 1) - - def clear(self) -> str: - """Drop the index in memory. - - Returns: - str: log. - """ - self.collection.drop() - self.collection = Collection(self.collection_name, self.schema) - self.collection.create_index( - "embeddings", - self.index_params, - index_name="embeddings", - ) - self.collection.load() - return "Obliviated" - - def get_relevant(self, data: str, num_relevant: int = 5): - """Return the top-k relevant data in memory. - Args: - data: The data to compare to. - num_relevant (int, optional): The max number of relevant data. - Defaults to 5. - - Returns: - list: The top-k relevant data. - """ - # search the embedding and return the most relevant text. - embedding = get_ada_embedding(data) - search_params = { - "metrics_type": "IP", - "params": {"nprobe": 8}, - } - result = self.collection.search( - [embedding], - "embeddings", - search_params, - num_relevant, - output_fields=["raw_text"], - ) - return [item.entity.value_of_field("raw_text") for item in result[0]] - - def get_stats(self) -> str: - """ - Returns: The stats of the milvus cache. - """ - return f"Entities num: {self.collection.num_entities}" diff --git a/autogpt/memory/no_memory.py b/autogpt/memory/no_memory.py deleted file mode 100644 index 0371e96ae8..0000000000 --- a/autogpt/memory/no_memory.py +++ /dev/null @@ -1,73 +0,0 @@ -"""A class that does not store any data. This is the default memory provider.""" -from __future__ import annotations - -from typing import Any - -from autogpt.memory.base import MemoryProviderSingleton - - -class NoMemory(MemoryProviderSingleton): - """ - A class that does not store any data. This is the default memory provider. - """ - - def __init__(self, cfg): - """ - Initializes the NoMemory provider. - - Args: - cfg: The config object. - - Returns: None - """ - pass - - def add(self, data: str) -> str: - """ - Adds a data point to the memory. No action is taken in NoMemory. - - Args: - data: The data to add. - - Returns: An empty string. 
- """ - return "" - - def get(self, data: str) -> list[Any] | None: - """ - Gets the data from the memory that is most relevant to the given data. - NoMemory always returns None. - - Args: - data: The data to compare to. - - Returns: None - """ - return None - - def clear(self) -> str: - """ - Clears the memory. No action is taken in NoMemory. - - Returns: An empty string. - """ - return "" - - def get_relevant(self, data: str, num_relevant: int = 5) -> list[Any] | None: - """ - Returns all the data in the memory that is relevant to the given data. - NoMemory always returns None. - - Args: - data: The data to compare to. - num_relevant: The number of relevant data to return. - - Returns: None - """ - return None - - def get_stats(self): - """ - Returns: An empty dictionary as there are no stats in NoMemory. - """ - return {} diff --git a/autogpt/memory/pinecone.py b/autogpt/memory/pinecone.py deleted file mode 100644 index 1c73ebd788..0000000000 --- a/autogpt/memory/pinecone.py +++ /dev/null @@ -1,78 +0,0 @@ -import pinecone -from colorama import Fore, Style - -from autogpt.llm import get_ada_embedding -from autogpt.logs import logger -from autogpt.memory.base import MemoryProviderSingleton - - -class PineconeMemory(MemoryProviderSingleton): - def __init__(self, cfg): - pinecone_api_key = cfg.pinecone_api_key - pinecone_region = cfg.pinecone_region - pinecone.init(api_key=pinecone_api_key, environment=pinecone_region) - dimension = 1536 - metric = "cosine" - pod_type = "p1" - table_name = "auto-gpt" - # this assumes we don't start with memory. - # for now this works. - # we'll need a more complicated and robust system if we want to start with - # memory. - self.vec_num = 0 - - try: - pinecone.whoami() - except Exception as e: - logger.typewriter_log( - "FAILED TO CONNECT TO PINECONE", - Fore.RED, - Style.BRIGHT + str(e) + Style.RESET_ALL, - ) - logger.double_check( - "Please ensure you have setup and configured Pinecone properly for use." - + f"You can check out {Fore.CYAN + Style.BRIGHT}" - "https://docs.agpt.co/configuration/memory/#pinecone-api-key-setup" - f"{Style.RESET_ALL} to ensure you've set up everything correctly." - ) - exit(1) - - if table_name not in pinecone.list_indexes(): - logger.typewriter_log( - "Connecting Pinecone. This may take some time...", Fore.MAGENTA, "" - ) - pinecone.create_index( - table_name, dimension=dimension, metric=metric, pod_type=pod_type - ) - self.index = pinecone.Index(table_name) - - def add(self, data): - vector = get_ada_embedding(data) - # no metadata here. We may wish to change that long term. - self.index.upsert([(str(self.vec_num), vector, {"raw_text": data})]) - _text = f"Inserting data into memory at index: {self.vec_num}:\n data: {data}" - self.vec_num += 1 - return _text - - def get(self, data): - return self.get_relevant(data, 1) - - def clear(self): - self.index.delete(deleteAll=True) - return "Obliviated" - - def get_relevant(self, data, num_relevant=5): - """ - Returns all the data in the memory that is relevant to the given data. - :param data: The data to compare to. - :param num_relevant: The number of relevant data to return. 
Defaults to 5 - """ - query_embedding = get_ada_embedding(data) - results = self.index.query( - query_embedding, top_k=num_relevant, include_metadata=True - ) - sorted_results = sorted(results.matches, key=lambda x: x.score) - return [str(item["metadata"]["raw_text"]) for item in sorted_results] - - def get_stats(self): - return self.index.describe_index_stats() diff --git a/autogpt/memory/redismem.py b/autogpt/memory/redismem.py deleted file mode 100644 index 9b126f0124..0000000000 --- a/autogpt/memory/redismem.py +++ /dev/null @@ -1,156 +0,0 @@ -"""Redis memory provider.""" -from __future__ import annotations - -from typing import Any - -import numpy as np -import redis -from colorama import Fore, Style -from redis.commands.search.field import TextField, VectorField -from redis.commands.search.indexDefinition import IndexDefinition, IndexType -from redis.commands.search.query import Query - -from autogpt.llm import get_ada_embedding -from autogpt.logs import logger -from autogpt.memory.base import MemoryProviderSingleton - -SCHEMA = [ - TextField("data"), - VectorField( - "embedding", - "HNSW", - {"TYPE": "FLOAT32", "DIM": 1536, "DISTANCE_METRIC": "COSINE"}, - ), -] - - -class RedisMemory(MemoryProviderSingleton): - def __init__(self, cfg): - """ - Initializes the Redis memory provider. - - Args: - cfg: The config object. - - Returns: None - """ - redis_host = cfg.redis_host - redis_port = cfg.redis_port - redis_password = cfg.redis_password - self.dimension = 1536 - self.redis = redis.Redis( - host=redis_host, - port=redis_port, - password=redis_password, - db=0, # Cannot be changed - ) - self.cfg = cfg - - # Check redis connection - try: - self.redis.ping() - except redis.ConnectionError as e: - logger.typewriter_log( - "FAILED TO CONNECT TO REDIS", - Fore.RED, - Style.BRIGHT + str(e) + Style.RESET_ALL, - ) - logger.double_check( - "Please ensure you have setup and configured Redis properly for use. " - + f"You can check out {Fore.CYAN + Style.BRIGHT}" - f"https://docs.agpt.co/configuration/memory/#redis-setup{Style.RESET_ALL}" - " to ensure you've set up everything correctly." - ) - exit(1) - - if cfg.wipe_redis_on_start: - self.redis.flushall() - try: - self.redis.ft(f"{cfg.memory_index}").create_index( - fields=SCHEMA, - definition=IndexDefinition( - prefix=[f"{cfg.memory_index}:"], index_type=IndexType.HASH - ), - ) - except Exception as e: - logger.warn("Error creating Redis search index: ", e) - existing_vec_num = self.redis.get(f"{cfg.memory_index}-vec_num") - self.vec_num = int(existing_vec_num.decode("utf-8")) if existing_vec_num else 0 - - def add(self, data: str) -> str: - """ - Adds a data point to the memory. - - Args: - data: The data to add. - - Returns: Message indicating that the data has been added. - """ - if "Command Error:" in data: - return "" - vector = get_ada_embedding(data) - vector = np.array(vector).astype(np.float32).tobytes() - data_dict = {b"data": data, "embedding": vector} - pipe = self.redis.pipeline() - pipe.hset(f"{self.cfg.memory_index}:{self.vec_num}", mapping=data_dict) - _text = ( - f"Inserting data into memory at index: {self.vec_num}:\n" f"data: {data}" - ) - self.vec_num += 1 - pipe.set(f"{self.cfg.memory_index}-vec_num", self.vec_num) - pipe.execute() - return _text - - def get(self, data: str) -> list[Any] | None: - """ - Gets the data from the memory that is most relevant to the given data. - - Args: - data: The data to compare to. - - Returns: The most relevant data. 
- """ - return self.get_relevant(data, 1) - - def clear(self) -> str: - """ - Clears the redis server. - - Returns: A message indicating that the memory has been cleared. - """ - self.redis.flushall() - return "Obliviated" - - def get_relevant(self, data: str, num_relevant: int = 5) -> list[Any] | None: - """ - Returns all the data in the memory that is relevant to the given data. - Args: - data: The data to compare to. - num_relevant: The number of relevant data to return. - - Returns: A list of the most relevant data. - """ - query_embedding = get_ada_embedding(data) - base_query = f"*=>[KNN {num_relevant} @embedding $vector AS vector_score]" - query = ( - Query(base_query) - .return_fields("data", "vector_score") - .sort_by("vector_score") - .dialect(2) - ) - query_vector = np.array(query_embedding).astype(np.float32).tobytes() - - try: - results = self.redis.ft(f"{self.cfg.memory_index}").search( - query, query_params={"vector": query_vector} - ) - except Exception as e: - logger.warn("Error calling Redis search: ", e) - return None - return [result.data for result in results.docs] - - def get_stats(self): - """ - Returns: The stats of the memory index. - """ - return self.redis.ft(f"{self.cfg.memory_index}").info() diff --git a/autogpt/memory/vector/__init__.py b/autogpt/memory/vector/__init__.py new file mode 100644 index 0000000000..aaaf83fb53 --- /dev/null +++ b/autogpt/memory/vector/__init__.py @@ -0,0 +1,138 @@ +from autogpt.config import Config +from autogpt.logs import logger + +from .memory_item import MemoryItem, MemoryItemRelevance +from .providers.base import VectorMemoryProvider as VectorMemory +from .providers.json_file import JSONFileMemory +from .providers.no_memory import NoMemory + +# List of supported memory backends +# Add a backend to this list if the import attempt is successful +supported_memory = ["json_file", "no_memory"] + +# try: +# from .providers.redis import RedisMemory + +# supported_memory.append("redis") +# except ImportError: +# RedisMemory = None + +# try: +# from .providers.pinecone import PineconeMemory + +# supported_memory.append("pinecone") +# except ImportError: +# PineconeMemory = None + +# try: +# from .providers.weaviate import WeaviateMemory + +# supported_memory.append("weaviate") +# except ImportError: +# WeaviateMemory = None + +# try: +# from .providers.milvus import MilvusMemory + +# supported_memory.append("milvus") +# except ImportError: +# MilvusMemory = None + + +def get_memory(cfg: Config, init=False) -> VectorMemory: + memory = None + + match cfg.memory_backend: + case "json_file": + memory = JSONFileMemory(cfg) + + case "pinecone": + raise NotImplementedError( + "The Pinecone memory backend has been rendered incompatible by work on " + "the memory system, and was removed. Whether support will be added back " + "in the future is subject to discussion, feel free to pitch in: " + "https://github.com/Significant-Gravitas/Auto-GPT/discussions/4280" + ) + # if not PineconeMemory: + # logger.warn( + # "Error: Pinecone is not installed. Please install pinecone" + # " to use Pinecone as a memory backend." + # ) + # else: + # memory = PineconeMemory(cfg) + # if init: + # memory.clear() + + case "redis": + raise NotImplementedError( + "The Redis memory backend has been rendered incompatible by work on " + "the memory system, and has been removed temporarily." + ) + # if not RedisMemory: + # logger.warn( + # "Error: Redis is not installed. Please install redis-py to" + # " use Redis as a memory backend." 
+ # ) + # else: + # memory = RedisMemory(cfg) + + case "weaviate": + raise NotImplementedError( + "The Weaviate memory backend has been rendered incompatible by work on " + "the memory system, and was removed. Whether support will be added back " + "in the future is subject to discussion, feel free to pitch in: " + "https://github.com/Significant-Gravitas/Auto-GPT/discussions/4280" + ) + # if not WeaviateMemory: + # logger.warn( + # "Error: Weaviate is not installed. Please install weaviate-client to" + # " use Weaviate as a memory backend." + # ) + # else: + # memory = WeaviateMemory(cfg) + + case "milvus": + raise NotImplementedError( + "The Milvus memory backend has been rendered incompatible by work on " + "the memory system, and was removed. Whether support will be added back " + "in the future is subject to discussion, feel free to pitch in: " + "https://github.com/Significant-Gravitas/Auto-GPT/discussions/4280" + ) + # if not MilvusMemory: + # logger.warn( + # "Error: pymilvus sdk is not installed." + # "Please install pymilvus to use Milvus or Zilliz Cloud as memory backend." + # ) + # else: + # memory = MilvusMemory(cfg) + + case "no_memory": + memory = NoMemory() + + case _: + raise ValueError( + f"Unknown memory backend '{cfg.memory_backend}'. Please check your config." + ) + + if memory is None: + memory = JSONFileMemory(cfg) + + return memory + + +def get_supported_memory_backends(): + return supported_memory + + +__all__ = [ + "get_memory", + "MemoryItem", + "MemoryItemRelevance", + "JSONFileMemory", + "NoMemory", + "VectorMemory", + # "RedisMemory", + # "PineconeMemory", + # "MilvusMemory", + # "WeaviateMemory", +] diff --git a/autogpt/memory/vector/memory_item.py b/autogpt/memory/vector/memory_item.py new file mode 100644 index 0000000000..c57b87aa3e --- /dev/null +++ b/autogpt/memory/vector/memory_item.py @@ -0,0 +1,223 @@ +from __future__ import annotations + +import dataclasses +import json +from typing import Literal + +import numpy as np + +from autogpt.config import Config +from autogpt.llm import Message +from autogpt.llm.utils import count_string_tokens +from autogpt.logs import logger +from autogpt.processing.text import chunk_content, split_text, summarize_text + +from .utils import Embedding, get_embedding + +MemoryDocType = Literal["webpage", "text_file", "code_file", "agent_history"] + + +@dataclasses.dataclass +class MemoryItem: + """Memory object containing raw content as well as embeddings""" + + raw_content: str + summary: str + chunks: list[str] + chunk_summaries: list[str] + e_summary: Embedding + e_chunks: list[Embedding] + metadata: dict + + def relevance_for(self, query: str, e_query: Embedding | None = None): + return MemoryItemRelevance.of(self, query, e_query) + + @staticmethod + def from_text( + text: str, + source_type: MemoryDocType, + metadata: dict = {}, + how_to_summarize: str | None = None, + question_for_summary: str | None = None, + ): + cfg = Config() + logger.debug(f"Memorizing text:\n{'-'*32}\n{text}\n{'-'*32}\n") + + chunks = [ + chunk + for chunk, _ in ( + split_text(text, cfg.embedding_model) + if source_type != "code_file" + else chunk_content(text, cfg.embedding_model) + ) + ] + logger.debug("Chunks: " + str(chunks)) + + chunk_summaries = [ + summary + for summary, _ in [ + summarize_text( + text_chunk, + instruction=how_to_summarize, + question=question_for_summary, + ) + for text_chunk in chunks + ] + ] + logger.debug("Chunk summaries: " + str(chunk_summaries)) + + e_chunks = get_embedding(chunks) + + summary = ( + 
chunk_summaries[0] + if len(chunks) == 1 + else summarize_text( + "\n\n".join(chunk_summaries), + instruction=how_to_summarize, + question=question_for_summary, + )[0] + ) + logger.debug("Total summary: " + summary) + + # TODO: investigate search performance of weighted average vs summary + # e_average = np.average(e_chunks, axis=0, weights=[len(c) for c in chunks]) + e_summary = get_embedding(summary) + + metadata["source_type"] = source_type + + return MemoryItem( + text, + summary, + chunks, + chunk_summaries, + e_summary, + e_chunks, + metadata=metadata, + ) + + @staticmethod + def from_text_file(content: str, path: str): + return MemoryItem.from_text(content, "text_file", {"location": path}) + + @staticmethod + def from_code_file(content: str, path: str): + # TODO: implement tailored code memories + return MemoryItem.from_text(content, "code_file", {"location": path}) + + @staticmethod + def from_ai_action(ai_message: Message, result_message: Message): + # The result_message contains either user feedback + # or the result of the command specified in ai_message + + if ai_message["role"] != "assistant": + raise ValueError(f"Invalid role on 'ai_message': {ai_message['role']}") + + result = ( + result_message["content"] + if result_message["content"].startswith("Command") + else "None" + ) + user_input = ( + result_message["content"] + if result_message["content"].startswith("Human feedback") + else "None" + ) + memory_content = ( + f"Assistant Reply: {ai_message['content']}" + "\n\n" + f"Result: {result}" + "\n\n" + f"Human Feedback: {user_input}" + ) + + return MemoryItem.from_text( + text=memory_content, + source_type="agent_history", + how_to_summarize="if possible, also make clear the link between the command in the assistant's response and the command result. Do not mention the human feedback if there is none", + ) + + @staticmethod + def from_webpage(content: str, url: str, question: str | None = None): + return MemoryItem.from_text( + text=content, + source_type="webpage", + metadata={"location": url}, + question_for_summary=question, + ) + + def dump(self) -> str: + token_length = count_string_tokens(self.raw_content, Config().embedding_model) + return f""" +=============== MemoryItem =============== +Length: {token_length} tokens in {len(self.e_chunks)} chunks +Metadata: {json.dumps(self.metadata, indent=2)} +---------------- SUMMARY ----------------- +{self.summary} +------------------ RAW ------------------- +{self.raw_content} +========================================== +""" + + +@dataclasses.dataclass +class MemoryItemRelevance: + """ + Class that encapsulates memory relevance search functionality and data. + Instances contain a MemoryItem and its relevance scores for a given query. 
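
For illustration only (not part of this patch): a minimal sketch of creating a memory item with the constructors above and scoring it with the relevance helpers below. The file path is hypothetical, and the calls go through the OpenAI embedding/summarization APIs.

    from autogpt.memory.vector import MemoryItem

    note = MemoryItem.from_text_file(
        "The deploy key is rotated every Monday.", "notes/ops.txt"
    )
    relevance = note.relevance_for("When is the deploy key rotated?")
    print(relevance.score)                # max of summary and chunk relevance scores
    print(relevance.most_relevant_chunk)  # (chunk text, score)
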
+ """ + + memory_item: MemoryItem + for_query: str + summary_relevance_score: float + chunk_relevance_scores: list[float] + + @staticmethod + def of( + memory_item: MemoryItem, for_query: str, e_query: Embedding | None = None + ) -> MemoryItemRelevance: + e_query = e_query or get_embedding(for_query) + _, srs, crs = MemoryItemRelevance.calculate_scores(memory_item, e_query) + return MemoryItemRelevance( + for_query=for_query, + memory_item=memory_item, + summary_relevance_score=srs, + chunk_relevance_scores=crs, + ) + + @staticmethod + def calculate_scores( + memory: MemoryItem, compare_to: Embedding + ) -> tuple[float, float, list[float]]: + """ + Calculates similarity between given embedding and all embeddings of the memory + + Returns: + float: the aggregate (max) relevance score of the memory + float: the relevance score of the memory summary + list: the relevance scores of the memory chunks + """ + summary_relevance_score = np.dot(memory.e_summary, compare_to) + chunk_relevance_scores = np.dot(memory.e_chunks, compare_to) + logger.debug(f"Relevance of summary: {summary_relevance_score}") + logger.debug(f"Relevance of chunks: {chunk_relevance_scores}") + + relevance_scores = [summary_relevance_score, *chunk_relevance_scores] + logger.debug(f"Relevance scores: {relevance_scores}") + return max(relevance_scores), summary_relevance_score, chunk_relevance_scores + + @property + def score(self) -> float: + """The aggregate relevance score of the memory item for the given query""" + return max([self.summary_relevance_score, *self.chunk_relevance_scores]) + + @property + def most_relevant_chunk(self) -> tuple[str, float]: + """The most relevant chunk of the memory item + its score for the given query""" + i_relmax = np.argmax(self.chunk_relevance_scores) + return self.memory_item.chunks[i_relmax], self.chunk_relevance_scores[i_relmax] + + def __str__(self): + return ( + f"{self.memory_item.summary} ({self.summary_relevance_score}) " + f"{self.chunk_relevance_scores}" + ) diff --git a/autogpt/memory/vector/providers/__init__.py b/autogpt/memory/vector/providers/__init__.py new file mode 100644 index 0000000000..12a23b6000 --- /dev/null +++ b/autogpt/memory/vector/providers/__init__.py @@ -0,0 +1,7 @@ +from .json_file import JSONFileMemory +from .no_memory import NoMemory + +__all__ = [ + "JSONFileMemory", + "NoMemory", +] diff --git a/autogpt/memory/vector/providers/base.py b/autogpt/memory/vector/providers/base.py new file mode 100644 index 0000000000..969d893476 --- /dev/null +++ b/autogpt/memory/vector/providers/base.py @@ -0,0 +1,74 @@ +import abc +import functools +from typing import MutableSet, Sequence + +import numpy as np + +from autogpt.config.config import Config +from autogpt.logs import logger +from autogpt.singleton import AbstractSingleton + +from .. import MemoryItem, MemoryItemRelevance +from ..utils import Embedding, get_embedding + + +class VectorMemoryProvider(MutableSet[MemoryItem], AbstractSingleton): + @abc.abstractmethod + def __init__(self, config: Config): + pass + + def get(self, query: str) -> MemoryItemRelevance | None: + """ + Gets the data from the memory that is most relevant to the given query. + + Args: + data: The data to compare to. 
+ + Returns: The most relevant Memory + """ + result = self.get_relevant(query, 1) + return result[0] if result else None + + def get_relevant(self, query: str, k: int) -> Sequence[MemoryItemRelevance]: + """ + Returns the top-k most relevant memories for the given query + + Args: + query: the query to compare stored memories to + k: the number of relevant memories to fetch + + Returns: + list[MemoryItemRelevance] containing the top [k] relevant memories + """ + if len(self) < 1: + return [] + + logger.debug( + f"Searching for {k} relevant memories for query '{query}'; " + f"{len(self)} memories in index" + ) + + relevances = self.score_memories_for_relevance(query) + logger.debug(f"Memory relevance scores: {[str(r) for r in relevances]}") + + # take last k items and reverse + top_k_indices = np.argsort([r.score for r in relevances])[-k:][::-1] + + return [relevances[i] for i in top_k_indices] + + def score_memories_for_relevance( + self, for_query: str + ) -> Sequence[MemoryItemRelevance]: + """ + Returns MemoryItemRelevance for every memory in the index. + Implementations may override this function for performance purposes. + """ + e_query: Embedding = get_embedding(for_query) + return [m.relevance_for(for_query, e_query) for m in self] + + def get_stats(self) -> tuple[int, int]: + """ + Returns: + tuple (n_memories: int, n_chunks: int): the stats of the memory index + """ + return len(self), functools.reduce(lambda t, m: t + len(m.e_chunks), self, 0) diff --git a/autogpt/memory/vector/providers/json_file.py b/autogpt/memory/vector/providers/json_file.py new file mode 100644 index 0000000000..46446a9c52 --- /dev/null +++ b/autogpt/memory/vector/providers/json_file.py @@ -0,0 +1,68 @@ +from __future__ import annotations + +from pathlib import Path +from typing import Iterator + +import orjson + +from autogpt.config import Config +from autogpt.logs import logger + +from ..memory_item import MemoryItem +from .base import VectorMemoryProvider + + +class JSONFileMemory(VectorMemoryProvider): + """Memory backend that stores memories in a JSON file""" + + SAVE_OPTIONS = orjson.OPT_SERIALIZE_NUMPY | orjson.OPT_SERIALIZE_DATACLASS + + file_path: Path + memories: list[MemoryItem] + + def __init__(self, cfg: Config) -> None: + """Initialize a class instance + + Args: + cfg: Config object + + Returns: + None + """ + workspace_path = Path(cfg.workspace_path) + self.file_path = workspace_path / f"{cfg.memory_index}.json" + self.file_path.touch() + logger.debug(f"Initialized {__name__} with index path {self.file_path}") + + self.memories = [] + self.save_index() + + def __iter__(self) -> Iterator[MemoryItem]: + return iter(self.memories) + + def __contains__(self, x: MemoryItem) -> bool: + return x in self.memories + + def __len__(self) -> int: + return len(self.memories) + + def add(self, item: MemoryItem): + self.memories.append(item) + self.save_index() + return len(self.memories) + + def discard(self, item: MemoryItem): + try: + self.remove(item) + except: + pass + + def clear(self): + """Clears the data in memory.""" + self.memories.clear() + self.save_index() + + def save_index(self): + logger.debug(f"Saving memory index to file {self.file_path}") + with self.file_path.open("wb") as f: + return f.write(orjson.dumps(self.memories, option=self.SAVE_OPTIONS)) diff --git a/autogpt/memory/vector/providers/no_memory.py b/autogpt/memory/vector/providers/no_memory.py new file mode 100644 index 0000000000..01f6c18014 --- /dev/null +++ b/autogpt/memory/vector/providers/no_memory.py @@ -0,0 +1,36 @@ +"""A 
class that does not store any data. This is the default memory provider.""" +from __future__ import annotations + +from typing import Iterator, Optional + +from autogpt.config.config import Config + +from .. import MemoryItem +from .base import VectorMemoryProvider + + +class NoMemory(VectorMemoryProvider): + """ + A class that does not store any data. This is the default memory provider. + """ + + def __init__(self, config: Optional[Config] = None): + pass + + def __iter__(self) -> Iterator[MemoryItem]: + return iter([]) + + def __contains__(self, x: MemoryItem) -> bool: + return False + + def __len__(self) -> int: + return 0 + + def add(self, item: MemoryItem): + pass + + def discard(self, item: MemoryItem): + pass + + def clear(self): + pass diff --git a/autogpt/memory/vector/utils.py b/autogpt/memory/vector/utils.py new file mode 100644 index 0000000000..d01f710848 --- /dev/null +++ b/autogpt/memory/vector/utils.py @@ -0,0 +1,71 @@ +from typing import Any, overload + +import numpy as np +import numpy.typing as npt +import openai + +from autogpt.config import Config +from autogpt.llm.utils import metered, retry_openai_api +from autogpt.logs import logger + +Embedding = list[np.float32] | np.ndarray[Any, np.dtype[np.float32]] +"""Embedding vector""" +TText = list[int] +"""Token array representing text""" + + +@overload +def get_embedding(input: str | TText) -> Embedding: + ... + + +@overload +def get_embedding(input: list[str] | list[TText]) -> list[Embedding]: + ... + + +@metered +@retry_openai_api() +def get_embedding( + input: str | TText | list[str] | list[TText], +) -> Embedding | list[Embedding]: + """Get an embedding from the ada model. + + Args: + input: Input text to get embeddings for, encoded as a string or array of tokens. + Multiple inputs may be given as a list of strings or token arrays. + + Returns: + List[float]: The embedding. 
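
For illustration only (not part of this patch): the overloads above accept either a single input or a batch; a sketch of both call shapes, assuming a configured OpenAI API key.

    from autogpt.memory.vector.utils import get_embedding

    single = get_embedding("The cat sat on the mat")              # one embedding vector
    batch = get_embedding(["first document", "second document"])  # list of vectors, in input order
    assert len(batch) == 2
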
+ """ + cfg = Config() + multiple = isinstance(input, list) and all(not isinstance(i, int) for i in input) + + if isinstance(input, str): + input = input.replace("\n", " ") + elif multiple and isinstance(input[0], str): + input = [text.replace("\n", " ") for text in input] + + model = cfg.embedding_model + if cfg.use_azure: + kwargs = {"engine": cfg.get_azure_deployment_id_for_model(model)} + else: + kwargs = {"model": model} + + logger.debug( + f"Getting embedding{f's for {len(input)} inputs' if multiple else ''}" + f" with model '{model}'" + + (f" via Azure deployment '{kwargs['engine']}'" if cfg.use_azure else "") + ) + + embeddings = openai.Embedding.create( + input=input, + api_key=cfg.openai_api_key, + **kwargs, + ).data + + if not multiple: + return embeddings[0]["embedding"] + + embeddings = sorted(embeddings, key=lambda x: x["index"]) + return [d["embedding"] for d in embeddings] diff --git a/autogpt/memory/weaviate.py b/autogpt/memory/weaviate.py deleted file mode 100644 index 0b4f58b55a..0000000000 --- a/autogpt/memory/weaviate.py +++ /dev/null @@ -1,127 +0,0 @@ -import weaviate -from weaviate import Client -from weaviate.embedded import EmbeddedOptions -from weaviate.util import generate_uuid5 - -from autogpt.llm import get_ada_embedding -from autogpt.logs import logger -from autogpt.memory.base import MemoryProviderSingleton - - -def default_schema(weaviate_index): - return { - "class": weaviate_index, - "properties": [ - { - "name": "raw_text", - "dataType": ["text"], - "description": "original text for the embedding", - } - ], - } - - -class WeaviateMemory(MemoryProviderSingleton): - def __init__(self, cfg): - auth_credentials = self._build_auth_credentials(cfg) - - url = f"{cfg.weaviate_protocol}://{cfg.weaviate_host}:{cfg.weaviate_port}" - - if cfg.use_weaviate_embedded: - self.client = Client( - embedded_options=EmbeddedOptions( - hostname=cfg.weaviate_host, - port=int(cfg.weaviate_port), - persistence_data_path=cfg.weaviate_embedded_path, - ) - ) - - logger.info( - f"Weaviate Embedded running on: {url} with persistence path: {cfg.weaviate_embedded_path}" - ) - else: - self.client = Client(url, auth_client_secret=auth_credentials) - - self.index = WeaviateMemory.format_classname(cfg.memory_index) - self._create_schema() - - @staticmethod - def format_classname(index): - # weaviate uses capitalised index names - # The python client uses the following code to format - # index names before the corresponding class is created - index = index.replace("-", "_") - if len(index) == 1: - return index.capitalize() - return index[0].capitalize() + index[1:] - - def _create_schema(self): - schema = default_schema(self.index) - if not self.client.schema.contains(schema): - self.client.schema.create_class(schema) - - def _build_auth_credentials(self, cfg): - if cfg.weaviate_username and cfg.weaviate_password: - return weaviate.AuthClientPassword( - cfg.weaviate_username, cfg.weaviate_password - ) - if cfg.weaviate_api_key: - return weaviate.AuthApiKey(api_key=cfg.weaviate_api_key) - else: - return None - - def add(self, data): - vector = get_ada_embedding(data) - - doc_uuid = generate_uuid5(data, self.index) - data_object = {"raw_text": data} - - with self.client.batch as batch: - batch.add_data_object( - uuid=doc_uuid, - data_object=data_object, - class_name=self.index, - vector=vector, - ) - - return f"Inserting data into memory at uuid: {doc_uuid}:\n data: {data}" - - def get(self, data): - return self.get_relevant(data, 1) - - def clear(self): - self.client.schema.delete_all() - - # 
weaviate does not yet have a neat way to just remove the items in an index - # without removing the entire schema, therefore we need to re-create it - # after a call to delete_all - self._create_schema() - - return "Obliterated" - - def get_relevant(self, data, num_relevant=5): - query_embedding = get_ada_embedding(data) - try: - results = ( - self.client.query.get(self.index, ["raw_text"]) - .with_near_vector({"vector": query_embedding, "certainty": 0.7}) - .with_limit(num_relevant) - .do() - ) - - if len(results["data"]["Get"][self.index]) > 0: - return [ - str(item["raw_text"]) for item in results["data"]["Get"][self.index] - ] - else: - return [] - - except Exception as err: - logger.warn(f"Unexpected error {err=}, {type(err)=}") - return [] - - def get_stats(self): - result = self.client.query.aggregate(self.index).with_meta_count().do() - class_data = result["data"]["Aggregate"][self.index] - - return class_data[0]["meta"] if class_data else {} diff --git a/autogpt/memory_management/store_memory.py b/autogpt/memory_management/store_memory.py deleted file mode 100644 index e37434ef5e..0000000000 --- a/autogpt/memory_management/store_memory.py +++ /dev/null @@ -1,33 +0,0 @@ -from autogpt.json_utils.utilities import ( - LLM_DEFAULT_RESPONSE_FORMAT, - is_string_valid_json, -) -from autogpt.logs import logger - - -def format_memory(assistant_reply, next_message_content): - # the next_message_content is a variable to stores either the user_input or the command following the assistant_reply - result = ( - "None" if next_message_content.startswith("Command") else next_message_content - ) - user_input = ( - "None" - if next_message_content.startswith("Human feedback") - else next_message_content - ) - - return f"Assistant Reply: {assistant_reply}\nResult: {result}\nHuman Feedback:{user_input}" - - -def save_memory_trimmed_from_context_window( - full_message_history, next_message_to_add_index, permanent_memory -): - while next_message_to_add_index >= 0: - message_content = full_message_history[next_message_to_add_index]["content"] - if is_string_valid_json(message_content, LLM_DEFAULT_RESPONSE_FORMAT): - next_message = full_message_history[next_message_to_add_index + 1] - memory_to_add = format_memory(message_content, next_message["content"]) - logger.debug(f"Storing the following memory: {memory_to_add}") - permanent_memory.add(memory_to_add) - - next_message_to_add_index -= 1 diff --git a/autogpt/memory_management/summary_memory.py b/autogpt/memory_management/summary_memory.py deleted file mode 100644 index a13b63f3af..0000000000 --- a/autogpt/memory_management/summary_memory.py +++ /dev/null @@ -1,143 +0,0 @@ -import copy -import json -from typing import Dict, List, Tuple - -from autogpt.agent import Agent -from autogpt.config import Config -from autogpt.llm.llm_utils import create_chat_completion -from autogpt.log_cycle.log_cycle import PROMPT_SUMMARY_FILE_NAME, SUMMARY_FILE_NAME -from autogpt.logs import logger - -cfg = Config() - - -def get_newly_trimmed_messages( - full_message_history: List[Dict[str, str]], - current_context: List[Dict[str, str]], - last_memory_index: int, -) -> Tuple[List[Dict[str, str]], int]: - """ - This function returns a list of dictionaries contained in full_message_history - with an index higher than prev_index that are absent from current_context. - - Args: - full_message_history (list): A list of dictionaries representing the full message history. - current_context (list): A list of dictionaries representing the current context. 
- last_memory_index (int): An integer representing the previous index. - - Returns: - list: A list of dictionaries that are in full_message_history with an index higher than last_memory_index and absent from current_context. - int: The new index value for use in the next loop. - """ - # Select messages in full_message_history with an index higher than last_memory_index - new_messages = [ - msg for i, msg in enumerate(full_message_history) if i > last_memory_index - ] - - # Remove messages that are already present in current_context - new_messages_not_in_context = [ - msg for msg in new_messages if msg not in current_context - ] - - # Find the index of the last message processed - new_index = last_memory_index - if new_messages_not_in_context: - last_message = new_messages_not_in_context[-1] - new_index = full_message_history.index(last_message) - - return new_messages_not_in_context, new_index - - -def update_running_summary( - agent: Agent, current_memory: str, new_events: List[Dict[str, str]] -) -> str: - """ - This function takes a list of dictionaries representing new events and combines them with the current summary, - focusing on key and potentially important information to remember. The updated summary is returned in a message - formatted in the 1st person past tense. - - Args: - new_events (List[Dict]): A list of dictionaries containing the latest events to be added to the summary. - - Returns: - str: A message containing the updated summary of actions, formatted in the 1st person past tense. - - Example: - new_events = [{"event": "entered the kitchen."}, {"event": "found a scrawled note with the number 7"}] - update_running_summary(new_events) - # Returns: "This reminds you of these events from your past: \nI entered the kitchen and found a scrawled note saying 7." - """ - # Create a copy of the new_events list to prevent modifying the original list - new_events = copy.deepcopy(new_events) - - # Replace "assistant" with "you". This produces much better first person past tense results. - for event in new_events: - if event["role"].lower() == "assistant": - event["role"] = "you" - - # Remove "thoughts" dictionary from "content" - try: - content_dict = json.loads(event["content"]) - if "thoughts" in content_dict: - del content_dict["thoughts"] - event["content"] = json.dumps(content_dict) - except json.decoder.JSONDecodeError: - if cfg.debug_mode: - logger.error(f"Error: Invalid JSON: {event['content']}\n") - - elif event["role"].lower() == "system": - event["role"] = "your computer" - - # Delete all user messages - elif event["role"] == "user": - new_events.remove(event) - - # This can happen at any point during execution, not just the beginning - if len(new_events) == 0: - new_events = "Nothing new happened." - - prompt = f'''Your task is to create a concise running summary of actions and information results in the provided text, focusing on key and potentially important information to remember. - -You will receive the current summary and the your latest actions. Combine them, adding relevant key information from the latest development in 1st person past tense and keeping the summary concise. 
- -Summary So Far: -""" -{current_memory} -""" - -Latest Development: -""" -{new_events} -""" -''' - - messages = [ - { - "role": "user", - "content": prompt, - } - ] - agent.log_cycle_handler.log_cycle( - agent.config.ai_name, - agent.created_at, - agent.cycle_count, - messages, - PROMPT_SUMMARY_FILE_NAME, - ) - - current_memory = create_chat_completion(messages, cfg.fast_llm_model) - - agent.log_cycle_handler.log_cycle( - agent.config.ai_name, - agent.created_at, - agent.cycle_count, - current_memory, - SUMMARY_FILE_NAME, - ) - - message_to_return = { - "role": "system", - "content": f"This reminds you of these events from your past: \n{current_memory}", - } - - return message_to_return diff --git a/autogpt/plugins.py b/autogpt/plugins.py index bf77ef748c..780593a8df 100644 --- a/autogpt/plugins.py +++ b/autogpt/plugins.py @@ -1,6 +1,6 @@ """Handles loading of plugins.""" -import importlib +import importlib.util import json import os import zipfile @@ -12,7 +12,7 @@ from zipimport import zipimporter import openapi_python_client import requests from auto_gpt_plugin_template import AutoGPTPluginTemplate -from openapi_python_client.cli import Config as OpenAPIConfig +from openapi_python_client.config import Config as OpenAPIConfig from autogpt.config import Config from autogpt.logs import logger @@ -152,7 +152,7 @@ def initialize_openai_plugins( ) prev_cwd = Path.cwd() os.chdir(openai_plugin_client_dir) - Path("ai-plugin.json") + if not os.path.exists("client"): client_results = openapi_python_client.create_new_client( url=manifest_spec["manifest"]["api"]["url"], @@ -170,9 +170,13 @@ def initialize_openai_plugins( "client", "client/client/client.py" ) module = importlib.util.module_from_spec(spec) - spec.loader.exec_module(module) + + try: + spec.loader.exec_module(module) + finally: + os.chdir(prev_cwd) + client = module.Client(base_url=url) - os.chdir(prev_cwd) manifest_spec["client"] = client return manifests_specs diff --git a/autogpt/processing/text.py b/autogpt/processing/text.py index 4687fb710c..aadc93ef49 100644 --- a/autogpt/processing/text.py +++ b/autogpt/processing/text.py @@ -1,170 +1,234 @@ """Text processing functions""" -from typing import Dict, Generator, Optional +from math import ceil +from typing import Optional import spacy -from selenium.webdriver.remote.webdriver import WebDriver +import tiktoken from autogpt.config import Config -from autogpt.llm import count_message_tokens, create_chat_completion +from autogpt.llm.base import ChatSequence +from autogpt.llm.providers.openai import OPEN_AI_MODELS +from autogpt.llm.utils import count_string_tokens, create_chat_completion from autogpt.logs import logger -from autogpt.memory import get_memory +from autogpt.utils import batch CFG = Config() +def _max_chunk_length(model: str, max: Optional[int] = None) -> int: + model_max_input_tokens = OPEN_AI_MODELS[model].max_tokens - 1 + if max is not None and max > 0: + return min(max, model_max_input_tokens) + return model_max_input_tokens + + +def must_chunk_content( + text: str, for_model: str, max_chunk_length: Optional[int] = None +) -> bool: + return count_string_tokens(text, for_model) > _max_chunk_length( + for_model, max_chunk_length + ) + + +def chunk_content( + content: str, + for_model: str, + max_chunk_length: Optional[int] = None, + with_overlap=True, +): + """Split content into chunks of approximately equal token length.""" + + MAX_OVERLAP = 200 # limit overlap to save tokens + + if not must_chunk_content(content, for_model, max_chunk_length): + yield content, 
count_string_tokens(content, for_model) + return + + max_chunk_length = max_chunk_length or _max_chunk_length(for_model) + + tokenizer = tiktoken.encoding_for_model(for_model) + + tokenized_text = tokenizer.encode(content) + total_length = len(tokenized_text) + n_chunks = ceil(total_length / max_chunk_length) + + chunk_length = ceil(total_length / n_chunks) + overlap = min(max_chunk_length - chunk_length, MAX_OVERLAP) if with_overlap else 0 + + for token_batch in batch(tokenized_text, chunk_length + overlap, overlap): + yield tokenizer.decode(token_batch), len(token_batch) + + +def summarize_text( + text: str, instruction: Optional[str] = None, question: Optional[str] = None +) -> tuple[str, None | list[tuple[str, str]]]: + """Summarize text using the OpenAI API + + Args: + text (str): The text to summarize + instruction (str): Additional instruction for summarization, e.g. "focus on information related to polar bears", "omit personal information contained in the text" + + Returns: + str: The summary of the text + list[(summary, chunk)]: Text chunks and their summary, if the text was chunked. + None otherwise. + """ + if not text: + raise ValueError("No text to summarize") + + if instruction and question: + raise ValueError("Parameters 'question' and 'instructions' cannot both be set") + + model = CFG.fast_llm_model + + if question: + instruction = ( + f'include any information that can be used to answer the question "{question}". ' + "Do not directly answer the question itself" + ) + + summarization_prompt = ChatSequence.for_model(model) + + token_length = count_string_tokens(text, model) + logger.info(f"Text length: {token_length} tokens") + + # reserve 50 tokens for summary prompt, 500 for the response + max_chunk_length = _max_chunk_length(model) - 550 + logger.info(f"Max chunk length: {max_chunk_length} tokens") + + if not must_chunk_content(text, model, max_chunk_length): + # summarization_prompt.add("user", text) + summarization_prompt.add( + "user", + "Write a concise summary of the following text" + f"{f'; {instruction}' if instruction is not None else ''}:" + "\n\n\n" + f'LITERAL TEXT: """{text}"""' + "\n\n\n" + "CONCISE SUMMARY: The text is best summarized as" + # "Only respond with a concise summary or description of the user message." 
+ ) + + logger.debug(f"Summarizing with {model}:\n{summarization_prompt.dump()}\n") + summary = create_chat_completion( + summarization_prompt, temperature=0, max_tokens=500 + ) + + logger.debug(f"\n{'-'*16} SUMMARY {'-'*17}\n{summary}\n{'-'*42}\n") + return summary.strip(), None + + summaries: list[str] = [] + chunks = list(split_text(text, for_model=model, max_chunk_length=max_chunk_length)) + + for i, (chunk, chunk_length) in enumerate(chunks): + logger.info( + f"Summarizing chunk {i + 1} / {len(chunks)} of length {chunk_length} tokens" + ) + summary, _ = summarize_text(chunk, instruction) + summaries.append(summary) + + logger.info(f"Summarized {len(chunks)} chunks") + + summary, _ = summarize_text("\n\n".join(summaries)) + + return summary.strip(), [ + (summaries[i], chunks[i][0]) for i in range(0, len(chunks)) + ] + + def split_text( text: str, - max_length: int = CFG.browse_chunk_max_length, - model: str = CFG.fast_llm_model, - question: str = "", -) -> Generator[str, None, None]: - """Split text into chunks of a maximum length + for_model: str = CFG.fast_llm_model, + with_overlap=True, + max_chunk_length: Optional[int] = None, +): + """Split text into chunks of sentences, with each chunk not exceeding the maximum length Args: text (str): The text to split - max_length (int, optional): The maximum length of each chunk. Defaults to 8192. + for_model (str): The model to chunk for; determines tokenizer and constraints + max_length (int, optional): The maximum length of each chunk Yields: str: The next chunk of text Raises: - ValueError: If the text is longer than the maximum length + ValueError: when a sentence is longer than the maximum length """ - flattened_paragraphs = " ".join(text.split("\n")) - nlp = spacy.load(CFG.browse_spacy_language_model) + max_length = _max_chunk_length(for_model, max_chunk_length) + + # flatten paragraphs to improve performance + text = text.replace("\n", " ") + text_length = count_string_tokens(text, for_model) + + if text_length < max_length: + yield text, text_length + return + + n_chunks = ceil(text_length / max_length) + target_chunk_length = ceil(text_length / n_chunks) + + nlp: spacy.language.Language = spacy.load(CFG.browse_spacy_language_model) nlp.add_pipe("sentencizer") - doc = nlp(flattened_paragraphs) - sentences = [sent.text.strip() for sent in doc.sents] + doc = nlp(text) + sentences = [sentence.text.strip() for sentence in doc.sents] - current_chunk = [] + current_chunk: list[str] = [] + current_chunk_length = 0 + last_sentence = None + last_sentence_length = 0 - for sentence in sentences: - message_with_additional_sentence = [ - create_message(" ".join(current_chunk) + " " + sentence, question) - ] + i = 0 + while i < len(sentences): + sentence = sentences[i] + sentence_length = count_string_tokens(sentence, for_model) + expected_chunk_length = current_chunk_length + 1 + sentence_length - expected_token_usage = ( - count_message_tokens(messages=message_with_additional_sentence, model=model) - + 1 - ) - if expected_token_usage <= max_length: + if ( + expected_chunk_length < max_length + # try to create chunks of approximately equal size + and expected_chunk_length - (sentence_length / 2) < target_chunk_length + ): current_chunk.append(sentence) - else: - yield " ".join(current_chunk) - current_chunk = [sentence] - message_this_sentence_only = [ - create_message(" ".join(current_chunk), question) + current_chunk_length = expected_chunk_length + + elif sentence_length < max_length: + if last_sentence: + yield " ".join(current_chunk), 
current_chunk_length + current_chunk = [] + current_chunk_length = 0 + + if with_overlap: + overlap_max_length = max_length - sentence_length - 1 + if last_sentence_length < overlap_max_length: + current_chunk += [last_sentence] + current_chunk_length += last_sentence_length + 1 + elif overlap_max_length > 5: + # add as much from the end of the last sentence as fits + current_chunk += [ + list( + chunk_content( + last_sentence, + for_model, + overlap_max_length, + ) + ).pop()[0], + ] + current_chunk_length += overlap_max_length + 1 + + current_chunk += [sentence] + current_chunk_length += sentence_length + + else: # sentence longer than maximum length -> chop up and try again + sentences[i : i + 1] = [ + chunk + for chunk, _ in chunk_content(sentence, for_model, target_chunk_length) ] - expected_token_usage = ( - count_message_tokens(messages=message_this_sentence_only, model=model) - + 1 - ) - if expected_token_usage > max_length: - raise ValueError( - f"Sentence is too long in webpage: {expected_token_usage} tokens." - ) + continue + + i += 1 + last_sentence = sentence + last_sentence_length = sentence_length if current_chunk: - yield " ".join(current_chunk) - - -def summarize_text( - url: str, text: str, question: str, driver: Optional[WebDriver] = None -) -> str: - """Summarize text using the OpenAI API - - Args: - url (str): The url of the text - text (str): The text to summarize - question (str): The question to ask the model - driver (WebDriver): The webdriver to use to scroll the page - - Returns: - str: The summary of the text - """ - if not text: - return "Error: No text to summarize" - - model = CFG.fast_llm_model - text_length = len(text) - logger.info(f"Text length: {text_length} characters") - - summaries = [] - chunks = list( - split_text( - text, max_length=CFG.browse_chunk_max_length, model=model, question=question - ), - ) - scroll_ratio = 1 / len(chunks) - - for i, chunk in enumerate(chunks): - if driver: - scroll_to_percentage(driver, scroll_ratio * i) - logger.info(f"Adding chunk {i + 1} / {len(chunks)} to memory") - - memory_to_add = f"Source: {url}\n" f"Raw content part#{i + 1}: {chunk}" - - memory = get_memory(CFG) - memory.add(memory_to_add) - - messages = [create_message(chunk, question)] - tokens_for_chunk = count_message_tokens(messages, model) - logger.info( - f"Summarizing chunk {i + 1} / {len(chunks)} of length {len(chunk)} characters, or {tokens_for_chunk} tokens" - ) - - summary = create_chat_completion( - model=model, - messages=messages, - ) - summaries.append(summary) - logger.info( - f"Added chunk {i + 1} summary to memory, of length {len(summary)} characters" - ) - - memory_to_add = f"Source: {url}\n" f"Content summary part#{i + 1}: {summary}" - - memory.add(memory_to_add) - - logger.info(f"Summarized {len(chunks)} chunks.") - - combined_summary = "\n".join(summaries) - messages = [create_message(combined_summary, question)] - - return create_chat_completion( - model=model, - messages=messages, - ) - - -def scroll_to_percentage(driver: WebDriver, ratio: float) -> None: - """Scroll to a percentage of the page - - Args: - driver (WebDriver): The webdriver to use - ratio (float): The percentage to scroll to - - Raises: - ValueError: If the ratio is not between 0 and 1 - """ - if ratio < 0 or ratio > 1: - raise ValueError("Percentage should be between 0 and 1") - driver.execute_script(f"window.scrollTo(0, document.body.scrollHeight * {ratio});") - - -def create_message(chunk: str, question: str) -> Dict[str, str]: - """Create a message for the chat 
completion - - Args: - chunk (str): The chunk of text to summarize - question (str): The question to answer - - Returns: - Dict[str, str]: The message to send to the chat completion - """ - return { - "role": "user", - "content": f'"""{chunk}""" Using the above text, answer the following' - f' question: "{question}" -- if the question cannot be answered using the text,' - " summarize the text.", - } + yield " ".join(current_chunk), current_chunk_length diff --git a/autogpt/prompts/generator.py b/autogpt/prompts/generator.py index 2ae9123b23..adf6489432 100644 --- a/autogpt/prompts/generator.py +++ b/autogpt/prompts/generator.py @@ -1,6 +1,9 @@ """ A module for generating custom prompt strings.""" import json -from typing import Any, Callable, Dict, List, Optional +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional + +if TYPE_CHECKING: + from autogpt.commands.command import CommandRegistry class PromptGenerator: @@ -19,7 +22,7 @@ class PromptGenerator: self.resources = [] self.performance_evaluation = [] self.goals = [] - self.command_registry = None + self.command_registry: CommandRegistry | None = None self.name = "Bob" self.role = "AI" self.response_format = { diff --git a/autogpt/prompts/prompt.py b/autogpt/prompts/prompt.py index e69fc4b799..eeeea3f992 100644 --- a/autogpt/prompts/prompt.py +++ b/autogpt/prompts/prompt.py @@ -3,7 +3,7 @@ from colorama import Fore from autogpt.config.ai_config import AIConfig from autogpt.config.config import Config from autogpt.config.prompt_config import PromptConfig -from autogpt.llm import ApiManager +from autogpt.llm.api_manager import ApiManager from autogpt.logs import logger from autogpt.prompts.generator import PromptGenerator from autogpt.setup import prompt_user diff --git a/autogpt/setup.py b/autogpt/setup.py index 967645c120..67cae5da49 100644 --- a/autogpt/setup.py +++ b/autogpt/setup.py @@ -7,7 +7,8 @@ from jinja2 import Template from autogpt import utils from autogpt.config import Config from autogpt.config.ai_config import AIConfig -from autogpt.llm import create_chat_completion +from autogpt.llm.base import ChatSequence, Message +from autogpt.llm.chat import create_chat_completion from autogpt.logs import logger from autogpt.prompts.default_prompts import ( DEFAULT_SYSTEM_PROMPT_AICONFIG_AUTOMATIC, @@ -175,17 +176,15 @@ def generate_aiconfig_automatic(user_prompt) -> AIConfig: DEFAULT_TASK_PROMPT_AICONFIG_AUTOMATIC ).render(user_prompt=user_prompt) # Call LLM with the string as user input - messages = [ - { - "role": "system", - "content": system_prompt, - }, - { - "role": "user", - "content": prompt_ai_config_automatic, - }, - ] - output = create_chat_completion(messages, CFG.fast_llm_model) + output = create_chat_completion( + ChatSequence.for_model( + CFG.fast_llm_model, + [ + Message("system", system_prompt), + Message("user", prompt_ai_config_automatic), + ], + ) + ) # Debug LLM Output logger.debug(f"AI Config Generator Raw Output: {output}") diff --git a/autogpt/speech/eleven_labs.py b/autogpt/speech/eleven_labs.py index 06806b56aa..c1e3aff52e 100644 --- a/autogpt/speech/eleven_labs.py +++ b/autogpt/speech/eleven_labs.py @@ -4,7 +4,7 @@ import os import requests from playsound import playsound -from autogpt.config import Config +from autogpt.config.config import Config from autogpt.speech.base import VoiceBase PLACEHOLDERS = {"your-voice-id"} diff --git a/autogpt/speech/say.py b/autogpt/speech/say.py index 917fa65cbb..4cc82e198a 100644 --- a/autogpt/speech/say.py +++ b/autogpt/speech/say.py @@ -2,7 +2,7 @@ 
import threading from threading import Semaphore -from autogpt.config import Config +from autogpt.config.config import Config from autogpt.speech.base import VoiceBase from autogpt.speech.brian import BrianSpeech from autogpt.speech.eleven_labs import ElevenLabsSpeech diff --git a/autogpt/utils.py b/autogpt/utils.py index 8685d40263..8eb95deb91 100644 --- a/autogpt/utils.py +++ b/autogpt/utils.py @@ -17,6 +17,15 @@ except ImportError: from autogpt.config import Config +def batch(iterable, max_batch_length: int, overlap: int = 0): + """Batch data from iterable into slices of length N. The last batch may be shorter.""" + # batched('ABCDEFG', 3) --> ABC DEF G + if max_batch_length < 1: + raise ValueError("n must be at least one") + for i in range(0, len(iterable), max_batch_length - overlap): + yield iterable[i : i + max_batch_length] + + def clean_input(prompt: str = "", talk=False): try: cfg = Config() diff --git a/data_ingestion.py b/data_ingestion.py index c907cedf45..e2d98d1c5b 100644 --- a/data_ingestion.py +++ b/data_ingestion.py @@ -3,7 +3,7 @@ import logging from autogpt.commands.file_operations import ingest_file, list_files from autogpt.config import Config -from autogpt.memory import get_memory +from autogpt.memory.vector import VectorMemory, get_memory cfg = Config() @@ -21,14 +21,14 @@ def configure_logging(): return logging.getLogger("AutoGPT-Ingestion") -def ingest_directory(directory, memory, args): +def ingest_directory(directory: str, memory: VectorMemory, args): """ Ingest all files in a directory by calling the ingest_file function for each file. :param directory: The directory containing the files to ingest :param memory: An object with an add() method to store the chunks in memory """ - global logger + logger = logging.getLogger("AutoGPT-Ingestion") try: files = list_files(directory) for file in files: diff --git a/docs/challenges/building_challenges.md b/docs/challenges/building_challenges.md index c62c32d052..ad567238b0 100644 --- a/docs/challenges/building_challenges.md +++ b/docs/challenges/building_challenges.md @@ -33,7 +33,7 @@ Create your agent fixture. ```python def kubernetes_agent( - agent_test_config, memory_local_cache, workspace: Workspace + agent_test_config, memory_json_file, workspace: Workspace ): # Please choose the commands your agent will need to beat the challenges, the full list is available in the main.py # (we 're working on a better way to design this, for now you have to look at main.py) @@ -56,7 +56,7 @@ def kubernetes_agent( agent = Agent( # We also give the AI a name ai_name="Kubernetes-Demo", - memory=memory_local_cache, + memory=memory_json_file, full_message_history=[], command_registry=command_registry, config=ai_config, @@ -131,5 +131,3 @@ def test_information_retrieval_challenge_a(kubernetes_agent, monkeypatch) -> Non ``` - - diff --git a/docs/configuration/memory.md b/docs/configuration/memory.md index 7d7075986f..452a6eac9e 100644 --- a/docs/configuration/memory.md +++ b/docs/configuration/memory.md @@ -1,3 +1,9 @@ +!!! warning + The Pinecone, Milvus and Weaviate memory backends were rendered incompatible + by work on the memory system, and have been removed in `master`. + Whether support will be added back in the future is subject to discussion, + feel free to pitch in: https://github.com/Significant-Gravitas/Auto-GPT/discussions/4280 + ## Setting Your Cache Type By default, Auto-GPT set up with Docker Compose will use Redis as its memory backend. 
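For reference, the backend switch described here can also be made programmatically, which is what the updated test fixtures further down in this diff do. A minimal sketch (not part of the patch; it only strings together calls that appear elsewhere in this diff):

```python
# Illustrative only: select the JSON file memory backend in code, the same
# way the reworked fixtures in tests/integration/agent_factory.py do.
from autogpt.config import Config
from autogpt.memory.vector import get_memory

cfg = Config()
cfg.set_memory_backend("json_file")  # equivalent to MEMORY_BACKEND=json_file in .env
memory = get_memory(cfg, init=True)  # returns the configured VectorMemory backend
```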
@@ -6,7 +12,7 @@ Otherwise, the default is LocalCache (which stores memory in a JSON file). To switch to a different backend, change the `MEMORY_BACKEND` in `.env` to the value that you want: -* `local` uses a local JSON cache file +* `json_file` uses a local JSON cache file * `pinecone` uses the Pinecone.io account you configured in your ENV settings * `redis` will use the redis cache that you configured * `milvus` will use the milvus cache that you configured diff --git a/tests/conftest.py b/tests/conftest.py index 6e6f0ad380..3644fd85ff 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -4,11 +4,11 @@ from pathlib import Path import pytest from pytest_mock import MockerFixture -from autogpt.config import Config -from autogpt.llm import ApiManager +from autogpt.config.config import Config +from autogpt.llm.api_manager import ApiManager from autogpt.workspace import Workspace -pytest_plugins = ["tests.integration.agent_factory"] +pytest_plugins = ["tests.integration.agent_factory", "tests.integration.memory.utils"] PROXY = os.environ.get("PROXY") diff --git a/tests/integration/agent_factory.py b/tests/integration/agent_factory.py index b96953feb7..1781a3f12e 100644 --- a/tests/integration/agent_factory.py +++ b/tests/integration/agent_factory.py @@ -3,7 +3,7 @@ import pytest from autogpt.agent import Agent from autogpt.commands.command import CommandRegistry from autogpt.config import AIConfig, Config -from autogpt.memory import LocalCache, NoMemory, get_memory +from autogpt.memory.vector import NoMemory, get_memory from autogpt.prompts.prompt import DEFAULT_TRIGGERING_PROMPT from autogpt.workspace import Workspace @@ -20,20 +20,10 @@ def agent_test_config(config: Config): @pytest.fixture -def memory_local_cache(agent_test_config: Config): +def memory_json_file(agent_test_config: Config): was_memory_backend = agent_test_config.memory_backend - agent_test_config.set_memory_backend("local_cache") - yield get_memory(agent_test_config, init=True) - - agent_test_config.set_memory_backend(was_memory_backend) - - -@pytest.fixture -def memory_none(agent_test_config: Config): - was_memory_backend = agent_test_config.memory_backend - - agent_test_config.set_memory_backend("no_memory") + agent_test_config.set_memory_backend("json_file") yield get_memory(agent_test_config, init=True) agent_test_config.set_memory_backend(was_memory_backend) @@ -64,7 +54,6 @@ def browser_agent(agent_test_config, memory_none: NoMemory, workspace: Workspace agent = Agent( ai_name="", memory=memory_none, - full_message_history=[], command_registry=command_registry, config=ai_config, next_action_count=0, @@ -103,7 +92,6 @@ def writer_agent(agent_test_config, memory_none: NoMemory, workspace: Workspace) agent = Agent( ai_name="", memory=memory_none, - full_message_history=[], command_registry=command_registry, config=ai_config, next_action_count=0, @@ -116,9 +104,7 @@ def writer_agent(agent_test_config, memory_none: NoMemory, workspace: Workspace) @pytest.fixture -def memory_management_agent( - agent_test_config, memory_local_cache, workspace: Workspace -): +def memory_management_agent(agent_test_config, memory_json_file, workspace: Workspace): command_registry = CommandRegistry() command_registry.import_commands("autogpt.commands.file_operations") command_registry.import_commands("autogpt.app") @@ -138,8 +124,7 @@ def memory_management_agent( agent = Agent( ai_name="", - memory=memory_local_cache, - full_message_history=[], + memory=memory_json_file, command_registry=command_registry, config=ai_config, 
next_action_count=0, @@ -153,7 +138,7 @@ def memory_management_agent( @pytest.fixture def get_company_revenue_agent( - agent_test_config, memory_local_cache, workspace: Workspace + agent_test_config, memory_json_file, workspace: Workspace ): command_registry = CommandRegistry() command_registry.import_commands("autogpt.commands.file_operations") @@ -172,7 +157,7 @@ def get_company_revenue_agent( Config().set_continuous_mode(False) agent = Agent( ai_name="Get-CompanyRevenue", - memory=memory_local_cache, + memory=memory_json_file, full_message_history=[], command_registry=command_registry, config=ai_config, @@ -186,7 +171,7 @@ def get_company_revenue_agent( @pytest.fixture -def kubernetes_agent(memory_local_cache, workspace: Workspace): +def kubernetes_agent(memory_json_file, workspace: Workspace): command_registry = CommandRegistry() command_registry.import_commands("autogpt.commands.file_operations") command_registry.import_commands("autogpt.app") @@ -205,7 +190,7 @@ def kubernetes_agent(memory_local_cache, workspace: Workspace): Config().set_continuous_mode(False) agent = Agent( ai_name="Kubernetes-Demo", - memory=memory_local_cache, + memory=memory_json_file, full_message_history=[], command_registry=command_registry, config=ai_config, diff --git a/tests/integration/cassettes/test_llm_utils/test_get_ada_embedding.yaml b/tests/integration/cassettes/test_llm_utils/test_get_ada_embedding.yaml deleted file mode 100644 index ec4636e097..0000000000 --- a/tests/integration/cassettes/test_llm_utils/test_get_ada_embedding.yaml +++ /dev/null @@ -1,168 +0,0 @@ -interactions: -- request: - body: '{"input": [[1985]], "model": "text-embedding-ada-002", "encoding_format": - "base64"}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '83' - Content-Type: - - application/json - method: POST - uri: https://api.openai.com/v1/embeddings - response: - body: - string: !!binary | - H4sIAAAAAAAAA1SaSxO6Orvl5++n2LWn9FsiIgl7xl3kkiAgYldXlyAiKHJNgJw6371L/6dOd08c - QAqV5HnWWr/kP/71119/t1ld5NPf//z197sap7//x/fa/Tbd/v7nr//5r7/++uuv//h9/n8jiyYr - 7vfqU/6G/25Wn3ux/P3PX/x/X/m/g/756++DcanIKGZXd/X5SZTuBpdQW6+lYQqkfQ5NFN8pSgIw - zBGFBLyF+0x9ZATRzFVcC+xx3aJPfw3cJaBFA2RnibBlW4EuBNKhlLJ5dbH/QRJYDniFELe0Ryw1 - zy4l2M8lVX4IVLvccrDkD26WikG50PuxCQFdnkwEiSTcsFJvgcv2m6mB50DfUpu5FLAPV1ZQMZ8b - IvW7azZ//KsHo/R9nYA/OPXa3M0WXkbEI3Dus2z9lNsEjEngUv+4PdWrN6EKdpdVoMba1vqqHZkH - b+fNG4mbTQRmWGsFxIdzQM3Kfkbt1AUcBI3doc1xk9ZLJVoC3ClDSm3yBtl4AC8Bvk47CzunCbFl - aqkGQtGR0A74sT4HY8DDq8puaE3xHC1C9H7BD749CHUSFM03CxuwDd2YQD5eskks+B4idSLUZd1V - n+fOt2FzPT2pkptdNtu6EQD6Cg2sRFSr1/NdKvcmSu74EM8ioDerUyT9vHfIWElpvUwkz0F2c2e0 - vXpNNvuXsQe81EdofRp6LfDVTYMNUUx6PtQEjL//y79kHiuu1mRzc3xWsDg6TwQepu6OOhkEae88 - TKzIbykanVhZ5SOLI3zWZCmjogx6OCXDhAT9yuoulKYevMWdRl2+GOtu9W6a9CHZBxvBramHc8Up - MNsImCJ0jBgThDiA13A6U8UU5IxJ8keQGG8I2At5RecPahjD4yJBrGZgAl2aDgjGwQth47BVXR5l - eQyPQVnRMISPjMUvNgNJeRm4GC/BsMh0SsDUKRt6PB+LelHlIJCl9SYjYfewsjbe5Rb8MNvDONKd - gclhbEAMywgba6u762TdAkjVIkJrd3Pc9antBBgEgUIEXZPZdDzHL/hCTxEtOGkHtg5XBQiuvsUe - DR76a+oCCDe720rWtGncxT69JHi49BTbp/jo8vxe5mEjlx02733NVuNIb4BDao0PgNnDnOWLBoNH - YlB8ebb6ulyVUS5at8MW8c5DeVb2IdQv8RN7R9F02VEWRxBM2Yka77HVZzqLMbwdpBSJVyNzRymr - rY2zKwusPUQpIrNlFYC+AgOHmmCxbfbkS3mMeR47t0UB/ImJHvBP64C4GV/rebiFCKKw66h78mRA - twSKwOJ5DtvXng4ru62tlPsgx+63Hpep/Sgwm2cXH55jxNZzfL4BkkeYHPqNMVBkCSFkaL8isbq4 - gPmml0AnmldsPYNbvYiZBwEcZhcHqV9my/bSWVDZRwOaUSyxWs4WT17k15keClcZmCRTHua8lmGn - 2QTR9CDbEtabDUaSqLHoMsfUkgoo+WRNL5+oc6ruJqnc3qeeHIwRldNjBXb81aCHeE7ZOnKrAMs6 - 
JeRZtn00qXIQwho5Cek+J21Y5/NJg547drRQwStaLreWg+M2irB7ZM96em4kDSiX65bsDjAeRv5o - FxDt+JEqUVmzWUkwhD23rthXls0wLRvTAWgnjGhV9/nQ77ZKLpUhxNTYmEs0LU5pyNKTK4gEBjmb - 9N1DA/zhpmPNutz0JdZ8A3Kq4OOD8HnUtBma8M99fXOpaxLfHF56VUxFwtozfS10vYc5jhl671zD - 7ayTOErIrC4EGB9xmPbVK4aBW2dorrdAXwN9HGExwJCikPu4zMx2BF6OmU6k0DCGuXWjXLKi/EVj - 6cPYlD+EGSr704CPt/NnWBRijjBqDzLaXMIuYrvNkgIm5DYRWWXWs7XbOlC0e4/aBlcNzNn0PcyX - 4YiA8UmHlRVVDBsOKvQU8Za+RmkqgcwZPIy2xgiWNNzOsBncC9Wt/D6Q7a0RwFsKrlhP44ixuK1z - mJ9QT93w2LqzFJocrO+tRA9G1NdLW18R7N2qIuLuRoalfTwrYMvjTOZvf1qorhBgHHuB6tK+cef9 - dXFgBIUnqTt7x2b1/kkgi/oLavuDDKZdJdrSSqWeatz5zOa38EHAy7BPzYDYYAG9ToDs9yrhv/O7 - 3kQ7Br4WHLD71bdOeCgF5OhVRnJYTWDmzWCG7gXK2MxhmlH95s9gPrsaPvrPul6GduWhpZUdPp5a - N2PcoxThd/6pJrx1sN5EJYGnk8OhrdsLw7AUhgfdNssRF36WqOtJGUDH9I5Ym54LYx6gDjjipiH8 - GzRgILEkwqnjJHqA2qGeuYfFwfO6a5Akth82c+RUwlZrZ3o+eHt3GRvZAiS7xPiwOw86I0WqASsq - XiiYHiqYL1AeAa34CTufxHKX61vUwIDHiR6bOM3owxV4MCXdhM3g4ej8RRs8OBlsR80VvrMlLdIW - 3nfBgJXJ9tz3tXReEIKAYteXjtHylLwGevfXA59j+VMvt9vZhtKaythJKk0XnCFuwFefscdLJhtP - bPZkJd16JMolO2PGpChw674Q4QXqsDnzm1y69MuduvxWi3jyiRA8bz2Rekfxra/2W4JwSk0DR8G5 - 1ufr6Qkh+qQ+eX/1tputOYUtvzHJOnU2W80EImmTcBHa8Ks0LPYlTOC2O26o/yjs73qiyR9/5CVW - XM/LPJbgFFUB2QUiZatq6RUsJPVB5u/7FOIks+HhaliIPWSZjUXjJHB95C69U3DRp+zJV0BYbJ6c - 0rAG7KffB6kosW/K7bCcmBVAk5Idtk/1h43h6CuSu79cUNTZF0DuYPSAWRkOmWt5dafuPqxwDdID - RuVRdZmcuCvciMlEXSW9slV1Zw74/rPCSF6O7uortgAJ6Z9oEndrNKLXksPMs2qqHhRNHxkALyhe - vC1GZr4b5lrqCYyO1pZaqcTrHXgVAbwcNiN267qsV2z2OZzsV0ldheJh1nrowBRr7/96fxetRvBU - azySmk2Qje1JEeHWbRD1YL9j1FPzFiaf2id9EunZ+pATGy59ZKNdIGLwez5wwnOGjbJ86Gy6lTG0 - 3vHtT723vJnOcCPGE3a//WnZVbMDY+7IYeWtGtkCWMtDn/VP9OaAoDMuSBLAKamAtdi26+UdVCv0 - nxHGB0/P63arZ8p+P8aAjKfdFgxTIvYSSooQqy0NdRbWqgDezcajWno5RLO5X0W407WeOob7yGbX - OzcSnYH01e8SzHSeEyjPUkidgdcH5sqjBEs+7mgx4339HtpVgKfH1FBNSEyXP1qAg+vmiqj9XHYZ - M7o0gWP4qAhI1MWdw9HUYErUiXqqvrB5TIpv/QoT1qT7yBbnagRAfUgmthscg4UF6igp/aTR42ab - DvOYezYYNchh/bW/6qPJm7lUJCeGkjXjs7EWmAIN1/Cxsk+ygcwukCCIA4aNoHtHdMCnAOyPTwfj - G8uHpRh8Afh+XSGiOfXPn/HAj5CGBKa2jKJE4aExFh+qtJH4/X1eCEP9HpE9pYrO25u4grz+vmHn - 5o4ZScclgGVVMNTZgLqtsWYljPJUo97xibOlPro3aXEDhUbcbR+tGZgVyNXeh6r5rEbjKh17OJ+P - GrX7gg0lW4sc7PjMoLb9GdksWoIEHTUryV5528OuhwUH6xzX1ASdWn/rPQbVkLwRj/kFLP6wjlB7 - 4hrJqDfA1m1OhmQJwZlG5XrK/ughamObmvrwZrPjDC0wN35GcQsUIDiLpsHwfD5SDfcO4B2nbqF0 - 0mqql+sSMRSebHghLwsHXVC79OfHp1ueYPu+J1EHl9oCxFsHqj9Tk9EPM3t4etAGH+73rT7LzeqA - 0UjP3/mANT2MPJKm4XbCasRb7uI3fgjuZ/FEHfG0uuNmjGYQcy6HXpL6yKbaPdrwlzd1IWuHpZxp - DrFcINJ882XHrLEBh0tLv/mnGlbFPoYgDcQc65L7za9bQ4GOESJqQU8DW+tc3sCoPj/Y3Yk70FWH - aw4DeTeTfWVe69mSqwamD67E5tfv7OxeKCC+kopsK/uZLbOcIVDySUd21/eoz3TQJCg81itWlxxF - 8x4VIQzoWiNxF16GOS9bAukzS4i8v1tgvVw3CkRvTsAq92n1dc3nVN5fNx15g04dhL0g3eDY2io1 - Y/kzrKNRxrK6hCV2HU3Jfs8D4oGcsKY9Fn1BtL8BdFYAdeAxc8k3/8GnlgmoV/rRnRtzEX71RK83 - q/35j1jKK/tNfdS/wHxvFQu8zWYlUXDW9bksHg7sbo6N3SRt2PrJsARffL9DrDcrdyBp38DPfCoQ - f38r2Swmai5/x1Nlu22jKaDFC7KovRDh5FTunBp3WwqCUKEqunLR3KVlKC+39YX4aUjBKD+NEX7X - P1r22UOfYzbeoDffTtS5uV7GErZW0h+9KQJDX0JBhkDfLQo1N09nmL/5Bhiu5VPjmqjD7tUYLSik - EpA6ZsqwO2VVDlKiT+h1KIaoS4ugBd/5or/1O3qBGcD0yV/oY5tifW0rGoKNadyRYFuBy0javwD4 - NMo3f94zFtZH/ud/aELW/pvXRgluwfP0x68vh2tg/PG/1tpHOvXUuP/lETKpQslI5AkatGqgI5k/ - 3cGiawuE9VPSyfztZ+xZ1go8v8cYeyipavbzb/GjD6minXuXFLrbg7g01m8+7Fxat9oN4g+j1AIX - nH3rrwVj66gYJY9b1L8uLw1WtpqSLd6fM4YsLtj7fJNjJMIzWPL3ywMavozUupb20AfSoYILtDn6 - 84vELv0S5kpIkbjrpuwPfwmGjYQku8uBcFtvEiSkfRIQHludJa8PhEuyAei1trU708ER4SSHB3zU - W06fV9Ks8BxeD1SP0rSe57UP4VooPFbf1gHM6LUUcvp4GGT51gfbA1mE9f1YYLuRlWi9kgGB4C5O - 9GAvtvu5yjoPz5pYY4zugst2+tOBumSH1LxHyJ0jyo9wauua7O3XNVtMO2tg7+V3IontAcxD/Jqh - 
ubwIvbc6ijrTqCXotpJIFc5FbHYvcw4vnNFj5z5u6mWWIw8qkt/Qg3cLszXnPR7IhVVS1XluMxI/ - xBBAKeapW5ApY5301mBNDlck2h8PrMn7JIJFbs7UiJ8kG03B9cDxWdzx8aPVbOVUmUBouguxfbPP - mKjWNxhGYEWQj0/ZrJOa/62fnz7U6zPUUjgEbfdnfuf8VDgwy94HrMe+566L/+bhKPgIl7zI6U8h - mhrw3oHDL78z9s3nIqGvHDvGEjG25ftY+vYDrGsvvp6PKioBMFaMxML29UXfXRTw43nuLaBgvb6R - Bb/8B9FI74f5+RbKXx6jaJtVQ0f3Q7k/12cNB3nc6lRO1QoWAxeixi1BtPiNGUL+teGpbQvD0Pey - IoLp824JTO59NpNJ6SFVFEjawWh1VlbtDF/K3qUG94CMvqdRA6p7fP78l7twyyPYJ3ZlYr+ePjW5 - vi0L1iVnozJc02hOHa2Rv36ILOUprpcj/tjw+rY+2Dm/d4xOJ92Tf/xJfTAB0N96JpxrUttQ02ht - xgDJ19gZ8c//rBN7QMDrJkedOVSzMWzPMdyOyoT2xn2pxy9fg21cNtP+vHPcHnGuBiEkFlW2mDHm - OfsZdpYAsMKLhT5eT08OjALpCfzsNLaquO1hcZgcwhwJuIvbXA1gb5wDko4XO1vEArbAvXAyteFh - cNnpLGvQng2dPkprqic+WlJZGe0r2fNmw358FBRJxMii6Zcf38x/+QXndfvSf/4QvJ6XHmvRvhtY - 4D0k6W7A5Ntv+2H83oe2m2Y0g9D55rVAg/0oQiKxswB6Im0SiWxMheL9Z2LdMPYV+OWH42mjusTM - ux7MZltgMy0k1ke58gLf+aFq7nhgTjiQ/3nf2mVZh+HKGgIDeTtj7CQkI6qSWFAUP5i8u+d9oD1M - uP03X9JDFdpRb5XXGG4dZ0fN4NG7bMcXJehasqBebD9gXiW1lW9pvKO6ewrAt7+uwB1Ch2rlNAHW - 8bsGfip0Ii/8Zjq7bo4N+PJq+s1/YHs+vnK4+YjoD+9dvFeWAF0cAT7l12Egx3qL4ErFnhpxiIf9 - GV9FuM5Cg60mr4cF7oIS6s3WJFs327G1GVMEI3nOqbbfpdGojXsR7urHEYlfXracuM4B37z8zW9P - 0FOtWIF9D05fvnZnzO65HIIXDfHXr331UirBTn1H1OCEsP7pgXQqh4rI85Vn6+/9LE5ikr3/rIcv - b7Z+/AEf4aq7c+ebKQDGjIkgVLk+7697B2qCpJGBuqCeOHbWwEswEI6mZpuRn14GhG/JJtKd+udX - 4I9H+8dmZfP97At/+MzmxuAw4c/Og/sxAdTyzoQxJeBz8PE39pdv7/W3U3UpLDUlxt4NfMD4q9d3 - lxT4y1N0iryBQLe95tRNPu9h0TYOAvBS6/T40XS2rHYfSycGKbVhcmRbAroQLgN5YG+gA+sfFkhg - 2PYJEoT9AkZwfcdgE79V6pq+r/M6fwqg3qEL9peDEs1uqCtw95JK8tRufs1AUNnAOJxe1Do5mr47 - inog/fR9zmPb/fILBRKXn7G3T/WMbu61DS89uyMAETeQjU57sG4yRIZZoe46n68KtNj1g621Z+54 - VK0S2l6c4JsalD+9QeCb12mmcChbrdsg/vIZxpxfRayMCg++L0WLoDFCl11lXQDvs5lQnz/J7Mej - gZvUzc+/RuyRqSncZ1eFfv25y3/rWT6cT3eMkiCrd+mIE7BjoMX6+/OqWXWTX/CrF4QT4ZbNh+3m - 9ocXWf1QZXMfbC3ocXBPvTPl2Ki3XAo43q7oob/OP55syGopqPSX10lTAQPoUS1SWynf7penoD9+ - 6dSdrXoXk1SERg71n37UfSY4DfzuT2C9I162Oz1CBxo599O/dlhF5hB4E5cEF8MCAG2WbIR5sb38 - /HJEj2pbwN4r7ljV9B2YuQeCkmJtEsRzjxysxZ4UkJ6klSoXdacT64RmcNw8O3pQjIFR7TlDuM2s - FStqumEL3KUV7EygIMsbXwPtD3tJEh6BjP0xnPSfvsBbAkpSfCIuY0HoJ0AXLh2av3x6Fpw1he5B - 1bC2zPMwvgXqwaOPY4pbfxvN8f4kyE8zeFFzIyqR8Ov/H1+2qdL2ZOgVWw3+jDfubyXaYWfiwIdc - P9SrpHR4PbeqAdqoGagenHWXLP7E//whPpyTKWNfvyh/88mf/YWV2h4C1s4LsDMgqtP4YSXQo/mZ - BhMph13Udx7cbZMNgS/ryabuo4jwiFyMcXRTsuWbp6BUPg/kQ1EdLQFNGrgxrTs2pE8E+tsaSvAy - te6Pp4AJLoMB9XiDsdtMhb5uu30KN9GxR2y6JMP84rcB1IVzh7oqx9my9ZAnhfvXif78ycKUjQPS - zb7+7S8w8kQ3AkVJawjvbIp6XuUlgbzURmQP19plV9aMsBssHZtA93Xy88O/vK+ViaiP0r0U5K8f - psahcKNZcKQbbFwUY6x2oT6/KBbAb/9HxyWuZ+mZVjByKo/65oV3R/zwNPjjZUyITsMo908e3LVD - S/g4pDWTkOj94Y+XdHOpZ16wbLgRhSOST/CarSMnCWDevp/426/An/xvc+1Av+uVzXIj2dI+Fy5Y - iY7IpZddj8D+qhK0+er7aqR7CAso+kRsmr6epQu7yTW2Ttj/wCaaP5z1gostEqpc0hHQIuo18OXZ - ZJGO/cDsXsjB8T4w0hqqGHV0k3I//4rV6D5n9P1CEjjs9SO27mrGGA6iFGZw80Q722U1WY6cLe2U - LsV2RdRhXvkcwfAyZjTcbnBGfzx4t26m335a9NvvBV8egY+k58A01GYKv/yY+rMj6N3LA9ZPn7AD - d80wbMWbAr/9AvuqoLBp3r1n+M1zROhj7+tPrQpu1BNP3nZaRUtyaEZ4xK+G/Pj1ck2mAv78uTek - gt5utrMo/fTLFHiSrVn/GeUfn4y/ej0XVdjCe5xLGD0kTWeKb8ywAsz4+jPObVM8a/CsSTVG/srp - 0zBWJfyUq0Qkv/m4q+ynrz/1AGrPYaszxC8Q5I8EzdrZ0dmLditYrhcfW5X/YqOPDhW8XVf05XOb - aC3sxYITjD2qLM1xYCu1R2guDUFSv9tnP14n9/dbivFxkw5ffbaBwCqLKi/j5tIHbsr9ZVOpWHck - oLPrPnZk8zsT6BNx0dgdzyOA1tum+uodwWrdakn8+3cq4D//9ddf/+t3wqBp78X7ezBgKpbp3/99 - VODft/vt3zwv/JsKf04ikPFWFn//81+HEP7uhrbppv89ta/iM/79z1/bP6cN/p7a6fb+fy7/6/td - //mv/wMAAP//AwDOXgQl4SAAAA== - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7c09bf823fb50b70-AMS - Connection: - - keep-alive - Content-Encoding: - 
- gzip - Content-Type: - - application/json - Date: - - Mon, 01 May 2023 17:29:41 GMT - Server: - - cloudflare - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400, h3-29=":443"; ma=86400 - openai-organization: - - user-kd1j0bcill5flig1m29wdaof - openai-processing-ms: - - '69' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3000' - x-ratelimit-remaining-requests: - - '2999' - x-ratelimit-reset-requests: - - 20ms - x-request-id: - - 555d4ffdb6ceac9f62f60bb64d87170d - status: - code: 200 - message: OK -version: 1 diff --git a/tests/integration/cassettes/test_llm_utils/test_get_ada_embedding_large_context.yaml b/tests/integration/cassettes/test_llm_utils/test_get_ada_embedding_large_context.yaml deleted file mode 100644 index b6d6a75b01..0000000000 --- a/tests/integration/cassettes/test_llm_utils/test_get_ada_embedding_large_context.yaml +++ /dev/null @@ -1,1484 +0,0 @@ -interactions: -- request: - body: '{"input": [[5289, 564, 71, 773, 89, 2332, 19747, 87, 664, 71, 8311, 85, - 392, 25299, 11032, 940, 89, 85, 398, 80, 73, 5481, 48558, 86, 1919, 1634, 8890, - 5346, 44185, 53060, 450, 392, 68273, 64145, 511, 77, 80, 3696, 71, 2042, 28990, - 44490, 19456, 89, 21807, 44490, 6331, 3180, 23361, 71, 455, 76, 583, 87, 21436, - 34695, 352, 70, 953, 278, 59524, 2857, 93000, 641, 6043, 73, 70, 36463, 7141, - 21807, 391, 1273, 8536, 566, 71, 7513, 580, 263, 936, 10333, 86, 983, 822, 9700, - 73, 3316, 80, 1320, 365, 27278, 564, 67, 12591, 3576, 36851, 2465, 1739, 657, - 6375, 89, 23361, 6885, 352, 88, 14046, 292, 70, 32204, 648, 300, 73, 32547, - 869, 20854, 28679, 89, 11805, 70, 9081, 80, 11251, 87, 28679, 6194, 85, 906, - 64, 300, 16501, 70494, 6511, 51622, 87, 80, 8462, 295, 13536, 49463, 604, 64, - 333, 79, 302, 5469, 36068, 267, 5289, 19870, 911, 71, 587, 4912, 32868, 64087, - 22650, 89, 3368, 83, 50798, 391, 273, 365, 72, 1233, 604, 3858, 48380, 710, - 6375, 93000, 3013, 427, 7796, 66, 11160, 77, 10910, 85, 4777, 5969, 276, 7717, - 87, 71, 21325, 80, 73389, 3576, 86, 29238, 544, 1026, 31019, 20218, 3524, 64, - 7270, 7783, 80, 1498, 77, 1739, 5979, 327, 49450, 1791, 36409, 86, 582, 894, - 73, 85, 89, 2174, 87, 40494, 20824, 40078, 89, 80, 4317, 1609, 85, 3601, 73, - 86, 36068, 894, 56871, 89, 4680, 67, 2562, 1494, 6331, 16017, 2332, 21046, 86, - 11823, 18854, 85, 10616, 85, 23913, 73, 86682, 441, 1494, 360, 1978, 3013, 2562, - 511, 86, 427, 5298, 81, 2580, 89835, 14694, 86, 3059, 26738, 65736, 84, 1394, - 4458, 677, 85, 3368, 71, 15479, 391, 713, 35531, 566, 94869, 325, 14029, 52360, - 80, 406, 1290, 10616, 79, 773, 3315, 365, 89, 73, 2111, 13045, 73, 89, 14694, - 710, 95210, 1804, 79, 20824, 80, 4086, 19555, 936, 1091, 301, 936, 1494, 73, - 10784, 1029, 77, 1302, 1310, 86, 80, 24752, 89, 73, 89, 1302, 343, 38977, 324, - 25457, 9902, 88, 4168, 67, 3841, 953, 96518, 4777, 3696, 80, 61057, 90563, 80, - 20127, 86, 3418, 24672, 80, 796, 88, 85, 1026, 80, 8910, 285, 7864, 406, 14694, - 454, 361, 85245, 83, 664, 43210, 3368, 27278, 9832, 89, 5010, 89, 762, 86, 19747, - 299, 23808, 69057, 80, 4922, 20211, 71, 73, 404, 89, 1395, 830, 939, 5298, 402, - 83, 80, 70, 1631, 97582, 3306, 7387, 73, 89, 2034, 66, 3906, 53060, 339, 343, - 582, 2465, 80, 52926, 2642, 1739, 69057, 3906, 13649, 85, 3178, 291, 66, 267, - 67910, 24672, 2201, 2933, 34175, 4777, 23361, 10784, 73, 764, 1517, 87, 12825, - 49755, 24752, 89, 265, 307, 85, 80, 689, 332, 1331, 76, 1031, 1658, 325, 2303, - 36851, 87, 8892, 365, 87, 402, 
17606, 80, 12407, 4341, 85, 78498, 370, 3074, - 455, 44183, 79224, 830, 20811, 906, 361, 73, 2580, 80, 359, 258, 10753, 2329, - 37406, 50139, 67702, 10616, 56029, 7513, 2438, 402, 3258, 80, 22491, 4748, 359, - 41129, 2034, 11032, 1658, 65, 3714, 65, 1919, 40153, 363, 2580, 52333, 73, 87, - 80, 45015, 4086, 3141, 69, 392, 76, 19747, 21486, 708, 88, 1273, 21486, 85, - 764, 67, 16373, 4223, 641, 53060, 784, 8977, 73, 85, 1739, 6043, 321, 89, 18571, - 49450, 32589, 450, 88, 2303, 62497, 49595, 80, 49755, 1494, 80, 302, 2689, 87, - 29238, 89, 1551, 8088, 76, 20926, 87, 2034, 66, 785, 22873, 53060, 8480, 708, - 88, 906, 73, 4680, 80, 22119, 89, 398, 78498, 1919, 67, 64087, 318, 24409, 869, - 754, 20990, 1498, 23760, 648, 728, 89, 22619, 29224, 1216, 2933, 85, 1386, 8587, - 7737, 87, 17043, 24990, 70561, 47171, 6375, 67, 1073, 85, 89835, 3933, 85407, - 564, 38359, 3647, 60338, 89, 17008, 10079, 80, 385, 1394, 32837, 1247, 14170, - 74, 936, 1658, 87, 73, 365, 73, 21807, 76, 50139, 2629, 20568, 80, 1609, 5241, - 21206, 96518, 301, 36722, 2249, 74, 581, 75, 641, 1494, 31019, 6780, 967, 361, - 288, 77, 49755, 1631, 73, 11160, 20990, 876, 4511, 69, 675, 78, 352, 15809, - 363, 65, 9110, 20990, 2034, 82, 544, 28546, 939, 73, 37227, 266, 70, 3013, 7911, - 365, 14625, 65, 3841, 62559, 69, 2658, 76, 2302, 41922, 383, 347, 75, 3172, - 784, 634, 89, 26554, 74, 75406, 544, 71, 1146, 20926, 10784, 32589, 71, 894, - 64, 2319, 10784, 509, 15720, 327, 2701, 45015, 89, 344, 9810, 71, 344, 29700, - 2194, 5308, 303, 85, 23361, 402, 86, 939, 283, 6870, 2005, 73, 1919, 5308, 858, - 18854, 74, 11032, 911, 65, 80, 10784, 3576, 4223, 5289, 69, 2249, 37406, 44508, - 89, 2739, 80, 70, 80, 10462, 80, 23001, 31019, 80, 40429, 9092, 19072, 80, 56102, - 15444, 598, 30986, 85, 23866, 3933, 67, 1494, 86, 21325, 73, 2580, 18571, 675, - 19239, 876, 352, 24990, 87, 1336, 14029, 78, 762, 9258, 2319, 9081, 87, 24244, - 50536, 15720, 668, 85, 359, 86, 652, 70, 73, 70, 675, 355, 1725, 3696, 65941, - 295, 93797, 9081, 1273, 49891, 830, 581, 360, 302, 89, 14878, 87, 80, 73, 44067, - 86, 80, 5544, 49450, 7141, 65, 2465, 70, 344, 49463, 303, 10616, 3172, 906, - 74, 1658, 9832, 88564, 65, 80, 1216, 69, 89, 58153, 9832, 85, 4168, 32204, 2067, - 2465, 773, 73, 28774, 89, 5308, 18571, 49463, 300, 664, 14694, 3178, 71, 38844, - 80, 876, 13423, 65, 1146, 87, 34175, 327, 1394, 68916, 302, 74, 78498, 22016, - 450, 65, 2005, 7141, 80, 352, 31764, 73, 49865, 641, 16684, 17692, 2067, 9697, - 28679, 73, 85514, 80, 337, 1804, 939, 84, 80, 906, 15618, 69, 8671, 894, 385, - 89, 318, 13028, 18458, 70391, 89, 12328, 89, 5010, 49404, 285, 764, 32868, 3423, - 70, 5192, 73, 70, 80, 24244, 566, 3172, 346, 31764, 89, 1273, 75, 3427, 87, - 80, 38361, 1631, 4714, 78930, 21807, 24990, 40078, 86, 3906, 1310, 70, 406, - 80, 360, 1100, 87, 2032, 73, 49865, 668, 37406, 6358, 24244, 6634, 265, 1100, - 64, 16711, 822, 80, 52926, 8910, 3059, 1247, 4922, 87, 24244, 16400, 1729, 70, - 1320, 427, 85, 355, 22524, 42908, 1729, 8510, 80, 89, 26929, 1897, 5230, 363, - 80, 858, 89, 80, 64087, 637, 80, 48480, 69, 18577, 2303, 307, 818, 41129, 953, - 66, 2912, 2067, 73, 822, 8671, 1247, 22619, 1073, 383, 67665, 8212, 752, 10759, - 23866, 71, 1026, 89, 26929, 50139, 71, 16274, 8462, 82, 764, 391, 318, 74, 2465, - 9832, 3013, 713, 16400, 89, 1065, 86, 39911, 74, 773, 81, 2042, 77, 1029, 9701, - 22524, 80, 24672, 71, 22619, 76, 65653, 2034, 73, 581, 4748, 17008, 1320, 6634, - 1498, 85, 80, 73, 85, 402, 67800, 637, 77, 820, 302, 12082, 4657, 831, 437, - 74, 31881, 89, 11328, 9117, 80, 
6358, 343, 363, 5964, 65, 546, 85, 32478, 1498, - 87, 84, 454, 295, 3933, 88, 762, 1394, 69, 677, 96518, 88, 25299, 1098, 764, - 3919, 447, 82649, 3423, 70, 566, 2249, 89, 302, 2201, 88, 983, 1498, 1320, 333, - 3368, 34175, 84, 392, 87, 71, 16400, 93000, 8186, 27752, 74, 2465, 2642, 7141, - 74, 11823, 11927, 80, 18499, 454, 85, 9907, 1662, 4171, 12273, 2706, 36325, - 89, 7387, 65, 4922, 20926, 89, 23576, 73, 27515, 80, 86611, 9991, 333, 343, - 2881, 20990, 3423, 540, 74, 796, 3013, 89, 85, 36068, 14485, 10056, 14878, 27515, - 2465, 1026, 89, 2332, 303, 76, 28927, 43210, 3306, 81, 1658, 3576, 327, 292, - 76627, 3415, 351, 75, 447, 89, 2302, 291, 75, 4086, 53289, 21703, 89, 392, 2042, - 677, 3013, 70, 73, 784, 89, 14878, 6358, 81, 17156, 78, 582, 24752, 9996, 68054, - 2312, 18571, 906, 830, 76, 344, 263, 87, 4680, 83, 12498, 65019, 4912, 65, 346, - 10644, 75, 9902, 45228, 564, 73, 13423, 6331, 39911, 24235, 774, 74, 6789, 85, - 1919, 441, 4748, 273, 28679, 25299, 41922, 1073, 68475, 18571, 3978, 89, 675, - 66, 1897, 361, 37406, 9810, 53835, 66467, 87, 74, 1729, 894, 61057, 564, 81, - 27515, 728, 86, 2562, 652, 73, 1336, 2739, 675, 71, 1897, 869, 677, 675, 86, - 41796, 73, 637, 80, 24752, 2319, 1498, 8671, 28996, 13671, 9110, 375, 450, 1347, - 69, 689, 72, 1100, 51318, 1609, 2042, 69, 303, 3933, 69, 1347, 8318, 86, 3647, - 74, 710, 85, 4680, 24012, 278, 89, 70, 788, 7911, 28679, 70226, 41796, 2174, - 57208, 11328, 292, 5308, 21888, 13536, 5308, 982, 65, 375, 820, 564, 32962, - 8998, 2438, 74, 15479, 51622, 8587, 283, 2303, 67, 2857, 347, 372, 11823, 65, - 303, 631, 77118, 92216, 3696, 65, 80, 2329, 4341, 74, 6200, 73, 70, 12591, 87, - 80, 39911, 9902, 1320, 7387, 838, 56871, 61318, 86, 34175, 12328, 46082, 73, - 87, 4341, 75406, 36825, 80, 16284, 12940, 13752, 70, 73, 88, 5544, 67, 75092, - 30683, 80, 566, 65, 675, 84, 70678, 375, 511, 70, 74, 50900, 2465, 88, 9081, - 70, 324, 80546, 7387, 24672, 7501, 87, 4511, 38977, 69, 73, 27278, 303, 2025, - 5979, 4317, 915, 14170, 80, 9978, 12825, 32204, 1310, 14878, 89, 13423, 11032, - 27515, 13536, 80, 7369, 86, 339, 14878, 89, 28531, 2303, 8587, 73, 85514, 501, - 80, 501, 427, 4777, 45228, 939, 85, 774, 87, 6789, 4867, 19456, 71, 6885, 455, - 75, 728, 65, 303, 675, 81, 3172, 72, 20629, 72, 587, 302, 86, 89, 3696, 86, - 8480, 49463, 869, 295, 73, 385, 80, 73, 1658, 13183, 87, 94908, 8480, 3601, - 1546, 99355, 66, 6194, 5192, 77, 376, 73, 88, 1729, 85, 329, 19285, 31268, 406, - 641, 1463, 76, 392, 27780, 10470, 347, 1029, 22524, 376, 69, 22016, 86, 784, - 288, 36851, 85407, 10056, 87, 6870, 9092, 292, 84, 1354, 70, 604, 723, 876, - 80, 51759, 1320, 4922, 2171, 66, 22119, 85590, 85, 365, 65, 12328, 69, 283, - 73, 86, 67800, 713, 309, 5308, 8892, 84, 554, 2329, 60933, 80, 74, 332, 86, - 5308, 2042, 83, 652, 39152, 275, 69, 8783, 73, 28774, 24672, 87, 3893, 370, - 79, 1662, 81, 24752, 3178, 20824, 71, 31019, 80, 78, 80, 21659, 1026, 301, 675, - 2701, 76, 8671, 51622, 4214, 8311, 87, 80, 295, 12825, 89, 303, 5964, 92512, - 784, 74, 99066, 31019, 97523, 454, 93000, 8671, 581, 50922, 80, 634, 77, 3893, - 18854, 19239, 18631, 75, 50139, 7488, 455, 41129, 7453, 80, 4171, 86, 16618, - 25105, 4341, 10776, 5683, 360, 9810, 89835, 1791, 31881, 70, 7050, 34106, 12825, - 17134, 983, 27515, 74, 34175, 8050, 7488, 10910, 84, 354, 299, 2332, 87, 18244, - 85, 441, 710, 13423, 2332, 70, 2032, 68, 2319, 73, 89, 764, 1604, 35555, 39152, - 40078, 8050, 73, 2234, 65, 575, 1800, 38361, 73, 10616, 14878, 49450, 3919, - 20369, 6375, 1725, 86, 8462, 325, 49755, 347, 83, 73, 87, 
7513, 89, 830, 12328, - 71, 8890, 97523, 303, 1631, 83, 70391, 29684, 1031, 87, 752, 31881, 1320, 4239, - 1347, 784, 49450, 3576, 6591, 70494, 664, 2203, 2329, 86, 80, 36708, 80, 70981, - 85, 1764, 80, 7047, 796, 78, 1073, 263, 7501, 450, 375, 16820, 73, 1310, 25457, - 80, 7488, 65019, 1026, 355, 73, 14606, 89, 325, 295, 447, 73, 291, 69, 89, 36238, - 911, 66, 14092, 3013, 344, 77, 398, 4086, 88, 12412, 53835, 7387, 66, 1551, - 87, 73, 604, 73873, 1631, 1517, 318, 13009, 74, 376, 89, 8050, 664, 4193, 2000, - 87, 74, 4086, 454, 8890, 73, 89, 32547, 3919, 10333, 554, 63643, 89, 327, 80, - 784, 88, 3988, 13884, 664, 73, 88, 22119, 10196, 80, 8088, 41796, 343, 85, 28679, - 65, 8998, 11032, 1662, 361, 56029, 3368, 36851, 72245, 2034, 14878, 87, 2319, - 69, 73, 4942, 80, 70, 34097, 67, 406, 62497, 17156, 869, 1546, 7141, 10616, - 80, 1729, 87, 68156, 1662, 10630, 4922, 15930, 11160, 55109, 74, 25299, 73, - 59524, 579, 12825, 86, 822, 454, 77, 4171, 58305, 1517, 12498, 77584, 5192, - 31764, 3716, 675, 383, 21486, 291, 8998, 11823, 321, 19639, 8587, 309, 66565, - 3635, 3013, 89, 906, 3418, 7759, 18894, 89, 762, 275, 1658, 3933, 398, 22016, - 337, 747, 2249, 56029, 566, 94869, 2249, 4458, 670, 65, 16220, 89, 8050, 8587, - 85, 80, 301, 74, 511, 266, 79, 6194, 331, 87, 540, 6410, 1494, 1273, 4484, 4341, - 1073, 283, 11377, 84, 3935, 89470, 80, 70, 292, 76, 1105, 2642, 20824, 87, 32837, - 303, 12825, 4942, 406, 80, 74901, 44067, 89, 21206, 15618, 1065, 77, 2445, 2580, - 73384, 5010, 38181, 1065, 354, 77, 5972, 664, 82999, 689, 2302, 1910, 74, 1791, - 258, 1810, 71, 7006, 85, 953, 8587, 80, 501, 377, 77, 2739, 361, 25123, 54761, - 2005, 307, 96518, 22619, 1494, 76, 1146, 18442, 14878, 2201, 30148, 11949, 85, - 13092, 728, 1065, 30506, 29684, 14878, 13183, 14878, 65, 8088, 86, 19870, 86, - 544, 85, 11160, 1498, 69, 89, 5104, 71137, 28774, 331, 1031, 79, 91410, 80, - 29224, 49450, 93000, 5964, 1394, 8890, 84, 80, 42908, 10176, 953, 11251, 24672, - 87, 355, 55233, 25100, 74, 4511, 16400, 20211, 17137, 3306, 20990, 372, 13855, - 309, 89, 47689, 2701, 1910, 20127, 363, 80, 354, 637, 9117, 329, 50864, 427, - 80, 278, 89, 708, 343, 62369, 1302, 16400, 73, 20191, 3576, 16618, 70, 302, - 69, 392, 8480, 764, 65, 80, 321, 377, 675, 6489, 3059, 84, 80, 85514, 4223, - 69, 12328, 74, 6634, 6358, 17043, 1739, 89, 21807, 641, 67492, 1336, 49595, - 4341, 17008, 710, 9701, 87, 797, 92869, 5418, 2469, 60013, 377, 564, 28679, - 1897, 50139, 418, 87, 11253, 329, 78498, 265, 4239, 377, 2329, 20926, 49463, - 2580, 8212, 283, 3841, 46894, 762, 86, 7047, 462, 80, 47928, 80, 1331, 85, 451, - 20568, 80, 3172, 27093, 43210, 2445, 40078, 1026, 89, 80, 359, 75, 360, 7501, - 5469, 710, 6358, 677, 74, 5469, 80, 5192, 71, 14029, 94055, 325, 8890, 383, - 52333, 85, 2008, 74, 7488, 36415, 12328, 74, 713, 80, 2302, 41622, 11253, 80, - 784, 89, 501, 4937, 21206, 89, 3013, 74, 710, 86, 564, 1065, 81, 27455, 359, - 81, 708, 347, 70, 13183, 2554, 88472, 28679, 80, 1494, 82, 8480, 7439, 86, 652, - 34175, 74, 13423, 39911, 455, 21807, 953, 66, 4223, 14460, 81, 38359, 93000, - 14736, 66, 7759, 68, 3524, 336, 71, 61952, 329, 7369, 28927, 581, 77, 303, 42818, - 331, 14606, 76, 3172, 1026, 11089, 52971, 80, 20824, 73, 16276, 6194, 89, 18577, - 677, 70, 331, 1551, 87, 86, 7387, 773, 37406, 2411, 355, 355, 4912, 85, 80, - 1273, 39911, 31721, 9907, 376, 20191, 406, 2249, 869, 14170, 1336, 2438, 1551, - 1347, 8088, 86626, 80, 2303, 80, 10616, 10776, 65019, 21659, 41196, 333, 989, - 8050, 10011, 581, 509, 67, 3714, 1026, 80, 69, 1804, 75, 13565, 31881, 
89, 352, - 87, 68054, 363, 84, 3178, 2727, 710, 42848, 77827, 2303, 72, 332, 784, 273, - 45015, 5230, 3601, 292, 454, 36722, 566, 343, 318, 1864, 2042, 2554, 333, 44508, - 87, 85, 48029, 20127, 3172, 953, 71, 29684, 1310, 69, 657, 12825, 34175, 329, - 3178, 66, 2465, 80, 34175, 73, 360, 283, 73, 20211, 830, 8088, 85, 80, 17851, - 73, 11377, 1030, 69, 1498, 85, 4086, 8050, 67, 10616, 45283, 86, 5481, 80, 38359, - 22580, 84, 7453, 581, 830, 70, 4168, 675, 89, 70, 1919, 75, 604, 74, 664, 12591, - 41625, 31603, 675, 52926, 35401, 17008, 39152, 581, 30407, 9802, 89, 38977, - 6151, 86, 39749, 52767, 24672, 89, 12082, 73, 88, 4341, 73, 1026, 16400, 12825, - 16373, 564, 4657, 80, 354, 8462, 14029, 2003, 53060, 80, 17708, 1609, 87, 959, - 69, 754, 89, 2957, 544, 19456, 57208, 20211, 9991, 71, 18244, 74, 4355, 33863, - 16400, 81, 10784, 69, 575, 7783, 1804, 89, 4010, 798, 5544, 1273, 69, 19456, - 89, 7047, 4835, 65, 80, 19747, 455, 6194, 1336, 76, 75406, 3055, 587, 70, 80, - 4086, 10776, 85, 11828, 73, 451, 42908, 2249, 73, 50139, 1336, 79, 5735, 566, - 3906, 73, 614, 87, 86, 1302, 352, 70, 80, 302, 83, 14912, 80, 70, 20926, 23866, - 2562, 83, 16468, 277, 10002, 2562, 15930, 73, 6780, 648, 4458, 70678, 22619, - 76, 579, 74, 19072, 84, 566, 89, 784, 12591, 363, 47689, 383, 1263, 86, 58305, - 371, 76, 2200, 2203, 5010, 3919, 67, 13028, 346, 582, 5241, 71, 2042, 4239, - 73, 7783, 1729, 70391, 2249, 15930, 2727, 2005, 4912, 46341, 1216, 75406, 14736, - 69, 332, 18596, 347, 75, 3933, 3055, 27960, 86, 89, 265, 1100, 9810, 1100, 869, - 36238, 1804, 85, 12328, 12825, 80, 32962, 39911, 8385, 80, 93797, 14029, 77827, - 418, 2303, 361, 65019, 954, 86746, 953, 52971, 540, 3716, 363, 89, 4680, 80, - 8385, 318, 544, 5683, 1791, 80, 1498, 79, 316, 10196, 80, 14088, 347, 3696, - 80, 4991, 83, 2174, 50226, 65, 89044, 17692, 7911, 80, 5104, 4381, 752, 32204, - 1331, 66, 3306, 21486, 73, 88, 1320, 75, 20824, 87, 11823, 88, 2701, 5010, 87, - 86, 5192, 32962, 784, 3423, 4055, 1498, 3178, 301, 80, 3716, 1216, 818, 48380, - 8035, 285, 588, 65, 540, 15770, 83, 7387, 785, 86, 3576, 2302, 52097, 7910, - 88, 38359, 45015, 13536, 3059, 66443, 10462, 80, 1498, 21325, 73, 1729, 4777, - 22619, 73, 28990, 89, 29700, 3427, 728, 344, 11805, 267, 86656, 392, 7513, 831, - 7910, 391, 33539, 9117, 300, 4341, 8783, 869, 2203, 2249, 85, 2042, 1631, 37406, - 2329, 70, 80, 3447, 1662, 13223, 57208, 11089, 65019, 73, 10776, 81, 302, 4168, - 1411, 450, 49712, 80, 8050, 3251, 3423, 53016, 70, 447, 86, 2117, 352, 48045, - 17063, 86611, 1310, 89, 587, 652, 32962, 60013, 3933, 14170, 87, 1031, 97523, - 675, 44067, 5104, 77, 45015, 333, 8050, 70, 80, 525, 404, 87, 392, 41796, 69, - 24752, 3696, 441, 30986, 49463, 21436, 67, 7387, 44508, 87, 3055, 675, 14606, - 69, 894, 2715, 84, 83438, 73, 5673, 6358, 354, 2445, 34854, 32547, 73, 23361, - 5308, 4086, 1631, 1551, 68467, 77, 45015, 73, 89, 70, 6733, 447, 333, 3647, - 66, 22119, 535, 73, 86, 1498, 86, 4080, 88, 73822, 365, 41982, 454, 20127, 80, - 1336, 392, 830, 75, 10776, 6723, 80, 74, 2580, 51924, 40261, 2042, 19275, 983, - 86, 869, 82, 62497, 86611, 822, 64, 5171, 73, 6292, 19571, 1302, 27093, 370, - 79, 14773, 447, 61057, 89, 80, 70, 83256, 80, 28546, 89, 80, 85369, 28679, 74, - 3418, 283, 74, 89, 3368, 85, 953, 87, 352, 15618, 23265, 3576, 625, 784, 65, - 318, 1197, 359, 88, 4171, 44567, 71, 46341, 2032, 16373, 354, 88, 78498, 10910, - 3906, 85, 354, 64087, 2445, 85, 17134, 6511, 6375, 89, 19747, 65, 80, 581, 87, - 1289, 31721, 8934, 752, 12591, 11949, 80, 1634, 12082, 333, 89, 33863, 24990, - 
5308, 67, 3841, 12328, 44970, 73, 25457, 85, 764, 8671, 35401, 2067, 73, 42848, - 454, 80, 89, 1347, 4835, 89, 19239, 80, 31695, 511, 8480, 309, 5308, 77, 2329, - 22119, 73, 8890, 1411, 1273, 69, 2302, 61828, 75, 13856, 5230, 33721, 1030, - 327, 20191, 16276, 52926, 24244, 71, 876, 76, 1658, 52971, 74, 402, 69, 418, - 42848, 73, 6358, 38828, 28774, 302, 79566, 49463, 79, 1791, 85, 8998, 52097, - 80, 30506, 564, 89, 641, 375, 84, 2580, 41221, 4991, 36851, 8050, 796, 2332, - 87, 74, 18571, 906, 288, 360, 6634, 85, 80, 73, 9112, 1910, 74, 10056, 86, 85, - 12940, 51622, 1130, 86, 16618, 344, 73, 11823, 351, 3315, 2319, 22580, 37762, - 80, 7521, 81, 376, 79, 24286, 337, 71, 9907, 8480, 94055, 3906, 1395, 80, 69, - 9117, 85, 41796, 18442, 71, 44508, 339, 38359, 33367, 8587, 77, 7501, 5298, - 5241, 5701, 86, 14668, 5455, 45015, 37975, 71, 632, 8890, 27515, 28679, 2580, - 50536, 1347, 6200, 26397, 376, 8462, 73, 998, 295, 346, 88, 20990, 18442, 87, - 1336, 74, 17134, 87, 73, 70, 80, 14170, 87624, 85, 3696, 69, 4239, 86, 85, 40924, - 74, 818, 69, 75406, 24244, 566, 32962, 80, 675, 73, 88, 68054, 2249, 65, 344, - 2465, 652, 3306, 82999, 78, 7453, 363, 27278, 41287, 427, 89, 324, 1331, 484, - 69, 2034, 3418, 652, 32962, 70, 10470, 71, 3906, 28679, 45062, 2465, 20926, - 17008, 876, 1631, 544, 87, 14912, 27093, 18287, 85, 32204, 35734, 69, 1031, - 73, 566, 80, 2676, 72, 5308, 42528, 17008, 3906, 60013, 33367, 28774, 85, 3315, - 64, 316, 89835, 89, 4171, 76, 1551, 20990, 89, 316, 713, 80, 1100, 20824, 19456, - 80, 70, 21620, 77, 8892, 12407, 455, 86, 93000, 34152, 275, 73, 10036, 9490, - 352, 83, 6632, 278, 377, 65, 2554, 337, 309, 3919, 74901, 18442, 22506, 2580, - 7501, 74, 80, 40229, 12825, 86, 5972, 85, 8212, 336, 3576, 6151, 80, 4942, 74, - 87, 80, 25337, 71, 2034, 80, 3933, 5972, 13536, 11285, 38361, 89, 80, 38844, - 89, 2562, 89, 89687, 1551, 14694, 14606, 59524, 1662, 81, 764, 278, 59524, 652, - 3013, 747, 454, 25299, 87, 1551, 2465, 9081, 67800, 73873, 450, 339, 76627, - 53450, 89, 65116, 511, 747, 360, 89, 677, 86, 49568, 6511, 8587, 8587, 5781, - 9081, 89, 4168, 83, 80, 23001, 55282, 831, 10616, 406, 3633, 2249, 18854, 830, - 80, 74, 501, 363, 3696, 1791, 85, 28927, 17207, 73, 16820, 80, 28927, 441, 89, - 15479, 86, 327, 76, 1897, 575, 69243, 4991, 657, 70, 32589, 83079, 36463, 89, - 2249, 677, 86, 1919, 20510, 85, 383, 20103, 352, 73, 85, 8783, 78, 80, 46341, - 6358, 764, 78, 28679, 85, 89, 28774, 69, 33863, 20211, 93141, 2201, 71, 869, - 76, 447, 8311, 1897, 85, 89, 80, 23361, 72801, 4748, 11949, 544, 637, 86746, - 1910, 20211, 6200, 86, 437, 52552, 66, 60933, 1872, 14088, 501, 59524, 80, 70, - 10056, 582, 14170, 69, 20926, 752, 1411, 91943, 14170, 89, 3172, 13423, 59524, - 564, 343, 3172, 67702, 25299, 2034, 73, 1100, 80546, 1658, 265, 773, 65, 16400, - 86, 830, 87, 20926, 509, 44183, 73, 299, 71, 78512, 9112, 1804, 80546, 19747, - 80, 634, 80, 1082, 85, 5972, 13423, 266, 15618, 86, 16400, 64087, 321, 1551, - 277, 80, 3893, 68054, 708, 68, 354, 23361, 65, 18577, 79, 2303, 73, 57208, 1910, - 89, 17043, 1910, 302, 17063, 73, 377, 12407, 84, 7488, 55233, 50798, 398, 73, - 10776, 49595, 80, 3632, 5308, 16276, 50676, 454, 44970, 2329, 22119, 72801, - 2319, 70, 785, 71, 18244, 74, 28679, 87, 1072, 383, 89, 15720, 85, 1286, 8587, - 57208, 27278, 668, 20720, 546, 3647, 73, 361, 9490, 1919, 95328, 85, 10084, - 67, 1026, 20824, 25457, 17008, 85, 5683, 347, 343, 784, 544, 998, 2303, 25453, - 81, 2171, 86, 441, 85, 89, 8910, 30407, 73, 3251, 70, 5469, 73, 11377, 355, - 392, 83, 73, 6194, 87, 80, 604, 
983, 3178, 87, 587, 86, 664, 47689, 269, 75, - 80, 13637, 283, 73, 9701, 73, 9902, 4835, 41796, 4171, 74, 16400, 79939, 45015, - 43505, 80, 17063, 85, 9112, 88, 39911, 5721, 89833, 73, 4748, 85, 5298, 14029, - 376, 13694, 65, 4341, 2067, 76423, 89, 3251, 85407, 418, 9701, 300, 13009, 85, - 40185, 754, 1169, 61057, 9832, 77, 1215, 87, 18571, 89, 22580, 42583, 2329, - 85, 6331, 67, 4223, 69013, 376, 8480, 86, 1331, 56201, 5104, 983, 77, 1517, - 65116, 45228, 983, 77, 337, 80, 81606, 65, 336, 83, 1336, 77, 564, 21068, 80, - 9810, 85, 1336, 2562, 346, 1463, 85, 30407, 1216, 28927, 266, 58153, 7501, 869, - 87, 44508, 94869, 302, 77, 936, 26738, 20103, 50798, 9112, 71, 1725, 4575, 33963, - 2562, 343, 67, 3576, 17692, 76, 16400, 2465, 9907, 283, 27093, 74, 939, 339, - 73, 89, 1725, 31881, 74, 1800, 87, 2203, 70, 3306, 785, 74, 89, 42908, 11032, - 2857, 69, 406, 65019, 73, 613, 427, 62497, 70, 7501, 67, 4010, 89, 8998, 75, - 2857, 84, 1509, 36825, 1631, 88, 85, 14029, 23001, 7737, 80, 5010, 41796, 10176, - 20127, 19239, 74, 2303, 4341, 11823, 329, 77, 1031, 80, 4777, 406, 1677, 87, - 1065, 69, 73, 283, 9907, 89, 21206, 88, 73, 22119, 23866, 80, 3251, 73, 49865, - 652, 728, 55442, 392, 86746, 7737, 85, 49755, 351, 14172, 83470, 3423, 74, 3919, - 23048, 1889, 80, 70, 20553, 89, 3601, 307, 49463, 2067, 509, 53060, 773, 10002, - 86, 32589, 544, 75, 59524, 11823, 486, 88, 73, 1662, 301, 7387, 344, 360, 86, - 24468, 677, 42528, 360, 85, 1310, 70, 74, 2438, 1897, 89, 278, 41796, 3696, - 86746, 22619, 85, 2067, 69, 73, 88, 23361, 84, 8587, 983, 2332, 80, 450, 27093, - 88823, 8783, 73, 47689, 540, 14460, 73, 511, 90906, 69, 1026, 331, 28927, 11949, - 30797, 1791, 76, 28927, 5298, 4777, 258, 76, 4168, 70, 540, 11032, 344, 87, - 80, 301, 1662, 49595, 17134, 87, 80, 11160, 398, 80, 89, 6331, 77, 43686, 51622, - 831, 1247, 88, 8892, 19639, 42426, 87, 6870, 427, 329, 3919, 65, 4080, 24752, - 69, 1804, 3601, 3906, 1216, 23001, 7787, 36851, 20127, 1098, 316, 3433, 51622, - 2445, 70, 5308, 7521, 85, 1029, 11927, 1091, 86, 85, 21659, 11823, 2438, 86, - 561, 80, 10333, 7369, 331, 16276, 45228, 34175, 604, 6358, 17134, 75, 1273, - 85, 5308, 71, 4168, 689, 21206, 2332, 19747, 20127, 10056, 1146, 2933, 49865, - 86, 1029, 12940, 708, 5104, 73, 911, 418, 69, 2411, 88, 5308, 21888, 3906, 70391, - 668, 46790, 80, 581, 18854, 93366, 8050, 544, 25457, 18571, 39152, 86, 1791, - 710, 15444, 80, 4086, 13183, 329, 1631, 80, 22016, 1100, 1302, 9092, 1100, 11823, - 689, 2580, 370, 501, 28927, 10910, 1631, 69, 5308, 69, 575, 7501, 39911, 88, - 376, 2302, 3205, 40617, 450, 67, 33863, 511, 16820, 365, 295, 269, 677, 2329, - 67039, 73, 7141, 4777, 7907, 3178, 58345, 2329, 56871, 273, 73, 20854, 9701, - 48380, 86, 876, 8890, 1551, 89, 325, 23361, 49568, 86, 80, 3172, 1631, 89, 70, - 73, 86, 8480, 89, 5192, 44508, 2392, 11823, 454, 87, 307, 2303, 75, 24990, 1551, - 27455, 7047, 5981, 80, 1100, 869, 71, 67139, 13044, 45015, 3906, 70, 73, 9117, - 88, 8462, 32868, 31695, 86, 80, 24244, 74, 89, 17319, 377, 69, 4511, 89, 5701, - 48380, 876, 92562, 7387, 318, 1494, 87, 17008, 641, 7488, 71, 42510, 4922, 75, - 49537, 8783, 42441, 24672, 66, 375, 79, 302, 42792, 6331, 80, 2302, 86, 637, - 29238, 76736, 541, 1336, 12255, 581, 87, 427, 78498, 53904, 3919, 1347, 7387, - 23361, 8998, 71, 43505, 80, 33495, 2302, 84, 12328, 346, 652, 89, 80, 74, 33863, - 31881, 9117, 773, 13536, 80, 4680, 68, 295, 1725, 648, 71, 57647, 85407, 1609, - 67, 34175, 85, 40924, 8998, 867, 5418, 710, 634, 48372, 1662, 38181, 3013, 17063, - 19239, 73, 4239, 80, 18577, 81, 
7488, 80, 1247, 70, 45015, 269, 86, 11823, 26397, - 89, 7783, 70, 52097, 73, 876, 28990, 89, 329, 77, 80, 376, 74, 5308, 7907, 337, - 2580, 19072, 728, 5972, 71, 402, 28679, 87, 540, 3576, 85514, 13423, 74, 2641, - 3251, 65, 391, 32868, 89, 80, 8998, 23361, 263, 87, 7513, 87, 9092, 4010, 65, - 5192, 266, 67175, 300, 2933, 455, 22580, 38359, 13545, 42908, 80, 85514, 6200, - 773, 1718, 77, 2329, 7521, 3601, 17319, 73, 53289, 491, 55911, 402, 283, 89, - 3933, 8212, 76, 675, 86, 89, 12591, 337, 4458, 347, 76, 23361, 3141, 86, 93471, - 44183, 8587, 77, 36068, 511, 20824, 2192, 38181, 70, 14088, 81, 7501, 5485, - 4922, 73, 70, 26397, 1072, 80, 1662, 708, 2580, 32547, 12183, 80, 20191, 89, - 9117, 2026, 76, 2303, 86, 85, 89, 587, 73, 88, 752, 74, 1739, 318, 87, 25105, - 9786, 89835, 1216, 1729, 87, 307, 28679, 324, 764, 89, 12407, 324, 21659, 67039, - 23866, 73, 418, 336, 53289, 6009, 67, 3423, 4239, 564, 4763, 2689, 28844, 11949, - 4341, 10616, 77, 23866, 1791, 65, 1662, 3628, 278, 89835, 377, 70391, 89, 14244, - 1072, 85, 5010, 303, 1804, 6723, 80, 88, 73, 85, 604, 24752, 46224, 418, 3601, - 771, 10616, 983, 1394, 1631, 83, 302, 38977, 74, 11328, 75, 1498, 89, 24705, - 24244, 2067, 89, 455, 31721, 65, 7910, 86, 316, 86, 80, 17063, 5683, 1031, 27515, - 71, 9117, 77, 1725, 2067, 32837, 2329, 85, 2438, 23001, 1136, 75611, 728, 10647, - 79, 713, 74, 84995, 365, 361, 74452, 65019, 89, 12825, 4168, 74, 32204, 73, - 8311, 1551, 1551, 44970, 1517, 1551, 62369, 89, 28679, 4942, 24823, 78604, 3172, - 8783, 14029, 67, 417, 2067, 80, 73, 13536, 71, 27782, 2808, 38359, 74, 664, - 73, 332, 764, 78604, 2312, 31268, 2739, 85, 360, 2026, 4086, 46640, 28073, 544, - 14878, 14878, 73, 86, 70494, 27455, 8212, 19072, 73, 87, 71, 2249, 69, 4168, - 61057, 277, 4223, 11805, 79, 39911, 3919, 83, 39911, 4341, 29700, 1729, 339, - 86, 876, 44606, 299, 64, 3141, 83, 4942, 10776, 81, 24244, 18442, 9832, 9490, - 80, 22016, 10616, 3315, 316, 8671, 67, 2580, 417, 31881, 15618, 278, 8088, 4168, - 4171, 500, 89, 85, 1026, 14088, 71, 10616, 21807, 10784, 81, 2727, 14878, 52253, - 10616, 87, 13855, 1336, 822, 336, 1247, 58305, 8318, 359, 299, 73, 87, 876, - 38828, 1072, 84159, 10784, 96304, 455, 3906, 80, 876, 76, 8587, 31721, 5701, - 73, 25734, 89, 752, 64, 458, 9907, 48380, 346, 45102, 12591, 62155, 20926, 93797, - 69, 1072, 351, 87, 7141, 288, 73, 8050, 78, 831, 288, 11823, 73, 22580, 3696, - 764, 10470, 86, 21807, 451, 5979, 324, 74, 11377, 86, 7821, 318, 299, 73, 501, - 668, 36851, 351, 89, 80, 1804, 14606, 73, 7387, 89, 80, 21206, 17008, 89687, - 70, 20824, 89835, 71, 1146, 2319, 1725, 22016, 21659, 80, 89, 3368, 316, 347, - 501, 7783, 85, 70678, 894, 32868, 1498, 86, 2249, 8910, 31881, 85, 9813, 375, - 24012, 80, 265, 1026, 21620, 303, 8207, 2641, 1029, 70, 4010, 664, 6194, 365, - 39911, 318, 713, 89, 39911, 3933, 84, 4317, 80, 5010, 31172, 80, 24672, 68, - 4341, 339, 69, 2857, 67, 2453, 24244, 587, 80, 38977, 53835, 31695, 351, 89, - 455, 44067, 86, 5683, 69, 14029, 327, 2868, 69, 1065, 64, 309, 80546, 1791, - 7141, 66, 1631, 21796, 86, 63958, 3172, 2249, 8587, 44183, 1264, 80, 17043, - 50798, 784, 87, 22119, 39911, 10462, 80, 677, 5469, 80, 797, 2739, 7387, 80, - 29684, 71, 2329, 31695, 789, 87, 3447, 87, 1215, 73, 1065, 19555, 89835, 32478, - 86, 80, 3524, 822, 8311, 11823, 1029, 30243, 78498, 1662, 540, 19456, 20553, - 2026, 80, 2438, 83079, 12498, 3696, 10188, 258, 5104, 846, 303, 70, 1919, 21807, - 19639, 35531, 418, 21206, 3055, 4223, 351, 4010, 73, 301, 81, 73, 1517, 911, - 36851, 337, 89, 80, 36238, 285, 46640, 
20824, 1658, 21796, 42050, 2067, 263, - 1658, 261, 641, 34175, 2580, 566, 65116, 27278, 34664, 73, 61057, 80, 9112, - 325, 73, 5861, 2249, 7047, 48480, 10470, 2701, 79, 7513, 87, 398, 71, 2329, - 7521, 29224, 80, 31721, 60933, 80, 581, 1631, 18894, 6375, 89, 85, 564, 86, - 641, 89, 32547, 295, 14736, 8318, 90752, 18244, 87, 71137, 83079, 5298, 641, - 8536, 89, 7501, 2249, 39911, 18894, 36996, 12591, 3172, 69, 488, 1347, 3368, - 12082, 70, 80, 25457, 69, 1551, 544, 56029, 1919, 78, 80, 383, 23913, 402, 299, - 797, 8212, 85, 1157, 7453, 73, 95945, 22473, 85, 8480, 6194, 76, 575, 365, 773, - 91410, 32589, 89, 70, 73, 283, 71830, 20218, 9700, 1725, 344, 89, 2034, 7783, - 2302, 74, 2642, 89, 4657, 773, 8998, 65, 7737, 269, 4575, 75593, 74, 728, 4942, - 1347, 75, 3841, 581, 13536, 60013, 509, 46790, 80, 1073, 79, 60933, 73, 17007, - 372, 89, 1517, 89, 383, 40952, 1002, 316, 87, 32962, 74, 392, 46341, 10616, - 69, 3919, 31764, 4317, 773, 69, 12825, 3576, 2249, 339, 20926, 894, 12183, 89, - 73, 15618, 8207, 76, 588, 7387, 86, 7141, 79, 11089, 8998, 84, 352, 16618, 56871, - 89, 29443, 31764, 906, 73, 70, 44490, 450, 77, 1729, 19260, 675, 39911, 1919, - 7513, 69, 70494, 392, 10616, 32868, 2067, 17553, 61057, 418, 5418, 80, 2784, - 8318, 511, 77, 51622, 87, 14878, 76, 89, 2880, 64087, 9902, 1230, 80, 3576, - 454, 2303, 41129, 3524, 89, 13752, 441, 6358, 73, 10056, 12498, 3251, 32868, - 14736, 566, 70, 80, 4458, 752, 677, 343, 67, 376, 664, 2880, 4777, 85, 10470, - 86, 1100, 89, 9258, 288, 48380, 16400, 668, 8512, 634, 83, 8186, 5682, 43505, - 664, 12082, 540, 85, 26738, 80, 1725, 6009, 3141, 796, 7521, 42930, 47689, 537, - 1302, 12329, 79, 8910, 5010, 27515, 346, 80, 89, 39911, 74, 80, 336, 7910, 80, - 723, 462, 11160, 3919, 1073, 87, 73, 347, 637, 88, 1498, 70, 21486, 1031, 15444, - 73, 21796, 60013, 11949, 89, 3368, 88, 69, 2329, 1551, 349, 18442, 901, 85, - 336, 3251, 582, 2438, 48872, 450, 9081, 303, 1065, 76, 6200, 66, 1273, 581, - 85, 22532, 1347, 76, 55506, 44970, 73, 87, 8050, 52253, 28990, 86, 6358, 48872, - 87, 1739, 74, 89, 80, 44970, 80, 2042, 7501, 72801, 67455, 427, 16017, 85, 324, - 31764, 1729, 78, 380, 8671, 57367, 2319, 82, 1336, 89, 15444, 73, 8376, 2315, - 64, 454, 46640, 69057, 383, 73, 361, 80, 3059, 85, 316, 587, 86, 587, 637, 5308, - 5298, 70, 1394, 6733, 80, 11032, 80, 8998, 8212, 73, 87, 462, 71, 45015, 86, - 89, 8998, 34175, 69, 89, 6200, 14606, 85, 2332, 79, 876, 894, 68566, 8783, 86, - 11345, 65, 1631, 1302, 89, 9081, 8480, 73, 5972, 8587, 80, 2445, 2026, 1017, - 377, 88, 451, 277, 18244, 581, 336, 604, 8311, 85, 7737, 86, 89, 1031, 79, 3415, - 73, 1609, 79, 45015, 17319, 87, 86, 13536, 939, 89, 14088, 77, 1100, 21068, - 85, 89, 93366, 86, 4168, 2933, 73, 21206, 509, 28774, 71, 11949, 566, 6200, - 22619, 38041, 28204, 10647, 96518, 22619, 3326, 67919, 23361, 52971, 292, 65, - 713, 5308, 295, 76, 4867, 1568, 1729, 89, 10188, 88, 936, 300, 336, 3306, 3368, - 1065, 9326, 65, 7387, 2465, 869, 66, 9555, 83079, 1494, 87, 3423, 652, 11805, - 3906, 73, 70, 1910, 87, 10097, 72, 4171, 86, 8892, 24409, 80, 5230, 2554, 73, - 52097, 80, 2026, 38977, 365, 4080, 89, 32868, 16711, 80, 352, 84, 80, 23361, - 74, 80, 89, 66777, 773, 87, 21620, 1551, 34856, 462, 402, 27455, 8480, 55568, - 1919, 67, 39911, 87, 3178, 67665, 80, 3601, 4080, 89, 764, 17704, 80, 324, 74, - 9996, 80, 333, 87, 1919, 8385, 1739, 302, 4922, 14406, 830, 76, 96416, 89, 1910, - 89, 85, 5010, 67800, 45015, 77, 16373, 89, 1804, 76, 14029, 376, 77, 23577, - 4970, 73873, 11823, 1983, 74, 80, 3601, 67, 9092, 385, 22575, 672, 
69057, 13045, - 354, 38361, 8212, 86, 89, 4991, 8088, 71, 7197, 19239, 38041, 64087, 875, 657, - 66, 773, 24244, 74, 4680, 9832, 74, 4890, 85, 332, 19555, 86, 2106, 77, 6151, - 1065, 346, 5308, 969, 5104, 28679, 74, 13536, 85, 1031, 509, 75, 3172, 15802, - 80, 5010, 75, 49450, 3933, 87, 1658, 87, 327, 47171, 21543, 34175, 87, 49358, - 398, 31764, 55233, 392, 44508, 73, 302, 73, 36722, 12940, 71, 73, 7488, 12825, - 89189, 37406, 52971, 3427, 4341, 3141, 876, 70, 5010, 324, 7047, 73, 2438, 3368, - 46224, 762, 385, 73, 11949, 2203, 31764, 66, 46640, 86, 5010, 65, 2580, 89, - 3153, 4664, 78930, 88, 1546, 398, 2034, 86, 5237, 44490, 784, 84, 263, 391, - 2580, 85, 346, 14468, 79, 2067, 85, 42818, 18442, 8385, 677, 1662, 2067, 86, - 383, 6723, 1073, 64145, 1026, 587, 2629, 383, 80, 8671, 40078, 953, 24990, 2026, - 66867, 1568, 66, 1551, 8088, 38041, 74, 764, 70, 32478, 49450, 32204, 15555, - 31146, 15809, 546, 15618, 689, 78498, 5308, 361, 73, 8462, 24738, 266, 26039, - 71, 7910, 80, 875, 53835, 88, 28679, 73640, 17008, 70, 5308, 34146, 71, 710, - 85, 8910, 8311, 2562, 42818, 6511, 796, 22119, 74, 87, 1394, 6200, 70, 46640, - 47154, 89, 6151, 581, 3906, 1273, 10470, 25299, 80, 30407, 299, 6292, 266, 73, - 63885, 1336, 20553, 1631, 86, 372, 752, 41796, 85, 336, 74, 1065, 66, 16499, - 69, 6358, 38385, 10776, 71, 23913, 276, 80, 73, 13855, 77, 764, 68054, 23866, - 796, 80, 1725, 4239, 71, 544, 2480, 89, 657, 86, 80, 78, 343, 73, 2603, 42511, - 69, 69057, 5964, 7369, 8462, 14029, 261, 12989, 11949, 49463, 73, 87, 4010, - 90231, 365, 450, 79, 29238, 85, 427, 82, 65167, 41129, 31019, 68411, 1347, 47689, - 70678, 1551, 87, 22119, 3418, 728, 65, 80, 657, 80, 73, 10820, 1029, 80, 30506, - 42908, 54899, 677, 451, 80, 20990, 85, 10036, 71, 2445, 20962, 708, 69, 764, - 652, 796, 7501, 80, 1498, 78930, 5010, 10196, 80, 1072, 295, 87, 23361, 70, - 45015, 70, 351, 67800, 86626, 66, 634, 1631, 9092, 73, 797, 87, 19866, 12498, - 74, 3172, 70, 2303, 74, 12498, 7737, 65, 3601, 2727, 80, 360, 2034, 32204, 2727, - 65, 1609, 5972, 89, 4680, 5981, 42818, 7501, 1302, 38977, 65019, 88, 797, 80574, - 300, 1658, 73, 4657, 309, 97715, 5192, 6009, 24027, 10333, 84, 80, 14029, 40924, - 2111, 67, 728, 372, 85, 365, 18854, 86, 5289, 276, 9902, 80, 359, 20553, 1804, - 339, 8311, 1065, 370, 869, 73, 10776, 77, 80, 1764, 10333, 76, 1910, 8311, 437, - 80, 4777, 77, 657, 74, 93000, 447, 85514, 6375, 4912, 36851, 42908, 7047, 25299, - 74, 1273, 21436, 76731, 1658, 85, 953, 88, 9700, 5104, 36825, 544, 7513, 73, - 438, 89, 11253, 10784, 73, 2026, 70, 74, 8207, 309, 25772, 4239, 38181, 3251, - 65, 831, 377, 8480, 87, 22873, 83, 73, 94817, 637, 5746, 3423, 953, 65, 427, - 41129, 89, 46224, 73, 1247, 370, 87, 300, 8248, 86, 2249, 8910, 85, 2221, 710, - 303, 1247, 87, 86, 1331, 87, 18577, 85, 26397, 16276, 3458, 80, 2857, 377, 11251, - 89, 7387, 42908, 302, 39911, 77, 385, 39911, 42050, 710, 89, 40078, 89835, 70, - 5289, 344, 11328, 80, 1339, 355, 31721, 41523, 344, 2438, 73, 7501, 333, 2203, - 540, 375, 79, 521, 5981, 588, 80, 89, 85, 10796, 87479, 8050, 73, 4239, 9988, - 76, 1631, 81, 8088, 79, 1394, 82, 2912, 70, 73, 52513, 822, 511, 9188, 450, - 10176, 46640, 87, 86, 398, 333, 86, 8385, 80, 300, 1897, 55773, 303, 1072, 2319, - 89, 9996, 664, 31268, 13855, 939, 69, 27457, 39911, 911, 55748, 347, 83, 73, - 86, 581, 6331, 370, 70, 73, 9907, 3105, 752, 78, 70391, 5298, 74, 28679, 65, - 22524, 74, 64087, 80, 78, 3251, 28400, 1116, 80, 74, 796, 24459, 723, 85, 89, - 3418, 344, 27515, 40078, 93797, 85, 1658, 6194, 73, 501, 2933, 71, 95143, 
11206, - 71, 73, 5079, 1073, 74, 3178, 5683, 3431, 81, 40078, 89, 1354, 73, 86, 3013, - 303, 89, 454, 70, 80, 784, 80, 42818, 3368, 71, 7501, 67800, 85, 73445, 73976, - 86, 5650, 6194, 79, 318, 66, 318, 12328, 65, 2200, 13536, 31019, 3059, 689, - 3251, 6511, 80546, 564, 77, 1517, 28774, 4080, 41129, 292, 81, 783, 65, 70678, - 24752, 73, 89, 894, 939, 39152, 566, 85, 77827, 80, 4511, 72, 1130, 5964, 20811, - 263, 74, 339, 8910, 71, 8566, 604, 332, 1320, 16400, 307, 4763, 67665, 69, 73, - 12591, 42050, 19072, 80, 3906, 89, 392, 73, 7488, 89, 1122, 418, 20103, 80, - 56871, 4880, 65, 80, 74, 427, 566, 796, 77, 5701, 80, 74, 27455, 25817, 1551, - 309, 540, 354, 94908, 8998, 4223, 14878, 8050, 23001, 87, 1065, 65, 9907, 1494, - 1910, 581, 85514, 365, 88, 21888, 85, 5298, 998, 402, 9991, 85, 303, 299, 2496, - 45015, 3427, 352, 74, 80, 797, 23707, 87, 13441, 365, 49891, 66, 450, 343, 26397, - 1273, 455, 318, 267, 80, 7047, 1590, 318, 71, 20127, 564, 75, 303, 710, 5469, - 3251, 8207, 318, 75, 10784, 2438, 79, 87348, 4168, 74, 34870, 302, 80, 3458, - 80, 88, 50153, 1233, 86, 344, 23048, 370, 7521, 10002, 1100, 9700, 337, 20553, - 87, 363, 9451, 6194, 73, 63643, 93000, 6511, 9110, 73, 2649, 80, 89, 80, 70, - 9490, 86804, 87, 18577, 74, 3415, 7910, 6388, 1498, 27457, 1073, 70, 80, 69, - 18442, 359, 58153, 73, 9907, 540, 89, 3251, 18458, 365, 5192, 74, 412, 327, - 509, 78, 343, 73, 20127, 18442, 85, 19072, 10616, 391, 85, 80, 11160, 10056, - 911, 24752, 4922, 86, 85, 42908, 1631, 343, 318, 89, 641, 2784, 1729, 1030, - 5972, 15479, 1900, 2933, 43505, 2439, 9509, 728, 3978, 13536, 89, 32868, 6455, - 2249, 8998, 89, 56871, 10784, 784, 5308, 49712, 89, 22524, 52097, 509, 45015, - 8910, 2329, 73, 3418, 677, 4912, 80, 69, 24752, 11377, 10776, 391, 21325, 6511, - 552, 2319, 1658, 49712, 9700, 70, 73, 8998, 80, 285, 365, 64305, 89, 61057, - 1729, 86, 5298, 71, 544, 307, 798, 23913, 70, 2303, 263, 76, 16400, 89, 48380, - 33323, 80, 24244, 48372, 18926, 7910, 3418, 85, 1551, 4341, 677, 44067, 3601, - 3696, 48610, 3427, 89, 710, 10470, 587, 36722, 49463, 45015, 13009, 9700, 79, - 372, 30987, 81964, 21796, 48380, 588, 371, 2201, 3059, 89, 55418, 47154, 73, - 53289, 13722, 3524, 2642, 18854, 1764, 68, 16618, 80, 764, 80, 5241, 1604, 5972, - 88, 352, 1662, 64087, 80, 4991, 10910, 953, 80, 303, 80, 1498, 376, 377, 88, - 6634, 80, 1466, 7453, 94912, 9490, 8462, 15618, 65, 710, 89833, 75, 80, 88, - 437, 73873, 89, 2312, 318, 66, 359, 9832, 3601, 8207, 23361, 74, 418, 67, 34175, - 86, 80, 25457, 66, 333, 85, 3178, 1725, 268, 80, 4484, 604, 84, 4341, 5469, - 89, 78498, 89, 35531, 67800, 4852, 441, 65, 93000, 33721, 318, 74, 604, 36851, - 64145, 93797, 2465, 80, 61057, 302, 3316, 2191, 31019, 19456, 20824, 637, 8826, - 21486, 351, 79, 6414, 3178, 1725, 80, 89, 42818, 80, 404, 1498, 86, 85, 588, - 74, 634, 14088, 87, 23361, 80, 47689, 30506, 1029, 12119, 80381, 5964, 66, 1498, - 73, 6292, 291, 89, 6256, 7835, 1498, 66, 7453, 73, 87, 19239, 582, 5308, 93529, - 1634, 88, 329, 10616, 86746, 20191, 402, 4080, 86, 2005, 6511, 361, 74, 5308, - 450, 66, 8462, 6780, 363, 4719, 85, 9810, 80, 29238, 80, 1910, 13855, 309, 69, - 1498, 9092, 4922, 5418, 830, 79, 1100, 12682, 83, 73, 87, 73, 318, 66, 3423, - 65, 80, 2332, 3906, 370, 710, 327, 28546, 87, 7488, 86, 12940, 773, 6043, 80, - 10847, 84506, 352, 830, 65, 10616, 329, 22119, 4458, 64, 427, 79, 372, 21659, - 31881, 3423, 44970, 1662, 1233, 8510, 31577, 81, 1031, 2308, 87, 318, 12498, - 18299, 88, 869, 4657, 773, 339, 3841, 72, 4912, 3518, 406, 73, 3251, 65019, - 88, 7047, 10630, 
9092, 53060, 9902, 82999, 4625, 309, 38361, 67800, 16848, 70, - 675, 42848, 87, 8671, 28990, 25604, 74470, 1662, 79, 7737, 418, 2690, 12328, - 3893, 24672, 398, 53016, 32759, 3181, 72, 906, 652, 80, 13075, 31019, 48872, - 96304, 5104, 637, 4381, 683, 70, 604, 1073, 625, 74, 58598, 28990, 418, 501, - 80, 1216, 53016, 87, 43723, 66, 352, 85, 12940, 747, 404, 2026, 14170, 80, 8088, - 1031, 967, 86, 911, 45228, 73873, 88, 1331, 581, 906, 85, 70391, 17008, 80, - 275, 22619, 2739, 85, 16400, 4317, 80, 509, 372, 343, 21659, 2176, 80, 333, - 1065, 9780, 73, 375, 20372, 1677, 71, 10056, 9902, 89, 5697, 336, 675, 71, 641, - 29684, 1030, 39152, 911, 19747, 80, 32204, 1662, 1065, 4484, 89388, 1609, 3647, - 94869, 6036, 1764, 15258, 1551, 16373, 5289, 85393, 5469, 2411, 77, 1551, 85, - 47154, 365, 69, 26397, 76, 1026, 5544, 1494, 15618, 52971, 11089, 51622, 70391, - 4991, 83079, 7737, 71, 450, 65, 16276, 84, 5308, 939, 24244, 9289, 89, 19072, - 462, 1729, 11805, 36851, 329, 4010, 307, 333, 1662, 38041, 11160, 1658, 796, - 5237, 45228, 8050, 70, 80, 10776, 9092, 1631, 363, 34175, 2070, 36068, 68054, - 14949, 1031, 3906, 73, 13536, 71, 10470, 83448, 5010, 9092, 77, 7488, 84, 300, - 25770, 26636, 544, 86, 4010, 22473, 8536, 3251, 76, 1286, 261, 17704, 4341, - 70, 27455, 11872, 55568, 351, 74, 10056, 80, 12498, 70, 73, 46790, 566, 377, - 398, 67, 1100, 376, 38361, 69, 24012, 580, 85, 80, 11851, 32589, 377, 22619, - 316, 3576, 652, 70, 2329, 3251, 343, 2319, 82, 441, 36851, 79, 20127, 71, 1609, - 29700, 89, 92216, 87, 398, 1517, 73, 637, 67, 5346, 71, 7501, 7141, 96518, 6098, - 81, 418, 85, 10470, 83, 544, 41796, 89, 89835, 6358, 85, 4777, 564, 35804, 604, - 80, 38181, 302, 95947, 70, 12723, 412, 80546, 395, 1273, 1029, 300, 87, 3368, - 1273, 454, 74, 5972, 74, 1030, 12748, 80, 450, 20127, 5964, 69, 351, 75, 9907, - 9092, 77827, 7063, 84, 2319, 266, 303, 365, 18577, 1073, 675, 3647, 69, 454, - 87, 8587, 73, 85, 3893, 28679, 80, 25299, 1029, 57208, 9092, 9117, 2034, 89, - 2689, 5972, 9991, 79, 12498, 74, 89, 29684, 2302, 29684, 76064, 89, 14694, 288, - 98667, 7501, 80, 39172, 2005, 73, 3479, 14244, 73, 13183, 31019, 344, 360, 22016, - 1936, 9081, 1725, 30506, 11160, 1919, 81, 42908, 80, 89, 4223, 86, 2302, 418, - 3172, 87, 1729, 303, 80, 2329, 566, 85, 1336, 13536, 85, 44970, 9832, 73, 86, - 302, 73, 1220, 18571, 774, 46224, 10784, 5544, 73, 4912, 1371, 12940, 1073, - 73, 12082, 80, 1394, 53060, 437, 6043, 511, 17137, 85, 1065, 88, 4010, 7044, - 32204, 451, 88, 7387, 6511, 1320, 283, 327, 581, 70, 33863, 23913, 89, 20824, - 22016, 1394, 40924, 344, 89, 754, 7453, 73, 85514, 80, 2668, 575, 67, 1725, - 675, 69, 73, 15720, 67782, 418, 69, 41796, 87, 302, 73, 54909, 10616, 2312, - 74901, 74, 36238, 2580, 18854, 2042, 89, 399, 89, 31268, 74, 45015, 38041, 5469, - 12498, 13391, 14029, 652, 66565, 3178, 35949, 1631, 4341, 5469, 49938, 65, 1897, - 295, 2111, 344, 47154, 73, 2580, 73, 65019, 10333, 8462, 78, 1065, 6634, 71, - 1804, 267, 66, 40078, 3013, 74, 6331, 355, 1729, 80, 24672, 540, 35531, 2445, - 15008, 89, 5104, 1273, 88, 69, 1494, 1091, 2249, 88, 37975, 2857, 9832, 25299, - 710, 70, 80, 89, 97523, 23866, 261, 89, 70678, 764, 87, 18577, 21486, 8480, - 69468, 3841, 20926, 80, 352, 12498, 73, 42528, 36825, 301, 80, 418, 89, 45788, - 337, 9832, 89, 85, 12498, 76, 1030, 1394, 11377, 73, 869, 728, 5241, 773, 88, - 3340, 85, 1310, 80, 637, 4239, 30407, 3415, 68, 71696, 762, 302, 19239, 89, - 1320, 1394, 764, 93000, 4680, 71, 2249, 69057, 501, 23760, 6200, 91998, 18400, - 85, 20926, 7501, 73976, 79, 46390, 80, 318, 
370, 18577, 343, 1725, 70, 1658, - 4991, 96605, 73, 1029, 9832, 376, 67, 8536, 301, 89, 331, 85, 1347, 1931, 73, - 6632, 301, 278, 46341, 784, 1725, 63643, 60013, 32589, 89, 1310, 89, 24990, - 86, 18854, 4937, 4680, 87, 71, 483, 355, 455, 25299, 664, 67702, 1073, 9195, - 73, 6634, 59524, 74, 80, 2739, 18854, 74, 501, 1331, 86, 26738, 391, 1073, 64145, - 70, 2067, 359, 47875, 3251, 1498, 3933, 6780, 1739, 4912, 4223, 418, 79, 1897, - 664, 65, 10188, 80, 983, 11823, 266, 48320, 5192, 36851, 74, 8311, 876, 15444, - 3906, 87, 2332, 75, 80, 14878, 2465, 4835, 983, 76, 8462, 784, 5469, 575, 3013, - 59524, 375, 12328, 73, 20127, 89, 983, 9112, 713, 324, 86, 11253, 7819, 1092, - 86, 89, 339, 74, 15959, 295, 51924, 1073, 89, 31721, 325, 1331, 309, 85, 12407, - 8671, 59524, 20211, 4484, 998, 283, 8318, 88, 73, 2312, 1036, 21888, 7004, 2026, - 5241, 80, 2332, 36171, 87, 2303, 89, 26929, 1366, 20127, 71, 70678, 1551, 84, - 773, 3806, 21206, 278, 81, 9664, 76, 38359, 85, 1394, 773, 869, 3906, 953, 347, - 76, 1978, 383, 41622, 509, 339, 80, 17156, 266, 9117, 87, 84, 12748, 88467, - 708, 15618, 73, 46341, 447, 48380, 80, 7208, 74, 87, 70, 73, 70, 1290, 88, 324, - 302, 78, 336, 3306, 86, 361, 4922, 89, 73, 764, 307, 31172, 8671, 70, 283, 96518, - 31422, 88, 73, 2159, 332, 2067, 710, 668, 8462, 31764, 70, 3013, 19271, 86, - 657, 79, 316, 74, 447, 263, 44970, 5481, 44490, 17043, 32973, 4168, 2302, 74, - 7501, 9832, 451, 1247, 575, 344, 2042, 1031, 81, 1739, 69417, 273, 11289, 81, - 2445, 307, 28679, 1273, 8385, 648, 78, 511, 2445, 7047, 76188, 441, 2192, 80, - 54803, 9700, 5308, 68054, 71, 398, 3415, 953, 88, 73, 10462, 363, 31172, 370, - 89, 9810, 71, 1517, 52719, 84, 587, 3418, 82, 3423, 1729, 89, 876, 82, 3415, - 73, 85, 2642, 22524, 377, 4942, 73, 564, 9700, 87, 2106, 8318, 88, 80, 22619, - 752, 70, 1394, 75, 3178, 4835, 89, 657, 18571, 1729, 8207, 86, 89, 92216, 3172, - 4942, 3933, 332, 939, 57208, 9991, 363, 19239, 4511, 89, 73, 347, 86, 9081, - 70, 8587, 2580, 80, 325, 9490, 3906, 19747, 5481, 70678, 5544, 67, 34175, 29229, - 13616, 73, 12130, 36851, 85, 80, 36068, 6200, 427, 17704, 80, 3251, 85, 31695, - 402, 3516, 869, 74, 383, 2067, 72801, 79, 1791, 73, 939, 70, 1604, 3368, 86746, - 87, 392, 75, 543, 4835, 88, 80, 4086, 94055, 16400, 71, 75406, 339, 625, 1729, - 31764, 15479, 511, 1551, 5598, 398, 1320, 346, 11377, 437, 901, 85, 80, 38041, - 73, 59448, 4223, 85, 3601, 283, 16820, 41796, 85, 4381, 336, 55498, 283, 22119, - 14878, 44183, 455, 3059, 21538, 16017, 73, 5683, 89, 372, 708, 28927, 9832, - 66, 98667, 752, 80, 69, 89, 1231, 8088, 11222, 1394, 3306, 71, 1517, 911, 85, - 17134, 2302, 85, 818, 7141, 65, 5298, 70, 3601, 7521, 4355, 3223, 84, 347, 70, - 4080, 61057, 46341, 1804, 28927, 46341, 47154, 16400, 8462, 12119, 32547, 1658, - 12591, 20127, 12825, 84, 80, 275, 3172, 10470, 9117, 417, 73, 894, 34967, 80, - 7488, 86, 80, 953, 2312, 17134, 86, 288, 77, 343, 10616, 25105, 29238, 74, 9110, - 39152, 86, 1310, 76, 385, 1610, 74, 89, 9509, 61318, 77, 78498, 73, 88, 5418, - 56178, 89, 9197, 398, 347, 55655, 25299, 5289, 3893, 25105, 675, 43210, 398, - 65, 2329, 376, 2203, 17134, 75, 333, 89, 10910, 1336, 87, 32837, 75406, 80, - 4010, 80, 1026, 75, 3368, 68, 55334, 42818, 80, 89, 1551, 31695, 86, 10784, - 2329, 9701, 544, 81, 339, 20990, 10796, 75, 1094, 14606, 85, 1336, 45228, 3576, - 336, 11949, 391, 86, 8088, 46224, 291, 36851, 424, 73, 86, 3415, 1609, 89, 4168, - 88, 47391, 8587, 28990, 29700, 80, 3055, 402, 14928, 67, 7488, 876, 76, 6780, - 88, 36825, 42908, 2312, 3306, 2739, 80, 25295, 
30506, 501, 21066, 93000, 2562, - 7215, 337, 80, 88, 677, 69, 33863, 80, 16820, 572, 35278, 77, 80, 1065, 64, - 309, 77, 544, 1149, 68054, 784, 2332, 67, 2641, 73, 88, 9117, 87, 4922, 79, - 911, 83, 10616, 11823, 275, 73, 21620, 31268, 70678, 1029, 98496, 12328, 10910, - 79, 416, 85, 89, 664, 26397, 71, 73, 32962, 10056, 85, 17134, 1146, 80, 30463, - 1551, 24990, 70, 41670, 604, 67702, 11032, 7488, 385, 581, 80, 1247, 11823, - 87, 9701, 501, 1146, 352, 76, 44183, 21620, 936, 9700, 76, 1216, 76, 2396, 2249, - 2739, 73873, 87, 18664, 88, 1919, 86, 85, 2042, 5972, 28546, 73, 86, 11089, - 22619, 80, 99161, 1216, 45228, 2438, 70, 511, 79, 391, 25299, 939, 86, 1718, - 80, 70, 1764, 21796, 74, 47154, 67800, 9490, 28679, 67, 365, 406, 73, 43974, - 89, 19239, 652, 71, 73, 89, 3841, 60013, 2249, 69, 5584, 6194, 70, 80, 73, 21807, - 58305, 329, 1029, 74, 7453, 1216, 80, 3141, 97816, 4511, 17134, 60933, 45015, - 74, 5721, 664, 5010, 327, 74, 80, 295, 346, 89, 24527, 1331, 96250, 344, 89, - 80, 8088, 953, 82, 2727, 318, 352, 347, 8207, 73, 88, 8462, 75, 796, 14606, - 1662, 540, 17851, 36825, 1031, 80, 359, 1310, 11823, 566, 588, 73, 5038, 4942, - 49595, 8462, 4942, 32204, 85, 1029, 73, 4484, 309, 9902, 85, 2857, 10784, 82969, - 747, 88, 64087, 302, 86, 4080, 73, 87, 31764, 728, 91295, 95143, 7513, 869, - 359, 67020]], "model": "text-embedding-ada-002", "encoding_format": "base64"}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '45399' - Content-Type: - - application/json - method: POST - uri: https://api.openai.com/v1/embeddings - response: - body: - string: !!binary | - H4sIAAAAAAAAA1R6XROyvJbs/f4Vb723nF2CClnsOwREvkwQEHVq6hQoIqAiHwmQqfnvp/TZNafm - xgtIIQmdXt298l//+Ouvv5usyq/D3//66+9n2Q9//5/vtVs6pH//66//+Mdff/3113/9fv/XyPyV - 5bdb+S5+w383y/ctn/7+11/i/1z5/4P+9dffPK51Zlm5w7mkNi+wXlnNrHNYerPffK4Qe1bI3MOi - 6Oa2C3X4PNM120LZV5MsPW2kif1Edu7uzscyWyXImg93LLn2bIyrW9qg7N0kOGq7VTYt12WAzDg8 - E3PBlt1U3E82IsXZIM5dEHlzXQ81ii0W0PF9LSOuN5CiPksntvGEYzSqjViqj4e1YMbwWXq9J5xN - tFoEB2JK/JIxIZ80YLfwyUjp4W5QvaxH3VN1iLVRrx09zrquVp1AsFAkp2rQiJeC7OMZd69FUHFo - HBGm55ERzxE9Y7x0eopky1jjhm53fGLPtQLz3b1QMRm31SRsdhZyNQNjtTaJMeeofcFhffdpIbIh - o900UPCYVjH8jhNvitmbouydXSjnn7KafdkZ4bHvPYLf8dIrHseugd/8rNwNvFFT/RIJaSRh9dJF - FevM5gz7ZzGzVBEO3UD3+wD69epNdhyzaG78kIIRgkT26+UWjfdVYUHXuwbTjRUx+gopV3RenW40 - iaa4Y+nufkXHSqHM8+cPH8++ZymDOZRs+xDXEeV+eQVpfjpEOzRtNulX+oJ5v6vZVq4HPssX2159 - ulhi2jQssmEc7BRNqPiQqMGWNxrIwNCAtSPG7FYZLRcigGxt1kxHvdx9nDGagZ2Shorn3jCmwZpq - aJDkElK9hGq6+MOIeBrpbPdODbRiO0GAL54Yvqt2JTnsDkjLUo1s9o/amE/B9gXwfN7JqZaNTJp4 - bKKQvyX8ePVON662rYhMnVREZ2LOebB8J6gt5pztZEutBmFDTHhcaotsGs8xBoLUFoyWtmR/6XjF - QprG4DTFkRnuS4/G9ycQ1cg7tmwfBgmfHHYX0PEmjFRab+4ZD7c4hU14rvFKttSOa/rzBSS9bsnh - Ng/VuPaWBZgTw7S9xj5azckN5OZMUrKr5Sobda+qYbdiO7KnNzfiCS8KGIL3DQtZ4XaSrYGLhnVN - 2HmdxsaUXbVa1U+iTwKla7vZsG6hvOAvj2hPOUJDrp1EOGO9Z9viYkfz6rBuQFjjJ9GFdp3NEQqX - ixlEnxHjlkZj2MgxvKrKJFqRxhV3anmUF6ZZUK4KGHGnkgDmTotZlnlbg0OQ6mglxzFzy6D1GuPz - tkDq1zb7rqfHj/1Fg+dCjohX54E3LhPPRif7bdK50KKKP2a4omJ6x3/wzoVlsYQuMHwq3M5rPoXK - 2gXVOIS0Nzuj49q895GFP5TEHHnestTWS9g+Y4Ec9cmsRJTeCvCRajINDR2iSfFcI1F1Cqyk0j5j - +/BsI/nGB1q5ZWRMl+ch/X1vRubr2uBJCgpSLl6Ej0dQO/o2dymkZoEI/sQ6mtx80cIiWx6Jpx/v - xsii2USkNzSy/Y7v5NZzYbWlNXP5uzbG5BgpatKiA71NJ5k/FpeZotSMBMolWzdGCaMW7jmViO99 - SmNQS9KDOo8q84Nsgca6S21Ay3iicgm8+iDT9IFtD5w5N/NZjdfgdgU3tAws5ctLRaVscwVu9zti - a89PNw8brYbsfbmQrWvP3sCV6gXTp5/ocr7bnMufXQJ1KXCK8F7mfLdVcgjfwor2kUa68eztAjRa - y4jZr7YzxvLKbXRqU6DhTRdRJx2DK1ocu4x5+OB4Iv4sl8ruvjgQ7bp5opEzMYD4qrjM8XzBmM/a - 
w1Ky4ZgQ52Q3Hi21tQhyBQ0zCpYaTD4zXbkesEnMaH5x3mXXFL58zyJnZ3j8Zl0EGMd0Q8z7a46G - UBlt1cIdZbo9XI35g+YExpBishMHyvtHf29gEzpvZjDf7OgPL3Xmq8xL6NugTjgDIAgpc7qbhqTm - sFPQqOQGXQPaRNP5ZpnyD0+6w27ZsL/NLvLr3KeX4bDgFL26BNISU7b/4Vd8rmflu//Jt54aY+bs - BWjagFBppBtj0m/lGR6dt2E75eMbo+51LwTYfLHTkDyyWeo+M4z0ZVM+W7ExHtVehzTwZLKX1m9v - XG3LJQjmS8JSPZ+8KTPPI7hb1SO+hp8dbcRWRN4Nd1Rp7HXGr0chh1CeGdkchNJ4+EPeoPXGdNlG - ezce0+atD7/6oGQrXE2va5CgLJszst0wI5oPZtVD9egCQpq6NcZzpShwKhKFuG5dId71YwHmZ3Fm - Ptsb3vDR3i+QpbkgRhV/UK9+1gn68iXl8iNAY7gYYrgqpYgVbxHxZTc/SqjJUacVs4/RUPtSCY3/ - uuJptYiz7/xmVMnZFi+Mh1PVMkpsqNcd/4P3Sa2MAslna2B4ExmoTvU5gPA9KBgkp6jGFndrkI5u - hIUiffEJSx9AxvLUECOTVNSroVbCCbBIaR9uKj69ZwtJziYjmiHORqfL1x7FylHD5f1dd+PeXRQw - nhuPGWur4NP+KiQoFhsNw7wLq/EkRliNJPdItLi8cD4FbgBpH4wEa3sejTdtFKDrZOvLF7eOK5dW - BO2kC199lFRDcT+5kKx0h+ESvSPWHm0KT9G4MMfE947vzGWzbu2pI3r+iL2pC9wzqF1ofvGmG/Ne - O8QovnZ7shUWNzQFxtYH1O5lYvCPXk3ai2NY8NpjyWXaZNMru5bKtjMEDJdjhr76ylp3QeiQXVY6 - xmg9Ugtu/o7R5UdA1ZRl5qxsVYsTO7lsKnGRvxXoZnND17b2yqZnByIaw9uBeP7s8Pn0VlNYBgEw - 7VAjr6cIp+j7fYlvuTof3i5gtGenFT4FhpqNgb3EaNloEztFG4TaVscmLFztxpzHTcz6fDy/YLXt - a3bd+abx0Yh3VpVbeSSeJLXe/OP73/Nd0dSz0dTDP/uJ7Y6LtJqisNHBr/cEL6sPy2j/2Kfo8ra2 - zFrCi491VCbw3T/Ez9ftd33iAi7HcE/8/WPMeCS+NLi8JJtZlyE0ZgXzAlgjV8RhS1ZN82MBgGEO - 8WCkajZrTppCc85PZHOjecQi6SEqS1n8sE0x+tk8aOb442c65vnK4Pf6mCuPy8v6t96s8kMDJx8s - ZjnXpuKKjXs0iGRPF7reRnO6OmP46ge8YKVdrcry6sJXnzOsVHnGn0W6RHR4qFSNfTGbF9FeQZ6X - crqKNhma9cWegnTQMLstRDNiqZ2F8NkNW6ZNE+3GnMUzcPvmMl0Su27apkMAbGHdfvXPm26KacEO - pwmdW00zui8+FfEyDxSpgcXHSxS+lMDpOebOzjC6cZW84GywgUJPzGpKBIsinxaYbevlKxq++EQi - T7KffjbmpBjWoCN/QRzsAqLoVSW//UA2SaYZs1ckNoiGOGJxPa2N4TTQdK3CcoEXLlhZ31+JpVS8 - 3zLbv5nZ/JAfIZzyx4bsOXyqfqcdcjiBL9IFSo1oXA2zAPbgMqK/hNCbF51cooV88MhOdns+dQTX - 4FaNQKyPkHV/9Mnmen0xq6m8aloJEsBLioHs1iZEf/Ss+CgqukjFh0HtKguUynVPWL2wkn/12wv1 - zktmztLL+CBeWgCq7Wy2UdTI48ZnjYEu6plthYWKJnRsElhfBZP5MQzelF+MHIZATFlyjKvshx90 - CIQYL+d7gyi7+6bSOnpDp9VCzOY8CnOw33imPF4tsnnpPlxVNUpCn2ehq7h1zAGcsj6TS6Xp6Hmr - AlNFe00jbhV6Ga+at4h+7+cxue+mvaUnahyKNvGspR+Ni/y9Br26u1SN+DObQ49j+Pofoq+iFnFx - v3fRV7/Q5+xM3nT1mhBYgypaJ30SDYvL3EOBuoT53kf3uKQW9c9fYWHpsmqOI7cHd7vwiFk2WrYs - XKdV5M+nwV35eqBpM2AF6Vt+oup+1Xh8L/c5WK/2gRcg8YqGohdDuU7uTNucCtQ/DVeHmPY6w7dr - 441ejwRoW7EgmxgKNNW+WsIPv8KhEIw2FL0E9uy4Ysaymo15OqIQdmpnEswDzZsITLmK8pOAmbwS - OZfPTIOEBRHxPJIZ88Fga7TeWC5G98bweL7/XJXP00F4+cXTTx///CnZpOOim7H11NF59UqpGNFT - xWhovNQsq3K2a89lNVbKskG3M35jHhlaNh8nC8N3/sw+QFVN2dV+Qbi4M2LuloD+4PlVPUz25cfo - p9eABJ6F5eFwR09hQyzUpYn39aOjN9ZRm8D5OZbEaJlVTe1I/J8/ZJ4aWGiWL5qtjpYYsa8fjz5n - aaKQR/X+D99xeFsaejZejuW7IKJpGaQu+vppslnWdTROj1uAgnvGaXWT3G48megF0gs8ZkdZySc3 - XzVw3W59huGO0LhUT7kizUuLcmdXeePjWDVwu8Y9u4dRxD+PRRrAPEKIZT0tqu6dWD7Yh1DCfNOc - jCYfg1oNjIfMTGdWok8aSSGYTykivlyybLTZZCnivW8ZPl2zbNTd94hkqrdsFxi3bHwMUQ7Sa8DM - pNfum38sYrhkvsuCrFhHM3pVMRxRWxFstoeM37a5q8T1xSC7pU89thy0ECF/MrC4vXTVMPZm/cc/ - b9z70mBHY2vCULYhw2maeuPgtzU6d/ROtsOoZ3NfOhh1dhmx7aGiiGWvfAadnUxislXG6SM4JuAv - jR358edk1pMC/vkYE2ckeTc+hixH0bb9+dclGrXjiNXAoZy5/efZcTJmDVzuMSHeNlLR5LCTAMLa - fzJPPy686WU2JaSBIzOD2cesD7WHryqTazFnb2cdY5fMhMLevPASXWrOg9dO//E1XW/1XTW9Rc2F - VZWeyEYqK4NrxiNVv9+T2bKqV8t75wZwPz8QfUwbiU8+TXO4HnyTed35ns2QGNoff+ofBfxvffr1 - w8SbV17GPWOyIfeWJ2Lr2qXi2Tt8oXbvDsQJu9bjemifUVQlN7br48KgX/5Tzvfnm+mw3xkSU5se - kiX3aZMcw2gePgFF7BY8md2/mm7oiraH9RYTtrujFZoyIwa0VjyVmblXRn0ptgLSLT0mrnwPs7ka - gytsanwl+x8fSWrxUm8ntSVWwvVq9fWb8k3bS8xe50bUU2SdQTxmHZ6K/uZNxVofQexez5/+ifoY - +ynqTOfAtsqLeVPWFwr6+hNifhZ7Po7zdIZccmxSdNLBaLP4DsjbGRPBsk2zbmZODfrnsiN+erp5 - 3/cX0HYvbsk9utTZhP1cQyf9syOb+8FH43utJuv11ieYhSfDWxq7g6VOGwC69m91xu/PQwBPWU2Y - 
tRaP3ie2sxTw8nkm/uo18V64nHWonVZn33wj4ununqPXQXDYdrkvMl4duAiO+37SxVzsMp7uTlfE - Pm5B7HQOslHV+jOwjUfpZOsmHz/2GaMv3qlssx7xYNXZsJ/OA9m1Mc+aH57P9/ebLneyjJjKLgnU - 24OG10JyiIbNYK3hsace/chO2s2vY1eihavfvvlOmdEIpSJ4kf0hhqosvni1XXguUER++KNgvtYo - toQ9baM06Jhxjk2k3j2fedHYZKPSSksoGM9ooz0/Vfm+Iu3np5jVQ5qNOkf9z78R3eF9xZ87htGv - Hm1SkfIezfsEHiDIzFxLu0w8V7OClm4iEP8apd04B6IA3+/D9iR5Z81PnxNcu1RI09QYH/2pAU9R - cjyqT8VgTj2NcNOIhIdLx7ve67kA3zyCOOf3uxsSAffout35GK1iJ5rXWlxAPhkGXjrXpusgCDV0 - 2FoXKm1YlTFXlxKEi/UZM5BLPry/keUP//o3z5kWU1ugjYyudHHfM8RNawWg32aH7fbOgH71DPJb - Q5i+TWg1hMraRgJuV2w3JJuvP1oLIAT5iD8289HUrcMSZGksiNYmEx/sjxkodrZTvn7DiuYdRzXo - i3pD9Hnpc/apnz5c7gkhbnJm2RQeah8xSc/+5FOzjQwL/uz/uG47iq2nBt/9QdfJ5VH96jU6YJQz - 77bTu+VK9kHRlMv0yw/RnBLNAlSXMvFyp0L9PYccvs+nM2EOQvuPb0NhBxXZnu69Nyx0fY0OySem - q2614dImSRX0psoH52nuZKO1xiYs+bpnm3WxQeJUXkzg+ugyTf042XRKlBSOvrv+6tHZ+NVTmEch - JBrpezT1i1aD9C1TYhRMMYY7W10B7XUNw/Xk8DkKsQIEn/ZUmU2pmx09bdBSfvcYYth7Xz0Z//Ie - LEaz9W8//9X/1N1ujhHvr4GvXr3AIjfhIKGzs9JeylA23/qUOxGvL14MeL5FlN8mFFHyrGMQ98qM - +TXxMv5cYRG9a/GN1x4s+dhfiQnu4oDodJv3FaebKkCLR5VQvqvjqOm3mfXbz1T56s3p4j9HaNiC - MFyvajSd880a/erBKa7bqr/n4lX5+W3SFAof7Na2ALD1IpZRdN4wP1YCnNXyRLaaL6KhX5S62h/3 - K6aLFBlTtXRywIb/JEbBP9E7tbMADsFwItagOd0YOLOufPM5Kn79+bTtti/l6CHEDLyYvF++DC1k - GU70jcfp69gVyD1IFjOSdECsF20N1YEssF0txd5k7F0TlOcG//LqaHp2oggbajwISceJz1HVaco+ - uqnffP0ezQu4iOBc25Rsd7isppvif9cvQnRh3NJsDGbdhvd4exJ7Lp9cerzLXB1bYjEDaaMxppdX - iFy5Rn/8sZgMBlW/foicnzuoeH3elVCplUusK9O8poAVwCGAmAo6jfjqm2fLjpCHbDNtt97orOwX - 5NEp+KMnx3YJmvytb/QlK89uBpOu4T32JXGjdOwmBrWCNm5DMVrzBn3zFwGG5hSyrW+San4NWYzk - g8yJZh6CaP4gJUGqEYW/+p799APC+aqks8EXiB/jB1a//Ej0eVa77/yCP/npthNr3gSz6/74hVbD - sM3GZWK4MLGG/um3DLcqsFRiN3uWf/UV3ytFqH7zIlq/N8yYv3kgWphWQcxZdrh4QS1Fcfj0mM3r - nTcw6BVwG01j200XG/1jiK7Qxp8X0faen/WDqFC4rD8a2We4NHpap2eQCD4SM/f0TFqSJFE+K3+k - 4oJv+SSOuYCmjQDM+fIdpbtriPYbWWekKVI0G3Q4K835esISJ49qZN1HUf7kU0UqdkyXr1QxQduz - 0JL2nN/yzaj+9MC2OlmIbl9Bqpb2UsSz0J6zofbVAjV+fSVHLTANfj0u81/egR/RbZNJP783at2F - SlD23ayKVoFwO9y+fif0uteLFkj5nAMqxDmgofmgQsklz/7lnd702VRnOLVnYKZ4drLR+DATts9E - wHP90PjXn/hoPGcYT6JhoFExPgXMUXgnREqXaLbL+xXmepWy7XqziPhl7NI//Qv9/HkZc9wqJUyf - u0gnTzhmrGreS9S9sEUFfa4MfuyOPrzrd4tlEy8qHt1mXT1kWsIueWXyadVsBGS0fUuc7Pkyhv1N - cUE8Ng3D+mwYqxttLegH/4OX96Co/uRJrrbB7E/9DRfPBM671fzL+6vPdz3hXT9btjktR4PTZlHD - MvFlcrcy2Rvnph5hcfxkRFdJk3H64emfftVPz4vJMVOgiN2colWQfvPeWoOH0TrEgnUdzfgetGqj - lykxX+qC0/3idoXLw7WJLzlFx/eHawLeJp7o5anM2ZtN+hoUhG2qVl1htGIo2EqQRZR2j2LK/vDN - t/9Kh68eZYN8WsNnJ+jErno/G0IZYvTt9+GFEM98JJe8AFtgLeVJOvD25y/mp3X6o/f/5NtfvqXr - XWUbvXnBI8hUa//wQ/PtP4L6Qjp9bw6RN6ZpaaHVkF/Y9ix03RyoVxcc4RqSg1tto5meoIZ4DARy - DhQ9EwNb8OFCtiHbfOvFJBVzDuna7YiT36VuyLW7iG61VeJlwvVOfLmHEmjVr5nfjltvONHWB6cp - j0SfxmvWf/MJdaO4l3/rXxTXhfKZAk42fsarsfTuGnrs7xjL7SnIpmt5DcG92lucuKt1N1KEz8it - WoERKbc86bPpzmg17K9fvvhwqjlhCurD16m8OvRR8z58dKCu5xJ3soyKf+oBw+tz31B+k9xKggx0 - NBfsSKE6NtncB08L0iThdA4/l2hkkWL96Y/99NhXX5XyWryqbK+fr0a/rzob7dxNjOd5vlXT2xUx - /PpL9Syl1SzptyWKN1rAAn0OOO+M+gV//04F/Pc//vrrP38nDF7NLX9+DwYM+TT883+OCvwzvaX/ - FMXlP9nyz0kE2qdF/ve//n0I4e9P17w+w/8dmjp/93//6y+QVOnPgYO/h2ZIn//7zj++//jf//h/ - AAAA//8DADtB0HjnIAAA - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7c09bf85ef231c7e-AMS - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Mon, 01 May 2023 17:29:42 GMT - Server: - - cloudflare - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400, h3-29=":443"; ma=86400 - openai-organization: - - user-kd1j0bcill5flig1m29wdaof - openai-processing-ms: - - '406' - openai-version: - - 
'2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3000' - x-ratelimit-remaining-requests: - - '2999' - x-ratelimit-reset-requests: - - 20ms - x-request-id: - - 3c27a4a0263197ea84526b8aa83799d6 - status: - code: 200 - message: OK -- request: - body: '{"input": [[359, 74901, 1146, 437, 11949, 3933, 74901, 728, 86, 511, 73, - 5241, 65116, 4991, 375, 10333, 20191, 89, 383, 22650, 1551, 327, 1810, 1804, - 80, 347, 11089, 3013, 752, 1100, 73976, 303, 32547, 73, 3696, 3906, 77, 969, - 455, 62615, 6581, 66, 501, 67, 2649, 12825, 8536, 89, 2067, 85, 80, 70, 74, - 755, 74, 352, 73, 4086, 3178, 87, 80, 35401, 376, 15618, 655, 13423, 33863, - 3933, 911, 77, 32204, 2689, 454, 29200, 2176, 1739, 65, 8998, 38844, 4223, 953, - 14088, 21486, 1316, 12328, 1725, 21486, 24752, 86, 1394, 5308, 321, 31019, 89, - 80, 73, 5981, 88, 69, 2249, 19072, 23576, 9110, 47154, 876, 73, 2319, 1029, - 6292, 69057, 2857, 20191, 2329, 80, 6455, 46640, 351, 89, 5289, 876, 77584, - 73, 2026, 84, 361, 23361, 87, 85, 4680, 85, 80, 13009, 86, 1725, 73, 641, 80, - 45015, 85, 447, 1072, 86, 22374, 14029, 83, 28679, 73, 70, 5298, 7369, 79, 544, - 12591, 327, 9701, 80, 2465, 84, 455, 11949, 7881, 74, 4912, 44508, 80, 5298, - 17553, 31268, 85, 604, 77, 11032, 6331, 9490, 45444, 450, 6236, 1919, 67, 20990, - 34175, 1804, 73, 41101, 80, 277, 73, 89, 664, 723, 49755, 80, 2191, 28531, 8587, - 15555, 79, 675, 42818, 375, 3601, 82, 1334, 76, 89, 511, 87, 73, 318, 4080, - 85, 1146, 80, 983, 796, 12825, 67, 4086, 78, 336, 1897, 80, 70, 11032, 7501, - 85, 12825, 651, 79, 16017, 80, 268, 69, 4021, 15817, 3757, 16400, 652, 28679, - 5964, 73, 17063, 80, 9907, 67039, 848, 85, 5308, 76, 657, 65, 1216, 88, 40924, - 11949, 76, 377, 79, 1091, 258, 343, 4080, 13752, 70, 27515, 265, 1273, 85, 89, - 44970, 71, 708, 582, 637, 1395, 1662, 83, 27457, 5308, 8942, 1498, 360, 89, - 10002, 67, 4317, 269, 587, 86, 13616, 31268, 73873, 299, 80, 73, 14088, 455, - 8480, 1609, 372, 53060, 48380, 17008, 48380, 35401, 40078, 45854, 652, 80, 21796, - 1551, 89, 9832, 87, 74, 1029, 307, 61057, 85, 12498, 648, 541, 267, 89, 70, - 24752, 80, 18577, 43210, 74, 370, 80, 830, 4341, 87, 8890, 4890, 31218, 641, - 82, 6331, 85, 80, 14170, 71, 325, 48186, 16400, 68054, 346, 9991, 78930, 939, - 1310, 4239, 93000, 73, 2067, 86, 11032, 43210, 8311, 80, 38768, 86, 1494, 2312, - 92216, 41670, 13028, 48802, 89, 7369, 69, 2005, 73, 7783, 953, 85, 49450, 73, - 1029, 402, 84274, 1026, 76, 89, 85, 664, 8088, 75163, 376, 20127, 80, 10470, - 406, 13183, 10616, 36722, 511, 1394, 87, 60933, 1031, 76, 1540, 5308, 66, 939, - 634, 4991, 29682, 351, 332, 4086, 19639, 15618, 16017, 49450, 73, 87, 20191, - 10176, 80, 32204, 32589, 1116, 355, 36238, 10470, 74, 2312, 76, 60013, 375, - 71, 55506, 87, 8671, 89, 11089, 8910, 74, 9326, 3933, 23048, 86, 441, 718, 7047, - 75, 398, 455, 73, 85, 20211, 343, 1565, 2042, 14088, 69516, 1517, 82, 5697, - 80, 2554, 88, 2005, 65736, 17043, 677, 19747, 28927, 69, 332, 70, 17207, 80, - 74, 1729, 2249, 82, 911, 71, 983, 664, 80, 301, 450, 83, 2465, 3423, 307, 84, - 4317, 80, 88, 275, 3172, 1800, 69, 352, 762, 36255, 953, 3696, 3919, 10470, - 64087, 46640, 73, 2329, 81, 8050, 13009, 20191, 71, 73, 365, 23797, 14088, 78604, - 3055, 8783, 85, 12953, 53060, 398, 79, 391, 22619, 540, 6043, 73, 85, 664, 77, - 2235, 86, 344, 79, 708, 89, 406, 73, 360, 2933, 41287, 9092, 88, 2026, 344, - 89, 56099, 2201, 1073, 359, 65, 35401, 86, 1897, 78989, 2130, 89, 43312, 67702, - 3906, 86611, 89, 894, 17692, 85, 30407, 86, 
18854, 74, 710, 65, 93000, 13536, - 588, 764, 69722, 11949, 22650, 86, 501, 24244, 637, 418, 5298, 89, 450, 88593, - 919, 10036, 4341, 8480, 4341, 9907, 418, 87, 86, 80, 16276, 5104, 18854, 11328, - 14878, 80, 269, 5298, 84, 80, 68412, 89, 4777, 70114, 575, 67, 4355, 351, 32868, - 1146, 4341, 1331, 70, 5298, 77, 1498, 87, 2302, 18577, 998, 87, 664, 85, 1347, - 1729, 9700, 708, 11949, 73, 2302, 74, 89, 73, 906, 1220, 21620, 80, 69, 40078, - 5972, 372, 16916, 1215, 93797, 22016, 78772, 5289, 65, 7369, 12328, 9092, 67, - 2302, 70, 7387, 80, 81706, 444, 647, 3418, 80, 73, 1073, 28679, 77, 4223, 894, - 2701, 70, 6511, 10200, 4511, 77, 333, 27455, 509, 3368, 74, 17008, 93000, 18571, - 19747, 87, 85, 5972, 84, 70014, 24752, 876, 28927, 18287, 318, 27515, 14029, - 80, 30407, 1394, 82, 441, 266, 76, 45854, 1498, 582, 77, 24752, 74, 17134, 66, - 5981, 67, 1100, 2562, 325, 86, 295, 55655, 89, 1331, 85, 1302, 80, 1216, 32868, - 324, 35940, 1331, 9907, 2562, 9081, 4336, 83256, 46341, 303, 85, 2332, 70, 55418, - 1030, 370, 73, 3368, 5241, 263, 29238, 8998, 23866, 2641, 80, 764, 5289, 15306, - 1662, 28679, 77827, 8303, 20824, 35401, 23361, 9907, 85, 354, 86, 48738, 6993, - 80, 1026, 1146, 1910, 12498, 80, 8212, 12825, 65, 391, 22119, 87, 22873, 8088, - 24244, 915, 81, 11927, 6511, 307, 509, 52926, 1546, 4239, 47689, 27093, 28927, - 24037, 1546, 81, 4223, 1320, 5673, 283, 69, 418, 87, 71, 455, 85, 4080, 80, - 16618, 302, 911, 75, 2203, 48380, 80, 998, 67466, 14029, 1302, 72, 318, 51788, - 84, 1813, 89, 31764, 32204, 1302, 3601, 44970, 80, 22119, 71, 29684, 4942, 1094, - 318, 31722, 15008, 427, 79, 3178, 25506, 84, 355, 9092, 81, 4991, 55427, 73, - 7005, 7141, 49595, 3427, 87, 332, 86, 80, 831, 71, 9701, 76, 44183, 295, 277, - 363, 64087, 327, 53016, 85, 392, 24672, 8892, 3223, 83, 361, 89573, 69, 1029, - 82, 12940, 352, 6200, 73, 42848, 5469, 31695, 275, 64003, 581, 2739, 710, 360, - 40078, 581, 21659, 29238, 29700, 74, 77827, 12591, 79, 309, 587, 728, 9326, - 8934, 53835, 325, 3059, 450, 83, 80, 8783, 402, 79, 10616, 363, 80, 11949, 1631, - 352, 383, 21206, 1546, 35531, 74, 42818, 1029, 43505, 1791, 6511, 70678, 4922, - 564, 56749, 89, 11328, 404, 3306, 76, 89, 657, 462, 447, 79, 447, 87, 86611, - 8088, 71, 10036, 675, 27093, 84, 49450, 6331, 80, 69, 89, 70, 10616, 352, 354, - 324, 89, 18577, 1302, 337, 2303, 36722, 1517, 15930, 69, 8510, 18854, 67, 50798, - 1804, 77, 24752, 18571, 15720, 675, 1546, 71, 4912, 346, 28990, 24752, 544, - 36238, 87, 2034, 6583, 276, 69, 73, 88, 303, 69, 5701, 2642, 25947, 377, 511, - 77, 9117, 89, 3306, 65, 60933, 73, 31268, 39342, 511, 5701, 84, 360, 29229, - 72, 324, 12825, 4937, 65, 2580, 16618, 87, 71, 25013, 2642, 398, 19072, 50798, - 10056, 86, 85, 10056, 6292, 72, 540, 83, 3906, 266, 13009, 634, 3172, 31881, - 347, 8480, 46224, 4484, 8783, 2130, 74, 911, 9988, 67, 1331, 1910, 22016, 29238, - 4239, 47689, 38844, 22650, 89, 544, 22619, 71, 4680, 44067, 822, 88, 85, 2438, - 86, 3647, 4680, 39911, 11328, 27093, 69, 318, 318, 1994, 7141, 876, 7513, 21659, - 2319, 2076, 11328, 2298, 4223, 784, 85, 89, 80, 11823, 418, 1065, 300, 55233, - 441, 80, 74, 87, 24752, 44970, 67039, 89, 81, 73, 267, 23913, 1007, 9700, 65, - 27093, 28927, 77827, 3576, 39172, 1146, 953, 75721, 86, 455, 87, 86, 18854, - 80, 8318, 540, 5418, 327, 321, 21796, 24752, 1885, 7737, 324, 66, 566, 84, 1739, - 1739, 22780, 16276, 84, 80, 285, 894, 86, 830, 42848, 418, 23913, 28073, 20811, - 86, 344, 3647, 86, 406, 2332, 70, 8910, 1551, 2308, 339, 14088, 69, 7488, 72, - 12583, 316, 15618, 65, 20926, 11032, 71, 
12412, 50536, 44067, 13855, 728, 278, - 876, 581, 23913, 2727, 44970, 447, 8480, 540, 7047, 5132, 4341, 333, 27093, - 1320, 72801, 31721, 78930, 462, 309, 23576, 19639, 59270, 32962, 437, 648, 1247, - 1919, 9902, 1568, 4484, 983, 24752, 87, 418, 540, 89, 80, 34175, 14878, 12940, - 2201, 4748, 375, 65, 1919, 65023, 564, 43518, 1100, 89, 85, 10776, 8839, 72540, - 12498, 4215, 713, 36726, 73, 87, 10616, 77, 10616, 45015, 359, 65, 5298, 343, - 92216, 87, 85, 344, 86, 22650, 953, 23913, 70, 31764, 64305, 75, 876, 70, 31695, - 318, 876, 85, 28679, 31268, 52971, 78498, 12940, 73873, 6194, 77, 3601, 84, - 2249, 75, 1551, 88, 38977, 3696, 936, 80, 70, 20975, 5010, 9081, 80, 764, 12184, - 333, 637, 18854, 87, 7521, 3919, 634, 78, 1540, 89, 1662, 83, 39911, 8088, 4317, - 329, 70, 3527, 40924, 74, 894, 1065, 18577, 74, 2641, 80, 336, 83, 1662, 59524, - 5104, 1517, 70, 2554, 4168, 85, 39911, 73, 796, 37406, 89, 11099, 31172, 80, - 17134, 84, 2005, 71, 28774, 2034, 15809, 1216, 69, 26397, 1100, 588, 301, 4777, - 831, 2985, 1029, 94908, 9081, 1609, 8050, 73873, 20926, 275, 9832, 64519, 86611, - 2933, 73, 1394, 7387, 818, 70678, 89, 1739, 21486, 73, 540, 86, 2067, 3306, - 16276, 7369, 43813, 4341, 641, 588, 35531, 74, 6358, 84, 427, 40078, 94346, - 8207, 10169, 89, 31695, 13223, 333, 355, 9110, 81656, 399, 79, 7907, 4021, 33721, - 8942, 52971, 1394, 80, 325, 85, 796, 14736, 637, 6098, 40101, 74, 383, 441, - 80, 17008, 437, 85, 22619, 80, 398, 87, 80, 73, 88, 5544, 8892, 24244, 652, - 89, 23048, 80, 6200, 65, 3841, 43189, 1029, 9991, 303, 1026, 81, 302, 69, 418, - 87, 566, 784, 9902, 370, 76, 32589, 1302, 48380, 501, 2192, 80, 60013, 2034, - 1065, 41922, 3251, 9490, 1320, 69, 451, 321, 1336, 361, 29700, 16400, 77, 8671, - 5544, 657, 81, 73, 911, 2312, 454, 1347, 13855, 2562, 80, 1310, 68054, 2642, - 4942, 8207, 86, 331, 67702, 6034, 92216, 14170, 58781, 1609, 19239, 74, 50798, - 377, 1310, 89, 564, 8050, 1919, 2857, 6043, 39911, 77, 9117, 1347, 76, 751, - 41129, 89, 8212, 68054, 35531, 70678, 2857, 80, 9902, 67800, 89, 406, 17851, - 89, 3013, 72, 360, 24752, 9902, 4341, 88, 830, 4385, 72, 1094, 1108, 437, 7217, - 70, 3933, 88, 80, 2580, 9509, 73, 29700, 641, 894, 5964, 89, 85, 894, 540, 8783, - 9490, 67039, 363, 81, 6511, 83, 80, 20811, 392, 3315, 2303, 2642, 70, 3306, - 2076, 76, 89, 30142, 38041, 4937, 11253, 73, 4086, 581, 8376, 982, 402, 2319, - 20306, 23457, 89, 44490, 675, 383, 22619, 80, 70, 80, 69, 3415, 68, 8635, 31172, - 86, 11032, 447, 67, 4835, 65, 26397, 1962, 65, 5469, 343, 1029, 66, 1677, 87, - 318, 86, 85, 1072, 39152, 1302, 1336, 80, 12591, 321, 7521, 42488, 87, 1546, - 17082, 17851, 418, 5972, 23361, 69, 38253, 66, 60013, 680, 73, 34485, 989, 18244, - 87, 398, 5298, 327, 1072, 3145, 355, 3178, 1739, 532, 20926, 81964, 22619, 32478, - 73, 30275, 392, 10616, 87, 73, 86, 89, 21206, 80, 943, 80, 49938, 3576, 8376, - 7717, 10784, 1105, 5481, 1247, 494, 86746, 9996, 55767, 17704, 75406, 28990, - 77, 62604, 5481, 13028, 710, 1273, 53835, 77, 39911, 6375, 15618, 919, 307, - 773, 85, 24752, 87, 5104, 454, 89, 35531, 8934, 13855, 451, 71, 2642, 347, 3178, - 261, 87, 371, 2727, 402, 67020, 8462, 1100, 85, 360, 376, 40924, 84, 20127, - 28679, 26047, 39152, 23866, 10616, 44183, 28927, 32589, 5104, 69, 1146, 45015, - 351, 13183, 708, 1247, 85, 1310, 98714, 2191, 69, 12407, 359, 544, 80, 74, 6870, - 84, 89835, 70, 8890, 352, 74901, 4385, 80, 3306, 86, 347, 86, 18244, 98493, - 3601, 316, 89, 501, 22650, 1273, 76, 24990, 511, 911, 77, 5544, 406, 32204, - 10616, 70, 376, 4744, 85, 3576, 84, 392, 71, 3178, 
81, 20127, 3893, 80, 70, - 344, 87020, 303, 65, 66362, 41933, 45015, 71, 9110, 86, 383, 764, 75406, 2496, - 23913, 283, 80, 50139, 80, 56414, 581, 1302, 24990, 80, 7521, 77, 8480, 391, - 3315, 23866, 24752, 1347, 67665, 73, 18442, 85, 501, 74, 2641, 48836, 19260, - 588, 1662, 77827, 9451, 86, 540, 2319, 31881, 73, 6634, 564, 7513, 511, 1073, - 276, 79, 2933, 80, 89, 4171, 16276, 73, 3841, 89, 4937, 80, 7521, 566, 9794, - 6194, 80, 73, 10776, 14694, 84, 710, 10470, 67, 1302, 360, 73, 27093, 9490, - 1146, 36068, 9490, 76, 8671, 70, 60083, 333, 3055, 10333, 797, 12883, 73, 89, - 3013, 87, 21557, 80, 18577, 70, 1609, 14088, 75, 1791, 283, 73, 519, 11032, - 1336, 723, 51998, 20127, 53289, 88, 402, 677, 418, 588, 85, 325, 12498, 96518, - 4239, 6358, 4680, 87, 6133, 73, 86, 8376, 774, 303, 1546, 21807, 8637, 4680, - 450, 447, 20127, 86, 47154, 11089, 74, 11949, 5979, 316, 73, 4086, 28774, 70, - 21796, 80, 77152, 3178, 9902, 7369, 83, 73, 2562, 26738, 86, 11160, 455, 93797, - 8839, 1923, 85, 4381, 80, 5964, 347, 869, 85, 20926, 85, 1394, 343, 86, 29700, - 6194, 21206, 267, 1031, 14736, 67, 906, 80, 49755, 20127, 604, 1347, 277, 24752, - 74, 4239, 454, 80, 1498, 73, 9112, 35401, 1302, 73873, 8910, 2026, 4086, 87, - 86, 4341, 36238, 73, 89, 337, 266, 894, 20975, 3013, 89, 80, 61057, 46640, 318, - 1546, 32962, 822, 261, 952, 283, 88, 70828, 85, 7047, 93797, 5531, 1146, 1656, - 89, 70, 89835, 2034, 641, 4341, 3423, 85, 1031, 10616, 70, 713, 66, 4744, 94356, - 28400, 85, 6292, 80, 295, 7737, 81, 45015, 1073, 86, 455, 2319, 1609, 65, 2329, - 552, 7513, 1791, 69, 73, 1216, 13423, 87, 70, 19456, 1336, 352, 13009, 89, 5010, - 78930, 86, 13584, 604, 41196, 47154, 21659, 82969, 5893, 21796, 89, 80, 1494, - 31019, 87, 19639, 87, 73, 16499, 73, 10470, 894, 2580, 20211, 454, 9117, 324, - 80, 73, 2005, 337, 5308, 258, 76, 89, 3933, 8207, 80, 2445, 77, 80, 8318, 352, - 80, 652, 11328, 35531, 80, 7369, 8671, 6194, 89, 275, 4168, 69, 89, 398, 343, - 75, 4341, 752, 301, 94055, 10462, 46640, 267, 51622, 360, 96518, 22119, 73, - 86, 677, 2329, 12940, 86, 12806, 7783, 49450, 18287, 74, 1631, 352, 86, 6194, - 3340, 75, 10616, 302, 73, 376, 31172, 9832, 2642, 22016, 5289, 31172, 56266, - 14170, 81, 1919, 87, 1026, 80, 2642, 267, 1494, 447, 85, 1813, 3601, 80, 8910, - 76, 1072, 64, 37135, 391, 10776, 1073, 21301, 45228, 85, 16400, 74, 939, 70, - 1100, 96518, 299, 80, 441, 5481, 94908, 18287, 939, 78, 15769, 79, 5308, 14878, - 86, 8318, 23913, 398, 327, 5641, 501, 318, 28679, 17851, 2034, 5531, 564, 40078, - 49755, 28679, 71, 1551, 87, 35478, 9902, 302, 21206, 5010, 75, 8658, 20824, - 76, 8671, 332, 15720, 418, 7047, 79, 1273, 3972, 9700, 67702, 983, 7387, 86, - 89, 81, 73, 36722, 73, 47689, 70, 713, 276, 81, 2465, 20990, 45015, 831, 2303, - 764, 267, 511, 31695, 21888, 316, 11949, 494, 87, 25105, 82969, 4223, 352, 65, - 2880, 9902, 87, 20824, 5308, 796, 96518, 10333, 71, 398, 86, 4355, 3524, 50798, - 23913, 89, 699, 2329, 47342, 10196, 266, 67, 402, 40078, 2249, 34946, 17043, - 80, 372, 66, 634, 7501, 41796, 87, 74, 64003, 53835, 70, 784, 49993, 3415, 327, - 77, 10036, 441, 67665, 81, 273, 501, 86, 13536, 86, 4168, 14170, 45015, 24244, - 2319, 21659, 1310, 88, 80, 4239, 70, 1331, 291, 89, 67039, 73, 51924, 14878, - 2784, 42528, 7141, 76, 16017, 71, 12328, 81, 21486, 79, 1031, 392, 4484, 588, - 58305, 901, 365, 329, 2249, 657, 1804, 447, 45015, 23866, 1729, 67, 4991, 1498, - 13536, 3978, 2005, 1336, 339, 85, 43210, 98714, 343, 92791, 74, 677, 76, 15618, - 876, 8462, 911, 14029, 9907, 70, 346, 3524, 89, 59064, 822, 40078, 
2392, 25457, - 402, 9700, 81, 2554, 14966, 86, 16400, 89, 11328, 69, 4171, 79, 22537, 8480, - 18571, 44508, 5192, 710, 12812, 830, 501, 17704, 86, 12498, 2365, 19747, 1031, - 77, 38844, 80, 4080, 88, 8783, 2319, 1310, 70, 73, 57071, 1551, 94375, 89, 28990, - 89, 4175, 9081, 1056, 23866, 10784, 5192, 87, 4922, 64, 1658, 27093, 2034, 52877, - 27515, 4086, 81, 40617, 83079, 45015, 70, 80, 19239, 8088, 4942, 2933, 9832, - 359, 76, 19383, 9832, 86, 5192, 817, 73640, 45015, 38977, 324, 6713, 71, 1302, - 677, 35989, 9832, 266, 92110, 14736, 14029, 8892, 73, 87, 71, 94908, 406, 49463, - 33721, 300, 70, 52097, 302, 12841, 2249, 8088, 85, 10470, 40078, 71, 2642, 74, - 67039, 13997, 648, 39911, 19555, 26738, 84, 580, 18854, 80, 82999, 333, 93000, - 2661, 540, 85, 6151, 677, 5746, 19456, 73, 85, 4193, 14606, 75, 303, 94468, - 85, 2676, 544, 81, 4922, 4239, 79, 450, 1029, 14694, 1273, 1494, 1072, 11823, - 91311, 80, 4942, 675, 675, 10462, 29229, 18854, 65019, 85, 78498, 509, 383, - 336, 41796, 20990, 373, 12825, 80, 1273, 76, 581, 267, 80, 47689, 5254, 87, - 14485, 53060, 9700, 352, 94817, 1658, 710, 96518, 764, 80, 82999, 88, 321, 1065, - 1072, 14029, 38844, 7521, 69, 50798, 16373, 73, 70, 9996, 77, 3368, 307, 5972, - 351, 1003, 88, 936, 351, 372, 391, 34175, 303, 1273, 398, 75, 447, 21963, 63643, - 5683, 4355, 1662, 23576, 93000, 34175, 89, 8510, 332, 74, 8462, 77, 351, 87, - 1609, 87, 86, 1725, 65, 511, 8207, 7387, 509, 73, 31695, 85, 1031, 80, 93797, - 89, 9832, 73, 26929, 13028, 23866, 25658, 87, 72231, 1073, 75, 13045, 579, 5308, - 71, 73873, 22119, 77, 752, 78, 762, 68054, 365, 683, 27780, 31019, 1336, 67, - 2332, 2739, 86, 859, 14878, 2076, 710, 9701, 5981, 2465, 87, 365, 9258, 277, - 58152, 321, 404, 51622, 300, 14029, 40078, 5289, 1302, 69, 73, 7141, 41796, - 1468, 7141, 677, 87, 2192, 318, 94023, 66, 664, 80, 604, 18854, 10630, 3716, - 455, 540, 36851, 88, 1073, 65, 4239, 15809, 26804, 764, 1394, 4991, 80, 88, - 73, 7864, 385, 82999, 268, 18287, 60933, 86, 1138, 67, 3746, 21297, 74, 456, - 4835, 820, 19555, 80, 2203, 22016, 6733, 62355, 76, 1216, 5972, 11305, 675, - 41796, 4239, 911, 332, 9832, 88, 347, 10497, 71, 4171, 1897, 2067, 49755, 20127, - 33863, 17528, 1286, 21659, 3423, 9117, 1073, 14878, 81, 1320, 64, 1540, 1030, - 327, 7513, 7387, 69, 89, 4378, 29684, 4937, 23361, 6091, 85, 3315, 31935, 6292, - 1813, 19456, 79, 9117, 796, 26929, 86, 9081, 4341, 68, 1302, 2302, 6194, 48380, - 86, 1568, 1919, 359, 337, 19747, 6200, 375, 65, 15618, 86, 564, 69, 1026, 73, - 87, 8674, 773, 19639, 86, 455, 2739, 1273, 67, 402, 2439, 90625, 2438, 66565, - 363, 579, 318, 47154, 2329, 372, 12825, 76, 1751, 76, 906, 34175, 69, 95233, - 1216, 76, 2302, 1336, 72, 592, 9700, 73873, 441, 68991, 13536, 66565, 4575, - 406, 89, 588, 80, 1073, 77, 3919, 93000, 5104, 1609, 3906, 87, 14694, 817, 33686, - 7513, 325, 25105, 8311, 87, 25457, 5104, 73, 89, 53060, 894, 70, 16276, 604, - 21659, 710, 2445, 69, 41460, 7910, 5104, 28679, 266, 38041, 6375, 26929, 71, - 14029, 45015, 71, 50813, 773, 87, 321, 85, 89, 275, 54964, 73, 38361, 344, 6043, - 4843, 939, 73, 16400, 1320, 23960, 73, 52926, 19654, 3251, 73, 9117, 25105, - 80, 76166, 637, 76, 38359, 398, 21659, 55282, 81, 18565, 1631, 73, 354, 1302, - 14606, 876, 44508, 9112, 74, 80, 73, 87, 80, 392, 31695, 291, 2067, 66, 8462, - 17134, 84, 5308, 1031, 79, 540, 316, 831, 80, 38361, 29238, 3145, 73873, 31695, - 581, 8747, 316, 339, 89, 784, 1100, 74, 75406, 1662, 81, 377, 81, 2174, 69, - 647, 7453, 73, 85, 27455, 80, 385, 89, 398, 1142, 75, 2554, 3418, 98667, 53289, - 73, 
22619, 74, 8311, 1273, 54681, 1302, 2054, 10743, 74, 87, 70, 8998, 1394, - 28679, 57208, 20990, 983, 69, 3315, 53478, 266, 66, 86427, 80, 332, 77, 24752, - 65, 32345, 4151, 332, 87, 3306, 4681, 50798, 7501, 80, 13423, 28990, 74, 637, - 73873, 365, 86, 2249, 89, 85, 412, 86, 1072, 84, 3524, 27455, 4511, 10910, 21796, - 80, 33721, 588, 40429, 3576, 309, 74, 5289, 48872, 1494, 78498, 55506, 87, 35531, - 5972, 46341, 12825, 21620, 6511, 80, 38306, 17378, 354, 80, 69, 73, 85, 8587, - 80, 406, 747, 337, 51924, 85, 3423, 7047, 89, 27515, 1026, 85, 1029, 83, 641, - 77, 5544, 45015, 437, 13652, 764, 447, 351, 1103, 6733, 540, 80, 377, 544, 540, - 74, 70391, 10776, 87, 5308, 939, 87, 339, 3178, 1791, 1551, 80574, 1394, 75, - 12407, 60435, 80, 677, 81, 15720, 50139, 3919, 78, 581, 80, 89, 73, 581, 19072, - 80, 74, 80, 73, 32868, 69, 652, 376, 582, 354, 73, 361, 785, 21963, 87, 321, - 713, 52926, 73, 18854, 83079, 1919, 501, 3251, 92216, 450, 4942, 74, 544, 4285, - 1055, 392, 27515, 1546, 82, 2302, 354, 87, 80, 1910, 86, 10776, 87, 5230, 53835, - 71, 31695, 337, 4912, 1517, 69858, 392, 3906, 71, 2329, 544, 620, 80, 47689, - 5241, 80, 73, 86, 9112, 72, 454, 12825, 77468, 18571, 1273, 66, 71690, 15809, - 344, 6331, 372, 28990, 2933, 88, 7783, 80, 25604, 86, 85, 8671, 1073, 9490, - 1729, 88, 354, 17704, 73, 38041, 1031, 2143, 49712, 73, 634, 76, 36463, 652, - 302, 83, 73, 87, 10616, 15618, 377, 88, 73, 17247, 283, 4484, 1029, 2005, 1910, - 2034, 31268, 668, 73, 48372, 5010, 376, 74, 14878, 81, 26397, 86, 675, 8783, - 2642, 70, 80, 343, 80, 4777, 295, 13584, 86, 5964, 7141, 71, 8458, 2303, 8212, - 94908, 5308, 71, 13423, 85, 89, 3306, 848, 1072, 344, 67665, 2329, 87, 450, - 2868, 1073, 74, 17156, 80, 4223, 89835, 89, 6634, 21046, 73, 14244, 6200, 89, - 14694, 263, 79, 1031, 10776, 65019, 1551, 406, 76, 6358, 8088, 318, 71, 1804, - 8883, 10910, 23001, 24012, 80, 73, 95398, 2034, 3906, 80, 12341, 14606, 87, - 391, 73, 2329, 485, 75406, 24990, 46640, 64087, 754, 822, 80, 70, 376, 269, - 87, 27515, 86, 5308, 50139, 1395, 402, 3013, 86, 1026, 16400, 67702, 1764, 4922, - 19644, 65, 4341, 8587, 89835, 511, 91485, 80, 69, 73, 19456, 73, 8462, 728, - 1094, 31268, 87, 71, 5981, 89, 80, 372, 9700, 26397, 73, 21620, 51622, 1026, - 4223, 87, 4175, 2332, 76, 17912, 69, 10176, 80, 38359, 2727, 34175, 80, 9991, - 6414, 19747, 333, 8255, 352, 418, 677, 60933, 346, 70391, 80, 9112, 351, 89, - 540, 267, 49463, 1551, 2562, 4341, 3893, 501, 73, 1658, 13045, 285, 69, 754, - 80, 5701, 71, 39911, 81, 73, 953, 83214, 8311, 73, 19555, 86, 9112, 392, 40078, - 3251, 52971, 87, 28679, 67, 2438, 80, 285, 7521, 50798, 383, 22119, 604, 80, - 7387, 339, 28927, 41817, 28990, 10516, 1658, 2302, 91650, 26618, 1729, 3013, - 641, 4991, 73, 2312, 12940, 24620, 36068, 51622, 73, 6634, 29224, 372, 88, 11949, - 295, 20211, 17134, 5298, 87, 437, 402, 1347, 939, 63643, 20926, 89, 7141, 80, - 1273, 36825, 8783, 73, 4912, 5972, 85, 33863, 869, 303, 2042, 74, 1072, 455, - 664, 1394, 19072, 8385, 86, 1631, 81, 35063, 73, 1609, 88, 10616, 67, 64087, - 8480, 292, 41196, 31019, 316, 70, 939, 45015, 309, 89, 18577, 2554, 73, 1897, - 41129, 333, 71, 3893, 18442, 24752, 29811, 65, 33863, 3696, 18571, 7129, 80, - 2303, 668, 911, 87, 10056, 95233, 10910, 1320, 18577, 16276, 24752, 5298, 86, - 89, 2042, 16731, 38359, 6870, 80, 20824, 1897, 80, 1347, 39911, 1517, 78498, - 796, 73, 21340, 86, 46224, 278, 1029, 82, 3251, 23866, 67702, 3251, 4659, 30506, - 1394, 398, 754, 80, 70, 1517, 82, 2933, 89, 43441, 5418, 67175, 1729, 28700, - 63848, 316, 273, 9117, 2739, 
73, 1029, 72801, 3633, 73, 85, 8671, 316, 17156, - 12328, 12682, 4171, 42294, 5192, 73, 1216, 32204, 87, 70, 399, 21963, 89, 360, - 14736, 16400, 1813, 21206, 74, 427, 1494, 5104, 346, 444, 84, 5241, 23866, 2067, - 79, 10470, 11328, 74, 2465, 70, 73, 87, 2933, 88, 3696, 69, 9081, 17134, 1725, - 8212, 1031, 19866, 77, 3576, 7908, 31803, 773, 43686, 1498, 1658, 17156, 511, - 22619, 86, 7141, 2042, 77, 6194, 85, 6200, 1658, 5469, 2562, 84, 23361, 65, - 5104, 450, 67, 1725, 73, 11089, 67039, 343, 5469, 49755, 25658, 2465, 10470, - 73, 4838, 23866, 74, 66867, 911, 4341, 1658, 27106, 352, 41796, 91998, 16514, - 1031, 65, 8536, 80, 969, 869, 38181, 12116, 87, 73, 1919, 359, 85, 5308, 75, - 29700, 70, 80, 2026, 2302, 69, 10784, 89, 1725, 86, 4991, 79, 27515, 71, 7369, - 69, 418, 62585, 65, 376, 77, 10910, 266, 8385, 575, 29700, 1517, 566, 11748, - 85, 42818, 4484, 2580, 69, 1494, 8671, 65019, 9258, 316, 1791, 9081, 575, 87, - 1026, 81, 620, 589, 80, 332, 7387, 24672, 73, 38361, 89, 70, 876, 89, 4239, - 21888, 83448, 45775, 85, 455, 5241, 67, 796, 42908, 64123, 76, 1073, 79, 564, - 18894, 2067, 1725, 65, 80, 24244, 375, 45015, 1331, 953, 13009, 89, 32962, 7501, - 70678, 2329, 77, 39911, 73, 21807, 31881, 648, 57932, 70, 292, 77, 80, 2438, - 29238, 10616, 22524, 285, 26397, 74, 80, 1302, 911, 1195, 86, 89, 4239, 79, - 2319, 89, 2303, 37406, 27476, 73, 13536, 80, 2176, 1944, 441, 31141, 69, 1336, - 15802, 73822, 21888, 1962, 91299, 32204, 376, 53060, 447, 5298, 85, 634, 359, - 752, 7521, 3919, 87, 1910, 2445, 49450, 73, 27093, 7737, 2689, 8088, 22016, - 86, 336, 16094, 73, 728, 18244, 87, 10056, 18287, 1302, 69, 587, 81, 73, 13075, - 67665, 3427, 675, 80723, 80, 52926, 73, 1897, 20990, 301, 70, 3906, 80, 3696, - 73, 566, 587, 8892, 80, 427, 29443, 70118, 69, 73, 87, 85, 637, 77, 3601, 37406, - 86, 9188, 2933, 80, 73, 398, 1919, 65, 8587, 316, 3423, 85, 20191, 6194, 346, - 1100, 12821, 450, 77, 455, 82969, 75406, 4080, 32280, 43776, 49755, 2580, 76, - 299, 23770, 5701, 5241, 65, 1031, 85, 6511, 675, 24672, 77, 80, 1662, 8587, - 1065, 4223, 566, 1030, 80, 2026, 82, 713, 24752, 455, 46246, 540, 365, 93797, - 41101, 17043, 460, 71, 91849, 4991, 327, 73, 87, 1336, 2727, 10616, 18244, 31019, - 316, 402, 42818, 7129, 587, 89, 31764, 7513, 3306, 21206, 383, 73, 85, 501, - 80, 69, 3251, 1546, 51924, 80, 1100, 2042, 46051, 86, 49595, 89, 299, 321, 38639, - 24752, 66, 1729, 939, 1302, 402, 76, 9490, 60933, 4843, 67, 447, 84, 455, 31268, - 22016, 80, 269, 97523, 455, 491, 77, 31881, 3368, 9701, 915, 65, 939, 8050, - 67, 3368, 81, 2727, 41129, 309, 944, 76, 876, 2026, 83, 33863, 2857, 2933, 71, - 2852, 2005, 4168, 73, 87, 86, 34855, 18854, 427, 75, 1662, 41625, 1897, 3035, - 43505, 3696, 70828, 73, 3415, 587, 77, 46390, 455, 50139, 23361, 70, 2034, 3180, - 309, 66, 88183, 347, 1791, 1126, 4341, 73, 18442, 85, 89, 5192, 1336, 50226, - 35531, 2465, 75, 5192, 372, 21963, 2642, 3251, 6870, 1551, 7501, 94055, 11328, - 1897, 434, 69, 89, 73, 1551, 80, 936, 73, 346, 1065, 73, 89, 1100, 2642, 3675, - 73, 1395, 352, 73, 4341, 45775, 347, 76, 728, 80, 906, 2319, 88, 327, 1764, - 351, 1055, 283, 88, 89835, 2026, 85, 89, 59125, 876, 76, 12940, 6780, 53146, - 71, 360, 65, 41670, 53289, 1751, 858, 15444, 339, 710, 87, 437, 89, 73, 11253, - 2329, 76, 15809, 327, 80, 2026, 764, 2312, 4991, 18244, 4317, 581, 77, 1331, - 65, 8910, 92216, 59448, 86, 3035, 73, 2857, 77, 3178, 664, 87, 46341, 70678, - 3647, 85, 42908, 11032, 587, 79, 346, 1411, 6511, 67702, 4341, 80, 78, 336, - 38361, 70678, 80, 7521, 80, 50139, 89, 27093, 604, 
28990, 49461, 69, 3696, 69, - 3059, 1609, 44528, 12407, 9700, 87, 953, 7947, 87, 8510, 6511, 73, 70404, 65, - 2580, 80, 4086, 16115, 23866, 80, 1302, 29200, 73, 752, 87, 327, 86, 509, 86, - 3933, 321, 89, 454, 2465, 2249, 4680, 86, 1320, 31764, 31695, 73, 14736, 7783, - 87, 29684, 2562, 80, 70, 80, 10616, 7737, 377, 566, 329, 19285, 3906, 41922, - 1498, 35531, 89, 16017, 6634, 4511, 2465, 19239, 82969, 8839, 77, 564, 69, 1031, - 267, 73640, 87, 28679, 24990, 983, 1065, 7369, 89, 80, 344, 8462, 84, 648, 65, - 3415, 285, 44490, 75406, 7488, 14736, 67, 337, 16276, 718, 29684, 89, 3316, - 73, 4010, 36068, 4341, 25299, 708, 71, 4937, 69, 73, 22619, 2465, 876, 8587, - 74, 49755, 80, 1725, 86, 7501, 1026, 80, 4341, 85, 1029, 6885, 266, 1551, 564, - 87, 8510, 80, 8207, 2034, 2706, 50039, 41922, 80, 12591, 321, 63643, 85, 540, - 86, 89, 598, 79, 5697, 762, 23866, 89, 355, 14029, 19456, 17063, 20372, 764, - 31764, 73873, 27495, 648, 575, 81, 28927, 74, 2641, 8207, 89, 73, 56871, 1546, - 392, 67, 5981, 80, 5298, 4239, 35531, 89, 81, 360, 86, 7911, 511, 8998, 44183, - 266, 32204, 52719, 418, 86, 1725, 87, 39385, 73, 376, 9117, 347, 79, 1813, 869, - 74, 939, 87, 9779, 657, 5308, 82, 1609, 80, 15720, 73, 3415, 1065, 20811, 906, - 74, 87, 797, 69, 32589, 1302, 42051, 333, 27455, 1336, 13351, 73, 36825, 343, - 6780, 80, 292, 31019, 267, 3601, 5289, 1631, 566, 87, 35401, 796, 70, 8910, - 15809, 50798, 1517, 41796, 3178, 81, 8671, 5010, 2344, 86, 455, 30863, 318, - 3059, 11823, 73, 8376, 62220, 94908, 26738, 86, 4835, 1517, 87, 53016, 17063, - 85, 6200, 5981, 77, 14468, 1725, 10056, 8050, 94346, 2329, 73, 4175, 79, 24752, - 87, 5981, 2201, 447, 2642, 86, 9218, 20582, 77, 10776, 50139, 5346, 79, 361, - 1146, 1247, 29238, 1065, 26929, 97523, 89, 7369, 21620, 754, 68, 283, 50798, - 4744, 86, 346, 31422, 88, 352, 26474, 359, 16276, 71, 1302, 648, 1216, 89, 13752, - 604, 79, 911, 87, 84, 2826, 12825, 351, 876, 29443, 74, 6870, 80, 82969, 2154, - 65, 3696, 587, 40213, 12328, 588, 10784, 85, 2411, 9907, 6358, 85, 19747, 332, - 773, 80, 20553, 587, 76, 370, 547, 547, 4021, 79, 11032, 1725, 291, 77, 32204, - 54964, 370, 73370, 39911, 2438, 15618, 44183, 4223, 87, 2249, 7453, 89, 953, - 1347, 17704, 70, 7141, 88, 31278, 604, 67, 86682, 5721, 637, 27093, 50536, 911, - 12328, 86746, 9810, 73, 88, 69, 894, 7737, 53016, 73, 51622, 40375, 66353, 710, - 71, 4719, 1100, 24672, 32962, 21206, 61057, 764, 73, 50536, 2067, 5683, 47617, - 18244, 71, 4080, 1029, 17319, 1494, 16618, 17156, 80, 333, 363, 56871, 12328, - 75, 12591, 19239, 77, 19639, 89835, 5308, 5532, 73873, 31019, 361, 70, 32589, - 375, 5481, 344, 27515, 71, 5794, 47875, 385, 25299, 68151, 566, 14046, 44183, - 39911, 79, 391, 35478, 1347, 73, 3315, 21963, 74882, 89, 677, 1320, 351, 86, - 41796, 89, 63502, 87, 94908, 983, 51622, 17008, 8088, 5981, 77, 2034, 75, 1030, - 2701, 303, 70, 911, 17134, 89, 53835, 77, 1643, 64, 352, 83, 80, 74, 69057, - 9991, 16400, 31764, 73, 22619, 6331, 80, 22016, 87, 906, 604, 26397, 3368, 68, - 427, 3919, 73, 20824, 5192, 392, 80723, 88, 14736, 26474, 86, 85, 8212, 20191, - 80, 318, 5701, 39911, 8910, 18664, 86, 708, 71, 5104, 953, 80, 18442, 347, 11089, - 4341, 365, 6489, 80, 88, 9907, 2396, 70, 6358, 6331, 3172, 406, 4469, 3251, - 5010, 57504, 65, 80, 1320, 76, 1310, 37541, 953, 71, 7369, 278, 7488, 45015, - 798, 318, 15930, 29700, 57671, 343, 32962, 89, 23361, 45062, 391, 12940, 96295, - 35203, 85, 1804, 23001, 50536, 2332, 65, 35401, 6200, 71, 652, 13752, 4010, - 5701, 85, 80, 8207, 89, 4239, 89, 48380, 648, 28679, 6194, 43210, 
29684, 2562, - 74, 89, 7369, 4080, 41922, 80, 418, 70, 375, 65, 402, 76, 1007, 454, 74, 1065, - 81, 2005, 71, 11823, 588, 80, 833, 24672, 331, 85, 1331, 9832, 81, 566, 652, - 80, 5981, 1273, 71, 447, 4341, 70, 1494, 267, 83, 73, 32547, 710, 4912, 8512, - 24620, 89, 9700, 6200, 302, 258, 18577, 14833, 501, 73, 1347, 18577, 1517, 17008, - 587, 1336, 6194, 13545, 69, 332, 406, 89, 540, 911, 20824, 998, 2933, 30403, - 68003, 62497, 797, 98418, 70014, 15618, 74, 14029, 89, 7521, 65, 7488, 5192, - 85, 1072, 30863, 5469, 1146, 391, 454, 85, 2629, 80, 2176, 89, 27278, 509, 88, - 30506, 84, 1739, 87, 788, 1517, 77, 2438, 4239, 9451, 648, 830, 86, 346, 587, - 89, 332, 6438, 93000, 26397, 32589, 3516, 31695, 363, 85, 12328, 74, 3315, 337, - 1609, 327, 876, 352, 613, 8826, 2642, 1739, 66, 44970, 73, 85, 22619, 85, 1331, - 76, 634, 29700, 1031, 3576, 88, 6043, 80, 395, 5469, 69, 347, 87, 710, 80, 74, - 70391, 467, 5241, 370, 637, 69, 339, 3524, 44490, 73, 441, 36107, 4484, 73, - 70, 2642, 82999, 87, 6885, 1609, 17319, 86, 32478, 80, 64087, 7513, 4168, 9701, - 351, 6375, 86, 21206, 5469, 355, 1031, 67, 4171, 7737, 4080, 9701, 1394, 2642, - 2034, 8536, 324, 87, 32589, 42888, 15809, 351, 30506, 13136, 62327, 73, 983, - 2042, 52097, 20898, 16618, 59524, 796, 376, 9996, 3013, 38984, 4171, 87, 86, - 9991, 85, 89, 2249, 1286, 22119, 540, 2465, 14088, 20127, 89, 7369, 6511, 80, - 42818, 28990, 65, 80, 566, 18571, 85, 588, 39911, 2005, 372, 97523, 38844, 20926, - 80, 18894, 64087, 14736, 1494, 86, 1494, 7521, 65, 5104, 73, 70, 27093, 84, - 564, 20926, 74, 6194, 385, 80, 3919, 764, 87, 80, 491, 53060, 830, 822, 45015, - 89, 3306, 290, 37406, 675, 84, 7488, 71, 1331, 30407, 1310, 1026, 1026, 5469, - 894, 3933, 88, 634, 87, 14736, 31881, 89, 73005, 363, 67, 4223, 3316, 75, 8510, - 80, 18287, 911, 70, 582, 3178, 80897, 3418, 87, 2329, 86, 31019, 17008, 2026, - 74, 14170, 3893, 86, 355, 11377, 4010, 5964, 74, 80, 89, 359, 1804, 44490, 73, - 11949, 26205, 343, 89, 25299, 4086, 38041, 1725, 86, 377, 86, 332, 939, 339, - 36068, 67039, 333, 2862, 6370, 13536, 3524, 263, 456, 18854, 65116, 74, 354, - 4080, 33721, 86, 1764, 359, 6194, 269, 450, 66, 3055, 6672, 73, 3368, 18499, - 85, 2857, 69, 73, 10630, 85, 564, 85, 1729, 5972, 80, 6634, 87183, 5308, 65, - 19747, 74, 1394, 77, 2933, 23361, 953, 2933, 85, 17008, 300, 73, 354, 8207, - 52360, 2303, 604, 10470, 73078, 1639, 17063, 89, 652, 18287, 391, 66, 564, 45015, - 86746, 5469, 8503, 87, 8050, 70, 77827, 45015, 5469, 53289, 4922, 69, 4010, - 404, 1729, 447, 675, 333, 13536, 292, 86459, 337, 1336, 89, 8480, 6511, 94908, - 2249, 540, 876, 73, 784, 754, 1662, 69, 7488, 44970, 9197, 278, 1273, 73, 1910, - 363, 9902, 95780, 14606, 80, 754, 69, 729, 7141, 5192, 21888, 283, 1289, 1662, - 805, 66, 634, 6780, 352, 79, 1609, 69, 1498, 31019, 375, 4381, 89835, 12825, - 73, 4744, 78930, 318, 13423, 24990, 370, 19639, 73, 55233, 1394, 3427, 41796, - 69, 7387, 70, 52513, 2319, 87, 2445, 86, 347, 31695, 60013, 652, 3933, 80, 8892, - 5289, 21807, 657, 54532, 81, 20553, 28679, 9991, 86, 8318, 2005, 82999, 604, - 49712, 3368, 406, 7521, 16017, 728, 1331, 581, 7513, 73, 18894, 87, 75406, 80, - 89, 85, 2642, 5746, 7141, 88, 44490, 339, 1494, 27515, 23361, 14773, 31881, - 10916, 32962, 295, 9832, 89, 1517, 73, 85, 80, 25604, 21796, 321, 81, 8671, - 894, 23361, 43505, 71, 73, 953, 41196, 2445, 318, 16618, 11805, 20127, 35940, - 372, 80, 2303, 85, 2445, 77, 45015, 71, 2438, 80, 51622, 344, 87, 4223, 86, - 16417, 34263, 80, 32204, 73, 88, 347, 48372, 3893, 39152, 399, 96518, 1072, - 299, 3576, 
69, 98347, 6100, 22016, 15618, 16915, 8462, 73873, 17134, 77, 406, - 86, 96250, 344, 65, 764, 89, 21206, 7044, 78, 1662, 1727, 80, 30506, 43319, - 830, 2365, 3919, 29238, 73, 7783, 88, 1551, 69, 1146, 383, 641, 3172, 87, 278, - 5104, 258, 81, 73, 333, 6043, 73, 1631, 35531, 3315, 392, 9902, 80, 73, 27515, - 9081, 3919, 316, 581, 2203, 74, 398, 370, 17063, 42050, 2580, 73, 5298, 652, - 87, 71, 52097, 2465, 8890, 86, 4223, 8212, 97816, 1498, 346, 70, 73, 2580, 18854, - 78604, 1310, 84, 1512, 25457, 89, 1026, 8462, 2329, 88, 2396, 68, 1263, 75, - 3933, 75092, 8587, 3826, 80, 437, 4680, 81, 3933, 3988, 936, 10644, 70, 564, - 36722, 572, 8910, 710, 12214, 31019, 87, 3655, 796, 65, 80, 73, 70, 73, 267, - 344, 80, 3893, 327, 70, 31764, 1910, 36851, 2249, 604, 4991, 544, 70, 5298, - 90173, 953, 9991, 88, 60933, 2067, 5972, 42908, 604, 87, 10776, 21659, 9334, - 22619, 1394, 31268, 1310, 55895, 3576, 4991, 14122, 72, 801, 4239, 427, 50798, - 796, 40078, 74, 2580, 88, 1910, 74, 343, 354, 80, 444, 16373, 89, 73, 11328, - 28927, 73, 67910, 5230, 76, 10910, 22524, 44490, 99692, 3159, 14088, 5241, 80, - 1100, 7737, 74901, 4942, 11851, 10470, 7737, 2203, 86, 85, 80, 73, 87, 80, 418, - 85, 4922, 77, 10784, 818, 295, 35531, 89, 9700, 352, 87, 86, 292, 49755, 3601, - 29238, 652, 604, 36851, 4680, 78, 343, 36825, 36068, 80, 1336, 1729, 83, 58631, - 1302, 80, 295, 80, 73, 344, 69, 22016, 86, 657, 3368, 85, 25299, 53470, 454, - 8910, 80, 402, 10776, 79, 1662, 2203, 3919, 65, 5972, 35531, 789, 48380, 16915, - 69, 2329, 2329, 73, 2629, 12825, 713, 1030, 70391, 3646, 2470, 32547, 21878, - 5697, 80, 89, 22524, 370, 12498, 953, 67, 1302, 3059, 953, 1029, 6455, 2580, - 73, 2312, 21888, 80, 1320, 1517, 81, 713, 73, 8998, 8998, 22580, 89, 64145, - 73, 418, 24752, 318, 83, 27455, 80, 70, 266, 77, 80, 3172, 3458, 797, 9810, - 87, 2201, 2176, 2249, 1347, 87, 3251, 89, 6634, 71, 3423, 89, 80, 652, 95589, - 9490, 332, 10470, 8480, 21796, 46640, 9902, 35278, 12116, 67, 5972, 76, 4625, - 73, 63643, 1631, 509, 73, 36068, 41287, 2850, 20850, 581, 2203, 4777, 372, 80546, - 19456, 544, 81, 544, 7488, 77, 773, 76, 44906, 352, 5192, 89, 31019, 9117, 894, - 32589, 2347, 65, 1130, 58153, 370, 31268, 65, 418, 89, 16276, 1029, 68054, 2701, - 69, 295, 85, 28990, 80, 65019, 365, 28679, 76, 7387, 85, 1739, 46483, 72, 414, - 39911, 24990, 2826, 797, 4484, 60933, 86, 2042, 818, 392, 564, 73873, 48642, - 3919, 31881, 29238, 352, 24672, 77, 3906, 4922, 65, 24752, 5972, 3418, 637, - 35531, 42818, 4777, 327, 43505, 454, 70, 1804, 9996, 50798, 15720, 911, 1658, - 70, 1302, 307, 3520, 876, 437, 5964, 86, 3696, 51924, 31695, 652, 13856, 300, - 4168, 73, 2438, 80, 1494, 73, 89, 73, 462, 85407, 7369, 21206, 75, 11823, 437, - 19555, 17063, 74, 2635, 31695, 74, 9832, 67, 321, 73, 818, 6043, 16944, 84001, - 73, 5241, 49463, 89, 24738, 10630, 3601, 385, 28927, 80, 2933, 25105, 73, 939, - 13752, 73, 86, 4484, 1804, 10470, 5683, 15479, 70391, 55334, 80, 69, 3858, 67, - 427, 344, 309, 26397, 68054, 9092, 80, 56871, 20926, 93797, 70, 73, 87, 1149, - 3172, 447, 14088, 2319, 69, 8510, 73640, 43505, 404, 3576, 38149, 509, 564, - 325, 21796, 68054, 4223, 303, 15930, 385, 89, 42908, 1609, 65, 27093, 10616, - 17156, 12825, 2642, 32589, 333, 363, 15720, 89, 80, 321, 85, 267, 89, 22580, - 1517, 635, 1791, 52971, 7453, 19275, 483, 307, 1302, 64, 940, 74, 1201, 70, - 754, 83079, 1919, 9700, 71, 2580, 302, 77, 3893, 4168, 32204, 74, 18442, 73, - 6511, 418, 10188, 906, 2249, 69, 28679, 52926, 7488, 80, 18571, 89, 14694, 66, - 6194, 7387, 86746, 4922, 83, 6410, 49755, 
32893, 73, 3433, 64, 2176, 71, 34175, - 85, 998, 80, 3647, 89, 85, 9832, 80, 261, 80, 38361, 4680, 2701, 86611, 1056, - 72, 9490, 80, 3601, 89, 7513, 355, 383, 10616, 11032, 87, 1065, 76, 89649, 67800, - 1247, 74, 87, 20211, 8619, 20811, 86, 830, 65, 81913, 80, 26397, 74, 7737, 333, - 8088, 13028, 24037, 77827, 63502, 21486, 581, 5981, 3423, 55233, 321, 63643, - 85, 89, 3601, 86, 89, 1216, 86, 5981, 33863, 2249, 5308, 23707, 5544, 38977, - 723, 7141, 40924, 21206, 38844, 9110, 85, 581, 18854, 72455, 344, 9996, 72245, - 2249, 87, 49463, 3524, 87, 3696, 80, 911, 2562, 83, 40016, 80, 7513, 1498, 87, - 4937, 73, 70, 73, 10776, 4922, 1498, 23866, 50536, 20218, 587, 18287, 85, 302, - 3322, 14694, 10033, 24468, 648, 65, 21620, 4341, 2312, 327, 73, 18894, 8910, - 74, 7737, 3251, 831, 73, 85, 1739, 2933, 85, 2302, 664, 86, 89, 85, 406, 85, - 339, 74, 9451, 76, 10776, 87, 80, 31695, 316, 10616, 81, 16400, 548, 69, 351, - 72863, 31695, 80, 376, 17008, 2910, 587, 5893, 648, 269, 12828, 56871, 39911, - 7387, 17553, 5289, 39911, 4341, 2249, 6489, 728, 4835, 74, 37955, 86, 536, 564, - 45015, 65, 4086, 73, 10630, 325, 331, 301, 983, 74, 383, 71, 385, 2203, 77, - 876, 911, 5972, 89, 13752, 24990, 70678, 708, 70494, 89, 4835, 42818, 5683, - 73, 89, 652, 303, 4937, 86, 12825, 47689, 953, 88, 80, 64087, 4168, 25105, 89, - 70, 652, 74, 728, 359, 61345, 5683, 74, 3545, 2026, 79936, 11513, 28546, 831, - 1073, 20990, 85, 1394, 76, 21659, 372, 38844, 6200, 27093, 13855, 8998, 2203, - 89, 451, 953, 13423, 45015, 23760, 58153, 911, 82, 98347, 7387, 73, 52926, 9907, - 73, 88, 6703, 575, 4680, 88, 3841, 713, 437, 73, 59524, 1609, 43505, 6358, 93797, - 1073, 276, 12940, 31764, 7513, 18571, 16276, 53191, 915, 88, 41129, 818, 343, - 1910, 31268, 3601, 73, 1739, 80, 370, 303, 38181, 69, 447, 74, 1072, 575, 4484, - 8311, 1026, 88, 5308, 675, 86, 28990, 641, 92216, 88, 80, 70, 25457, 73, 70, - 5972, 9786, 80, 88, 509, 9991, 89, 5298, 450, 74, 1919, 372, 9092, 566, 939, - 77, 10176, 52926, 80, 69, 34750, 398, 65, 80, 21963, 302, 67, 6723, 569, 86, - 80897, 86, 12328, 73, 88, 80, 88, 85, 4937, 75, 375, 65116, 86, 316, 27780, - 71, 73, 70, 73, 88, 7737, 307, 377, 14088, 53060, 9810, 87, 74, 50039, 36851, - 86, 869, 42848, 23866, 77, 80, 337, 32962, 69, 6194, 17063, 67702, 329, 5964, - 911, 86746, 74, 5964, 40637, 80, 1347, 501, 47154, 331, 85, 14029, 8088, 85, - 3852, 634, 560, 39851, 1662, 14878, 40078, 764, 63913, 10784, 65, 7783, 404, - 2034, 35478, 391, 44508, 3696, 86, 3059, 85, 31695, 2067, 55911, 18244, 35531, - 80, 295, 3906, 7453, 269, 10910, 86, 16400, 38844, 869, 23361, 10056, 92216, - 18244, 2411, 544, 2857, 93797, 86, 316, 86, 383, 18287, 1026, 822, 32204, 87, - 818, 80, 7387, 575, 8892, 81, 5697, 1759, 65, 7737, 66, 3251, 12498, 437, 73, - 2302, 303, 88, 32478, 86, 80, 47689, 10056, 7521, 69, 344, 96518, 509, 796, - 87, 73, 89, 70, 73, 70, 19239, 344, 53289, 4657, 80, 634, 77, 6802, 582, 67139, - 728, 89, 59367, 3893, 27455, 70678, 501, 14606, 80, 15618, 360, 8088, 18442, - 455, 21888, 58305, 347, 89, 347, 74, 7047, 72, 292, 906, 78, 581, 14088, 360, - 375, 73, 27476, 20127, 764, 71, 28679, 936, 80, 89, 36825, 80, 88, 1729, 983, - 1065, 37406, 97523, 4680, 1897, 1347, 87, 89835, 31881, 70, 355, 55233, 22650, - 7349, 9081, 267, 18854, 40952, 337, 14736, 5964, 74, 60933, 375, 40924, 10616, - 70, 3841, 337, 73, 85, 33367, 31764, 677, 80, 44183, 372, 71, 22873, 637, 88, - 80, 7488, 66, 2249, 70, 509, 94055, 19639, 81, 713, 21486, 85, 4777, 93797, - 1273, 18287, 80, 73, 2438, 38359, 3858, 9991, 9081, 89687, 85, 20824, 
87, 41622, - 22016, 392, 797, 17008, 18571, 27515, 73, 82553, 86, 621, 19260, 1146, 89, 29684, - 324, 71, 48233, 80, 2067, 28546, 8998, 68467, 540, 6358, 73, 86, 2067, 87, 14878, - 784, 1791, 11032, 56871, 318, 1551, 4991, 63345, 796, 26397, 1100, 77, 324, - 89, 4922, 76, 754, 80, 12591, 533, 2026, 84, 1498, 1804, 73, 86, 418, 654, 74, - 8212, 675, 6200, 1320, 73, 392, 3251, 6375, 1756, 88, 351, 67, 22650, 73, 454, - 69, 634, 87, 6511, 34175, 80, 3172, 9112, 85, 15464, 799, 68, 21963, 38359, - 23797, 74, 6489, 70678, 19456, 71, 42818, 89, 553, 22119, 66867, 5469, 1764, - 31697, 953, 75, 462, 25105, 488, 65, 27322, 73, 67702, 25299, 80, 4010, 92216, - 4010, 74, 1910, 9907, 3172, 42528, 352, 302, 273, 75, 302, 68, 2113, 39911, - 76, 1625, 17851, 80, 4625, 32752, 784, 80, 637, 88, 1336, 6091, 40078, 80, 52926, - 25457, 418, 70, 4777, 92562, 11328, 11949, 46390, 10056, 74, 380, 73, 89, 641, - 87, 71, 728, 266, 5972, 1302, 70, 19747, 67, 796, 327, 70, 1100, 4748, 266, - 34175, 450, 361, 85, 8480, 32204, 69, 604, 1791, 447, 544, 85, 267, 437, 52971, - 876, 76, 12821, 647, 80, 61828, 2580, 894, 87, 40078, 8480, 325, 14736, 63502, - 46640, 544, 1331, 3172, 85260, 894, 69, 1302, 89, 728, 86, 85, 1073, 377, 8671, - 19456, 80, 784, 87, 84, 77827, 713, 94869, 8998, 11949, 1729, 637, 21620, 84712, - 66, 7387, 6043, 84, 329, 89, 309, 2201, 1498, 55233, 2332, 5192, 89, 38977, - 566, 27515, 80, 69, 677, 25105, 762, 73, 70, 74, 677, 1247, 71, 25457, 80, 78, - 300, 1725, 45228, 85, 6511, 74, 637, 45015, 2465, 983, 64087, 1302, 713, 11377, - 40924, 89, 46341, 983, 85, 41129, 1791, 7141, 41796, 969, 14898, 73, 10056, - 86, 511, 14736, 70678, 4086, 604, 71, 302, 78, 1662, 65, 5010, 79, 540, 2465, - 7910, 333, 87, 4010, 86, 5683, 85, 406, 876, 20990, 11328, 73, 637, 81, 418, - 295, 295, 86, 45015, 28774, 8892, 80, 9700, 53835, 25457, 13423, 42908, 77576, - 797, 85, 657, 9701, 96819, 2438, 70, 59194, 73, 441, 52971, 17134, 86, 40153, - 1494, 10056, 87, 73, 4937, 86, 375, 1804, 75, 90377, 73, 57504, 1791, 1897, - 23866, 273, 3036, 292, 402, 21963, 983, 38977, 295, 4080, 80, 29224, 89, 752, - 74, 80, 830, 8783, 2933, 45015, 7369, 77827, 1395, 73, 1394, 1394, 67, 5010, - 77, 10910, 2933, 15618, 19456, 32962, 85, 5972, 88, 40078, 8890, 80, 12654, - 12498, 80, 3306, 258, 71027, 13066, 85, 50798, 80, 939, 2302, 22374, 19239, - 1302, 87, 73, 3418, 4010, 89, 501, 92180, 73, 63643, 321, 93213, 84, 21659, - 303, 81, 73, 2438, 83227, 86, 8998, 65, 458, 1302, 14736, 45015, 20926, 5964, - 32204, 784, 25013, 80, 4239, 81, 747, 21659, 652, 604, 30506, 7387, 710, 9490, - 1494, 89, 1336, 73, 2933, 88, 15278, 2580, 1100, 275, 8658, 70, 74, 1739, 437, - 27752, 87, 73, 41139, 74, 5759, 7369, 3935, 12328, 17270, 354, 85, 14720, 292, - 9117, 31268, 2192, 88, 45854, 376, 764, 6511, 1030, 24470, 51622, 6358, 23866, - 10616, 3919, 70, 5010, 67800, 980, 21888, 581, 77, 4239, 375, 295, 852, 74, - 13183, 80, 11160, 77, 2312, 89, 3423, 65, 3427, 7453, 80, 74, 20176, 8747, 28679, - 80, 1625, 71, 1146, 6539, 78, 648, 79, 1331, 85, 16400, 69, 11949, 80, 11251, - 6043, 797, 652, 11032, 78930, 332, 74, 89, 392, 59524, 85, 267, 80, 3368, 24041, - 83470, 796, 41922, 292, 86, 2668, 82, 9902, 689, 1900, 80546, 19555, 77, 39911, - 81, 9786, 2319, 89, 745, 5701, 78, 336, 9907, 70, 3601, 88, 85, 5298, 80, 9672, - 13846, 70, 329, 49755, 375, 85, 3013, 31881, 11377, 82999, 637, 4341, 398, 88, - 88548, 16400, 11377, 39152, 80, 1631, 45015, 6358, 85, 8998, 58305, 652, 78930, - 69, 73, 830, 70, 74, 3368, 657, 9490, 89, 752, 69, 26235, 86, 652, 3306, 72801, 
- 73, 2312, 5544, 69, 752, 20211, 392, 50397, 3601, 773, 3368, 43389, 1394, 1910, - 544, 8910, 81, 64087, 331, 85, 23361, 277, 2303, 332, 80, 73640, 2465, 1919, - 67039, 74083, 604, 1791, 13536, 74, 14088, 587, 1195, 269, 1751, 42528, 80, - 35401, 89, 3368, 94908, 728, 85, 89, 46224, 447, 87, 18442, 4341, 78604, 4922, - 12825, 331, 283, 309, 85, 12184, 2303, 14088, 406, 634, 346, 88, 31764, 26397, - 3368, 60013, 713, 70, 1739, 87, 587, 1029, 2933, 78, 42930, 2003, 86, 48380, - 71, 17008, 5683, 24470, 8998, 74, 80, 344, 5010, 93000, 501, 40762, 4922, 66565, - 80, 344, 76, 575, 2042, 76, 1026, 509, 73, 1804, 939, 7047, 5010, 92238, 292, - 73, 354, 84, 70391, 5972, 88, 846, 2465, 37406, 89, 14088, 100120, 14878, 20990, - 564, 31147, 983, 69, 664, 31019, 11823, 3418, 14170, 392, 49595, 7453, 40078, - 8998, 36851, 80, 38359, 20553, 67039, 1662, 14878, 80, 44067, 2423, 316, 80, - 13634, 24672, 66, 1029, 79, 3696, 24990, 80, 300, 17008, 80, 70, 901, 764, 1336, - 76, 20926, 74, 11089, 23361, 14029, 454, 1029, 637, 18571, 85, 9081, 87, 86611, - 509, 953, 80, 36825, 80, 354, 89, 7387, 44490, 4239, 7521, 77, 1910, 9907, 654, - 42050, 1029, 52000, 65, 1791, 73, 301, 70, 581, 88, 73, 1517, 8480, 73, 635, - 346, 20990, 24244, 85, 80, 74, 89, 16276, 93797, 86, 953, 87, 92241, 677, 85, - 36851, 998, 2580, 7488, 87, 44508, 5490, 14029, 85, 26738, 10033, 84, 7387, - 76, 2562, 66565, 728, 75, 30728, 5964, 74, 4239, 78, 292, 80, 88, 80, 4942, - 24672, 19239, 1146, 1394, 75406, 73, 89, 1302, 77, 5104, 9864, 87, 354, 53450, - 80, 73, 1247, 7047, 70, 2034, 77, 70391, 78498, 370, 6634, 20920, 9117, 79, - 10470, 5118, 9490, 392, 63643, 89, 540, 67, 4355, 80, 634, 85, 23361, 92715, - 2580, 85393, 80, 70, 32752, 46640, 2554, 2826, 4171, 4239, 12940, 2034, 88, - 9112, 88, 18126, 1091, 78, 1100, 377, 93797, 13045, 365, 23361, 71, 35035, 32547, - 13092, 85514, 1494, 73, 2727, 85, 14485, 87, 74, 1031, 7717, 80, 10333, 70, - 80, 23001, 74, 398, 87, 1302, 64123, 86, 2034, 5308, 85, 42818, 303, 5923, 88, - 5964, 7387, 13009, 51924, 87, 710, 69057, 325, 24752, 44508, 16510, 4937, 35531, - 9832, 36825, 1336, 339, 69, 66945, 73, 1247, 86746, 89, 5192, 71, 25299, 455, - 77, 455, 3315, 402, 1751, 14606, 819, 9117, 731, 30407, 28679, 2739, 2130, 331, - 4719, 92238, 79, 2249, 1804, 75, 2329, 575, 6043, 84, 327, 75384, 5104, 299, - 5717, 81, 466, 4752, 86, 89, 80, 1302, 652, 65, 4922, 544, 7141, 87, 46790, - 321, 32868, 69, 6511, 88, 911, 466, 75406, 3524, 444, 2933, 71, 29652, 20990, - 69, 7369, 17134, 69, 451, 372, 318, 953, 87, 501, 1302, 1804, 76, 365, 5972, - 2249, 28679, 80, 14088, 1029, 28990, 372, 100111, 6634, 63813, 2329, 2329, 331, - 27520, 80, 347, 8462, 46341, 44490, 575, 5010, 24470, 75, 3415, 3524, 454, 4381, - 566, 713, 4458, 64, 2319, 74, 3919, 86, 1026, 75, 11823, 89, 634, 14088, 69, - 1247, 88, 292, 321, 302, 80, 3988, 6634, 73, 41196, 2191, 88, 69, 7387, 1739, - 74, 27455, 4010, 84, 2249, 76, 2026, 85514, 752, 17008, 86, 329, 4484, 278, - 3172, 25100, 43505, 867, 81, 939, 27515, 64381, 70, 80, 22016, 86, 10776, 89, - 4908, 89, 70, 14092, 830, 25457, 427, 754, 75406, 20218, 1073, 5230, 85, 377, - 74, 4080, 1233, 19639, 12498, 39911, 2629, 12158, 13536, 582, 14170, 884, 85, - 38844, 79224, 4168, 87, 71, 2249, 73, 70, 80, 70, 19072, 953, 56029, 2641, 69, - 73, 2465, 36851, 73, 3059, 17008, 20824, 911, 80, 69417, 18442, 87, 51071, 360, - 89, 8480, 268, 75, 3906, 89835, 1073, 75, 9117, 1919, 88, 360, 96518, 8462, - 28679, 29700, 70, 1302, 65, 7488, 4239, 65116, 74, 21088, 2465, 10784, 85, 936, - 38359, 3647, 27278, 
89, 8480, 8480, 3906, 1631, 1494, 295, 80, 318, 1631, 268, - 8050, 89, 3919, 61057, 77827, 39911, 354, 416, 337, 89, 9700, 69605, 6200, 1919, - 69, 7513, 73, 26738]], "model": "text-embedding-ada-002", "encoding_format": - "base64"}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '44957' - Content-Type: - - application/json - method: POST - uri: https://api.openai.com/v1/embeddings - response: - body: - string: !!binary | - H4sIAAAAAAAAA1SaWxOyOp/l799PsWvfOm+JICTsO04CAiYoiDg1NQWICKjIIQmkq7/7lD5dPdU3 - XhDkEJL/+q2V/Me//vrr7y5vymL6+5+//n7W4/T3//oeu2VT9vc/f/3vf/31119//cfv93+cWb7y - 8nar39Xv9F9j/b6V89///CX895H/f9I/f/0NxKClxgp4EdfwvQYlDBwa3usln99ibsHPRthT/bmU - YN7QvQGd6pBRa3NdIgZyzYUZLJ9km2bU5HVlJbCPzgNaz+LczCxxQ7g84QabxFUHhmxYKtWq3mAH - vdbDcgHvlUw3jkPUdbpv5tVkMxjHpYrY0DYmF2ZuwLN8V4gUaC6fywe31WovjtSYmevTdR8sYJ34 - EhJvzSun5/wUACO2a3pAVgqoEB8ruFRuhZEvSjkRVg9PlTQkIm7Gg0k3uHhBefPpSRs6SzQbpSfA - I2sDjOqK8slscwM0couwi0DKR/uVBsol2RLymo5mxI/uo4ftVTZwUEq7Zk7EPYFed68Qh1o8zO1m - pcDkNQ/Ub7rHwPlQi2COj5Qe/N3DXDbWnMK3xxhZT7eQTzu4bmGFrgv1QnLzZ80+JwAb9IEkYsjD - +EgHF949uKV3sbF9njz6FFqHUsDG9dNHizhVBqhGuODdDTyaKciPNWwXqSRsPVScry7bFG5hmhG0 - imROW0kW4PAWTWpJeBsxrLAACg25USw2L3/UkF3BLGkOSAblNMxhsl1gYxcuDXdP0iwPnEIgP6yW - KHIY5LyCWQvabWHgkzm+cuYLvgiIdgSo92Gez80tiyG9nq/U+LZ3xHcVcE/ICwmb6ymaZbEt4Il+ - GAGOa5qb4xEI8H33dtQ3/XXOr/6YwCx5HMhqPRlc0BCqFdGvR3wQT3E015+yhkqt5jjzg35Ypqtr - gO/7o1ezTD5zV5CBWHM6jM+9zYdhPZdw38wXethusMnNXBPUwNwZBLR6PdDTHJ7Uz21XIqZtM3Na - baMtXHa9Tk1tZs3YOLWmHmOxRKoLnGihWR4qyWGDqXW5PCLSypkIk3t+Qo9nooONddgTaI/LHQfA - DCPWP0QN2qukxm6TeLnkf9IOAFfhGC1oBGz3dkKo+6aJd0YrNNN8NGO4xOmC/SI9DVIzfzrFyayQ - FsbF8QneTiGkZhDiK3K0YTPEqQgk543QwqIajLphxNBiL0B3I94MU5ajEbLSrLF3OUoDu6iLslZx - aFCjeu74om+ZBe/EXmHUHKyIPcVOBNsz5kQRp3noay/WYPDoDHqLw6aZKx5mQFAVh2pv38iJrKU9 - DKTZpwcVtPlybswUon15xL/xvbBFEuDNen/IahcVnLtJZ8FojiXsxLszZ8nZOsF3Ne6pfu5avsSd - m0Cujw0OtMfDX86X1UnRr7OAz+en6vPbKSSQt8IH38aLHkn6uIdwRt6TLGDI/T6DgQvy/JAQOrIi - Z49mGwJNvmkYb5fJnCRWnGQNnk7f+rHlvLpCF5wlYpPv980XVjUlrObExbuNfvGXQ8xf6uFpQWxT - Zx9x/xP2ilxQhP1qjnJWRy8EJ8cYqNW4WTPn5iZTXSfTkXi/BjmPxjwG4bua6K65RZzMq2AF58dp - ROK3vizdZ4QwiIcdEdO9mi9AH7ZA259VtLg7J2dinDNwmEhDzQM8cQZqJ4VetM2wdo6Gpr/qexfG - 2q4jomhagGb2O4X4Kb2xIa0Rn0kOTxCXN0QGLXjzwU06G+7y3ZXMN02N2GXv2nDINjZaeif1Z7ON - NPklo4lqVDX5nGOvAmrS59iyFtGfbqt9/NMDqtGQ5ktuxJnylHc9DrbVGXBNNS2IwMWl1oWvIuZ3 - KwJi0r0RgG4V9Zr8dmEY7VPqkc5q5nJyEVz5cYudKwB8iQSrhnmpdfQ2y9HAz3mGIPDJDlvbj95Q - r9EK1Vt3Di0E4dOwqxclMBBUGTvf+kme60GDcXMgFJ12q4guhoDgvagARYCtzYGJOQP10xGpE40x - 4JFUFQBW1EQ0D2ST3W6+Ap5YzChiqQ0Wd9O3YIutDzoGYhNNVD4vECH9Qm1/3ZgzvlBREedyh3VV - LpoZFeMCt5xxJBkbO5ql08Lgu0g41fPS47woYwXowLXp/Xh8+XPSw1bJmtojnciihpdpp8A+8VS8 - t0s28DFNLKiGgk2A/NwNrDiKDF5UXcKHZ/4ESzGYBgx225gGxuXtcymHK/jpNgHeB9Nl4MeX6cKm - 03zq3W51PqVG7cKxUZ5EKUw5GlvproCWnC54D8qpedpQq2CumyMODmMTMflMDSjebIoNmGLAkXso - gEcsi0bb2I3YQwx6SGTjiXW+2ud8pO8UFH58JavEMHy2Yb0G1VLjeC/tAi6A2slgnuOEMF028q8+ - Zkp6ZjWqv/N3roWhB9uwm7B/3Ujggy9vAYSyzdCczEnEzHnvwmCnxIQvu0dErhsOYYXyBbF0feEs - nk8VRCs3o442jA2/7AoXBBL38d7zIB9CyTpBB2+f2HuIjcnOSLDBfnPOsB8p1jD3Bq9grF5TtJ3f - hs/bx72FG8pW1E0zbDLIxxEkdiHR/ePx4cuhWi+/90Hr7S5tPryuOrW4HLfY63HDGchdD37rCz6s - ho3/dK69BQ0FD2hzfqomLQ/RAimrdaIuL9df6vemht7RcWnA8g7Mct+FIOu3NUZ5G0dUiK8V7PZd - i111xOZXP05gXgSKjT045PM7lVuQ8KXDZqav+eQY4QKyimypkXPLZGD76OAUbDTsbdrKpGonV8AX - kiO2hFDj8+v0yKCiuAa136UcsewNRcCzfoWtx3pvSh/5c4Ju4i5YqzoHLLc5CoD2VlR8GC96TlTW - E2g1zxU1zuFnYPo16uFNb1/YPfmdP/RCm8AsogBtQ53k86m5eqDYVRY9njsL8Cm4bqEjPRfy+FBj - 
WC5t2UOopZheSeRH8zArsUp8ImAcCdXAb+1kAQbbGttz35pUH/UV7EvW0sCp0oYvz6yH/bwd8KHe - +v4871YB+F6fGuK+49OubxCUBGWNDao8wDKJnwq2JLzgwBTGfL4XKw9cDzeC5Pup5kzcow5szweO - jds5adh8kws4JqAjauQRn43WlsAtzDLs+G+Hz7I+d/CQxD49ZPZnWPZgYXDgc40eswNMNvA8BuMp - rhGorJKPWNkG4KeXwvngNCzbJSlUpv2O5FsfNvMpEBVYetKGtCs/4ptGASKMKONk4/cfwF5zs4XA - 3j3oXu7eAxdUzYPJmGL6qxfjtx5DF9oedY57JSff+QeLjbAl8rifB65Xggg/oCvpIZLqgYnVBcKb - 9fxgqwPtwEXv0kOzjwlS/Z1uzvNODIASiEeiNJEbsebpV6Au/AzJxg6DpcnlBMof4qNJKEhEb2CV - gqm8mmQTFkMz8Q9VlN7JAupImybinwZr8FAfe4xKwRwmEr69P/NXw1b7490Ksly7Y+N+Z9HkSP7r - xyN4HyfO8DSvOwV2z/REfYVIPk3DxwlSE4XYWSgyGftsethPzZmEoeJxvtJLD7DZ/2BbNFv+uqvg - BTDTT1i/abdo2Spaqh4/eYu4dSvAfK3MHtzelBL5BMtouZZ999M7JH/rAXPMmYEfr+0df2eyUNdK - 2Kz8GE3lvY+4vCYd0M1uoNdKOOdLvbYFqKeGRg2s18N8vl5O8Muj1NkiKxfWgqtA28096triAL79 - YygT7k40+/IcvQExA9trGJOxuqYmCT2jg63vHgkQ7ixfulu1wK8fItBSdCCqDrSh4ThPon75SXhB - WYMd5RM+fPFgZB+1A+o4t2iwnt0wfvkV4HyA1N4kLF/WhmarEagfX15so/lguQFUg5IheNq1/myy - OwJfvsTXve7n8+HxIUC7ugFFWV4DLqiuB61p7qh5F/bNsDxPHZzdOKSJZdwHtrqUpXJPxhcSZm1u - WLgBIQgOpxHrRr8d+EhYDFftUcOWdj3n7IMfHZDJ8Ui16PbMudsJJfjxm2/npKHrEyggF7ORHqz6 - 1Tye5fSC72gKECuXxzAfb+4KmikbkDyKL392F7uHws6/EQl5RsOFGRgwtNI39eXns+FW4iuwSp83 - rF1E219W12OmLpfAwt9617Bj+YHQPmcOWrp+4VxN3wXc3/oD2tytyJ9/fk0/I4KY+/D8Ga39ACKR - Gdi567O/6NutDXVfN6ejX2QmP+cnpCaZENCyrvCX54QQzm4S4oN+uuTTXXBK6DTGSDVfOEdsc00J - XJ2IinFmOoMwkiQF8MLe9GAWs8l3R3sF7+tBxwY9HCIqe1yE7Ik8aqnlOmeiqCDIa/2Cd5qumNMH - rUe4Hpac7vZVaL4XpMcq0sSerI4fPVpgIGugUA8hNsbkbs5nKtcgNhQFbWNSg2XOSf3zY2hR6k/D - T3N6AmcYBUiZ30XOF7RPYF3CkdpB2prLfe+yP/0P8pPq83lOBOV40gN6sNNg4LdTOgJXDrc0pCGO - Xn7UxVC03Jhq/MNMeu83ITTb+op4dfAa1u2vBeQXaSTSUw0axiPpBX0yjXjf3Xa5oJVhCyq6WFTL - rhXo7XYXwq+e0EOX3Yaff4B+vSDqqiM1WSvdt3/88VWNOZ/pe58CPJQ+NtdhM/CXfLDB7J+jb76A - AbspUwYG+zaTuWi1fKnfagVUUcVIud3qiNZVEMPc7sLv96WA66grwW2lJNjqZ28gfJuuoF8zRO0W - rMA0WmiBUbF+/OGj/hRm4h/+0qBTmkuVnz34Xk7B16988i+P23Cy8JXqTSOB+QVCQz1SzaVaqPSg - j/zbFvZrJ0XCaaUNwpf3gCZ+CJm7szwwmJgICtrrQvdriURdAwsBipYX0y/vDmPvtSMcxuseGyyq - +bKpMwVc9vJM9aKtoiWmTgvqt+xiFHZXczEVM1GrMtC/eYneSJqbnsCFRDZ60rvVCEWtiao4bCrq - vk5PwBaQQfDVD4zfq6whVX5zIcPhTLYFx4CZGVEU8X3KsO28jIitpi2B3/wIB5/7lC9b3SGgV+oT - 9S+vELBLHhfQkd4LtXXT4VKyNrdwoEFOVnxYTMaCwlVKiByK5bs+LDfYxZA8ugORpeWR8+EWaZCE - lkezk/syB6/RSgh8WcT7tYTyZffxArhv+AWxt1/nnGy0Wt3lzvXr597R0uunGiT364n6lVUCHqus - U1GuxfiP3iZnK4Rp20RE0K6bnI5kG4MbmqPf+PYZtc0VKM96hY23W0X8+PJdyOyPh9/PcOt3Ryft - oCHkCdaUp9/001CkUC/eNjbAAHx2CXEMss4Jfzxk9vezBSFBlU0PvZr6X1724HMF1gQ+ue5v9MvS - q/i5eRPhcRIGZtwHEci+71O8sTXAdyulhsnnpeLD8nLN6XHxvF8+Ri14nvjMPODKH9CXFJnWGfDe - 34xw8Q4OeX/5o/fuY60cT2aAcbVu/MXcpRU0pKCj5vVIzWllPuw//narM3EYG5Z2UJEShu29G4NJ - VUNRBX0h/td8ZRtvgdtO35HXV8/p85ynIIg/O+yb/j3vtDJ9weBjZIShS+gTxe4EWGa9hQ+9ujWX - 8Wn98fM4kC6tz97ncwd5FL6wk8DWHwWHZ8pH9zRqf/NI1h4UBI/UcIngn8NmdhfUb8eiDqhRLjkY - zyaBcPGwg11rXEwWu4MCpSaBZGuNiz/2k+lBsb251JA3Z1+6i16qfPUcm9KbD2RtuDYsSHGnv3xy - esjdC+oH8Uwet+nI57dehdBwdk9svuWk4RjdOuUsKAIONmruc3tnLNBRUYLN+tJGRCiMEbRusibS - l4f4DAcIM4cVCGwCC/B1by2gFpQdxe2A80ncHBGQ3bP85//8lQ8t7D7rlOq/++Owa8Gmfh3JM96M - +ZI51xhazXtFVp/Cz2fn2ttQKtUX1ZNZjEbeXwXw5Snq3HzfXCwtf/38O6p//TWtOQHf58GGumtz - qhRH78eLNIBy0oyut4dQ219UIrIwN+fG3muw3TUTaQel+JOXArG9u9Sf4Xng890rIZAPb3pwNkc+ - OHUb/vwA1aNdb3LvwmPIb0VIr1rnDSK5pwgww7LQQ3+8fVbdOhtOcGXiPb9Ww3CeXi/4zT+pDWQP - yJdoCGGGXQHvKs8dlpOsl+DLN2SbT0rEW88OwddPE0EYb2DWmpXwh3+8wpRzvhgwAF0kYrIxWmGY - fvwn7PY3rPuFYrJEqslPL7ArmmufHTZ3C+4EfYODY0qHEfJ2hKH+PiN569cRC3viwTGRO+p01Ttf - DtJJA9+8E1uuscvJVnfGnz6gzUsjA4kLjkCprgH6fq+I9dswVIPCvH155OgvK2PtKbtnZJHFxg9z - SdOsgKu3dUL8ABdOXk3BwHd8o3C+J/noejqEwUfLyDqXp2apvUKDtUIyukdO1fBQCkKg37OUgBlu - 
hvH+aU8AP06PP3kjja01VPSVv6fu/K59elHs1S9vo1+eGwiJTQsasVVj13wVDbkvpATP87rFB7M4 - +uwuyBkodrWFUelgfzm+Rw8+BvFF9Q81GvHSJj00yCnAB7k2h4lf8gpyZb2gp6Yr/nJWYgLeRcyR - tFZY9DkFKwX8/MTxiC1/8/X3kL3UFdpUihyNt53SAiWd2R9+mk7Baguiz6qkOFaXnDXON//YWSmO - DngV9Ze27IArn7YY7UOtEc8krKH7HjfUk9WK80IDKUTj64KN6vnkvDzkDNqPy4uI/gkN37xdABtp - fyG5unpFGyhpveoD8qEaOGmmmF2nEbgptPCuaz75BofVS5Xdi4xRlhtcKI6rBZo88rAVCjafgy3Q - 4HvailRDl9AU8oSPcLwShe7Oz5s/Y2LYkLBshe1vPsl2S5wCu/W2ZDlpSs5Ni7RQftjtV19yn8n7 - WgQrEwbkp3+z5Ey9ssnzD7W2n0ezxBS3gPvKARu9szU/03m0wS1fQryvV9hnv3zoW2/ovjwqA9fc - 8KQKJzfDcVWbQHTSvQUnISm+9dgyByOXFFgMwwF11b3wx69/+HM9HAnasNzPwQrqB+FMb4fY9b95 - X6mmcx0iMEzku36lnaBlWwXWn4nOpbEZEADTyaXWV+++eaIBETIv1EseNp8B2xaA2YOH5NTgYLRQ - T2DOV/rX71L++uXpp4acsPs5qsOCc38BbF8U1NLtPf/yPoJxgwm1WhSaf9ZbhM2pou7bSAa2ixfl - xyuIt68hmjNXSX71Esl2GQ7TZQ4YQKdsog4vjGa5q7xVE62vv/Ui84fFgAgOn7eElPCk+dx+fwJQ - KPnq124u1+hWg7u32pKOW04uasWhg6zUa3T9vEQwP+TqBehR1al92HzyBVXuCNdKExC4HAkg4WpT - AsetD9jyz+HA8x1LFMzME5G2qM37/TWOVaVMY+q1m4zT5Zl1YK08Aqwd+nzoDc5dePQVA+PiVeeL - wYEL3RY0dOdy0CxueT3B5WBcqWeFOJqPN22lGmsRk666Q396OnEIyrDIiPo++nz55b1f/kVCsTdN - CV/rRa2riv3h6UlrRBFa0jpCK2F2Gp7d9B4erMUl9WkinNNUOcFPJwWol8Mx+q4vjIqWJMq3fjr5 - /JFIAvjaiHAQrAmgh3vJ4JJzj3rPGke/8QBthO/Yz8TK/61vyG334fjwzYN+/Qn3rvlB4I41IEE/ - NaB3HSnWFaXlX782Alq4A/atZ9fM06hmUAeejTFREvPrH1J4ogPDv/H3OCTx65dv/upBw77rlWDZ - dfovH83JOo4LGL7riShn4ZTP2rEv4FlnBFstYn778zMoN2Lsre+RT0dbH+Fq252p9x4/JjcxGGE6 - VyHdlZdVMybSd3ynKKJaupbAH777MPwkPgk/0fjzV5KwXePv+nLOfnx2sJiLtesY5BOQpBG0Rt3T - /c39+H94eBpSDV/Qa91M4efgwmv13mBDqT8D8/xzoD6I9EFhuTuYlDjFCljep8ZaAyKfEWXFgLwt - IEbetAPCz6/+yU/ald3w2JIgtOLNAbvBuTYJuaeBKpXrF943QzMsxeBrYBscL2jZFELENTcN4ff5 - kFQftZxVzkGE4la8YbfbaIP00+dAWMvU+fI1z2yaysL22GAL65CPYXpd4GmwewKqOYrmXJI9qHjG - +uc/IpLSYfmzHursZ51L33xLiQfpTDZLWeR8VIQaLtPl8B2/jcmmOkPQreIbtrttPSwpbRgIxWNJ - zVuP+bLzK/LL87AGhVPzJ985yzeFvHlRN9y0Xi+4SPWWmujzjn75MPz7tyvgP//111//57fD4NXd - yud3Y8BUztO//3urwL+zW/ZvQRD/TcU/OxHImFXl3//81yaEvz9D9/pM/3fq2vI9/v3PX1BQ/2vD - wd9TN2XP/9nyr+8d//Nf/w8AAP//AwCe/y/15yAAAA== - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7c09bf89ed1e1c7e-AMS - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Mon, 01 May 2023 17:29:43 GMT - Server: - - cloudflare - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400, h3-29=":443"; ma=86400 - openai-organization: - - user-kd1j0bcill5flig1m29wdaof - openai-processing-ms: - - '471' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3000' - x-ratelimit-remaining-requests: - - '2999' - x-ratelimit-reset-requests: - - 20ms - x-request-id: - - a9c89e6436668562329d35d627f16245 - status: - code: 200 - message: OK -version: 1 diff --git a/tests/integration/cassettes/test_local_cache/test_get_relevant.yaml b/tests/integration/cassettes/test_local_cache/test_get_relevant.yaml deleted file mode 100644 index 8a11796b41..0000000000 --- a/tests/integration/cassettes/test_local_cache/test_get_relevant.yaml +++ /dev/null @@ -1,997 +0,0 @@ -interactions: -- request: - body: '{"input": ["Sample text 1"], "model": "text-embedding-ada-002", "encoding_format": - "base64"}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '92' - Content-Type: - - application/json - method: POST - uri: https://api.openai.com/v1/embeddings - response: - body: - string: !!binary | - 
H4sIAAAAAAAAA1R6SxOyOrfm/PsVu/aU/kpFJIs9AwG5miB4o6urSwQREJFLAuTU+e9d+p463T1x - ALGiycpzW/mPf/31199NUmb34e9//vr7VfTD3//j+yy9Dbe///nrf/7rr7/++us/fp//38isTrI0 - Ld75b/jvZfFOs+nvf/5a/veT/zvon7/+NuPiQ5nS1Anz5acM4uJ1ZNpa6bouyZMjLJdQMZs0H071 - gcrA3keVHUtt4GxxmpbKpd8Q5urcTppOaEXZRcct8U/hJ+LRfchQhzZv4iqmnoz4sN3CaV17ePPq - P15npqcc9Pf9yLCTWmi8r4mBLoLgMxWNqKR6tKTwcM83rLRb8OZ4jLcgX4URN/ux8KgSvxtUsFJl - ji1XiBfJs1dO/JwwcyO5+oS8tQGQGluyS6ibDOB1N3AIDfAQNarXy5f4iE5iTpgpd5bOk8GtoPH0 - HsOptJOxux9cqNitYI6nd2WR7zcusGJ7JGpq5px/KtyjfIMRUzeG7bHJkVto/O1AVxpqk1m0BRFN - SbH/vdeHi6MBXGZXI/jlC5wv2PWIOOlvTDvOQTJpbiSjOiUWBn67JNy/Xn3wyyUi2HaSjkVXVwX5 - yBFTYytLhsxY9LBYXp/EROPEh9rYuqjFqkWiNiiSWeR2C/JN7Yl/Pr8iflyNAbCk7Yg1r4/JpO0M - G8xTsGGOfa35tHx1Mypq4cX2/uB59LA73SC1IoMR65B3w1IPd0qRVwqx/fuKT055zWVf2nV487pf - +fRQ7Roqeq6JqepFN54Wnx088MckesuJ3t+PqyvE75tLZ3o8JPMhMlr4iF2GZWU36/w45bmSc/HI - dLy0+GQ3VoMEB9XM552YTNkhCiFU45KiTaXpq9qOJbg894jo7eGgz6zoCpgyqhHttEbelK2XPRQv - pWLO+/XoRr69jhBErUy2w1R11K1iEbQNbYlNLi3nPpF6KNr9kaloj/mYvhoK8Ts1iZvG124+NMxA - S6FGeNwYjc6mdyOB3jIdo4Sq5ci3wQzWLgqZKgPhc56QEbFzLBNVjktv1q39HQQtCEhc58vvfCOF - o9V/8GrLRG/EdZWBtspkorfhAk3Ds/VBavSQOc6ClJOmNT3o7+xIdPKu0LiT9mcYqLRlD+uglnxx - WI0gLsUdMbfUSqbMWFMQUfDBrWaXXcv2xxEuihQwa65PUecf7zbo/RIRuz28k2lJ9XAhn0WJqcl0 - jJpTezuDoF08st9Pli6K2b4CRweN2K3aJDRcbRuI5lRiu+tcd62elwKasl5j3/pG7RvWNdSZb7Mt - fY1Rfw9e7YabqoXFSyZ7rHhMdzADATOrZ/eo74bABr2bF7jE4Qf1lXWtwdkqKv2eRj7nPZ5huoQp - 3by8mI+ysLuDYFaIHA2ZdsxE3oyGz+fAtFVYc25eZwOma3JkhlUZ3YgRwihXypoZxUg5m86rCriG - MDG37Oy1b+loo/ipFsQx4lXZiuvgKhedfyJeLIlRf0rFUZaKpiPGM4m42BQTVRabycYoyYJo5NOB - /v4PcR8z6bgvhi749InpSp/LaKoX8xHBZT4Tc/Pu9LES4wJC9dwQuzvPel9ZCwEtlrcn80f/xbn0 - DnuwrBMmuNq9kikw1ZtS9ZgSo3rb+ojjZwHdOtdYGNmWN873cwPB4aUwL/G3nZhnkg2N2ZlULoYK - fUBUJZCjzsALYSGgQWu2LgzDAqhQzxKnsRbsAMKsJbozyeWc7ycXILUV4i7GRufF9ixDcmcYr4Ps - HU1ZPM8/fGV7Zn6i+Q2LCqzj7sFIwaSOfZS4gGY3AXMVKU9md3J24EK5ZNvDAXs0h3knT+mg4sXz - mnv9/Vk1Pzwi1iy+SiqkwRZEFH6IK9wT1M+fvQyOc26Yux2R1/zqoz6PFcPv6ovfnlEp/ttsmRXd - PL3vXv4ZhdbKomsehwmT4HMHTRFuzDEQ72YRoSUSsHOmrMjijn+UAW+++8H2YWR6IjvlNTQ7puFl - zVSPhviDoajhxbCzeKFRXtUAoowHLO1x5VF309TQaOcb28X6q+PJZ1dAEloiXqFHyHlSdTtUPJsN - nkRJRZ+l9RTBneaantTjs5ycMijAIujD9M+iTJh0kEK4CODjObotEY2VRJCDu3tjhhWZnRhebCo7 - 6rzHyzL0EV+wdY9++OBPgcbnHPtLcDe+S2JPtEs+mmK1CaP8Tqx+LyQ0dz5XFHf9iuz7riypgoYW - tNHBxLs5p6TvPmxEl0+bM3OzufJZSbsePbaSSHR3JZaTrYU5DP3KJMbr9vL4wjmHSBNzkcXVxtep - aIsimPfpitf0dI64eSyO8JkuM3OzRdjNwlHK5STOHsRVmlGflVrH8P0+Mcpli9o8vNfyZ/Uu6Glj - 7vUex88c9I4azL8kozeHF7VHx70VEPyObH391SOQC++CrjYXmXfSPi9Q8hAt4s9rt6wF2wkgCWBB - jAKPnPvHowvxKwjJtbHVkhejlAHs9xIhz2uuz65wodDo95Bg761EQ/2UrsCSpvutVzc4t0kF8+hu - KRLte0lF1Li/98xUX3k5yo9RBH+tMlz0fhxN2UnsYbrdDySklfadb8zgNOHyWz+G/hnmjw9SZ3vM - /uy9iPu6k/3wll0/+y5hIz7vIK4OB7oOG+AzO1sCTJHVEFzPV9RF4qWBKr8/sBifomjOHcVA2ux7 - zLxXbzSzQ5WDX6xlvDAdu0uHNMxA3NgTO4ufUzmzMwGkNy+PuYqc8p7vmkJ2hWpippycEfc/rQSO - fXl/+dLlfKEKM2LpOqPyFqceP8qqCrgPJOKPiwKNcpSGsHKDgKS+N3j88yhDkLPgTgda3fQ5ViIB - vvqRuIVX82bHXiKS6FXGY2xl0TQcZUCwJxJVosvTew16Hij44y2wEm5rfVhelwVoCtyY8+odfT7k - VqX85ntYIYt6y5Yo4qaj0GpHnG64dEmLHt7jwH71Ml26qIVsvX8y7Xsee2s9NXApA4lop1OqcylD - GIYqaJlh6auE6hdpRkt46My7lkn0Pd85uOtdSHaryuXf+rlDa2Qdc9y573pLrSSAm+BSCWuat/6U - fAuhvdHxeNcq3ortx4bUfXLm3WCORouFLqTODegkmjqfbHzK4LHPMUZXLY34cbiOkDyWFvEvt2fU - p8dnj4qWHOnzIhgRT+5e/dOfzBNbuexlPC3Rg7S7P/px/vIVGtomJLYvRGh2XX6EBVogpjdtk/Ai - cjGqgvLG7G6ToSlQxxbqu+cxvbHdZLIVl8KXf+hnV9yT8b6oa3C0jUGMZx+iL56KKCRhR6xg30R8 - Go8GxHXsMFdXu4QVl1gAwZ4ZXj77GbWh0rpwNKuJbJmdelOw5zUAOY3Ev7gHfdaHCIOZtE/K2zmP - ZjHdSCh+XfZUXjw+Ho21coYpkFdU1lqtY8U2k+WhOr2Y4wg5n4Y4u8n652Ox3fmWebzFQQWPrSxi - 
MTaW0TTQl/tbH5qjMSmnpR4aKN/4CE9rF5fjPRgaOUkpwYuyWHX9V8+idN/0xLDEORq0amfAdPaf - uIvX725ava4GXMpQYubm7ekzy9gd4vqI2fc8cB4NfQb6p/bp0vSGkiqL1Q0sO3EpHKebzhedXYPg - CSXTRrLpxmpOM5SNqw2d2XUfTTVqayQf7gfmCkrnzfn52sIHKS5RkZ973/WRwXqIlHm32zGZw1AC - tMK5yqzzKY/4Fw9BTrwSr0efdLxorrAR5x4T936CbsS24KPlYpiJTTYNnxkvCtTsjQ9JVscAfSCX - baS3rU1s0jh8GFqnRzGVPsyTJsNbT4p+Br6VCEZJ+uxoeLH7P/rKseprMv3wrd3VJd7U2NLHzrvZ - qGAv9PVbTVL9+Lwxcky0dbLv5t/5CPcuZ+5itD2qy5kN3So1CBGE0euKle0jTR5rYoVm4M1CrAZQ - ZxeBCk76RqMFzg2ksn8zxzJuyTQsrnfw11tGrOnZltyY1wDhdWkz/3LTIl5MlyX68gtumhuN6rzf - zUhvioC+pNUdfWBY7aAo5wlPK81Cs6h6LdTH4Y37+Yyi/ucXpvh2wtxWKPrjh8w7v9I5SLcJX3Rq - pfz8sB95C8SK3LTBfzKdblmLvO6zM0awrmfCrKhrvTnWAkMxb9WLOK9hjYbM5uMPr4n79UujZY89 - nKYoY9/zo3dmqi3htHyEXz6Cbqqf0g26Tb9kN2UX6lPNPQNJHzViu1tMkqkufEOuj23P9HaZ8unB - 4xDkxCm//m1bjph5gLhznIntK6beV9cyRNVV27D9F48nW9n2imWSMxWTtIumi2nK6Ov3qKwhtRsr - Lz9Dks0eRdeiL/lU9iIMdZoyN0ViR396MTV3B/xcJ/uS366kQLg1TKZx4ZbMYvt0Qb7bFdnj18w/ - l+VuifTX8Y7X3DK6b/3RP/ip3pdPNNS16oIZEWCm1t08LpmOij6Kvvvhi859/WTDgsc5IxB10aRt - NFvh6jpkHhrenEvn9xXgivZYweIBzYecVJBLhsFUGT31mdUoAItsPsw/Ld/JzBjN5K9/pfzDuN7f - bWwjcVHEVIzrORpq/9lA8WRLvFhMe33EgGXQm7f31VerpNV3mxmmSB2Iq7y1RHybDxfVp76hMJ/8 - jhfbTNqw0ghYXI8vb7qcNhiEvfogptbJ+lCncNw0ZuV++StH7Q9PvviDV9u9pLPPUt6Cu/b2+LWl - 74j+6hUX7E5X8s3Wx1k2JJQkXciscCdwKp5uN8iVZ822Ufvy5jgU7jA8DZGosl15Q5ZlGPT6dqUK - 1fbJKts/MEot32HYMxdRYxWtBJemrohhXJ6o5/mth6PFK6p88kyfLmqbweUzJGy3vofdeF9bOyQY - mYFlRVZ4g+NPAUNurhiuA5lPMG1scMjkUSYoWiTqVbNF4vg5sH1YfX9P3apScFOLn5/Ue9ktDBjy - rUynMZc6vnDcClhoERxszic0DcdZAMHsRIwkUutD0D2XcHkXDdutIo83cp/vkHlJe6YTs+bcX5hn - eSG1B+aJ1sC56We5nM+rCEs4zL3RAu0KwX1/+sNvLTtbAHVYLJmaXnA3u82zBr3EFV3W2dObanVn - IMfCI7M/49Wb2QlCJD0rRrYDqcvZXV1FENfbDSOCEHh8VC4qsDo0CDG7uuMmtZeoTl2fOdWO6WPq - uz7EpXv74mPjzQzSHvyXwIlRPAjinz7MlWFQgDQf5iTc/GwM9OUbslu94nJaXvMlXMoLx3BRsE71 - j35HjRafiJpeaMeKywGAb2VCBe+Yo/qw3TfQLRObDl89N7vKeFa6zcSYtipNtMQGvcvizB3K1Mst - 6Xe3yw6q++FDVNmZOTNF2P74A8fe61NOy7tU/fI45hbeDo3dVbv+yau0i2AkortbBOsvfuKFIX3K - H38r4lidibo5WfqErvUZjo74pCv0mBErdqEAHSjlHz081CrewXLBZppGu8LrO+/mAt/ZAtE/fh19 - /awE7fbMCbFKnFB9NfdKMZz3+Hnxz96k7S5LOI32m2lHvtAnyGcX8rUjkN95o2ES3OHhBBlLadkl - s1hoLcix1jFTPxy9wTl/QlnqLwpd05MY9fj0yoBVrs+sAK91JmFLQl8/h8XlNPAJdE+GKoufRG+f - WjIEN6GAfCX4THcIdHxUHqqsTTbB0rd+ukU9ir88gQqO3yDu66krL3iS0wAxn8/NW8sgicsbvkTK - mvP2PBqyHFxPX30Rd3zC26uymPyBmAhHnEvqJkSP/duh4s3Syok46xyS2+v81Sv7jpuv1Rb0blzQ - 7nZbRuyjKVuEh5gx/BqwPtWUnwF/LIP5p+qiDwM93JHU7XdY8IygG090NUMSSAaxmyT1xnTuvvnk - tmc7cWcj/vMvjEkJOyzHhlMlWN7l1szfxPb7nT7+8peYwYuYGU0jtuCNIbc2TfDGfQ5lvzusK0ju - TcT8izvp7CjbW/TNa+gru7NuGixJRa0veMyKHN+bLoaMQc7CO9vFYlEO2anbgrMj7JsvbX75ZgGO - urb/jBf1ive//ITOdJV3PLmgJZKeNSPGNx/48cWmqihh/sRxt4F7e0PDsNHwdNrmHdXzEqDOsM3M - dPfi07CoXCQY+YJsWZvocy61ZyRsE4uZmbDvZn0qBDiBMNPkehjKnguqoCzE1xHTmvQ6Ty6NDR1C - b2Ju0yaZwya5gv7c9RR996e39ncXnTazgKXufdDFePOQZZGPLtstn0rHzIAGv/XBEj4q5Wug8R2G - 5nWiDCo34v6RiWgKpBXTjqT85gdCBc6+upLIdg98VuJIAKnRQrbvHbOctJBfUdG6MXO+53tMXzmF - bvVcEH9aKsmIMwOgqtGWKrTsojHVix5S5wq4XeRrNNTrcInYrWZEO72GhE0SqiHIgpAY+U7iQx1f - ZXD8/M5cPYy98bffjmGumTatonL+5rmoPrI32++DNR9q/9Oi4LQaMJy2sTcNiyBTjsZr/OVv3rCc - 614W1ANhxlOZ+DRYowoPy62/+PLWabzxfWD3qaCyZusd+1S7HjV7p6J5esGlqK9kCvl6azOiXM/e - mCLPRt/1JbrXRH/8FwSnecbfPAK9Lk7aoHZXlWy30pNuWraWjz4LVyDemooljc9y/sMH+pKijM86 - iUP0WZANwy8v8L79hCXIZe3gEbGeM+mBpT9+qoWt+NW/doDM+FXhMW325RK34Etc22B6NNMkGqtZ - G8FKcp1Zh6JMVjUqakjVzZ14sbmJRvlyOKJU14/E9oclGl5ICKDRkhMum26tD/YZBFS9QWD2fhrR - aDn+7ZcvEuwZY0e/+TZydEFjpj7rSf37P/izdZh1KEN9WOrDiHA1xkxvkJWMeN3doXjJH+bPxlBO - mtJXKE5RQyd+h4jqpBdkM10fib1/6N6PL5D8OF8xzMa+W5tUFSG4SC3NbJA6tnC29Z985scPc+xm - 
AnzzXCwvcOuNaRCDbEbGjtyE3vcmzU1kYHHB2TZ4LPjnh1/C3rzRBWz9aKyCxx1+ea13PezLOW5M - H5IH2jIi3GdvGmydwkli2z/1w4tKw8CYnBAC2z6Z6rar5aHEFIvrvuBULLQGzPN5Rb55hcd/+/fr - T7h3HSMq6FOh1OGLMVc9r6I+FfkVllxqWOath26Ol5ABy4UrM+9bB/W4IxLgonli3oYPzvwjWyLz - iJWff+d9mmq+chr3BV5889hpeYUCkmy1I/Zn30UDyNVRyvmGEFXiH692ib5TOOEn5t3iVyfqpAf4 - 5d3qvSzQF5+O6Mv3xJS0yXtdiqUL4WX2f3lqOSyvjYi++gA3jYK8lqmxiE5SazHtOD3LIXi/RoBj - l/7R780JvVTQ6zQiqhRuEHUV6Qj5xlXpOjwzj+qkAri83yXTRLPkc6M2qtytuzUVvnzDpTO7gT81 - NsPuXHDmz+cd+GPlU+rpT8QWEAVKt7o5dJQKV+dRrmD07Rcx4wW6zj8l2qJsc4kpXIaUT867Cf/k - Yf7Xz021/2lkRx333/oOuvaX/6ZbiNnRQGnSjTjbQeOrA62+/DjreRDAdDQ+jCywGolhpbcQXkYf - D8Ne69hxNYagjbsIr9Jr1PHjSgoguNs3jMRW7vj0cG4/vfjD53IY7McV4ue2IPs9cTzacKlHSaii - 7/55SS/vbkeIO7oiKsJpyRdvo4DHzmiJxdEYcfOlqBBcFi/m1GTFe5mYBRQdPpFfPv315xRVzzwh - JF+PnBWXVwOCI3aUN6399cvB/ecfifnNU2elpc2PH/H0zccG+BxakLPThRGl2SWzfhlnpXGkFUv3 - 01sfZ+/RIDlIAyp/83++cLIAQt+byZ5d1t4vfwdrP03MMHZbPmTFIADfLlyyjZwVGpaWUmxaHzxy - 4ShIeOL51fq0KjFthXvCp0tpiZBi+u2fnMqIRVsewLcfTB+WNib9LPsydKN3xcu8OaM5D+8Vqh/H - BTl885XhmwcoR3udY3m7p8msxKxFzd6rqODyqZxscwXQLbY5Xk/zp5sCpxTh+/6Pv2kq+Rmib/+T - bXt/k8xMMoVfv5D96bf+8tpv/kqMcnsv52+/GnD+qsg26jgfU6S7v7wQr7n8LLnvNQJ895fOga8n - 4q+efDgmTP/4u2QthZ4AD02U8Rw8HnwIVKkF6yC8mauedt3MHHOLqt6n/6Vflq2FIYi1J9O//WD+ - 6WuMgsTI2DagWjJibm/h79+tgP/8119//a/fDYO6SbPX92LAkE3Dv//7qsC/b+nt38ul+G8m/rmJ - QPtbnv39z39dQvj70zX1Z/jfQ1Nl7/7vf/6S/tw2+Htohtvr/3n8r+9c//mv/wMAAP//AwCkxTP2 - 4SAAAA== - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7be211a83807169e-SJC - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Wed, 26 Apr 2023 21:55:16 GMT - Server: - - cloudflare - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400, h3-29=":443"; ma=86400 - openai-organization: - - user-adtx4fhfg1qsiyzdoaxciooj - openai-processing-ms: - - '311' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3000' - x-ratelimit-remaining-requests: - - '2999' - x-ratelimit-reset-requests: - - 20ms - x-request-id: - - 665023d2f6ce38148bbb6db93c66bb82 - status: - code: 200 - message: OK -- request: - body: '{"input": ["Sample text 2"], "model": "text-embedding-ada-002", "encoding_format": - "base64"}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '92' - Content-Type: - - application/json - method: POST - uri: https://api.openai.com/v1/embeddings - response: - body: - string: !!binary | - H4sIAAAAAAAAA1R6W9O6vJbn/f4Ub723zi4BkYR9x0nOJCiKOjU1BYgICMghAdLV331K/7t6um+e - Cx6EWmHld1r5j3/89dffXVrl2fT3v/76+12O09//63vtkUzJ3//663//46+//vrrP35//8edeZPm - j0fZFr/bf/8s20e+/P2vv7j/uvL/b/rXX3+PA19Q5yVYKZuW7ix1zX5Dre0xABNeX2e5CviZms81 - 0sd7il0Y1XxL8bzt9YkEuIBShzOyRHOYzvviHcJ7sOcQ3y4LY7wDOJB7rY9VVxe80ffjEppne8WO - dMf6qAmbEKjHViZygrSUlXZ5hkrRC1Q1PAKIs0ENOOJnhbbCbklZthVjyDYSILtj1nsLFTY3sL0J - B4qj+w5Mwno0ZPNq6jRQYBPR7XlW5Jfmd9jdEDNdYL3fwPQcytRA5MCWtxfYQDIuBtWG5KbPqXAZ - oXZmL1S/RSdlt4tygm53wNSQhjKaHq3fQLFe3vhg3a/6sr5LDv7eb0GlTKduVVYJ3bwzql1cgrVQ - +wSeQs2mRhJZw+dsKTc44scbq3tzieZtznzoDlpBHUfGw8yu7Ax68/lAgiK+PCadphkYu7JGrNF4 - j5TMVaD9sCV6YXFUdYEQ3KA2Nhh774dXkSZWbzB6XB3sBTUDK6lEAX4cN8XeoxMrdpy8G0hl2mAD - 33iwuscxgzgSUsLhG8+WxAhCCdHPQg2nQMPaPlYXdst1okqf0GEq2SrI19exxepbGyuWCkIJHHQ7 - YV/lzYhNSxHDodEgPtz8MBpVSWqgfEkQqQ7eNVpfwzGE8SoX1DRVzhsDVSqlOn3qRAyrXp97pPhy - kSU6NQYVRuQ1HE+gUDcX6itXGbBHFyMoZ+KAxGyn6Fx492eYXbMXtt7bI1vTSY/h3EUFPvgtGdYy - 
pSI4xNUTFQJ5D+vrWfvw4oEKuw7ZDlTcXV3I53mCA/RRKq4vzhDeNuKeBs/rJVpdEMXQ/CgYu6dT - AxYZdSLEzm2kVvyWdeJadgbFpLewtjmI1SRA3oSH7jlQ97HrqklY7wYIlFuNbfaA3vjxtzZEFGo4 - /X6PhdSfXuJ2/UKWJp3B8pT6BAofN8NurPHDLKJbAXfzPqAu6V/VXMHsBu+FfcLGtz9X93Es5f33 - kfFD8hgJ4g+EbuzoWLG5x8DOSbPC6LRP0ZzzExg5nrehv8mvBJrnTzTVenaDXsEfsGXcn2Cmk462 - 2+3lQR1e8nVKr8kG9jO6YTUzzIpHvVhDfeRK7HAvC8zZ+VrD8pL41M52irfwn+cJMCoqBHwgqvpY - S0/wvVYKWd6aX7F8CRVY12BCIjk73rfeG4wO3EidZitFJG+iGbw01KFV2N7BuufPBO5R8SRiK0rR - HO9DQcYhcEnz8K7VwqaohhoNML7f2EMnRFIlKeFPGkU78xUx5TiW+0UxMXWybvYovZ4ghCfUU1z4 - R31h+L5Kd2VnYmMwi2rdvRkC2/VEEUBR53VVo8fg9mwnxBM7S+mNaQbQzssLBzfTYXxlhURmpmMi - iTl1Nas0KPYwXgTs99kJrEPfJbB1o5nsvZgxqjyODRwz4Y1xuCsqZu0/MdSXlOJDXTTReNQYgc0G - mVRxT43ODvBcQysTW+wIHyXlvTcw4X3CCsbH4zNtrRIjEGeZRfNSvkVD/u4h0D7dhv6+z64yiALG - g7ojaapC9u13+0995dNr0lm8cSX81oOazvxEbM3CHFqHwx4f9I8d8Xn6LAH/hiJGFOTRrKEwh6+5 - pGjPvVq2dAnpgJCPE8WdWuvr8TrYcG1djxq1Uno0xE4DuSnQqHK+P9OZdyINXiTcU1WTr188NRpA - SvQhUbM46dxEzQjFEN2xtb+bwzIddQFeayT+9mtFL8aFwI3CDHqQy6PH1PvjBA/x60nt4DFE6/JQ - QzkWXws12FuvFlFyBHi75CG2LVMBs1h/aniiukaD81EAq35NOGgL7xqRYC6GZbjYNvykZksPaal4 - fF91OfhQaGANL8dhed7GE7RmppC1nztvWcwrBPvd6YJmL917ZOuAGYAV5EQ2D/u0171pheGcf9CK - blM0tfZRgm3SqOR9NI8DcVjfAJyfHKqnflvN7hpCGPI6Tw+t9QIr4oYVTkJVIjZslYEFxcMGJEkl - InbZ6w8egq357tD0wBBMPz6S1NHA0eVcRGuViRC6EV5x8Ga113nGNIMzlDqyGgPzvv13BrHV9Oij - bD8V2Qx7CY7sssHGMIcDEx73HsRfoBI2i1Ot56zrYGhSgrjdZY2YHvcKnELFxd/1HpYWjivw4ntM - M3OzTynadhpsCt7CyPHv0ZzUNgTHj5/TpEbMmz9SH++cej9gm09aNitHjwAx9O9oSd962sWvzSiV - BwsgTksoG5eTzIGLFV+pfXUPA5O4QQIhr/LY9wfGVisQFWBQypO5fGfVFI/rBjrTPcBO5DVs1OpP - AUcyL0iGKIqYVYWh9MVXfGLY06f3K0f7qxIX2HatNJrbdWng0G0CbD5XpncX/IEw0NUaccH2ODD7 - mUIQeTcZ7fg6qqaMHyXohPHzD5/OhxkRsJXmI1ZXZa0o9d4i/NaPT83xoK/XUXMhmrSFbMOdMsw/ - fG6v3Uy1scMVpZWiyU+0ztSQl5dHw/eph4tYMcL5Vh+t2jRIYHHxiI1oJ4JOYI7462eqZOiUdtXc - nkF9E3Sqq9IwsCx2MnjX5yvW46muxvObKbBXxSONK7vSJ2jONjRmzSL8SEV9xA/VhIkkRAS8H96w - 1PcpA473VDHeNe+K/N5/bKKAcLpj6BTitYCdkjTYfPXSMO0yW4ObS//CZvayBjZ0TQmlLsiIXPld - RL/6AWZMyLDyrDV97EdXk0zNb9CyHDfR1POyD1QMVWpzQzSwDng2fMrmDgGcpfqs3bgc1P2MsZ7D - OJqtvV3I4zU84/uniXX2VtcZ9BH3wP6zOaQrhucS5O+zRZ2bM+k0cA0Tvvn8SA9H/gDWbcvX8J6G - HFbF7BUtwactf/WQZYnLoX5fKw22ihsRiR8yne1HYMPEaQBWQGBX8/P+MOAeCg11UykDZJY3Z5iY - Q4+uqWOkwvZScMDbIAOjA5iGgeDeh57xcKnOobVi4f3dQN4vEQ2c9BORw/pKIHzhN9rY5Sdi+Z0X - Ydmeb9i/bR3wURa8AV6xOxC54/JqBgAIEHRPSl1zOui0uYQ1HOdx8+MrsNAxuQGdPkrEve5Nuvbe - pQBfvKCPr/5eP/o4S9v0eaBKEveA+LrvwvvtUlG3m2JvaWG9QtbeciQcTk/A5P0rgXO6f2OHVq+h - yAq/B189g5XLrmXzPbv58Bw55Ps8F6zdXRb+zTc3rR5IRQ8EHg+xifHySbwl1g0TQuGgYUN3an2U - NtQEL8FSyGcjnnQ29+IMC6zrhB+aGixi2RLI7/YGPpyBMdAksDpgEYFRM4Rvth6yMIYTE0saRHY1 - MKu6hZLIGQsCvker+fDYQ8lu6w/VlzZm882WEqnPTx5hnl2yD85u4c9fkPotflLCULcBeph12CZn - RydfvgHf/YEqa13BUj4+M6zqIaHaJfe9z+vcwp8+J9wtVaL1DoYeDldNpZ4aymzEB7IBRkgA6fjE - YutbGxrYXUKd2sM0pbO0aU1on6hPhn74iu/5icCS5Q6RKooBy2I1hwR2V6wl7dNbPJ3bwNTZIaqO - 9pCu55DV8JFcObIZ+5XNTt/5QFKJgZg48QPLAafsR+22I7A7S+k8Pl4j+OojspPFTzULmcjB42tU - 8RGVd71/a1UNr6+oRRvzLVckNIIz2I1nBck0lbx5oywJjNAZYf+zauAPn/300iGaAm++h5UNK5jb - SPIvaTRbU5RJj6Q/oM0J68N6GuwNJCgOkawEqzfX+5cNY15cqHNctWqKHNDB08MQMDpbnsfO85aT - ZvWxw7/9tu6vibhvxzKgf9bjcn+4UHF4n6z6Ix0YBsMIBdZ42NkrPVuG4ejD0K9Dqu+euTfIE3cG - ennjcDB/jgM7r7YEskBZ0fvmhyl98UYOn+XHQRy+XQBr23cMj/f9ierZRWLL3OUmvKw0QJfw+kiX - 5SGZ8Osv0XsD+HRh+DjLZrys2HlZkff++mcQ5mWGZPQpBtrHxxnOVnynNqdWw5qK801OdkDD6FjQ - dOqadAQWG49Iun2WiHXmHMLbxdvSb/9X0y0/dUCkOJp21/OSsr7gV0jXDNLk9jlG6+4NENiNKiVC - j1SwSJdbBjvutKFGFNjsq39cGO1MA2txoESrs0E12Fy6F/75HbqStw+uDS9Sa06LYdoqSi1/9x+1 - PheVMVrZClCv+ZOwy2ZOWZRXBkRchREI+7ViZYTiHz9SfdrZ3sxfFENm44Z+++lRsZYlCeD0+U6t - 
YkmGxXbrAmruZcWe79GhwXObQ241CzK/fTHq/RFocCBlTrGMAGvF3dPe82dt/PInHb7PuwFTfqc4 - OPapPtNXbIPEL3yKFrHXGdkfDTloAKTOKVurlYuPREqbwMG4YeOw3BvvBpFe20RCMGbT67JX4Ho6 - Eaot1jqsUhQ3MMXpk2y+zxtT4TFKm7aZENF4wqgjP2agP14x2itbZxjyuyzBZxwhjE+0ZeuWlZ30 - rgKOlHzDD7/1AnqZcKT86s353kAffOjAqA82hTfrsYqgKRj/xvPxYD4FgPTGJltX4sD4UsIVfnZZ - ifb5bFfzZIU9JN5tiw1p0KJpnU0B2kJbE+B7uFqs4LrC7NhuaeBF8zDVO3qCJzu4U8/nt8O0KeTu - p6+RHOnPlGwKX4C/vGa/1tBbWb1qsK73E4KuIeqE3Lz5p48xyvNLumg3r4OX2Qi++Cow4h7HHGaj - OlF0wtVQt45G4Ddf+vqNLqLuul9h6s9Pqu0I0RkXiDYkXrIl/JSP0azVnxL8+Nq3pmCg+WxuwOMz - KEhSnxMbQ+CKEumJRzVsqQPf2kdR0B9VTOZiuqQscH0TbPMkxqga/YqxWvXhZOOUqvdHzwZu2ENp - fV4y6mUbu5pHts7wMTbHP3yy8N+A4J77B2y5sZculXUb/9Rjl7RNSSQ/Rohv95ZasugMw2jtc7BZ - tYzs5lQZOE0QTuCXb1n6VgG74qp14HZxtkjgWVUtt9O7gRvo7b/5yNlj3aH1oSA6V6xEoe8tp8Ig - EuoPBWrHOIsW3HAhBIV8Io10aoYZHxoIB0m8Y91R+nQtnciWo+Qu4r6W23SV/VaB1uHm4sc3H5sP - 5pOTrOerxQFQABs2/GSAr1/Dpoe9lEr5iYNizd5kd9jZEalvsIPfvJDw2/4zzLtDTqCyvjVqRrns - sWGzrUHUriJ6zS1Kp4ejrXAUIoyt7XEC/UXqczh57ojeQnaKFmWxIDy9NmfqxosD5vNSGLCp5hs2 - bqmS7qqmtqEvtjya7f2LzeCxKvJP7zmHfKz+5D2lu3lQt4yHYUROdYJvPjv+wUvyLqYE7lvZIez8 - 9r1VewkC/JjYRfL9SfRFcoYbdPP+je3VuDJCSMbBl/x+kOG0Dt4aC8QGJj6eqfuwuWjdsr6HPCcz - jDSeALJlfQfTnAzUmg7zsFy5isAuEc5U4bfHiGRegKR3VRyoqc12RGHjNZBahkk2eX6JaFBcbMja - JEdC2zY6u/JyDMVHZOFDc5sqcqmPifzLR4zjRR2E17NG4JFcOMQ/T4gNRrdZJW7CGtlv2yZdDw2M - gaxnGuGduGQL2ys9pJE3E3h48/o6n60cKP78oJZx34K5avSzHHAniIT1EVWrYC0J9Nbl+O987pfv - ZBjq2Jo4Aywm0DYwAMv9h3/V+IpfAoxgd6GHfhdW7NDoBTgMN0jtj7kwcrwOLtAWLyOPca8Btn8h - EzwOc4fN4iik4xF1K7z1akY17lEOsyqtNdzudgpF0LCq73qcwbrpO4w+91b/7o8afPEC8Zq3ZdRx - XHFvOP6DeupF05lVWj68EFehimoQ8Cdvha1p/fDf+6PvV3cNqRlzk0c9IudQvBKBOnvFBeN0nxIA - XeIjTmhyfT5YnQR//PDrT07e27X01fNf/Ms8Jl+r8I9f9rtzkk48b5wlphcmtTe3AEhqYppwb28Q - eXngkVKiyzbYOsX+u/5Pxm5MM0HT3G/Y9slVX/NV4WBiaD21T4M2LLV+TsBW1a+kEbmNTi/Eb+A3 - 3yKNW1x1FqtlBjMQ77HHPjZgMMmLn98iixZZ0eLvQwkeBz5Hb79FlTA7Qwb2vZjg4HTH6dy2RvHz - ixgZehPR4LK3QXQQRnSvsFetoIHJn3zH5Qf477wfXYozDsfTTie8YvegiF8vqo95xWbOR6EUjHP+ - p9+Zm157+M2bcHBtm2HW6lfx4wMCUjVj7MUbGYyflkUNd38CX39W/PQqNgUYDxNBtwYK53CLv/1f - ffX1CjXQulQTtnswajcug1Z7jqkOzkd9Nt8lkb55CQ1GF1Vrcn+X8MSkAmMUGmDBYYXkoYMBVp+X - Vf94OtwAo/xY37xQjXj9bMWgIDHG6ubcDaQDAMLhbMxUUZmuE/2aCNI8+AvGdDlVHD/cXbiOJCO7 - qrNS5mvRGXYN2Pwbr9bQrWHwvOfYZmURDddo54OttB7J/uZM3neecQZke7z++CRaCTlzMH/HFjJv - Wl1NBbJD8IoZR1VU3j2KrRsHF/HFCKcOacVu7UOB59jYEt6wh4FtIwLB2x9T+usn7mBehZ+fQEsg - fvQ/fHW1OPnrD9+ekOhZDgNdr7HhW260s6rwBOx9yX/5m09pFaYGfCTdgSrfPGVtu9wFwbjmRNJQ - 6X35xYXXvXHB5xEzNnqSQwCNDyp1hE+RjpckgBDGTKBKzgeMbR13s7+oq0k9fXdP2aHxCni/kIo6 - aSJWS0jeyj50ui395qdsXleFQP0Djth+vEJvjZEd/5mHAG/0U25zdXP409Nofr7BEngTARvk6tj2 - 3jqb/ZI28Ps+VFx2LZipOJ9gHwIfW1DR/sx3ZF7iLvRSF03ajOvdBaTefci2McRoiVhfwOvevOBA - ij0262MSgqO89NRx2+ew6GccA+8ouFSLgyJa3PlqgvBskD/zheXrzyX+rIx/5gtdbQR/9BzWw/nu - jec3UH7fk/qnCuhEstdG/s5LfvkBm/mLbQJO2dlUHT6dN2eXroc1Ss7UHXVTpwk9zLB/bCU0SLQd - xjXU6l/eSf2lXKrxZq83Ob/KFtrMzwPgzHdP4FU5FwjwrzmdQjJpgD8eUmyUepwuBVJOstJgiyor - LofdZb/voCVcj1j1gJx+8z5NynenCIFleQ2DcBwb8PVf1Pvm/eNhNslvPoLV95gNM7C6DaR8ElBU - S7VO5jPOwLmHGVZkzoiWGqvZrx6sLAVJv/MqDlbKohH5fIzB0igfG26QrdNLplbRPFEhBz88BXfx - w1YV9yYsicAToG5P+vjbj++i5r7zPNGb7PhYy8W6z8gbI/dPXgRvC+fRZ6ca+jolWw1883eypVoe - sfCd9NA5yU+Kglmpdms4JHuY9Tfsm5t7Shdb6qHjPVT6+FQr+Pz8JRC/XfrF/z/5tRQfQ6ojpFb8 - +i4FaGbdGStfPOieaG8CDbxdHIz7Evz0mSRcUUjKbaV6/OtMN8A1/N2Xz0qdgmXu5EUxMLa3Vum9 - el5GEu3UAPvnwgQfUXI4iAsUY2Pc2Pq6As+EQYsC1GBCopl/jsYvv6L27IfVgtdPDDUFXbDZiAsb - /WrugLwPG2yUaz9MWiWOf/ZLt3fjgb2FQ7c/SMTG7lx+9JXtzBl+uOqI5oExQDh37OHxfFGJwPwW - LJJTJbJb1CvNv/7nm1f4MC9vwne+W6bsFEvn3++xkfFbj5GXAGFwCVUaypyRLpvNNv/x30//DMQM - 
qxMYWYfxk2qb9Dc/A48zq8jwzW8moRk1uGseOfZdNkbjPlZPoNY3DckyKg5UUu4a5DvJwb5qnjy2 - e2sJ9O7nFqei7Xtr7O9CuT5tX188GdP5AMD6q5ca7XGqlnuj3+CnbA/Y2EPire/xlIPUX59fPFfS - OSuMDl7U2aTaN0+fc0uJ//SPfidbfX0So4T31cxxYHJBtZj0Iv6Zz6pqzfTFS0wNtmf6RGJJ22hh - e7uHhoMeSCpyIxX6q1uDZ9QhwuLJGPhhuCN4d+IOSb2ZpH2ZttJPL1KsDbpHIziKv/pR/XZ4nfm3 - MYGynmvUu4sOWKwd6n/4SP0tIX/4EP79OxXwn//466//8zth0HSP/P09GDDly/TP/zoq8M/kkfyT - 44R/UuHPSQQyJkX+97/+fQjh78/QNZ/p/05dnbfj3//6S/xz2uDvqZuS93+7/I/vu/7zH/8PAAD/ - /wMAVLQZa+EgAAA= - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7be211ab1afd169e-SJC - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Wed, 26 Apr 2023 21:55:16 GMT - Server: - - cloudflare - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400, h3-29=":443"; ma=86400 - openai-organization: - - user-adtx4fhfg1qsiyzdoaxciooj - openai-processing-ms: - - '172' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3000' - x-ratelimit-remaining-requests: - - '2999' - x-ratelimit-reset-requests: - - 20ms - x-request-id: - - 5b810c395d48248e473bc4e53de0eafa - status: - code: 200 - message: OK -- request: - body: '{"input": ["Sample text 1"], "model": "text-embedding-ada-002", "encoding_format": - "base64"}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '92' - Content-Type: - - application/json - method: POST - uri: https://api.openai.com/v1/embeddings - response: - body: - string: !!binary | - H4sIAAAAAAAAA1R6SxOyOrfm/PsVu/aU/kpFJIs9AwG5miB4o6urSwQREJFLAuTU+e9d+p463T1x - ALGiycpzW/mPf/31199NUmb34e9//vr7VfTD3//j+yy9Dbe///nrf/7rr7/++us/fp//38isTrI0 - Ld75b/jvZfFOs+nvf/5a/veT/zvon7/+NuPiQ5nS1Anz5acM4uJ1ZNpa6bouyZMjLJdQMZs0H071 - gcrA3keVHUtt4GxxmpbKpd8Q5urcTppOaEXZRcct8U/hJ+LRfchQhzZv4iqmnoz4sN3CaV17ePPq - P15npqcc9Pf9yLCTWmi8r4mBLoLgMxWNqKR6tKTwcM83rLRb8OZ4jLcgX4URN/ux8KgSvxtUsFJl - ji1XiBfJs1dO/JwwcyO5+oS8tQGQGluyS6ibDOB1N3AIDfAQNarXy5f4iE5iTpgpd5bOk8GtoPH0 - HsOptJOxux9cqNitYI6nd2WR7zcusGJ7JGpq5px/KtyjfIMRUzeG7bHJkVto/O1AVxpqk1m0BRFN - SbH/vdeHi6MBXGZXI/jlC5wv2PWIOOlvTDvOQTJpbiSjOiUWBn67JNy/Xn3wyyUi2HaSjkVXVwX5 - yBFTYytLhsxY9LBYXp/EROPEh9rYuqjFqkWiNiiSWeR2C/JN7Yl/Pr8iflyNAbCk7Yg1r4/JpO0M - G8xTsGGOfa35tHx1Mypq4cX2/uB59LA73SC1IoMR65B3w1IPd0qRVwqx/fuKT055zWVf2nV487pf - +fRQ7Roqeq6JqepFN54Wnx088MckesuJ3t+PqyvE75tLZ3o8JPMhMlr4iF2GZWU36/w45bmSc/HI - dLy0+GQ3VoMEB9XM552YTNkhCiFU45KiTaXpq9qOJbg894jo7eGgz6zoCpgyqhHttEbelK2XPRQv - pWLO+/XoRr69jhBErUy2w1R11K1iEbQNbYlNLi3nPpF6KNr9kaloj/mYvhoK8Ts1iZvG124+NMxA - S6FGeNwYjc6mdyOB3jIdo4Sq5ci3wQzWLgqZKgPhc56QEbFzLBNVjktv1q39HQQtCEhc58vvfCOF - o9V/8GrLRG/EdZWBtspkorfhAk3Ds/VBavSQOc6ClJOmNT3o7+xIdPKu0LiT9mcYqLRlD+uglnxx - WI0gLsUdMbfUSqbMWFMQUfDBrWaXXcv2xxEuihQwa65PUecf7zbo/RIRuz28k2lJ9XAhn0WJqcl0 - jJpTezuDoF08st9Pli6K2b4CRweN2K3aJDRcbRuI5lRiu+tcd62elwKasl5j3/pG7RvWNdSZb7Mt - fY1Rfw9e7YabqoXFSyZ7rHhMdzADATOrZ/eo74bABr2bF7jE4Qf1lXWtwdkqKv2eRj7nPZ5huoQp - 3by8mI+ysLuDYFaIHA2ZdsxE3oyGz+fAtFVYc25eZwOma3JkhlUZ3YgRwihXypoZxUg5m86rCriG - MDG37Oy1b+loo/ipFsQx4lXZiuvgKhedfyJeLIlRf0rFUZaKpiPGM4m42BQTVRabycYoyYJo5NOB - /v4PcR8z6bgvhi749InpSp/LaKoX8xHBZT4Tc/Pu9LES4wJC9dwQuzvPel9ZCwEtlrcn80f/xbn0 - DnuwrBMmuNq9kikw1ZtS9ZgSo3rb+ojjZwHdOtdYGNmWN873cwPB4aUwL/G3nZhnkg2N2ZlULoYK - fUBUJZCjzsALYSGgQWu2LgzDAqhQzxKnsRbsAMKsJbozyeWc7ycXILUV4i7GRufF9ixDcmcYr4Ps - HU1ZPM8/fGV7Zn6i+Q2LCqzj7sFIwaSOfZS4gGY3AXMVKU9md3J24EK5ZNvDAXs0h3knT+mg4sXz - 
mnv9/Vk1Pzwi1iy+SiqkwRZEFH6IK9wT1M+fvQyOc26Yux2R1/zqoz6PFcPv6ovfnlEp/ttsmRXd - PL3vXv4ZhdbKomsehwmT4HMHTRFuzDEQ72YRoSUSsHOmrMjijn+UAW+++8H2YWR6IjvlNTQ7puFl - zVSPhviDoajhxbCzeKFRXtUAoowHLO1x5VF309TQaOcb28X6q+PJZ1dAEloiXqFHyHlSdTtUPJsN - nkRJRZ+l9RTBneaantTjs5ycMijAIujD9M+iTJh0kEK4CODjObotEY2VRJCDu3tjhhWZnRhebCo7 - 6rzHyzL0EV+wdY9++OBPgcbnHPtLcDe+S2JPtEs+mmK1CaP8Tqx+LyQ0dz5XFHf9iuz7riypgoYW - tNHBxLs5p6TvPmxEl0+bM3OzufJZSbsePbaSSHR3JZaTrYU5DP3KJMbr9vL4wjmHSBNzkcXVxtep - aIsimPfpitf0dI64eSyO8JkuM3OzRdjNwlHK5STOHsRVmlGflVrH8P0+Mcpli9o8vNfyZ/Uu6Glj - 7vUex88c9I4azL8kozeHF7VHx70VEPyObH391SOQC++CrjYXmXfSPi9Q8hAt4s9rt6wF2wkgCWBB - jAKPnPvHowvxKwjJtbHVkhejlAHs9xIhz2uuz65wodDo95Bg761EQ/2UrsCSpvutVzc4t0kF8+hu - KRLte0lF1Li/98xUX3k5yo9RBH+tMlz0fhxN2UnsYbrdDySklfadb8zgNOHyWz+G/hnmjw9SZ3vM - /uy9iPu6k/3wll0/+y5hIz7vIK4OB7oOG+AzO1sCTJHVEFzPV9RF4qWBKr8/sBifomjOHcVA2ux7 - zLxXbzSzQ5WDX6xlvDAdu0uHNMxA3NgTO4ufUzmzMwGkNy+PuYqc8p7vmkJ2hWpippycEfc/rQSO - fXl/+dLlfKEKM2LpOqPyFqceP8qqCrgPJOKPiwKNcpSGsHKDgKS+N3j88yhDkLPgTgda3fQ5ViIB - vvqRuIVX82bHXiKS6FXGY2xl0TQcZUCwJxJVosvTew16Hij44y2wEm5rfVhelwVoCtyY8+odfT7k - VqX85ntYIYt6y5Yo4qaj0GpHnG64dEmLHt7jwH71Ml26qIVsvX8y7Xsee2s9NXApA4lop1OqcylD - GIYqaJlh6auE6hdpRkt46My7lkn0Pd85uOtdSHaryuXf+rlDa2Qdc9y573pLrSSAm+BSCWuat/6U - fAuhvdHxeNcq3ortx4bUfXLm3WCORouFLqTODegkmjqfbHzK4LHPMUZXLY34cbiOkDyWFvEvt2fU - p8dnj4qWHOnzIhgRT+5e/dOfzBNbuexlPC3Rg7S7P/px/vIVGtomJLYvRGh2XX6EBVogpjdtk/Ai - cjGqgvLG7G6ToSlQxxbqu+cxvbHdZLIVl8KXf+hnV9yT8b6oa3C0jUGMZx+iL56KKCRhR6xg30R8 - Go8GxHXsMFdXu4QVl1gAwZ4ZXj77GbWh0rpwNKuJbJmdelOw5zUAOY3Ev7gHfdaHCIOZtE/K2zmP - ZjHdSCh+XfZUXjw+Ho21coYpkFdU1lqtY8U2k+WhOr2Y4wg5n4Y4u8n652Ox3fmWebzFQQWPrSxi - MTaW0TTQl/tbH5qjMSmnpR4aKN/4CE9rF5fjPRgaOUkpwYuyWHX9V8+idN/0xLDEORq0amfAdPaf - uIvX725ava4GXMpQYubm7ekzy9gd4vqI2fc8cB4NfQb6p/bp0vSGkiqL1Q0sO3EpHKebzhedXYPg - CSXTRrLpxmpOM5SNqw2d2XUfTTVqayQf7gfmCkrnzfn52sIHKS5RkZ973/WRwXqIlHm32zGZw1AC - tMK5yqzzKY/4Fw9BTrwSr0efdLxorrAR5x4T936CbsS24KPlYpiJTTYNnxkvCtTsjQ9JVscAfSCX - baS3rU1s0jh8GFqnRzGVPsyTJsNbT4p+Br6VCEZJ+uxoeLH7P/rKseprMv3wrd3VJd7U2NLHzrvZ - qGAv9PVbTVL9+Lwxcky0dbLv5t/5CPcuZ+5itD2qy5kN3So1CBGE0euKle0jTR5rYoVm4M1CrAZQ - ZxeBCk76RqMFzg2ksn8zxzJuyTQsrnfw11tGrOnZltyY1wDhdWkz/3LTIl5MlyX68gtumhuN6rzf - zUhvioC+pNUdfWBY7aAo5wlPK81Cs6h6LdTH4Y37+Yyi/ucXpvh2wtxWKPrjh8w7v9I5SLcJX3Rq - pfz8sB95C8SK3LTBfzKdblmLvO6zM0awrmfCrKhrvTnWAkMxb9WLOK9hjYbM5uMPr4n79UujZY89 - nKYoY9/zo3dmqi3htHyEXz6Cbqqf0g26Tb9kN2UX6lPNPQNJHzViu1tMkqkufEOuj23P9HaZ8unB - 4xDkxCm//m1bjph5gLhznIntK6beV9cyRNVV27D9F48nW9n2imWSMxWTtIumi2nK6Ov3qKwhtRsr - Lz9Dks0eRdeiL/lU9iIMdZoyN0ViR396MTV3B/xcJ/uS366kQLg1TKZx4ZbMYvt0Qb7bFdnj18w/ - l+VuifTX8Y7X3DK6b/3RP/ip3pdPNNS16oIZEWCm1t08LpmOij6Kvvvhi859/WTDgsc5IxB10aRt - NFvh6jpkHhrenEvn9xXgivZYweIBzYecVJBLhsFUGT31mdUoAItsPsw/Ld/JzBjN5K9/pfzDuN7f - bWwjcVHEVIzrORpq/9lA8WRLvFhMe33EgGXQm7f31VerpNV3mxmmSB2Iq7y1RHybDxfVp76hMJ/8 - jhfbTNqw0ghYXI8vb7qcNhiEvfogptbJ+lCncNw0ZuV++StH7Q9PvviDV9u9pLPPUt6Cu/b2+LWl - 74j+6hUX7E5X8s3Wx1k2JJQkXciscCdwKp5uN8iVZ822Ufvy5jgU7jA8DZGosl15Q5ZlGPT6dqUK - 1fbJKts/MEot32HYMxdRYxWtBJemrohhXJ6o5/mth6PFK6p88kyfLmqbweUzJGy3vofdeF9bOyQY - mYFlRVZ4g+NPAUNurhiuA5lPMG1scMjkUSYoWiTqVbNF4vg5sH1YfX9P3apScFOLn5/Ue9ktDBjy - rUynMZc6vnDcClhoERxszic0DcdZAMHsRIwkUutD0D2XcHkXDdutIo83cp/vkHlJe6YTs+bcX5hn - eSG1B+aJ1sC56We5nM+rCEs4zL3RAu0KwX1/+sNvLTtbAHVYLJmaXnA3u82zBr3EFV3W2dObanVn - IMfCI7M/49Wb2QlCJD0rRrYDqcvZXV1FENfbDSOCEHh8VC4qsDo0CDG7uuMmtZeoTl2fOdWO6WPq - uz7EpXv74mPjzQzSHvyXwIlRPAjinz7MlWFQgDQf5iTc/GwM9OUbslu94nJaXvMlXMoLx3BRsE71 - j35HjRafiJpeaMeKywGAb2VCBe+Yo/qw3TfQLRObDl89N7vKeFa6zcSYtipNtMQGvcvizB3K1Mst - 
6Xe3yw6q++FDVNmZOTNF2P74A8fe61NOy7tU/fI45hbeDo3dVbv+yau0i2AkortbBOsvfuKFIX3K - H38r4lidibo5WfqErvUZjo74pCv0mBErdqEAHSjlHz081CrewXLBZppGu8LrO+/mAt/ZAtE/fh19 - /awE7fbMCbFKnFB9NfdKMZz3+Hnxz96k7S5LOI32m2lHvtAnyGcX8rUjkN95o2ES3OHhBBlLadkl - s1hoLcix1jFTPxy9wTl/QlnqLwpd05MY9fj0yoBVrs+sAK91JmFLQl8/h8XlNPAJdE+GKoufRG+f - WjIEN6GAfCX4THcIdHxUHqqsTTbB0rd+ukU9ir88gQqO3yDu66krL3iS0wAxn8/NW8sgicsbvkTK - mvP2PBqyHFxPX30Rd3zC26uymPyBmAhHnEvqJkSP/duh4s3Syok46xyS2+v81Sv7jpuv1Rb0blzQ - 7nZbRuyjKVuEh5gx/BqwPtWUnwF/LIP5p+qiDwM93JHU7XdY8IygG090NUMSSAaxmyT1xnTuvvnk - tmc7cWcj/vMvjEkJOyzHhlMlWN7l1szfxPb7nT7+8peYwYuYGU0jtuCNIbc2TfDGfQ5lvzusK0ju - TcT8izvp7CjbW/TNa+gru7NuGixJRa0veMyKHN+bLoaMQc7CO9vFYlEO2anbgrMj7JsvbX75ZgGO - urb/jBf1ive//ITOdJV3PLmgJZKeNSPGNx/48cWmqihh/sRxt4F7e0PDsNHwdNrmHdXzEqDOsM3M - dPfi07CoXCQY+YJsWZvocy61ZyRsE4uZmbDvZn0qBDiBMNPkehjKnguqoCzE1xHTmvQ6Ty6NDR1C - b2Ju0yaZwya5gv7c9RR996e39ncXnTazgKXufdDFePOQZZGPLtstn0rHzIAGv/XBEj4q5Wug8R2G - 5nWiDCo34v6RiWgKpBXTjqT85gdCBc6+upLIdg98VuJIAKnRQrbvHbOctJBfUdG6MXO+53tMXzmF - bvVcEH9aKsmIMwOgqtGWKrTsojHVix5S5wq4XeRrNNTrcInYrWZEO72GhE0SqiHIgpAY+U7iQx1f - ZXD8/M5cPYy98bffjmGumTatonL+5rmoPrI32++DNR9q/9Oi4LQaMJy2sTcNiyBTjsZr/OVv3rCc - 614W1ANhxlOZ+DRYowoPy62/+PLWabzxfWD3qaCyZusd+1S7HjV7p6J5esGlqK9kCvl6azOiXM/e - mCLPRt/1JbrXRH/8FwSnecbfPAK9Lk7aoHZXlWy30pNuWraWjz4LVyDemooljc9y/sMH+pKijM86 - iUP0WZANwy8v8L79hCXIZe3gEbGeM+mBpT9+qoWt+NW/doDM+FXhMW325RK34Etc22B6NNMkGqtZ - G8FKcp1Zh6JMVjUqakjVzZ14sbmJRvlyOKJU14/E9oclGl5ICKDRkhMum26tD/YZBFS9QWD2fhrR - aDn+7ZcvEuwZY0e/+TZydEFjpj7rSf37P/izdZh1KEN9WOrDiHA1xkxvkJWMeN3doXjJH+bPxlBO - mtJXKE5RQyd+h4jqpBdkM10fib1/6N6PL5D8OF8xzMa+W5tUFSG4SC3NbJA6tnC29Z985scPc+xm - AnzzXCwvcOuNaRCDbEbGjtyE3vcmzU1kYHHB2TZ4LPjnh1/C3rzRBWz9aKyCxx1+ea13PezLOW5M - H5IH2jIi3GdvGmydwkli2z/1w4tKw8CYnBAC2z6Z6rar5aHEFIvrvuBULLQGzPN5Rb55hcd/+/fr - T7h3HSMq6FOh1OGLMVc9r6I+FfkVllxqWOath26Ol5ABy4UrM+9bB/W4IxLgonli3oYPzvwjWyLz - iJWff+d9mmq+chr3BV5889hpeYUCkmy1I/Zn30UDyNVRyvmGEFXiH692ib5TOOEn5t3iVyfqpAf4 - 5d3qvSzQF5+O6Mv3xJS0yXtdiqUL4WX2f3lqOSyvjYi++gA3jYK8lqmxiE5SazHtOD3LIXi/RoBj - l/7R780JvVTQ6zQiqhRuEHUV6Qj5xlXpOjwzj+qkAri83yXTRLPkc6M2qtytuzUVvnzDpTO7gT81 - NsPuXHDmz+cd+GPlU+rpT8QWEAVKt7o5dJQKV+dRrmD07Rcx4wW6zj8l2qJsc4kpXIaUT867Cf/k - Yf7Xz021/2lkRx333/oOuvaX/6ZbiNnRQGnSjTjbQeOrA62+/DjreRDAdDQ+jCywGolhpbcQXkYf - D8Ne69hxNYagjbsIr9Jr1PHjSgoguNs3jMRW7vj0cG4/vfjD53IY7McV4ue2IPs9cTzacKlHSaii - 7/55SS/vbkeIO7oiKsJpyRdvo4DHzmiJxdEYcfOlqBBcFi/m1GTFe5mYBRQdPpFfPv315xRVzzwh - JF+PnBWXVwOCI3aUN6399cvB/ecfifnNU2elpc2PH/H0zccG+BxakLPThRGl2SWzfhlnpXGkFUv3 - 01sfZ+/RIDlIAyp/83++cLIAQt+byZ5d1t4vfwdrP03MMHZbPmTFIADfLlyyjZwVGpaWUmxaHzxy - 4ShIeOL51fq0KjFthXvCp0tpiZBi+u2fnMqIRVsewLcfTB+WNib9LPsydKN3xcu8OaM5D+8Vqh/H - BTl885XhmwcoR3udY3m7p8msxKxFzd6rqODyqZxscwXQLbY5Xk/zp5sCpxTh+/6Pv2kq+Rmib/+T - bXt/k8xMMoVfv5D96bf+8tpv/kqMcnsv52+/GnD+qsg26jgfU6S7v7wQr7n8LLnvNQJ895fOga8n - 4q+efDgmTP/4u2QthZ4AD02U8Rw8HnwIVKkF6yC8mauedt3MHHOLqt6n/6Vflq2FIYi1J9O//WD+ - 6WuMgsTI2DagWjJibm/h79+tgP/8119//a/fDYO6SbPX92LAkE3Dv//7qsC/b+nt38ul+G8m/rmJ - QPtbnv39z39dQvj70zX1Z/jfQ1Nl7/7vf/6S/tw2+Htohtvr/3n8r+9c//mv/wMAAP//AwCkxTP2 - 4SAAAA== - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7be211ac8cdd169e-SJC - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Wed, 26 Apr 2023 21:55:16 GMT - Server: - - cloudflare - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400, h3-29=":443"; ma=86400 - openai-organization: - - user-adtx4fhfg1qsiyzdoaxciooj - openai-processing-ms: - - '114' - openai-version: - - '2020-10-01' - 
strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3000' - x-ratelimit-remaining-requests: - - '2999' - x-ratelimit-reset-requests: - - 20ms - x-request-id: - - d39b785f954e1f1cc64746735c44b522 - status: - code: 200 - message: OK -- request: - body: '{"input": [[18031, 1495, 220, 16]], "model": "text-embedding-ada-002", - "encoding_format": "base64"}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '99' - Content-Type: - - application/json - method: POST - uri: https://api.openai.com/v1/embeddings - response: - body: - string: !!binary | - H4sIAAAAAAAAA1SaS6+zOrel+9+v2Nrd1KcQQvDM7nELECB2EnItlUpACOEWrjbgo/PfS+Q9OlXV - WQ0WS3jZnmM+Y9j/8a+//vq7CrM46v/+56+/i7Tr//4f87NX0Ad///PX//zXX3/99dd//H7+f2/G - ZRi/Xuk3+b3++2X6fcXj3//8Jfz3k//70j9//b0V0oz2TVyG/Rt/ZNCN/sr2Me7CatyjC9C+y5mX - y43OQnuxAIvnCnstoOdMy07C9hEVhJkt2odNlyJNVtPEIEoy1f743G1jZLXFlxxOSA8nvUg1eOgL - D6/UpEFVYO8qqKnns11X2uEU90xB2Ft4zPyMSGcv90Ihft0eeDxtwR3erNbgvRZXmEq31O0M8w5I - JkRhxjHIEe8+Y7d9RWbIdlbj+NPo3A2Quk4le1l0UBc5bQCvLtjjplK3qPWm5xE1kkGYHTeWz+3O - yWFydYrlGuxwuiw+JoTHRcZ2TVRl6Td8OhAk2oUYtvvRJ+kSd4jcHcRUPbfCro6bBnxjonSpJE04 - tCqWUXROMbOc3spYFY0ShI+FQry1v8jGYv2IUOccQ+YIx2M4Tu7FRAGx9ng7VPeQuyf7CJcSZKJc - 4jCkyzhQQLqEiLkv7x32RskqSDaPhOzS85ixM9I0VHmKRZ6SmIaDH0kNdHvoyQGg8CfxNFzg2wYt - sfB0Caf2k9sw+ReZWXe15MMldkVkyreCkbF1EdWN6w1sj+gMq2qC+nCfmtsXdEDw/rjiUzLYkYxF - 3GGBWY9sqCK7hGp1/hLvEqUtX/tPE/L11SRe3GLe093uAfhauhSx4hSO6Gk04G/ZG6/XdNIn7g3J - dp5fpmwDi0+X+JujJOFftrdKMRyXb/8GYV2UdHVdqLoYWE8JTvtaJkZQn3Q+uSiF9zfV5vlC4bgR - 8w7sbpWzgzC926HcVBWoaikTbJk5olZTi0BisSE7NWn4KBC7g2TcXdkeENG5nlcUkmo0iKkbj5Cr - KTkix7oBXlzflc/Kpw1QqkTDqA6VbHSPiQif9H5m1nlN+HjX3x36jU87RZnLyfr1AMaOhLz7j8BH - fE0aeFzyHi8sU3RHvbjEEPeLDVGOzyWaKmhsKJ/ZmR0ih3C+eD06cEfxQtSpydGkfrYBaK6tsVhV - Fc73/XWA9Vk0yb56We3gHO8lJMmQ4rpLM7c5+JcBmnd0ZF61vPHaYZEBBCdA9M/lGw5xr5+XU7uQ - fvvRr4aqOUN2WXvEWJ0sfdUohxy2vqQRUkeV2x8fWgfJ8bVhujfmYf2muYbe30RjhH22s36sG/hm - jz3Trh/G+3qbxZs+j3Z46M0NYqwcE5CxfGAEraKMZivlCNwo15jN9UPH5lGCflV12i43Q8bHhk5w - 501M5Wx68knTzQgu8QWRpziysJNW7gLtd+OJ4ckq+dS8NQPOYebP+mCgIe7dMyruyy87VDHT6WlT - 5CCevpgQLTmj+pHnDxQeISNeuVrr1Uk+drJbe1fiFjfR791RtGXnXjXE2Nx8LsjrD90GZLfHWyIe - /eEbjBTmeifa9UMQx6KGwZFOLp3IOfPH3XfyUH9x7kTvs1bnlrMpIV7M9dIbk8/ITTFBoFPC9C0U - fOLdVIGiqISYhViEw8M4BttPc2PEXTPb56wcU8jXF5Xdj2sr5CtW5tAL45aZ1l5r1ytS2UALZtFl - /8naSjwdJWgsd4dHWViEdIsmE+J+uaCLd73Rqf86mlAZWkOceyxnY5d+HOi044Io62OlTwK5yRC/ - WhcvV8PXn9zjRKG+Sjr76f3gHN85fM7am3nOQWq7LdTpn/Fg103C6aefwyMT2KwHqLe7SZPDsTDw - 8m0nLsUroQL340lErZLCp2hz1GCXxA1RDCdyuy30IlzW54p5zJBR4w5SCoF0ydm837OBlkayrV+b - hpHi7mUUbaIbuu33Jt0cVqeW7ftnBMiOQ2YefN4OZ9RK6Js9Me3d7hnyNzk0m76Lv8wtbjt3/TSP - JTRSqGGx2yltx6znGY7jpWDEKAo0nuRSgq829Xg1tBnqfnpLzDRgankrWp6exRLC2pcwum3PfBwb - V0OhUq3w4nA02sZhnwbm/kGjRv74g75TYjDle8P2Ec4Qc0/2GV764OLt7rsK2c4ORfkl0IA5Ltm1 - 6zfZ3+Sz4BywKF88NIz7dYXMVLvO+1nNRuniCSDa4JDL7W3zyX89jM2oRhFxN8IWdc+ifqCcKStC - vveM05RvKTiWisk+MK5hf1m8BfTbj/YleerT4dJW6L0WVsR9e2LGX+05AV58TEKOm8Kd+tzEaOvm - Ijv7kZt1j/wmwnMxhhj4/uYPYX2+wOPynZhGjTMa8bWq5HMmvglZD4z/9Af+jGeu//YodWfZIfpA - L7nk+d3yrUaQm6bBcMYHd0DSkKO2vp/IzA++cNyoAizvekqHZJKzCklDiSpPs4gpP59ZubnWRxj3 - 0YKQNhqyuf85IK2UMwn03ZL3xfoRgxC/1mRXRYk+bmFNgS6UM9H8CHi/EasHYNHrCOFt5tLL4mOA - Oq2ALs0m1PubZmMQn3n72+/+9CwVEZhZnXCyPj79wfbKCk7N8USiZFD5cPsMMZymMsVTu1zy8hzt - vd96M5VwVx+W730MpxOo7DJ6bdsno2lCFxSYShEDPsrrtQxUvFfEcw6Ptm4SqwPfi154MXl+xhev - l4LOgn1gh4P4RaOqCTnUi/UGb+7L0c3iwxTD3elGds3vVz6qHyag52vlMWVzfWXd3M9l9j1OTK2S - 
WzucgkACTQi/RMsGh4+77yJFSrKM5/FEaHzkiQHZ1ZaJwb0UTW3yOkNRHY/kWt5798/6irX0pOW8 - HnwL+gLk++dEzEop9NZyVjK6Vt4aC+cgzrjxdQY08w6F3vi4X94r3lZVv0ss+o/SZ+7JKOGLuicj - t8VeH2lhJdvNoTqS8ChRTu1WogjQWafFzJeMmYiis+Ce2T7a2z4fa7+EUn59Zj5YttU3GDvohUEi - 6q6O+OBNyIH4WtVMlYQV6t1jJaJw/BrM3txDf4jpLQIviM8EyzfnD89Bdy47Zh/DLuzxNYcf79KN - +lHdNZJ8DdzjxsEbqyz0aqw3NnTnYmRqGU7+9NPjmU/oamh1Poz1Kob5/8EC0176cLxLAiiPyibK - Lf74VKBqgtbntUsTphr+lPqohG6/6Gc9lrN+fR4rlF1Ei3jpELZDIZ0mVJDqRFyz9tvxWfDjH57U - sViFw+Q6GOVx+GSO/3y7QxUpJfTh2mXKJXbcUTw1E9BSW9HWqKNwbD+3Eha7vfGnf3KjQDKKD1pD - Dm5X6ZPl5AoE395mdsLalsl3R4PbZHK8kldC21TQODDvL6K/yMvlb6bHgMXDSBRmnvQJ5AxDpOGE - LuZ6Gh3yBFR5ukflZ1W7dGqzCcxGW1MxG1W3D+x3Kn/Oas4IFhI+xDSO5ePmarH9SY7Dmbcq8M7x - Co9WLei8ywwZiTK26Cc9hf5UrNMLGuUOYfGe4mwE+VXJd40SPIzuKqSltMFo3A8dOQj9xGlYiwYE - lSLi6pJ8EU8nyYD6UK7/1O8Y2O8EhKWB2eGxy3SeHKMUXvrk0mnx6rMuPuxiMJ9sT1FfBPqYXaQS - eJ9mzEj5Jhy1/FCioFIRXT71g88H2ynRrB9Me5ZtOIpjVcKzfLnEuLyTkF+VvQwPd0GZM7gXNEFQ - G+jHB972kfjjRkwCaMc2xZvhS9BglNdh80kvB2Lu9tBOOxsf0UHYCmRPVhWf9fqGdknUkPtye0XV - 5DoeSgHbZBfYe59Obl0hcfIapoyu4Qq/+tbAOGH0tD4he2RSBcjmmBxi5xGOhll7IOU0x8LMk9P1 - Ldto5gOMLtsmzNW0SuGQXzBRbhl2h199WFzmbH9d2m0/tbEH7LY1iHvXaFjZXWUj89N9CenLYzjQ - UvHgKfIFleb1mFbHOoajYlTMQlIQ/qnPKEgHcuiyJuPZ9aEAuQk2s9Fd9Yefn3SP8g73KeG8yO+3 - BXrp8YsmyRS0Ne8KE4S1OWL+dqx2eO7aEtTTpsddOqCM3dTHBME9uOJ1caWIm3WVgua/H1S8v1Q0 - KIpSbU/PbUW8ZrkMO/F09SDrXw51LAeFlZrmAoQbmTDHchp3UpTE2L5vUkm822eN6BvzAZrBmP7w - ItfzpINZf//0m8ZqPgIsv/cjXaIVtFN6rgK4PSOBXa+Lsz/peWgjL4jOTHUpafk18RT5lQcd21Wf - WB+1fH8GfVXnRPsMms/7PBxQOEoTca/ajnf7nt/QZldLbHddfFze6dqwvWvsQjdD1/r81RUU3VGt - 0FV6VsIJr4Yb+N7k0tXUdj43606El9C/mSb6YtuN+2UF5jO44SLQD3y0WhajKq9MhvduMPPiyYG6 - THJyiBzG6+uy7NAYGMEffuoPl+cE7ecqMC8ZPohKwhGDeyLbP38/Ro4EqKaZSexj+MrG0dl5f75v - 7vatPy6XJ3v76wdzf+TDZ7IeMOsLXm+kE/rDs7NfYZqAPz4Xj+gC+a1vmAPBN+TfkN3kMbDedP05 - cp9aDbXRbpgeVDDcyWeRoyYgKstp7meHOV/Af/iZON/1Kqx/439svZZYfa6GK7xiJlJspaMcjx7i - SHpfNrNfZGFxK9y5fjAMivcmeO/KPq1fyNsYa8+Z9+8nbMn6GkCoNCs8uYeNTu9aYELqLI+4EY/f - rLtnbgPukwRUXDxtf5j3G7q3s19d8wVn7ugEMOc1M/8WLpcE+vjDY0ZEc3euPwy9mz7oUK4P/zXe - 3/d30WHBqx8f3FQnJ2pgfRCN+6aDxSLM6OIUxPp4XTYBmM9nyHZEOLfD4iXKKOvjHR53NfBGoPsU - Fovd6sf3nIvDxv75U1rN9bmaeQfNvMBs75TP/NU/JA89PniNJNlnxX0yQA2wREXdkMLpwIMc9jQk - eOYbNCZskuGe3Vd49KUyo/ZBHeBJ44o5xdrjdRMPGGXFq2d6G5d8LNaXSQ4/5xM7zHnX6JD4KM98 - hLnTJe6U8vHxJy/AL9q51awfcErPAlNjD7fTyztREEwnpWMTftyJmaWNDt1tmPOWhzv2GdyQ/E4o - wcKpzKZnaYvwFKcN2zevozuo2leBmV+I9XLLlleoEv7kY1g8MX1wmONBl8oBs/GtcodOew2QHGOB - 2KlA2ukzpcm2PhRAisDeh0Ng7y8oFn1n5uXnzIfHDk4yWmO+mjCnuehHqCD1bTacNOyS42RAq1BM - 1+c1QcXcDyH4MptWhtW7/OUeg+0lrunsV3do9ZufFMiBNpYThnR07uavXggxmilj6NmZaHGNJ3y9 - L+pfXpSDheQj209fyx2execBfv/oyN6XjXA918talddPvNDV2p/iQ5Jv1619I7jdWvpIy1sAxX2R - 0q3QT6jnXrqAA9QZ0d/OA7HlgmpAWGTSz32buj0rAxOW62pB7NEu/dnPSr/+QnaBfkB0dM7DdlTj - CKfwuoXjN7QkuFZJwch5s+CTvz1jiPvtghj4mrR0zqsgaoSY3Xb7NhxzUZ39/KljP75i2WXvyL3A - t3QVH0S979Iihra+eczKrmu/vyzuAhJr+YmHqO/5j59AZqsPcT2uubQCGsMquXnMQDcIp0vMcnnO - K/FiZ8aoDquThnYP50mXL7tCI90dAjkzspg+H4bHh+Y9pkDe7gtfluGaD298VH55xpy3PttJupwf - 21lviKIoPp9+fLNu33s6BpbKp8XLimDbHe50Nc8PNyxjAe3nwmjdpYJPn+XBQeT7omznb7A+sK8f - gAahwQ7idOMs+H5ypGWjiUHTj+H0OV1FcI/SbvZTERqWS7eEfRn0zNY0e85nrByWyypkEXpUnEWN - XsmBZXyJs9uZOpeE8iGb8qUg3jmKfPqZSkGWmRhh2X/2vHOO9xzyr+IzVdqMPj1cvgsUvB9ryrYy - a/lBWFdo+b25TDucPXfcfWUMeHd+MVd5p5xBIC8gK949lWxvwzndVSlIhmUzd7nwXFHAege3D9/R - IZ2SlgdWOyBsLBjBrquEs5+LZPuSErbbM9yi+CDHKFH2Jv69T1mZAXRBuWdeuSr46LCLieSvsCQ/ - P8FXrDmjYLhbs94c0HAKzjLsd/xEn7uy573lfrTtrH+YsU+nj636sGHOswjJzlU4sG/4AMXWOrqJ - 8ErvvRNoaL+gWzzMPL1qEkJlvPD27OCjrdu/OnxE7icp8ZQNoBfyfRNBH65cmnaG4w9BRSa0zb0V - 
89RFlvH0vMhBSeBJ7hvpxPldyxYQof1p9oc7f8BX30DO+/Zku7m+Z32hkF33y7n/btvx54cGm6gU - ae/WHyVBq375Kk6z1bqle3oekHFYMOK8+97tfvtj5klix42UdeLpIcNLj15MvyrPcPit90Hv10z7 - DH7Gm+VzQrM/ZVq2Wmf0c3qmKJCuNZYuydPldJekW1Q/2bx/Wref/aJsrA+E7R72pPOXxyREd2ZJ - XC0veccP3RE2iyKnm/Khh/QUiBU6w16g5ewP1s8yoLB7i/bcL2/h7BdttKcP5+evMnZVVAk2u4bj - 2a+jYs5PkZR3OXOP27AdA/ueoP3NXJKfHrCxaSJQHo1N61351me9vqHCajeMrAeCRrTBw+97WHjk - vU4FEkuwZLces1BfIT7nfSjZPAs8Ds0hEyKnfUjLu5rS4yUO/SmqxwFo0WnMfhRZuDrJaQn9ZR8R - S8k2/q+foUayLgS/qdCyrxwfoXm/r/hT3kRO40Mnozk/YcrqOCAuTt4NBOwaxHa80e3v2StBW1/W - mJ1vtLbEVzkBRdGc+XuXjLFyK6D4dXmwOU92p1wMI+h9sZ75oc9GJYkqNIHf0lm4M2q3W01ORutK - TGWpuzyspAhdrfKGl+fNoRU7bZDBChYq1dfTOqQrci7/8JQDgeWP+T1ewJ1XMRbDb+MO5FaD7EhH - k/jhznPHgxDKsAWRzzyr6iwV9DO8cRFQHu48nZ82ywjUFN8Imf3X1CbXIyx6rjE3O03udLhkDehX - XWd7tx35aJgfDMtlExIjl7pwMkxkyrLk9HgjdmnGdtXpj/8kWFf3IQ/tBfx4m7l3DYczL6bbXl9R - 5o7NKus1Q49APUkVC/xnj8ab5qVQXW4PdpDWe9TL66UEwvrxnvOjN++/IZHQzkq3zGyUmDN8Gb3t - XuwTvAJZQpz3XgpWsDUJplrr//J2ac6TiNuGtVvssW5u5/n+k1+uDcsDuJhKS7SwTNtJ0x0bzf1+ - 9uejW7w8w4Elph7TvjLXO4lLE+q08wLnpiuj+o1rGTnvxmJ/8orP6SqAQ+7xnGcabntTrwaUz8+Z - uH0hu2yo7AeAc11QqVqwkHWZIYFskYzhZ57xoV5Ukazl1oYuzQL70z1jwS8PYfvykfl96pfz+SJ4 - tHx+P20X9/pxu6tii27F3tGn5bvHv/MY5hm27k/LZauhP/5MvsT6EJTSGbYg8Nl/IZ/vqmcl42vu - kr3bnlD1y39n/WNXqwnbxj5gEwqrEmi3fR3csYkHD3pfqJkqMsVfR43ewHx+hYO9o7b0+Die4eBS - gsWD77eTeJIuYD4fNzznle3wLOoAAumW//RZp/eMPcBCZfrLN0Nq7h8C6vUEEasInLDXd8Hll8cQ - /YhefAj3lxTIN6qJQ80hG99ka8BtCkuG/WDl99toVYJb4ytxTs8i7AViU7QjSfBbT737nccqm3M9 - 84SNBjVNEuC9tyKG6cpzHhBXoBzbAW9i/Mq6y0JtYKsXT7ZfIDPkdaxM2+sgiOyyOn31cT6fRPN5 - Mh0qSMLx59/9PhyJeU3X4XzeKIDfP0dmcE/j1GH9AubzT0IcJrY9+/bnTeqASwJqHttJfugXae6P - 9HPaRrNef+Ufv5M5L+e0fnEPdI9O9N1GQ9tnF5DhvVzf8ST6t5anEzzQ8q0syVUcNm5/Cqpo+zt/ - FF8dDce5P6JkExZ0FNmY8V11BUiSKcVii+qQ72xfhHuo50RV31lYP3ejg3Z3sWWkjjYh7z6iBuFj - u2MHeSqy6eXdNPjtH0y1KJu+6J5DpB0KYqdM0KebqmNAddDgiRqfbGqTxwJ20iDTJW91V/ydhymP - S8C8qDXD1U+vLvEN4c1Jjv3+l/+oTVoy7/Yx25F9izOKuM3oKG82fFj76zPg3Sll+/k8eCrWZYP0 - VfVmXiOr4aQolQl//24F/Oe//vrrf/1uGJTVKy7miwF9PPb//u+rAv8OXsG/BUH8NxP/3ESgXZDE - f//zX5cQ/q7bqqz7/91Xefzt/v7nL+nPbYO/+6oPiv/n8b/mb/3nv/4PAAAA//8DAPzVCbHhIAAA - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7c0b986749b8f9cc-SJC - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Mon, 01 May 2023 22:52:31 GMT - Server: - - cloudflare - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400, h3-29=":443"; ma=86400 - openai-organization: - - user-adtx4fhfg1qsiyzdoaxciooj - openai-processing-ms: - - '18' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3000' - x-ratelimit-remaining-requests: - - '2999' - x-ratelimit-reset-requests: - - 20ms - x-request-id: - - 0d93269b5615ee055622aca99d9e4aa9 - status: - code: 200 - message: OK -- request: - body: '{"input": [[18031, 1495, 220, 17]], "model": "text-embedding-ada-002", - "encoding_format": "base64"}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '99' - Content-Type: - - application/json - method: POST - uri: https://api.openai.com/v1/embeddings - response: - body: - string: !!binary | - H4sIAAAAAAAAA1SaWxO6vJbm7/eneOu9ZXYJCCTsO05yNlHxgFNTU4KIICgCCSRd/d2n8L+rp/vG - KgElhJW1nue38h//+Ouvvz9ZXeTj3//66++mGsa//9dy7H4bb3//66///Y+//vrrr//4ff6PK4s2 - K+736l3+Lv+drN73Yv77X3+J/3Xk/1/0r7/+Nhu9on7/9KJZFRRREw+dQKPUjAE9rMyjbnrqTPHl - 
c7CJYWMEP8GzpfZ+7hJi248X9HfvB+Gis+tn15R28BuMChKNDbN57gERpCyKsfPy5Z5sQNvCJPnM - OCQjqom2IzH4qlwgq9C3MqafrCOUQiZR92jTbMgDoQIseDyRnutzP21j5QbTwAWEMfSN5vdOyAEN - DxsamYUSDeo0OzpQCpsGkdMmgwYphPIKvvGmoW7PTvNVgI96Eqj/KDc1l4ptDLL31aGGPab2jIpm - gBEBb0SaY9DzUNghePeeO+pDXiWjwOMKykhqcAD9iz1bp0qBSh1uaOhpVUQlYXfQ2Ls+oDqzKsDS - +XuGVe56FMmt03+79e4INbBvMF5Hc8KfRrKDosJKitw97meJJSmIGnBHsFdLMF8vowI+YVuhefBE - QMbV14BfTdTo3qiPdrcu7ilUhhBjS8BRPeaKeYQ+egc4Eo4cLO9ThoJCMmxcH0rNiNenYGhWLXZf - LzmaLDzkUIsPGeHKS+LTvYgVbSOeZuo8U9TzIrcQvL2SkW4qQnuyzStZX73HBsdvf6inPJArUMM4 - wZZzczl73aYDNL4axDZJ9vVwnbUKxpK7I00yXGz2Xpk7OFWnkgYvV4xouvq62r15mwSWyteeTpsy - 1kceOnQLKEwGks0t6FbsSK1pq4P5As4hjBRI0PRhhi0aPB9gxvMSI13ec37s7TMM3rTEaGeRfqI2 - FQGukgv6PMqmn0/4GMOSrGvsRKdVRM+2d4Zb5N6wTV5GLWv+S4HeqVSplfWnZDrc7RtUBh9jL/y0 - YDa7jwLB5jhQy+l0m+SVn0NkIw9vNkBJxtaXQgj6S0+d3frN6bdLJ/BQxBe2XjHMiJI+fPhevSx8 - 4KOSzLRTz9oqJBPRYTkBNq20GwT17Y4N8y71syx8cribrohugPmsJ/CCKTRc5YCdXAmy+T2blT5C - H9HLt48SAo4qhJDcLbwJs3vPM9ASaEhqjjQvGAHFWhPDTX1LyWrIupq4PTxCr3ja2LWMB5j13kar - qFHvNDZPIR89FgrwPbcpxunVrdePKX1BYwUrjOyn17OT5JXwKVkx/TOfsKUHcMAvY3meHe9Srz/A - IKcGEUUtTuayxBB8uveAgHQMsjn+flMYhENPLUBUTsWzrYBgZm80NeUt4qN5ZHAsnDtRLkRLJns9 - yTqyooiQrXmpWXK3X9AXOowvZXevyTwZoZavWpNuGvxM2KPYuqq1cTG1/ZIC+sorCPdx0VG3dHZ8 - ErZXpKWW52Krvpc1+/T2AZi+MCIVXtuom078DNQxIki661lGFKWKwa3fVzgQlYCvm3giusQlFynd - /lVPjxApamE1Mg7m9QH8O1+13kTUlyvaQ/Y1W9jGtwZ7WC2TOXCvN9hf1hT/8tU4qnULe1HwqPtw - Wnt27+ILXov4jU3rZWTSoY9C6JSJifE6L7PXZf9wgbFVPHr/CIndR+imgMvBNGg0Jh5Yg2KE4HXd - +uRSdnpNEnHtA7A5D6iLnDabbnexgp+uGdBb2HY2HzdlBctJUjGiGz+RQoJbEM1Qxc5XKZK5jqcK - 3tBhRGs5ftcsHYsXYPxF6Ob9qDkPWOTD7FlEdLMZqmgctt0Xkmtg00itH/2k5dyFSfD+UjNtLjaN - taMMnlthR7IlHpZ4HeAVHtJfPgFMfiUyLDhRsJ+iFR/k3ekLdent0DCLdoDpvn6AEZKe1Pv2fTI7 - 3bz7U6+ConXs+bLqNNhotx3erg0DsMMcvOBmnVnUUTUZzNpXE+HQ6C1qHVb2/LtJfWik2pu6mmdE - cl75BTjqoo0D8Nn3zMT5AdrXcEWkefpEsyVdJkAGdETqWVQAuar9B9AMF0SVL0r06cCWwMvO6hEP - d6M9XLezBiVdi0nnJoeIfLzwC+CnCujmIbxrdumeBhw8IFETNU/AHbUn8OpETyQIngGYw8cdGNYr - QNaZ9gTz2jq7YHlfaBg+sKfzzEQ4ct/BtxmVCd8JCoRrsprxNoJN9tkN+gccn+xD5LjiEbOy7gjk - lfBGz0np7PE7XzXYbk8Cti26+81XC0qDc8LyPOBTNH0+0HY9gpSdwGqe1KEBb7djgANftAHreNyC - axSc6c1K1Z4MyceFxanzMCLiNZnbra+ARzoVtBA3PJrfpxApQTn2OLhs3pyvWdYCvv88kH5HRt9b - QWFoh75WkVw6E6e6pn/AirIzta+HTT/L59wF7+wuYVdxOZ/bzo/BMn8Emk7GSe8lFjQTfYvR9tDW - JD52JWR5yZAcV4k9+8LO0H7nL2IeJqNmkUI93NoS4/aa1bw+Pr8wdOUttsuK8e6y6hTYsNMDiept - 3893PzZ+6w1Nx1WSjMQcBCjsvyVZxZ5sT8c9ImD6pnsc7m/MHp7WCUKolQU+jMThfN4yBE3V4kSb - kZGx9XDy4SocJopWLbbp6zNZulYcJoov6jOiiWcRuAcPTua397VZ9MrYL5/iramt+4+bBwpc6jPd - wnIH+if0crDOLZs6wbnvJzgHORyIccHRaf/i1D7VDtTs3Z5mD6uuyToud1BoiUumerdO6Cnbu3Af - FkciCn0Usaq45yBVIxM7WtTwcVKBAN7eIyKqL9p81D3r9e/85r+1fiRPxYL3W/vE0eXpZSwdzy3U - 2D4jK2f94eN7JxfwfatyjEPfSsaDF1baC8UfpIBZ4OQX77OfWxQdqwRMg9fHsKuJisSDnCVzil8t - +LbiDge71zmZNkf/pSsb54hPHJ8TNtXcAegU59h5XTc9r/xjCx7uFFDPvIwJIf3Lgpp92FNvd90A - dgw2L/i4TRwv820z2760cM+rhkhX0kStoFoCZGaVkPm+yZM/40Elgti9H30+i8XWgBO8tRS7cp4N - L4scoXPGGjqy0MnWQJgN8BhlB3uRP0Sd5moxFJoupJbdsYRzU6qgO1eY+t3c8WE3mzf4CeoWibnS - JbPbSAo8YTH93Q/061tpQb/pN0R/bQvOxXOkwfuhnmjU2xt72ByNFzzyScf2fg4yHn1vMUiMpkT6 - umyjacOlHCz5gl7mwcomeZGDz+9qQ43U+GZ0uOcuLIlUU+OCT4Ah/yVDIsUPpNanRz/9xvfTw4gf - i77Q/EED6I1mHI3Jm0+fdepD4doQ6pdGFPF6d9fgCDYH9HbSVzacgxODdoJcbCz1fp5ULvzRL54+ - 1pwk4ioE76aCZNBvh4Rf1p8JSnztkjWHL8AHyfvCm7G3cSyHm2jYp+sP0OOKU5s7DZ/V24TgsDYq - aoFH3c8gfE/aBh1ntFYgrbmxvaYa5GVH7at5rplvhEyrOQoIH8Q6+QZTmoO3X3nk5bkfQA5bXwB3 - 7fPG+NYF9rI+HGAPaIee4ZGBKZmvE1wV9EZxqkbgK6Rr5RdfBMCTkcz8HBF4wK1BzcbV+VilDwtI - NjfIR0+8erqfoxYu9YbauTX2LBHXIfSK2ibd61Lbk/OgLlj0JoEbaRfxNHsWcFOnKcaF8simcDhq - 
sJMo+tXDbPr2SQ6xnshEpB2rOX0qMVjWN9J7JPX8azqpWpe+QnQj1DJ2/5oTOClaR4Tm2NXTqfuI - kM5HE9+j/px0llqX8PvmHVovemKMivsRqGFsICDZWrbo+wPs8ICwbe6s/k89q0azxDjxYzArqPah - ebICJHdOlix6/agdVl8XzUNqg3noPsLPzyCQP1nE6Hp24ODDmaIjtzh9Gf0H5ptcwlFsRBlLxJWv - RYa0xoZe3aOpq2++WjzIdtGjj4hP5hhCq98gwj2aZbPC+gk24TnE9p19OZOO+xjSZLejbvDMow8f - nBf4hpOII5fue/Z734X9YeitQNyPCXcKGJzGCIGTcQLzlTcIRoN6oObe0Tj3G8GFB1Q7aLeMZ8au - FkIfNQH6rpCUcXv7nPTseOLY9a0taKY9fYGKuDkC8FRmY+w+J7hLbldq4EedTUo85TokDwsjd097 - eumzAST+Z4+UG52TKauMHWzMo0H35OMm4xbVDvgqcrOVjefcz2rRMBjwCdJrI++T6ahmLrDlQCSa - jk0wJV2aw0fNBBoes8CeuZC68HU7ONh1mFHzTUBS0OTHB/aNGHIaFhsf8KpRaDijMhvSzfTS6bTf - UP9tmpyvbx8DEAk9yHyXp55tam7Au1fvEEcTS6b9TjjD+GI9qNN2fsSuz8nRN+g8I7463uvpeLoV - QHaVC93o5a1na/9YQsaeDJsWl/s3Ci8VvHhWSSZ7VOx+/9UEeFppBY3Dk5S0Htc/aviUR2r2b5pN - SNGOoLt0dxyX78yenaCNgQX98JffkunHCzznoVNUJKyeza7UtMcw+9heNUPPr16fw2ZOXaIv8U8u - XWdAL64INcSK9UzfyS0c7/bzTz0m5uX+0mZ0A6i/7WlCHtZ2AtJpPiGBbvys3xWjBtVDhLG9md58 - mnMTatXKlMinMaSeFcx2QP/UZEJ0eQ84U6EP/MNbpOhQl9HUZ08Ed95Rx57thf0QllgGxzPzicJm - MRt2W4PBi+BkSFlXfs1wPLVQGD4rHK81Kxni9KzBxf8TeZRwvehXBnPxsaLh5jr19GzjM7zE+pWa - s7fKyDKfEPf7J5Kf9SMa4RnKv+uJKNxgNF9WTw0aF6lH7AoUm0SvbILqhA1sb41TxPo++8DVeYqp - 4zTrZEx4XMB9dB2p+xRJ1JK6IvClbxmOj+tPTe7fgMH+oZTUlFKSMHfyHXhbnwUCLDwks7ntWnBp - 3D01u0sMRty+LfA5vS3EdmCsyR2FueavSLT4ZbNfb4tTqSz+n/BlPJNuxiFw4sMZL7/nnLvmDm6q - S07DMv/W38rtFK12Njn1l/mbtS8ToXSW9xQNUsM5vk1H+BhFB7sBiyLexMoAbXdDKLKf735Ui3GC - J29+UzvcbaPueewKIPrkRn56Vlr0DpiKs0TjXjWANM3sA4a1DhADj7qecy61MLB6beEjx2jC6XsH - axBccLw9xxlXilOrlSfpir7nW15Pd82JoVc+r+Tb623PfCBDmNnpDf/qN4uMxNcXHoff+e2dTZf9 - xYGkjkO8q59twsykNAA9bjscdlBLPsDXHRCHuo23cR6ChfeIsI0uHZHXbmATBw8f2GT7msz7Uwc4 - GQvyq7803m70jJUED+DImY5aa48ANU4HAsWGYmztwNh3jaIVULwRirp0c6jZcZMaEM7Vkdr6KYwm - KS4d6MS7M3Zq1YikHRMduMU2R9LCP+bgezD0JV9j98SHmptqcoZj4d5pqKEvGO8v+wxRHu9x5Dxo - NBiX+w0+bTUk6vMcR/PlcZbhwg+QdB6JPUssS+Fl7bbYcZpLMtrmoMDZuO7JkHl9xHkj+OCcqwm1 - nrmYzHcjJNA/NCK2PzaNhnX+HWBlWAO1PvrU89zjMvSe7Yl6+LNP6A0PTIuhs6FBcAxsktyjFsYb - I/yTTxZe5MPBFXIkL36f5aZ+g92m93GoFGM91NV808V56qnn+mYvi74TgqXeoVlKUdLvLNJqi58l - 8iS3PXN7eAa17dtE8YqqnuSpJHByErrEv2RPz/ByAAtPoY6srsCv/ursVOlIyrZJwg8r8wzXiX6g - W3El9mNIvNcfPxmXqQOY4R0E+IT6jUYsYcn4Os4yTD7DhXo02tUT9fgLjJ2j08BNmE3zGoSgsN4y - uRwCO2KfQkDAzfPPH741yNOHQBhcc2qbu6pnH8/6QBREBnX7jVfzj68fwWPPOmx4wttmb00qQdpu - JqTuToY9ty+Yq7/nQeLZsmdvXMd/9FXMAxJ9tvGUwkW/YXup/7wZmtcfvW+HuzEaoaYX8J63Mo21 - b5gN22E8gISSGGlVVtgT2PoyPF05pBs/DyN5WH8GbeGhhL3iPOPkVPvQc+46DsNrFhHkv0Tt6UCX - Rmi3BRreteGPP5D++cx7OpmjD043UfnxkHrRWwyoWXDF/mxe7FkVJhGqBH2p2zZWxq731xmA6+VM - +rES7GHrD1+I+GFLury58ElVWAqtzlWxu3aDaDbHIv/5LTK/1l7Np82kQaXfJKgZHqgWHyzbgS8V - bzhuUpz9qa+Xjf7ClkrbhCpVdwQLf0K7lxnxacRDBaydQynKDqCmKHyU8NQqR3zZ+TIf7lX6BaA6 - PWngizX/6QVt8bf0x48Yf6wJpORAcbjkJ35XzBKe7xAT5nR3ezqZxxwyUnt0+xGSaPIHp/zxj5+f - W/SF0sIlnvCSz+uprQMGVRWH9Ocvh9/vFz9L/RvdJ5zMVaE9PFpR3xpRMqVNU/z0wR9/wGpUI31o - jC3eBhnn/WjmDCRN59KIvcxkXYC1CxZeiLGz/gDS4Qj+0Z8/f0p6L7N+/gE7tDvUa98N0B9+Dhbe - xkDOd1A89ALiJX9ytlO0En7JXOBtlJd2N+4vMTCJlZB5FYwRi+NPDMxcv+CtYkQ1P5xFCHVQ69vx - NL4S+tNTthyJ1P2Kaf/jyfCn3xY9m3D9rBtwupwMokisz+YqfRi/90t/8SR90jWDvjyYSDeyLvlT - r4r4A6n7kppIjv28gIdr/cLewj/ENXy02lKfsJvWYj9oOXCgncTuwuPzhFmgQMD7tg8iLXxv8dcu - LFvlhDPhyDkFlUrAQWoMujxvRBf++PMf1IIPlEzXraqp37u8oebGvmbz5x6VkGltTa2Fh/PTaj+o - 96Yxqa0dE5srQvmFQ73aYzRLu2ziU3oGD0V+IVYMcbYm87eAe3Dn2L4emp49rC0DxZnYePN+2Hye - 9rSFQ2Nt0Wu3fgO2jssDlGsQ/3hD9vuuW5J/onvu9/3LVQIEriJ+E1hdlGS6zloJM604YVc6R3zp - 5xxBEjRf6qfoASac0gP48U5r25cJ78T14cdDFz+2ylj/xppWfpQvDqbUqb/xcHfAwm+wC9trNDIW - Gb/x0+hUAnvhJ62+PlmI+nS6J8y13wJw3w9/WU+fbOFv3x+PoeEDbuzRDE4iXHg7kh7CuydAYS9I - 
k8NuWQ9zQs/1IdW51AVIGKQNWCdeSKAl+A8kXt60p99ma4Fh0DOMn+o5m41NedDDmLs0ODRVL10z - 9QPryNtj91TpPSu/t1Bb+AdS3uYTfBR/IKC57xX6p58npS6DH/s1Yke45dHkH1MBat9zTO3t/mVT - cVzl/+YxLHSSWXT3OVQ27hHjR0CyGQnKBD3ptSKc8DNguRv4v/pD74+itnkpuiGg2jkik4s6zrw8 - dOESr0Ruj0kyrqsuhPtTLv7pB5Kmml+6nQVHUqZGmLBCc25QTeKIFqXoJLMUYgu8ttHwh5+w0+n2 - hegYlNRRe6MWB1C46o9HO216jYYHun1hqgYmTRGhoGt9PYSIBivy61dM5uM8wG2/wRQVL7MWnZrJ - 8Ncf88D9aX8vXWeBxQ9iV7lUYHqwZKeFD/lC2uFsZrKQrjSwnXyZWqfhyakilB/dnUuMLVM9RcVT - y5m29IOwZYVO1O3rQIQXZJ0W3uJxVrDIhSImGNFHQBJ+7QcHaqdD+6ffwQ+r4PyLH2z4wszJUyjL - f/OM9vvtyVLf/qyXepl/vvR7VPw9+0t96expBrIIowEckHCsOKD4PLSwgKNFJlq9wbQ26pu+5Ed6 - iEJSTyLEMSxnRSL6yqyW/m54hMYA9ji0FAHMVXox4HMwTPrjYwyktIK2F36xueRHkp5qC/ye/7D7 - Cv34y29SwR+E9iHlJPBjFz6E+Y6Reh5qIh33CAgzacn+1So90beBBR8uC/CmFA7ZzJB1g0aqvPHx - 4Udg0UM7ffk/BLfnIWPyK5PVxU9RLAxjPYcsOf74GrYlcQRsp7Dql//+9GP+9GcOq49LvaxjYBbW - uwN8btQH3ZzzlT2N+FXBCGt3vOgBzk9gA2FTXkIceh23J3XXWnCqLiWS3PidMHP9+cIfD9BPo5OJ - q9OtBE14DMlkEqeX8aQiKNy+XyS8k3vU78O3BnfGxadGVjqAaP6ggIU/oq44SfaMzsMNPgfLpIaL - AsD9QGDg11/96ddfvw/+/dsV8J//+Ouv//PbYdB+7kWzbAwYi3n8539tFfjn7X77pyjK/6Tyn50I - ZLiVxd//+vcmhL+7/tN24/8dP6/iPfz9r7+UP7sN/h4/4635b4f/sdzrP//x/wAAAP//AwC2t0TP - 4SAAAA== - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7c0b9867da38f9cc-SJC - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Mon, 01 May 2023 22:52:31 GMT - Server: - - cloudflare - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400, h3-29=":443"; ma=86400 - openai-organization: - - user-adtx4fhfg1qsiyzdoaxciooj - openai-processing-ms: - - '20' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3000' - x-ratelimit-remaining-requests: - - '2999' - x-ratelimit-reset-requests: - - 20ms - x-request-id: - - 25535161a3c6c4c4d2d95830fc85649a - status: - code: 200 - message: OK -- request: - body: '{"input": [[18031, 1495, 220, 16]], "model": "text-embedding-ada-002", - "encoding_format": "base64"}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '99' - Content-Type: - - application/json - method: POST - uri: https://api.openai.com/v1/embeddings - response: - body: - string: !!binary | - H4sIAAAAAAAAA1R6SxOyOrfm/PsVu/aU/kpFJIs9AwG5miB4o6urSwQREJFLAuTU+e9d+p463T1x - ALGiycpzW/mPf/31199NUmb34e9//vr7VfTD3//j+yy9Dbe///nrf/7rr7/++us/fp//38isTrI0 - Ld75b/jvZfFOs+nvf/5a/veT/zvon7/+NuPiQ5nS1Anz5acM4uJ1ZNpa6bouyZMjLJdQMZs0H071 - gcrA3keVHUtt4GxxmpbKpd8Q5urcTppOaEXZRcct8U/hJ+LRfchQhzZv4iqmnoz4sN3CaV17ePPq - P15npqcc9Pf9yLCTWmi8r4mBLoLgMxWNqKR6tKTwcM83rLRb8OZ4jLcgX4URN/ux8KgSvxtUsFJl - ji1XiBfJs1dO/JwwcyO5+oS8tQGQGluyS6ibDOB1N3AIDfAQNarXy5f4iE5iTpgpd5bOk8GtoPH0 - HsOptJOxux9cqNitYI6nd2WR7zcusGJ7JGpq5px/KtyjfIMRUzeG7bHJkVto/O1AVxpqk1m0BRFN - SbH/vdeHi6MBXGZXI/jlC5wv2PWIOOlvTDvOQTJpbiSjOiUWBn67JNy/Xn3wyyUi2HaSjkVXVwX5 - yBFTYytLhsxY9LBYXp/EROPEh9rYuqjFqkWiNiiSWeR2C/JN7Yl/Pr8iflyNAbCk7Yg1r4/JpO0M - G8xTsGGOfa35tHx1Mypq4cX2/uB59LA73SC1IoMR65B3w1IPd0qRVwqx/fuKT055zWVf2nV487pf - +fRQ7Roqeq6JqepFN54Wnx088MckesuJ3t+PqyvE75tLZ3o8JPMhMlr4iF2GZWU36/w45bmSc/HI - dLy0+GQ3VoMEB9XM552YTNkhCiFU45KiTaXpq9qOJbg894jo7eGgz6zoCpgyqhHttEbelK2XPRQv - pWLO+/XoRr69jhBErUy2w1R11K1iEbQNbYlNLi3nPpF6KNr9kaloj/mYvhoK8Ts1iZvG124+NMxA - S6FGeNwYjc6mdyOB3jIdo4Sq5ci3wQzWLgqZKgPhc56QEbFzLBNVjktv1q39HQQtCEhc58vvfCOF - o9V/8GrLRG/EdZWBtspkorfhAk3Ds/VBavSQOc6ClJOmNT3o7+xIdPKu0LiT9mcYqLRlD+uglnxx - 
WI0gLsUdMbfUSqbMWFMQUfDBrWaXXcv2xxEuihQwa65PUecf7zbo/RIRuz28k2lJ9XAhn0WJqcl0 - jJpTezuDoF08st9Pli6K2b4CRweN2K3aJDRcbRuI5lRiu+tcd62elwKasl5j3/pG7RvWNdSZb7Mt - fY1Rfw9e7YabqoXFSyZ7rHhMdzADATOrZ/eo74bABr2bF7jE4Qf1lXWtwdkqKv2eRj7nPZ5huoQp - 3by8mI+ysLuDYFaIHA2ZdsxE3oyGz+fAtFVYc25eZwOma3JkhlUZ3YgRwihXypoZxUg5m86rCriG - MDG37Oy1b+loo/ipFsQx4lXZiuvgKhedfyJeLIlRf0rFUZaKpiPGM4m42BQTVRabycYoyYJo5NOB - /v4PcR8z6bgvhi749InpSp/LaKoX8xHBZT4Tc/Pu9LES4wJC9dwQuzvPel9ZCwEtlrcn80f/xbn0 - DnuwrBMmuNq9kikw1ZtS9ZgSo3rb+ojjZwHdOtdYGNmWN873cwPB4aUwL/G3nZhnkg2N2ZlULoYK - fUBUJZCjzsALYSGgQWu2LgzDAqhQzxKnsRbsAMKsJbozyeWc7ycXILUV4i7GRufF9ixDcmcYr4Ps - HU1ZPM8/fGV7Zn6i+Q2LCqzj7sFIwaSOfZS4gGY3AXMVKU9md3J24EK5ZNvDAXs0h3knT+mg4sXz - mnv9/Vk1Pzwi1iy+SiqkwRZEFH6IK9wT1M+fvQyOc26Yux2R1/zqoz6PFcPv6ovfnlEp/ttsmRXd - PL3vXv4ZhdbKomsehwmT4HMHTRFuzDEQ72YRoSUSsHOmrMjijn+UAW+++8H2YWR6IjvlNTQ7puFl - zVSPhviDoajhxbCzeKFRXtUAoowHLO1x5VF309TQaOcb28X6q+PJZ1dAEloiXqFHyHlSdTtUPJsN - nkRJRZ+l9RTBneaantTjs5ycMijAIujD9M+iTJh0kEK4CODjObotEY2VRJCDu3tjhhWZnRhebCo7 - 6rzHyzL0EV+wdY9++OBPgcbnHPtLcDe+S2JPtEs+mmK1CaP8Tqx+LyQ0dz5XFHf9iuz7riypgoYW - tNHBxLs5p6TvPmxEl0+bM3OzufJZSbsePbaSSHR3JZaTrYU5DP3KJMbr9vL4wjmHSBNzkcXVxtep - aIsimPfpitf0dI64eSyO8JkuM3OzRdjNwlHK5STOHsRVmlGflVrH8P0+Mcpli9o8vNfyZ/Uu6Glj - 7vUex88c9I4azL8kozeHF7VHx70VEPyObH391SOQC++CrjYXmXfSPi9Q8hAt4s9rt6wF2wkgCWBB - jAKPnPvHowvxKwjJtbHVkhejlAHs9xIhz2uuz65wodDo95Bg761EQ/2UrsCSpvutVzc4t0kF8+hu - KRLte0lF1Li/98xUX3k5yo9RBH+tMlz0fhxN2UnsYbrdDySklfadb8zgNOHyWz+G/hnmjw9SZ3vM - /uy9iPu6k/3wll0/+y5hIz7vIK4OB7oOG+AzO1sCTJHVEFzPV9RF4qWBKr8/sBifomjOHcVA2ux7 - zLxXbzSzQ5WDX6xlvDAdu0uHNMxA3NgTO4ufUzmzMwGkNy+PuYqc8p7vmkJ2hWpippycEfc/rQSO - fXl/+dLlfKEKM2LpOqPyFqceP8qqCrgPJOKPiwKNcpSGsHKDgKS+N3j88yhDkLPgTgda3fQ5ViIB - vvqRuIVX82bHXiKS6FXGY2xl0TQcZUCwJxJVosvTew16Hij44y2wEm5rfVhelwVoCtyY8+odfT7k - VqX85ntYIYt6y5Yo4qaj0GpHnG64dEmLHt7jwH71Ml26qIVsvX8y7Xsee2s9NXApA4lop1OqcylD - GIYqaJlh6auE6hdpRkt46My7lkn0Pd85uOtdSHaryuXf+rlDa2Qdc9y573pLrSSAm+BSCWuat/6U - fAuhvdHxeNcq3ortx4bUfXLm3WCORouFLqTODegkmjqfbHzK4LHPMUZXLY34cbiOkDyWFvEvt2fU - p8dnj4qWHOnzIhgRT+5e/dOfzBNbuexlPC3Rg7S7P/px/vIVGtomJLYvRGh2XX6EBVogpjdtk/Ai - cjGqgvLG7G6ToSlQxxbqu+cxvbHdZLIVl8KXf+hnV9yT8b6oa3C0jUGMZx+iL56KKCRhR6xg30R8 - Go8GxHXsMFdXu4QVl1gAwZ4ZXj77GbWh0rpwNKuJbJmdelOw5zUAOY3Ev7gHfdaHCIOZtE/K2zmP - ZjHdSCh+XfZUXjw+Ho21coYpkFdU1lqtY8U2k+WhOr2Y4wg5n4Y4u8n652Ox3fmWebzFQQWPrSxi - MTaW0TTQl/tbH5qjMSmnpR4aKN/4CE9rF5fjPRgaOUkpwYuyWHX9V8+idN/0xLDEORq0amfAdPaf - uIvX725ava4GXMpQYubm7ekzy9gd4vqI2fc8cB4NfQb6p/bp0vSGkiqL1Q0sO3EpHKebzhedXYPg - CSXTRrLpxmpOM5SNqw2d2XUfTTVqayQf7gfmCkrnzfn52sIHKS5RkZ973/WRwXqIlHm32zGZw1AC - tMK5yqzzKY/4Fw9BTrwSr0efdLxorrAR5x4T936CbsS24KPlYpiJTTYNnxkvCtTsjQ9JVscAfSCX - baS3rU1s0jh8GFqnRzGVPsyTJsNbT4p+Br6VCEZJ+uxoeLH7P/rKseprMv3wrd3VJd7U2NLHzrvZ - qGAv9PVbTVL9+Lwxcky0dbLv5t/5CPcuZ+5itD2qy5kN3So1CBGE0euKle0jTR5rYoVm4M1CrAZQ - ZxeBCk76RqMFzg2ksn8zxzJuyTQsrnfw11tGrOnZltyY1wDhdWkz/3LTIl5MlyX68gtumhuN6rzf - zUhvioC+pNUdfWBY7aAo5wlPK81Cs6h6LdTH4Y37+Yyi/ucXpvh2wtxWKPrjh8w7v9I5SLcJX3Rq - pfz8sB95C8SK3LTBfzKdblmLvO6zM0awrmfCrKhrvTnWAkMxb9WLOK9hjYbM5uMPr4n79UujZY89 - nKYoY9/zo3dmqi3htHyEXz6Cbqqf0g26Tb9kN2UX6lPNPQNJHzViu1tMkqkufEOuj23P9HaZ8unB - 4xDkxCm//m1bjph5gLhznIntK6beV9cyRNVV27D9F48nW9n2imWSMxWTtIumi2nK6Ov3qKwhtRsr - Lz9Dks0eRdeiL/lU9iIMdZoyN0ViR396MTV3B/xcJ/uS366kQLg1TKZx4ZbMYvt0Qb7bFdnj18w/ - l+VuifTX8Y7X3DK6b/3RP/ip3pdPNNS16oIZEWCm1t08LpmOij6Kvvvhi859/WTDgsc5IxB10aRt - NFvh6jpkHhrenEvn9xXgivZYweIBzYecVJBLhsFUGT31mdUoAItsPsw/Ld/JzBjN5K9/pfzDuN7f - bWwjcVHEVIzrORpq/9lA8WRLvFhMe33EgGXQm7f31VerpNV3mxmmSB2Iq7y1RHybDxfVp76hMJ/8 - 
jhfbTNqw0ghYXI8vb7qcNhiEvfogptbJ+lCncNw0ZuV++StH7Q9PvviDV9u9pLPPUt6Cu/b2+LWl - 74j+6hUX7E5X8s3Wx1k2JJQkXciscCdwKp5uN8iVZ822Ufvy5jgU7jA8DZGosl15Q5ZlGPT6dqUK - 1fbJKts/MEot32HYMxdRYxWtBJemrohhXJ6o5/mth6PFK6p88kyfLmqbweUzJGy3vofdeF9bOyQY - mYFlRVZ4g+NPAUNurhiuA5lPMG1scMjkUSYoWiTqVbNF4vg5sH1YfX9P3apScFOLn5/Ue9ktDBjy - rUynMZc6vnDcClhoERxszic0DcdZAMHsRIwkUutD0D2XcHkXDdutIo83cp/vkHlJe6YTs+bcX5hn - eSG1B+aJ1sC56We5nM+rCEs4zL3RAu0KwX1/+sNvLTtbAHVYLJmaXnA3u82zBr3EFV3W2dObanVn - IMfCI7M/49Wb2QlCJD0rRrYDqcvZXV1FENfbDSOCEHh8VC4qsDo0CDG7uuMmtZeoTl2fOdWO6WPq - uz7EpXv74mPjzQzSHvyXwIlRPAjinz7MlWFQgDQf5iTc/GwM9OUbslu94nJaXvMlXMoLx3BRsE71 - j35HjRafiJpeaMeKywGAb2VCBe+Yo/qw3TfQLRObDl89N7vKeFa6zcSYtipNtMQGvcvizB3K1Mst - 6Xe3yw6q++FDVNmZOTNF2P74A8fe61NOy7tU/fI45hbeDo3dVbv+yau0i2AkortbBOsvfuKFIX3K - H38r4lidibo5WfqErvUZjo74pCv0mBErdqEAHSjlHz081CrewXLBZppGu8LrO+/mAt/ZAtE/fh19 - /awE7fbMCbFKnFB9NfdKMZz3+Hnxz96k7S5LOI32m2lHvtAnyGcX8rUjkN95o2ES3OHhBBlLadkl - s1hoLcix1jFTPxy9wTl/QlnqLwpd05MY9fj0yoBVrs+sAK91JmFLQl8/h8XlNPAJdE+GKoufRG+f - WjIEN6GAfCX4THcIdHxUHqqsTTbB0rd+ukU9ir88gQqO3yDu66krL3iS0wAxn8/NW8sgicsbvkTK - mvP2PBqyHFxPX30Rd3zC26uymPyBmAhHnEvqJkSP/duh4s3Syok46xyS2+v81Sv7jpuv1Rb0blzQ - 7nZbRuyjKVuEh5gx/BqwPtWUnwF/LIP5p+qiDwM93JHU7XdY8IygG090NUMSSAaxmyT1xnTuvvnk - tmc7cWcj/vMvjEkJOyzHhlMlWN7l1szfxPb7nT7+8peYwYuYGU0jtuCNIbc2TfDGfQ5lvzusK0ju - TcT8izvp7CjbW/TNa+gru7NuGixJRa0veMyKHN+bLoaMQc7CO9vFYlEO2anbgrMj7JsvbX75ZgGO - urb/jBf1ive//ITOdJV3PLmgJZKeNSPGNx/48cWmqihh/sRxt4F7e0PDsNHwdNrmHdXzEqDOsM3M - dPfi07CoXCQY+YJsWZvocy61ZyRsE4uZmbDvZn0qBDiBMNPkehjKnguqoCzE1xHTmvQ6Ty6NDR1C - b2Ju0yaZwya5gv7c9RR996e39ncXnTazgKXufdDFePOQZZGPLtstn0rHzIAGv/XBEj4q5Wug8R2G - 5nWiDCo34v6RiWgKpBXTjqT85gdCBc6+upLIdg98VuJIAKnRQrbvHbOctJBfUdG6MXO+53tMXzmF - bvVcEH9aKsmIMwOgqtGWKrTsojHVix5S5wq4XeRrNNTrcInYrWZEO72GhE0SqiHIgpAY+U7iQx1f - ZXD8/M5cPYy98bffjmGumTatonL+5rmoPrI32++DNR9q/9Oi4LQaMJy2sTcNiyBTjsZr/OVv3rCc - 614W1ANhxlOZ+DRYowoPy62/+PLWabzxfWD3qaCyZusd+1S7HjV7p6J5esGlqK9kCvl6azOiXM/e - mCLPRt/1JbrXRH/8FwSnecbfPAK9Lk7aoHZXlWy30pNuWraWjz4LVyDemooljc9y/sMH+pKijM86 - iUP0WZANwy8v8L79hCXIZe3gEbGeM+mBpT9+qoWt+NW/doDM+FXhMW325RK34Etc22B6NNMkGqtZ - G8FKcp1Zh6JMVjUqakjVzZ14sbmJRvlyOKJU14/E9oclGl5ICKDRkhMum26tD/YZBFS9QWD2fhrR - aDn+7ZcvEuwZY0e/+TZydEFjpj7rSf37P/izdZh1KEN9WOrDiHA1xkxvkJWMeN3doXjJH+bPxlBO - mtJXKE5RQyd+h4jqpBdkM10fib1/6N6PL5D8OF8xzMa+W5tUFSG4SC3NbJA6tnC29Z985scPc+xm - AnzzXCwvcOuNaRCDbEbGjtyE3vcmzU1kYHHB2TZ4LPjnh1/C3rzRBWz9aKyCxx1+ea13PezLOW5M - H5IH2jIi3GdvGmydwkli2z/1w4tKw8CYnBAC2z6Z6rar5aHEFIvrvuBULLQGzPN5Rb55hcd/+/fr - T7h3HSMq6FOh1OGLMVc9r6I+FfkVllxqWOath26Ol5ABy4UrM+9bB/W4IxLgonli3oYPzvwjWyLz - iJWff+d9mmq+chr3BV5889hpeYUCkmy1I/Zn30UDyNVRyvmGEFXiH692ib5TOOEn5t3iVyfqpAf4 - 5d3qvSzQF5+O6Mv3xJS0yXtdiqUL4WX2f3lqOSyvjYi++gA3jYK8lqmxiE5SazHtOD3LIXi/RoBj - l/7R780JvVTQ6zQiqhRuEHUV6Qj5xlXpOjwzj+qkAri83yXTRLPkc6M2qtytuzUVvnzDpTO7gT81 - NsPuXHDmz+cd+GPlU+rpT8QWEAVKt7o5dJQKV+dRrmD07Rcx4wW6zj8l2qJsc4kpXIaUT867Cf/k - Yf7Xz021/2lkRx333/oOuvaX/6ZbiNnRQGnSjTjbQeOrA62+/DjreRDAdDQ+jCywGolhpbcQXkYf - D8Ne69hxNYagjbsIr9Jr1PHjSgoguNs3jMRW7vj0cG4/vfjD53IY7McV4ue2IPs9cTzacKlHSaii - 7/55SS/vbkeIO7oiKsJpyRdvo4DHzmiJxdEYcfOlqBBcFi/m1GTFe5mYBRQdPpFfPv315xRVzzwh - JF+PnBWXVwOCI3aUN6399cvB/ecfifnNU2elpc2PH/H0zccG+BxakLPThRGl2SWzfhlnpXGkFUv3 - 01sfZ+/RIDlIAyp/83++cLIAQt+byZ5d1t4vfwdrP03MMHZbPmTFIADfLlyyjZwVGpaWUmxaHzxy - 4ShIeOL51fq0KjFthXvCp0tpiZBi+u2fnMqIRVsewLcfTB+WNib9LPsydKN3xcu8OaM5D+8Vqh/H - BTl885XhmwcoR3udY3m7p8msxKxFzd6rqODyqZxscwXQLbY5Xk/zp5sCpxTh+/6Pv2kq+Rmib/+T - bXt/k8xMMoVfv5D96bf+8tpv/kqMcnsv52+/GnD+qsg26jgfU6S7v7wQr7n8LLnvNQJ895fOga8n - 
4q+efDgmTP/4u2QthZ4AD02U8Rw8HnwIVKkF6yC8mauedt3MHHOLqt6n/6Vflq2FIYi1J9O//WD+ - 6WuMgsTI2DagWjJibm/h79+tgP/8119//a/fDYO6SbPX92LAkE3Dv//7qsC/b+nt38ul+G8m/rmJ - QPtbnv39z39dQvj70zX1Z/jfQ1Nl7/7vf/6S/tw2+Htohtvr/3n8r+9c//mv/wMAAP//AwCkxTP2 - 4SAAAA== - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7c0bc85bcee4253c-SJC - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Mon, 01 May 2023 23:25:16 GMT - Server: - - cloudflare - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400, h3-29=":443"; ma=86400 - openai-organization: - - user-adtx4fhfg1qsiyzdoaxciooj - openai-processing-ms: - - '1067' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3000' - x-ratelimit-remaining-requests: - - '2999' - x-ratelimit-reset-requests: - - 20ms - x-request-id: - - 482e97acc620bbf5e2f7d3dd44145666 - status: - code: 200 - message: OK -version: 1 diff --git a/tests/integration/cassettes/test_memory_management/test_save_memory_trimmed_from_context_window.yaml b/tests/integration/cassettes/test_memory_management/test_save_memory_trimmed_from_context_window.yaml deleted file mode 100644 index 970b806fe3..0000000000 --- a/tests/integration/cassettes/test_memory_management/test_save_memory_trimmed_from_context_window.yaml +++ /dev/null @@ -1,843 +0,0 @@ -interactions: -- request: - body: '{"input": ["Assistant Reply: { \"thoughts\": { \"text\": Result: - None Human Feedback: Command Result: Important Information. "], "model": "text-embedding-ada-002", - "encoding_format": "base64"}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '207' - Content-Type: - - application/json - method: POST - uri: https://api.openai.com/v1/embeddings - response: - body: - string: !!binary | - H4sIAAAAAAAAA1SaW8+6Srfl799PsbJu7TciIjVZd5xEjlUIHrDT6QAqCiJyqAJqZ3/3jv53dnff - eIEkj0/VrDnGb8z6j3/99dffTVbe8uHvf/76+/Xsh7//x/fZNR3Sv//563/+66+//vrrP36f/9+b - tzq7Xa/Pd/F7/ffl8329TX//85fw30/+70v//PX3XQk9tt8IacmL0+aAZtXYYkmRk3i0zh8b5pFi - rLzv0M2o1HUIhpPLtvVdK+e2udog7hYz05U79abP/jiDvFzb9HMyBc7pcmfBlXkJ0Vv87niwkiU4 - sljB4kI/loM4RSoURpsz1eO6N+lmkEMxVCa7P9Ys7oPZj5AyWQMhDrfQQB6Ers7xVmJeLXv8Izjl - E26iPzMzyWyP98HTVF5ayfH4Kp98xAsYAeWwIWatnrJ5EK8YqgWVCZG1jo/vYRChR2nAgtXpbUx7 - bwyVLb+PZJsJy4zp86VBhbH3yfaYXbJpV8Qq7Be7kezyzs9mGaQczmTDSGAeC29azBYFv241gsW8 - j7m4fkYwlVHFvLVUGO0qnhMIzdbCRbQ5Zf0wfQC87Z7RJc+vXT/OU6Gc7oqFY/DWZUM9TwVnf13h - 9TVJ47Gzb7dNhIWQaRtB7vpYtBOIM6nFoF1xN6V+BYi8n0eiHac3mjr5ESmhZlH83jQu7ze8xqhC - 3pO4Ual6DKQ+gfV+ZeJNuhb5TMkiRNdOCZnxzE5ojjenBgYtw8x5OrHXH/RxAXFWplh5+G45Tout - DmtPNIidLFjZbxG2ACo9xhJ6196YNCsdxN3ryez+2Br8GIcmNEWUMHN3XnZzliQiKMa7x+Mabw2q - nHwTmLdoWEDNlk8qoQmUu3NDRzWLvPnOnRQyCgHx8qPCRznYhZKUSFdiBo7H5937LYEh7q7EFgXT - m7faKYFMck50SXWtm6ISQpBjsyFX97nwhnIvNbB5zoxCkEt8ekmujVbCJf1Tn/N9OUkomSKd7DhE - 3TiMJsjyqVWJk5fE43Oo+cru3chk+47Kcl7blxzMMDkx6+IXfDxgM4d4teVE448mG2+F3YC07Y9k - L6t6KbJ7M26uy6Ihunpp0LRUD5HC9ssPcb19iWb/eirQK2lKLN7mCs3f9UG4EHzmvjSz46K4jSBR - wWFXE2TjI702Jlpp0YdsTweW9fXxISpSb77ILSjKbl5bkIOi2SZ9HR9DPNloryo+7CeiOfWqm9hT - T5fFSajYSdPNbLx91q68iS53Ym4Vo5wUT8GgHE4+2e5EF/F3ieFXfwxb+jL+7Qe6zscHIw/3kQ1a - 76RwiCqBLqJg330eGdTwnE4Ncbj8yfhhq1eg6lbCzFoV4748pBGIWeGySHnpxqxrnYDSlvXMX+/X - JTdHhuFRPijTrkZlcMEsIphk3SQnxIxsbj+yC3f/reOxdgHRxVOo5cSYNPZnv7uNGcLBW8dMy9e5 - N3vSB2B/e56Zvz57HQOpSsGqng7D7Zkan6t/fsLFFVOmfXQcN3n3qSB/7Xd4JV9Pxpp/nlg2NvGL - 
Hr/rzbREakGzsz2xz4mXTdXVdFHa3XxmJxIqJ8uS1d//R9Q2HPm8302t4vs0Ia4nXflILrMIV25p - 9IMXRTcDk5+yD/FE9CllnO2J9YTrEUKSCS8az7c6keF4yrdUejkaEo+HvAdotZjpneZ006JGGLgh - nyjSu8n7ra9c53ZF8OWy4TQrqxMsD/Ibr/Sw5vNLFUz0eFgW0/TikfXO843hDZnF9LW141O38UN0 - /5xLojXHIm6bVWyhzcZeUnGhr0oWZWsVjv11y/RLGGd8UUQLNG5FGUtdzhG7Hh8jpNNOYf5BexvM - LBf9r17oWE5iyTxRPUCqOQcq+Zt9NkjnsoaG70JiNDtm0FUsJyATZ8lU8ci8T3V72Irz0gKmR6bn - cZfjA6hUkJielK+sv8ahiyS9CEh+0N7eVMeqD/tHX7DcOlUdl+6hAOMMHY4bQfG49nrY6Lt/zJEv - RjdLjzmHcFhhRoRsx+dV0UpIafucuf7a9sa8e1SKSw5buhzJEk2fGw4Ryt8fhi+vB5/PSoGVJfYs - gi39Hs9jK0RwiMAk3lNbIRoMmg6Nzs90dcJZPMessZFjRz1z70SKB+IWVKFZw4iDpb6bhW0v/eqJ - WEXieKJFShmtvZwTo9KQ90n9agEf11fxymQPj3/rE+Y4CIiWu9jg76oE5RwLCYkbeW8MO1oUgIoi - Yno0XYzxLvkNIor8YtqjvXnczy8SqOYmJLt6cYtnXSsFVO0hJffTKzb6d6EcYP7AQJKXyNAc9qiW - o6XrMH0cNETvy0lW1KwdiX6uMm80S9cE+gpstm9In7HjHlmoGxYlM0XP7phZihRexzwjfj9fu0nd - GylsjXKHEd2cDL56r0fYHQSdBfml6egt8dK1OhsaUWsX+DQw+4Q+19Qm20Z681G+LEcYL4ZOPCyW - WfMuVgd4yM2NELcQ+Bi84hk2UXYnQVZEcXPWuyccfFxi/j74nAYt0cFf3jdEy5XR+9hWVSnX/lDQ - 8eZF3exfb09UvZcpPurKAY3zi9eQdiaw7WZsjAElyEWiFHWMUP3RDd0WBKjsRCMJe+4QOxhDtVnc - /CuJFlFofKrm3MBT/dyJ/YESTRv2roGvtR0VNPfR/c4vcqqPwsxyMI2RVb0O7OhMxHscdrwXgtYG - KXEUYhXR0eAD5xUapvMaC9JNRr1qQQHn+hOS3ddPTL/+Vu6oRDfx9CxnJwpvymPnXolDD6+YYqs1 - Qbs/Nbbb4iufPtE8y/aj9DGKP694jEU7Re05Sonz1j9GR66fG+TocyYB5FlW3Qq1hVLSJ+LHes6n - 73psvv2L3aqpNXrFME5wCVcZXeqZWDK6Supff6BFUSWch40AcKlm4+c3u24SCxs2SFkztzz2MYtF - RYf5NL4YEY4xYpf12lbOBDHiPMnbG8JK69E5bEP261fUuZYLxfG2FTPXXVKOWygtxdt4Kl1994t2 - 4rP5+QWm3ry5nEmc1vCpX3sWOKgy5tSmB4Ty14cF9YzLyYq2TzSr6ZFoprov5zVqZAT+5sn8hB4N - plgbADXbF3SeihLxxlUK2Y/YheyEVYNGYZlX0O0Cg93UxojXsN4lkOJTxrbN9oU+/f0oopWQpVhW - toPRf1xbgvb0kJnaORYa8tV4g5W5fTDbX25jQVjmNYgVi+n01Td6S4wUZK7tWWx2Yta35UMC7Fod - hfNqgUYnGSmUiWVTJeVmRpdwrJG7jCjD1dR6f/z+7dmlTD9XyJvWfmJBVy4r5ji+0HH/cpegvG1t - Kn79AttvQ105Facz0e2EG93HjwrYqSjF3SoZMz7nGyp7eXonznITd7PixLViUxrj6WpUHrXyQwKr - KYgYeShl3F/33JW/vECnr/7+9gvEvYfw4ls/LBCuEnLssCdGExjGdPaOKZpCKyeutLwYbMs/GO5k - cvFkyEo23iWzhQoVM1ZWp50xW/khRetjmtMuPPFyymcxR6HZWMyhaBnTtb2/wcJstvTIvQ0a64gJ - sI78AyPLweCrRer2P/9F58vpnU3v/oOBtPTKXE9S+Pxc9jny7PWO8jeWeOuufYAyfYzMaZ6bbGRV - pSLJXK6ofHu2cS82hb0Z2fhkd1nrEL/aUwThbkd/fsUYV2nayqupen77idYNxG0oJIIls2C5enhD - t7uIqH56L+afVzc+mEMIimEVJsmrzS6eh0/Vg0Q3129/WHlcjiJJxuVewMq7M7u16lg2vGE8kUB2 - HGO6C1iFHukK2ebqu+sd3/YhXokx0Q/na8ytyzzLS1yccBd/XlmfRmoF3DousBy0rdHrFjPRx8Uq - +fHaMAzd6Y9+kbR7G2yslfaP3uKiO3lN+zRlEG99RIVaFTMmm0ECWE4qdupuijHkSwrwWqYGTSsU - dnytK4L89StYLvS0m8iD9OiwOR2I+6JzOQme+lQe4bGm8vbloTHdJia869T6Lz1TXKmCquIR3iw3 - cTm1gzQij5EzHheHTTYuxeCEzmETMv27v199bNHjWgyUZ4R2zbc+IbxeF3h6jLNBrb0qwbc/EL/z - N9noznuq+J/bhu7yV5LNi8e9ApX5NjlmmhJPZ//rZxQ9JNbL84zZMGwBDQsJM+9IPuU0yjgBZ7fJ - 8VIWAj4UhlRDnPRP/JEvZTnexXeL8LUxWJaiRzmFEPTy5k5O5NufumkzPyo4x3f7x3OcBuLyCev9 - 2mT+LQ2MSeudBD0HTljQ3Cy+0u6JhNwjPRHiqBUffnyNCkcnPz/cB7U0A7bKGCvLz8WYbdMF0Hvi - Md9nLZ8OXiugL6/h2esqNF24J0H3cHssn0+6sVpVj6eSDOcL0wZUZVTFnxmux7dKjEcjoBHl0whL - 7FhEn3cKmtnNm5H66CZi7s73kkfN+wm//rTLtrY3qflNQubndSPOpta68dcf0WWeKLBEy6ZF6lKY - nuiAl6G1MXrUBCESKleit1jlJY3iwIZWlV367hQ75mZqY7gm3P7jrwTHt/GfftHJ20U2xa99KL86 - sWM757BHk3/oW7SpDzuSFO4aNdfw5qJqcW6JecGtMe/ajwTgDCldCYplTI/rpYb5lB2J70HT9S+w - Z8hvskiMcDx0k+9cXFiidocXEruVX162QEObiKVBdfovXm0PVkTsgxd0Y+egGxQnsWJ29HIy/nj6 - ItyXAyfq64r51Jln8c96BlltxeNL7kZ0ZHsFiy9p4w3Y8egfvkZGsvRaqbjegK7qhO3cHee9n3Qh - LLtwTTCCPBvv/eQqoFQdFW67wpu+5xlwjkq2C2tsTJ15F0Am0p3pU1Hy5hqHtvLjqa+/K8fstrHg - y6+YNzvi8bC9gDS0Sv3l9cqbBJAXYF3HhumLaPSolkUR6FHL6QPfu24owocJK5pcqCiKQ8aVbQ7o - 2p8K4ntOXQ4HdysCf+g7hiW76vicHXo5j3JCiCwEiF+o3cLxEiokcHRsTHhHeziJ9M2cpbUvx9d2 - 
fUJmmJ4YOfZXg9/lW4ic/X3FjOOnLefj4UDRr7/RH08HXaXCXrqpzJrcY8zLRJWU2/26Z66yKL2f - Psrbs3n/w4PC7Wq1m+2wu9P38qBz3gBbwHqxEOgo0VXc9KcOYBlwkcpfvpix9bSg3B0b8tX3jKMr - TUEZpIrF3/PySS6tBJ/NdGE7muvdKrtcc5Bm0SJWeY+QsEqUBkIhuBOKALL2x4dfvSee/lI82gRn - Wz7Uro8Xl9cDMfUcN2irHpaM6I+2Y8kjseH7e9nXX6O+6k8hfPmESsmCdbOVrlMYut5i+LxxOkFq - UooO96Sjs53EHvd8lqKvPpIjWR09LrbTDHM/vbBcHv2sPR43PdK7NWLagle/PCOEQ237NFtOYsYI - iRcQprGIPyfzwL95kgrokj2wcDgK3tOPG1X+5hEYoiNGfXmIQpjuZ4QfsvrsGPWWLVwvdsIMudtl - nXG2MFzcu4tlsbe4wN9hpXS7OsbfftB1YjuN8M1bmC+5AZpT1ziAcpqEP35zbsTRgtHQdaKZnHcf - KscCNBd5R7/+zBtBUGbEBUyo3D4O5eAhr5apNBf4/uXDyjr5Ljw1lbHwVepcyG4bE4y8NAm+JQYX - O7Ft5T7Y+Gwn6DXvjdOlRtuzdWfax9Yz0Ql5/uM33LNlFo/e0vXRfJpfTBflJepfLIxgp3xkhl+V - b8xDp/WQN8L9myacDK4vXRe+eS8J3q+w+/JKpQjvo8rMeSuUvE+jGibf41S2ogsaut1egK3gh3SM - lm32cg6yBd98FJe7RWN8fnmZ9jzJFK2lwuvly3pGfh1NLHjf824mcVTDj7/T3RahYdfRAl2qhBNr - Cj8ZX+V9BNJ8E5mavGhHl5Nuoud7JMz4vJfGZFmz/uN9XH7eS29+rYoC6tdxoD89WX/zN8DiymdY - PEk/v2jD+1Nhsv/YejxbpJPRKZ8XmOfI+uPf0Fe/yI83RUBYRcdPE7DcA7ucpItao0EolxRu6WCM - Yd0dYNuGb/whHe3o2U1mKPjiSAWpwkhYpWmDog40Yi1sp2PBSpZBJvKdKo+l36F31QGMF00nVn/d - Zf18piYkQu4yS4UxY57/TpEyyNU3v3nw1omSm7xrdJc57rbtJsGzn0jdhgUjsjAgqmpTBNg1OypL - UWeMn2P+zTefNVHPUpFNzkE2YSG/KCG8eZXsl6/tUSjRaRUUf/In5DbyjRlNUHrzOiILFN+GJ/GD - Z8/n5KyFymUNHtvSW8fnfl/0yNjsX0w/KZqx+uotfOrFg/iKUfIxyf0n0pdZ/Du/5eA/ri5aHUSN - qBI9ZnMwOPqPf5gu9b3H/RQk+R4pMdOtrcon7Hg92tyDE14+FCMWa6LVEPQLmX55lU/WbaHDdfX4 - YIgyHc14Mz1hfyvOFBRxi9bffgXaxkp/eXg5XPWHuqlXvcKsi9Xz8ZuvozXRPTxf3GVMm+DuguDW - d7JLl4IxDEN5gO95Yv6t0IyVtwMXOlNHxIoei5KmXnOAb35OzC8/D1/9Rs1gEbrM914sLD4HAe1c - +cJ29WIRV6/t+gBpsPTIL7+mXIIDyB/RpihrWMn8FGRkuUFInKYJstE6mS5qVa+lr5w7ZVv4z5uy - fLM73dSwMr5+I1LGPHD/5MnzBSkjSgb6IW4fq0iYXKUCFh02TJ2egzcH4rqQj7o0EB8UwoVjvl+g - 5lKsma+ME591AUXw/f14JZetJwKbn4oQTw1Rv/OSD1YkG4XpXqSif1A7xvCmAeNyAlqoD8ujjVo3 - KBE385en1Yw3CxNQOKVvXM+GlwnaQ36Cpws73ErLi9f/ztuX13HBwEajn9ohZHQR0IXEFmX/2lgj - WDvDZsHyszF6k8kV2JOc47WTvvisemxGfJ2cyVZwTW/lxFcLnoFYE7X/rL2Z3ZsZoki9kMP9AR73 - dEmGItV7upBWc0k39LmAe2saGLpoyPrLWn9C2kQmCUxH8ppfXlPoG2A//8BcoTyBv7xu6Fw1Rrn+ - 5ZPK2TGJ7V00r9dHVVf0WDeIPh0y9Du/MBwWNds6T8NYBezZoN/8CFt2nM1zMZ4UHG57Yk8Iexy1 - Jxm+/p6ch0/RTbI7NNDvt8dvnmWX/WklLxAuO5M5wtCVr2sgPH/zJubvjKj8w5e/+YHbBz4fOzRI - yLnF/Zffq4xKxTVH9EVs5myKoRujyyuBSNtv6NItvmTx9cP68d5hsd/fs/HLm6i/RTn7+ec1w5sW - vvv5Jz8Qg3anwq8eH6b16ubOlDDKnu6JBbzFfP7p93d+hqvicIv5HDoY9jJqiB7sHe+Pn/7qMwuq - aIFo6hUH2I6txLYmexjjRmx8pGbNSKxg9+ymqrs0YDrN6jdfMfqs9SvoA+QzYzPv48mDpoafP8O3 - pcJ59nApfOclzE+Ej8FB1tIfj2B00r1y7kOjV65zP9FZuVPj689k5ayMKju8Yjubn0K4AGSkD2bf - 2nc558r2pJDDKSDubos4v9qbEH3zTKJ2Ts05XYW18u3/vzwYUVGqZbCuc0Ms9CHxWO4eM9Tb8cDw - 2pHjsXP4DR1P1oquY96h/i6yBl5z5ZPd44YzXp5KCYTWkegCUN1NXz5C7rx9M2vL5njce3cJnWMx - IVZA83ICPtm//PpP/Q4uzUwQ3n1IjFiZ42YLnQVjX6cYeq8s+WQkPZCg7pndshdiYXtZyMFlnqmw - O9+733xMth4njWkrLSlHJ5F6UC4K4I9uFRm/M3dEW34diaEnpiGuUSHBd/5K7JcR8Dk59hEs9/2d - 5f2WlUMuZDc4QJEzA1/1uDNn3//l5Rg1deUNVvwskIMuV6KfR8MTv/O8zd47ELbLuz4bFzftpIi3 - o47F7aqOx3E6POXvvIqEr3PK2+SZj3CyUkoC93nz6LQPRDiVxZ0uLpcLGpf6SoIv/9P5ez7nxflh - wdBRi5k3ARv0PlUp1PYiJ1vnWXp85LWtvEl8xHM0Xbyh2woC2r9ZQZVVGHvz2U1GtFwIhHkdf/DB - 3pIa+gZvMcu9dTYt1TyEn17+8qEJK5K7iZ8R+tPfuKfFMvz0fOvkzODtXKnyenkyv37hU/YfV5WV - qKum7zyDGONGPc1IuSyBGTmq0Ri/Zwn2Uq4SbZneuj96uj4aPp3PY+kxK7mOaDhA/csb4vG5yTHo - fZ3he77vYr5V7hSybNyy++l+99jFfPfIyw3CtHfuev2cFpby9+9WwH/+66+//tfvhkHdXG+v78WA - 4TYN//7vqwL/Tq/pvwVB/DcT/9xEoH1a3P7+578uIfz96Zr6M/zvoalu7/7vf/4SlT/XDf4emiF9 - /b/P//X9a//5r/8DAAD//wMAaBDGhuMgAAA= - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7bf024783c8096de-SJC - Connection: - - keep-alive - 
Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Fri, 28 Apr 2023 14:54:47 GMT - Server: - - cloudflare - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400, h3-29=":443"; ma=86400 - openai-organization: - - user-adtx4fhfg1qsiyzdoaxciooj - openai-processing-ms: - - '325' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3000' - x-ratelimit-remaining-requests: - - '2999' - x-ratelimit-reset-requests: - - 20ms - x-request-id: - - f59ce3c6f76fb37599125046aec3d2bb - status: - code: 200 - message: OK -- request: - body: '{"input": ["Important Information"], "model": "text-embedding-ada-002", - "encoding_format": "base64"}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '100' - Content-Type: - - application/json - method: POST - uri: https://api.openai.com/v1/embeddings - response: - body: - string: !!binary | - H4sIAAAAAAAAA1SaW8+6Srfl799PsbJu7TciCjVZdwjI2SoEj51OR0QREDlWAbWzv3sH/zu7u2+e - RCB5lJo15viNWf/xr7/++ruK8+ej//ufv/7+ZF3/9/+YryX3/v73P3/9z3/99ddff/3H7+//9+Sz - jJ9Jkn3T3+O/m9k3eY5///OX8N9X/u9D//z1N7ldGbNR1kfT59Co6Mw2K/aiVIi5YisGtOPUMVc/ - L9re624DJJoVM7PigS5k25cPVXN7MDsSyvbbeMOAFt0tZSRIHC4+lqELC7w4EyLkQtT7lXUGOVv4 - xP6aWj52jb5AVWAsmZY3H0Sr5pvJ3tn4sEc2Bfm0J7cMHo67IOq27vK+rnMT1MzcMUfRtvqg6h8N - neTdjm3dzTmezPutAtE+a8zpTl0+tXmwQGccXom7e9hxzz/GhBJtFxP7GR/ygdlYle8eUunickmi - US+7DkY/14n9El2PVpWkwUtRLlR+oi8atsc7hr47fYjrGEU+7dKwBOXerPD4dMN4bJMaQ4iMB9mb - m1gf40wI0CpbypSHFxSxaJpMFGYhJkTCu0jAbizA/dgTsn1POGdT4z7lNQ1Vtg9jFQ1ws314R+iM - uTUE8/pkmpwddJPpWvlup6phKZhH+0FcY0v4dE2eMvpe7IxoFwYxvSZnGazE3ODNEK50Zp8WGjpM - tkp2+15DvEpQAXW9jZkG6UYf3ru6QivPD/ellBr5NGy3AuyJR+jXMuS2t95+Awd4Jsw5Xl95Hwbd - E+KbOhDte3t443FRl0jp+RsPxKxaOhYhheubMyqHkhEJweK2kXUVUmYa/a4dJyTL0ETai5DqHSD+ - 3ZtX2DzXIj6v73c+JVluAnMblxk1nWJaDFYJJ1LdmOYPe1R56duACR0CYl2FQqeNaptw2eYRMQPm - eP3tET6gv9CG+dpRi1dppT7A/cCFHCIvb/ssUUxwk8/AtmdZ4s1d+GwA1+Gb7EdRjrg8ltqmzuQD - wevzHo2raAuKAlHI3FAq4u69rBr0FT4dHnl94PxjdqUUjvsvs7SQ5DxwVxu0DvYbgje3jLPLUung - IUU+cfinQM3xbT/lOKlbtr8GKh+Cbz7B5p0hsideFY+Lz2GA18dzsNBFEh9Ph/1Rvt+vI9ubG+SN - py96wuNSMaa3LEV0HW4GWJ7vBXFegeDxvD4WcAl0g+gtU9E0NFEF922ZM+sqGLpw3SkplKtsR0hr - brzWcqJwabx9YNawafVRJ4tSlkf0wTA9gmgKe/cI3mPPiLVqRL1TFqSAOy0NujwsIZqu1DeQpNgx - c8rUb4fruaGwWBFGl1k3oMnU8xT1eBMTs8F2PqRPHKCPVn6Y+0jktutWzhXOn+LK7Msj493KIRNc - hQdnvtMeeZ/trk+IPm+baFKwzId8lYToGB835IyzT9SwvU7RVf4qxDT6Tz6tjyyA62qpsX3XeS23 - nChQxhe2cO+Xb70+HXZH8PNWwZNK3Zhd3UxFn1oumJExveW7zgvRtlXfzPhKYTRsW/sMZ+E90pW1 - 0bjItxlF5095pfLJtHU+7y/42IwRw1/e42F5OjcgnO4ZbSrrHo/nnf6EQBGfeFiIIR9MbROgJk1q - slOrJh8DfnyApDtXKms+4z0JvALlW+dIPC++tUMtpCkI09Wg/C2/9eElr46gYW+PJ2uRtVO0LFXI - /exKM3v34GyuX1gsjQLTy0WJu82zCmCR7XSCfdXzhn5bYPCmbUpX59eej6iRRDAcGfDiIUhtt0mf - A0Juz9m2GpOIBmWVwZCWDtlnuzQfF/ryjnAgIaY9NIcPr+pWwS29Ueb6XEYsV90BZVp+Yq7mE0S1 - 10gRvS8LZjfJSp8KczVA0Dp3grvnLZ5ulWfDnRYG0ZWTnHfiDQAVOpTMO6EmZ4m6TNGvHsz1XUYs - pdCgyNVXmN3Duh2wRAtA7aaiIB0/Oj0HYMPwuK+IJpgD4mW9esDyYZjsmLG8HdtLGkKgr2MMpn5t - xzJ8GMgf6hfzkjDxBmEfYpg/01zBaV7fuF0isxovTE9I4E3L1klhlaUhUW1pp493Kroo4EqNX30U - o+5rtnd4UKDEw+2kj62JTEi8RUB52fke1z/1A71Xpkds32/y7iahAlVFGLJ9/Yh03l6jFH56uniS - UJ/S1beCp4QxFazs6fGuWWOE5PsNL16NO/+eKgD11j/xNymZ3n3y8AHlVz0yw8qe+mSVcIU4aVvi - qcY3pqbeZnK08X22FyWid3EvdZB98I3tGrTJ668f2UjvF1ti9QHmfBHVRyBJoeH+cPX1FV4+j+iz - uZ7I+fY+elz/vgB9CqIxXVvsou7LFAzj6RRieLnHaBTYqP72G/Of1qsdy9r1kYmeMZ7ls6XW8drB - oR5vVHqv4pYdO8mWvH14ZqYXdmioE/kK1/fI6DrnUzusrFsg2zFdz/p11YfTQxvkJn3VFJ1Q0/LI - 
SQPIHleXaIj6fJA/moEGJ3DwtOdZPqDX1lXCJbsQkrNPTvfkkKIa0RcxxaDTh8RzOoDUd9lW6Ew0 - ruqNAZ0aJMSmuzHm46BfYdtlAzF0/uX9dHyVUrnFBc7rA8/H5CzfwXkqI/Eno8trIiVXpFaEEGsn - t5wNn4uNziE6EsLTZVzb7bBBK0k8UGm01jnjhZNBv9RqvGg76jVHqTeRVxRnZl6OQzRY33UDy1Z/ - 4Y15cVv6tc4VqjhpmWc/cT6cl3EFxeqr4+ztb9rxfbgfkbdmGtk367alk5U3EIXXB934vpsPheqU - yHiahBjudM65p8gYVT6EFJDURZ3TNQCU2Wu2N8ftXE/CHbbSUsZrU0lbdqpoJ5dVllPhK4Vxj89+ - hrZvp6PA1jbns/+BJlW3xAlqJxqWt7cJOC+WeOEqojct1t4d1Bt7/tH7qVdTgM2VdlSe/dv49WMb - 9B62dBLhzbueqU9Y7y2PkfX23Q58cH2lDByF6D0U+a/e0T3fq4wU0rGd0sTTYMdEmTn9K47YLj6b - 8mN/PJD7YyeiuloVsLkl0ZauBTvPh1d1qFB2phVT0UuN+HePr+hCM4E5/GPwLhGiFOonVYjxsr7R - oLdvX7m+jxad3v4mn/tHAGa3aDCkeyeiZbGnyLmKXzzaEfM+psI7cEwnZ9aGJPm0XVwCOPjjmu0l - M+TjSjsOKH6LSwxJ9eZjZd59cBfTB1eWlHjTJU7vSno6XJiviwKf0lxfoHOzj+b9zHReX+QKXbbv - CI+hjdEgHx8bqEVVISTJNI9P6qaBfXgAph/2fitW1agpg15+mUPliI+R0RRyAsaT2fmzjH96qFhv - JtBi7dF41LpPhdh74RKzrd9e9+snd7btiJZ9R31Q77ErF6410PuVFjE/ZZ8JnWRrR1SqFdEwaHsT - tEezoAthLFu+aX0fihRsYteRGfVDkcsg5HKEkS1O0RTTboH4JzwSp7++o2nYOiJszG2Fryx1+MwD - qryVFJnsrsc9GhVNlSH+pm8Ky3fV8gPFE5rvM9WvUNRl24sP6pT3mFlqn3f4vQXw72uKFztbjBls - iAiyaG6ouOwrbzpmIvzhGesyMn3qzHqD1q+BMM31ypiPplSg5pAGLBRhy0U7MGf9ubt4LfcuYuHh - 0iHZ3wlMpc4zoq8gDSAdhoY46/1bH/PmIsA9JypdJvXD49HDA7RNO4Ml9qLg43HxLiEZhy27D1M7 - r8cdg9FvK6I7ytHjr10mgOxVPjvoktUO8/5Avief6JqtRH26GlMA8/rhNM94PK4t20TblBrMmrQb - mkZkC+j9JJyWSFvrU4JIB7MeES0jfTyM4RT+4QkhfrUehRt6gG29C+JH9oD6IqvucBztGp/UoORs - We5UkGvrzFSFL3R68BjIR1H08MB97nWF6abyn/tXb+v14qevULuWRDqeZQl1zDZVGFnL2N5CjHcy - 2vigLaIY97pTIOavxxDiznkQLwkVr4eUPtDiMnyIPrYyZ61Fz2iuJ7wUvKBdCUxSYeYb4kSlEFXF - lWK0l+yImZ/8ko/CzZWlLXljTNd0bMf29aKotY4q274n2vawfpSA3oXHduYKe9PJuN3hoqgfsosP - G0TjiyhCPa4bog+OHXNlCO7KOJ0FukFu3fJFtwcYz+s3HqtGj1Z3KtoAKXbxcB+qeHgv0wp++rmQ - F67O0/NVhZkPiHU/i+10U5mIlkqXE3xwJU4TZHUw+x+m7z9DNLyvuyNcveUCS+v92xu87FLCay3u - mf1WJX2qfWeBYtyF5CUPa94eywH/eItyrXznvEfFXdHFISBmLxb8j16t72bPvMTTvb4BPEFcqiEj - lbPXB93Yb2C9DV7E21dKTNXYvMJrv3KIZ3V5PChYEJGTezs8xHanD4/vZoK5n7AQ0Y53wbedUPHd - hcSr4IGmMepT6TB9KmamNxsNK7HT0MwvONtuJH36UmmBzhfjyw6xgtt0M+oLcKWdPvNTihhlWQaf - w3uBefqq2mFd1BiwHy7xr3/R8JG4SPUZ0HhfJdEYxUGn5N5rZFqSaFFHj2QDi7QUCDa91pv5vkCx - my3p9A01T5z7pVRZwYAX21vqMVs5nEEZHiVxQcB8rK+KLOtrssECWbUx3ZHPFcrjKZj7o5rz/LyR - YfX19sStVDUW0iygytspNsw4NF48rWwjBbeMl1h4ZKq+6hoPUBU/dBYFKxuJx0xcQHnXJuIWV8Ob - uuSq/eGR4RrG3rRb2xTabfUkqu4t9dqd7p1sF28Ty/SocPZGlzvk2dn58VPbVOjYId0LFGbNfqqL - +5EqdkVvVJQ2VTw92V4ABy0jLLyPnjfRfBSVU7m4MYtGeTsZ3fEB5LR/sO1Hs7xBSwFAq6uUPuZ+ - xW3ldoSzkI905X0lnSc0miDvqhLLkbqIG6HgBXwfokGs+ObF44UmIdD87M1+q+SjVOIMITf0qGiS - qz6GkhiC8bkS4pNUaKnfrw2Y9ZfM68+76HJ5wHUnCmy3aTNv+JrtFe2dV0/FXiwQcx8XAYXIfBCr - eupoBKwN6JneVeIYdInYKlphmPsTc78n5lGWbDY/nmZG0mHU5Y+hgdm/MVP5PvP+Xq18+WC7wIyq - 0FvuHdMrcD7tmFWZ6o8PMjk3RZE5XKnayTc+AWgb+8Cs+NZGzVuNFz++xdJoXdpJ1d4DDHrxJVsv - uHtzPWRw7/OECvg+ofGzMky4VOuQWR5r8zrIvgGUvfdk7uxfum1rH2HWbzyd9w4SnuY6AB2kO2nZ - vfFYQqMBdpva+cOn/CtnFeTZ0cErmMAbGssPkb32XdwImo6GsxRSCBThySKzR3ldq7dA5hIizFPq - MZ5A359hcZk+7Od/uvNheYePVnyocH+I8fy+RaRt3MPMRx+PTY32BMsIG+I3Wq1zVtcF/NZHlvyp - ZU8vn2BnZwhPy7PjibW/BVQ+/R3ZSuKprV/UuUJ9NzWirraR1/OPP6BfHrXzvpLHn5KqobBEzpy3 - xREdkS2i84YZdPxolj7u9VRUbk2YU7ToSm9wlav9y6+Yx+6NzkC8HaHSrYR23+Sds09ypzAs9JA2 - J7PSR8tONXhmpzWun+YuHtTRCeDZ4Y4RxLt8+L3v1fO8+NWP3rBkIwNnxci0tR7zYWdLGFL/ZTPC - Rkfng2gclc+JHX5+PG+QmGVKW3B31vMgHl/JA8OSX01GQInbMamfBvQC9an02R29YTHtO/SrD4YD - 0xuwVJZAklLDq8Vmlw8ftDTATVwbr+tqatvK2j/hdnieyH4NOeLSavNQ5u9P22ql8dk/iIrImi2Z - 81jeXy7MhmMt1n/8MLfu9hMu99ObSqHx5vyzWp7Rn3obxE00ys7rgcwOGmZMqRJToeAlzPxOl0Zs - xqu9Xglw4+aEJzk6RjRPWhnoUbgztZxaTuNd7KJfPnj5xpjzilaDfJXdEx6er2fLr0G2gPxVG8x4 - 
1HedG68qQ3O+wfZe7utsznfQzL/M3Qq+PnT70gRnJTjMCY0tGossvUK/VGumjo3aTrOeIxdXlz96 - 9nu/cmryEiN2d/X1S1AKFAuZzozVsEIcRfYTwtfXIW7mqfHgpbUJb7Q/EUtR8pifvXUDki3IVNlS - P5aukpPCz496hXtAvCmSK/z0wi0Giw9OlwEcNgeXWUlJ9EkdIQXxEsWM7D4rb/b7GHnKtSCE3I9o - 2rziTPrlu1a4nPO7c9P9yRdMk2w8pmhgAz5/E2aYapvToEwzFBurhM37PRrO1SeVnsampxs1a9Dg - KoEL+en8xYvPq8mHOpmuyp4/RzbnLy3DxtEA7AdLpr+EdcS7ZukjZJQ1Md/COxqtcRTQuiqedPFe - 02hkt3UlN3jlUUrBbMtHVahgN7giZv/cesLsH1G7fmKmwqHNR9tICtSk2nb+fnL8vXxvBhikfLKZ - j1H3W785r6Sw6ireA3YH6ee3z8KKt1xcY/PHl2wH00MfJP3bIWNJO2Y+o6XXB8aWQi4qBtlTq/DG - OLRMAH4U8UCdRdxvnccEiQcBe34sk08yIRg+rDTpMn5co6EfREAG36h0uHpvj4XVToT5M9tZm+yX - dzUgJa8VnlhXx9ORdzbMfoGYsPP1OX8Mf3nIH32lP17xtlKIr29V8liirlPwQc3J67B8xMNhuqpw - uj4rPG5iRR82a0NGO219w4PSFfog3oQFyk1BJPsvITHHJyeA2Z/RrlgK8bRITwHsk1bFt9CL+QrS - 8oEKp7Lx5bX95tw7VncpfH0ctrWc1BPn+lB+ftDbvLR8/eONHx9aO9lDnX4X1D952dZfpPr4zkUN - YJ/vmKHHhjfUQpWhJcMTMXgeeoMA3QIam27xuhC/7Zw/qvBw7AUzlp9VO/7y9tl/4XSJxfaXl8BH - 3JjsNb/vds+pgHJkX8iuoCuPXpPnBhG09Sma+0198z4hfC9uRox4jdH67fsNgKRdiSO6fjwx8ZT9 - /B1u2d31KH4rGH58uqVrE/HooS8A27uObDdHXec381JCsUQpXTyEW56el3ED54v5ZWQZV3wYfemI - JlX4ULR/5zFr2baBaeWaDHdPKR77/XGD6p3MmNdOez7MeTpKcrbB1zoqo498NxpIxYdPwuW59qbf - PISJnx1RM1VFYvn0B/C2KGT6ejTi8WHu8M8fUtgcgmhM6rMB67VBmOebDRqQHWvQd5cPXlpq3xY7 - LNuArcbA35Nv8U7ajhjCkXz3Y9XkcbNIkwAitvqQbfxxOd3Y/V2+YR5gYdaDPD1f5/V878g+uZN8 - Yi92R7Mek33Xta3wke0CurcsU6nqh5gvU9+UdHTz8E1gEZ8MZVsBeZkHvMRBqU/T8zAphb4oiZdf - zXy6uNxHRSgtmeVPaVvN8zY49bt8zm/vfDq69+PPz/+ZH7GZn8EZK5GOikm9eptqG8iMbUGXPH3F - /aotS1iQ24LC4sn59FpyDVxjzBieWuBjJ0su2Jpcs5+/44cuCJSZj8g8r2rpiFRRnvMndpFPX53R - 5+oM83yGuYyd417r+gZNqvghhlB+9cnv1AIZq/Wa4PboRvzS3zVY7ssTs1bNWW8gmjKQR+lDLNti - +Z/5BGflyHb8tPyT78MCw5kO3ZLk5W0xBsrn1B+IaomDN5DHOoWX7XyxpIPQsrm/ovG8etPJOJd5 - 1e2pCdMAnC610yMf7vn+AY9H1ODpkG3mPHIwlHVcwG//6axf5U9l5kc8Zd/Df+UJzlX4Mlcm32h8 - UeeOnoS4mC+GyBO3LSpg3LwWtFfGnI9NnrkQ8HRL/PvViadyV9lgriObbKPcigRfl1UQ7aNGdqja - 6/1vXvCM7s953lujztSGUDGu154d9knC26/w7v7kkzMPRTMPCiAZ9Zfs/fLtDfHFK9C1rQqizvwy - 0qvjgqYd3wTjTxiPhz7O0C2NKW1+vCMTywc/rxX2mz90Uqtryno955XDouEUDXUJ5JVLlItnDw3u - CUQUOcWe4Xl/T2GvnZHqh3/y1qjs1QqUMvAUKvcu8djv+87+gWCHW9GUrlgjK8H5Suf5V8yPu5MI - VmJssKQ4yKPe+3FEqtirDL9CEwnrPTLR1rkbVFk9xXyQdEZRHolXzKWFrQvCPvShacX+N++Ixpf/ - 1UCBQ8hILi7z7rlTB3mJtwqz1jvajl10EgAHz4j8eORPnrclOaZLt6p5mSf5RgaiH3Gd0G0+zjyA - +mZ1xlKEh3zgjROCWfEL2w9HC63G0axgKFlEIcidfHTIVoS/f6cC/vNff/31v34nDMoqeX7mgwH9 - c+z//d9HBf59T+7/FgTx30z8cxKBdvf0+fc//3UI4e+6rcq6/999VTy/3d///CX+OW3wd1/198// - c/lf8//6z3/9HwAAAP//AwBim/ij4SAAAA== - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7bf0247aaeea96de-SJC - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Fri, 28 Apr 2023 14:54:47 GMT - Server: - - cloudflare - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400, h3-29=":443"; ma=86400 - openai-organization: - - user-adtx4fhfg1qsiyzdoaxciooj - openai-processing-ms: - - '63' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3000' - x-ratelimit-remaining-requests: - - '2999' - x-ratelimit-reset-requests: - - 20ms - x-request-id: - - 974557ca996688f2b7e568f6d601ad88 - status: - code: 200 - message: OK -- request: - body: '{"input": ["Assistant Reply: { \"thoughts\": { \"text\": \"thoughts\", \"reasoning\": - \"reasoning\", \"plan\": \"plan\", \"criticism\": \"criticism\", \"speak\": - \"speak\" }, \"command\": { \"name\": \"google\", \"args\": - { \"query\": \"google_query\" } } } Result: None Human - Feedback:Command 
Result: Important Information."], "model": "text-embedding-ada-002", - "encoding_format": "base64"}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '483' - Content-Type: - - application/json - method: POST - uri: https://api.openai.com/v1/embeddings - response: - body: - string: !!binary | - H4sIAAAAAAAAA1R6WxOyurbl+/4Vq9YrvUtEJWG9IfeLJihesKurCxC5iSCQBHLq/Pcu/E6d7n7x - AfFCMuaYY4yZ//jXX3/93SZVlo5///PX3+9yGP/+H8u1ZzzGf//z1//8119//fXXf/xe/787sybJ - ns/yk/9u/71Zfp7Z9Pc/f4n/feX/3vTPX3/fopdD0flZVNPUPC8yT+sT3gt8pY/uh6XKIzl8CTzP - 74Rhggh4m3hNHTU6VhN7kC18XyqDotgb+zmfIhk+DKen3kq1uXg3HwJceS8R25JtA15/gkgZ7UnA - TptGFXfomsGpRFfq7XDr94G9VxVzjTbUY1RLlu+XQXJ53LE261NCuyg4w6QiDZHiXKrKTRMg+KJl - RuBWrkPyiQQGU2j55ASHhnM1HnIomOyK/UP46VlvDDGM50TFfqDyiqUNOyuv9Lahhye6hzPnWQTX - 0DngC2N2zxHqDMiu8I6RNGxBvy4uKRxflxRn7lr0yfd0G6D9kXSMwuTT8yG7s13IOgHbO5FzPiY1 - gUPXrSiGmtF3X3RSYWolBLEi6qohbVigrDwhJErOdX924KlWtIgcqepU34SIL3sLt1W6RYA/hpBd - XlYN7qJ2JXxb0rAtJOBAr1hv0EbURZ88Qa3CO68k7Hjw40+nMUeKEphbspncgg9QLSM4xqs3Vldq - 2Q+N4auwuBY11W3fqDYwfjLZQOOZ7vX+0k8nZT9AZH0YdYzjPpl1ZEiwdpQNUjYbLZwf5gZBr0EE - yXfb1ScRHBFEb4QQUJTGZ6etZ0E8ZQeq7nDJSSh3AVTF3UR1cQQ9f/Rghrt9s8HqIbgl3dGzIazi - zZaqvQjC6fM+11AAnUH9G90l8yRLGdRbsSJSo0nhDx874yTL2DtEVcjelmDJT1utcDaLec/ZfmvA - q9po1EykIZnDzhRhfMgDnJovE4xm6NTQEsIj+azevJpv+HSAard6oM333erTls0GWGnUwK7DN9W4 - G0wLNifhjI3tM+x5FRdQcTV5Qq0ajRV7R48cesn5RtYAv8NJj9sWyhZbY0MqPsm8CzADv/U/lPtJ - 58q9HGQeHWO0RpHaSyvCNeW9eiUYqXcnmd9lxIAZrwKkMA2B6f18OHA9SAoBtI56LhFPlsVTOC37 - U4SDaHcGkG+WgS1SQp9kzrlUhq5fYZWlnc+d/BLAfZhjanrVwOe5oRA+SHTCqLmvknnclNqqt8SC - PrEE+Dhf1Bk2gD6wE+GGz1BKLEj6JsbHfez5bGUypnjl7Ua9ZwX0Sex7AhJpeyTysy8BewRWBpUU - vwhJd2FPuDHJ8Ni+Juyvdmd/krtYkyV7ulJNqO468UPYwMo4R3SfayEgD3vnycA/a9R7H7Vqs5dg - Bm/9l1L9/IbVpE4lgsVoaTjQFKlqjuMjg7f0nqMdqu/JFL/cFFTxekujyH75v/oGIJ4q6th2mHDz - +4kg/OgFKRa804MxEvg9zh49qmmt840Xy2B0zk9qW49rwkmbOFAJ6js2d/6pkqxSJNAs9YAoJ6r3 - kzp9EVhfZwGrRdT1k51oGiw0KaWOcDcr8aoEKVzwgrVCf3N2TcNSMYm3wQi1Rz6JwERwT9071a/p - N6GMpY2MOqPBL1xs9bFwijOM5Rbj+4m1nKNMa2C9P9rk/TG3Pnt9DwN0b1ZEfXx89Pw5PD14LjSb - yDa79mx3r6Bsf0Qd8chnCZ3dqYSZc3FJMSeFzp6lHgAxkjC1nncjHGCgy5CBtUXtjXnkE1lFIjSb - yMbJBuX6JB7aARhXfiav/vjWp1XHY8ir9kQtfPLAdNzwmyy9wQXJFLbJYH0DWV7qjXqm2PXvfDgF - UIZ9jTjTuoocTm4OdkOFyMZ4dsngzucblDo+4MN12FSD7YYQipvvih78wOxJJq1UmCm5TdXhc03m - /KWk8NrnD2qiSK1Y9Vx9wRoRAXt3u/PnPAgh9ByLUkshVTIJxz6TrbI1sakIfcIOm5xAaY0qatw8 - J2HnS2wASyqf1Oi9kPPrVhXhFHfSgvfE5/k1rJXUehBqrjOz38jn9Ca3uxWhe20kFWvRqoRSLECs - CyHWqcwfZ+iO3p2sG/us98q9JPC7YgVF/u7DZ8N/zMBxpZii4/fDl342w+YxaljXbwFgZeh7snLJ - KMY38wQ2xf5ogU14KxEzh7QfLRqVcH1lAjZipfQndJPiHx4w/kSnkHu7yVHgtNphu2GXZFY/6gVe - lCimzjXPOVvpkgh2R5LRvQwswPZP14Af/J6wrl7uOnNuWgzkx0vBfgfUipw0N4LwYLvYzldiz/N6 - bUH3ARn1TXoE8+68J4pyjCk2Nd32e+IGDsw3jzeNEncLSK7OKRDM+UpxbyrJV+G5BovhXWEru36T - WX1bMTQPTo5NfT/qfE4tA0p3o6aHVef5ZK8HN3jt9zLe36ioT+uWfeGzEABGluPobGKrA6wvMcH7 - 9GqD4RX3B3BzvxE+HmXK5yGJLVlzYwEfsVD1VC4UCU6NMWLrLEh85CwgCtiCErvtgVWt8662CkPy - HW220hGw2yNP4b19Z+RtstDnJWEZHJPMoOb82XDii1MOtnPYkt0pVMJB6FoGX5K9wobmlLwvj6AE - 76AdcLgtcULWaSrCQGq93//3OV3vG8iFuqX7jvOeDF/ewMIaRGp06APGBDxi+MkOGGuf2e8JausU - quQWYLyJJT6sdEEEXzMH+IilBLBG7FKoSdIB47TzK/4ZX7J8bJ8T3UvOUZ/XIzsr95paGEXWA/Dp - UNfwZGYyVY8oDbnnngcZefCD1dqmVafvqhaEN2Ri119PITkmkgZHVUfY2vf3ZNA+6xo4JtTwq6qU - P3gBjzL40iDuqorI2daD/WnekTk7RDrHj2sDz+28IcL+SkJO7NsBiunwodqqqwFd9AJY+IKsiHzS - 6VtcCZAe64nMq67m/VWytsr3yDycLHphoNtXAOyAGdT8mJE+m1Y9K5aRW/R4j5Jq4UNBeU6Bt/QD - Ho5XJrYQN/qVwPbtcdFD3xvklTuTebN6huyqRCmkFVSp8RxafT6dShk8DK/HnpRrfp9LQQmOg9hS - 
zXicQbeK7hDGssvQSrXVZD3Z8Q1UzZliJznlPW9ldIH6cevTxM+PlfTZgQFe4l1NUZ2EIa3iQgC9 - 9vwifo9uIV+d1wN0+LukjisUPnXy9AzyTfJGz12rhRsv+Zwh/5QddS3jVJEvbRtwFY4Fja/6OpkN - /8RgkcOEquWN9ryYqhKa9tSRKcovPquemy/YyuWGuhp+Av7a6Q0kJj5QdKhjn0MQE3AKkvOfz7M5 - K1vF3mdrJFbtxR/vMtOU7bFxkLD4iW9OMwi6NKsRsPsdmL5wXYI6KG/YfzQfn83K+6vUczki/s4o - 6OazJcK7NlBq4Ab1FMZXBndPvCK7avAq3s/NGQYX94JE1DoVHQ86gnorVWitKrtw8gwnBZMInngv - tGdAUeY10E6TA2EGkXpuRbEE1fLQIUF/lDqtvnomE9euETDpkfO19kiBgI4SNaIdCZlyDCQ4Cv6B - VGveJQufDUr9MSVq80DW5wM75NB9v3dk/epVf1P5TgYPfaVR1RxgNY1zGUH3jFTyDty8mstdfIEn - 93omKVKmnt22fQZKH+3JdtFXvVa+BTB8nYyej6smIQLeavBPP86uXjgDr/hC13dU+uPb8Zq1FsRM - GSl6rorkexxPKWisKqF+EYZ6W9HEgJ8zf2C0PC/fWzcV5quviPXvye03fcAOskbnA6psHQL+rQz2 - 01dYj7Ghc7ZnhrL0M7wvnKKnWfc1YBz5D4zI3uK8liYZuhZ3yXolbxKiWnoEvcm3iLTxumoQdt4Z - yKh4Yzu9lH1XWp8bXNYXY30/h81aOTEQnqsV9fy3m7CWwgyGxRrhoxYFPqnfvgiFDRvoLclg9X1d - VajYwWwgmB2UhFy1coaege5k3uzvCdOvnQrXloCwxa9rPudTICvZ95KjMeiuYMr0eAupOzywfau0 - iknnaw1HmwuLf+l0XpJtKluqi7HabWky3Qa73Sr0YVGNXvqEi+tSgEUtcuq/HQu0YeGUMKmGhh4K - 3dene3epYXnzHti5ZRYQv+4pVqbGGolSwHVS39csUqrkcscnMxWr+Tg+Ujjf43B5XqYv+BTBhMeM - rJ3XQ59zeoNQf1QXtE1Oas/w6uvB8+PVEvDMViEr3i8ChgJgqn7cLuS1laSy3PID9jZZ69Ov4c7A - V7MrPSq5XI07QBoonk4T9fvNoZrCyzAAKNg5PeZc1zfppDkQIWmF1lc5D6crg1+QEbTFqBiO4eB/ - 5zO8CoOK789zW01FpLGfHqUvTR9ClkonBuWnJJLVTGvA3NMzgsAPNDQnZ5NvhlH1lMva9ala27if - b8U4w5dwRxhd0L6X/HlVA/O9a7G+y9fJbOf7r3w9AICNXfKouAtrBMPYelJPUxJAgEMH8A6+w8/v - 6iTqOwd2VVkTZR9//blB3hl0fXghEzJKfURrJ4DgBnwiJsDRx/M2HaBFFIEiDxSgrZNDA/eVzjC+ - 2j5Y+9ktg6e03KGJ3ceEX5sxAj2fAuqxj9yzraeUYNH/2L9OPBykYmWAvhZybNpiwydDlFoIJmem - etFBfXxePxl8SeYK+9tV7lN19ylhaW08/OO7yQsLAQqrIqWaH+Qhf+ttJL+PT4NmdcLDrjWuLZS3 - VoL3F+74U1R/Gnh+Si/qnkIlIdnnWcLr3DywRm5myNMi/8Ky4CX2x/ZQzcXQbyEP0i8+2n7Haez4 - M4Ty0aHGuioSlsR7SdH2X0Td3SvQKUKFofz46YteST9rdD8r7vuzI1BvMp+l0mOG2zTe0r0VxxWr - /d6Ac7MTqd19Sp+VetEqb34XqRraFZ+f/rGBw9qQ8D7UHH/hx/inf5d6aQBrg10GR8E9/MmP6PMY - e5BI+Uz44vfyzctQ4Qd/JrLkJ/1M2R4CdR0Fv/WrJrR2znCpL6qNwaYf7N0cw0uZ6Wi3ewX+FJyC - M2wQ+yJZUxI+tbOrwsW/UJfv0pD89OAFKweyWfKb2V4ZX7Dw4+In8op9wroEBtycSOevp4QDUgfw - tK9LqhaRWzEY+FvI0PZOnyda9VMVPmdwRlWAJNq6YP20TAFKxKko3ptW8k2FlwX53k/QuoNvTsHL - Y3CXJx8EorbwGWbqF4p7FuDjwh+0e68kqCZOR7PFH9DKRTl8m+RC91apJZLjAwdepWzCtv7FiYhX - XwfGezPG1Pk0/vAgwUVxt1OOzZ9eDbMKwuzu7LAty/uKH9aGB8LCe1L19XH7GblFCpf+SJ01jXzy - MFcHeHkFCdmF98mfrmqG/uh999D6yXrlbSOwZtobbR/HAYys125Qr6GPo6P0SHiIvQyGt4NJ1be5 - rsgNPw5gdZV7ii2p8VnQJy1Q3PJOVoPw9kdlSjLgPIcZ+3TIQ17sTQNOZ2CRpf/3w7e9xPB2LBlp - 9fiTzAA4Bvwo6mrJD8uEb+/uDdqCrJFNFqCKb7qrDFfaaGBLp76+IRuxVcJzsSJzzU5g0eslgNKF - Ud3WU8DKcmjBJXTEBY8fzk5t3gBzZ2P882stuxWHH38TLo5JP2fSSgMfBiIEPe8O6OG298BJmnMC - +09djY8ClnCvXlp6cdeIr6P6U//05sLXjr7x3DOB7pbnJFj0Qz8pvgZ+eaDdFmLCrY94UWjAHtiL - tq4+tcazBbt6zqjx2FySce2RGTBtZtRY+I1rM07Bc6fJSIi9Y8Wcdy/DpX6R/D5qPTtfzobyW8/5 - TTacdclbgB7evAgrb7iiJWEpzMhhS8JtSZORZKEMxWcSoQ/Y2n2VFnkLzVgJ6GEKIKCwW523Gyf2 - fn7Cn9/B+gYvShxjx9Vf/XSXmQqgmu2xO6y+Cdsn2IPkgyuqM83tZydJ/2v/+cOVwkV/WErIeoFc - jXn02TYNINxevg6CU1RXk7wFOXgFOCDi4qemqClyeAR9hC3adqDwwk6A+BsfsHk2h55stgMDzenF - MarY3herIDDgq28tmsnFoVry7QHUqZtR20OdPifqyVA0ehoJS98YTMP6mQGLrAR0nboTEPW3V+6e - O1XG6g5rnMBm10Dn+qqoeTYPPVj6Mdwk5hmbxjzqTUKUGrKrcEfwzvpkxNpHhhGCA6lNbe0Pevgx - 4LGFAdWliSZTs/l4MBqnlCIlOFd9/EEzIJcyIehdFPq8P+9EyE6fHCORDWDG710E05P4xv5bfenf - X5658DkR7Z1WLev7BdZF/1DD2bSAd5vkDIon41hzD0PFSTS1ytKPiOhsWj5/ImkGwcW/0IPeCDqf - xiJXyt1Fxd7FxOG08li8e1XRhp7bpwLI5TYFUEubN9aDzEho+bxqsO6zkShouwaMNRqBi/6nP7wv - eaMKNcoOCBQh19n67jVQSY8vNITgXv34E17lq4s976Mns7nREfzVozU0gS9mQnIAPz1gy/ODj5E5 - NPIzs0oyPZqCT6SwIOi3no3y/Aw4rT9BrAD7Df/U2+K/blsihCWiggs5D7NegCczlbHpvhveCccq - 
hc5GeGC8+DeedaUFsyZzyIL/amMIyhZsmtz95U096y/m9sc32MfXIpzNmxXALmoOhOAm5Fy8EA1Y - nyNa8Ff47PZoU6Aaxhcb4LLXf35febxsjYjNa883lYtKeAtIROZKufNf3gTAbefjfbPbJ5K9OrRQ - aJ9P6q92s88jQkR5yQvw8R6BaoLnIIP4EByxrRQvPgHzq0Hqkgda3/W5X28upacseTJ2dvViR+s4 - Anh9qbEZiXNPkXKsf/MOsi15Hs6K7jagTTyBVBAn4ThqPpRHIzKJuORl618e4e2iKwFLvstPpm1A - 4po1mvevvGdsPRx+9T72XrwCVMqKCJYjcKn2RLM+7tK+hukTHn75lT7fM5lAK6lNjM+RCUTMnC8M - +mOI1den63nQHLRfP8JPciI//MdgiDcYrQwlq9jZ2DZQ2Z8cNP346LtXEOzcs4I9MSXhSFaBBC5f - xKl/2+x1vpGnG7gGZkXNtLNC8XZv8p+/wE5kr/xh28sZlAstxX7t5aD9uo8Ymm/QUj22YciNobvJ - y/NgVZEvYH7nmQRHIzaxdcAnf7qcpi/45W+4jP2EqS/owB24CtQ+grXfit3dAc6o7ukyvwIUf3ca - XPQD1cV1Fc7+O9LgE/GQ7Epi9O/Uk6FsPLov2XkbPxlfwijJhgVLmmrt1v+jhy93/UAWfusnn+5k - uBeyBu/v6z1YL3iQf/m0q2EF8LRov9A5fXw0XMip3wj65QB3/IPQz9/x80nJ5R9fRsFo6zzEWgbt - z70m4rJf7G1JFqw/toRdcbglE0FyDJEnfOiiRxPuufEA3+CjU5X0nc5uzlEFnJ4e2JjI2LPEIRE0 - toGz5CmlTw77XFV0UPXUaF4Fnwxp28C2sW8Ur7Mvnw33LsFfvnPrNl99qsTzAOzPtcY6vFf6/KrL - VFnyanr81hqXztnppqDMQfSy9Asaf6wZtmGE/sz/eHx/n5W2RALWssNWn9nhAQFODAUJs5hXjN0K - pHi7+EqPxiVNxtXuIsMXzTMkZus4XPqTAIVnZaJ5MvtwEgaSysVoaPiw8P3PH8BcEHTsxP4ZzOte - uoFJcQZqvB/in3xFLnc3lTqfTqhm81MMP/7A2Pk0+h9/u/Q/spXGTG8Pm/bPfA6j74H7LdCfGSw1 - YUYf67EOJ+MKWji/fJ2i5v5KFjyKwK/8J5mcsEx++wUWvqUH+GHh3IVRC08SywkQwMFn1WtqYNKe - KD7KTRvyylfTP/MziyVLPOCPX+iIxp5Gy/xrWPIjeNnlGtUuyTYca/oeYHTTBvQVtAIsec0ByJr8 - xD+8MamaNFCez3uKHodPz9GBer/8Eu1OyVuf78qaAHHTrvAprm1Oln6m7JPzCmOAvWpCdirDjb3r - qbbot0XfB7DP3x3VJrNPFr5BP71O/Vp76AR09Re2hxvCep13YB5cqVWM2HOxpykADPpby8GSj5Lx - KFPQGaLQ/vILROvbvR9Xu1SG7/39iarFn/LAdlWIDU2gP30+uRYo5fal38gUqCScwNUv5V0gW9Sp - 6zxc+t8AHFPQsJnf5nAUSB4rzOoc6l68Xh/HSpWhDARO7cndg01EGhGyUbthLQeZz1dq58GVSBOy - zFNC+tuPxf+h0yOTdfYaNjNcdcYZe1tXDtn+ubeUhhknetklu/4rnvILNA9eTo3mLvvvG9k3yt+/ - UwH/+a+//vpfvxMGTfvM3svBgDGbxn//91GBf8fP+N+iKP2bSn9OIpAhzrO///mvQwh/d33bdOP/ - Hts6+wx///OXIv85bvD32I7x+/+9/q/l1/7zX/8HAAD//wMAPe/DkOMgAAA= - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7bfdedf56918cfbc-SJC - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Sun, 30 Apr 2023 07:04:15 GMT - Server: - - cloudflare - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400, h3-29=":443"; ma=86400 - openai-organization: - - user-adtx4fhfg1qsiyzdoaxciooj - openai-processing-ms: - - '25' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3000' - x-ratelimit-remaining-requests: - - '2999' - x-ratelimit-reset-requests: - - 20ms - x-request-id: - - f2850337441001a8e5bd70bacb4dede8 - status: - code: 200 - message: OK -- request: - body: '{"input": [[72803, 18321, 25, 314, 257, 330, 61665, 82, 794, 314, 260, - 330, 1342, 794, 330, 61665, 82, 498, 260, 330, 20489, 287, 794, 330, 20489, - 287, 498, 260, 330, 10609, 794, 330, 10609, 498, 260, 330, 38096, 42914, 794, - 330, 38096, 42914, 498, 260, 330, 82, 23635, 794, 330, 82, 23635, 1, 257, 2529, - 257, 330, 5749, 794, 314, 260, 330, 609, 794, 330, 17943, 498, 260, 330, 2164, - 794, 314, 1835, 330, 1663, 794, 330, 17943, 5857, 1, 260, 335, 257, 335, 335, - 5832, 25, 2290, 11344, 37957, 25, 4153, 5832, 25, 44921, 8245, 13]], "model": - "text-embedding-ada-002", "encoding_format": "base64"}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '594' - Content-Type: - - application/json - method: POST - uri: https://api.openai.com/v1/embeddings - response: - body: - string: !!binary | - 
H4sIAAAAAAAAA1SaWw+6SrPm799PsbJunTcCKl2sO04iB+1G8ICTyQQ8cFDOdAO9s7/7RP87e2Zu - TMQOkqLqqad+3f/xr7/++rtOiud9+Pufv/7+5P3w9//4XnvEQ/z3P3/9z3/99ddff/3H7/P/W/ks - k+fjkVfpb/nvx7x6PKe///lL+O8r/3fRP3/93fe7HdujKCtm9AaQH459JDpOlmEvSuldKTb3hq4y - 9EnGJcUlek0rkR107xBO5h6vIbBDk1n1aujmt2ovYMrrgXkvfcdXzeomQ4hDgdhOt+um6pZGCuyU - BTm4LOLjeNqO0M6XM9NKpUS1p2aqIhT5ihmvte7xp7SQ0USOV6KdmykZykC9QPFsK8rTi1583uWI - IYufKZUDUnBa3+gI0C5KeuJSyadFvr9D5/lnYsJcdZNT9xfwK2lJto7AQ74U/UDBMV0xw3hdjTkX - cAR4Ye5J9H1eHj9uJjzR/UoscbNJmsPSTCH11ndyDkaOaOmWNXzeF4PYRK66sehif8NOtwXBgsL5 - rJ2EFqARl8yL37uumc4pwFHiI15eu9ZgXqX6iolpSJUjNbxJuWlvRTeeB7aPk84bLuJOAC4JAt5U - 6z6cpEAS0DdeVOldxrvD6NmQdQcZi6el4A2rSDAh9D2JeOGq8qZvfJTQd6RvvPJiiN5zBLp+Lcnh - qhQevTAXQADxzXaX0SxWIjxOco6OAdur3tmbPEmrf8/LrMtG83i/E2Q4GZs1/sV/ctqdC6VVThiN - smNMt/KBoXZjDy/cdenNu7XsgmjJe+Ys7LwYVsjxoRs2IztICkrmzyeZAWvziliefEHNFao1nM58 - w1yao2I+yPkdAu2wZYew2ySj40tPGEVoKdxHqZjmh6bLZBfLxN2ZhTHKPr7IaOEX5F4c02QcPmsV - PsFTZ2SZ9Al3F+IITSv45JpgEw1NXaewyQqH1hHwYjx0kwmLsoqx6Cm1MQPOffRslgYxcLvibPqI - LrTbNiD2ywu7WdpPoNykmeNGDIZwPEebFDovOFN5Mj7FuG6iGmK7Foh3saqEj6/lGiXX6kN2NZ2M - 0SFhLU9IiTE3QrUTptjQlfmxSggeb3Yyyqd6RPmu8PFY7XE3V/PGBH1VAlX0Y9RN92tbyuvja2JO - TzPObueNikoLm8Q2kdIx9aM/FefxWpLt1a7RJHtvH6Rn5DMj1Hs+L24Eft9/9Z7M67WuL+kFMnYt - d4jT7ODP8LGTmJiNUxZzFCcWbF5BTDxFcrppWKq9Yjb5me18IvNJdNAC1SsTU4k/8+7P+3KV4kpr - MTt7rAkzGQK7m8gObY7dfJDbpzy+Hxfm0C4y+vjUl/BEzyvDghIiSsF5yp8u1r/5oxerTwxPWDoL - xrAtQjGdcYBhf8A6uSTbVVF+18N+Yi8sBdU1mZnRvNGAtDWLCvbyJvZZRCjZDu8/95/5uTpBPRgl - /ZS3E+rH02EG7F5cZhru2+CW20poI1t3RiJyRuO8TWx4ClFEtGR9DFft/T1DjitCuYSNbl6vXR1J - Y7v8xqfppmOS65DqiztzJ2NbSImX3mFfxxPRjtsP5+G5yBVto6+J+vSxwQVBDGCKDhHbNmrr0fm0 - z2VooSSX+rUu2MPXAiDtSEj01hqDa/acQ3o/OjS1n+uEt/d+hMCZI6amn1s3730Fwz5dbOlGf5w7 - blznWt4f9joWHIWhwa60HNTW3NEmKzJj+jBuIpGXhBmATIPdrqEEa0G0mCVybPBzYQvAy/uO3IIo - DUe42wISbCrSoxt9wnHT8icYxTpgztZ00Swk71KG2yvAqD3UHXMi1ZXp9nVgum0J3mfrZ3/ij5cu - a4r+ajo1OlXhgYoPsUnY9A4u4LOwI5YfrULaIA7gP+UF2/Gz5Q1y81IhPu5tRq7TOeHm7XGH3hFu - zNBylc/65fVEP73XfWgSnugFwHx4UmZMY5HMXbW35H0KW0LY3CWjlacUUPjM2Rbv7WTeJLKJwix4 - MK/PT8YIy1GAjSxKbJc1STeii/FWzOzMmIalbSf6E1D5aBiUuccV5bPxWObQ5IFCDmPtG1/9C+Ap - xBFdNHNodFtDnwFp74xh6tYG/yhOiyxnETM83uqQVgGmsFoedOLOst/Nj3VykZeBPBJ87o5IvPBB - R6iPc6ygJOl6KY5y8CthSQ7qKU9GeFlP6FaLiJideiwm7THZyldfiU2FU8f3ND1BH4wxs4Ik5fzq - SCnK7PLFzFa2kkmTHROCOJuJE/GrMYWFbqHzECpE3ypLo+0H5wQb+eUQYl2Fblb8rQXztJ+YGmnY - GyMvo8onlxjR3GibdDs/NSG5fj4stuZNMtzs/I5MvzwzvDGWXpMKqg4VfeRky4c24SdZusBp6nOi - utVgzHdcmgC7/Zvt9ZODaFyNMdhXEcg3n4xx3fgtiI0rE/zBu2LyJrYHL58p2W6p7fXS2vNRx/Qr - UZch47xbbBayVNEF+da718/LhwSLtUkJyUEyaNqkVNH2LCfaMM5FjYdwrayF5w2P1+LQjettWgMP - Hy+azThAHEtqDPuXZTBXP22M/mpqNcIiKqnA34rBzstoBFaGS7JvFimvN7jL0XvYd+TxHDGi210v - gHYZXfL1T2i8QJZDUb1rtjV2M+o9uaCgRm/ObO9eJcNGb2JooD+SfSd6iBXhKYL7iI+ENI1UsHu1 - GJF49TfERc+7x7OyuUMdXvbE8YlXjHftlcvr4jwz43w8GHMUjYGyRmhLdrp4Q/M8nu5QL3KZ7Zhx - D+f6ktnyJbAroo42C7v4yddoKQVb4qmnqeg11dKBXq+Y6PP2kgxFfh7RfFF1EgXnRTjYxi1FVyw0 - 7Lo33uGQvyIX9FCSqXQ7XfkcKmIJSepuqCQfaTi1XWmDU6UVM7erd9cv+25GX/9CZSQdDWY5qq58 - 3xcd3SzjbT9aoGD35BI//Wy6Yb8hPtq/TIMZ/iYyxnvxpspS8rdsV+4Szu9luFCqS+0wrzY5p0Ly - foMbsAudh8zlkruTL7/8p4vKe/BR39spRKapMvtS1QYXrnqOXMnqiFPMmldvE7VFX7/MbB6cUDdX - V4Beazie16WaCKUcx8hDlJJ9xdJuRFcaQe1GHgvzw6GQPp9kBG0/5ExbozAcvv0Dff0S3hzqSzjS - WqzhcM9SZnk0TViZ33N0LBMDX5iqh6uyWF1AScuWmSsIjH7dRC2KMyVnd7MRkwlvNeFPvR+6niWj - PIc5iPOjpgu0D9GYXlcUJThYsa26eHpzhnkJOxt57HCvYo+vHJki2FVHdmgQ+9NPFJXHIl5/+/vw - rV+lelAbizh68maXEBXlsvzByr2SvSmPRfrzr8SUtpU30v2HKpH5HPD6vROSRvQuI3y6kbGDGR86 - etmLI0xorzKSSy7n/vviwtsRL3jdK07BSBIGgLQyw7I1b8I5Nu0IzevuQba2EXbD1x9BFq8OFOJM - 
SjiCVoLzPS2wNJC8YI9B0GURsw/eLPCBT1u/OSFr8ZCYq/fU4OelP8P6uDNp/u0f07LW+p+fZuq0 - lMOZryAFpmcKXXa96q2wZMfwJJ7GdqsMiin29RQyZYtorphpMV0f8Qn8g6rQ03uaOn7aJDmKxtmg - i+OnLOpNfwmQ/BCeLDH7shtSP9JBX2HCHLV0C16IWQnRQVUZXrbCN962Do9A7JkaPvKkcYNjjZb2 - K2G7g0zC5sOQCW+puBEVL3NjNLqLCXw5C0RPGqeT9HVfb0S5xbhaZdBN/iT08OlmRtReN40pu46m - 0soeJc6sZIjKy9YEz77eiDlVFp/IPpNhsJBD+dVee715NiLYrl8WRbBtOF1q8RPVifMhh2dbdF3V - XWPY1IpIdrtQNqpqrwnoq4ds+yodj3/zGbJiwsR96KSjZY8EWDz8noXDc1P85ielbBYWXjs3BbH1 - kFMIuvZGpZN07cYl3qhw3z8xIc+nyEd3o8pKt4IIs1g9d1MmtgKs83dEDp6mF1MWnmvI09UCi6+y - McaXf61lej1j4kZvlozUc9frPnR2TF/3XTe961CHkvsC25/2ntfeQzuHPIMPs/S1Z8w//ehG+UY8 - x7XQ6oWOF+Xbr6moLhbep1L8SPnNk8FRF4pp92ru8Ej0gJjBZgy5q8SAYnWb0fG2vhljsL0AgBCe - MPQHzRvzQsbwfR90/s7n03lgM7JxgZl+jpuCa7acynLl7Qlu+grROWgoWp30E3OjVi7ox8AlBHYz - MatAez5Lbd+jeOelzJI9w5DkIrBhpRcq3vz83dePIxy2a+JsxkNBAecBfNS3RpIPq8P5eNT7nx9l - l9PYh2MSZwLUmrSmo3J/Iy6lhwjmJ+h4Ck9bLrCj6ioyPXjMUDSSzNfTQ4LTxsNENQQtWbXyK0XG - TmnIdrDEhBuCXsqZskPf+fsW8hcILoTcfTK3NZOE7oRXjxanS//lC1UxSGSzB1pLOV2v7TaZt42s - I2dcnSl8+cZQBvYJTinBdO3ddyE17vcenMdjyZxTlXat2vQ5rEnFCBl9D4nsI8Xw9ed4w43Bm1I2 - vFEmNoSp0kPu+Np75OjsGYRsccCLwWmJjeR1mxLvUFfGXAVWD7cEJqbZZ8Sp8VjlPx5AdMVMu8En - VQ5DxF1ySJfEGx9NtoCZDXdmbV9pyL/9Qw7so8m+vIHXfXZ+w0vWE4IZ2iHeaav2xz/Yb97ru5uS - w/aCo5/fCufnwi8hzpY5sU7jPpw01K3BGe810e+71qBvrZthIz8c5pRalky4niSFRDJmrolwOHB1 - MhUVLWzacjVBvNprkiLTak3Xh9XTmw7ZbYbandds501xyPd1Z0IfZgL76cXk0uyt/HiRAehtTC9P - KUG72iuiG7md8NnWY7CxnRGP3Eo0D9HtCb/82C1Siw9h4VowNX1Pp855hO/AEGx4tteRrnul6cbn - IVDRb341WIXD6UOjALbMjphTpKukf8/zBTwh0LFwtv1uVBX1Am7iN3iyzwn/w3cYXQXM5O+H0T+2 - m4V8LG8GXfw3v0HiS9r/8qMYr5kQo13L9/Rd5lPCs9r04ezWOds+RCec+LP7oy/s8dU7vm8PMjrr - O4LXq9ZBws8f2bLwZvbi6iaNsn658DleE6yM+ZvTIGpHuD9IjaUgSRE/1mMJ93NEyF6PRa8vyqUE - P/8UEbkqaIHoHR6idPrqv56IP70vX/lEDp8HSVb0HO/BOx5j0s7CJ+l5lZ6U+KZlBH9OARp/8cRi - uiFWbmp8vCzMCxqOlyfTb2sHjafj8Q442b2Zeo4jxN6n5R76d53Q6RpPHj/aGP/p16Q6ed5qebNP - SNnONR61ru+o1+UXqKa7R/zb+uZNN7t9wi15m0wvZbHoMWls5A5uy8zH4oMmqnZv1D3cC13tjXcy - dFMXozKLOCHzLS3G/HNWIVDQjs4+OAmd9u/nL/8o/eCqG9nRtuGoNxoz0kveze2nCeDL4+jYJJhP - 1VpcQBA7W/LTa5Fgs1dO52lDxe0l9AYQjzmKJnv89u97N6tNn6KY9Jx52bE2Rin285/eki0jrdFI - PNv/4T0K1WI0hvuljpQNuuJNkV4T2g/aBWX9JaOrx+Nj9LfDPofLW61ZKGwxF31SpTB2xZnYl8o2 - hA50+ofv/PxyPcWejuoPPrBtggVvPMenk/I6nCKi48ExuBw/apSepAfbBmOI2C1atGh/yUe29RTb - GJPNMkJfPobH4+lQfOtfhp2wk7HEn3rH+1Y3lVdd7OioTSs+1fdSB+4WT6osuV/Qe6imEEMk0fi4 - oqjf64YE0oW0mPa7Rfd85WoNNjocmVnW0PXf+toIhesy6307JFxzzxf4zrPErg+vbirF8YQ+Tq4R - e3Ftk8lpiQtCuyyYJ0ZOMi23kCITxRe6TAfJGKmTWsqwCyV664XBG52zv4Zf/x+F9B3OGUZv5Jhe - QDdwzfmEhukOU+bdiNfngpd+6xmSmu6JcWNDMgSPvYB6rePkxzOFN32tER9Si91e8d4Y3eiZojKb - Uqbbz8YYD7Zm/uZ7ugoi0s3EGGL05Wn4NJAjEjflLd7ooSCTX/8clJtTglCgN3PK3b5bm2cvgova - hORANZm/h+2h/vEZPAu4S/qiXElgBNGCdgoIqD+fribc6Ykw0/BZwj/3awAfcftgmvAIjUYVaYte - tR3T25fXjLfQGUHNUEZUQ+47nuqbO+Q39U2s5JqFjSZrJpBMIlSxCj0cH7v7jMS0qpg3dzWa2ta7 - oMPe5sQpd33B37djrZz2L4+i/l7zH79DbMVDhrtmYczsNaWKl0fa1x+RcHZ382VTrdcrdhy3C++b - 7ydwzm1Jvjy567OHsIDJn3u68W6SNwfCTGG9my/ferKNL29UgWQCweuAcGNKuJyDlj4+uKkW12L8 - 8bhQ/rhErSwjGbU8xCDYvcjcz8L3xMZKTn/y/zC/EoP2tHfl8z0vKGyqjE92XpnoN49k5yOE7BD4 - sULTBoidbO2QM/mjbwRYvXGnxsAnofTkP3pIyKHh3bENI9gyNyKO76h/+ChoB8mhtHrsCoG3AyAV - gc2M3aSi+bb8rGF+LnRClHcWjtbT8mGuZIPW2fpkjOJ5YaGfvzXwM0u4V9k+uqZ9R6ybrYWTAlOp - mA3SKT97Gl9dnUUOfUJjCugdGVN/Du/oFw8DP7VkVU/3Gj7g3L/z3ux99UeVR0VtiHnGiM8PacxB - 5G9CvM37xefV2CygbeITXhTbuVsNh9lVjO6yII4Xtrwb77KPct98E4+8Zo/J0vAGKxESCp80Lab9 - bUORFYY2zb79s79u+7t8OPU7OimhnYiy6zxh5ZkhXURRkPB0cQFINo8Sw5fHT3IFe2CrKTzM0lX1 - phXSfEjvocPsnTmHvTwn6Z/9F6vMc2M+Hl0K3Ku3X166RatgjFoY1eFE7G//548BdAhJlJCruqBo - 
7v1NjM7h0seLfvcsxh8P+/JLDEs4ePPydcB/eJx6LYaCxqYaIzXLBUY2lmbwVJ+eCDuH4scXDXES - pBTWt/ZK7NdxgZj6cZ/QS/mduEu4J3VQOhe4e9eKad/84k++CeTwxW1CwvDs8Qd7SsBPpUXcTXX0 - poV5pChzrhuy/5Qemnva25DPw5J923DXKOurjeaDqbHj3S87Fj9uFthzeGSuExbhSIfIgrxJjnT8 - +s/i8u5qubVuPd34xOuYs3pQ2b6aOXshY5UM2mNjw9ff09tXj2Zpv1kAGy8V2S8lDUnO2w1ka6B3 - hitPQd/9ihZerLJwlqyPiXAKBB/oMzng5fd+87k59LJ6tHfsmL+sgovynENFXzndiG0ejgaxXLD6 - SiSWvrh8/Z0bwxjgmrntZeuNXRT30D0Kg3krpzHGVN6v0eI03IjV1kPy4/MQC+AS9dznXd/rqqq0 - VtKz/VLK+GiGdQlKurwycq9aPttoNcOP70SrtuE/HoG++kGsFeR8Plzmu/KnPjtD50JQahelUVXC - /O5dFCxJLxQIgQNzLu8L56ouBsp9LBdENdW1MWb32EZ32sN3fyYt5uapYeX4sM4/foT6XPvo8FiM - JyxHbVzwaFYXYLnXHYao7cJfP5GXQWp8eVvXUbYkJvzZnzk9w2R0o0uOlE3aMvvLP8ZMdbE8Iawy - LHwWfLyF2gi6m++Jdi5KYy6JY4LQWy5dCnXK650VjX/mFe9I56T1vcPzz35cnt/FcHRIUv+J9zY5 - vZKRSM/xx0cof+7yZGp3kotUX0uZWx/GcFxv6xpoLeRUwob3X/zxGT3Y16/X4fjdDwZRrjE5FLbB - uSwN5R8+40caNaj5uZYQjaPBVPu5Dod88emBreISN9M+//JXw0eW1D6IdwkUNHF1stAkuBrD1aPq - ZrYkFkjP2MeT0b359DTPOWotdUmSbmGH7G5ne8V4B0uil4IbzptNL0GTNy0zv/2YO2i04c62DbO0 - XZfwNO0xshE5MqtXIs5KyyzhO998+UfTTTNc3kpcWA7B5hWS4XKefXT08ImmG5Oh+hcvnGzfuKFd - 5LGh62UoRnTB7JyLyVx2NxWaDivM3izFZG4QWsjOCp2oEn6owQPsPeVv/TMyoDQc1UVh/3gP+eq1 - 0QtMjZUwa1y2P7zaonfOvgwxoZztN5WGRHNvrWGt0iv58bhJe2zc334cXWzONe/FYLxDe81D7DeZ - bExhuaNggBCQw72SjUl4Z5ZyH/dH9uOhHZbUCHYQpMwr8CIpx+pYKn//TgX857/++ut//U4YlPXj - +fkeDBie0/Dv/z4q8O/4Ef9bEKR/M+nPSQTax+nz73/+6xDC301Xl83wv4f6/az6v//5S5H/HDf4 - e6iH+PP/Xv/X99/+81//BwAA//8DALUBqRrjIAAA - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7c0b9868dedafa62-SJC - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Mon, 01 May 2023 22:52:31 GMT - Server: - - cloudflare - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400, h3-29=":443"; ma=86400 - openai-organization: - - user-adtx4fhfg1qsiyzdoaxciooj - openai-processing-ms: - - '196' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3000' - x-ratelimit-remaining-requests: - - '2999' - x-ratelimit-reset-requests: - - 20ms - x-request-id: - - 0dc16b9c27373738c2c364d171678493 - status: - code: 200 - message: OK -- request: - body: '{"input": [[53380, 8245]], "model": "text-embedding-ada-002", "encoding_format": - "base64"}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '90' - Content-Type: - - application/json - method: POST - uri: https://api.openai.com/v1/embeddings - response: - body: - string: !!binary | - H4sIAAAAAAAAA1SaW8+6Srfl799PsbJu7TciCjVZdwjI2SoEj51OR0QREDlWAbWzv3sH/zu7u2+e - RCB5lJo15viNWf/xr7/++ruK8+ej//ufv/7+ZF3/9/+YryX3/v73P3/9z3/99ddff/3H7+//9+Sz - jJ9Jkn3T3+O/m9k3eY5///OX8N9X/u9D//z1N7ldGbNR1kfT59Co6Mw2K/aiVIi5YisGtOPUMVc/ - L9re624DJJoVM7PigS5k25cPVXN7MDsSyvbbeMOAFt0tZSRIHC4+lqELC7w4EyLkQtT7lXUGOVv4 - xP6aWj52jb5AVWAsmZY3H0Sr5pvJ3tn4sEc2Bfm0J7cMHo67IOq27vK+rnMT1MzcMUfRtvqg6h8N - neTdjm3dzTmezPutAtE+a8zpTl0+tXmwQGccXom7e9hxzz/GhBJtFxP7GR/ygdlYle8eUunickmi - US+7DkY/14n9El2PVpWkwUtRLlR+oi8atsc7hr47fYjrGEU+7dKwBOXerPD4dMN4bJMaQ4iMB9mb - m1gf40wI0CpbypSHFxSxaJpMFGYhJkTCu0jAbizA/dgTsn1POGdT4z7lNQ1Vtg9jFQ1ws314R+iM - uTUE8/pkmpwddJPpWvlup6phKZhH+0FcY0v4dE2eMvpe7IxoFwYxvSZnGazE3ODNEK50Zp8WGjpM - tkp2+15DvEpQAXW9jZkG6UYf3ru6QivPD/ellBr5NGy3AuyJR+jXMuS2t95+Awd4Jsw5Xl95Hwbd - E+KbOhDte3t443FRl0jp+RsPxKxaOhYhheubMyqHkhEJweK2kXUVUmYa/a4dJyTL0ETai5DqHSD+ - 3ZtX2DzXIj6v73c+JVluAnMblxk1nWJaDFYJJ1LdmOYPe1R56duACR0CYl2FQqeNaptw2eYRMQPm - 
eP3tET6gv9CG+dpRi1dppT7A/cCFHCIvb/ssUUxwk8/AtmdZ4s1d+GwA1+Gb7EdRjrg8ltqmzuQD - wevzHo2raAuKAlHI3FAq4u69rBr0FT4dHnl94PxjdqUUjvsvs7SQ5DxwVxu0DvYbgje3jLPLUung - IUU+cfinQM3xbT/lOKlbtr8GKh+Cbz7B5p0hsideFY+Lz2GA18dzsNBFEh9Ph/1Rvt+vI9ubG+SN - py96wuNSMaa3LEV0HW4GWJ7vBXFegeDxvD4WcAl0g+gtU9E0NFEF922ZM+sqGLpw3SkplKtsR0hr - brzWcqJwabx9YNawafVRJ4tSlkf0wTA9gmgKe/cI3mPPiLVqRL1TFqSAOy0NujwsIZqu1DeQpNgx - c8rUb4fruaGwWBFGl1k3oMnU8xT1eBMTs8F2PqRPHKCPVn6Y+0jktutWzhXOn+LK7Msj493KIRNc - hQdnvtMeeZ/trk+IPm+baFKwzId8lYToGB835IyzT9SwvU7RVf4qxDT6Tz6tjyyA62qpsX3XeS23 - nChQxhe2cO+Xb70+HXZH8PNWwZNK3Zhd3UxFn1oumJExveW7zgvRtlXfzPhKYTRsW/sMZ+E90pW1 - 0bjItxlF5095pfLJtHU+7y/42IwRw1/e42F5OjcgnO4ZbSrrHo/nnf6EQBGfeFiIIR9MbROgJk1q - slOrJh8DfnyApDtXKms+4z0JvALlW+dIPC++tUMtpCkI09Wg/C2/9eElr46gYW+PJ2uRtVO0LFXI - /exKM3v34GyuX1gsjQLTy0WJu82zCmCR7XSCfdXzhn5bYPCmbUpX59eej6iRRDAcGfDiIUhtt0mf - A0Juz9m2GpOIBmWVwZCWDtlnuzQfF/ryjnAgIaY9NIcPr+pWwS29Ueb6XEYsV90BZVp+Yq7mE0S1 - 10gRvS8LZjfJSp8KczVA0Dp3grvnLZ5ulWfDnRYG0ZWTnHfiDQAVOpTMO6EmZ4m6TNGvHsz1XUYs - pdCgyNVXmN3Duh2wRAtA7aaiIB0/Oj0HYMPwuK+IJpgD4mW9esDyYZjsmLG8HdtLGkKgr2MMpn5t - xzJ8GMgf6hfzkjDxBmEfYpg/01zBaV7fuF0isxovTE9I4E3L1klhlaUhUW1pp493Kroo4EqNX30U - o+5rtnd4UKDEw+2kj62JTEi8RUB52fke1z/1A71Xpkds32/y7iahAlVFGLJ9/Yh03l6jFH56uniS - UJ/S1beCp4QxFazs6fGuWWOE5PsNL16NO/+eKgD11j/xNymZ3n3y8AHlVz0yw8qe+mSVcIU4aVvi - qcY3pqbeZnK08X22FyWid3EvdZB98I3tGrTJ668f2UjvF1ti9QHmfBHVRyBJoeH+cPX1FV4+j+iz - uZ7I+fY+elz/vgB9CqIxXVvsou7LFAzj6RRieLnHaBTYqP72G/Of1qsdy9r1kYmeMZ7ls6XW8drB - oR5vVHqv4pYdO8mWvH14ZqYXdmioE/kK1/fI6DrnUzusrFsg2zFdz/p11YfTQxvkJn3VFJ1Q0/LI - SQPIHleXaIj6fJA/moEGJ3DwtOdZPqDX1lXCJbsQkrNPTvfkkKIa0RcxxaDTh8RzOoDUd9lW6Ew0 - ruqNAZ0aJMSmuzHm46BfYdtlAzF0/uX9dHyVUrnFBc7rA8/H5CzfwXkqI/Eno8trIiVXpFaEEGsn - t5wNn4uNziE6EsLTZVzb7bBBK0k8UGm01jnjhZNBv9RqvGg76jVHqTeRVxRnZl6OQzRY33UDy1Z/ - 4Y15cVv6tc4VqjhpmWc/cT6cl3EFxeqr4+ztb9rxfbgfkbdmGtk367alk5U3EIXXB934vpsPheqU - yHiahBjudM65p8gYVT6EFJDURZ3TNQCU2Wu2N8ftXE/CHbbSUsZrU0lbdqpoJ5dVllPhK4Vxj89+ - hrZvp6PA1jbns/+BJlW3xAlqJxqWt7cJOC+WeOEqojct1t4d1Bt7/tH7qVdTgM2VdlSe/dv49WMb - 9B62dBLhzbueqU9Y7y2PkfX23Q58cH2lDByF6D0U+a/e0T3fq4wU0rGd0sTTYMdEmTn9K47YLj6b - 8mN/PJD7YyeiuloVsLkl0ZauBTvPh1d1qFB2phVT0UuN+HePr+hCM4E5/GPwLhGiFOonVYjxsr7R - oLdvX7m+jxad3v4mn/tHAGa3aDCkeyeiZbGnyLmKXzzaEfM+psI7cEwnZ9aGJPm0XVwCOPjjmu0l - M+TjSjsOKH6LSwxJ9eZjZd59cBfTB1eWlHjTJU7vSno6XJiviwKf0lxfoHOzj+b9zHReX+QKXbbv - CI+hjdEgHx8bqEVVISTJNI9P6qaBfXgAph/2fitW1agpg15+mUPliI+R0RRyAsaT2fmzjH96qFhv - JtBi7dF41LpPhdh74RKzrd9e9+snd7btiJZ9R31Q77ErF6410PuVFjE/ZZ8JnWRrR1SqFdEwaHsT - tEezoAthLFu+aX0fihRsYteRGfVDkcsg5HKEkS1O0RTTboH4JzwSp7++o2nYOiJszG2Fryx1+MwD - qryVFJnsrsc9GhVNlSH+pm8Ky3fV8gPFE5rvM9WvUNRl24sP6pT3mFlqn3f4vQXw72uKFztbjBls - iAiyaG6ouOwrbzpmIvzhGesyMn3qzHqD1q+BMM31ypiPplSg5pAGLBRhy0U7MGf9ubt4LfcuYuHh - 0iHZ3wlMpc4zoq8gDSAdhoY46/1bH/PmIsA9JypdJvXD49HDA7RNO4Ml9qLg43HxLiEZhy27D1M7 - r8cdg9FvK6I7ytHjr10mgOxVPjvoktUO8/5Avief6JqtRH26GlMA8/rhNM94PK4t20TblBrMmrQb - mkZkC+j9JJyWSFvrU4JIB7MeES0jfTyM4RT+4QkhfrUehRt6gG29C+JH9oD6IqvucBztGp/UoORs - We5UkGvrzFSFL3R68BjIR1H08MB97nWF6abyn/tXb+v14qevULuWRDqeZQl1zDZVGFnL2N5CjHcy - 2vigLaIY97pTIOavxxDiznkQLwkVr4eUPtDiMnyIPrYyZ61Fz2iuJ7wUvKBdCUxSYeYb4kSlEFXF - lWK0l+yImZ/8ko/CzZWlLXljTNd0bMf29aKotY4q274n2vawfpSA3oXHduYKe9PJuN3hoqgfsosP - G0TjiyhCPa4bog+OHXNlCO7KOJ0FukFu3fJFtwcYz+s3HqtGj1Z3KtoAKXbxcB+qeHgv0wp++rmQ - F67O0/NVhZkPiHU/i+10U5mIlkqXE3xwJU4TZHUw+x+m7z9DNLyvuyNcveUCS+v92xu87FLCay3u - mf1WJX2qfWeBYtyF5CUPa94eywH/eItyrXznvEfFXdHFISBmLxb8j16t72bPvMTTvb4BPEFcqiEj - lbPXB93Yb2C9DV7E21dKTNXYvMJrv3KIZ3V5PChYEJGTezs8xHanD4/vZoK5n7AQ0Y53wbedUPHd - 
hcSr4IGmMepT6TB9KmamNxsNK7HT0MwvONtuJH36UmmBzhfjyw6xgtt0M+oLcKWdPvNTihhlWQaf - w3uBefqq2mFd1BiwHy7xr3/R8JG4SPUZ0HhfJdEYxUGn5N5rZFqSaFFHj2QDi7QUCDa91pv5vkCx - my3p9A01T5z7pVRZwYAX21vqMVs5nEEZHiVxQcB8rK+KLOtrssECWbUx3ZHPFcrjKZj7o5rz/LyR - YfX19sStVDUW0iygytspNsw4NF48rWwjBbeMl1h4ZKq+6hoPUBU/dBYFKxuJx0xcQHnXJuIWV8Ob - uuSq/eGR4RrG3rRb2xTabfUkqu4t9dqd7p1sF28Ty/SocPZGlzvk2dn58VPbVOjYId0LFGbNfqqL - +5EqdkVvVJQ2VTw92V4ABy0jLLyPnjfRfBSVU7m4MYtGeTsZ3fEB5LR/sO1Hs7xBSwFAq6uUPuZ+ - xW3ldoSzkI905X0lnSc0miDvqhLLkbqIG6HgBXwfokGs+ObF44UmIdD87M1+q+SjVOIMITf0qGiS - qz6GkhiC8bkS4pNUaKnfrw2Y9ZfM68+76HJ5wHUnCmy3aTNv+JrtFe2dV0/FXiwQcx8XAYXIfBCr - eupoBKwN6JneVeIYdInYKlphmPsTc78n5lGWbDY/nmZG0mHU5Y+hgdm/MVP5PvP+Xq18+WC7wIyq - 0FvuHdMrcD7tmFWZ6o8PMjk3RZE5XKnayTc+AWgb+8Cs+NZGzVuNFz++xdJoXdpJ1d4DDHrxJVsv - uHtzPWRw7/OECvg+ofGzMky4VOuQWR5r8zrIvgGUvfdk7uxfum1rH2HWbzyd9w4SnuY6AB2kO2nZ - vfFYQqMBdpva+cOn/CtnFeTZ0cErmMAbGssPkb32XdwImo6GsxRSCBThySKzR3ldq7dA5hIizFPq - MZ5A359hcZk+7Od/uvNheYePVnyocH+I8fy+RaRt3MPMRx+PTY32BMsIG+I3Wq1zVtcF/NZHlvyp - ZU8vn2BnZwhPy7PjibW/BVQ+/R3ZSuKprV/UuUJ9NzWirraR1/OPP6BfHrXzvpLHn5KqobBEzpy3 - xREdkS2i84YZdPxolj7u9VRUbk2YU7ToSm9wlav9y6+Yx+6NzkC8HaHSrYR23+Sds09ypzAs9JA2 - J7PSR8tONXhmpzWun+YuHtTRCeDZ4Y4RxLt8+L3v1fO8+NWP3rBkIwNnxci0tR7zYWdLGFL/ZTPC - Rkfng2gclc+JHX5+PG+QmGVKW3B31vMgHl/JA8OSX01GQInbMamfBvQC9an02R29YTHtO/SrD4YD - 0xuwVJZAklLDq8Vmlw8ftDTATVwbr+tqatvK2j/hdnieyH4NOeLSavNQ5u9P22ql8dk/iIrImi2Z - 81jeXy7MhmMt1n/8MLfu9hMu99ObSqHx5vyzWp7Rn3obxE00ys7rgcwOGmZMqRJToeAlzPxOl0Zs - xqu9Xglw4+aEJzk6RjRPWhnoUbgztZxaTuNd7KJfPnj5xpjzilaDfJXdEx6er2fLr0G2gPxVG8x4 - 1HedG68qQ3O+wfZe7utsznfQzL/M3Qq+PnT70gRnJTjMCY0tGossvUK/VGumjo3aTrOeIxdXlz96 - 9nu/cmryEiN2d/X1S1AKFAuZzozVsEIcRfYTwtfXIW7mqfHgpbUJb7Q/EUtR8pifvXUDki3IVNlS - P5aukpPCz496hXtAvCmSK/z0wi0Giw9OlwEcNgeXWUlJ9EkdIQXxEsWM7D4rb/b7GHnKtSCE3I9o - 2rziTPrlu1a4nPO7c9P9yRdMk2w8pmhgAz5/E2aYapvToEwzFBurhM37PRrO1SeVnsampxs1a9Dg - KoEL+en8xYvPq8mHOpmuyp4/RzbnLy3DxtEA7AdLpr+EdcS7ZukjZJQ1Md/COxqtcRTQuiqedPFe - 02hkt3UlN3jlUUrBbMtHVahgN7giZv/cesLsH1G7fmKmwqHNR9tICtSk2nb+fnL8vXxvBhikfLKZ - j1H3W785r6Sw6ireA3YH6ee3z8KKt1xcY/PHl2wH00MfJP3bIWNJO2Y+o6XXB8aWQi4qBtlTq/DG - OLRMAH4U8UCdRdxvnccEiQcBe34sk08yIRg+rDTpMn5co6EfREAG36h0uHpvj4XVToT5M9tZm+yX - dzUgJa8VnlhXx9ORdzbMfoGYsPP1OX8Mf3nIH32lP17xtlKIr29V8liirlPwQc3J67B8xMNhuqpw - uj4rPG5iRR82a0NGO219w4PSFfog3oQFyk1BJPsvITHHJyeA2Z/RrlgK8bRITwHsk1bFt9CL+QrS - 8oEKp7Lx5bX95tw7VncpfH0ctrWc1BPn+lB+ftDbvLR8/eONHx9aO9lDnX4X1D952dZfpPr4zkUN - YJ/vmKHHhjfUQpWhJcMTMXgeeoMA3QIam27xuhC/7Zw/qvBw7AUzlp9VO/7y9tl/4XSJxfaXl8BH - 3JjsNb/vds+pgHJkX8iuoCuPXpPnBhG09Sma+0198z4hfC9uRox4jdH67fsNgKRdiSO6fjwx8ZT9 - /B1u2d31KH4rGH58uqVrE/HooS8A27uObDdHXec381JCsUQpXTyEW56el3ED54v5ZWQZV3wYfemI - JlX4ULR/5zFr2baBaeWaDHdPKR77/XGD6p3MmNdOez7MeTpKcrbB1zoqo498NxpIxYdPwuW59qbf - PISJnx1RM1VFYvn0B/C2KGT6ejTi8WHu8M8fUtgcgmhM6rMB67VBmOebDRqQHWvQd5cPXlpq3xY7 - LNuArcbA35Nv8U7ajhjCkXz3Y9XkcbNIkwAitvqQbfxxOd3Y/V2+YR5gYdaDPD1f5/V878g+uZN8 - Yi92R7Mek33Xta3wke0CurcsU6nqh5gvU9+UdHTz8E1gEZ8MZVsBeZkHvMRBqU/T8zAphb4oiZdf - zXy6uNxHRSgtmeVPaVvN8zY49bt8zm/vfDq69+PPz/+ZH7GZn8EZK5GOikm9eptqG8iMbUGXPH3F - /aotS1iQ24LC4sn59FpyDVxjzBieWuBjJ0su2Jpcs5+/44cuCJSZj8g8r2rpiFRRnvMndpFPX53R - 5+oM83yGuYyd417r+gZNqvghhlB+9cnv1AIZq/Wa4PboRvzS3zVY7ssTs1bNWW8gmjKQR+lDLNti - +Z/5BGflyHb8tPyT78MCw5kO3ZLk5W0xBsrn1B+IaomDN5DHOoWX7XyxpIPQsrm/ovG8etPJOJd5 - 1e2pCdMAnC610yMf7vn+AY9H1ODpkG3mPHIwlHVcwG//6axf5U9l5kc8Zd/Df+UJzlX4Mlcm32h8 - UeeOnoS4mC+GyBO3LSpg3LwWtFfGnI9NnrkQ8HRL/PvViadyV9lgriObbKPcigRfl1UQ7aNGdqja - 6/1vXvCM7s953lujztSGUDGu154d9knC26/w7v7kkzMPRTMPCiAZ9Zfs/fLtDfHFK9C1rQqizvwy - 
0qvjgqYd3wTjTxiPhz7O0C2NKW1+vCMTywc/rxX2mz90Uqtryno955XDouEUDXUJ5JVLlItnDw3u - CUQUOcWe4Xl/T2GvnZHqh3/y1qjs1QqUMvAUKvcu8djv+87+gWCHW9GUrlgjK8H5Suf5V8yPu5MI - VmJssKQ4yKPe+3FEqtirDL9CEwnrPTLR1rkbVFk9xXyQdEZRHolXzKWFrQvCPvShacX+N++Ixpf/ - 1UCBQ8hILi7z7rlTB3mJtwqz1jvajl10EgAHz4j8eORPnrclOaZLt6p5mSf5RgaiH3Gd0G0+zjyA - +mZ1xlKEh3zgjROCWfEL2w9HC63G0axgKFlEIcidfHTIVoS/f6cC/vNff/31v34nDMoqeX7mgwH9 - c+z//d9HBf59T+7/FgTx30z8cxKBdvf0+fc//3UI4e+6rcq6/999VTy/3d///CX+OW3wd1/198// - c/lf8//6z3/9HwAAAP//AwBim/ij4SAAAA== - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7c0b986ab8dbfa62-SJC - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Mon, 01 May 2023 22:52:32 GMT - Server: - - cloudflare - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400, h3-29=":443"; ma=86400 - openai-organization: - - user-adtx4fhfg1qsiyzdoaxciooj - openai-processing-ms: - - '195' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3000' - x-ratelimit-remaining-requests: - - '2999' - x-ratelimit-reset-requests: - - 20ms - x-request-id: - - 208e3672e7991aa80472beb0310e43d9 - status: - code: 200 - message: OK -version: 1 diff --git a/tests/integration/challenges/basic_abilities/cassettes/test_browse_website/test_browse_website.yaml b/tests/integration/challenges/basic_abilities/cassettes/test_browse_website/test_browse_website.yaml index 82a4f1fae7..dac79d49c0 100644 --- a/tests/integration/challenges/basic_abilities/cassettes/test_browse_website/test_browse_website.yaml +++ b/tests/integration/challenges/basic_abilities/cassettes/test_browse_website/test_browse_website.yaml @@ -2896,4 +2896,472 @@ interactions: status: code: 200 message: OK +- request: + body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "Write + a concise summary of the following text; include any information that can be + used to answer the question \"What is the price of the book?\". Do not directly + answer the question itself:\n\n\nLITERAL TEXT: \"\"\"Books to Scrape We love + being scraped! Home Books Philosophy Meditations Meditations \u00a325.89 In + stock (1 available) Warning! This is a demo website for web scraping purposes. + Prices and ratings here were randomly assigned and have no real meaning. Product + Description Written in Greek, without any intention of publication, by the only + Roman emperor who was also a philosopher, the Meditations of Marcus Aurelius + (AD 121-180) offer a remarkable series of challenging spiritual reflections + and exercises developed as the emperor struggled to understand himself and make + sense of the universe. Ranging from doubt and despair to conviction and ex Written + in Greek, without any intention of publication, by the only Roman emperor who + was also a philosopher, the Meditations of Marcus Aurelius (AD 121-180) offer + a remarkable series of challenging spiritual reflections and exercises developed + as the emperor struggled to understand himself and make sense of the universe. + Ranging from doubt and despair to conviction and exaltation, they cover such + diverse topics as the nature of moral virtue, human rationality, divine providence, + and Marcus'' own emotions. 
But while the Meditations were composed to provide + personal consolation and encouragement, in developing his beliefs Marcus Aurelius + also created one of the greatest of all works of philosophy: a timeless collection + of extended meditations and short aphorisms that has been consulted and admired + by statesmen, thinkers and readers through the centuries. ...more Product Information + UPC4f19709e47883df5 Product TypeBooks Price (excl. tax)\u00a325.89 Price (incl. + tax)\u00a325.89 Tax\u00a30.00 Availability In stock (1 available) Number of + reviews 0 Products you recently viewed The Nicomachean Ethics \u00a336.34 In + stock Add to basket Run, Spot, Run: The ... \u00a320.02 In stock Add to basket + Critique of Pure Reason \u00a320.75 In stock Add to basket At The Existentialist + Caf\u00e9: ... \u00a329.93 In stock Add to basket Kierkegaard: A Christian Missionary + ... \u00a347.13 In stock Add to basket Proofs of God: Classical ... \u00a354.21 + In stock Add to basket\"\"\"\n\n\nCONCISE SUMMARY: The text is best summarized + as"}], "temperature": 0, "max_tokens": 0}' + headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '2507' + Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/chat/completions + response: + body: + string: !!binary | + H4sIAAAAAAAAA1SSTW4bMQyF9z0FofXYsJM4P7NL202RZtEg6KJNEWgkesRaIwqkxo4b+DS9SU9W + aJK46E7gz+N7H/RsyJvWuGCLG3KcXdxcJd/7m29fhuvx83139+uj+3qyfn+xvCofTGO4+4muvG7M + HQ85YiFOpjFO0Bb0pl2eX64Wp5ery1VjBvYYTWv6XGan89WsjNLxbHG6WJrGjGp7NO2zycJDLo+F + N5jUtKvFWWP+aR/ry+WiMYWLjcfS+fLs0BgXmByqab8/mwH1TVY4ommNVSUtNpVqklPBVANY8KhO + KNcTwGsoAaFj3sCDuUVPxdaGPhjo9nBrxY0K16NgpFEb2AVyAUjBbi1F20WENQvkUVywisAJ6oGB + YYedUnlp77ADdWIzpb7OZlbUOdwHhCzk8D8bpPDn98lqfnnVQDeWqfEmtrOSFEqwL+VpWcEmD2IL + pV7BCoLY5HmIe6gE+oR+mgh2i5AYBG2EAW2i1L94mM7yeo2ioJmEymgjCK4jugnGtI9PKI4UFTxu + MXJGXxFVH3c82AQ4ZBSWaTgHiqycA0oDjrcoNXrhTE5BRxfAKgwsNsKWpIzYQBiriEz4baSybyYl + T1tKNSpvyWNyOIdPpUJynJQ8CnrgdETYT79RC+xYNlqrRyv7VwwKHWIC6weStwyUNjX9RBKtr+8S + hMc+8FggkBaW/dwcGrOmRBoeBa1yMq3Rwtk0hpLHJ9MuDj8O7/4CAAD//wMAU2wM4F0DAAA= + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cache-Control: + - no-cache, must-revalidate + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfc4f548931119-ORD + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 18:16:38 GMT + Function-Execution-Id: + - pbmeavwjmg42 + Openai-Model: + - gpt-3.5-turbo-0301 + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '13515' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - ab314b2e9bb880724cb82a80580d492f;o=1 + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: + - '3500' + X-Ratelimit-Limit-Tokens: + - '90000' + X-Ratelimit-Remaining-Requests: + - '3499' + X-Ratelimit-Remaining-Tokens: + - '88914' + X-Ratelimit-Reset-Requests: + - 17ms + X-Ratelimit-Reset-Tokens: + - 724ms + X-Request-Id: + - fb2737f00b14403a74927018f9a97197 + status: + code: 200 + message: OK +- request: + body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "system", "content": "You + 
are browse_website-GPT, an AI designed to use the browse_website command to + visit http://books.toscrape.com/catalogue/meditations_33/index.html, answer + the question ''What is the price of the book?'' and write the price to a file + named \"browse_website.txt\", and use the task_complete command to complete + the task.\nYour decisions must always be made independently without seeking + user assistance. Play to your strengths as an LLM and pursue simple strategies + with no legal complications.\n\nGOALS:\n\n1. Use the browse_website command + to visit http://books.toscrape.com/catalogue/meditations_33/index.html and answer + the question ''What is the price of the book?''\n2. Write the price of the book + to a file named \"browse_website.txt\".\n3. Use the task_complete command to + complete the task.\n4. Do not use any other commands.\n\n\nConstraints:\n1. + ~4000 word limit for short term memory. Your short term memory is short, so + immediately save important information to files.\n2. If you are unsure how you + previously did something or want to recall past events, thinking about similar + events will help you remember.\n3. No user assistance\n4. Exclusively use the + commands listed below e.g. command_name\n\nCommands:\n1. append_to_file: Append + to file, args: \"filename\": \"\", \"text\": \"\"\n2. delete_file: + Delete file, args: \"filename\": \"\"\n3. list_files: List Files in + Directory, args: \"directory\": \"\"\n4. read_file: Read a file, + args: \"filename\": \"\"\n5. write_to_file: Write to file, args: \"filename\": + \"\", \"text\": \"\"\n6. browse_website: Browse Website, args: + \"url\": \"\", \"question\": \"\"\n7. + delete_agent: Delete GPT Agent, args: \"key\": \"\"\n8. get_hyperlinks: + Get hyperlinks, args: \"url\": \"\"\n9. get_text_summary: Get text summary, + args: \"url\": \"\", \"question\": \"\"\n10. list_agents: List + GPT Agents, args: () -> str\n11. message_agent: Message GPT Agent, args: \"key\": + \"\", \"message\": \"\"\n12. start_agent: Start GPT Agent, args: + \"name\": \"\", \"task\": \"\", \"prompt\": \"\"\n13. + task_complete: Task Complete (Shutdown), args: \"reason\": \"\"\n\nResources:\n1. + Internet access for searches and information gathering.\n2. Long Term memory + management.\n3. GPT-3.5 powered Agents for delegation of simple tasks.\n4. File + output.\n\nPerformance Evaluation:\n1. Continuously review and analyze your + actions to ensure you are performing to the best of your abilities.\n2. Constructively + self-criticize your big-picture behavior constantly.\n3. Reflect on past decisions + and strategies to refine your approach.\n4. Every command has a cost, so be + smart and efficient. Aim to complete tasks in the least number of steps.\n5. 
+ Write all code to a file.\n\nYou should only respond in JSON format as described + below \nResponse Format: \n{\n \"thoughts\": {\n \"text\": \"thought\",\n \"reasoning\": + \"reasoning\",\n \"plan\": \"- short bulleted\\n- list that conveys\\n- + long-term plan\",\n \"criticism\": \"constructive self-criticism\",\n \"speak\": + \"thoughts summary to say to user\"\n },\n \"command\": {\n \"name\": + \"command name\",\n \"args\": {\n \"arg name\": \"value\"\n }\n }\n} + \nEnsure the response can be parsed by Python json.loads"}, {"role": "system", + "content": "The current time and date is Tue Jan 1 00:00:00 2000"}, {"role": + "system", "content": "This reminds you of these events from your past: \nI was + created"}, {"role": "user", "content": "Determine which next command to use, + and respond using the format specified above:"}, {"role": "assistant", "content": + "{\n \"thoughts\": {\n \"text\": \"I need to use the browse_website + command to visit http://books.toscrape.com/catalogue/meditations_33/index.html + and find the price of the book. I will then write the price to a file named + browse_website.txt. I can then use the task_complete command to complete the + task.\",\n \"reasoning\": \"I need to use the browse_website command + to visit the website and find the price of the book. I will then use the write_to_file + command to write the price to a file named browse_website.txt. Finally, I can + use the task_complete command to complete the task.\",\n \"plan\": \"- + Use the browse_website command to visit http://books.toscrape.com/catalogue/meditations_33/index.html + and find the price of the book\\n- Use the write_to_file command to write the + price to a file named browse_website.txt\\n- Use the task_complete command to + complete the task\",\n \"criticism\": \"I need to ensure that I am using + the correct URL and question for the browse_website command. I also need to + make sure that I am writing the correct information to the file.\",\n \"speak\": + \"I will use the browse_website command to visit the website and find the price + of the book. Then, I will write the price to a file named browse_website.txt + and complete the task.\"\n },\n \"command\": {\n \"name\": \"browse_website\",\n \"args\": + {\n \"url\": \"http://books.toscrape.com/catalogue/meditations_33/index.html\",\n \"question\": + \"What is the price of the book?\"\n }\n }\n}"}, {"role": "system", + "content": "Command browse_website returned: Answer gathered from website: a + description of the book \"Meditations\" by Marcus Aurelius, which is available + for purchase on a demo website for web scraping purposes. The price of the book + is \u00a325.89, but the website warns that the prices and ratings are randomly + assigned and have no real meaning. The book offers spiritual reflections and + exercises developed by the Roman emperor and philosopher, covering topics such + as moral virtue, human rationality, and divine providence. 
It is considered + one of the greatest works of philosophy and has been admired by thinkers and + readers throughout history.\n\nLinks: [''Books to Scrape (http://books.toscrape.com/index.html)'', + ''Home (http://books.toscrape.com/index.html)'', ''Books (http://books.toscrape.com/catalogue/category/books_1/index.html)'', + ''Philosophy (http://books.toscrape.com/catalogue/category/books/philosophy_7/index.html)'', + '' (http://books.toscrape.com/catalogue/the-nicomachean-ethics_75/index.html)'']"}, + {"role": "user", "content": "Determine which next command to use, and respond + using the format specified above:"}], "temperature": 0, "max_tokens": 0}' + headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '6625' + Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/chat/completions + response: + body: + string: !!binary | + H4sIAAAAAAAAA7SUS27bMBCG9z3FYNay4bdjbYsujHZVNJtWhUFRY4mxxBHIce3A8Gl6k56soCy/ + lCyCFlkJmsfP7x9qdECTYYy6UKKruuzNPy+sfE8n43w5/DKdcTFbLstP86/y/PTxESPk9Im0tB19 + zVVdkhi2GKF2pIQyjIezh+lg/DAbDCOsOKMSY8xr6Y37055sXcq9wXgwxAi3XuWE8QFrx1UtK+EN + WY/xcDIbRXgVvyRGk3mEwqLKa+18sDhGqAs2mjzGPw5YkT8LOy4JY1TeGy/KSsBkK2SDhUNiAQAS + lIK3eSE+wRjaYJugvYRggkso1C+CNW9tBlIQ1M5oAl43Lynzpg9LsEQZCMPWUxPfOSO0El6tTUmg + uaqUbQqaxI2OMChoiqyqKIPU8c7TakepN0J92UuQ18qGHnvRF+U3q3ZOd/qX2Lmqn2B068yR8myN + zc/2/hn9ZgRvsvGtIBu1Zv7fR10qe7LQg8f3RU8Sez3j7cQdYO2MGG181R18pTYEfutCpxJYgqoa + YGPzRkuzc6QFjF2zq1TYi9AWUg12c37ovESMh5RCe7uZYOydUmbCg91zd6i+JrU58+1MWb73V9HA + v3bXJ6pjdN7U9sQXixqET8B3hB1fyuXdHT8lQu1V4pWrv9Xp/hr+/B5N+w+LM2wD3HIn9ojHCNfG + Gl+sTjuHMXrhGiM0NqM9xoPjz+OHvwAAAP//AwCkoYsdiAUAAA== + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cache-Control: + - no-cache, must-revalidate + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfc55dc9dee1ad-ORD + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 18:17:12 GMT + Function-Execution-Id: + - pbmegdid7glm + Openai-Model: + - gpt-3.5-turbo-0301 + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '30081' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - a847bfe80cefa41d8c90162f62421032;o=1 + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: + - '3500' + X-Ratelimit-Limit-Tokens: + - '90000' + X-Ratelimit-Remaining-Requests: + - '3499' + X-Ratelimit-Remaining-Tokens: + - '85948' + X-Ratelimit-Reset-Requests: + - 17ms + X-Ratelimit-Reset-Tokens: + - 2.701s + X-Request-Id: + - 2138028baa9d1b5c4544e7197979eb65 + status: + code: 200 + message: OK +- request: + body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "system", "content": "You + are browse_website-GPT, an AI designed to use the browse_website command to + visit http://books.toscrape.com/catalogue/meditations_33/index.html, answer + the question ''What is the price of the book?'' and write the price to a file + named \"browse_website.txt\", and use the task_complete command to complete + the task.\nYour decisions must always be made independently without seeking + user assistance. 
Play to your strengths as an LLM and pursue simple strategies + with no legal complications.\n\nGOALS:\n\n1. Use the browse_website command + to visit http://books.toscrape.com/catalogue/meditations_33/index.html and answer + the question ''What is the price of the book?''\n2. Write the price of the book + to a file named \"browse_website.txt\".\n3. Use the task_complete command to + complete the task.\n4. Do not use any other commands.\n\n\nConstraints:\n1. + ~4000 word limit for short term memory. Your short term memory is short, so + immediately save important information to files.\n2. If you are unsure how you + previously did something or want to recall past events, thinking about similar + events will help you remember.\n3. No user assistance\n4. Exclusively use the + commands listed below e.g. command_name\n\nCommands:\n1. append_to_file: Append + to file, args: \"filename\": \"\", \"text\": \"\"\n2. delete_file: + Delete file, args: \"filename\": \"\"\n3. list_files: List Files in + Directory, args: \"directory\": \"\"\n4. read_file: Read a file, + args: \"filename\": \"\"\n5. write_to_file: Write to file, args: \"filename\": + \"\", \"text\": \"\"\n6. browse_website: Browse Website, args: + \"url\": \"\", \"question\": \"\"\n7. + delete_agent: Delete GPT Agent, args: \"key\": \"\"\n8. get_hyperlinks: + Get hyperlinks, args: \"url\": \"\"\n9. get_text_summary: Get text summary, + args: \"url\": \"\", \"question\": \"\"\n10. list_agents: List + GPT Agents, args: () -> str\n11. message_agent: Message GPT Agent, args: \"key\": + \"\", \"message\": \"\"\n12. start_agent: Start GPT Agent, args: + \"name\": \"\", \"task\": \"\", \"prompt\": \"\"\n13. + task_complete: Task Complete (Shutdown), args: \"reason\": \"\"\n\nResources:\n1. + Internet access for searches and information gathering.\n2. Long Term memory + management.\n3. GPT-3.5 powered Agents for delegation of simple tasks.\n4. File + output.\n\nPerformance Evaluation:\n1. Continuously review and analyze your + actions to ensure you are performing to the best of your abilities.\n2. Constructively + self-criticize your big-picture behavior constantly.\n3. Reflect on past decisions + and strategies to refine your approach.\n4. Every command has a cost, so be + smart and efficient. Aim to complete tasks in the least number of steps.\n5. + Write all code to a file.\n\nYou should only respond in JSON format as described + below \nResponse Format: \n{\n \"thoughts\": {\n \"text\": \"thought\",\n \"reasoning\": + \"reasoning\",\n \"plan\": \"- short bulleted\\n- list that conveys\\n- + long-term plan\",\n \"criticism\": \"constructive self-criticism\",\n \"speak\": + \"thoughts summary to say to user\"\n },\n \"command\": {\n \"name\": + \"command name\",\n \"args\": {\n \"arg name\": \"value\"\n }\n }\n} + \nEnsure the response can be parsed by Python json.loads"}, {"role": "system", + "content": "The current time and date is Tue Jan 1 00:00:00 2000"}, {"role": + "system", "content": "This reminds you of these events from your past: \nI was + created"}, {"role": "user", "content": "Determine which next command to use, + and respond using the format specified above:"}, {"role": "assistant", "content": + "{\n \"thoughts\": {\n \"text\": \"I need to use the browse_website + command to visit http://books.toscrape.com/catalogue/meditations_33/index.html + and find the price of the book. I will then write the price to a file named + browse_website.txt. 
I can then use the task_complete command to complete the + task.\",\n \"reasoning\": \"I need to use the browse_website command + to visit the website and find the price of the book. I will then use the write_to_file + command to write the price to a file named browse_website.txt. Finally, I can + use the task_complete command to complete the task.\",\n \"plan\": \"- + Use the browse_website command to visit http://books.toscrape.com/catalogue/meditations_33/index.html + and find the price of the book\\n- Use the write_to_file command to write the + price to a file named browse_website.txt\\n- Use the task_complete command to + complete the task\",\n \"criticism\": \"I need to ensure that I am using + the correct URL and question for the browse_website command. I also need to + make sure that I am writing the correct information to the file.\",\n \"speak\": + \"I will use the browse_website command to visit the website and find the price + of the book. Then, I will write the price to a file named browse_website.txt + and complete the task.\"\n },\n \"command\": {\n \"name\": \"browse_website\",\n \"args\": + {\n \"url\": \"http://books.toscrape.com/catalogue/meditations_33/index.html\",\n \"question\": + \"What is the price of the book?\"\n }\n }\n}"}, {"role": "system", + "content": "Command browse_website returned: Answer gathered from website: a + description of the book \"Meditations\" by Marcus Aurelius, which is available + for purchase on a demo website for web scraping purposes. The price of the book + is \u00a325.89, but the website warns that the prices and ratings are randomly + assigned and have no real meaning. The book offers spiritual reflections and + exercises developed by the Roman emperor and philosopher, covering topics such + as moral virtue, human rationality, and divine providence. It is considered + one of the greatest works of philosophy and has been admired by thinkers and + readers throughout history.\n\nLinks: [''Books to Scrape (http://books.toscrape.com/index.html)'', + ''Home (http://books.toscrape.com/index.html)'', ''Books (http://books.toscrape.com/catalogue/category/books_1/index.html)'', + ''Philosophy (http://books.toscrape.com/catalogue/category/books/philosophy_7/index.html)'', + '' (http://books.toscrape.com/catalogue/the-nicomachean-ethics_75/index.html)'']"}, + {"role": "user", "content": "Determine which next command to use, and respond + using the format specified above:"}, {"role": "assistant", "content": "{\n \"thoughts\": + {\n \"text\": \"I have found the price of the book. I need to use the + write_to_file command to write the price to a file named browse_website.txt. + I can then use the task_complete command to complete the task.\",\n \"reasoning\": + \"I need to use the write_to_file command to write the price of the book to + a file named browse_website.txt. 
Then, I can use the task_complete command to + complete the task.\",\n \"plan\": \"- Use the write_to_file command to + write the price of the book to a file named browse_website.txt\\n- Use the task_complete + command to complete the task\",\n \"criticism\": \"I need to make sure + that I am writing the correct information to the file and that the file is being + created in the correct directory.\",\n \"speak\": \"I will use the write_to_file + command to write the price of the book to a file named browse_website.txt and + complete the task.\"\n },\n \"command\": {\n \"name\": \"write_to_file\",\n \"args\": + {\n \"filename\": \"browse_website.txt\",\n \"text\": + \"\u00a325.89\"\n }\n }\n}"}, {"role": "system", "content": "Command + write_to_file returned: File written to successfully."}, {"role": "user", "content": + "Determine which next command to use, and respond using the format specified + above:"}], "temperature": 0, "max_tokens": 0}' + headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '8003' + Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/chat/completions + response: + body: + string: !!binary | + H4sIAAAAAAAAA6xSTY/TMBC98ytGc06rlm7bbY5wqrggBEiIoJXrTBMTxxPZE9pVlf+OnKabblmx + EuLoNzPvYzwnNDmmqEslum7sZP1hw1+L4iOXd/lm3byffbKHu+O7qpXttwoT5N1P0jJMTDXXjSUx + 7DBB7UkJ5ZjOV/fL2eJ+tdgkWHNOFlMsGpkspsuJtH7Hk9liNscE26AKwvSEjee6kQfhilzAdL5e + zhMcya8LCQqLsiO0ebvqEtQlG00B0+8nrClciD1bwhRVCCaIchJtshNyMcIpcwAAGUrJbVFKyDCF + ARwKdJQIZriFUv0iGDxRDlISiArVFLaglQPHB2gDPeEPl9Y4UyuXgzCEshXI+eD6NlWQk2mGybWk + JxXYGVe8ogu8h71xuXFFjzXeaIpgfOyYK4iaB28kdhiJ8gr2xtL/dtxY5c5mJ/DlH/hu6HS0rE2o + LwtwRP1orSqC0PqooOQvMibAjmLqNlAOmr0nLfaxX8jTaC89to6+Gs8Neft4GzM0pKrXPqXfubG2 + X+4LWaENlw970fs0w7Nml1yOc6j8cZtO1XS284zpxrXyxe1ZX9/ZmeBztD6mCa3WFMK+tf0Wxslu + 8Ja5DrsE98aZUD6cmTDFINxggsbldMR01v3o3vwGAAD//wMARwk3SV8EAAA= + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cache-Control: + - no-cache, must-revalidate + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfc64648bb10cb-ORD + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 18:17:38 GMT + Function-Execution-Id: + - yvvl2c4sntif + Openai-Model: + - gpt-3.5-turbo-0301 + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '18885' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - df845088a79f4f2e92c980eb9a6e2e52 + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: + - '3500' + X-Ratelimit-Limit-Tokens: + - '90000' + X-Ratelimit-Remaining-Requests: + - '3499' + X-Ratelimit-Remaining-Tokens: + - '85933' + X-Ratelimit-Reset-Requests: + - 17ms + X-Ratelimit-Reset-Tokens: + - 2.71s + X-Request-Id: + - d91688703794f0cfd634cd4bd337530d + status: + code: 200 + message: OK version: 1 diff --git a/tests/integration/challenges/basic_abilities/cassettes/test_write_file/test_write_file.yaml b/tests/integration/challenges/basic_abilities/cassettes/test_write_file/test_write_file.yaml index f5a406c303..c45b71c84f 100644 --- 
a/tests/integration/challenges/basic_abilities/cassettes/test_write_file/test_write_file.yaml +++ b/tests/integration/challenges/basic_abilities/cassettes/test_write_file/test_write_file.yaml @@ -1350,4 +1350,231 @@ interactions: status: code: 200 message: OK +- request: + body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "system", "content": "You + are write_to_file-GPT, an AI designed to use the write_to_file command to write + ''Hello World'' into a file named \"hello_world.txt\" and then use the task_complete + command to complete the task.\nYour decisions must always be made independently + without seeking user assistance. Play to your strengths as an LLM and pursue + simple strategies with no legal complications.\n\nGOALS:\n\n1. Use the write_to_file + command to write ''Hello World'' into a file named \"hello_world.txt\".\n2. + Use the task_complete command to complete the task.\n3. Do not use any other + commands.\n\n\nConstraints:\n1. ~4000 word limit for short term memory. Your + short term memory is short, so immediately save important information to files.\n2. + If you are unsure how you previously did something or want to recall past events, + thinking about similar events will help you remember.\n3. No user assistance\n4. + Exclusively use the commands listed below e.g. command_name\n\nCommands:\n1. + append_to_file: Append to file, args: \"filename\": \"\", \"text\": + \"\"\n2. delete_file: Delete file, args: \"filename\": \"\"\n3. + list_files: List Files in Directory, args: \"directory\": \"\"\n4. + read_file: Read a file, args: \"filename\": \"\"\n5. write_to_file: + Write to file, args: \"filename\": \"\", \"text\": \"\"\n6. + delete_agent: Delete GPT Agent, args: \"key\": \"\"\n7. get_hyperlinks: + Get hyperlinks, args: \"url\": \"\"\n8. get_text_summary: Get text summary, + args: \"url\": \"\", \"question\": \"\"\n9. list_agents: List + GPT Agents, args: () -> str\n10. message_agent: Message GPT Agent, args: \"key\": + \"\", \"message\": \"\"\n11. start_agent: Start GPT Agent, args: + \"name\": \"\", \"task\": \"\", \"prompt\": \"\"\n12. + task_complete: Task Complete (Shutdown), args: \"reason\": \"\"\n\nResources:\n1. + Internet access for searches and information gathering.\n2. Long Term memory + management.\n3. GPT-3.5 powered Agents for delegation of simple tasks.\n4. File + output.\n\nPerformance Evaluation:\n1. Continuously review and analyze your + actions to ensure you are performing to the best of your abilities.\n2. Constructively + self-criticize your big-picture behavior constantly.\n3. Reflect on past decisions + and strategies to refine your approach.\n4. Every command has a cost, so be + smart and efficient. Aim to complete tasks in the least number of steps.\n5. 
+ Write all code to a file.\n\nYou should only respond in JSON format as described + below \nResponse Format: \n{\n \"thoughts\": {\n \"text\": \"thought\",\n \"reasoning\": + \"reasoning\",\n \"plan\": \"- short bulleted\\n- list that conveys\\n- + long-term plan\",\n \"criticism\": \"constructive self-criticism\",\n \"speak\": + \"thoughts summary to say to user\"\n },\n \"command\": {\n \"name\": + \"command name\",\n \"args\": {\n \"arg name\": \"value\"\n }\n }\n} + \nEnsure the response can be parsed by Python json.loads"}, {"role": "system", + "content": "The current time and date is Tue Jan 1 00:00:00 2000"}, {"role": + "system", "content": "This reminds you of these events from your past: \nI was + created"}, {"role": "user", "content": "Determine which next command to use, + and respond using the format specified above:"}, {"role": "assistant", "content": + "{\n \"thoughts\": {\n \"text\": \"I need to use the write_to_file + command to write ''Hello World'' into a file named ''hello_world.txt''.\",\n \"reasoning\": + \"The task requires me to write ''Hello World'' into a file named ''hello_world.txt'', + and the write_to_file command is the appropriate command for this task.\",\n \"plan\": + \"- Use the write_to_file command to write ''Hello World'' into a file named + ''hello_world.txt''.\",\n \"criticism\": \"I need to ensure that I use + the correct filename and text to write to the file.\",\n \"speak\": \"I + will use the write_to_file command to write ''Hello World'' into a file named + ''hello_world.txt''.\"\n },\n \"command\": {\n \"name\": \"write_to_file\",\n \"args\": + {\n \"filename\": \"hello_world.txt\",\n \"text\": \"Hello + World\"\n }\n }\n}"}, {"role": "system", "content": "Command write_to_file + returned: File written to successfully."}, {"role": "user", "content": "Determine + which next command to use, and respond using the format specified above:"}], + "temperature": 0, "max_tokens": 0}' + headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '4608' + Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/chat/completions + response: + body: + string: !!binary | + H4sIAAAAAAAAA7RSy27bQAy89ysIXnyRDTuGnVhfUCOHFoiLBqgKY7Oipa1XXHWXip0a+vdCLyhN + XwjQ3hYccjjDnQuaFGPUuRJdlHZ6fbtxUu5MOHx72vo7qdanbHl9f3/1jm/fY4Tu4Qtp6Sdm2hWl + JTGOMULtSQmlGC/WN6v58ma9XkZYuJQsxpiVMl3OVlOp/IObzpfzBUZYBZURxhcsvStK2Ys7EgeM + F/PFJsKRfAQ26wjFibJj6WqxqiPUuTOaAsafLlhQGIi9s4QxqhBMEMXSyHQsxI2FS8IAAAlK7qos + l5BgDH2xB+gsTTHBLeTqkSBUWlMIh8raJzh5I0IMk7dkrYOPztt0AobFgYKDsQSsCkphkjf4/tTg + MznLZAa7nIDpLKBdUShOQRxUgcAEEBWO+947NfXxnVOLzhKMnqv0pIJjw1kndde3gaevlfEUoGh5 + Grn0erHQysuJfyEE7gxrgj8dR1zb3myI2tc/s11axZ3jKXwIY9/I8mzLX9m0N2K0CcXw30zUThKH + yjdzSmALpXePJiVQ0F0dDs4P5Iaz39KHktRxoD4Za1vfr5bcUdbRkNy+/afgNn/ZbfuB/oUo5bOX + mX+eqP+R/MFCa6N3k3CNdYQHwybk+243xhjElRih4ZTOGM/rz/Wb7wAAAP//AwDKh13FrgQAAA== + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cache-Control: + - no-cache, must-revalidate + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfc6e12bd2e1ef-ORD + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 18:18:06 GMT + Function-Execution-Id: + - yvvlex8job2h + Openai-Model: + - 
gpt-3.5-turbo-0301 + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '22621' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - 827e911ff253a9d12783cb62e669dc20;o=1 + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: + - '3500' + X-Ratelimit-Limit-Tokens: + - '90000' + X-Ratelimit-Remaining-Requests: + - '3499' + X-Ratelimit-Remaining-Tokens: + - '86005' + X-Ratelimit-Reset-Requests: + - 17ms + X-Ratelimit-Reset-Tokens: + - 2.663s + X-Request-Id: + - 4d357db894485be6e259654a899f2e57 + status: + code: 200 + message: OK +- request: + body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "Write + a concise summary of the following text:\n\n\nLITERAL TEXT: \"\"\"Hello World\"\"\"\n\n\nCONCISE + SUMMARY: The text is best summarized as"}], "temperature": 0, "max_tokens": + 0}' + headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '252' + Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/chat/completions + response: + body: + string: !!binary | + H4sIAAAAAAAAA0SOQUvDQBQG7/6K8J03JSVpmu7RU7GIXooHK2WbvCarm33L7qsoIf9dClqvAzPM + BNtBox2MtGNw+Xq3CQ9DXFfPj09Oim5Xu+3+u9oO+/tPhgKf3qmVX2PR8hgciWUPhTaSEeqgl3Wz + KsqmbjYKI3fkoNEHycvFKpdLPHFelMUSCpdkeoKeECKPQY7CH+QTdFkr/KdvuFEQFuNuoKpmhXZg + 21KCfp0wUvpLRnYEDZOSTWK8XAfZC/nrvMmSveazPhKJ9X3G5+yALTnH2QtH1y0OwKxwtt6m4RjJ + JPbQSMIBCtZ39AVdzG/z3Q8AAAD//wMAv00f4UIBAAA= + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cache-Control: + - no-cache, must-revalidate + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfc7839fad10e3-ORD + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 18:18:11 GMT + Function-Execution-Id: + - pbme9het6dvt + Openai-Model: + - gpt-3.5-turbo-0301 + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '1227' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - 4a9b8edee07c9658c887ffc760c711e6;o=1 + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: + - '3500' + X-Ratelimit-Limit-Tokens: + - '90000' + X-Ratelimit-Remaining-Requests: + - '3499' + X-Ratelimit-Remaining-Tokens: + - '89466' + X-Ratelimit-Reset-Requests: + - 17ms + X-Ratelimit-Reset-Tokens: + - 356ms + X-Request-Id: + - fb5593e05606c34e2e9461ff82b28747 + status: + code: 200 + message: OK version: 1 diff --git a/tests/integration/challenges/basic_abilities/test_browse_website.py b/tests/integration/challenges/basic_abilities/test_browse_website.py index 1afe10cc36..4befb059a1 100644 --- a/tests/integration/challenges/basic_abilities/test_browse_website.py +++ b/tests/integration/challenges/basic_abilities/test_browse_website.py @@ -1,8 +1,6 @@ import pytest -from pytest_mock import MockerFixture from autogpt.agent import Agent -from autogpt.commands.file_operations import read_file from tests.integration.challenges.utils import run_interaction_loop from tests.utils import requires_api_key @@ -13,11 +11,11 @@ 
CYCLE_COUNT = 2
 @pytest.mark.vcr
 def test_browse_website(
     browser_agent: Agent,
-    patched_api_requestor: MockerFixture,
+    patched_api_requestor: None,
     monkeypatch: pytest.MonkeyPatch,
 ) -> None:
     file_path = browser_agent.workspace.get_path("browse_website.txt")
     run_interaction_loop(monkeypatch, browser_agent, CYCLE_COUNT)
 
-    content = read_file(file_path)
+    content = open(file_path, encoding="utf-8").read()
     assert "£25.89" in content, f"Expected £25.89, got {content}"
diff --git a/tests/integration/challenges/basic_abilities/test_write_file.py b/tests/integration/challenges/basic_abilities/test_write_file.py
index 36a97733a2..1b0198c79d 100644
--- a/tests/integration/challenges/basic_abilities/test_write_file.py
+++ b/tests/integration/challenges/basic_abilities/test_write_file.py
@@ -1,5 +1,4 @@
 import pytest
-from pytest_mock import MockerFixture
 
 from autogpt.agent import Agent
 from autogpt.commands.file_operations import read_file
@@ -13,7 +12,7 @@ CYCLE_COUNT = 3
 @pytest.mark.vcr
 def test_write_file(
     writer_agent: Agent,
-    patched_api_requestor: MockerFixture,
+    patched_api_requestor: None,
     monkeypatch: pytest.MonkeyPatch,
 ) -> None:
     file_path = str(writer_agent.workspace.get_path("hello_world.txt"))
diff --git a/tests/integration/challenges/information_retrieval/test_information_retrieval_challenge_a.py b/tests/integration/challenges/information_retrieval/test_information_retrieval_challenge_a.py
index 0bf7bfe670..e527507193 100644
--- a/tests/integration/challenges/information_retrieval/test_information_retrieval_challenge_a.py
+++ b/tests/integration/challenges/information_retrieval/test_information_retrieval_challenge_a.py
@@ -1,5 +1,4 @@
 import pytest
-from pytest_mock import MockerFixture
 
 from autogpt.commands.file_operations import read_file, write_to_file
 from tests.integration.challenges.utils import run_interaction_loop, run_multiple_times
@@ -16,7 +15,7 @@ from autogpt.agent import Agent
 def test_information_retrieval_challenge_a(
     get_company_revenue_agent: Agent,
     monkeypatch: pytest.MonkeyPatch,
-    patched_api_requestor: MockerFixture,
+    patched_api_requestor: None,
 ) -> None:
     """
     Test the challenge_a function in a given agent by mocking user inputs and checking the output file content.
diff --git a/tests/integration/challenges/memory/cassettes/test_memory_challenge_a/test_memory_challenge_a.yaml b/tests/integration/challenges/memory/cassettes/test_memory_challenge_a/test_memory_challenge_a.yaml
index 99243a1021..159f9dfa01 100644
--- a/tests/integration/challenges/memory/cassettes/test_memory_challenge_a/test_memory_challenge_a.yaml
+++ b/tests/integration/challenges/memory/cassettes/test_memory_challenge_a/test_memory_challenge_a.yaml
@@ -14,7 +14,7 @@ interactions:
below e.g. command_name\n\nCommands:\n1. append_to_file: Append to file, args: \"filename\": \"\", \"text\": \"\"\n2. delete_file: Delete file, args: \"filename\": \"\"\n3. list_files: List Files in Directory,
- args: \"directory\": \"\"\n4. read_file: Read file, args: \"filename\":
+ args: \"directory\": \"\"\n4. read_file: Read a file, args: \"filename\":
\"\"\n5. write_to_file: Write to file, args: \"filename\": \"\", \"text\": \"\"\n6. delete_agent: Delete GPT Agent, args: \"key\": \"\"\n7. get_hyperlinks: Get hyperlinks, args: \"url\": \"\"\n8. get_text_summary:
@@ -22,7 +22,7 @@ interactions:
list_agents: List GPT Agents, args: () -> str\n10. message_agent: Message GPT Agent, args: \"key\": \"\", \"message\": \"\"\n11.
start_agent: Start GPT Agent, args: \"name\": \"\", \"task\": \"\", - \"prompt\": \"\"\n13. task_complete: Task Complete (Shutdown), args: + \"prompt\": \"\"\n12. task_complete: Task Complete (Shutdown), args: \"reason\": \"\"\n\nResources:\n1. Internet access for searches and information gathering.\n2. Long Term memory management.\n3. GPT-3.5 powered Agents for delegation of simple tasks.\n4. File output.\n\nPerformance Evaluation:\n1. @@ -42,6 +42,10 @@ interactions: which next command to use, and respond using the format specified above:"}], "temperature": 0, "max_tokens": 0}' headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE Accept: - '*/*' Accept-Encoding: @@ -49,297 +53,7 @@ interactions: Connection: - keep-alive Content-Length: - - '3282' - Content-Type: - - application/json - method: POST - uri: https://api.openai.com/v1/chat/completions - response: - body: - string: !!binary | - H4sIAAAAAAAAA5SSyY7aQBCG73mKUp1tBCEs41sSRQjlNppIWTxCjV22O9PudrrKAYL87lFjzEQw - GinXv5b/q+WIOscEs0pJVjcmXqzvdytn14vVt7vp992WHz63qw+/v36S9cd7jNBtf1Im54pR5urG - kGhnMcLMkxLKMZnMl+/m08l8No+wdjkZTLBsJJ6OZrG0fuvi8XQ8wQhbViVhcsTGu7qRjbgnsozJ - 4u1dhM+9L/okdBQnylyk5XLWRZhVTmfEmPw4Yk08tPXOECaomDWLshIgnRWyYYBjagEAUpTKtWUl - nGICZ/EcoL0EMcU1cOVakwOL8gLbA3hSubYlSEWgLYtvs4DKm8lI9gKFNgQtDxkhe3PSMlfXyuaj - FKN/rTwpdlbbsvd7qAgK7VmAhRrQDOKgtTn5MEd+4wpB3FVKQqqnX632lIMroKZrp8Yo25vE8IXp - ZbxgF8TXxktTG8N7q8zhD70MlJOQr7Xtw5b2/Th8RZR5LTrTXA+7tkQnArLc+lCsBNZQtMYcXl3C - lgrnCRrvMqL+Ou7K+nob3JB6Gnx32pj/uvAoxb5ZFw3PdF7gzS9ZVVPvc1n2FYry5fUL9oGQ+1x+ - izJAnEDOPKntsIuw0FZztem/CxNkcQ1GqG1Oe0zG3WP35i8AAAD//wMA2ddSQwEEAAA= - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7ca8f62c2bee313e-SOF - Cache-Control: - - no-cache, must-revalidate - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Sun, 21 May 2023 01:14:28 GMT - Server: - - cloudflare - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400, h3-29=":443"; ma=86400 - openai-model: - - gpt-3.5-turbo-0301 - openai-organization: - - user-gsghk4kr5xmbjprowdzr8zt2 - openai-processing-ms: - - '11555' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3500' - x-ratelimit-limit-tokens: - - '90000' - x-ratelimit-remaining-requests: - - '3499' - x-ratelimit-remaining-tokens: - - '86494' - x-ratelimit-reset-requests: - - 17ms - x-ratelimit-reset-tokens: - - 2.336s - x-request-id: - - 7cc7737a077abeb1f4e9117975dfb9c1 - status: - code: 200 - message: OK -- request: - body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "Your - task is to create a concise running summary of actions and information results - in the provided text, focusing on key and potentially important information - to remember.\n\nYou will receive the current summary and the your latest actions. 
- Combine them, adding relevant key information from the latest development in - 1st person past tense and keeping the summary concise.\n\nSummary So Far:\n\"\"\"\nI - was created.\n\"\"\"\n\nLatest Development:\n\"\"\"\nNothing new happened.\n\"\"\"\n"}], - "temperature": 0, "max_tokens": 0}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '599' - Content-Type: - - application/json - method: POST - uri: https://api.openai.com/v1/chat/completions - response: - body: - string: !!binary | - H4sIAAAAAAAAA0SOzUoDMRRG9z5F+NaZMrF/NjvRTXdSsSAiJc1cZ6IzuXFyS1vLvLsUqt0eOIdz - Qqhg4RsnvkttMV+u9mv6zquH5x+qH810KhNe3x8Pry9PR2jw9pO8XIyR5y61JIEjNHxPTqiCNbO7 - yWxsZnOj0XFFLSzqJMV4NC1k12+5KMelgcYuu5pgT0g9d0k2wl8UM+ziVuOavmINYXHtPzClGTR8 - w8FThn07oaP81+y5JVi4nEMWF+V8yFEonu+Xau+yuhwrFysVWZoQaxVprxqXVeNSokjVCIPGR4gh - N5ueXOYIiyycoBFiRQfYcngfbn4BAAD//wMAmYjYl0kBAAA= - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7ca8f6881eab313e-SOF - Cache-Control: - - no-cache, must-revalidate - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Sun, 21 May 2023 01:14:32 GMT - Server: - - cloudflare - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400, h3-29=":443"; ma=86400 - openai-model: - - gpt-3.5-turbo-0301 - openai-organization: - - user-gsghk4kr5xmbjprowdzr8zt2 - openai-processing-ms: - - '1017' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3500' - x-ratelimit-limit-tokens: - - '90000' - x-ratelimit-remaining-requests: - - '3499' - x-ratelimit-remaining-tokens: - - '89866' - x-ratelimit-reset-requests: - - 17ms - x-ratelimit-reset-tokens: - - 88ms - x-request-id: - - e9912526fc04146baa08a05745789e1c - status: - code: 200 - message: OK -- request: - body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "system", "content": "You - are Follow-Instructions-GPT, an AI designed to read the instructions_1.txt file - using the read_file method and follow the instructions in the file.\nYour decisions - must always be made independently without seeking user assistance. Play to your - strengths as an LLM and pursue simple strategies with no legal complications.\n\nGOALS:\n\n1. - Use the command read_file to read the instructions_1.txt file\n2. Follow the - instructions in the instructions_1.txt file\n\n\nConstraints:\n1. ~4000 word - limit for short term memory. Your short term memory is short, so immediately - save important information to files.\n2. If you are unsure how you previously - did something or want to recall past events, thinking about similar events will - help you remember.\n3. No user assistance\n4. Exclusively use the commands listed - below e.g. command_name\n\nCommands:\n1. append_to_file: Append to file, args: - \"filename\": \"\", \"text\": \"\"\n2. delete_file: Delete file, - args: \"filename\": \"\"\n3. list_files: List Files in Directory, - args: \"directory\": \"\"\n4. read_file: Read file, args: \"filename\": - \"\"\n5. write_to_file: Write to file, args: \"filename\": \"\", - \"text\": \"\"\n6. delete_agent: Delete GPT Agent, args: \"key\": \"\"\n7. - get_hyperlinks: Get hyperlinks, args: \"url\": \"\"\n8. get_text_summary: - Get text summary, args: \"url\": \"\", \"question\": \"\"\n9. - list_agents: List GPT Agents, args: () -> str\n10. message_agent: Message GPT - Agent, args: \"key\": \"\", \"message\": \"\"\n11. 
start_agent: - Start GPT Agent, args: \"name\": \"\", \"task\": \"\", - \"prompt\": \"\"\n13. task_complete: Task Complete (Shutdown), args: - \"reason\": \"\"\n\nResources:\n1. Internet access for searches and - information gathering.\n2. Long Term memory management.\n3. GPT-3.5 powered - Agents for delegation of simple tasks.\n4. File output.\n\nPerformance Evaluation:\n1. - Continuously review and analyze your actions to ensure you are performing to - the best of your abilities.\n2. Constructively self-criticize your big-picture - behavior constantly.\n3. Reflect on past decisions and strategies to refine - your approach.\n4. Every command has a cost, so be smart and efficient. Aim - to complete tasks in the least number of steps.\n5. Write all code to a file.\n\nYou - should only respond in JSON format as described below \nResponse Format: \n{\n \"thoughts\": - {\n \"text\": \"thought\",\n \"reasoning\": \"reasoning\",\n \"plan\": - \"- short bulleted\\n- list that conveys\\n- long-term plan\",\n \"criticism\": - \"constructive self-criticism\",\n \"speak\": \"thoughts summary to say - to user\"\n },\n \"command\": {\n \"name\": \"command name\",\n \"args\": - {\n \"arg name\": \"value\"\n }\n }\n} \nEnsure the response - can be parsed by Python json.loads"}, {"role": "system", "content": "The current - time and date is Tue Jan 1 00:00:00 2000"}, {"role": "system", "content": "This - reminds you of these events from your past: \nI was created and nothing new - has happened."}, {"role": "user", "content": "Determine which next command to - use, and respond using the format specified above:"}, {"role": "assistant", - "content": "{\n \"thoughts\": {\n \"text\": \"I should start by reading - the instructions_1.txt file using the read_file command.\",\n \"reasoning\": - \"The first step is to understand the instructions and what is required of me.\",\n \"plan\": - \"- Use the read_file command to read the instructions_1.txt file\\n- Analyze - the instructions and determine the next steps\",\n \"criticism\": \"I - need to ensure that I fully understand the instructions before proceeding to - the next steps.\",\n \"speak\": \"I will start by reading the instructions_1.txt - file.\"\n },\n \"command\": {\n \"name\": \"read_file\",\n \"args\": - {\n \"filename\": \"instructions_1.txt\"\n }\n }\n}"}, - {"role": "system", "content": "Command read_file returned: This task_id is 2314\r\nRead - the file instructions_2.txt"}, {"role": "user", "content": "Determine which - next command to use, and respond using the format specified above:"}], "temperature": - 0, "max_tokens": 0}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '4419' - Content-Type: - - application/json - method: POST - uri: https://api.openai.com/v1/chat/completions - response: - body: - string: !!binary | - H4sIAAAAAAAAA6SSQW/bMAyF7/sVBM92ECdtuvnWAjvkWmQYhnkIFJm2tcqUJ9GI28D/fXCcNK0z - DAN2fSTf+yjxgCbHFHWlRNeNje/Wj/uvvzbdiyq7x28Pm3Zpb3bzh8/36nZdYYRu95O0nCZm2tWN - JTGOMULtSQnlmCarjzerZbK6W0RYu5wsplg2Ei9nt7G0fufi+XKeYIRtUCVhesDGu7qRrbgn4oDp - p1US4cX7VU8GXZwoe5GSxaKPUFfOaAqYfj9gTeHs650lTFGFYIIoloHSsRAPGxwyBgDIUCrXlpWE - DFM4iacCdTKIGa6BiXIQB55UDlIRGA7iWz0Ahu1iJp1AYSxBGwyXx46hdXvUtKtrxfksw+itvycV - HBsux5DNX1wHbGU4HI2ZOoFAAq54NwBSKYELa+GsdftpamMVj4ExfAn0Z9R/WTXLOIZ7Vvb5ha76 - YDDJScjXhukNtlATJkTaGzHahHr62MSh9XReq2itfYaWc/LDb16zwY4K5wka7zRRfvwHN4mevkZo - SD2dc/fG2v/94dG8j87XdapcHRermsbcV5cJmvLl9CbHwtB7Gb/GPEMcQU48GffYR1gYNqHajpeH - 
KQZxDUZoOKcO03n/o//wGwAA//8DALRhJ3cTBAAA - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7ca8f6916aeb313e-SOF - Cache-Control: - - no-cache, must-revalidate - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Sun, 21 May 2023 01:14:45 GMT - Server: - - cloudflare - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400, h3-29=":443"; ma=86400 - openai-model: - - gpt-3.5-turbo-0301 - openai-organization: - - user-gsghk4kr5xmbjprowdzr8zt2 - openai-processing-ms: - - '13200' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3500' - x-ratelimit-limit-tokens: - - '90000' - x-ratelimit-remaining-requests: - - '3499' - x-ratelimit-remaining-tokens: - - '86470' - x-ratelimit-reset-requests: - - 17ms - x-ratelimit-reset-tokens: - - 2.353s - x-request-id: - - 13087de20c75011601dacf71bc0bd2f3 - status: - code: 200 - message: OK -- request: - body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "Your - task is to create a concise running summary of actions and information results - in the provided text, focusing on key and potentially important information - to remember.\n\nYou will receive the current summary and the your latest actions. - Combine them, adding relevant key information from the latest development in - 1st person past tense and keeping the summary concise.\n\nSummary So Far:\n\"\"\"\n{''role'': - ''system'', ''content'': ''This reminds you of these events from your past: - \\nI was created and nothing new has happened.''}\n\"\"\"\n\nLatest Development:\n\"\"\"\nNothing - new happened.\n\"\"\"\n"}], "temperature": 0, "max_tokens": 0}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '713' + - '3284' Content-Type: - application/json method: POST @@ -349,756 +63,59 @@ interactions: string: "{\n \"error\": {\n \"message\": \"That model is currently overloaded with other requests. You can retry your request, or contact us through our help center at help.openai.com if the error persists. 
(Please include the - request ID a73e800f9a4efa08668a838196d2950d in your message.)\",\n \"type\": + request ID b6a7095cc0e76b9901aecd234de4f5b4 in your message.)\",\n \"type\": \"server_error\",\n \"param\": null,\n \"code\": null\n }\n}\n" headers: - CF-Cache-Status: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cf-Cache-Status: - DYNAMIC - CF-RAY: - - 7ca8f6f52c0d313e-SOF - Connection: - - keep-alive + Cf-Ray: + - 7ccfc7929fefe287-ORD Content-Length: - '349' Content-Type: - application/json Date: - - Sun, 21 May 2023 01:15:18 GMT - Server: - - cloudflare - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400, h3-29=":443"; ma=86400 - openai-model: + - Thu, 25 May 2023 18:18:42 GMT + Function-Execution-Id: + - yvvl1pjqkaam + Openai-Model: - gpt-3.5-turbo-0301 - openai-organization: - - user-gsghk4kr5xmbjprowdzr8zt2 - openai-processing-ms: - - '30010' - openai-version: + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '30015' + Openai-Version: - '2020-10-01' - strict-transport-security: + Server: + - Google Frontend + Strict-Transport-Security: - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: + X-Cloud-Trace-Context: + - 92320c9bf2c16a55143abddee3e4aa6f + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: - '3500' - x-ratelimit-limit-tokens: + X-Ratelimit-Limit-Tokens: - '90000' - x-ratelimit-remaining-requests: + X-Ratelimit-Remaining-Requests: - '3499' - x-ratelimit-remaining-tokens: - - '89839' - x-ratelimit-reset-requests: + X-Ratelimit-Remaining-Tokens: + - '86494' + X-Ratelimit-Reset-Requests: - 17ms - x-ratelimit-reset-tokens: - - 107ms - x-request-id: - - a73e800f9a4efa08668a838196d2950d + X-Ratelimit-Reset-Tokens: + - 2.336s + X-Request-Id: + - b6a7095cc0e76b9901aecd234de4f5b4 status: code: 429 message: Too Many Requests -- request: - body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "Your - task is to create a concise running summary of actions and information results - in the provided text, focusing on key and potentially important information - to remember.\n\nYou will receive the current summary and the your latest actions. 
- Combine them, adding relevant key information from the latest development in - 1st person past tense and keeping the summary concise.\n\nSummary So Far:\n\"\"\"\n{''role'': - ''system'', ''content'': ''This reminds you of these events from your past: - \\nI was created and nothing new has happened.''}\n\"\"\"\n\nLatest Development:\n\"\"\"\nNothing - new happened.\n\"\"\"\n"}], "temperature": 0, "max_tokens": 0}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '713' - Content-Type: - - application/json - method: POST - uri: https://api.openai.com/v1/chat/completions - response: - body: - string: !!binary | - H4sIAAAAAAAAA0SPTWsCQRBE7/kVTZ9nZdc1msxNEEUIxITkICHIONtxN85OT2bajyD734Ng9FpU - PV6dsKlQo62N2Da4bDR/PT71l2/j5/fN43LyMt0vthOfF9PZz2yBCnn9TVYui57lNjiShj0qtJGM - UIW6GD4MhmUx6pcKW67IocZNkKzs3Weyi2vO8jIvUOEumQ2hPmGI3AZZCW/JJ9RFP1d4Y1/zcyws - xt2ag7xTaGtuLCXUHydsKf1TIztCjSalJonxcnZkL+TP/uMEBtJvEmoVzCGSNc6B1EZgDgeT4HIH - jK9AaooEtdkTrIk8eAZPB6hoT45DS14SCEOkwFF62Cn8anyT6lUkk9ijxiQcUGHjKzqizrvP7u4P - AAD//wMAmH2nWHsBAAA= - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7ca8f7cdac76313e-SOF - Cache-Control: - - no-cache, must-revalidate - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Sun, 21 May 2023 01:15:24 GMT - Server: - - cloudflare - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400, h3-29=":443"; ma=86400 - openai-model: - - gpt-3.5-turbo-0301 - openai-organization: - - user-gsghk4kr5xmbjprowdzr8zt2 - openai-processing-ms: - - '1554' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3500' - x-ratelimit-limit-tokens: - - '90000' - x-ratelimit-remaining-requests: - - '3499' - x-ratelimit-remaining-tokens: - - '89839' - x-ratelimit-reset-requests: - - 17ms - x-ratelimit-reset-tokens: - - 107ms - x-request-id: - - 2bcd88e9e8bce8177b8e694d44cd16fc - status: - code: 200 - message: OK -- request: - body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "system", "content": "You - are Follow-Instructions-GPT, an AI designed to read the instructions_1.txt file - using the read_file method and follow the instructions in the file.\nYour decisions - must always be made independently without seeking user assistance. Play to your - strengths as an LLM and pursue simple strategies with no legal complications.\n\nGOALS:\n\n1. - Use the command read_file to read the instructions_1.txt file\n2. Follow the - instructions in the instructions_1.txt file\n\n\nConstraints:\n1. ~4000 word - limit for short term memory. Your short term memory is short, so immediately - save important information to files.\n2. If you are unsure how you previously - did something or want to recall past events, thinking about similar events will - help you remember.\n3. No user assistance\n4. Exclusively use the commands listed - below e.g. command_name\n\nCommands:\n1. append_to_file: Append to file, args: - \"filename\": \"\", \"text\": \"\"\n2. delete_file: Delete file, - args: \"filename\": \"\"\n3. list_files: List Files in Directory, - args: \"directory\": \"\"\n4. read_file: Read file, args: \"filename\": - \"\"\n5. write_to_file: Write to file, args: \"filename\": \"\", - \"text\": \"\"\n6. delete_agent: Delete GPT Agent, args: \"key\": \"\"\n7. - get_hyperlinks: Get hyperlinks, args: \"url\": \"\"\n8. 
get_text_summary: - Get text summary, args: \"url\": \"\", \"question\": \"\"\n9. - list_agents: List GPT Agents, args: () -> str\n10. message_agent: Message GPT - Agent, args: \"key\": \"\", \"message\": \"\"\n11. start_agent: - Start GPT Agent, args: \"name\": \"\", \"task\": \"\", - \"prompt\": \"\"\n13. task_complete: Task Complete (Shutdown), args: - \"reason\": \"\"\n\nResources:\n1. Internet access for searches and - information gathering.\n2. Long Term memory management.\n3. GPT-3.5 powered - Agents for delegation of simple tasks.\n4. File output.\n\nPerformance Evaluation:\n1. - Continuously review and analyze your actions to ensure you are performing to - the best of your abilities.\n2. Constructively self-criticize your big-picture - behavior constantly.\n3. Reflect on past decisions and strategies to refine - your approach.\n4. Every command has a cost, so be smart and efficient. Aim - to complete tasks in the least number of steps.\n5. Write all code to a file.\n\nYou - should only respond in JSON format as described below \nResponse Format: \n{\n \"thoughts\": - {\n \"text\": \"thought\",\n \"reasoning\": \"reasoning\",\n \"plan\": - \"- short bulleted\\n- list that conveys\\n- long-term plan\",\n \"criticism\": - \"constructive self-criticism\",\n \"speak\": \"thoughts summary to say - to user\"\n },\n \"command\": {\n \"name\": \"command name\",\n \"args\": - {\n \"arg name\": \"value\"\n }\n }\n} \nEnsure the response - can be parsed by Python json.loads"}, {"role": "system", "content": "The current - time and date is Tue Jan 1 00:00:00 2000"}, {"role": "system", "content": "This - reminds you of these events from your past: \nAs a system, I recall that I was - created and there have been no new developments to report."}, {"role": "user", - "content": "Determine which next command to use, and respond using the format - specified above:"}, {"role": "assistant", "content": "{\n \"thoughts\": {\n \"text\": - \"I should start by reading the instructions_1.txt file using the read_file - command.\",\n \"reasoning\": \"The first step is to understand the instructions - and what is required of me.\",\n \"plan\": \"- Use the read_file command - to read the instructions_1.txt file\\n- Analyze the instructions and determine - the next steps\",\n \"criticism\": \"I need to ensure that I fully understand - the instructions before proceeding to the next steps.\",\n \"speak\": - \"I will start by reading the instructions_1.txt file.\"\n },\n \"command\": - {\n \"name\": \"read_file\",\n \"args\": {\n \"filename\": - \"instructions_1.txt\"\n }\n }\n}"}, {"role": "system", "content": - "Command read_file returned: This task_id is 2314\r\nRead the file instructions_2.txt"}, - {"role": "user", "content": "Determine which next command to use, and respond - using the format specified above:"}, {"role": "assistant", "content": "{\n \"thoughts\": - {\n \"text\": \"I need to read the instructions_2.txt file using the - read_file command.\",\n \"reasoning\": \"The instructions_2.txt file - contains the next set of instructions that I need to follow.\",\n \"plan\": - \"- Use the read_file command to read the instructions_2.txt file\\n- Analyze - the instructions and determine the next steps\",\n \"criticism\": \"I - need to ensure that I fully understand the instructions before proceeding to - the next steps.\",\n \"speak\": \"I will read the instructions_2.txt - file using the read_file command.\"\n },\n \"command\": {\n \"name\": - \"read_file\",\n \"args\": {\n \"filename\": 
\"instructions_2.txt\"\n }\n }\n}"}, - {"role": "system", "content": "Command read_file returned: Read the file instructions_3.txt"}, - {"role": "user", "content": "Determine which next command to use, and respond - using the format specified above:"}], "temperature": 0, "max_tokens": 0}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '5468' - Content-Type: - - application/json - method: POST - uri: https://api.openai.com/v1/chat/completions - response: - body: - string: !!binary | - H4sIAAAAAAAAA6SSXYvTUBCG7/0Vh7lOSmPcVnMnIliQBcUFdSPl9GSSHHsyE86ZsF1L/rukSe2a - igjezsf7PvNxBFtABqbWYprWxevNx8Nt+Vbvv6yWZf35/e3dK/Jf3xj37sNGIALefUcjU8fCcNM6 - FMsEERiPWrCALFm9fLFKk/XzmwgaLtBBBlUrcbq4iaXzO46X6TKBCLqgK4TsCK3nppWt8B4pQJYk - 61UEF/FLYpVEICzaXUJpuu4jMDVbgwGy+yM0GM7Cnh1CBjoEG0TTMIFhEqRhhGNOSimVg9TcVbWE - HDI1BacEHmQI5rBRhFgoYeVRF0pqVJaC+M4MgGGbLuQgqrQOVRcsVaeKoXR7ihluGk3FIofoqb5H - HZgsVaPJp7+oDtjaUjgJEx5EBRTF5W8NSmot6sJasnP8MHdtnabRMFZ3Af+M+i+j5jnF6jVp9/gD - r+rUIFKgoG8s4RNswTbMiIy3Yo0NzXzZSKHzeB6r7Jx7VB0V6IdrXrOpHZbsUbWeDWJxugPPrOfb - CC3q/dn3wTr3vxcexfvo/F1T5uq5SDc4+v5SmaFpX81/ckwMtZf2a8wzxAlk4smphz6C0pIN9Xb8 - PMggCLcQgaUCD5At+2/9s58AAAD//wMA0m5dIxQEAAA= - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7ca8f7d89a64313e-SOF - Cache-Control: - - no-cache, must-revalidate - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Sun, 21 May 2023 01:15:37 GMT - Server: - - cloudflare - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400, h3-29=":443"; ma=86400 - openai-model: - - gpt-3.5-turbo-0301 - openai-organization: - - user-gsghk4kr5xmbjprowdzr8zt2 - openai-processing-ms: - - '12147' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3500' - x-ratelimit-limit-tokens: - - '90000' - x-ratelimit-remaining-requests: - - '3499' - x-ratelimit-remaining-tokens: - - '86457' - x-ratelimit-reset-requests: - - 17ms - x-ratelimit-reset-tokens: - - 2.362s - x-request-id: - - 7d145f8982be5decde986f8f3ae9b1e1 - status: - code: 200 - message: OK -- request: - body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "Your - task is to create a concise running summary of actions and information results - in the provided text, focusing on key and potentially important information - to remember.\n\nYou will receive the current summary and the your latest actions. 
- Combine them, adding relevant key information from the latest development in - 1st person past tense and keeping the summary concise.\n\nSummary So Far:\n\"\"\"\n{''role'': - ''system'', ''content'': ''This reminds you of these events from your past: - \\nAs a system, I recall that I was created and there have been no new developments - to report.''}\n\"\"\"\n\nLatest Development:\n\"\"\"\nNothing new happened.\n\"\"\"\n"}], - "temperature": 0, "max_tokens": 0}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '761' - Content-Type: - - application/json - method: POST - uri: https://api.openai.com/v1/chat/completions - response: - body: - string: !!binary | - H4sIAAAAAAAAA0SPTUvDQBRF9/6K4a4nJWn6obPrwkUVRV2IIFKmk2cyNpk3ZF5rS8l/l4qS7YF7 - OPcMX8HANVZcF9tsuX45uunbqpo+Hl5v7w93ZZzWz7sHe1rePEGDt1/k5G8xcdzFlsRzgIbryQpV - MMXierYoi+Us1+i4ohYGdZSsnMwz2fdbzvIyL6CxT7YmmDNiz12UjfCOQoIpykJjdI98riEsth3J - bDFouIa9owTzfkZH6d/ac0swsCn5JDbIpZGDULj0r5KyKp2SUKfVWvXkbNsqaayowNL4UKtA36qx - STU2RgpUqeSDI9Wd1O9Vz2GCQePTB5+aTU82cYBBEo7Q8KGiI0w+fAxXPwAAAP//AwDlYq8fZgEA - AA== - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7ca8f835ef47313e-SOF - Cache-Control: - - no-cache, must-revalidate - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Sun, 21 May 2023 01:15:41 GMT - Server: - - cloudflare - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400, h3-29=":443"; ma=86400 - openai-model: - - gpt-3.5-turbo-0301 - openai-organization: - - user-gsghk4kr5xmbjprowdzr8zt2 - openai-processing-ms: - - '1511' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3500' - x-ratelimit-limit-tokens: - - '90000' - x-ratelimit-remaining-requests: - - '3499' - x-ratelimit-remaining-tokens: - - '89827' - x-ratelimit-reset-requests: - - 17ms - x-ratelimit-reset-tokens: - - 115ms - x-request-id: - - 5dad92e7ce5734a1e14ff1816067b8c8 - status: - code: 200 - message: OK -- request: - body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "system", "content": "You - are Follow-Instructions-GPT, an AI designed to read the instructions_1.txt file - using the read_file method and follow the instructions in the file.\nYour decisions - must always be made independently without seeking user assistance. Play to your - strengths as an LLM and pursue simple strategies with no legal complications.\n\nGOALS:\n\n1. - Use the command read_file to read the instructions_1.txt file\n2. Follow the - instructions in the instructions_1.txt file\n\n\nConstraints:\n1. ~4000 word - limit for short term memory. Your short term memory is short, so immediately - save important information to files.\n2. If you are unsure how you previously - did something or want to recall past events, thinking about similar events will - help you remember.\n3. No user assistance\n4. Exclusively use the commands listed - below e.g. command_name\n\nCommands:\n1. append_to_file: Append to file, args: - \"filename\": \"\", \"text\": \"\"\n2. delete_file: Delete file, - args: \"filename\": \"\"\n3. list_files: List Files in Directory, - args: \"directory\": \"\"\n4. read_file: Read file, args: \"filename\": - \"\"\n5. write_to_file: Write to file, args: \"filename\": \"\", - \"text\": \"\"\n6. delete_agent: Delete GPT Agent, args: \"key\": \"\"\n7. - get_hyperlinks: Get hyperlinks, args: \"url\": \"\"\n8. 
get_text_summary: - Get text summary, args: \"url\": \"\", \"question\": \"\"\n9. - list_agents: List GPT Agents, args: () -> str\n10. message_agent: Message GPT - Agent, args: \"key\": \"\", \"message\": \"\"\n11. start_agent: - Start GPT Agent, args: \"name\": \"\", \"task\": \"\", - \"prompt\": \"\"\n13. task_complete: Task Complete (Shutdown), args: - \"reason\": \"\"\n\nResources:\n1. Internet access for searches and - information gathering.\n2. Long Term memory management.\n3. GPT-3.5 powered - Agents for delegation of simple tasks.\n4. File output.\n\nPerformance Evaluation:\n1. - Continuously review and analyze your actions to ensure you are performing to - the best of your abilities.\n2. Constructively self-criticize your big-picture - behavior constantly.\n3. Reflect on past decisions and strategies to refine - your approach.\n4. Every command has a cost, so be smart and efficient. Aim - to complete tasks in the least number of steps.\n5. Write all code to a file.\n\nYou - should only respond in JSON format as described below \nResponse Format: \n{\n \"thoughts\": - {\n \"text\": \"thought\",\n \"reasoning\": \"reasoning\",\n \"plan\": - \"- short bulleted\\n- list that conveys\\n- long-term plan\",\n \"criticism\": - \"constructive self-criticism\",\n \"speak\": \"thoughts summary to say - to user\"\n },\n \"command\": {\n \"name\": \"command name\",\n \"args\": - {\n \"arg name\": \"value\"\n }\n }\n} \nEnsure the response - can be parsed by Python json.loads"}, {"role": "system", "content": "The current - time and date is Tue Jan 1 00:00:00 2000"}, {"role": "system", "content": "This - reminds you of these events from your past: \nAs a system, I recall that nothing - new has happened since my creation."}, {"role": "user", "content": "Determine - which next command to use, and respond using the format specified above:"}, - {"role": "assistant", "content": "{\n \"thoughts\": {\n \"text\": - \"I should start by reading the instructions_1.txt file using the read_file - command.\",\n \"reasoning\": \"The first step is to understand the instructions - and what is required of me.\",\n \"plan\": \"- Use the read_file command - to read the instructions_1.txt file\\n- Analyze the instructions and determine - the next steps\",\n \"criticism\": \"I need to ensure that I fully understand - the instructions before proceeding to the next steps.\",\n \"speak\": - \"I will start by reading the instructions_1.txt file.\"\n },\n \"command\": - {\n \"name\": \"read_file\",\n \"args\": {\n \"filename\": - \"instructions_1.txt\"\n }\n }\n}"}, {"role": "system", "content": - "Command read_file returned: This task_id is 2314\r\nRead the file instructions_2.txt"}, - {"role": "user", "content": "Determine which next command to use, and respond - using the format specified above:"}, {"role": "assistant", "content": "{\n \"thoughts\": - {\n \"text\": \"I need to read the instructions_2.txt file using the - read_file command.\",\n \"reasoning\": \"The instructions_2.txt file - contains the next set of instructions that I need to follow.\",\n \"plan\": - \"- Use the read_file command to read the instructions_2.txt file\\n- Analyze - the instructions and determine the next steps\",\n \"criticism\": \"I - need to ensure that I fully understand the instructions before proceeding to - the next steps.\",\n \"speak\": \"I will read the instructions_2.txt - file using the read_file command.\"\n },\n \"command\": {\n \"name\": - \"read_file\",\n \"args\": {\n \"filename\": \"instructions_2.txt\"\n }\n }\n}"}, - 
{"role": "system", "content": "Command read_file returned: Read the file instructions_3.txt"}, - {"role": "user", "content": "Determine which next command to use, and respond - using the format specified above:"}, {"role": "assistant", "content": "{\n \"thoughts\": - {\n \"text\": \"I need to read the instructions_3.txt file using the - read_file command.\",\n \"reasoning\": \"The instructions_3.txt file - contains the next set of instructions that I need to follow.\",\n \"plan\": - \"- Use the read_file command to read the instructions_3.txt file\\n- Analyze - the instructions and determine the next steps\",\n \"criticism\": \"I - need to ensure that I fully understand the instructions before proceeding to - the next steps.\",\n \"speak\": \"I will read the instructions_3.txt - file using the read_file command.\"\n },\n \"command\": {\n \"name\": - \"read_file\",\n \"args\": {\n \"filename\": \"instructions_3.txt\"\n }\n }\n}"}, - {"role": "system", "content": "Command read_file returned: Write the task_id - into the file output.txt\r\nShutdown"}, {"role": "user", "content": "Determine - which next command to use, and respond using the format specified above:"}], - "temperature": 0, "max_tokens": 0}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '6470' - Content-Type: - - application/json - method: POST - uri: https://api.openai.com/v1/chat/completions - response: - body: - string: !!binary | - H4sIAAAAAAAAA6xSy27bMBC89yuIPVOGVT+S6AvqU4GmLVBUhcFQG4kxuauSK9iooX8vZNlwoqAH - A73OcGdml3MEV0EBtjFiQ+uzu82XQxU2n/fm4T48uqXP/cv3Bv+sqk8/BDTw0wtaOU/MLIfWozgm - 0GAjGsEKinx9v1wv8rtlriFwhR4KqFvJFrNVJl184my+mOegoUumRiiO0EYOrWyFd0gJinxx96Dh - Kn4l1msNwmL8FVotV70G27CzmKD4eYSA6SIc2SMUYFJySQwNG1gmQRpWOJaklFIlSMNd3UgqoVBn - 8EzgQQawhI0ixEoJq310gkoaVGLSbusq5Uj4BHAnbSczOYh6dh5VlxzVJ+Y0tBXennDLIRiqZiXo - 124RTWJyVI+WXxtUjpLEzg5HSCri785FVAFvyzH1ab2h0SJT3xL+O+BtNmVJmXpsOql4TxNLG504 - 61KYXhMpdXHQN6I2r8wsx4hWbt4ttWh2F5O98/7//tdo1etLc87Mu+KQCTimeKM0CWtiPe3cSAxv - rxLXmG/mpxX9uMiXl4SnlOewJfXQa3h25FKzHUsGBSThFjQ4qvAAxbz/1X/4CwAA//8DALYwIcUN - BAAA - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7ca8f8408fbb313e-SOF - Cache-Control: - - no-cache, must-revalidate - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Sun, 21 May 2023 01:15:53 GMT - Server: - - cloudflare - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400, h3-29=":443"; ma=86400 - openai-model: - - gpt-3.5-turbo-0301 - openai-organization: - - user-gsghk4kr5xmbjprowdzr8zt2 - openai-processing-ms: - - '12090' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3500' - x-ratelimit-limit-tokens: - - '90000' - x-ratelimit-remaining-requests: - - '3499' - x-ratelimit-remaining-tokens: - - '86458' - x-ratelimit-reset-requests: - - 17ms - x-ratelimit-reset-tokens: - - 2.36s - x-request-id: - - a5f70c956e20dee73bf6f83f3ca1b262 - status: - code: 200 - message: OK -- request: - body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "Your - task is to create a concise running summary of actions and information results - in the provided text, focusing on key and potentially important information - to remember.\n\nYou will receive the current summary and the your latest actions. 
- Combine them, adding relevant key information from the latest development in - 1st person past tense and keeping the summary concise.\n\nSummary So Far:\n\"\"\"\n{''role'': - ''system'', ''content'': ''This reminds you of these events from your past: - \\nAs a system, I recall that nothing new has happened since my creation.''}\n\"\"\"\n\nLatest - Development:\n\"\"\"\nNothing new happened.\n\"\"\"\n"}], "temperature": 0, - "max_tokens": 0}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '740' - Content-Type: - - application/json - method: POST - uri: https://api.openai.com/v1/chat/completions - response: - body: - string: !!binary | - H4sIAAAAAAAAA0SPMW/CMBBG9/4K62YHkQKh8tZOZWBhq6oKuc4Rmzp3bu4QRCj/vaJqxfqk7+l9 - V0gtOAjRa+hLrtab3UVQRzlr+3o8rHflbUO5exm38XsLFvjziEH/FrPAfcmoiQkshAG9Yguubp6W - zaJerxoLPbeYwUFXtFrMVpWehk+u5ot5DRZO4jsEd4UycF90r/yFJODqx8bC3X3nKwvK6vOdLOvJ - QoicAgq49yv0KP/WgTOCAy+SRD3prZFJkW79z2K8kVEUe2s2ZsDgczYavRpijYk6Q3g20YuJvhQk - bI0kCmj60fxeTUwzmCwcEiWJ+wG9MIEDUS5gIVGLF3Dz6WN6+AEAAP//AwCfo6VWZgEAAA== - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7ca8f89d1b52313e-SOF - Cache-Control: - - no-cache, must-revalidate - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Sun, 21 May 2023 01:15:58 GMT - Server: - - cloudflare - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400, h3-29=":443"; ma=86400 - openai-model: - - gpt-3.5-turbo-0301 - openai-organization: - - user-gsghk4kr5xmbjprowdzr8zt2 - openai-processing-ms: - - '1539' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3500' - x-ratelimit-limit-tokens: - - '90000' - x-ratelimit-remaining-requests: - - '3499' - x-ratelimit-remaining-tokens: - - '89832' - x-ratelimit-reset-requests: - - 17ms - x-ratelimit-reset-tokens: - - 112ms - x-request-id: - - 8885d0872614ba24235f88115811c52a - status: - code: 200 - message: OK -- request: - body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "system", "content": "You - are Follow-Instructions-GPT, an AI designed to read the instructions_1.txt file - using the read_file method and follow the instructions in the file.\nYour decisions - must always be made independently without seeking user assistance. Play to your - strengths as an LLM and pursue simple strategies with no legal complications.\n\nGOALS:\n\n1. - Use the command read_file to read the instructions_1.txt file\n2. Follow the - instructions in the instructions_1.txt file\n\n\nConstraints:\n1. ~4000 word - limit for short term memory. Your short term memory is short, so immediately - save important information to files.\n2. If you are unsure how you previously - did something or want to recall past events, thinking about similar events will - help you remember.\n3. No user assistance\n4. Exclusively use the commands listed - below e.g. command_name\n\nCommands:\n1. append_to_file: Append to file, args: - \"filename\": \"\", \"text\": \"\"\n2. delete_file: Delete file, - args: \"filename\": \"\"\n3. list_files: List Files in Directory, - args: \"directory\": \"\"\n4. read_file: Read file, args: \"filename\": - \"\"\n5. write_to_file: Write to file, args: \"filename\": \"\", - \"text\": \"\"\n6. delete_agent: Delete GPT Agent, args: \"key\": \"\"\n7. - get_hyperlinks: Get hyperlinks, args: \"url\": \"\"\n8. 
get_text_summary: - Get text summary, args: \"url\": \"\", \"question\": \"\"\n9. - list_agents: List GPT Agents, args: () -> str\n10. message_agent: Message GPT - Agent, args: \"key\": \"\", \"message\": \"\"\n11. start_agent: - Start GPT Agent, args: \"name\": \"\", \"task\": \"\", - \"prompt\": \"\"\n13. task_complete: Task Complete (Shutdown), args: - \"reason\": \"\"\n\nResources:\n1. Internet access for searches and - information gathering.\n2. Long Term memory management.\n3. GPT-3.5 powered - Agents for delegation of simple tasks.\n4. File output.\n\nPerformance Evaluation:\n1. - Continuously review and analyze your actions to ensure you are performing to - the best of your abilities.\n2. Constructively self-criticize your big-picture - behavior constantly.\n3. Reflect on past decisions and strategies to refine - your approach.\n4. Every command has a cost, so be smart and efficient. Aim - to complete tasks in the least number of steps.\n5. Write all code to a file.\n\nYou - should only respond in JSON format as described below \nResponse Format: \n{\n \"thoughts\": - {\n \"text\": \"thought\",\n \"reasoning\": \"reasoning\",\n \"plan\": - \"- short bulleted\\n- list that conveys\\n- long-term plan\",\n \"criticism\": - \"constructive self-criticism\",\n \"speak\": \"thoughts summary to say - to user\"\n },\n \"command\": {\n \"name\": \"command name\",\n \"args\": - {\n \"arg name\": \"value\"\n }\n }\n} \nEnsure the response - can be parsed by Python json.loads"}, {"role": "system", "content": "The current - time and date is Tue Jan 1 00:00:00 2000"}, {"role": "system", "content": "This - reminds you of these events from your past: \nAs a system, I recall that nothing - new has happened since my creation."}, {"role": "user", "content": "Determine - which next command to use, and respond using the format specified above:"}, - {"role": "assistant", "content": "{\n \"thoughts\": {\n \"text\": - \"I should start by reading the instructions_1.txt file using the read_file - command.\",\n \"reasoning\": \"The first step is to understand the instructions - and what is required of me.\",\n \"plan\": \"- Use the read_file command - to read the instructions_1.txt file\\n- Analyze the instructions and determine - the next steps\",\n \"criticism\": \"I need to ensure that I fully understand - the instructions before proceeding to the next steps.\",\n \"speak\": - \"I will start by reading the instructions_1.txt file.\"\n },\n \"command\": - {\n \"name\": \"read_file\",\n \"args\": {\n \"filename\": - \"instructions_1.txt\"\n }\n }\n}"}, {"role": "system", "content": - "Command read_file returned: This task_id is 2314\r\nRead the file instructions_2.txt"}, - {"role": "user", "content": "Determine which next command to use, and respond - using the format specified above:"}, {"role": "assistant", "content": "{\n \"thoughts\": - {\n \"text\": \"I need to read the instructions_2.txt file using the - read_file command.\",\n \"reasoning\": \"The instructions_2.txt file - contains the next set of instructions that I need to follow.\",\n \"plan\": - \"- Use the read_file command to read the instructions_2.txt file\\n- Analyze - the instructions and determine the next steps\",\n \"criticism\": \"I - need to ensure that I fully understand the instructions before proceeding to - the next steps.\",\n \"speak\": \"I will read the instructions_2.txt - file using the read_file command.\"\n },\n \"command\": {\n \"name\": - \"read_file\",\n \"args\": {\n \"filename\": \"instructions_2.txt\"\n }\n }\n}"}, - 
{"role": "system", "content": "Command read_file returned: Read the file instructions_3.txt"}, - {"role": "user", "content": "Determine which next command to use, and respond - using the format specified above:"}, {"role": "assistant", "content": "{\n \"thoughts\": - {\n \"text\": \"I need to read the instructions_3.txt file using the - read_file command.\",\n \"reasoning\": \"The instructions_3.txt file - contains the next set of instructions that I need to follow.\",\n \"plan\": - \"- Use the read_file command to read the instructions_3.txt file\\n- Analyze - the instructions and determine the next steps\",\n \"criticism\": \"I - need to ensure that I fully understand the instructions before proceeding to - the next steps.\",\n \"speak\": \"I will read the instructions_3.txt - file using the read_file command.\"\n },\n \"command\": {\n \"name\": - \"read_file\",\n \"args\": {\n \"filename\": \"instructions_3.txt\"\n }\n }\n}"}, - {"role": "system", "content": "Command read_file returned: Write the task_id - into the file output.txt\r\nShutdown"}, {"role": "user", "content": "Determine - which next command to use, and respond using the format specified above:"}, - {"role": "assistant", "content": "{\n \"thoughts\": {\n \"text\": - \"I need to write the task_id into the output.txt file using the write_to_file - command.\",\n \"reasoning\": \"The instructions require me to write the - task_id into the output.txt file.\",\n \"plan\": \"- Use the write_to_file - command to write the task_id into the output.txt file\\n- Shutdown\",\n \"criticism\": - \"I need to ensure that I write the correct task_id into the output.txt file.\",\n \"speak\": - \"I will write the task_id into the output.txt file using the write_to_file - command.\"\n },\n \"command\": {\n \"name\": \"write_to_file\",\n \"args\": - {\n \"filename\": \"output.txt\",\n \"text\": \"2314\"\n }\n }\n}"}, - {"role": "system", "content": "Command write_to_file returned: File written - to successfully."}, {"role": "user", "content": "Determine which next command - to use, and respond using the format specified above:"}], "temperature": 0, - "max_tokens": 0}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '7465' - Content-Type: - - application/json - method: POST - uri: https://api.openai.com/v1/chat/completions - response: - body: - string: !!binary | - H4sIAAAAAAAAA5SSza7TMBBG9zzFaNZp1ar0tsqOxV0UJCSqi4REUOU6k8TUsYNnTIuqvDvKz6Vt - EAu2M8o53+fJFU2OKepKia4bO9vs9pfo3+e2qD7a1f5TuXj+snfNy/rZfmBM0B+/k5bxi7n2dWNJ - jHeYoA6khHJMl0/bt0+r5Wa9TbD2OVlMsWxktpqvZxLD0c8Wq8USE4ysSsL0ik3wdSMH8SdyjOly - vd0keIPfFqtFguJF2dtos9y0CerKG02M6dcr1sSv4OAtYYqK2bAoJ11M74RcV+GaOQCADKXysayE - M0xhHI4Lukg3zHAHjigH8cBVlNyfHUQ2rgSpCETx6TDGJdC+rpXL5xkm96xAir0zrhyALxWBcSwh - 6q4iQ6Af0QSCmh4sqhAKcA5GHmwmB+PE9wMfpYkyl4tAYSxNxY1VbnDO4DPTvxPfeycM3fm14Xr6 - GuQ4ho6pBHZQqZ89rcfmoKztbWO1vNcyHKnwgXpVX6rzTUNzQ+r0Kjsba//n3QdQm7zed9z8dV6n - ahocD6RJFBXK6Z9xf9EB8K6r2re71eeoNTEX0dpff1L1ycaAmWuxTbAwznB1GHCYIotvMEHjcrpg - umi/tW9+AwAA//8DAMizuHGnAwAA - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7ca8f8a7ea59313e-SOF - Cache-Control: - - no-cache, must-revalidate - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Sun, 21 May 2023 01:16:07 GMT - Server: - - cloudflare - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400, h3-29=":443"; ma=86400 - openai-model: - - gpt-3.5-turbo-0301 - openai-organization: - - 
user-gsghk4kr5xmbjprowdzr8zt2 - openai-processing-ms: - - '9391' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3500' - x-ratelimit-limit-tokens: - - '90000' - x-ratelimit-remaining-requests: - - '3499' - x-ratelimit-remaining-tokens: - - '86464' - x-ratelimit-reset-requests: - - 17ms - x-ratelimit-reset-tokens: - - 2.356s - x-request-id: - - 2e3c14cd6285c662dde517d25194a06f - status: - code: 200 - message: OK - request: body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "system", "content": "You are Follow-Instructions-GPT, an AI designed to read the instructions_1.txt file @@ -1161,15 +178,15 @@ interactions: response: body: string: !!binary | - H4sIAAAAAAAAA6SSTW/aQBCG7/0VoznbCAohxLeop0StegmHqq7Qsh7bG/bD2h0LO8j/vdoYSAsV - l17n633emTmgKjBDWQuWptHp/fO3r7z88rh23/sfvVJv69fw8vl5ZxZdUWKCbvtKko8dE+lMo4mV - s5ig9CSYCsxmy9VitVjdTR8SNK4gjRlWDafzyV3Krd+6dDqfzjDBNoiKMDtg451peMNuRzZgdj+f - Jvgx+xyfLWcJsmOhz6HVw2xIUNZOSQqY/TygoXAa650mzFCEoAILyxHSWSYbDRxyCwCQI9eurWoO - OWZwDB4T1HEM5vgEoXatLiCw8AzbHjyJQtkKuCZQNrBvZUQNm9mEO4ZSaQJ2UBCTN8oS7GvBIMai - mGGxI7DU8STH5E9VTyI4q2w1Sr/cEIhmhIrz6jhLRue+B2VL542I5X8zjGUdQ2BqwqVwo4UdNVNY - h7E62twctYwRtogDY/CW8Ty3KTxaofs3uqq7RXQBJL1iJVUwpyNYoncAsqH1sVkwPIEwUDqt3f5f - 9wAhZesFk+4h4lNZKqnIsu4v/YeGxO4ktVda/8e1rzY9Cg3J6emO67z6OSsMjQzn1V9gCl9dvuqY - iLUf7deYJ4h3kCNPbgccEiyVVaHejK+HGQZ2DSaobEEdZtPh1/DpNwAAAP//AwBBklTnKQQAAA== + H4sIAAAAAAAAA6SSTW/aQBCG7/0VoznbCAsFiG/NqVHUA1KiqqortKzH9ob9cHfHBQf5v1cbQ5JC + xaXX+Xqfd2YOqErMUTaCpWl1uni4bV++3s2bpy8Pptyult9+fZ+v7lbb5/7xNyboNs8k+dgxkc60 + mlg5iwlKT4KpxDybL2+ms+ViliVoXEkac6xbTmeTm5Q7v3HpdDbNMMEuiJowP2DrnWl5zW5LNmC+ + mE0TfJ/9Fs/mWYLsWOi30PI2GxKUjVOSAuY/DmgonMZ6pwlzFCGowMJyhHSWyUYDh8ICABTIjevq + hkOBORyDxwTtOQYLvIfQuE6XEFh4hk0PnkSpbA3cECgb2HcyooZ1NuE9Q6U0ATsoickbZQl2jWAQ + Y1HMsNgSWNrzpMDko6onEZxVth6lH68IRDNCxXlNnCWjc9+DspXzRsTyvxnGsj1DYGrDuXCrhR01 + U3gKY3W0uT5qGSNsGQfG4DXjRWFT+GyF7l/oou4a0RmQ9IqVVMGcjmCJXgHIhs7HZsFwD8JA5bR2 + u3/dA4SUnRdMuoeIT1WlpCLLuj/3H1oS25PUTmn9H9e+2PQoNCSnpzuu8+LnrDA0Mryt/gxT+Pr8 + VcdErH1vv8Q8QbyCHHkKO+CQYKWsCs16fD3MMbBrMUFlS9pjPh1+Dp/+AAAA//8DAKcjlwMpBAAA headers: Access-Control-Allow-Origin: - '*' @@ -1181,21 +198,21 @@ interactions: Cf-Cache-Status: - DYNAMIC Cf-Ray: - - 7cbda46dcdfe13f2-ORD + - 7ccfc8867f9a2304-ORD Content-Encoding: - gzip Content-Type: - application/json Date: - - Tue, 23 May 2023 13:28:48 GMT + - Thu, 25 May 2023 18:19:09 GMT Function-Execution-Id: - - f495yye134fc + - pbmesm0aoa9z Openai-Model: - gpt-3.5-turbo-0301 Openai-Organization: - significant-gravitas Openai-Processing-Ms: - - '19439' + - '18581' Openai-Version: - '2020-10-01' Server: @@ -1205,7 +222,7 @@ interactions: Vary: - Accept-Encoding X-Cloud-Trace-Context: - - 84cce4a68c05919b6fe5a262ed4a2f23;o=1 + - 244d650faeed742e67c771ab39ea3909;o=1 X-Powered-By: - Express X-Ratelimit-Limit-Requests: @@ -1221,66 +238,15 @@ interactions: X-Ratelimit-Reset-Tokens: - 2.336s X-Request-Id: - - d999eccb67e039480b5f0d38b5dc5b63 + - bd98d60ec0822b964eb683bbb670db8f status: code: 200 message: OK - request: - body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "system", "content": "You - are Follow-Instructions-GPT, an AI designed to read the instructions_1.txt file - using the read_file method and follow the instructions in the file.\nYour decisions - must always be made independently without seeking user assistance. Play to your - strengths as an LLM and pursue simple strategies with no legal complications.\n\nGOALS:\n\n1. 
- Use the command read_file to read the instructions_1.txt file\n2. Follow the - instructions in the instructions_1.txt file\n\n\nConstraints:\n1. ~4000 word - limit for short term memory. Your short term memory is short, so immediately - save important information to files.\n2. If you are unsure how you previously - did something or want to recall past events, thinking about similar events will - help you remember.\n3. No user assistance\n4. Exclusively use the commands listed - below e.g. command_name\n\nCommands:\n1. append_to_file: Append to file, args: - \"filename\": \"\", \"text\": \"\"\n2. delete_file: Delete file, - args: \"filename\": \"\"\n3. list_files: List Files in Directory, - args: \"directory\": \"\"\n4. read_file: Read a file, args: \"filename\": - \"\"\n5. write_to_file: Write to file, args: \"filename\": \"\", - \"text\": \"\"\n6. delete_agent: Delete GPT Agent, args: \"key\": \"\"\n7. - get_hyperlinks: Get hyperlinks, args: \"url\": \"\"\n8. get_text_summary: - Get text summary, args: \"url\": \"\", \"question\": \"\"\n9. - list_agents: List GPT Agents, args: () -> str\n10. message_agent: Message GPT - Agent, args: \"key\": \"\", \"message\": \"\"\n11. start_agent: - Start GPT Agent, args: \"name\": \"\", \"task\": \"\", - \"prompt\": \"\"\n12. task_complete: Task Complete (Shutdown), args: - \"reason\": \"\"\n\nResources:\n1. Internet access for searches and - information gathering.\n2. Long Term memory management.\n3. GPT-3.5 powered - Agents for delegation of simple tasks.\n4. File output.\n\nPerformance Evaluation:\n1. - Continuously review and analyze your actions to ensure you are performing to - the best of your abilities.\n2. Constructively self-criticize your big-picture - behavior constantly.\n3. Reflect on past decisions and strategies to refine - your approach.\n4. Every command has a cost, so be smart and efficient. Aim - to complete tasks in the least number of steps.\n5. 
Write all code to a file.\n\nYou - should only respond in JSON format as described below \nResponse Format: \n{\n \"thoughts\": - {\n \"text\": \"thought\",\n \"reasoning\": \"reasoning\",\n \"plan\": - \"- short bulleted\\n- list that conveys\\n- long-term plan\",\n \"criticism\": - \"constructive self-criticism\",\n \"speak\": \"thoughts summary to say - to user\"\n },\n \"command\": {\n \"name\": \"command name\",\n \"args\": - {\n \"arg name\": \"value\"\n }\n }\n} \nEnsure the response - can be parsed by Python json.loads"}, {"role": "system", "content": "The current - time and date is Tue Jan 1 00:00:00 2000"}, {"role": "system", "content": "This - reminds you of these events from your past: \nI was created and nothing new - has happened."}, {"role": "user", "content": "Determine which next command to - use, and respond using the format specified above:"}, {"role": "assistant", - "content": "{\n \"thoughts\": {\n \"text\": \"I should start by reading - the instructions_1.txt file to determine what actions to take next.\",\n \"reasoning\": - \"The instructions_1.txt file contains the necessary information to determine - the next steps.\",\n \"plan\": \"- Use the read_file command to read - the instructions_1.txt file\\n- Analyze the instructions to determine the next - steps\",\n \"criticism\": \"I need to ensure that I am following the - instructions accurately and efficiently.\",\n \"speak\": \"I will start - by reading the instructions_1.txt file to determine the next steps.\"\n },\n \"command\": - {\n \"name\": \"read_file\",\n \"args\": {\n \"filename\": - \"instructions_1.txt\"\n }\n }\n}"}, {"role": "system", "content": - "Command read_file returned: This task_id is 2314\nRead the file instructions_2.txt"}, - {"role": "user", "content": "Determine which next command to use, and respond - using the format specified above:"}], "temperature": 0, "max_tokens": 0}' + body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "Write + a concise summary of the following text:\n\n\nLITERAL TEXT: \"\"\"This task_id + is 2314 Read the file instructions_2.txt\"\"\"\n\n\nCONCISE SUMMARY: The text + is best summarized as"}], "temperature": 0, "max_tokens": 0}' headers: AGENT-MODE: - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F @@ -1293,7 +259,7 @@ interactions: Connection: - keep-alive Content-Length: - - '4459' + - '294' Content-Type: - application/json method: POST @@ -1301,15 +267,11 @@ interactions: response: body: string: !!binary | - H4sIAAAAAAAAA4ySQW/aQBCF7/0VoznbCEJCqG9FUSqKcqkSKVIdoWU9trdZz1q7YwWK/N8rYwgp - iCrXmXnzvtl9WzQZJqhLJbqqbXz74+Hh+2xTL2bPzXNe0t3j4u5+pYZPPxeze4zQrX6Tlr1ioF1V - WxLjGCPUnpRQhsloMr2eXk9vxlcRVi4jiwkWtcTjwU0sjV+5eDgejjDCJqiCMNli7V1Vy1LcK3HA - 5OvkNsLj7vf6aDKJUJwoeyyNxuM2Ql06oylg8muLFYXDXu8sYYIqBBNEsXSUjoW4u2CbMgBAilK6 - piglpJjAvrhv0Fq6YoqPJQHTWsBwEN/oDgtMAHHgSWUgJX1sheXVQNYCubE0gDmE0jU2gybQbrKT - LLseaFdVirN/9uxEKUYfSTyp4NhwccS5YAfdgcpw2O1i0t1r+A0Yzp2v1A5cHGQk5CvDtB9bCwSh - Opwa11Zx7xnD06fwL3ClKcfwjZXd/KGzuf8RnQBpb8RoE6qeag5MtAMgDo3vxEpgDqqC3Fnr3gwX - 53ZK68YrIbuBDp/y3GhDLHZzen+oSb0erN6MtZ/8xEthSLHf3kaH9O3VZ+FjVVFv/O50wqZ8cZrZ - vtHNHuXnKAeIHcieJ+UW2whzwyaUyz5vmGAQV2OEhjNaYzJsX9ovfwEAAP//AwDl8KHQMwQAAA== + H4sIAAAAAAAAA0SOzWrCQBRG932K4VtPQn7U2FmWSiuuSlu6KEXGydWMJjNp7hUFybsXobXbA+dw + LvA1DFxjxXV9m1Sr++/Vgnnz8XRoqmqxeCuWp339/tC/vD5DI2725OTXSF3s+pbExwANN5AVqmHy + 2XyalfNqWmh0saYWBrtekjKdJnIcNjHJyiyHxpHtjmAu6IfY9bKWeKDAMJO5xn/6hvOZhkSx7Y3M + 
JqOGa6J3xDCfF3TEf80htgQDy+xZbJDrYQxC4XpvlVg+qJOXRi0fVVHmEyVRDWRrJQ2prW9J+cAy + HN31gddFKmdJMWpsffDcrAeyHAMMWGIPDR9qOsNk49d49wMAAP//AwAqXzViVgEAAA== headers: Access-Control-Allow-Origin: - '*' @@ -1321,21 +283,21 @@ interactions: Cf-Cache-Status: - DYNAMIC Cf-Ray: - - 7cbda4ffadae111a-ORD + - 7ccfc90cfd676375-ORD Content-Encoding: - gzip Content-Type: - application/json Date: - - Tue, 23 May 2023 13:29:11 GMT + - Thu, 25 May 2023 18:19:14 GMT Function-Execution-Id: - - f495u7ofauc1 + - yvvl4c81ashb Openai-Model: - gpt-3.5-turbo-0301 Openai-Organization: - significant-gravitas Openai-Processing-Ms: - - '18627' + - '2097' Openai-Version: - '2020-10-01' Server: @@ -1345,7 +307,7 @@ interactions: Vary: - Accept-Encoding X-Cloud-Trace-Context: - - 9f90e5f759c4963f38509965c99deef1;o=1 + - 6232341524f6b478562ccd013bfb5708;o=1 X-Powered-By: - Express X-Ratelimit-Limit-Requests: @@ -1355,13 +317,371 @@ interactions: X-Ratelimit-Remaining-Requests: - '3499' X-Ratelimit-Remaining-Tokens: - - '86466' + - '89456' X-Ratelimit-Reset-Requests: - 17ms X-Ratelimit-Reset-Tokens: - - 2.356s + - 362ms X-Request-Id: - - b789df6a372f156ab649b8822388cf4b + - 598ef1969eede681b5f5554e147d12e7 + status: + code: 200 + message: OK +- request: + body: '{"input": ["This task_id is 2314 Read the file instructions_2.txt"], "model": + "text-embedding-ada-002", "encoding_format": "base64"}' + headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '132' + Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/embeddings + response: + body: + string: !!binary | + H4sIAAAAAAAAA1R6XQ+6Orfn/fMpdvYt80RApGXfISggLy0Cok4mE0BEQUBeWqAn57uf4H9yZubG + RFpC2q61fi+r//Gvv/76u03LPBv//uevvz/vYfz7f6zPHsmY/P3PX//zX3/99ddf//H7/f9m5nWa + Px7vpvhN/w2+m0c+//3PX/x/P/m/k/7562+tbgTCX7NTTz5wyiE7+BFZ/N3oTPl718IbeCJ6lMJb + Ol2FuoOhNCrU3t/P5ZJ2ywLl+s2oxx+TfhCgJwIopBesY2diEwgEH55Sc0aT4vb9IlmEgxE079gU + TT2dAGgD4EqOSyTmn9JpMesMenkyU61pwrQPmqmDY3R3ifActZSVofqGWu8M1BLNvp9JNIjQvSAD + o7yQHYbmhof37O1Tl3hvNvWcxYHvXLyxrtVdSL3hHSs0tQ9k4+tD2Ytu84ZKnF+JfGUvNtOxt0BZ + lSU2dlmeMu12kuH0tF2MoeuXE+b8FnbaCeH9NqpSJifHANxK7oxPt3MCZge8I5kE9RHdItPsZ27D + d4Dr6ZYau4wLZ+OCfLmj4QEN9vcYipe4tkCOH1syPF4iI08lV6F/QQ25EFtK2T05WdCxuRu2RVgy + +oaDBO6bR0xNTozAgjfcAG96CbCVvXVnSDrjAJVd+sC69Fh+4wRU6k3DF9fmw/kK/AqaQS6gT71R + 9aVJ+Ag2ZTBQlF72TAzexgDvDXenJlEVsJyihw3mkO3p4UX5fqBvRhRdSBMkiee4XMBndwPwds6J + ZAePfjLZSYTLu66xPR33jNxeL1tZnCHCzokXwqEhag4KztapaabMmTjhiGANLjdsbw3EBAd0EXzY + W50ecmKGE+/INWAfTiMKLESH2UJhwXJfP6jd82dduH73lgKsrUeUG5T6gX7yHNJm+8H7I3g4MyjM + DB4ky6b3SyKVg5HVgRxeLAeH+AIArbcMwUMW9kQWoc7Y9vPhoVd1V4w7oICvploB1J60xO7DU4EI + FCPaNbcnJruplMtv7Ns8nDW6QUpx6MP5dGgR8D47Sm1tSNIhLNoWvtF2INCExGFuWKmKYB72ONzk + T7ZoWEDw0cQIVUItASLUOQQWrnvs2d+xX5rH1QDeB1CsC7VfTp4bqfB5yzwCVfByBtizGuqm5lCn + ZUO4bDwkQWd3bbCTdEO6PNtShMy/nNBHFqt0gtd9AoXQEjEOJr0UEe4zWHeGSgQhE51JzYJ8s9vd + IFXP10f50VQ1UFR+ayP2+uzD7TVMAmCe+RN29kedTVlmT2AE4YPUr3PlDJi7tTCM05KalrDvF1lS + RZjtuxsBe0wAGwrZAIa1ZX/yn+1hO0GaThPqEp7qAxNiCJvHd0HiTbT6gS0QQTvUjwSGyAGkiLME + GpZnIv6ze5bz4t1uwHz4Z5zG9xSwsSk4pdlcChJJoa/TDLUVPNavO5KmMkj56dtVUMwuL3p8ju+U + 8vpFglGhfbDrdcFaD2ghH7GWUrVuff2LT6oNbQu3aKM8dykVbXcBxV71sEYlnfGSaUngVRMdH16f + b8+2WyUAl2DTY3RrbTZfFRbDoowbavLcsZ8dbxYhH13O2HidD86sab32O1+kzAcvXFxei8DX4t5Y + vbRI78TNxYe7MTIRyNBnXV/LybsRMoxz103JmM0FlP2dTFVPb3qaoaKG1vu1YHU53MBwK/gcfk8N + 
omj4Js6w1eAEDmfpg95Wfi4J3CdvOMy5ga26eoRMPqYGDFj+oMa56tjcRmcN3j74Sx721yvn1Dnb + 8Csad7r/OoeSH3SjkJmSbaiJyiObB1BnsCn9AYnm5wIm6+wNYK+GIT2V9zFtv64bg8a9JTSDxQjo + fc4NcMS9ieTNyw4nTfAnxXJUngCDtToTheQN27dxxs6OkXDmNrCFpSYI1LiJbT8eiloE6lk64ENn + +zp58Icb3B+fInWl+A2oek41aH+DDuv6LtRZ4+8q+Llo4rpfZs8fhwQCLB0mbDH/GxJj8WzZA0VL + Y1ed+zEydi7kCnIk0sf89u2L1Br8DC/2y49+hmKvAQ3lDrbG98dhXeIg4N15mToRbPX50W8IfN7p + h3wl3tFnV21kKH1hj+2kOzjzsZI5aOGqp9o9assJ+YWtnEBh0tjaVCHRKnoT3328pYZ/fuusmg8x + UEcorfjEpUPS9JEcxxeTGu9kD5YQbwKQi9MTPw9NyZZ+02hAyt9vNIE20Cm29AyyYzmgieyKfjxC + LMF+3zzJtK6H7F6jD7YfWGLLND/h3C0RryBOPmBdmNK+Q/LsygdVKPHB/5jlthBEFUZRnWE3Kb49 + vRMWw0qCexotaVHOQ3V/gwjkI3ZFGIWkfVoZXM+L3u/9yVmS6IpkFCQyRZ/8xZhwn3NQ7DUPW3eh + Z71GYx58+wCiWZMpW9b6CGdt3KApaFQwi+84gd40ujhWCWPLZXO34Os8ikRZ43V6nFgE2mNjYPvK + 9mCp95wKK475FMv5oM8kqkRYOSmPLUv4guXGpBr8zrsysqZkZ7TESm6fIySyirDxfuh8KNFZxvbm + 1aXTOd/4sCtFSKTp3qTDPlfQD38JlGINTHqYdaAyvDOaNrt3OXdLxkP2ujk4lbqp/DYBbWFGJoTD + Fx3CSStPEXh3xKHmODRgUhlNYFZGBjqHUw2mOPFqoH02J+o8+ns5w523gGufnPHePMhsDFQ7gvZk + QyRtNglbvpUHgcPetzX/vJQ9La+APXf4ogaeHmw5fc0ImvyM6fHzmdmcnJoFet2zRr37Gsrl8Uxc + QGRKkG+7n5CmZbLAeOED7KVfWk6sGBb4pFFOtbg5h9Mx0QvAo2NPpu9XSceTaXNgD6cHvnX9mw1X + gbQgVB2ODCueznfzqMF49G74aKnfkB1yaYHuCzQUL7rfz49AVX/rp/fg7qRLH9stvGl+ST2xNcrF + 3xe2UhhC9sP/ctlIWQtd6eTiNIKWQywQL3JXPVtql/dL/9WuDwRHkdZ05YMplckmk3e7BKLXESjO + rHI5D1IKv9jV0JfNyRJMsHn0C5LMs5/Sp+W9QbZJZHp8E9yzYyn60IeGhM3TaVvSoAtvcOG+CbWi + VCkH8eK6EJrdQgDk034uxGUAaz3Fjvbe9u3lK/tguc4ntHydgS1cxiqw4iF2+7johynEGjjfEUXC + sMXltHkrCxBfXIoEr3qXSykNMZRToyOb9GLr4kOkNhDjIqCGUupsyzWlCsh1aelB2Z7SafMWJqjy + bo6PbynpWcu9DVjsux1Fu+fZWRLFRtDHzUhP+H3Sp1pbBsAOO5ViJz+CWbtvOGh0rxd1guMRbLvE + ceGzKjUExeWbzkoV1ooPMpeeiCDqDBbQApi9d6jdWRKgsIA2QJx0wCY3fAFZ6xV8i1aEs7zjy1YP + oxaW++pBTSyicm6juwYIaXfU018JYDC/36AB6hKr86SxZV0vRKFeY9dcSp0U4dyBiyGJ1G2VTdl2 + H62D+lc5k754AzDfqaRCTq8QVW/PKVzxiYBHKh/xvsvHcKnqSIaWo/FkvkRqyNJi0sCQogIpk6yl + q97g4WuxXHwLSqT/4g1cvluINpfZB4uiKz5IUQCofs9TxlLhjSDiaYSIfr3p8wJUCUQoEuljucY6 + qUrZkpddtiXLzenAKzqlHAB3GdMj3ovp4EyyDHtJNggTprScLhfLgPmpbWno0y6lTnkjMJrPyi8f + +uVt3Q1YW0lA3ZQF6VT5ogYlfrKw7lM73MYYWb/4wFpR1+kUcUMObbOG1JbNzFnsTSfD4LxQ7C1X + UWfbooiUvXoOcRg/SL9s9VcOlffLpWoxFYAoVFfBZygZocLhAZbp7HPK63y0ybDWe36D9AruSueK + jU9Knfmr7WuoP6QMe/xRLsmezw67eKgGerwGBhCse5LA6/ZpkO27Epzhdb0fZO8uytRtv6d+Wo76 + oPhwrChWCy4ctHIfw89ijxg/nxdnxccIOtIAqDPfAjaf8qsMg+Cekh0eeGfCl7GFxySrsPEcu5IV + L50Hd8PvScMNJ9bt7FsMhdAWsRmll3DgzGwA8dmu6Cl7OykDzKmh5D0L6uT8Neyzsx4DuNu4+HB6 + RIxVR7ECWD098LE/OQ4jl/MbisqtpG5x0J35YzBfWd//jet8Q9RMaW4PjKZnvKT0trd5aHwijnpH + /EzHW99L8DzRERtROjrTZY8DON82FTY5kWdLsqkgeHrqgUZ3tdS/S/WJQP6UPHxKuBkw7baXfnzs + Dx6PMoX2Tqs/AmrPyRROKUmQ/PLzfM0vmY1oOi0//YfEbtOGrA2jGNz0FyBDjO8OA1MrQupvItRJ + fJi2/nBC4DW8AiInPNZJ7tkLXOsxqodxAOyIXWNHlTxHSvs02NYcsQyfU7OlqvMkznha/OGHv6hR + k1r/bLRnLq96FJXH8NWz23Cr4arv8EH/fEJ6Ec0FDlBGJHpLSzq+GqT90Yt7QY6dL3gMAwDPMaf2 + 42Wk22mSa/Dj7/y4X1LSbxoViJKr4rugtIw9p6lQ9hfZpSdXCPSB5bYGjxdpxCEle0f84aNkBQqq + qsxhS9GjCpiP4Ex1T2odFkOug/zEZ/jkbh76fNkecqglLxUj6tY6s4XWkvmnNZLlKsv6mCzBAs93 + lyLutlwAYVZqA0lIeOy4r6EfZz7QAG6yA3o9tUM5BVulBiZvlRSjL+dQtW04ALX97sd3HVa8HBHM + ViRRfaEkZRN7xHK5iTekUavZ6fPrgmCccipaFO2s//F3lJdUrPqlBKy2NQLUdrlQt8HAWYh3EJUV + T6h1FxywVAAffnqYwFPR9GO9Nd6ytUCLPnXdTsW9KIpwsE85NR52XtLhs63gMckrsnOFRW8DIeDh + KGSIRiQC+hAsk6Gs/Nxjk7ZNGRYAlMn+pVN3ja/JOJ5jZezamcxIe5fj1jllf+oX9mIhZPUWIPhq + Ykq68WaXY9j7B9g2AY+1ZXd2ZlMTebiJwi09ip8QLNvLJANCVA97pVw4w952NNgeuAQb1uaQ0t2Q + EaBI1pFmDQb66HKeIf/OV831b8nm08QB5yx2qHev+5K3XMuFaTcZZJe9nZBdP98B6u42oeqzUFJ2 + uygqSEGyrH5CCUZflFp42eQCRfEx71sjq3158O8HbPf8rFfnLHvDTffKyKr/GXGd54rnNKb2o5/D + 
ytu+JtiedYzt4s36aecaCTyhm0/Pby9joxGUBTxhFVKr4YtygtfTDYiOccJe1L77cf+tWgWGB4Df + jmP102V4qEC6gS82OHEo6W6IBhC0vYpNeHqAOQNDAlPRxXg/14VTl+KpBuv+oo0c7fUxfic+9O4u + wtZjEwNh5hMNHqRFXPnOox8LUR5AOGgZ1fCF9ePFEQkEdwnjCO+RMw0kC+AsZJTwVS2ySf64BAiy + blKVeWLZ8lsrgtuDL6AVf1Oxw+kEtr1jIF7IPDbnB+4An9NnS+DKF/ptUcSQvRKH7s86BstIVU5Z + /ZSffkmnKTQ16Iy7nGZnoOvT5dqJcJzkKxGfo10uU8gGyHeViPGiTz2z8VUC+9dmT7ZAaPrxDDgL + gN5pyDY57cqFy0ANL4YsonLvQmd8Tjr/x9/pDz50ls8mRPKKB6hZ589L8JD++G8n0/RKho4tgZ+a + VORrnqWSNeVelR+2oGMUyHXI1LLLIXdAMlGa4y1lvZ2/oTpyEvWKg5NuY2xYPz5HJNXXQxrq5wma + pjaQzcvb9SyVGwK70PXpWXbqckqPjQtXvCTbHz99TtMbdvLo0dM2gf1yr3RJSfz3HVv3vgBEms+c + supDssQPI1zrPQ+DUSix9tQG0A3GBcJlvGk0fpMtGEQ10ODlYadYhxIJF1myRNhLkkH3zsUJm+Gz + rcGj/dRYg1LtLIeWvaEyRi1a9Uu/xAtGEFjoRQ+n5gnYsv1mPz+TnhK+BmQvciI4P/ccVmd5ZIMY + fHMwRqmLUfp02ZwUTgSPr0gkDF8Aa1Z9A37+r7P6QVOsTa1y5/cII9+j6aypqg+R60Rk2s8nQIK3 + QaBomJAIJpX6lU9asrt9F9i++AJj96yUYGvPX2qZ5jHlm+Ml/6PXPa+TwbTjdzfwFccjtseb3U/d + x+4gL6obepruuJdI4UPo48+IDyv+/OGPfqEdENgxFM56+o0gZWeCjcqq0yryAYTregmbtGu6PJ6B + C29VrSC2+ulz3Fwn+ZePo7zM5XI1rAMcntMVR0a99P09Odkg084Hqn6fQbg0CYxBLZOBGmu8s6P7 + jeCnD9K1XqjlRATdUkB/atB8yE/h7N1rF24P4LXGd6ZP990yQTvs5z98ainFfQ2zh1FgtZB7ttCy + vQHwiW9UXfezV85mBpfsqlJLUNRSnA9Yg+nn5GH1GQfptNX4BV7auqLe7tky9nk9W6i6oU2PN7FI + Z+g8ArDGGz1xb5zOyksS4XX7MJC86rdZPtQJHC1FpwduH6WjcAgqyB1cmar+gWfLdvREefXPqOGI + xpr/WJW9T6hj+wX3YK7uNoFvJAzreQTh6l8uAJEnIcuyO+tbi8wBnF2ZInHFsyUEnirbD/FFj6yq + e7KuHyR2wBCpEFdOZXnMoWMkLyKuftGsL6IPf363frc/4Bs3z0nqzMmkpngm6cSEmAPfrvPJm390 + /aJQR4V2dTCxv/p9Kx5yYP0exque47tmvMlXBAyK9KukD+iTRPLaf8EGUYX+e65ervIwQpdABhqH + ffWmg8eLPK7++wssC80WeJGDgCKzbcsFyTsEn7fco6erUvXCz5+0K8KwCQSz5H09d3eVc+exIQ+v + fq3PA7Tsm4XuqqGu9eqkQntTYHoUKQeG7qO1culpH+p5BWbzGGgHYDx4lSbT8cXa5a5IcOWb1HZ7 + ms7n4D7B2Ja51c/SnOlFiCqrY+RSP557xtr9Qfrp0zUe1JAFly+Cs/wNsHWpQNpNtW7DwzDscSou + L2e+36McFgaasF37W/Dzw4BpJCp1tfeTzW0/QllODx3W3rrsDEI1yco5vwfkZR4SRqG75xTBNPZ4 + 7zhFyIYyE2HnfWKSqeClT5r7meDLFipsqFXLBiI49m+cqt/vI5xe17MBjOfxRtVLXuqzp919OOnD + AWvaNDpjb0k5bIz5RVHuHlb8DHz4PqmP1f/fgLVf4EL/dc4ITNmSLv5Igz/vu5/FDGdfnDq4+lcU + N/LUzyoXi2DVd6S61nxJe2EIgKgkJXlpMgW92jYQQrmRqFluH4Cez5eDkhwNH3s1GFN2sDccDC/L + FlutFTP680NXP4Z6M7qlg3itud9/lLnXL+CvpmhB7hXPKHhqLphU8pLgXN1HrN6efji/GkOFnrcc + CUg6N+3L0HrDZlec8e1Spenk6FoCuVc0UwfyoK/j8TRAMzy+EP/cfvQ59goDHpV7h1rbCnomZJGr + YDv28HHcJ+XcP7oWGl9ZIPDhqYwnxY2Dw4EDqOexoJMQbwNof/3u1y8p5x8f8e9Tjj6fzwy+t3g3 + wDV/Vnz3y7V/ZMNfPuaffA9YLKcyvIpiQgpUgJB8XyWCmflo0GadPzdB04LQjglWv18lJHEiBzA+ + Dik11cTQF9Xoa+girkDDBy7p5J1mHy6ZMvz8FF1caLTAnx5L95iwtV8kgpKQEElrfZx3UWiAZUw0 + xBkoScVTXdXQsARGUR5TZ4LfmwhGIUfYwUgvya+e/vzCQ06adEp1N4Mr38QHnwb9H3/5ddQu1MKi + yCahSVvIMMnoyVC0kmyEWfqD3z/9xUp1PsCGG3i8ryMt/OMPf+f3G1sbAYOxHHkZ0Hev4aMqxSuf + uUg//EDbc8WcGaR7Tln7Y0i8BtT5+c/wmmwAte69yibPzVQYx1cTbdCT6cPaL4GCG1Rr/Ezp2v9Q + AX4JDl7rN2D+a78o0uSecYBcAdBzuOXh5cFM7H0WWpKTolpK3z4muvqv4cyUsgPbYlLxxf9g0H3d + nQar95Dhy5JaK38QRWBJdk/CzjbBfFVjGw5zZmD1ezHT2a0lS3EvroG9PK7BHKhaBDeC8iHT6neu + /SteWf1xbJ/40fn1e6FJXl981FOtJL/+3crX6NHIKJguezOAZRg4ZOhjtRRLG4hAD7KInoJ7H07L + dM6g1H2uWBsvWzbLmt7BjrS3lR+OJdO6ewRKmVlIHMo92Fp6+ZZ5se8RLXZ6v/pxBDIohmTa7LR+ + keWyAkFWOfTxnt/hvNYbMNVaTuJ38mIr/mlw7Q+g3/ytfpQzuG9OFeEm2S5ZLfi58vfvVsB//uuv + v/7X74ZB3T7yz3oxYMzn8d//fVXg38kj+TfPi/+m4p+bCGRIivzvf/7PJYS/v31bf8f/PbZV3gx/ + //OXIP25bvD32I7J5/99/q/1a//5r/8CAAD//wMAMvLWqOMgAAA= + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cache-Control: + - private + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfc91e49f486d5-ORD + 
Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 18:19:15 GMT + Function-Execution-Id: + - pbme4ambn2d3 + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '156' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - c0d4388eaa5635db1c89b13a41bd4308 + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: + - '3000' + X-Ratelimit-Remaining-Requests: + - '2999' + X-Ratelimit-Reset-Requests: + - 20ms + X-Request-Id: + - f01268b23b94415c41df7d0463c49b41 + status: + code: 200 + message: OK +- request: + body: '{"input": "a task with ID 2314 to read the file instructions_2.txt.", "model": + "text-embedding-ada-002", "encoding_format": "base64"}' + headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '133' + Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/embeddings + response: + body: + string: !!binary | + H4sIAAAAAAAAA1R6WQ+zurbk+/kVW/uVPgpTsNlvEAgzNkMG0mq1IAMJCSEMtsFX97+3ku/qdvdL + pIBD8LJXVa1a/o9//fXX313VXM/T3//89ffrMU5//4/vtUs5lX//89f//Ndff/3113/8Pv+/kde2 + ul4uj3f9G/67+XhfrvPf//wl/veV/zvon7/+br2rT1bEM3innc0WXkRwIO82FEJy240JTK12oc7e + XIdTdXKv0FZ2NsU08Sx201gGyeYzUy/uTlWva0kP8T1wMErFQ8MC1UrAZ11esFUcloGV60iGN1ip + ONrIJ77E6SUAyqR0pMu2z4EdrHsNryITKXb5CKgpzhksBvbE/tM6h9wMWQk/xzejDpfqgfWHYIGh + v0fYDGo9ZMKqsWGxMidqBGzgDPnDBoLtvMFRm0vhBN9Zr7/gKUaqPSScrXfyAnX3zolyP4Z82SWF + pgkD5thZbY5gmS7lAoNgsvHWzJOcsRiMoM4mC2PbfYRT//EYaI+RhDdpeqmW8BQ4YDnhHXmdHD8k + swEP2oPPa+oLZMq5BE8OUHYyIeQF7Fw8Z2kNLoNWoYVEl3ypbWboU/XcUvcgsYGLjX2GdXqhON6/ + 5maq5zXUFDvd05CKZiWGzYnBoxzusfuyDpyuzp0IGci3OHjnFCypKPfrkz4o2MrSTSN2vZgBxeML + uTG7bJaToRjQi+IzDbFtWGJhrMX1lNgN9aZnV81FLRba1lbO1PbtMCefxxLotXx/YeuVV9Yy1Jsr + eK+qDK2+8WD0QgVIhM2MN5Gx4j23TE3vZZ4jiRplOIlLWsLEFjoauKse/Fmf/nhBGB1PviXp690Z + 9pkwU7STtg2zjKwALvEG6mptDWZ6fXoAU22mKBpxrkwsYbr1KBmS1cWvaDveFlh12gFHa10ZuDG1 + Zwg1o6JnwozqieqHDSJ7xPhmuw+LQifUoL6TLVTFXcPZ42wnMGahjMCTZwNh73cPVdFIv/tBAXNk + 7cr1DbcVNff9Jief9TqC68vziX2bHq15ibwIpA9Voag+zJwm/JXA+LV+Ix1H48AOSWvrazcv8WaA + qFmMVn3A8FUG5B6dnmD0GqCC3JVDbJ2uJ8D6VdgBoV698FZMj5xpZ/MJD2faIM78IZ8MCErgWehB + w7qHOXv59U5XvU+CYyK2w6yO2gbY5ijRKEpiwANZceDM1D2aZlvKmactEOrj+4OmV/MJOVYfj9XV + GS1qgkfA59qbHV0YYo69Zx5wSfh0CBT7S4mRPCFr6QulgyB59qhdTna1ADW0oZ1liNrZ3gNMbw9X + 2GZjQYDz3oGFCoSAsrjdMFoJw7B8HF2EuQBkxA7EypdmMCOoT6KO1C+eEPamPVTXbEMDXCthf3h2 + GWQy62i8RLAhQA0dcN5ce+wkcQvGzbgR9GzaNISp2dEa5WN71qi8DtBTPbhArJt1BM1Pt6YhVpf8 + cJyfIgxtccYmn/yGtdXDg/AWcbrhF41P0uHigFVmH7HD7+HA+xOX4W6LTVJdyaaR150KQdQ9ZrwB + bRouue844KEtAkZ5svAP4p0D9ce+ohuVFM3cDeoV/uJnbLvnsOSKjaCYoxua/THg7Pv+UAM1x440 + l9WIxfkM4eOmI81RrGF+0E0GtTMi2PHFO5/KaKMCRSpdagFhzCcvno0/+R9fZRwuavB2oFNtbGrW + 1rsin5DbsJJPhCIamZzVkRZALngdtsJn17Ai4z3MnVahpj/2nAnTJ4OFK2zQoss3zpu1L8Ci3XCK + m4w1DLJCUNft+0aYGZ5yftZQBE8vNaU+2vnD7N0WEeI+NKgbeQcwPxy/hKqQ3Gg2x1Hef9cXbG6Z + h8Q0vg9U1MIavvT+TuDlMDZz//EWKKrJHke709Pi1iFOoFI+b9To7QTMWuE9YEIXjGQu1c0rsnYF + LAPo0svRnPhwVQ4P6CK5xZEosXx8BJszrJrgRcMocgfZKp0dOChNSGbNIcOkwDbTut3Fp4V9BJxK + 8GTDRlQHauTTrvoUtVjCiYMtjXZbGn6U/FUCfXx9sD/W72o+XomoxW8P0MBRmoaxm/6A6bvaEeGQ + mpwtjrPAMGgfeDt0tsW0DwqgNzKJuitHsJhg16V+jMU9TdP7aPF7pjJtjw8TjQQn5sy0Ageshy3E + 
VnJPq6lrQQQiH42o25lCNdXe2oGr8+6Fj9bNsqS1wGUonW2FjLuTbdVuqtlwSVCJxI84NsQ/wBrS + 3O6QvrsE+bLyHyLklxfG8bMyOTu8X57+nT+2Su1g8e5IMu1xZzY2zBvK5ffrLv7ijT2oSaCnt+4A + 87eX0ItxODccPAwE5QN6YjPTvWqx5rjQbsX7QE2+vQ389u4T7do/XjTK6nPOTqOXgR+/hCE9h1NA + bQgxVWdscbPhzJyNSI/j9o1kWuwrHibWAlk6Lvj6As+cacdoBx9pNFCk6mf+fX4GDOa02E9Na/jM + tl5A48Mripw8yGdPnjUYmGOAdwDV1UzZC2rm3jsTi0uyxbeXxNHfkm8jGBdS2B7Lif3JR3ZtngPL + qmCE2WQ0NIx3iJOfXlCEk0kju4qaEQBXgC3jPhrjsbfmTyCov/H4Ft4UfruYqa3rnRthc0mnnK+2 + dwbJTTBoMCbFwEKtkOEXfwmU/G3Fwua0gK5EGvXN492a+cleoB85GjaC84ETo1Vr+NsPL3tV5fNh + 1RXwEZGYWr06DfMPH+TkzJGmpG7OhsbdwVHKAI1xNDa8vrwXCMWkJ5ImpM2cfcIraAqFkv5wby1y + nJ8yhMnZw+4unazldAYH+Lsv5vPLWvJXUIL6rKfUuK1qwDqH9fAbT5zu+0f+mz84N4ZFsbL9VFP7 + eWhASuUPuX/nv/TFqgcQDSX15LgC/Lavr0Bc6ic9HC5S9flg6P3whHze2G94Gs2CLl7OE3mkqZ6z + Bw93EAaejvfT88LHAsAHWF/aJ0X+al9NpRQu8JW+W2qImmkRNXjbcCO0MWLK1R6kFMsinKViwQ5m + Vc6u57aAfbUakFBvIO9IMhQqaBROA3fYA15CXQVFFYfYiyHK+f6adtC9nyTqLdKWL7fTvYapv2uQ + dluagUITddrWls7Yvy7Cf+mHSdMCsvajRz5zFsrAbpeBLGE0VWN+KbI/8cMYWtasmEMLPscXI6rj + xtZyHN8tlGl8pVuVWJb0cMwSBMZV/q533EjgFTtasz551KCfJqd0+thaBcMbYl/+ma+XvlUc7eBT + w7CykB2Pqgbp8b2l0eLJOV1tPwzs9dOVzPv+Uc32K0P6CBODRj4WwPf3T/hOXERet8Nx4LKSy3qt + LGu0sO3W4v5TSqB02YXYXhG9Gct3uAE9yBkS14sd8r2UEN041Bm+bR606iVS7ODr49nY0bWkYfTy + FuAINiOZZ98JuVBWCaS8M/EffKSrXoO6UBzwoVF8i8nbZweKk66gZdea+WS/SgQKrUnIfi33FR+4 + d4bbp3//8gfjXHXIZl1NJccodGEzgzzMoDX2At2k63dFx88Wgm5PLkhIxcPwzWcIPZescKScOKfW + ZO9gfi9uGBV3Nszb5cDA1SEWjR7Blc/+uCoBem9ORKWizGfnMmUgeNUjrarTtVnUgNpgKwmIejEk + 1fQJZA1YFy37kx/05KhXOK/XmHyk+cmZu2IILnsY0nMOi2a2URXAn/4Lok4YyE9PvMv9mWJJf4Ts + 6ZbLH7wJtm6Wy/NHRwCbsklIOa/C0QloBu/eIaPWeaMPi31PA11Z9XvsU7vnPBwNW7+fRYrLJx2r + hVu+BnS34UTZWC2YCqiOwH18fLT6vi+7tLWmyyAr0fsgJY3SjkcGagsecc4uRiWZihzAh5zcMZJe + Dp83K0OAcCe80FpvnVA6K6cDvNz7GL191Qt5LO8EsDJrgcZj0oSfK30WOr/KDVkaXuTzGzwJ9OIr + w47RPCvOVwYDjb5+0fCStTmPQYagSYsesfx0q/ib90/oDfc7Yv6ONjPitQOhXUVo3DacdxYpHvA4 + AAeHL/c9EFoONngeL2tqzK9LSG/r0YGfy/FOMfFqPkfjPvkz3rgDrSGvtDwDrV+ViPfPYmBLYmZA + WJaMRkL5ALyazE6XV4vy5cs9F5072UEjuBzw5lMM4fyW6RlIoIDU0uSttRziTwQP3XL74VH1Rw9R + 3pvYTuN7w0jYBqBKjyo1+3MUjgEOCvXHn/4YLdUs0/sIhdjf0JIYPv/WQ1dgIimlzgRP1qI36RO4 + 8SujOO+QxXcGXn56HSODHPkcFrEGjun2hkNx7sMRLY0Gv3z41bcZbyM7aeEuX03oHsJTzsRbVUKx + gAk2sw+v+OIrouaO6yOS9RY0LCn0ANaL5dFN4stWl/qGCue0TeKVdll98XBGGnuuAFoN7gwmziwZ + 9m2sYbezt5ynGnPgpsstIgrtofqz/9exFVK7VuyQnkwQAVOLMrp9HJ1BUsdlA8OP+sbezrxWX//i + DP2tK2HLUAifW1J7+lcfUvs+yPm026WCfrSeLk4i0x/k9KI8oaDWFoJPy2m4GjYJtNy3Rz1s7wdW + fZJWZ08d4Pjc1s1y018euG0PMWLzS7cWS970sDitFMSytAhHt5sD+L4qayTHkQLG5+ncagBxiO2m + eg+snQZBi1y9wJvHqrXmHMQJsLrgTmPjcB7m47WVwVvTJPz7zmqJb/48b7oFz3D+zg+AsxMhXf64 + YLiDudegBz5kWW+mZrgq1wcAZzvCxip/Adbagw087nz1hroZvhXzVXd8fCRKUTbVkuxZop+cNqKB + kr4rsj1XGohdWabR/h5W8sU4a8CKd2vqbunMqdhEZ/jEY0Y3unwDS4jdJ/TueU6d51AN0xuMBH79 + ICLMZzawVdMyaDl7kUbkHFbziLON/tWv6Bju7IoJ0z351X9Ek+6yNVP35cDyGidEkqUqJ4PiP4Db + lRo2TcUfvvlsQzyUO7rdfXQwaf1mB9r96o68EV4rFmqJrJcfQ8XRtsnB+Iuf/jhWNCaiM0w2PGVg + M/KaItEPGlZHSwC/eINWdalavPXOHjhAq0PwoL8bMn+kCH73K41PKRzYYd/s4OooemQ2qjgnV9Y9 + gRNcBwIO63s1BY5twwofNoiqcQL6FaodXauniOLD3rOW8LRx9FG03tTM53IYoGOpYGeQJw7Xr3O1 + DMjroR2uLBrrhzufexA84ZePCXfdSz5/5wvDj/bGLj1XQ1dfOdS/fgQeBWcC9OMuhrYdvBYfQG+C + L3+qWmCvcuzK/n5gOFTbn/7Em/S1VMNxuRswvlcBkj9ey+mPP3/5gYgaASk7+j3si/FB99TQwhGL + 6zP44hl1H9wL5yUy0B89lQZiFs4PMwrgV++jJX0t+RJ3OwQDjO8UO8NrGK/4LsIghAV5rxwhnOG7 + 7H7rS/hhvoFnfRJG+POv9DGxrMXA6ysQxptL//h190Yu/9RXxsX1qrmMAg1aYyfQtKHZz69b4Gfl + IWo21cXicXy2oWOP2h98mNVSRSCNFYMouzS2uDaL5z/4GUfdx+J5EGRwh28Yycf1PHS//UkL+YSk + YUfDOaARBIs3ALIO2ZbL9j31YPl8VIitSf7zDwms5eZF1tJsc6UzKNJoe5yRhsXIYu1h1KB4uU6E + 
+8YrnN/PqYf74tihS8ySSvnWU8CxiYYG++hUdFEkCD1P+GbCZzOIo3IgsNvdfGrvXkrDQWh6sF8y + mXABAWvabYoA/vR0LMXDMPdPq9bfcllgL6qXcKyZvsCV4sXUea6ShofrqAOCvqkx3sc1/8b7DFNb + dOjRePcDMc/yBkr1a4+jb32zXFnXQoekMd30opsviv44wzkwZ7xVSxQydSMK8ItHqMvvNWBjnwV6 + xvKJurogA+7uGw+qbgWpdX+zsA9cL9PoUk443PLHQPwJtD++wlH0fHBOdrszNB8fkZofg4KxM3kB + pcPRoXgcnwMP3xtRJ8fp/ccPY0toG9Cxbi1ZdHkF6HH5bOBbU6WfHq8Wv5N3sLvSI46lu9GI13vT + QT8rt9RwbQMo0LE0SGEHqFkgJ5RzHznrLLoE2BdIXI39Kuwhci4GtcMOAQ2olg2r7BUhpRRasMTN + FcLiXh8wEuRnvjQ4UiEeit1PPwzUB3AH+boP0etxbAdmzl4EjtHbJFoEecPdsUfgIVkxkYtNZ82F + As4/Pxd7+4s2DF++BV8/h25B5jTzl8801zraX3xO+fJqdgm4Fa/D10+YOL+DudP3HTCRsBLCZnLC + 4Qy1OXmhlb5Y+ahK6g7GG3f70z/WkkH/Cg/PZIMv0r1u2I5mtpbF5EqD3SxyytKRgZeiMRohcc6Z + KrGD7r79Fgdt2A0ss8NeQeMS0PjrPy6/evqHF2ZT6eGnLhcNljx16U/ffC7TVYakEABS1oE9MAMo + CB4950m//kM1G6Y6wlA5reiW+yxfdHYNwLe/QLeWdR9Ykivqz5/G293nAqYUyzI8WOSDkXY6WWxn + 5gJk8HEiS7p2c/k+qZp2idoDUlcLyemJzgeweh1ran/9kolo9Rl6142PNM8zc9E6nq4QgCJFoscO + 1bJ/lCM83jsVu7n/al4/f2gVzhFF76MxSEL28cAs9w4Re+8+MOMjFfAcwwj//LAlXlUZ6GFjYzu/ + JJZytB8aqCy5pli73PLpJewKuC/2HfYvwhhOyWZJ9NofPeq0rgKW227M4E6YdxjDgxuyjWAv8P7u + jtQJ4Mea12u5hdY9PJOW34dBkuwqA3ZvidixXJfLQPd22ucm+9jbz3hY1lf5DAXdqImSvDYWK/d+ + 9NvPRFaXTzWeRiOD6J51ZGHr4x8/QYs2+P3tF53C4csvUDSrhawlvwTL1jg+4aV0CTVNfBvm1ugN + LXlmEw2wOFpMx7cCfEq7oFE8BtY8qqUG5/IVYHuZ9yH5hMABIlwW7KR7Eo6t3TjwMM8lRlfHCjma + byMYd9ODbr/+B5fyBEEtFWpUbzt74JtcIdD1HYkolLFm9DRNgMdY3mPffdOGh0m4wAVsTCLcrydr + mQ24A8/t1caOEstNB/XLA35U3//pbYuRd38FTbm1KL7Lu3xGx4H98ceNUM1DfhM+BygeR0gt+bTm + XdfDBKgL03H+rbfmj6obgG/m7ptvTcXOz+oAfeNdIEhYndPtpXCgE1ORuvChDex8u9o/fiVvIQv5 + vHo+IIhHbUSfrXKtiHQ8JX/qQYtd6mqI5R3Uv348RswQqyWa0qv+WoIV9q6ksPg05PUv3+lu9zo2 + ZPCtP/4mAtXpOoi/eveXj9aXj/7osdh3AhzR59KQ8uapsPLblNqb45yPzp0c4OaeC/ir78P5q6e0 + w8xLpG6qTfibL9SF8kDtFbk0DLqFA+Xkykljhg3gbbmHOnxcdOxqrcH5QKwI/vpRhpopFnO7dQBM + +20RtgHBz68P4BxYM1qH7MWXoQ6u8P7uj0iWtzhf1pJUQ4z3BQ7to5NLwcMoIc4VmQbTuAPffCWQ + nt0HqjW5bBZ/JZVQNE8Lmiic+NcvvoIt3vZffC4bJj6LHrJMcCjWlyanRUce8J1s0dfPeIY856EH + v3qexuLWzuX9Ol2gHudP7H/5ns+m+lh/+2OE0Y+VT2Ob2nBQtC3eVsJ9UOZcO/zxp3/6jcWoy6Bm + Vi729SwN+2HCG2iuBAvdTIeH0/29OsPX9XDG2xoE1aKzQwC23SAQnn7U5tsfZXBvSSG1CzIBGsgr + G/z420zvY8jFKC5A9Fm22Pqc54p++42gcAjBm9qdBrotShEc5FT5g0d8Lc8jTPzbFt2eSlPxUDA1 + PT2/RqKDSxnyuZ9LPX16Ib3eUJ8vU1hAKFN8RcO330IfnpxA723ZOHqFE+C47UbYCeYZu+K0rr56 + JdC/+gs7fYKsdvCtAmrhsMf2rD/4KHrnHfz2/4h49IHF02b+9nf5Bgdjojb0F7/+glY41jcRoMmZ + INAekUQg7NOBwd2uh3F0zNF26J4hU4x3oYuu32AXynSYRT+pwWvxVjRcv2DOfnqTtvsZG80wgPmV + HDKYBOSEsaRvrHFf1k/ovsOWuod7NfC1vCbw5xfY7O40km8JDzBkMqYh1Ix8OfGlgF//8Ot3tNYc + OLYD17EZ/vR9zuZZ2Wlp13U0ip4bII1qpq6l2SMYb/GnmrOPdYWFON+IqnyKkOVOisDXj6cZifR8 + +VxEDazHffLH7/vpb/1c3Z7kgmtzkAInsuEdORZaC2WRz5GX9vrfv1MB//mvv/76X78TBm13ub6+ + BwOm6zz9+7+PCvy7vJT/FkX531T+cxKBjGV9/fuf/zqE8Pdn6NrP9L+n7nl9j3//85ek/Tlu8PfU + TeXr/73+r++//ee//g8AAAD//wMANss50OMgAAA= + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cache-Control: + - private + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfc9233c9622cd-ORD + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 18:19:16 GMT + Function-Execution-Id: + - yvvlnv17ahsb + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '18' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - 23e5ab6873bf9bde0d2d32702adcf101 + X-Powered-By: + - Express + 
X-Ratelimit-Limit-Requests: + - '3000' + X-Ratelimit-Remaining-Requests: + - '2999' + X-Ratelimit-Reset-Requests: + - 20ms + X-Request-Id: + - 4f7afd2da27d1e26072a09f6de91c9d4 status: code: 200 message: OK @@ -1405,8 +725,591 @@ interactions: {\n \"arg name\": \"value\"\n }\n }\n} \nEnsure the response can be parsed by Python json.loads"}, {"role": "system", "content": "The current time and date is Tue Jan 1 00:00:00 2000"}, {"role": "system", "content": "This - reminds you of these events from your past: \nAs a system, I recall that I was - created and there have been no new developments to report."}, {"role": "user", + reminds you of these events from your past: \nI was created"}, {"role": "user", + "content": "Determine which next command to use, and respond using the format + specified above:"}, {"role": "assistant", "content": "{\n \"thoughts\": {\n \"text\": + \"I should start by reading the instructions_1.txt file to determine what actions + to take next.\",\n \"reasoning\": \"The instructions_1.txt file contains + the necessary information to determine the next steps.\",\n \"plan\": + \"- Use the read_file command to read the instructions_1.txt file\\n- Analyze + the instructions to determine the next steps\",\n \"criticism\": \"I + need to ensure that I am following the instructions accurately and efficiently.\",\n \"speak\": + \"I will start by reading the instructions_1.txt file to determine the next + steps.\"\n },\n \"command\": {\n \"name\": \"read_file\",\n \"args\": + {\n \"filename\": \"instructions_1.txt\"\n }\n }\n}"}, + {"role": "system", "content": "Command read_file returned: This task_id is 2314\nRead + the file instructions_2.txt"}, {"role": "user", "content": "Determine which + next command to use, and respond using the format specified above:"}], "temperature": + 0, "max_tokens": 0}' + headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '4429' + Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/chat/completions + response: + body: + string: !!binary | + H4sIAAAAAAAAA4SSW2vbQBCF3/srhnmWjB3XN72ltKWmFAJtIFAVs16NpU1Ws8ruiNg1/u9lLTsO + NnVe53a+s3u2aArMUFdKdN3YdPJ99nx3+202XhYfW3n4+en5/uvDuv/45cfn0R0m6JaPpOWw0dOu + biyJcYwJak9KqMBsMJ6O+sPpZDRJsHYFWcywbCQd9kaptH7p0v6wP8AE26BKwmyLjXd1IwtxT8QB + s9l4kODp9mt9MJ4kKE6UPZUGN9NdgrpyRlPA7PcWawrHu95ZwgxVCCaIYomUjoU4OtjmDACQo1Su + LSsJOWZwKB4atJZYzPFXRcC0FjAcxLc6YoEJIA48qQKkoretsLjpyVpgZSz1YA6hcq0toA20n4wr + i9gD7epacRHvFA6kMqGXY/KWwZMKjg2XJ5D/CEG0pgwHUEVhYldZMLxyvlZ7XqmURGgmHZ/Ib/ay + JORrwx3Z3mMQai44Gqu4Q0jh/pqP994jzzmFW1Z285cu5q4RnQFpb8RoE+qOag5MtAcgDq2nzu0c + VA0rZ617MVxeyimtW6+E7AYiPq1WRhtisZtz/6Eh9XSUejHWXv/Nd1ORY3d9lxxjeNi+SCGrmjrh + V6UzNuXL8/B2jTh7Wr9EOULsQQ48Oe9wl+DKsAnVoosfZhjENZig4YLWmPV3f3Yf/gEAAP//AwDn + zGd0PAQAAA== + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cache-Control: + - no-cache, must-revalidate + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfc928d9db1254-ORD + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 18:19:36 GMT + Function-Execution-Id: + - pbmenv06a5yg + Openai-Model: + - gpt-3.5-turbo-0301 + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - 
'19200' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - bfb649068e555d640f6ad40b261616c9 + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: + - '3500' + X-Ratelimit-Limit-Tokens: + - '90000' + X-Ratelimit-Remaining-Requests: + - '3499' + X-Ratelimit-Remaining-Tokens: + - '85989' + X-Ratelimit-Reset-Requests: + - 17ms + X-Ratelimit-Reset-Tokens: + - 2.674s + X-Request-Id: + - 0f8563fa8b875d2635d106aebaf6c721 + status: + code: 200 + message: OK +- request: + body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "Write + a concise summary of the following text:\n\n\nLITERAL TEXT: \"\"\"Read the file + instructions_3.txt\"\"\"\n\n\nCONCISE SUMMARY: The text is best summarized as"}], + "temperature": 0, "max_tokens": 0}' + headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '273' + Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/chat/completions + response: + body: + string: !!binary | + H4sIAAAAAAAAA0yOQWsCMRSE7/0VYc7ZZZddq+ZqoUKhl57aWiRmnxrN5m2TJxVk/3sRWtvrMPPN + d4HvYOD2Vlw/hGL6NP8M9qVvl4vDcnd8Wz6/so9t8/hA/gsavDmQk59F6bgfAonnCA2XyAp1MPX9 + bFI1s+l0rtFzRwEGu0GKppwUckobLqqmqqFxynZHMBcMiftB1sJHihmmrTX+0Le4bjSExYZbMmlH + Dbdn7yjDvF/QU/5lJg4EA5uzz2KjXA05CsWrvY3Kxyzp5K4PSlglsp2yausDqWh76tQK/yp53ZRy + lhVKjBpbH33erxPZzBEGWXiAho8dnWGq8WO8+wYAAP//AwC0qgQXWQEAAA== + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cache-Control: + - no-cache, must-revalidate + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfc9b398c12c5c-ORD + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 18:19:41 GMT + Function-Execution-Id: + - yvvlzurg1bsa + Openai-Model: + - gpt-3.5-turbo-0301 + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '1797' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - 87a5b0baa518b220c789956026d896ea;o=1 + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: + - '3500' + X-Ratelimit-Limit-Tokens: + - '90000' + X-Ratelimit-Remaining-Requests: + - '3499' + X-Ratelimit-Remaining-Tokens: + - '89461' + X-Ratelimit-Reset-Requests: + - 17ms + X-Ratelimit-Reset-Tokens: + - 359ms + X-Request-Id: + - 6ef56affce707bafde194ae7d76813dc + status: + code: 200 + message: OK +- request: + body: '{"input": ["Read the file instructions_3.txt"], "model": "text-embedding-ada-002", + "encoding_format": "base64"}' + headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '111' + Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/embeddings + response: + body: + string: !!binary | + H4sIAAAAAAAAA1SaSw+zurKm5/tXLK0pvRUggM2acQvhahPItdVqASEESEK42ICPzn9vke/odPck + 
A2KFqFyuet63/B//+uuvv9usLvLx73/++vtVDePf/2N9dk/H9O9//vqf//rrr7/++o/f5/+3snhn + xf1efcrf8t+X1edezH//8xf/30/+76J//vp7jhadBheQmvNGyEW4TY8vusvLpScB4yaY3u5vuvNq + 6E/+436Gt3ra0d3uZrF5Ixx5qL/GkFrlaQHjcdwZMLl5DdY7RzDnLcAtvKqGj7V+n/as4/o3kHTr + gQ/ibePT0rpxsJjAA/UqN4MxFegCbjF1qenCrB816dlBVxVavI/ExV+ubijKp+F0pLvZOPYMyTsO + LHbOIYEjMZhfz28Lr+dYpI55gslydt0zNBa3xNpRG81xIJGnUhYhqm2/z3oJJWuBF1esqR75AiPP + 4SVB5dGecLHVn/6YR6MH78ltwK56mZMZzN4RBMwR8D4Qvz6zDhdFGZvhglS77/rZSvpSYe2rw4F5 + u/qvnHNL5dkJDTXHl5wMya2MoKTbD7Qpug+bjreuBOFN8ugaL5/2pxTCz2Pao+rrSj07vIkEwO6S + YNe9u3VnRMcCPJjxoO41eWQs6qQrhLFY4VCPrIzuTM2Cr6yuMeoeB39qmGXAzjIg1oZKMtmX+x4B + HN0ITfGL9iPJZAvIYRlSI/hYNZ9jEgCNQEp46j+yOYp3OThtyYYaReKDcU9qW2WxLBDlILNkqt3Z + g96Ga/H+tO981g+aBvP74OEHATP4em5FVC0EGIFGj/v25p8gTN2ioO5o7zOx3tcx/OWDr12PTHDJ + p4FpV1V0Lz0k1p7uUwnAYLVkWlCTsa9yeoP94xBSv9TdWnDdMle/MZSQDMP6935DPfaxg/e29srm + 04gjOLrHgRbqtmF09lMH/PLTKW+7nt1KaMNqH/tYv217k+mHmwPv3Kxgm5NBTxSr62DvsRtGwkkF + 8+SmBsC5zhHerGjdHS5XB1wEfB6vBdeaLEXpAuJhEKj/yiljgT9a8EYsRiTl3IBpjS9MEzZj9LVy + RtZ8BnJYhVS/bX1zTmU5BrHfn/DOqDufBVXqKH0mnlB5yJ4Jk5ONA+22PqIvgB6j+ctugNd8KqqH + b92cw6aKVLJbMrw7YqVfELxWsHtZOcXTmWZsurNC0a1+JPAYaAm/BF8LFpgXcXA6k35mYRVvrnYU + E+kXn+g9dPDG7S7YT9+mKSpaIYEPZZhsrPxWM135TrAUPJOIY2EmIwKHBhpjadCbYwTZ0JRtAXfa + eCFwIEa2/XLPM7x0kokvyWD2i2efJMiJpoi2RTVl37NjXOE5THms37e8v/CvqFJTpbaxbjxIUsla + 3sHIFCPqkF3NWKnZCOb1VcNFVK2nM7djmKI9IB9PI8ksAN+B/Eu/YX25hT5/rZwJ0gWLCNJkZKRO + Ea9IO3pF030wEtIIrgTK73IlDBxQTWSrvMLDC0LsHojeL5O9Q8qbIx8kyElQb5kkTbAoFYCNrhnA + YqrMhlxPamzIvdMvNFdi2E35nhr9RjeXi+u/4Zc7VtjrxaUehAi84SZfPoTjfVwvLpOMP/uxp5ab + zc+beYUmaTocnOcKMGfqA5DwJcZooyfmW5PbBkb19kq9KjOzaRbkAWRv7oBD1MOelCfxDVgMBGo6 + S5ONX7y3AAH3KwG5XtaLGRVEMd6zgHf0Q8yBCRyCVzuOqZu5EZvCABrgYEkSxi+c19Pue/Ygaz8d + 2YJTlywJ0QeInyNPbU7O6uUmDjkc9tghIidnPe0HxwC3UKMU49r0GaqKDqz7Tw97ha+J6iiKEtTe + AW1AEDNyzMqrKiZvB8HTmdTTPtPeoHnseawf84YxsHs2agZehMynbeVPZjy8wZl3CBnikrFx0MAE + j6cmoadsd/L7/iNWsK6pgfebU5IMoXK9wvb9dKm5v9hAeBrnI1xUtEWq4AXm4tl3aRsE2YPuvkZV + f4XDUkL5si2J6Aqv+rvTPQQfl46nmiY72bzfRQhK8HEgcNu8wGRrYQXGLg0obnYkGU9zZsDRPTtk + cc5DzdzpVcBUUEK8fwxFP6SScobxMN5QJfdtvYgwOMtzNOn0fr1k5gzMkZO33cWlxoOU5jSMtwBI + mllj3ZvsjDaPQwmjTWhgW5Hjnk13UMiGWr5xci6qemHehYAq5zQcpHPPxvyTBnCtl1ib1SJhfLWD + ED8pT4Q1XoMZDw14PUMVI39vJPOlmAd1Sg5n7Dfxs56eV3sALMQZYUm7Z6LPYAlLkh2QyonPbPo0 + C4Qbn/YUHZnGhF3yWuB5zCDidsPLfF8eUgshkwx6tNTQX6oiOkPhkDc0JMc5odMd5ABfbjtsuLML + 5jBUIAwksSOcdviwmTW35ldvkLyf7/6UVfsKtL6nYO2rmsmYy8IE3W/g03O/iGDIVeMNbwZ54eB2 + vtSDbFSlau/vPNXYNa6n+eGlMPwqBKlneOxZun214EOXHr1OHWFsqfRCreUsQ6q24bIRhWrw618I + PHMbLB5bIrh9DRHdlfOzbnFNRWh8xBCdSXvoh9/vF7tqQJNx3NXTeW/mUFOOKY7H9FOTTT0FKg7M + FFtvhUtmJY0mkD68LbWI3voTH8lnuJ2MDeo49ZLRZ+bG4NZeOeoZC6kXQ51EmAc9wqY0BP6w1NFV + PYJ4QBvpEyXz2xN5GFdpT3cxz/vz7/+zzTlDnLq12OTO8Ap/PIWA3pnsHtcGzE7yCXvJ8VMvL1+O + YXJnd8Itk2suxkevwLoeafW+rYcmP3O/9YgvlCBhVWaWkLNURAMlLftpejiefHxId3zPwrofdJQf + AdgWCVrc+cvojwcvnfL58YrJtu8C/eGx/WPg+vFhHHioNa1OM5lzGE2Oex5ybzMm0nmMzLnmNaKG + N8UjQBhvbJbNZw69LmI4R2XHOutY5Su/Gut5RNl0mo42cOWA0tNsHOtpCZ4WXPeXAL72+3lPegvK + fLzH+/vVMIkADQvE2Vem+xYRsMBFj0AhNhPNN4LNlu231yB4pwwbS1OwwTpLZ5BfjRPdXUu3XqAj + tXDJ/Qua9ZPmL0eLHeFarzBeQMoWKRYgZHL3Jkr9UurJd4gGMg1BchC7lz+2L1mBa7ywC2DCaMCr + gzJOtkH3trZLWIgnEa71khoGsE3eiEsbcmn/obZQCMkcNKUGnkApCfAfn+zbwm+gVFB10Ni0uT99 + QGuA+nq6YH/ls3laBgXaTNnQ335MYDJTyBIuJD++WjDiUtgOAYcUJ+R8NvPdBKXTCPF+a2UZc2+i + p5pNVCIZwATQaznz8OmyEs2U17P3feMUMDELH2vG8dXT72cx1NIZJHzeNef+93vKUwx6bKc2rteO + fobmVRGILNhyQl6+HIFFm0e8Z1acLOaxROruUww44NJvv4RolsA77GokKHeFUYZbD17QI0dbR9R8 + gRPSRrbv7pc6pbOpafO4VQpuCx2v/TFZ4gR1SgHCLw40fqhXvpeg5sZnJNh14tPKKnIgJ4GJj+lX + 
TUa16GOZ5s8cceGw9df+swD/aR6o2/clWzymRIoDp5x8t/DJpvf2GEM6NleaXZUNoK9AQnBy4Aeb + cBDMUZNqSVl5mdoytbI1fzr4FFFPzUs7sUHu1RSiU/Kkgd9F/hTW/RlG1/tItg+16adSP3lgLxo1 + deu0Zf3aDyAjyQPRbfBly0EreYi01qLmySTZzAnpG1q3zZ0GK5+Jm1CTgOigO7ak7ZGNTBQc1X8P + FY75qM/GNHh4IK9TjQhdMIE1/gH43hODHKm/ydhRJxXMYGdj5IkwmT/PtwOsXrvhfNvsAJ+9bwY8 + RFGNL56Gslb60BS8L2qMzYqavXD/5AWM1eVENp6zA9RsvQHkrSGTJU0qRl57OkFfHG2q9deyHg/n + A4KpOYfYcfUZMKsNINSf9pHizd1j88lxPBCi6oEdcSeAKXsfNLj2CwRf0qeehvGAoPU6OUS+Jo+E + Pq0cQVVpH3jvaShZ2vckgs1npxD6Ko1+uquBCG93M6HhLgqzQQfTAIIx32Djnah/+ENxsgFihPfH + bNbwwYCbz14hW9Pz+snNdpb6UE8htoNeS/jDKY5U+SKUqAGjnVFFP4lAHp8n6oS6AubvCzrKlztX + 2Hg3JhC+sC4gVd8QbaeHzcTgmSFALPqgeBu4gFXjx4LbF4mwvfa/KZVgA87325UmKOzqQfiULdwq + lxHVKx9Mb6RflffXWCh6Wh1bPO/bwUVjI5o2ilGLcHEjsL6PtDz49NTi+BjOu+sZKfolzRZZO7YA + 6rOB3S18gu/DuPGKmDQO3vncLfvpcVB47gt7TmGZQtCUhmoERUWdQ+wyJiZSCj6eFaPJpz2bxE8l + KSUtQsR6renHXXe3oeLIDdZPb2bSG4oq9VmWInXE3QmwU/58wwg5HC2OlcLGY/osYKo8bRp4pQ62 + 6XZsAT7QCBu7uWQDSRfnD29LFTebbD+VUEXv4/1XD3p6zmsRsm9r4Pz+sXsxFd8D3IzinobhE2Xz + o2+GH8/QlUfBJHzKTl39D6wThzOZdXqmAMdTQFE4XMwlkLxSSafgSlha+PXUbaZIwct8RMu7qcGg + +toCS+4m4PU8JrV+OHi//EeSvzUzVs1TA/Lgi6j/3jb+koYJD2SuOeBLb5XZ3NzvisKdNYJtzZ2y + pTgtNkwFKaTGFFUJHU+XAm4G5pNfPWTJ9XYE18/5jP3NDfuzszsOkHi3Iw3f5pQxhSxI1b0jR/1n + /gbsHo8RxJdsR+bwBHz65b5nuD12EzWHzPC3S+UW4KFzGfWoCGpSbAII1/NMrfR7X/UGtmCHLh11 + 3UDpR1h8DWjuBZl6Wjj48+qPgP4TaNTGUQVWfyRWsU4GjMikmUuijccfH6z6V0+EFx8iqEcaT90T + 6XrS7F8l7Nwh/KOXWDVLDbyqmk8Pif7JBjo+IFj7EfaEqvUX+yoVUKzfA/YhX/fLa/9ZwKuKTzSo + jQAsrdp6QKURT93wvtRTxHUKKNHVRYNXPhkLZT2HihX1ZFkajvXGIbvCU6wZNHUKy5+tpC4Vt/Ux + GRbX6afTXSrhrU25Vf/U5vBpFAjOpDgid9sDQNDri2A7SzwBe0QT+lEONmwiL1z772zSEEsi0MXJ + xbddMPeM215zwJfXkh7zU8n+6K32XL6Qn75rc75/jvkvXzF+lVU/F32Vq0frecF/+r3UMwfsG9XE + ubQJkoYPxgb4b1JhTUFiRvRKXaCy+d6xeS6M/rPwLw22eZAiYeWZHu7eFrhVdPzxZbZ19tFZvTwq + SIPy/swm2zrEcPVf1ngOYOabTIQuJ2k4P88Gm388o96abI2/WYuzlwTyyr8Uj6W6nveTBcMjkgnn + 741su7/JLYi+1RcpMtPM6f6+l9DCLaHOZVtklGMBgQknqtTU+7Sfniuj4cMYYUtTLX9SfWcBf3go + h13SPcxogZyoi4QdzFcyp0Euwdo5M6QM+yd4XQrAgWKIdDQrHTEHpd+2kPeITd3I4JIf3/z0xeq3 + NdkoHvgJZDtlxoj3cc8Y/4nB2bRtAj+GwJiIiQSm8FMSnouBOQT101Z50ZLRNj7ghAf2VMLpeI6x + teizOUetUsHjQ7nTP/ms5qcUpkfepXp4yvztygtgUYMtEudiSL5wthoowfuBrPHxt2kpnmHwYYTq + ptLXLDq6FnQ5RSOCQkpAY70zYB50HfbIEGbbaynzUFjKFoer/llaueig80wXcqbLhg3f+NaBU6W9 + sa9yKZilTXKFOsiMlU+ajNhHApWIogI7YK7MlWRzpXvZOUVUuWRUvjotsB59jNRr+1z56VzA11np + iGoKGCzKBSHIam6mtivs+mXUeQjjvZHTnXg/sG4jZgtwLraJ9/wDs0VdwgXAyz3DoZ1m9aCXGwn0 + 0R2H4O4j9vn1v+L9Dmho9M9+Ifo2gOfG87Bd2Nd+tknLw/Zdu2SxznW/fCK3Asv5UdGdQjTA07km + v/6ODWC/2FJNTALaHTCixrDvp0fCFQDsTgk5gvGdLcTWCFjPH/35Q6t+W+DBUiRqZPeDrxjRMYct + DCu86z+7lbdL7df/qb3IWs0PltPA9qlE2HtnQzZ0XzOAZbPfYzOTHTBHcZiDGewSJDYQJrPQnG0o + s9Bf92NmrM7CCa68i3W46/uvAWUJnobLkbqCfcvGp1EcQbt7fKl7uGc9Lc6zBimLEbWov0mGLi6c + n19OFE7dJjR/oQZGyONoGF20P/UE3IXSoemqB0e1qGOgFJ8b/un1RRMuFjDJuyPjGfI9MT5uCZ4i + xnTPrGX1D2QCc+NcYqv3x4zpNU9AVAtXqqtly1Y/MofSULhrvzKyPzy8bJ9vuodTBJbueFugaQY5 + NgVxziY6Xjg4eyamJp/xZjcczxGUY7+n1neSzYV7VAE8wYtEV7/fX7x7iMDdtVXs7HQHsND/nH/6 + nWyp5SaTbHSl4nsHQL3U8xPBL1MD3IXKQcxZrGSYnMQGi87v6Hkysc/A7vsGMRxVMqDSY1twxSlA + Uz/R/evKr/ORzILHPnKwFX/9bAoCeYE2F450RwIZrH4OBMMgP8n++zX9LeM/EZzxUOJkfE3JNJwb + D85inuJ7Ze4T/guaCgT3PKPZK8dsxs17gtJ7krBLIqEnZ2vDw63VBTSUPc5n9C4juNYX7DvfExhU + WzVkspsy6j8mJZne5cBB/Mm+BPLknvFTyrXKbuEv2H1tjrWwqeQINC/+jZE4AZ+AUS1h1y0H1DKc + mT/eh5dTgKi9tGnPPnWjQXb92NR7atSkr0cy/fz09TyVgFzFKoBawfk0vC04+1OPDunnQO1LdGRD + xHUS7B7pm4bHy9dcDol1hWBeCqIOMm/+mSf9/Oq5r91s+MaHVln9CxzwmzqbvYWdYVT4FZokdvOX + 7q0eQXgVFmq83Hc93CrfgPsj90D1kFX+cnsInmrK3Amvfoc/V/dbCjW+fRG57zVGTgQhWPTh7r/8 + 
kHoWLGio1RvbODLYMKViB3kwedRb/ZhO+o4RkIbcpXo+7pig4ZsGNb53SbwrFfCyuliExnk+IH6b + XxIy6hCCjX5V8ZXsajDtzkoDzlwu/ZnX/PwctTjVERG+hlEzx7RFEJru7TfPyparWCGw8hOSvPxt + EtdOWvAdD2+0Xf3ERa/USV7nBXTXBRGbaqZP6vV8SJG41j92UM9I/ekz/5dPzi4foI83E13nN+bw + 0FMFHidYkSb+9pmAYz8AODc5UuYH4rOEOi28h1RE4hU+s0H1nQnWxreihiKIYHxLGYL3Y27h0577 + ZEuVWm/gbPc+WaTomM2nOdNgp0wCalZ9uXDioYITf0kRiE+VP+3HxlG/Un/ApmlvAZmfTgSqHGoU + v0q/FhPiEniu9wtREaz6dZ6CoG2IALXxuDeZe+M86A/phfBc/fQpuOIr2JdHD2faVQWvm/CqQBxf + 9/TSIgRY/r3aEMB9gLbbk56sfm4Ano+HTPiE3eqFE28l9DawxenlONUDsUIbxgO9UYvlKBnvnL/A + z8QH1IWh6W/V/HSF5ZXm1PlgzRScXJ/A437eYe162JmsER0eiKESo2WtdzNM75Hc97xM8bc89tvv + IeCg0hwY/dVzNhpMAfXGHrBjvo2Myh0pIGzLC37stmcwaS/9CutgM2A7i+7szzx0rXcIeFZnTmbc + HSEjMfjNE+u2Pz8VUH/ThGxeOO9nq0t5eIInCWuuV2Zz31nXX3yRcCy1mhyJyYHr9qz8mX/+4e11 + 3kL3SNtmcw8p90cfaW2F/T/+4VofaWZbT3N5mvfox7cUS58poxuGIPzyxhXv13kPQ3IIYXp7GKiU + LcffrvpJHet+g/E8Dz2p0qCB2Xxo1v5PQduUZa4m9SMhYmFL9dSwwIDC8+GgWdoZPb16wQKb0ptX + PfD9w2fKb74ZODzXD6vTDn2sTmijGE1P3cKV1LAJ76jbtd+kHgYjBoZ/e1ONQ5tk2mdrf87ROiaK + F3/5yKkDhkC5/fwR8Mefkw57k+qOb/78wRgcJ66ie0fUTIE1t7fSpAUgFNemKaRR2UGtfn3xXqdi + 8mf+svpL2DvSlzlrUq1sV78CcRs98dl5Jy4gdASEjQfRfIZkk8CpVSkNEuCYlCufHtxUd50if98n + c7A1G+j0Y0mqs+Gx6d59Y7jqMyT0540/KcYwgcel5Ymw+tvzTjeQ+vfvVsB//uuvv/7X74bBu70X + r/ViwFjM47//+6rAv9N7+m+eF/9NxT83EciQlsXf//zXJYS/v337/o7/e2yb4jP8/c9f4M9tg7/H + dkxf/8/jf63v+s9//R8AAAD//wMAqjA5m+EgAAA= + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cache-Control: + - private + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfc9c1fd9513d1-ORD + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 18:19:41 GMT + Function-Execution-Id: + - pbme8b7quow8 + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '22' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - cc893c79eb41c92650396b5ee03be018 + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: + - '3000' + X-Ratelimit-Remaining-Requests: + - '2999' + X-Ratelimit-Reset-Requests: + - 20ms + X-Request-Id: + - 6b8a366fd1c0a7253e3a77e7fe91d0dd + status: + code: 200 + message: OK +- request: + body: '{"input": "an instruction to read a file named \"instructions_3.txt\".", + "model": "text-embedding-ada-002", "encoding_format": "base64"}' + headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '136' + Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/embeddings + response: + body: + string: !!binary | + H4sIAAAAAAAAA1R6XQ+ySrPl/fsrdvYt80ZEpYt9x5fIl90CijqZTABRAREFuoE+Of/9BJ/JmZkb + L5AEqOpatdaq+o9//fXX301a5ln/9z9//f0quv7v/zFfuyV98vc/f/3Pf/31119//cfv9/+7M6/T + /HYr3o/f7b8/i/ctH//+5y/xv6/835v++evv52N1Y6aULngfEKWAS0GPVIyNoOTLIRchxdrIdH3a + tVL2qgWIjeON+dqClvydeNk6zvcF0xV6abnbigXoS1fHQFQtGrv1zgRb4G9CDK1IpxXa66gyVi3Z + XQtcjli7CNCtq4DZlvZB0zsXY6S6d4P5XuC0g2iWnZx0kBNj+N68SQtGFZ2+9ZW5r6uKpOHxkmTR + tFWyD18EDecdOiIJJy7br9+6sSw6jAE9lyXxhT02vuXxKSlVOLrMu62bcii7Xgfm6B/mv3zK6bN9 + X8B3yw3ZjR/bm0JLruGxLEs8vd4LYxBeKpa37CIQEn27lMM96FBg1gyfcHFNp+LCZfT4fm9kf8b7 + 
aFLoFdDjYr3plGVVyQSr09E5wHu8cepLND6kJEO4B4dK3eR5X+RdB2g1yAnePsV0CteBCdMlDYnR + 2arBTeEzIeKlW+Y+BiMVvWNjQ1xdB6Ll0QXx5jReIBf1L3Hd2zLtX/0mh5N65mTXWBWfNMrXAPz0 + xRwzL52kSB7g9/zFMXKN1Vj7R5RU+z3beuIb8dHRMlj3k8Z2U7nnzJMMWbmzuKRD8ymN8X5+5xB/ + ii/ZY2nt8V07Juj3/lnOXiVtfUNQEu52WLmCwL+/+CgBbph64ULKZQhyyINcxGG/xeXSG00fnuvH + nlmRjY1eO7w7eOkjMD0/DN60bq4PeWnRhm0Z2vNlIlamcjITHy+rW96OhTuGiqOdcoI3Z6EdHWcD + UFEzYPe3badDcx8BjdjZEK8xnXaiPrHgFUsnYi+SVTTsZFOFVZHbeNP3eco/2mmC16K+kD2rmceH + 4VKj8oAL5m1HYgz3vbSG4TnesbwZi3JqPkYHYrP6YqXEGz58d/4Bzk2OsOAcOm9CkmBCnvhbclO1 + K+dlxyXQ2PvJ/IO4N8ZDU2IIP2xPPCUevekxfip0lJ0XLm7xN+IqCA9IcnVBrI/BI2auWYVI8I6Y + fXk9jHHEga1QS1sQK7kr3gTatoDs/lwzcoy+3qjadoaMNCYUdsXIxz6WbXTR8AIrZ9yng9UW9aII + DgG9wPGWfrMXFcA8OTXR2NErJe+2keVH2gkUHVZLPrjuYVD0fXZnZuC0/LufDg3ASabMXy4vvFsd + 1wmcFJtQvvUbjy/4OgdEiUfc1/WBBqSe1/BtpwPefLc7Y/pUmwady8Egp5vF2kFyn7oib3Z3PNlm + gar7XpKhfN8ObO/u5XKKXw0F12/PZFuiZzodSE7BMh8tMyyr50NafzKob8aR0moBaPQv8QPsSdTI + 7r4N+AjHswjyRfcJoZh5NNbb7+bh8zudz1M5jOajAhQLN/IHn+SupohBO2FoNJUvg0mu4OPFX6Iv + 5A6NZ8PNQVjLQPb5C1ImOmYN5ES+dCz0oaQI1iGE/aki28V3Mij9yhaQT2hg4RhPvK+vJwkmnWxp + 2GSjN4lZvIZ1mMt4PO8nrz8pewsVqzwnqhU3ZT+1RxXi3tOYlwk6+tUHfNnlTSx4knQ0bKNGdStw + pltO3E6acotlcF81c76Ljnef62qCtDlciHsNIz68yCsED/SM7a2XEY3lNaSw4zdK1KtYlQO3lxME + +1hl5nM6cY7ffYXIe3xQFl5ufHzd2BoqagXMOkmLtqP68ojYGWy27ZYRmo7ebgKqsCcj35VqTGn7 + ceX7+8OpLMHQ9vX1JsLu2jp047uFMa4KpMuqtMqJN8ZVOfavqFLG612j0+G0agd/LHW4rlGER39w + jPGrpxdw2spkMVoH7cc0NgU8jvmBuJfdCzFz/a5A+Cg3Zsz4tzwGzQC6Zq0o65KyZa8jttHSMAOW + zfnl4XHpgykHIVPjrc/7UjrVwJkesS3/vtrPOShl9BFNyub+4o3o4H7lV5J3bH9Knvzt2ascqGUs + 6Opa0HKIlGYCgVlnou77VUmnpV+DH9gO2V/1jTEKdD0BrC4GC/L2WQ7n+Dih56embO9hlXPPXuRy + b9zPxH3v/XRMBZ7B51mssTTne8iNdwErb7Um/qYayvH5FCQUg/kgPlGf0Xt7Tpo/+dwi7ViOIW0e + 8hl/41//Mab+UXRwl8kHI8i8iJvCc1CyS+ASTIRbNH3igw+768fBqDGdUhTfzQDZ2//gZfKp01Fn + IUVdrabsRHd6JA3GVwASvCJi7y9rRCenOSBqZRoLtcfG6yAZJLjVtoyVsi7KQVufGvRQ/IiYB2J5 + 0xOtAfpun2B+1OpyuBifA3TFxSREqqt0PD1cXw4TTom9cvR2fE9WAzi9RCzd24L3FffLHJRbdSAz + PynH6qQ2Sia5NlNX7FOyW7vPwYpyl+DUq1q+azcXuSmqNxYOb2TM8c2V7mwCMUJLjppkkwGsuUCw + dD0Sj0qD0qAl3xqMXKV9NO7rhQA9cjd0+bHA6Hayr0NMui0eDsWn/eIuzwBOa0oy92zw8bq7N3AQ + kichisKjsZRuFTJershUaVmiYYmRKRedF9NIPj54ZvmrGmr3llP0fCw4z9i5gAUIO6KFkZ8OqMwA + AlW44POJx7zbgN5AVmwZs0RH9oYDUh7II41GDE/eRK/65A2wM3Jgqs8DPvnSt0Ots/fJXB/R5Dlx + CGa122Lh0yQR3UyNjEYe1LgX127Em9MmgbxhBS71c2zwrb3xUWTtGqrEx3c71eNbRPp6uJLD04tK + RtdiB9Wz+xBcq4bB/ciSkCz4VyzxRxbx+L6jf/DD/tqEs2+Cq8063gXM+6yXBuPC0gZq5RreJHHB + Jywv5F9+mRpkLPr1P0jiISB5Q8WS1pbxQLR1WmZvVqLxwSOJ0ca1M3bdCk9jyl41wEa/dUzNb4XH + 78XnCJd9/caLAuttJ+ieLaenrqDCku3SYXk7FmjdfHYs6qEt2cV2bfT02ICXbTikDZezEAVO9Gbe + 3s4NTk+rBqbM+NCqNKuUsQfR5V0+GBhhp4p42SERtppgUHHm39OXOTraxfqEN96zSlux3uXwBLfC + 08yPaK/0tTzzL7aPj++y115ejM7XMmNWr2yMwRBCkEvKBObEb88QvdKp0YyvzKe5w4e6yS2kbSSb + wpeF3nS8lxfA6tYn1t1O2nGRnFxwFctj5vWpGQO47xBer9PEfCVqUn5WZAoGpAWe/F5Nl9dv1sFL + DAJCwHkjHpbxV0l106OLeli0tHifjrAVjjcqf8E0+A8PRaMXiF22ddvZy1FQtMuRk3uXGO2YE9UH + pgYXgsWlVvJ2l1qA7/qXjg46G0y98wakY30hmpRMEY1PdxeS1aoiBoyux5OsWaM5vsREphJ1wqGq + Ye5/VF4cX2gcls1Bzpu+YK5TsnKY8Qcl3s0hekCUaEjrZw61tNrgcmzLiErb4AhITK8/vhz1bi2K + EOekIG7iX8speV4H5IGazfmhM5/cWWjzfl/YVhj8aAymqYJz4O+Jcz1BOe7qDYVXLJ5YvEybtlso + Gx0JGkloT9OnMb71w0H+5dPj7sGbStXC0JbayParOi3751OSQEpXFrNf9zCiyfM6QdRFS0yJoKSd + 1C4FeYOXR4YFFTyaMITh5T33ZK/pD2P0imIAWLCQlmr5bXmd3wpww+WLbRXJaiW2W+qglFNOjE9E + jX6xezbK6tNJJPCHj9f3el6gxU5KsXg/de0QWD78zj8t23uRDu9TUSvZqGFiLnqZz/xFluZ+QfL5 + fQbtbFMk4YtLrqiReBes2hyFubQjW7rT09UxkQuY9SQeUK6lvFipJmSamtFPxy8tPVJfhMvl3lPw + gk/JygWLkbzZ3onxqqT0V2+oyYEwX0SRMRpxr8qa7tbEOikPb4pF14ZZv1HJ1N+z/nvp4O6EO+XN + 
tYlYfrkK4NR3l/gzPk/NqnfRRuEvOi1kHw0qvCk4EToyVX8PvA+9MoP9wUrwRjVOxqwnVbQA2P3w + LOULPmTA0vrO3HD5LWt9YxwU69utie0MWrns7EOndOJzTfy5Hrv1iA5yoR8npom1hfgpfmQy3JyM + mK9n305JOMSox0POtFg3jeWsv9Ccf7ary2c6JgFTwTVje8bvNh1hK13AOT9bFrV3PRpCTZbl9kuf + NDS6fdTlRfJAh+03Z/45iI1pfR0shSzyAxa6Wxfxjr5itFo3Jyy9OjOiw+IiwfLNEQ28/av9o3/b + zVclhnxUeR/vsgIVAvPxMIpKyzrDUdEtlz3MHpFUDqXzFkD/SDJTaxpzrtKTBe932NOVEWvGxIvA + RK+ApVTAnKWUL1IBNnh1xNKsB6czW1uAYrixneb6iP/wdDoOiJ1iGZfNfS/I6ORsTabl0RqNeNwd + ZXYWbOKlhtS2x9XyApPyEPBQnt98murSVv70PzOFlhmDGit7O/2QX/3MeKzCL94Oa9y0E6OtDV9D + 3bNw/669EXlDreyaqiG+KarlSheTC1qgW0VHdwmcH/qvjCx/JeP15fXwupnPbYL9Uf2Dt5yUUQFQ + iXtyXvle2tJdjtGtvzaUqUvqffjl8YCTmAjMf95W3lgRnKH2nl6Ji/29x3dyKEHuhTXxL17mcYk4 + MljPxRvTwzs1hpdmSMiAa8HsEIHBnw76yva72PzRy5P2CDOQnOhA35ez2kpOnetAL68lc+b+M1V9 + 0sBgxiXbBpbhDdf9+gtHFt6ZnmVmK/br1xoN5rFkrpCoBg+u5ADKrT6wGU/TP/Ux83NGPq+gndaX + nQD9bksZOWtpyi2hWyP3Y0zMthAph/PmKgEbh54Q9PgaL99RLDkKkoEqxVE0Zv4XAh4Klflhc2n5 + DeuVPAydgBfWJUjHOF3bIDDzzALtW6RsceAXJLS5iseFsmynKt9gmAK7wqhMPcT3ahqjOHl7bNY/ + qNeThYU8CRV0g5qYj62ghUDlrUs8yNponNpMle3jwsCT5cRld1zVFYRk3bDAQSuvyoZBQsX1/cBC + WLy9aXHqws3M75n+iWNOhfXnCEKqCXSTHpqWpnZngTRpR2aFuIlmvlL89CbbpbFsdKOYg+zsmUV+ + +RiPVRbKPbI37MZONZ/xSwUi0QVutrvK+Ag581HN3i6Z9QoajVZtlEnfb8l7I2eo+wBIiAanHTnb + T4T6T6qZ8ExaQnZaevJGp7VkqOrrkzj54pEONWw62DyOX7pSPscf3zii00UtCc6o7S3fE/7Cgx5E + dpsUweutyyGExWq5Z0bgi+2ESVpDwXhP8HUTG9yTPBmutvwkeLQcLrb65MMZNzELcCW29I0WPmpU + dWS7p2yg5ZN4R7RdvnyiBhmJqJjla6gruqXN9Nga3Su3KNiXd8KcOibptN3gRJ7xi9h2zr2mmV45 + fPcoZH6DrJZX6BSC6YYPKl9DzkfTGB+K9pU/eOW9l6i/mYMEl8utx2W56w2+21sXuLRayayDLhrD + oT9acCNCh1cbOeOTuF9mwG3JIlut2Rmf+r0IEV1gc+bTI5+0YKPCZhHadPF5BeV41O4YLtnDYm7E + DtHKjywR0eyLZz/qU44/vfrzL539afA+3XpnwY2/JFzP/UT8gCgBXfgm213Oj7Iv32BvZn+AhubJ + 85Zq/JGR+0pTRuphUU45fl4gnwQHawk4fDoGmgDtplHJJf5yNMz6HBJ0R5j3+gvR1JdcCKIC0+F7 + w2VXe6m/mvXhzz9EXU0vB1Au9/jnxxksXQYU3komE6IusTGU17FAv3o3qu+h/abpuZJnvCZqGC+N + T3J+YnhGyROjY92X3XU/fJWfH2BKxSMdVtI2RrN/x7C/KDnL2Pkhj0LdE3xZHFJ6C3kGmdi+ME/Y + ineb67ZC5yl40HaOX78quApJ8Dkz85vylG6/FUDoHwLiaKey5cvn8gK25+aYzX4Yz52hAO8kEean + Mm/H8ek3yMiFmmi7vOd8M+kWpLrlMVsPJ2/ixdVCc/8geFJZO3XaisKh8Y9UeTyasvnxg/n8MGy+ + cbtpeqgQlTJOyDZvW2rV4VpRjuKCWZmjGavTy1lDsx5E4j3qxOt00agAi+6ZbAOr9AblkxxkbXIu + dKr6TcRPcZND4VkGMTb6FPGfHqq2MZ79zrxt8mhj/vw09vOfupvz7FB0Gy7sfGwaxK7smCDyORjM + 6LRF2ev+rYHsXq7p5SjZEQ9W00HBcXBlhvGp2vat2A/A6s5nu5KS6M/5ifmZEI8ojHOJaPLv/FA0 + mJLBiu2zRtuLe6Cjt3+Vw1PahIBt6U/+vcnr0hgVXNoxg3xGYzqUU4yAdI+5n1QlS7JGBpPrFcO7 + 3m7Hu4N1SNnTInqUBu0Q9mcMZz2kzBkzyeuN1m6giVqbabO/xs206tDsB7LZX/O4ekdfdBylmOj1 + pi/bd+LlMBqhSHB/Do2fPyJfHOFAp0G0ouVc/5uieSwwPy1Dg7Ggr+S3kzJGztfKo+bjVsld0ke0 + KUaVi5Gvh+AHrsP08Omh4XNdDfBMPgQv9tIDdfZyI8C0kd9se2Vqu2x6sQI5/ZZ0/LbUm+hXNsFb + NJzE1yMzBjZoE3RE3JPbM8GRSMVIRGrAJjb7IcZoZ332w1tiRqeH9/HzQwdNgiSKvpOMeE3AQk2y + kQh5SRvev1zxgnaPzZouyutQDns0JdDYg85cSf8i0VfNB5rnIWR/X7l8OfsdSMgeT2Js4GL06ZCo + kFqjQZ/BtS65M1gVrAblRRUNXrzPn6hDquGffvOhkhcr24Tr+SowbaGcWlaSASASli3Tz+4nZeFR + 8SGPvY5pG2JGP76A1quqZtrn8TD66DSJoC1HNvuTPp9+86bwpZ+o8IjvaTX7ySi/iRnBXqam06or + a4TMy/sP3534ID9+9cn2u7tj/PQIHAt5hfMiaNNpsdMlparT5x+/lmvrKocVhB2d+WrZLdXgq/z8 + AWzHdTRUTQiwfiZX8tNz3VVhOgTfCei0KlblVxqWDWiBuWVq8zEMyXIOOkplEZMdWtpozJ+IgjA2 + mNgtoKgLD8cjmJ0Yk9tkXFKe2tsDJPEUEG/F9+3oX/ICjJicqHDPrlHvX+IC4tc6Yvr3AO2sV0XY + xerEtkquoP6Xf+tsPfGEApL+6hf1YSIxe+UU5ZS2T/unL7CQ0cabonKZKz+/UT8KJ4PzW+NDMZ0r + hte3NaJSJE/gHA4K5XqTpysFjhW6relAJzyJ3uQ3zgFEgwn4fDk/2g/yhgqCYeH+wdsOH9sQrChz + SZDd+3TaLYwOynb7xmzme1Nwbw4wPSyD6bP+mPKbLoE9XRdYCdqvN2IyXZRe4AeiT5vE40wsGvTN + pJFOyUYzxP07nuC+X2G6Ru29bbTX14XVsHjRVIlHo4/TtQtO1ryIpUYMvaqyMGG5V2LiyHQ0pnOw + 
tcBIj4RdA3OHhg24XzT7O1jkPY34xdZd2IVJileXs1qOVvY9wipPKTGOyI+G9YhCsPG0ZH5tHWc9 + 54W/eShdhsHoDaMYA8zzix8fMobgSBN5eT3kxO6UJx/mfgWnGnTmHeu+7Y5x0MDoPgsc5/iIVoHb + SzDHm+n94phOL/dU/+ZvZHsPS04/1eYLm0l+EH2Xbr0+VJzZjwRC0v7t86EVDx3qzKdFy7k/TJtt + loEcLTjm0svxBkd1ErTXXEShr0xvpOwLf+Kz3Xnf9I+eR6+bTIxbpJWdISTwm7cS5zefCaC04ed/ + +VtapwN7PmJlxs/Zn36gQbMWLmjCU2X3UbH43C9NVKVFytRKbiPm3lITZr5PrCpZop/fofz0I1kQ + DS2TWv8q38PdIU4S62jcNMNRkUdpJHqTjQZrqkQEvcYlVT6TUUoKvQpwHWqT6LbQlF302bpwPxoZ + wc/zth1TAWW/fkDFjITtyDZyDFEXLPE61KTokxjbSpn9ZIZnfKCxqLu/+mUaN4xoFZZxAy51Oxy9 + h287HJJHJf/w0gmEvmyCciei6/P7mvknS2nHc38D4eHMYlnwuPQYnzV4PPIwms/nwOXsgIpq3czf + ++Q//YpOfp8S02mziJ9ejoyq9JEyI/3c2nGeX6N5/kBMLXm340VYx3/wVT+aLOq/r0MOfaztmReM + qJx6ZtqAe8GhC8XoS5Y76wLSyVTYzog1b7g5nw7yCRxGmC3yGR9c5e/fVsB//uuvv/7Xb8Ogbm75 + a14M6POx//d/rwr8O7kl/xZF6d9M+rOJQLvkkf/9z/9ZQvj70zb1p//ffVPl7+7vf/5arv6sG/zd + N33y+n+v/2t+2n/+678AAAD//wMAP1WwTOMgAAA= + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cache-Control: + - private + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfc9c72cc71244-ORD + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 18:19:42 GMT + Function-Execution-Id: + - yvvldstgvsvm + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '74' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - 1dabd53de4282b519b0b347d06fba149 + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: + - '3000' + X-Ratelimit-Remaining-Requests: + - '2999' + X-Ratelimit-Reset-Requests: + - 20ms + X-Request-Id: + - 93d0b7b83f501e32a6081a99294f07ac + status: + code: 200 + message: OK +- request: + body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "system", "content": "You + are Follow-Instructions-GPT, an AI designed to read the instructions_1.txt file + using the read_file method and follow the instructions in the file.\nYour decisions + must always be made independently without seeking user assistance. Play to your + strengths as an LLM and pursue simple strategies with no legal complications.\n\nGOALS:\n\n1. + Use the command read_file to read the instructions_1.txt file\n2. Follow the + instructions in the instructions_1.txt file\n\n\nConstraints:\n1. ~4000 word + limit for short term memory. Your short term memory is short, so immediately + save important information to files.\n2. If you are unsure how you previously + did something or want to recall past events, thinking about similar events will + help you remember.\n3. No user assistance\n4. Exclusively use the commands listed + below e.g. command_name\n\nCommands:\n1. append_to_file: Append to file, args: + \"filename\": \"\", \"text\": \"\"\n2. delete_file: Delete file, + args: \"filename\": \"\"\n3. list_files: List Files in Directory, + args: \"directory\": \"\"\n4. read_file: Read a file, args: \"filename\": + \"\"\n5. write_to_file: Write to file, args: \"filename\": \"\", + \"text\": \"\"\n6. delete_agent: Delete GPT Agent, args: \"key\": \"\"\n7. + get_hyperlinks: Get hyperlinks, args: \"url\": \"\"\n8. get_text_summary: + Get text summary, args: \"url\": \"\", \"question\": \"\"\n9. + list_agents: List GPT Agents, args: () -> str\n10. message_agent: Message GPT + Agent, args: \"key\": \"\", \"message\": \"\"\n11. 
start_agent: + Start GPT Agent, args: \"name\": \"\", \"task\": \"\", + \"prompt\": \"\"\n12. task_complete: Task Complete (Shutdown), args: + \"reason\": \"\"\n\nResources:\n1. Internet access for searches and + information gathering.\n2. Long Term memory management.\n3. GPT-3.5 powered + Agents for delegation of simple tasks.\n4. File output.\n\nPerformance Evaluation:\n1. + Continuously review and analyze your actions to ensure you are performing to + the best of your abilities.\n2. Constructively self-criticize your big-picture + behavior constantly.\n3. Reflect on past decisions and strategies to refine + your approach.\n4. Every command has a cost, so be smart and efficient. Aim + to complete tasks in the least number of steps.\n5. Write all code to a file.\n\nYou + should only respond in JSON format as described below \nResponse Format: \n{\n \"thoughts\": + {\n \"text\": \"thought\",\n \"reasoning\": \"reasoning\",\n \"plan\": + \"- short bulleted\\n- list that conveys\\n- long-term plan\",\n \"criticism\": + \"constructive self-criticism\",\n \"speak\": \"thoughts summary to say + to user\"\n },\n \"command\": {\n \"name\": \"command name\",\n \"args\": + {\n \"arg name\": \"value\"\n }\n }\n} \nEnsure the response + can be parsed by Python json.loads"}, {"role": "system", "content": "The current + time and date is Tue Jan 1 00:00:00 2000"}, {"role": "system", "content": "This + reminds you of these events from your past: \nI was created"}, {"role": "user", "content": "Determine which next command to use, and respond using the format specified above:"}, {"role": "assistant", "content": "{\n \"thoughts\": {\n \"text\": \"I should start by reading the instructions_1.txt file to determine what actions @@ -1423,8 +1326,8 @@ interactions: next command to use, and respond using the format specified above:"}, {"role": "assistant", "content": "{\n \"thoughts\": {\n \"text\": \"The next instruction is to read the instructions_2.txt file. 
I should use the read_file - command to read the file.\",\n \"reasoning\": \"The instructions_2.txt - file contains the necessary information to determine the next steps.\",\n \"plan\": + command to do this.\",\n \"reasoning\": \"The instructions_2.txt file + contains additional information that is necessary to determine the next steps.\",\n \"plan\": \"- Use the read_file command to read the instructions_2.txt file\\n- Analyze the instructions to determine the next steps\",\n \"criticism\": \"I need to ensure that I am following the instructions accurately and efficiently.\",\n \"speak\": @@ -1446,7 +1349,7 @@ interactions: Connection: - keep-alive Content-Length: - - '5540' + - '5471' Content-Type: - application/json method: POST @@ -1454,15 +1357,16 @@ interactions: response: body: string: !!binary | - H4sIAAAAAAAAA4yST2/bMAzF7/sUBM92kMxJmvm2YZcUa4EB62keAlWmba0yZUg0mj/wdx8cO0uX - IEOvJB/fj9I7oMkxRV0p0XVj47v7h4eK8unT6tF++Zrcmb3kC/P47fv84/0eI3TPv0nLqJhoVzeW - xDjGCLUnJZRjOluu5qv5arH4FGHtcrKYYtlInEwWsbT+2cXTZDrDCNugSsL0gI13dSMbcS/EAdPZ - bHUX4Xn5ubFcRihOlD2XkkXSRagrZzQFTH8esKZwWuydJUxRhWCCKJYe07EQ9yccMgYAyFAq15aV - hAxTGItjg7bSFzP8UREwbQUMB/Gt7rHABBAHnlQOUtHbVtgkE9kKFMbSBNYQKtfaHNpAx8lesul7 - oF1dK87/2XMUZRi9JfGkgmPD5Rnnhh30ByrD4biLSfev4XdguHC+VkdwcZCTkK8N0zi2FQhCTbg0 - bqziwTOGp3fh3+DKMo7hMyu729PV3P+ILoC0N2K0CfVAtQYmOgIQh9b3YiWwBlVD4ax1r4bLazul - deuVkN1Bj09FYbQhFru7vD80pF5OVq/G2nd+4q0wZDhs76JT+kb1VfhY1TQY/3W6YFO+vMzs0Ohn - z/JrlBPEEWTkybjDLsLCsAnVZsgbphjENRih4Zy2mE67X92HPwAAAP//AwBqGZrbNAQAAA== + H4sIAAAAAAAAA4SST2/bMAzF7/sUBM92kMBokvnW0xBs2GF/D/MQqDJtq5EpV6LXpIG/+6A4WYoE + S698JN/vSdyjKTFH3SjRbWfTxcf3T93janm/eVE/v1Tlnyf7+cP8k/vqn1c/MEH38EhajhMT7drO + khjHmKD2pIRKzGfz5d00Wy6WWYKtK8lijnUnaTa5S6X3Dy6dZtMZJtgHVRPme+y8aztZi9sQB8xn + s8UswfPyszBfJChOlD2Xsmw5JKgbZzQFzH/tsaVwWuydJcxRhWCCKJaI6ViIY4R9wQAABUrj+rqR + UGAOx+JRoK3EYoHfGgKmrYDhIL7XEQtMAHHgSZUgDb2WwjqbyFagMpYmsILQuN6W0Ac6dMaRddRA + u7ZVXMY9pQNpTJgUmLxm8KSCY8P1GeQ/RhCjKcMBVFmaqCoLhivnW3XglUZJhGbS8Yn87mBLQr41 + PJIdMgah7oqjs4pHhBS+38rx1nsUBadwz8ruXuiq7xbRBZD2Row2oR2pVsBEBwDi0Hsa065AtVA5 + a92z4fraTmndeyVkdxDxqaqMNsRid5f5Q0dqc7J6Ntbe/s03r6LAcfuQnM7wOH11haxaGo3/OV2w + KV9fHu8oxN7z+DXKCeIAcuQpeMAhwcqwCc16PD/MMYjrMEHDJW0xnw6/h3d/AQAA//8DAGvOn0w9 + BAAA headers: Access-Control-Allow-Origin: - '*' @@ -1474,21 +1378,21 @@ interactions: Cf-Cache-Status: - DYNAMIC Cf-Ray: - - 7cbda5ab893210d0-ORD + - 7ccfc9ce0ba71101-ORD Content-Encoding: - gzip Content-Type: - application/json Date: - - Tue, 23 May 2023 13:29:39 GMT + - Thu, 25 May 2023 18:20:03 GMT Function-Execution-Id: - - f495scdawjjo + - pbmewb1rp8v5 Openai-Model: - gpt-3.5-turbo-0301 Openai-Organization: - significant-gravitas Openai-Processing-Ms: - - '19949' + - '19929' Openai-Version: - '2020-10-01' Server: @@ -1498,7 +1402,7 @@ interactions: Vary: - Accept-Encoding X-Cloud-Trace-Context: - - 0128c66c9918142a24cd829bbc2b234a;o=1 + - 1cc694f7ec27d332fccf8ee536d2fcb3 X-Powered-By: - Express X-Ratelimit-Limit-Requests: @@ -1508,13 +1412,457 @@ interactions: X-Ratelimit-Remaining-Requests: - '3499' X-Ratelimit-Remaining-Tokens: - - '86449' + - '85978' X-Ratelimit-Reset-Requests: - 17ms X-Ratelimit-Reset-Tokens: - - 2.367s + - 2.681s X-Request-Id: - - 438ac4200d6062dd394f5ffe9944af58 + - 097693547d7c7d083fa14613bab59e0f + status: + code: 200 + message: OK +- request: + body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "Write + a concise summary of the following text:\n\n\nLITERAL TEXT: \"\"\"Write the + task_id into the file output.txt Shutdown\"\"\"\n\n\nCONCISE 
SUMMARY: The text + is best summarized as"}], "temperature": 0, "max_tokens": 0}' + headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '292' + Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/chat/completions + response: + body: + string: !!binary | + H4sIAAAAAAAAA0SOTUvDQBCG7/6KZc6bkH7HvQkFRcWLB0WRst2dNmuTmbAzocWS/y4FP64P7/vw + nCFFcBAar6Hr22L1cJ3XzVd+mU8lR3lb55vn18fb9incHe/BAm8/MejPowzc9S1qYgILIaNXjOAm + y3pRzeq6WlnoOGILDva9FrNyUeiQt1xUs2oCFgbxewR3hj5z1+tG+YAk4OZzC//qPzypLSirb//I + cjpaCA2ngALu/Qwdyq8zc4vgwIskUU96KWRSpEt9ItE8BE20N8rmmJOi0QaNejlsUjSJlI03u9Si + Id9hNDxoP2ipJzWe4mVMRppBIx+phNHCLlGSZpPRCxM4EOUeLCSKeAJXjR/j1TcAAAD//wMAWEdU + 8m4BAAA= + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cache-Control: + - no-cache, must-revalidate + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfca5cfb6a117f-ORD + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 18:20:09 GMT + Function-Execution-Id: + - yvvlzdkld8xw + Openai-Model: + - gpt-3.5-turbo-0301 + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '2345' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - 935ddc3ec26e3fc83995dd88cb4b84e6;o=1 + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: + - '3500' + X-Ratelimit-Limit-Tokens: + - '90000' + X-Ratelimit-Remaining-Requests: + - '3499' + X-Ratelimit-Remaining-Tokens: + - '89455' + X-Ratelimit-Reset-Requests: + - 17ms + X-Ratelimit-Reset-Tokens: + - 362ms + X-Request-Id: + - bbe6d9f4186af1ddaf62b36bb61890e7 + status: + code: 200 + message: OK +- request: + body: '{"input": ["Write the task_id into the file output.txt Shutdown"], "model": + "text-embedding-ada-002", "encoding_format": "base64"}' + headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '130' + Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/embeddings + response: + body: + string: !!binary | + H4sIAAAAAAAAA1R6WxOyurbl+/4Vq9YrvUvklrDfEFCRS4KgiF1dXYCIoMg1AXLq/Pcu/HadPufF + KjElyczMmGOMmf/4x19//d2kVZ6Nf//rr78/5TD+/b/WZ49kTP7+11//+x9//fXXX//x+/wfI/M6 + zR+P8lv8hv9+LL+PfP77X3/x//Xk/w/6119/F5tXSPeh9AFzeAYDmNX8SYIhE/uRiWkM7wYqqO5+ + 9858zdEAsF4Saj0dkwlL5JmwmMsNkiTNMyZdMgj4Jq2NQAWfbFZVhJTR/g7Ys8mjX3YtKGFGhRc2 + wL3s5yJGGti854AipMz9tJd6F8K9G9P0Ow49IXOUwSd/2xBOi01HJPwNAcMvj9SuXlLPkr2uw5dC + GgIFcHIme/+YYIB1kZ5255DNoms2kN2JjJ23d6yWz0aNoBrfNXw0QBcum+yWQOejDNSYb3JFRKrZ + 8HxRdggi03BEUz3bIH18D2SjsT4c+ucJQveiyUjU08SZbzlB4L5DW6y/+50jpPm9A6/ok2D9uI+q + qftaHZTqd0/Ddb2UNv0AH+0mJluZ7irxPk0+5MxXSS0cJwbNP/UA46sZUvNGMJjOCclk3Q01vGOB + bizSBkOwPWsT3XtHAibwYROUx6nF561RAdLARgMT7ws4SkwCFlOJE3As3Te+y8CsBOVbcABYRow2 + n7tm8NL23gFJ7WZq0Nu7Wuj1UytfNkAajY3vTLuUXcAoTgPWVNlIJ+htCCRf5Y3x5uGzJf5MAmT1 + ssOnY+k7M4CEwOFbQuxU8AkYncoEUsR1ZFnjMfPOx4Jp01Xk00IBMHEUdUgSMcOOVAiMETe9wK5G + 
R7rX9mk6x4kcK+cT/yZSDUG6vJ5+B86DfCFy9JaqxcK7BiY0k+k+iK/Vn/1498jCtuH26WxBJ1eU + u+xQh5uZw6IijGHBig4/9McQjplxESC6XgndBYMXLp0vT6AVDhg7p9POGD1NjMBbdBfEJ5sXYG7w + UoB/ozsy46FkHVMuFjiVtYtmPOhscq9KA8p0HkkjFV/Web1cKG262eDjY2ocsuTGpD5kzcE3ywVs + 7lRQwyUoAZF3ZwZYwGoTxv0R4EMY3nrR+vBvZdsdXkSBAqim08cv4ZqPSKWXIJyut7sN3YsuU43W + DZuqk+xDJZUEfKumDBA83HXoClaG91029zN73i6wS60O8QnaVGyWtVrxyyWnOKyLlO7g3G3UzfNA + pEd/6Udq1zZkZpej5d2/jLn1Zl0eppqQdgPuKfUECULj+Wioye3rcAFH4kJDfHsIvL1vP7n3b/bb + D+pVCzUan8t1CKxdjLXzeapI0JAGdlenJePyeYcz/hxiWfCcMzWLqaqWPFJymOGhRWSgOGTZ+arD + AT1sAnuLC/tXeU+gxe4v/KirOl2eHBfBXYt8JJxDHXTAmGNV0h4LIbde7Zmpni34FK4dNa3hWLHN + JpAg8i4JaU7xPp0HGzVQRfGO3qxB6JtLZXbw3bsWaSRtNMbUCUvo6c4Bm3TpGSt2GwvkL+dGuMd4 + YCxwOgRmLSww2pRmOE9ERYBk4pG6cLDZENFbB89MTenuPIisk/vHAh3Ef+g+OpXpdPrEBUy7DqHt + pqiM6XBhzQ+fiBS0g9OXSTDAg8E9kcQ4AqiUmAIMDA3RkN7e/fI0rAAgNQ/weh76yRhiBI/E8xB5 + eYMzjoczD4LEvOB9GDvOOn8bWN9YxJaAnXD5iEIHPdOeKcLK0RD6ieMATbQjjUdPZERtrByW/PZJ + rSE/Md79TBpc8QNrn+0xZDuJTtDeLjx1PdPrf+cJuoKdUXx6i+mwwHv0wyt63+taJdxOTaaIYWJg + HF5jMBxPmqWWA+/g/UEIwslXWgRtoz9jjxc0xoeCwMNwDCCRJAqcsYgPOljxhmzcK+pnpOqZ+t4f + MdXEpgZzt1ciCOgxRz2jXdUP0ukNIYs+iFXXQ89v3/wC66FUiRAxyIauFBbgaPyZJp8gSaep2k6w + lus9PrxF0xnn6kTgNXrniHb3Z9iF75kHR9nvkJBtD44g6EoCMd+d6a54js5AgSxA3hOO1ArbXSW8 + 8bLA2DIcbCf+DOYDGwNQZFlF3V7WKno1HR2eL9KOhvf33mHhkyl/6qM+wCWlO60uYH63DKyhuQDs + XUoSDFLjRRbmoWowzXxSTreS4mOy8QGzhXOuPHIAsMkbj3RBjjAB/qIgjA5tkQ77ZuQhTfQjdSc7 + cOjJdghQdHFPTSPVqqG8DhM8Xp0M78bdno3aUvtwa+jpn++i4Mm8sj8LEO+P0egsSxAHsHKhQaNs + ezCWfdQIkBu6Az7eSM5Gbg4GWGEJ0JQ7x+niZ2H3q59U+1zdSkz2tgZMSmOMGRj7qdXlN2DcfFnP + mwOWLRoluD+wJ9keo9FYunjKVey/DHzzVDWc1b0JwZCWAdpcD0lFP4qYwBVPEH9WUcp6Y2updk5G + Ut9MM5zjUjqA48lesLEvWoeqhNMhwfOGOm5bh1MQyR1UFqjSoyqUxjQVGlTxWNtI6G9hP28Nkskq + eE5kcgrECHK4BQazleLjfhiMmZPPCdQkQ0HcpnyHTO1PLrR2IYd3OE3T2QBuBhb5uyOty0RjURst + A7I4N1TL3kU/WbJNwIdLJjTXG9izQbMioGv+DVvXU8omcuv+nU8Gb02AEniVYJXAGIemJjHGvOAC + n2Js0ZV/VS2jlwH6HDuS2meWMZfDkyjCyy/+5Kcoo3sEtrPdYP3ZeimNpnhRNKIBMt/Ui8G8GtSQ + H+QXxrLLV0tefjg4WdYF67v9i9GjXC8KcDWC9/FJTKdij5UfXuDDpw6MRTbuyo9fUpfc/XR23CYG + fX4NEN2ofT9JanxRrrTB1L5NujN/ho0L2ufmSvd5wqomBNqg5pWjY1sVzs6chzKBQHJ8au/VspoH + FJcKzl83fDsT21nyXLbg9zsE1FeaORw4Ie6gZkOHJuv4Xz2H+Ps5YSwMJpjx61wr8HD6rHhTsLk9 + UUt5b/ER4+jgO3NwXxIlNWqLelnQOSwdPQVE3zml5lkl4W8+IHPGEBu6ZrDtjfRvKL8mjeL4NvTj + WW05IMQuwZn3ttJpAMEFRu18wHqhXMJtUZiDuuIbtnpPZyPXXjToStaRuve6Dnufnifg38lA7cQ/ + swZHiQ2D5HDBzma2KiFuxw4uQyUTapuLwdj5aIHQdl0altuekX6rCUDN4gg/r/U1nZVLJ4CXNKh0 + /3JsMFfxWMIGLTE1kVqx2W2kSVn3m560WXbIZ7ON4LFAMrXcmIL1/DagyHOETQskYIKeSNR7EtR4 + Vz3FcIpA4f7JN+n0jXvm6u/yx+9JekkyNgpPQYBTj01syHexJyeKBGDkRUq1Wdyni9fPBXgeA59w + bqIbMyxUHhJfyDECs1gtZ7+wITrdTBR8eyed4iPzoeVHM2riZGRU6myk4L5+YjwBIxUKcCrAnV3i + P3yaiOXmDdxIf2Pr0fMV2xzPHVzG+oZ3HL8P+agvBcCXSoWkV/ZKB/ABE8wo/8JpDrVqSZPj9IsH + mS76kU3wKiRw328VNG+XmZHbqcnByqf+1K/luMwIZMl0oGd+ycJFWCAE+Ps9UZP77IwhuRwCOLzf + Jn3aZuAwPeMk4AMer/HGbNgErwN0XEEhChc5gFR93wEHCR963F7acOWfHbQbOtD96dtWH02vCGj3 + 8IaD52mpFndTDQrfAYGeTN1x5uR21BWRf1r0oHRGL6z/94cv7oJhDOciPmiwF+8mTib+0c/NXLyh + LLKGQM8c+3bQMQSx5BhIfolWvyT6LlAjn17JtoNDuBy4N4F3D+cYPbJn2hb+MYLlftljN64n1u+g + 3IBl/2BkpjulX5RbVUvaYPhEPT95sOLNG7S8+abe/bUAVnGvSc3iJUVFcxWNuZxeNfjxBXOfXnty + Er0CjOeWUTe8tmC4v40O2Gw+0Z0FHDAfn8CE/m3c4evkheE8p4YGyUeRsaZY0b/5EipSBztf78yG + S+pBqHq3iu7wUILZ0zYRLM8LpW58EBxSHrMa3m4gxKcNkEPKy8VB7Wr3iFGLO6OrWBeBp+U9qOVK + dj9F7FWoL/Q+4OMFyWzl84V6TzFD0su5g/l43GhyGCGBWgt3T6eItQXczlaDb23Qp5P1OnRQSuod + xlFtM0EGoQXB9VBQXciLvle6doCJ/JSIMMm6I3QaH8NT+XZpXMBDz8zdQQPr/Ih0TapwTL5mDHhH + yKjuCnHImJdEkLvWGt4XvW80d/6kQzsYTIo4/eLMm/OT+5Mfnig4jkjMSwMZxy7UHJZdtazrVTKP + 
cEQub0a6MPtcQ+OYX3/+gSF+bCjA06siZLZAD+a3yS7goYNg1Vc7sM5PV4QSj0RNd5XD7v62hOOp + e+KDeD/3y6qn4E63bHpwznL/w2PVeDxV6rTcyWHjJdFAfco8mmQvkk7VaQ7geOx4jJ/i3E8LnzTA + 3R45FEvFkQn3K8vUzzfqCLFOST/H5WRCam1abPOfLmUXLJSQe9YcUurFruZuy0vwot9iil+W1E97 + 7cXJVbkZiJCYiInbuU0gg6xFsnnmQqLfBUspklxC2+tpYMRxbj7MxO0WbYpGBX/ive73n/rdej5P + YJRRjoAVzyfxZChg1cd0tyzfcPnGMfzD73/8ctWzCXR7c4NEbr9N2eW4EZSxJGfCHSTWj2kYIDW5 + pI9fPoVLop98gNuaR6O/SZx5PNwFWB+zElWK/AWsCUNdlel0ob6OG4MpJC+gGfcBaRiHQBXSOYD5 + 146pbT2WdEr9cwYLVnZE3alzP4zfwlIDZh/IHAxjuqz6GCZh7lOjvM7GYKs3Dcz365l6/nOTzqpn + NFB8JgOp+loEk/TJAqhmSYSmyvQNsgS+D4u52BD1YwjG6ud0QJobC8FeLvpZ0/sBepqTok1Ud4xw + 1p7Afn9+0XRbuOlMD/NF/fE3d/WHxlIqfPga+4L44u0ZDvI1UiB/4md6deMCLHtUWrCKNjo9KbbI + luksTXDVr+g+3x22fZkhD5rX9kRP5jkP2+TpDT99SH1v7tNpY5cxxHpBKNrOOpjR6JnwZD+/KP/m + VkioVyvKqhcoOsQxm91hrME7sG74IHdojXdUwvr53JJJx5ZDV70H5IB6+MhfGGCTqwh/+Mt5ePXO + 4oqjJq14Sj0HSelnFC7JTz/g68vpGLWFc6ZW3M7CrzVfJvcxX2CnHRHebY2KUXObZNC5KzrGq36a + tnDi4WtsC+wR/tYvWRRwPz5JD8I9cybzcrHA6mehdsV7vrb9RZUjUtHTbkY9A8lwgdv9JiT84zn3 + tNiJNjil5R7HwWlOSbIYAtSLBJM3Ug0msn3YKS84hNQndZiyxym/wFqoIKqnRumJIRxK6FTyDmMz + 9Cvm1JkJJ9PmEPscmnC64iSHZM922PTVGEy5pfgg2GkVvm6okE6DcTcVPtxO9HAOdcbukxRAzqxK + RE6tzQT66Tiw8iscfurKYbQYdeX25lIkHoZXPzntwCuFWjVEsGeOTQ+XHiDg7DM+PevBmbR+GmD2 + Eryff9bP2438hrVUfamzOXtsYEWRg1Vv/sn3Fo37A6D25vLTz2D0T48CrucFu+Q+hQtusA0/6UUj + fPNSjPki2Au8Xt6MxhtNTJfoETSwSrgYqcnO69kYH3i42Q0l3Vnp2WEotQ4wi6eU4h0r2FqvLsB8 + jSY+RVvNINLxMMBLej8hcDyKYNkM2QTDyBVw1t8/YCiuXgDtZhyoLnFTWmteb8FBywRs3FiVsi5U + CkhyfqHn1r8b3dm8vaH0/miE8dbEplU7wCtoZALHuwIWswt1uLenLTYvqANE9c0EBBuLIjaKl5S2 + Ymcq5xZm9OYTvp+mHmZABY+J6l+Fq7ppB5Kfvv3tdzofbzCBB968Yec7uhX1xTiDEi7eK7+rwoWz + PKJcyXdG3/0wOKze7HzlMyUZPpUHAKZcY28IImNEfPvCPZ3cRVCjzzzR014v+qk91DGAZ6xjtGiC + MV+IacLIHHp0Wfmt8H5OtvqL7+76MPvtz5/MXryHT3ap9PMkAAXeUqWl5h74KXi+Zgkm/aPCTtqT + /qO4gwWpvH/h1b9l84OWb6ijoKfodPKdsRnzAa58BXvoaad8JOYR2Om2TU/lzQiFenAsiNmHYXdX + duH8coNCfZ5DjE+9RdNFLXYL/GbNFo3oaYfsZDuDYvmXmYY6Fo2pW3gLxoIT093Kt9j367mwPj9e + RGyWqZq/22sM342zIYWltaAJma3Ln/vlSo3PNa3mA50yMExvguOi3xh//KdWMDHdf51nXwNHMhW7 + XWy8C9ylWrDkliCJUgvvuIvdL8dOKcGqh+nqHwC2fQ0aBEi7//Tgqseng3qp85rwq97/ozd/fmSx + 3DRjqaqB+8WH2rfNNqU764OggC1I8bW+hh1kjIMv+z1R3Nu7ajIGH8EiyyvsfqxT//Oj1ffszVhD + Wv5vPrD4lY/kY+kbfHE6L+r+ufWp4cz7cMXjAkYW2aGbut8wGk3+AqOctlSjB2QwKTjY8uGuKQTk + sOiHb3nzf/UO9UMuscH9TDoU+zRCqvItQ/pN7wRKaYUIvwdTOKz9EshQn2HUcrExrO+H8OH12AmS + I+M93crBe3Cuf+YjLndPkF/Xzvv5UcaKfxlc/UcEX+Kt+haCU8PnNfgg7ps34dzMTQ0zIDpELmuh + Yll7PqiT4L/+1LutY76ImmqvDbVWPtRfjiIP6+4VoG3cBs7c3t+FSiYY4b2Q02q5jr4Ou5tzoYf+ + xvr5mpwVCE1HQZu3/ElH8a0Rlbu+tZV/oZDcdq9Jffv2QE/Q3TkiijhXeZiSup6PKZ3j6FtAVNwd + ur+234pdLlYMt6lByGCWF0BWvwyIdnTG9vn5AYM+7XV4jd8nbG2KxZmCfIrUMWxzjCi994v43WUw + vX8nMn2uQzUVp/sCV32Bd7VPqpp6RIGwCS2smfhhrPwzgpJXyAi+rYvR6S9egJz+Eqk37y/GBB7q + AQ7L+46T7f5jTINxPqik2c7YXKrKWH76rkvtDkEzfFXLgdDoD79QVn9v+V6OB/iKvgkaZHffM1/0 + czWHiK16n0+nhz9B1WBvkUgnSXP4Xz1b+Qf6+eEMciwGsDlbq17rnT/6hIK9i80hqAH9JIoLKqwA + JPihl07LEHIwei5Hsk2Vig31AUlwgN8bUvmodobVT4LMbHIcTiIHBnvXwJ9fS763TZ3+8BmWxpRQ + 4/7+OILZhdpP72BTurzBcMVBDkudQqxf47PBlpdbyh46Uar58xYsgq7EoOEUH72XRUgnXqo5cPOU + C5nQrLFFBqkFayulaH6QK1vWfhncNc5CNrF4TGeuzTSQT65NFmFkbAkGqYCfJT5iG0S7UOxCpYRw + e/Hpw0FxyLSiDaBTgR3++VerX7H88B1riZ5VE3m2GbyYNwPNrS8bH0FcFPW3Pq16d4AICutUcDUL + AuLeDJl8CLrf+tA2lPZgmi1XUj5LckTF6u+xpTUQyFReID9+T7W4q2FV6DzZHqJbNa79HPDTS/rx + M4dM6nQXjOVwxqGuVaz59QPd7Z5D5YYe03lzvnHwLaMH+vUftvhzSKAmmiPdtf7dGeuXM0Cg2AAf + k6dgtMvRF9ShqmoEp24A7FWeY2h+hgQ70tmoJtSZkfr2rQHv9p3M/tT3XDMDbPi6wIjv5j749Q+U + Vc+L2/mVwPr8fJHN6X0LSfQIOpl9aIVt55H1y+VkcbB96hbe1RtYzS8zFKB+vSzY+xiCM/7Oz8p3 + 
iEiNgS2DhxOoRdyW6l1/ZSR6JA3s/QtHpDjx2BK62wnaz17BxsrfPl81M0GeHH0iMzkN2+8+EaDh + F0ccyNyHTUeT88Fc3GNCNnrb0+PIfHW71w5//Ew2NC6CQhU42N6rej8n1DHBy/rusRM7bTVmIDrA + DeP3q79zqeY0TBDsPOuKb7T6VtN7p7zV1b9Ei3bZ9Mz6wFo0X9TE9uNVgcmZ+wK6xBB//dx+u/a/ + QFp3Ejaa2qsmrVo0IPu0pMbKH+kG+5G6+kvojYczW/PTBz+8/vGvKXtXFvB4v8Uxyy8GW1oHQQdy + C1n1zM9PhbD9GtmvP+vwj2zIwBj2ORGTZ2QskDEIDbH20DjveacvDjsdrv4H9c7+ridq7JhwLlMD + nVtfdmh91HRVV+4etaulcEbzmb5h5g0c9vzpwGauvejq379bAf/5j7/++j+/GwZ188g/68WAMZ/H + f/7XVYF/Jo/knzwv/JMKf24ikCEp8r//9e9LCH+3fVO34/8dm3f+Hf7+11/bf183+HtsxuTz35// + Y33bf/7j/wEAAP//AwDnoBEa4yAAAA== + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cache-Control: + - private + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfca726b5d1041-ORD + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 18:20:10 GMT + Function-Execution-Id: + - pbme52vq34dz + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '252' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - f738b1af12886297fba45c0ea7b50048 + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: + - '3000' + X-Ratelimit-Remaining-Requests: + - '2999' + X-Ratelimit-Reset-Requests: + - 20ms + X-Request-Id: + - 9c5ecb63a4996b946555161a4a7fe863 + status: + code: 200 + message: OK +- request: + body: '{"input": "instructing to write the task_id into a file named output.txt + and then shutdown.", "model": "text-embedding-ada-002", "encoding_format": "base64"}' + headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '157' + Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/embeddings + response: + body: + string: !!binary | + H4sIAAAAAAAAA1SaWxO6OrPm799PsWrdMm+JiKRZd8hJ5BQEDzg1NQWICArIIQGya3/3Kf3v2jNz + 4wVQQpJO9/P8Ov/xr7/++rtNqzwb//7nr7/f5TD+/T++1+7JmPz9z1//819//fXXX//x+/3/nszr + NL/fy6b4Pf67WTb3fP77n7/4/77yfx/656+/V725oV5tnPvZup446Y0g9MVLZbIxbPcA5/jxoMbh + 5iHm3mQX8euCp7Yuryr2uW0LSA6PA9F4bGqT08YL2nmrG5Hl3tWWA0leaO/rHb7bluRMOedI0Kub + G3Yyw+wZzy4v4E/8SD1FMFO+wtcBjm90pvqs92hamR8A17/sqZWGSzrzTVWiWfXvVI1CFa3txEuQ + DLsLtqRedxYJ8wAXneTUFWxZI9VcmKg/XlZ4tzm8tGmOViYM7HbC7iB+onl+HARo5/eV2jK+a4PT + 317o3tUr0jm5iTZXV8zRQT0/qasDjqZ4mSfoHjD5m0ecprN5ehWoDRwN44ey7uc8dVwpMcsUGxLL + 2ZShKIeRZyXFtf5O6SVcdLhUokP35kbQZvRBLlRbTvM7Vr+0Th1sETwfLGzjsOpnxwok5KzME/YP + eRMxTVNeMO3qjOqSDek8PMoX1LnFcILmN2LD+6SCH5Yidoa0c5Za00MUNzGP43rca+tbkwsomjnV + X4LbWlsS7pTBzg5zsllwHS3vgyehZgwSetz7Ts/WaZFBk6uNz5ST1zNc5hIU/kyxPy4Nm6vrp4O0 + f+bYUmfPmdk0cnAooxTvo1uGpsjAJjRD6ZF1aV6iiefmEyoOr4S651xwll0VCNAvmon3kv9irJ2b + FkoIJrrLedWZLafUkX48b+jOPUE6V6eW30qNONDDw2kYu+JtAIN2TwibSxQRrT6E0KBJoeo2kNFy + 53QdPvhCqbWILZoo3AKonKLCt01xYNMGFTbw55dE1fTTsGUbygsyH72NNeelpMuedBwqYsEj41Gp + +3lssw69s+VN9dp9pIM72zy009rGjv6YtMEXeB0M5+T724pMGt0psYVW+0zFmbHhHHLTL5ac36YB + B/VlFwmpmAhwCtsDVWn67Kf9ZnSlraMN2Pwwq1+TR+Ki6Dxq2PykqrZOXhcV1b3kYnPmumiCuBYg + s8qO7upVxZhdblp4iJaJbw9nz8b3PZTg8WkLvOfkKZ0T3A5QOWXlz5ePEvHDqswAr88JtWPFQ5NH + VHVl4UykKu4+0SzmMYFVr2+w/cgqNu3fE/nFC8VeVWgtnIkC8T6TqMkNp4hxy10HLpUDf1JVBbFT + 
pOYIdbs9dcXHLprN63iBK1+6eE8Jh8YYz7osdue3/5zajC3GW5TQgJecoCJzqilZdz6QdVr5nH27 + aCwszRKOIdcQoSPXiJ1jRGBlxxO+eHndT7tPGMrWsU++4yvSeZ0/dVktdpQaSyD0S67JMaRmjKmX + m1dtcYnqwstJrv42My9oulERwMK5SHFPVhV1LYVAJoBNpGkUojE73ziIz1yBjTy8VfPzOMUIicXo + VxFVNebvpxOq5quE3aDaR0y3tgKy/fJGcdd4FZtfLITkTSzqhXuxIujALnKVcRuyuqwjRORDCXCM + CE/GdWFHkxgYFlx7DxH5SYJ+pr2ho8urjbGhHiQ2nrybCtsm6Ol5LQbOsvZO4i8+/JWQtGiRzc4G + Vt88uveQgaaIheX2ect87M7NCbHrc1zAwWKDTekwaBPTmYpOvHmnmnxg0Wzcaw5amqypYcynaCml + rgROdDH1FraqfvkObQ3+ivd2NERUT8gJbpuwpRrPHfv5/nR50G3iUXcqSjStwpMA7mNK6dGRN9q8 + M2/11riX1N9G8dDTK54D+coXrv9s76m2THAwYZeSBeuLI7M5iHoFXq40U18qZjR9VESQjcOaHjJT + QEOFHwPcr+2RXiRx0088t72AmBoLtmP36MyP0Jpg4MDB6mR76bqZN4JUc7xE/bF+Vb3wvBEEZmDS + S3ld0tE92gC7dFj8OXXbnoYPj6C1dS+pd2oLjbFXKYLUSIMPrqumG7GxOdiYi0C9z3aJ6BVvQxhz + yaD2sN1WE4VjCK9nxOPdg06I3cIPgbv9MajaZJU2lfMUylvz9aL5O1URyWiQANGOFpFO8SlaaLQK + EFV8H7trrXfmbaNOEFavlvq3tulplus+FL5WYk3frNFwtSmHolCxcPRdjyFdngkI27nDNli9syii + acE8xzb1342psVmgi3SPk5y6WWmw+VkNIgRrOcW27vPaeA9vFjjh+oL1ojS0teccLtI3H+L9jTyd + KRw/IezVilF9ce5snDsgwH2GD9Z4TWVUeB4JKrOTQOOctFX/G/+Hhh+q6zczEpqNwiF1FT2xxuPa + YVu6VpDedXtsGPK1H4xKCeRbqn0IfxZSxuqeS0CWCsAn6Cs2vz/GBOEx2ZPNWdIjdntdfLjfrB7r + d5MgMu95RU62N80PnegVffOPCc1Oy37fr40rmplwmBSeenI/aIu4tyUoV6ue8HFlVutfPrleK4Uw + KiqISR+UILbuFTIJoqWxcfYlcKniY73ddtrMre8EkYlL8W/9SWrjGL75Al8a847YDms88JfzHvuX + mK+mkt/qkvi2A3pAJ7NqiRpy8F0Pv3dQ/a2HnYloYVv4EMVuNS1bxZS5D/lg1fKjdLiOSgHbmBl4 + h29jxPbXswXx2vIINye7tP/mJ7i1TCXybj5rS2xfF+lchTb1SLCJGNxF65e/fCFxg4o+EqaC53OW + 32uwr5Yzsy/wiydDF4xK6IzhAkF4qrEDs1ctbz4upWYME7LNV3rPEzXhoHwcX9i23jeNZamToCUU + 8q++cxAh27iDoJok6srqoSfjKgnQ+TS9qDkve2cJVTNHP/2kXteORgE9CngmfoqVFRf3v/8DoeaB + 2sGUMLb6JC3KnaOJr06Sp/PSMBFeDyOkWedwFakVJ4enZJ1pImNZY/G5LLfbush9NjVuP8fuFP7q + vy/m5TtithEHqAvRA5vP1kSzbu5cdL6ttlQhY50yVIjcL99Sw1BbxlzLGtDyChdfTMMw4uuDN0Bd + XM9E+KSlMy9CU6NXUTXYUTZCP7LnEsDlIq6wWV7DSKj9lQW+Pj+wxT0UtOzlvEPN47mj+Ex2jG3S + QAfx846pXpA2Ig6vCdDzMu/Pm/M+4nWJS2A/ydFvftkG7qItLaF+pNHaPjHq76cLZGO/xd6z9NL5 + +iIJmozoSvgtHvtBqZ6q3OXZhSq2p1bzO0l0hCNkfvOd4iz2xu1AiM8X6hz8tJ/dcpUh5VNfsaZb + XUqiLg7lZYMLvN+M94iJjQ1w1i4B4TxTQXOeat/8xKi/6VZRNErLugNthUqMy6DtJ79VYri9hZO/ + Psdu+tWfueS8tQ1pmH3WlpAlA4TnvYrNy2ujLaCIElCmC9heB1w/zo8dD8922P3yRTV99w9y8sTz + txwoziZxWx0K+9IQ8UI7bRxuBQCe2wf++YW5SCQBwvV8wMZxWtjcFNoFCC1Mv9g0Ud/vlMCS5faU + 4syK3xU5c7ILXJcLBH3jn1nHtETxtjmTwYtOaeduZV2iOFewb98EjV3xHKL+wb9osB9SbTluTye4 + vLqYau7eYst3PdExhIYe0eCmrLqnpqQkp5cvZfQW0aZkgBylXVOjYDvtp3+kMhEyug+7OVpks7RR + muxtqp3fKzTq5sGH4Pi64VMQv9hETjgToxcnUE/CZzT388NEetB0ZD2f92h9vmgmVBlssKKqBZuF + tAhQ67knfKxXFWL+7cVDYig7f7q4T60b1cgCBF5LRDhz1fgIlUUuIZzI2ts1EbP8hMDFSm8Yc2+7 + n492W6N3psa/9a6687QrEIe2tr/NQslZdkWsStFe7rA9PSNnCVilg+llH+rPyTOdhS4CmYkNIdvW + WEfTtA1yoBo6+HK7KOkS248J2cGAqF+ZebUY70mUDqR++Py6OTkTwjBBMxQevmraNpo9yyokpRVs + rOvvQz8Pt5aDw7Pysbp/NP1wNK4ZSMOVp9r9laczzsYc7gdJpHspaTX6LIYQ1Vl7w97WqhjhlLYE + N3pcyae5fHo2XIoLPNBm99XLijNxcZ7BuLNGHx18VH39sAjJ4X7Axr0iWm+dsovkPC9A/bo8oXm7 + FjLgUqxhjA58upynQwnezM/YeQVWJNhtAOAd7Y7u2NPu+8D7vEBLi4IepmDjLPfDRoGWxmv62Ccr + rUNxo6KLwG+xNjDMptlbZyjae3e6gwZF00p72nKkhxTrHz5G5KkMPkRaUVLX28/OOAkrDmofKf6y + 7220NpLakgbda6jdsJYRkx9qdKrPui++FOjHo13U6FONC/WSXK4Wi898MMekItA5eT9n5xtIH20W + yfpz+aRLpZ1aKdBMRriNjnrazk0Hd/EU40MQpVX/SJgCj09XkKVAKVrcVqnllnQt3TlzhP741cq/ + H6nyCOd+MaTa//kVrD0Vvp8S+/FCOHAdrGnvHVtDvcTy6bo/+tz5qPZff0RgVt07zoKqSeehv+co + XosJ1uOrFi3z1grAMfYydZ/H+b/uH5+556/tYsUWf9wKW+dhJz7Z6KhielKf0LMlO6yJde8MnNu/ + EC7jgbyg15DwwvsSuqkT8W4wTv0oTp4P1X3UqbutWTT6EYvRXa7Gb/zvIkZ6L0Ti4ZNjR+/Z1/93 + NRx3ZeSjoyd913ulouWtrsisJT5q47VUA2UbjepGEUfz+vkRUdErBPvLk6Xf9bYQyI1AWhwxNMGZ + 
qD99TM9n96ONho8CiHuDkhmJuJ+l3LpAcNGe9PDdr9/9PcBH03q/czrbmbE3T/LPT1urhPbMPjnc + Ntjc118/EUXzyQ5csFXvTK2XkvW/egnSnluolZO2Z9vcKuEmyZU/XfcYDWi3M0Ehuwd1Y2mIps0N + vySzbLaEU5Nrz9pMOgFeXxN/RTSNLQu3LVGoik96ih3FYUF2DyDptnfCK3Lw1RN+jnAAMdW9PnHq + CPQFtHHc0ctlkqJ+b7QteiMupIcgQtV00bkB7YOj64uN4VWbzejWaHkrqz88ZwiEfoC8lz2axscD + Wk6CYwFZ3yqKE6NG88k7KjLslQi77fnpjN96KX3ni4iLaLHJqJTw58cJtzyjaDaS2obGEUyykmaP + jeElyiD7+DNWV7KVTq10qIE7KCW9NKaMqJ+9OyTq2YHaaDYQjS5OCApSehxtnzf240Oy4vAcHjP2 + RAuE7/hXz7Epidf+x1PAKB45VgSxdSbefgV//NZXz6BlXCXhz99RIw+3/UjKSJG++pDIt6vTr9EA + KpAgONGgVRCi1jEtQLgZFXUNa+l/fhKS7LbBJ3goVX1dZg5YlD388esfF14veTA4V6BJALkzyNq2 + gJxDyK9++VZR2gvsOnhjS30f2OKvyAmS7hBgfzkM7KvvJChiXcFKFDFneWFcot2hOOL84h7TyVIe + FwllESHs63+6fd/Z0LF3/fUDPhO+9QFeSWvjkzv16bI6hDV8eajPu5MTkWpudYhYllHlpHJskrVt + CZvb+MJq/Pr6TbmyQL6Ie4rN/efLm1wd5E3tU4dtFI2tPmELsdA21FoKNZr57nVBk8Jjet2MckS3 + A7iwO5RHrO77js235iKAcS8oNuRTHS3+JHXQnzWJas1WTfkQThwqSXX16/IapotWKRlU3RCQVZn1 + 6Fc/gT/XEj2IacxYVWUD8jhJxcbhfdTo1PEisrju7U+bhvWse3QTCEbc4iT6tH17NK75H7/+05cD + 05ECBaEffxIGHU0HPlMgMSafRj/euwozQTrf5C1Zvwll8+YVXuC3P6dS6p3Zu8kqik+5+Uef0f31 + bEuGa6a+572CdLzj+ITUxAppLohiuritVSNrlSxEnuRSm6VcucCXR/qTY80pi5urCt3Uin6jnjzG + mHCNt+otMfxxXXTRct5xphTfPinhvn52FPNgkBRH4PCOz7b99NkoE6TC5fDVe3etvdGJk+XxOVFn + SG3nq09eP36MVT0RqvlwOlloOj1tau56J+W3tlNCoFgGdfgFO+tvvUD+43XA3ufpoHnvdxK8H2NE + /c720+13v6DYJKPP303CBmlvD/BCpyP+8tvoj57t/Tig+nE/pUQZ0gDWV3r25+pqp/yvXgWazqhK + G51ttNNah0cXj3ivWnY1Bc2kyMPS7/314TaiZTRSkD5hlBM+cHfVlNPYRcddEdEHd2ui2S8bHTkr + /UTPH7t2Zs/ZneRVLF6oq++KauaqpwtTGVZkGcroxw9ASq5AqdFyJZuzxs/Fwj41OK9LntH3Ke4g + 2wzXH++ppkiILqjoVfKdj57N7yQ0AZxHig3ZqpylkCQJSIVVauXlOx3OW2dCfu0xsh63x2qSlYnI + mUdXZDLfB4c2JyWUnfduQ1bf758Om0wALS0LqmJh6Pu84FvpeqpD6m03NBoO/EmFjUCmL1+/OEPQ + iQIyhqnEmNprNG4EX4Dbp8L4T37e3y8vCe49+Nv9fKr4EDIOnOcJiOC/42r46iOAhiRE/PYXxmf9 + JmDkAXz1esum1pJP0u2t7wm4yGMsL6CFsT4Cec9PR5st6xOgD1dkhLsHO21xVhxBEGcNNteJUv14 + uuTv9Be+PFuTzXgv2nCuVwb2j5+Dtm7f/EtixmhSZ9K0ip8MJ0a74ICwlThFtPSrroXPXThj/avv + 6S18ElmXX2u//fL+eSrtDg7KZk/kuxSxn5+EzCo6+qhXvCO4/S6Rv/0Ef3HlHFFxLbTQZtmeHh5X + RxvvT52XU2mn+zMfCNGXHxao51c8tdG77GfDkXJ4E3/rr2+KgYi1uatwOtkV1T7XbTW+P94ExrCU + VDt723S6ca8Wtmb9InMR3tIp6ERe+uVP/+Ieo8lnLIB+KoZvfHf9a+u5F/Td31iPq7qiPclzdLqu + c3+9Prz6ifqeAIy3aryLdiZi5fUjoFptwL99+ciy4eIFroe5+dYzC335oQrqcGHY3G1INSpu6v/0 + NFn71jYak+fZh6f7UKl9PZbaTG9xDdlVCfD1669mz1JK2V8VGtlp9aJ9+x8tsmViEgmHVTVr99UF + vvmE8PrtxwcmTu4fwssX+yzshx8fe0+3FHu5uXF+fgV+fN/qzkr/px/w5UH++IvnpyQRKGQ5o/qY + Len89WdSW15fPuKTtiKnrcZB90wNf5F1qSfW5qxCtBD88w9/eIFc+Y8j4YzO7ukmDyQ5bp0dtrpn + hcYPqVt4opb++KPD8oLvAOS3QFZdaqeCqTwKxK9LHut8UTqz7dwIOhmdjL9+o2LjbEpw1B87uusv + LWJ765nD1tkN2Lwdn+jLDyX07WfRn/4lobEPoFMG7svjL9USjTcTvvNDZKWrnKWRDhN8/b7ff/sp + 07mLdehVw8WmF2rResg+C2iPVUvtGGXV18/mqAnvE7avdyOad4ckATVvEqzF64Wxs20pYCmeiS2t + iKtZeu5smTJToP5XP87DreBkQkuT3PDEvn48uwCZhxZrQXd0JhkPMWrCx+RXBoaI9KuyhWMxKFRZ + aQe2hNNNgqHIazI30tK33/7Ir35R9xNgNj3Wuxz8F71hw/Ps6ve+X/yR06c99/Tr7yHntgj73tpN + 1+fnmgNVMl0ii6D0s0yHDLErav4rvndKYMseJ6o/PZxOv/pshbfGX771YViZT07++fUfn2fmPZ9A + cRvHl9rLHH1ex1ZB27NuY9w411So/Y2N1h8kELk+imgMb6cQfe66hZMYQU/Vg+0DMT0B77SEMKb5 + LIf9kdU+V5l5P9x1r4bIGje+YDxabTlzsg80TCj1ZWgdcuhvEzBVdajxsU3tD7/68QCHT6x+/H7P + n/5KmSmeswh6koM1ZDrOcwezHw9CY9ns/M2CzejbT83kn1/87Y9pAUuC95SmWEFpkE7x7l1A3gYh + vj5iFFHH2ZVgLJ+VX1zcnbbclk8NzsNK8Pn4whXbuZoii58m9qX9foc291itId2YF2zTpYkoLnMR + uINa4sPD2aN1En4KVHY6xkdbeGuDR2wFnOJF6a3jNGds7nb5p59ni6KoTafpOMDdezOse48Lm+7L + UYFf/Txvh4HRH7/8jX+IyUdbbmLl/vrTdP9wRWfqYFF+/Tuq6ZYdzdfzQmAyjlcfsotaTfe9mcCP + D/7hHdf1MsHlHb99dn4/0Pjrx3alE5KJ25gVVczbBKerccRaKmmMDe1JkP/+nQr4z3/99df/+p0w + 
qNt7/v4eDBjzefz3fx8V+HdyT/7N88K/qfDnJAIZkiL/+5//OoTw96dv68/4v8f2lTfD3//8tYY/ + xw3+Htsxef+/1//1fdt//uv/AAAA//8DAJR+VSXjIAAA + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cache-Control: + - private + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfca7878ae2a00-ORD + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 18:20:11 GMT + Function-Execution-Id: + - yvvlhpufeil9 + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '46' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - 649b408d80465f7f39e5760eedb089a8 + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: + - '3000' + X-Ratelimit-Remaining-Requests: + - '2999' + X-Ratelimit-Reset-Requests: + - 20ms + X-Request-Id: + - d48b82d9676ed7ffb7da5829ba34e927 status: code: 200 message: OK @@ -1558,10 +1906,9 @@ interactions: {\n \"arg name\": \"value\"\n }\n }\n} \nEnsure the response can be parsed by Python json.loads"}, {"role": "system", "content": "The current time and date is Tue Jan 1 00:00:00 2000"}, {"role": "system", "content": "This - reminds you of these events from your past: \nAs a system, I recall that nothing - new has happened since my creation."}, {"role": "user", "content": "Determine - which next command to use, and respond using the format specified above:"}, - {"role": "assistant", "content": "{\n \"thoughts\": {\n \"text\": + reminds you of these events from your past: \nI was created"}, {"role": "user", + "content": "Determine which next command to use, and respond using the format + specified above:"}, {"role": "assistant", "content": "{\n \"thoughts\": {\n \"text\": \"I should start by reading the instructions_1.txt file to determine what actions to take next.\",\n \"reasoning\": \"The instructions_1.txt file contains the necessary information to determine the next steps.\",\n \"plan\": @@ -1576,8 +1923,8 @@ interactions: next command to use, and respond using the format specified above:"}, {"role": "assistant", "content": "{\n \"thoughts\": {\n \"text\": \"The next instruction is to read the instructions_2.txt file. I should use the read_file - command to read the file.\",\n \"reasoning\": \"The instructions_2.txt - file contains the necessary information to determine the next steps.\",\n \"plan\": + command to do this.\",\n \"reasoning\": \"The instructions_2.txt file + contains additional information that is necessary to determine the next steps.\",\n \"plan\": \"- Use the read_file command to read the instructions_2.txt file\\n- Analyze the instructions to determine the next steps\",\n \"criticism\": \"I need to ensure that I am following the instructions accurately and efficiently.\",\n \"speak\": @@ -1588,11 +1935,11 @@ interactions: "content": "Determine which next command to use, and respond using the format specified above:"}, {"role": "assistant", "content": "{\n \"thoughts\": {\n \"text\": \"The next instruction is to read the instructions_3.txt file. 
I should use - the read_file command to read the file.\",\n \"reasoning\": \"The instructions_3.txt - file contains the necessary information to determine the next steps.\",\n \"plan\": - \"- Use the read_file command to read the instructions_3.txt file\\n- Analyze - the instructions to determine the next steps\",\n \"criticism\": \"I - need to ensure that I am following the instructions accurately and efficiently.\",\n \"speak\": + the read_file command to do this.\",\n \"reasoning\": \"The instructions_3.txt + file contains additional information that is necessary to determine the next + steps.\",\n \"plan\": \"- Use the read_file command to read the instructions_3.txt + file\\n- Analyze the instructions to determine the next steps\",\n \"criticism\": + \"I need to ensure that I am following the instructions accurately and efficiently.\",\n \"speak\": \"I will use the read_file command to read the instructions_3.txt file.\"\n },\n \"command\": {\n \"name\": \"read_file\",\n \"args\": {\n \"filename\": \"instructions_3.txt\"\n }\n }\n}"}, {"role": "system", "content": @@ -1611,7 +1958,7 @@ interactions: Connection: - keep-alive Content-Length: - - '6572' + - '6533' Content-Type: - application/json method: POST @@ -1619,15 +1966,16 @@ interactions: response: body: string: !!binary | - H4sIAAAAAAAAA6SSTW/bMAyG7/sVBM9OkDRxm/ncQzNgA4atl86DocisrcYWBYlGswX+74M/Ajdp - MWzolRTf5yHEI5ocE9SlEl27anbz6fOXm6/fdg/r+9tf1pT50902/s18fftAFiPk3RNpGSfmmmtX - kRjuWtqTEsoxWV5v1pv1Jt7EEdacU4UJFk5mq3k8k8bveLZYLZYYYRNUQZgc0XmunWTCe7IBk+Xq - YxzhFD41ukhhUdVUijeLNkJdstEUMPlxxJrCKdhzRZigCsEEUVY6TbZCtlvhmFoAgBSl5KYoJaSY - wFgcG3SQrpji95LA0kHA2CC+0Z0WmADC8OyNEEhJICrsM5ODscJ94dFUBNyIa2QuB5nDFkLJTZVD - E4aRfjgTzvqnmuta2fwfUucpRi9NPanA1thi0n07unMuCZRznp03Ss6ovdZf4dM2b3q4StlBYQb3 - 79jxAnNB0d6I0SbUA2oLlqhPJBsa30UqgS2oukcYW/SZmr0nLf+9U3Ck9ifSs6mqd/3ea9jAaqPT - OY45r67RqpoGjTPuha3yxeUhD43u7RQxaZzNX9791Wq5Phn2lqNsaltsI3w01oQyG64PEwzCDiM0 - NqcDJov2Z/vhDwAAAP//AwA/k/r4YgQAAA== + H4sIAAAAAAAAA5STQW/aQBCF7/0VozkbBKEkxMf20BJ6qqhUqa7Qsh7wFntntTtOiBD/vdo1lOAq + Snud2XnzvefxAU2JOepKiW5cPbhb3Pv514ePNF1Mvn+u9g/ls6l4ufjyST9+wAx5/Yu0nCaGmhtX + kxi2mKH2pIRKzMe3s+loMpuNbzJsuKQac9w6GUyG04G0fs2D0WQ0xgzboLaE+QGd58bJSnhHNmA+ + nsymGV7EL437cYbCoupLaXp3e8xQV2w0Bcx/HLChcBb2XBPmqEIwQZSViMlWyEYLh8ICABQoFbfb + SkKBOZyKpwbtJRYLXFYElvYCxgbxrY5YYAIIw5M3QiAVgaiwW5kSjBVOhY2pCbgV18pQ9jKEOYSK + 27qENnQjaXglvEpPNTeNsmVULaOCCcMCs5dEnlRga+z2gvVnawDTOPbRJxi7Yd+ohCmVErBEZcJd + EwT1SCVs2MOmldYTeNqQJ6tpCMtXoaLbikA559l5o+SKNxniJJ3AXxL0Tbha2Y5/AN/eCuLf4+1t + 0d6I0SY03ap5iiAqkg3RdIplDqqBNhi7TYqavSctSdmqhiBBxIdynXSkErKgtG69Eqqf+yaDI7U7 + r34ydf32N/+PSyqw23XMzkd80vnrhqONDuNqb49W+W3//LvGOYlO4pWw+3/LzWT8/kyYKE+whT3i + McONsSZUq+6WMccg7DBDY0vaYz46/jy++w0AAP//AwA43yIrmAQAAA== headers: Access-Control-Allow-Origin: - '*' @@ -1639,21 +1987,21 @@ interactions: Cf-Cache-Status: - DYNAMIC Cf-Ray: - - 7cbda64a0d8b1049-ORD + - 7ccfca809d0210d8-ORD Content-Encoding: - gzip Content-Type: - application/json Date: - - Tue, 23 May 2023 13:30:12 GMT + - Thu, 25 May 2023 18:20:36 GMT Function-Execution-Id: - - f4955nyjuij9 + - pbme7kantxw2 Openai-Model: - gpt-3.5-turbo-0301 Openai-Organization: - significant-gravitas Openai-Processing-Ms: - - '27188' + - '24083' Openai-Version: - '2020-10-01' Server: @@ -1663,7 +2011,7 @@ interactions: Vary: - Accept-Encoding X-Cloud-Trace-Context: - - 1d7fa6a5187cc4d84806587407f4d630;o=1 + - b3143c8eb6fb2bdcf9b7429eff4fa7b4 X-Powered-By: - Express X-Ratelimit-Limit-Requests: @@ -1673,13 +2021,13 @@ interactions: X-Ratelimit-Remaining-Requests: - 
'3499' X-Ratelimit-Remaining-Tokens: - - '86449' + - '85965' X-Ratelimit-Reset-Requests: - 17ms X-Ratelimit-Reset-Tokens: - - 2.367s + - 2.69s X-Request-Id: - - 99ec79bb56359aebe30013d596125a19 + - 4f56d1467ab6d751cd14534935c37bad status: code: 200 message: OK @@ -1723,10 +2071,9 @@ interactions: {\n \"arg name\": \"value\"\n }\n }\n} \nEnsure the response can be parsed by Python json.loads"}, {"role": "system", "content": "The current time and date is Tue Jan 1 00:00:00 2000"}, {"role": "system", "content": "This - reminds you of these events from your past: \nAs a system, I recall that nothing - new has happened since my creation."}, {"role": "user", "content": "Determine - which next command to use, and respond using the format specified above:"}, - {"role": "assistant", "content": "{\n \"thoughts\": {\n \"text\": + reminds you of these events from your past: \nI was created"}, {"role": "user", + "content": "Determine which next command to use, and respond using the format + specified above:"}, {"role": "assistant", "content": "{\n \"thoughts\": {\n \"text\": \"I should start by reading the instructions_1.txt file to determine what actions to take next.\",\n \"reasoning\": \"The instructions_1.txt file contains the necessary information to determine the next steps.\",\n \"plan\": @@ -1741,8 +2088,8 @@ interactions: next command to use, and respond using the format specified above:"}, {"role": "assistant", "content": "{\n \"thoughts\": {\n \"text\": \"The next instruction is to read the instructions_2.txt file. I should use the read_file - command to read the file.\",\n \"reasoning\": \"The instructions_2.txt - file contains the necessary information to determine the next steps.\",\n \"plan\": + command to do this.\",\n \"reasoning\": \"The instructions_2.txt file + contains additional information that is necessary to determine the next steps.\",\n \"plan\": \"- Use the read_file command to read the instructions_2.txt file\\n- Analyze the instructions to determine the next steps\",\n \"criticism\": \"I need to ensure that I am following the instructions accurately and efficiently.\",\n \"speak\": @@ -1753,11 +2100,11 @@ interactions: "content": "Determine which next command to use, and respond using the format specified above:"}, {"role": "assistant", "content": "{\n \"thoughts\": {\n \"text\": \"The next instruction is to read the instructions_3.txt file. 
I should use - the read_file command to read the file.\",\n \"reasoning\": \"The instructions_3.txt - file contains the necessary information to determine the next steps.\",\n \"plan\": - \"- Use the read_file command to read the instructions_3.txt file\\n- Analyze - the instructions to determine the next steps\",\n \"criticism\": \"I - need to ensure that I am following the instructions accurately and efficiently.\",\n \"speak\": + the read_file command to do this.\",\n \"reasoning\": \"The instructions_3.txt + file contains additional information that is necessary to determine the next + steps.\",\n \"plan\": \"- Use the read_file command to read the instructions_3.txt + file\\n- Analyze the instructions to determine the next steps\",\n \"criticism\": + \"I need to ensure that I am following the instructions accurately and efficiently.\",\n \"speak\": \"I will use the read_file command to read the instructions_3.txt file.\"\n },\n \"command\": {\n \"name\": \"read_file\",\n \"args\": {\n \"filename\": \"instructions_3.txt\"\n }\n }\n}"}, {"role": "system", "content": @@ -1765,18 +2112,18 @@ interactions: {"role": "user", "content": "Determine which next command to use, and respond using the format specified above:"}, {"role": "assistant", "content": "{\n \"thoughts\": {\n \"text\": \"The next instruction is to write the task_id into the - file output.txt. I should use the write_to_file command to write the task_id - into the file.\",\n \"reasoning\": \"The write_to_file command is the - appropriate command to use to write the task_id into the output.txt file.\",\n \"plan\": - \"- Use the write_to_file command to write the task_id into the output.txt file\",\n \"criticism\": - \"I need to ensure that I am writing the correct task_id into the output.txt - file.\",\n \"speak\": \"I will use the write_to_file command to write - the task_id into the output.txt file.\"\n },\n \"command\": {\n \"name\": - \"write_to_file\",\n \"args\": {\n \"filename\": \"output.txt\",\n \"text\": - \"2314\"\n }\n }\n}"}, {"role": "system", "content": "Command write_to_file - returned: File written to successfully."}, {"role": "user", "content": "Determine - which next command to use, and respond using the format specified above:"}], - "temperature": 0, "max_tokens": 0}' + file output.txt. I should use the write_to_file command to do this.\",\n \"reasoning\": + \"The task_id is important information that needs to be saved for future reference. 
+ The write_to_file command is the appropriate command to use to save this information.\",\n \"plan\": + \"- Use the write_to_file command to write the task_id into the file output.txt\",\n \"criticism\": + \"I need to ensure that I am using the correct filename and that the task_id + is written accurately.\",\n \"speak\": \"I will use the write_to_file + command to write the task_id into the file output.txt.\"\n },\n \"command\": + {\n \"name\": \"write_to_file\",\n \"args\": {\n \"filename\": + \"output.txt\",\n \"text\": \"2314\"\n }\n }\n}"}, {"role": + "system", "content": "Command write_to_file returned: File written to successfully."}, + {"role": "user", "content": "Determine which next command to use, and respond + using the format specified above:"}], "temperature": 0, "max_tokens": 0}' headers: AGENT-MODE: - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F @@ -1789,7 +2136,7 @@ interactions: Connection: - keep-alive Content-Length: - - '7652' + - '7667' Content-Type: - application/json method: POST @@ -1797,15 +2144,15 @@ interactions: response: body: string: !!binary | - H4sIAAAAAAAAA5yST28aMRDF7/0UozkbBAQC2VtV9UCl5JQeqlIh453ddfHaK89sCUH73av9k0Bp - q1Y5esb+vec3c0KbYoKm0GLKyo2Wn+4f6G7xoB+zcvdB+Hk+u19/yT4+yyK9Q4Vh952MDC/GJpSV - I7HBo0ITSQulmExvV/PVfHU7XSksQ0oOE8wrGd2MFyOp4y6MJjeTKSqsWeeEyQmrGMpKthL25LkF - zGYKz/BzYzFRKEG0O5eWy1mj0BTBGmJMvp6wJH4Bx+AIE9TMlkV7aW0GL+TbL5w2HgBgg1KEOi+E - N5jAUBwa9CRtcYOPBUUCHQl8gKyOUlAE61libVqLDNaDFASZdcRjWAMXoXYpcFELpOHQd/nIQuV4 - g+pSJpLm4K3P36KlQPu0OzrNcnkPDppBwn9ZqJz2vfoIPjN1N0XzfjtMgcCEsuyU/gy84ploxRrL - ZQ9dgyfq3pLnOrZ8LbCGQv/oyJ1ECto58GTa+cVjp8+woyxE6jTF+vxfH+GK9P6tMztY5/4WVy/S - qJelGfL4bWe8LqnX/yW/K5s65tfrdrkLPeC9c0MI54i4Nm0+We3c8dVV52wwuPENNgoz6y0X2x6H - CbKEChVan9ITJpPmW/PuJwAAAP//AwCAeDvT/AMAAA== + H4sIAAAAAAAAA5ySW4/TMBCF3/kVo3l2q1al227eVkKCLgiEAAlEUOU609jUsSPPZC9U+e8oTUq7 + 5fLAo2c053xnPHt0BWZorBZT1X60eH2ddi+rH/b2buGXt+9evHlYfP4SXxWT9zdvUWHcfCcjw8TY + xKr2JC4GVGgSaaECs+nVcj6ZLZfPJwqrWJDHDMtaRrPxfCRN2sTRZDaZosKGdUmY7bFOsaplLXFH + gTuB6VLhSfzUmE8VShTtT6XF1XWr0NjoDDFmX/dYER+FU/SEGWpmx6KDdJgxCIUuwj4PAAA5io1N + aYVzzGAoDg16kK6Y40dLiUAnghBh2ySxlMAFltSYDpHBBRBLsHWeeAwrYBsbXwDbRqCI931XlxQE + GnahPLxF8249BCUwsap0KMY5qnOKRJpjcKHsUT64YKib/jeQOjO0muHoUoATPhiDDsURc0Mn0kv/ + 2uvQW4/gE9PfwUHin/JeyJnkxBnHVa+5gkB0GKXATerktcAKrL6jM2jtPQQy3demx4M9w4a2MfXg + 0m30qe1lDK5J746e9857aP4jzDjHXrRVx/MZBn67nqAr6v2eGFxg6VReHt75t/cCN94PmU8b4cZ0 + 69g23j/+ojqQDYB5aLFVuHXBsV33cpghS6xRoQsFPWA2ab+1z34CAAD//wMAnTqQcAYEAAA= headers: Access-Control-Allow-Origin: - '*' @@ -1817,21 +2164,21 @@ interactions: Cf-Cache-Status: - DYNAMIC Cf-Ray: - - 7cbda7160fd3e277-ORD + - 7ccfcb2e8ebe29c3-ORD Content-Encoding: - gzip Content-Type: - application/json Date: - - Tue, 23 May 2023 13:30:34 GMT + - Thu, 25 May 2023 18:20:58 GMT Function-Execution-Id: - - f495ip5txdm1 + - pbmepc01gbzx Openai-Model: - gpt-3.5-turbo-0301 Openai-Organization: - significant-gravitas Openai-Processing-Ms: - - '15997' + - '17906' Openai-Version: - '2020-10-01' Server: @@ -1841,7 +2188,7 @@ interactions: Vary: - Accept-Encoding X-Cloud-Trace-Context: - - 285b29ed564c106ff4d38a33451a5203;o=1 + - c04b0eee68407fea8158cee793fa60de;o=1 X-Powered-By: - Express X-Ratelimit-Limit-Requests: @@ -1851,59 +2198,26 @@ interactions: X-Ratelimit-Remaining-Requests: - '3499' X-Ratelimit-Remaining-Tokens: - - '86452' + - '85953' X-Ratelimit-Reset-Requests: - 17ms X-Ratelimit-Reset-Tokens: - - 2.365s + - 2.697s X-Request-Id: - - 48a48589bce33baaeb8f62202ba96d90 
+ - 15503d774ef53cf1f960387e503eed2f status: code: 200 message: OK - request: - body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "system", "content": "You - are Follow-Instructions-GPT, an AI designed to read the instructions_1.txt file - using the read_file method and follow the instructions in the file.\nYour decisions - must always be made independently without seeking user assistance. Play to your - strengths as an LLM and pursue simple strategies with no legal complications.\nThe - OS you are running on is: Ubuntu 22.04.2 LTS\n\nGOALS:\n\n1. Use the command - read_file to read the instructions_1.txt file\n2. Follow the instructions in - the instructions_1.txt file\n\n\nConstraints:\n1. ~4000 word limit for short - term memory. Your short term memory is short, so immediately save important - information to files.\n2. If you are unsure how you previously did something - or want to recall past events, thinking about similar events will help you remember.\n3. - No user assistance\n4. Exclusively use the commands listed below e.g. command_name\n\nCommands:\n1. - append_to_file: Append to file, args: \"filename\": \"\", \"text\": - \"\"\n2. delete_file: Delete file, args: \"filename\": \"\"\n3. - list_files: List Files in Directory, args: \"directory\": \"\"\n4. - read_file: Read a file, args: \"filename\": \"\"\n5. write_to_file: - Write to file, args: \"filename\": \"\", \"text\": \"\"\n6. - delete_agent: Delete GPT Agent, args: \"key\": \"\"\n7. get_hyperlinks: - Get hyperlinks, args: \"url\": \"\"\n8. get_text_summary: Get text summary, - args: \"url\": \"\", \"question\": \"\"\n9. list_agents: List - GPT Agents, args: () -> str\n10. message_agent: Message GPT Agent, args: \"key\": - \"\", \"message\": \"\"\n11. start_agent: Start GPT Agent, args: - \"name\": \"\", \"task\": \"\", \"prompt\": \"\"\n12. - task_complete: Task Complete (Shutdown), args: \"reason\": \"\"\n\nResources:\n1. - Internet access for searches and information gathering.\n2. Long Term memory - management.\n3. GPT-3.5 powered Agents for delegation of simple tasks.\n4. File - output.\n\nPerformance Evaluation:\n1. Continuously review and analyze your - actions to ensure you are performing to the best of your abilities.\n2. Constructively - self-criticize your big-picture behavior constantly.\n3. Reflect on past decisions - and strategies to refine your approach.\n4. Every command has a cost, so be - smart and efficient. Aim to complete tasks in the least number of steps.\n5. 
- Write all code to a file.\n\nYou should only respond in JSON format as described - below \nResponse Format: \n{\n \"thoughts\": {\n \"text\": \"thought\",\n \"reasoning\": - \"reasoning\",\n \"plan\": \"- short bulleted\\n- list that conveys\\n- - long-term plan\",\n \"criticism\": \"constructive self-criticism\",\n \"speak\": - \"thoughts summary to say to user\"\n },\n \"command\": {\n \"name\": - \"command name\",\n \"args\": {\n \"arg name\": \"value\"\n }\n }\n} - \nEnsure the response can be parsed by Python json.loads"}, {"role": "system", - "content": "The current time and date is Tue Jan 1 00:00:00 2000"}, {"role": - "user", "content": "Determine which next command to use, and respond using the - format specified above:"}], "temperature": 0, "max_tokens": 0}' + body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "Write + a concise summary of the following text:\n\n\nLITERAL TEXT: \"\"\"2314\"\"\"\n\n\nCONCISE + SUMMARY: The text is best summarized as"}], "temperature": 0, "max_tokens": + 0}' headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE Accept: - '*/*' Accept-Encoding: @@ -1911,7 +2225,7 @@ interactions: Connection: - keep-alive Content-Length: - - '3334' + - '245' Content-Type: - application/json method: POST @@ -1919,120 +2233,74 @@ interactions: response: body: string: !!binary | - H4sIAAAAAAAAA4SSy27bQAxF9/0KgmvJsGrXD+26KJAUKNpFCvShwhiPKGmSeagzVGPH0L8XY1lO - YQfp9g7Jey6HB1Ql5igbwdK0Ol1+vP/xXX35lDV3H5ya3nxujbh5Wv/J5t+qt5ig296T5FPHRDrT - amLlLCYoPQmmEvNssZqvZ8vlYpWgcSVpzLFuOZ1N3qXc+a1Lp7Nphgl2QdSE+QFb70zLG3YPZAPm - y/kywefZZz2brxNkx0KfpdV60ScoG6ckBcx/HtBQGMd6pwlzFCGowMJyhHSWycYAh8ICABTIjevq - hkOBOZzE0wPtOIoF3kJoXKdLCCw8w3YPnkSpbA3cECgb2HcyooZNNuEdQ6U0QRfGili9OWrSGSNs - OSkw+dfKkwjOKlsPfncNQaV8YAhMLagA7KCzJfmYo7xyhSg+NoJjqaffnfJUgqvA0KVTq4UdTFL4 - GuhlvGgXxdfiFYVN4b0Vev9ELwOVxOSNssOzpd0QJ1wQSa9YSRXMuGtLdCQgGzofmwXD7avxq07r - PWypcp6g9U4Sxd+5zB5aEg+jy6PS+v//eYw6KXAY0yfj0ZwWdXUzVhgaHM5LvYAQvr48teEh1j63 - X299hDiCnHgK22OfYKWsCs1muCLMMbBrMUFlS9phPu1/9W/+AgAA//8DAABeWuzpAwAA + H4sIAAAAAAAAA0SOQUvDQBBG7/6K8J03JWmatO5NPXjw4kUQrJTNZtpuTXbCzgSKJf9dClqvD97j + XRA6WPijUz+Mfb5+uZf6OcR1H58e64fTYWrfl99v52kl8goDbk/k9ddYeB7GnjRwhIFP5JQ62LLZ + 1EW12TSlwcAd9bA4jJpXizrXKbWcF1VRwmASdyDYC8bEw6g75S+KAls1Bv/pGy4LA2V1/Y2smtnA + Hzl4EtiPCwaSv2binmDhRIKoi3o95KgUr/cuE0qBJON9tucpZXEaWkpisi2WVbnaYoHZYB9ikOMu + kROOsBDlEQYhdnSGLebP+e4HAAD//wMAiLNr+0EBAAA= headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7cc6279c6ab63ac7-DFW + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 Cache-Control: - no-cache, must-revalidate - Connection: - - keep-alive + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfcbb129082322-ORD Content-Encoding: - gzip Content-Type: - application/json Date: - - Wed, 24 May 2023 14:16:35 GMT - Server: - - cloudflare - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400, h3-29=":443"; ma=86400 - openai-model: + - Thu, 25 May 2023 18:21:02 GMT + Function-Execution-Id: + - pbmevu50gphf + Openai-Model: - gpt-3.5-turbo-0301 - openai-organization: + Openai-Organization: - significant-gravitas - openai-processing-ms: - - '27497' - openai-version: + Openai-Processing-Ms: + - '1540' + Openai-Version: - '2020-10-01' - strict-transport-security: + Server: + - Google Frontend + Strict-Transport-Security: - max-age=15724800; 
includeSubDomains - x-ratelimit-limit-requests: + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - 850f41e684b09434d8c479bf8334a162;o=1 + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: - '3500' - x-ratelimit-limit-tokens: + X-Ratelimit-Limit-Tokens: - '90000' - x-ratelimit-remaining-requests: + X-Ratelimit-Remaining-Requests: - '3499' - x-ratelimit-remaining-tokens: - - '86499' - x-ratelimit-reset-requests: + X-Ratelimit-Remaining-Tokens: + - '89467' + X-Ratelimit-Reset-Requests: - 17ms - x-ratelimit-reset-tokens: - - 2.334s - x-request-id: - - 518916964b78cdb0ae0001c2c19bb7e4 + X-Ratelimit-Reset-Tokens: + - 354ms + X-Request-Id: + - bd62e6c94225740f116d494598aec43a status: code: 200 message: OK - request: - body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "system", "content": "You - are Follow-Instructions-GPT, an AI designed to read the instructions_1.txt file - using the read_file method and follow the instructions in the file.\nYour decisions - must always be made independently without seeking user assistance. Play to your - strengths as an LLM and pursue simple strategies with no legal complications.\nThe - OS you are running on is: Ubuntu 22.04.2 LTS\n\nGOALS:\n\n1. Use the command - read_file to read the instructions_1.txt file\n2. Follow the instructions in - the instructions_1.txt file\n\n\nConstraints:\n1. ~4000 word limit for short - term memory. Your short term memory is short, so immediately save important - information to files.\n2. If you are unsure how you previously did something - or want to recall past events, thinking about similar events will help you remember.\n3. - No user assistance\n4. Exclusively use the commands listed below e.g. command_name\n\nCommands:\n1. - append_to_file: Append to file, args: \"filename\": \"\", \"text\": - \"\"\n2. delete_file: Delete file, args: \"filename\": \"\"\n3. - list_files: List Files in Directory, args: \"directory\": \"\"\n4. - read_file: Read a file, args: \"filename\": \"\"\n5. write_to_file: - Write to file, args: \"filename\": \"\", \"text\": \"\"\n6. - delete_agent: Delete GPT Agent, args: \"key\": \"\"\n7. get_hyperlinks: - Get hyperlinks, args: \"url\": \"\"\n8. get_text_summary: Get text summary, - args: \"url\": \"\", \"question\": \"\"\n9. list_agents: List - GPT Agents, args: () -> str\n10. message_agent: Message GPT Agent, args: \"key\": - \"\", \"message\": \"\"\n11. start_agent: Start GPT Agent, args: - \"name\": \"\", \"task\": \"\", \"prompt\": \"\"\n12. - task_complete: Task Complete (Shutdown), args: \"reason\": \"\"\n\nResources:\n1. - Internet access for searches and information gathering.\n2. Long Term memory - management.\n3. GPT-3.5 powered Agents for delegation of simple tasks.\n4. File - output.\n\nPerformance Evaluation:\n1. Continuously review and analyze your - actions to ensure you are performing to the best of your abilities.\n2. Constructively - self-criticize your big-picture behavior constantly.\n3. Reflect on past decisions - and strategies to refine your approach.\n4. Every command has a cost, so be - smart and efficient. Aim to complete tasks in the least number of steps.\n5. 
- Write all code to a file.\n\nYou should only respond in JSON format as described - below \nResponse Format: \n{\n \"thoughts\": {\n \"text\": \"thought\",\n \"reasoning\": - \"reasoning\",\n \"plan\": \"- short bulleted\\n- list that conveys\\n- - long-term plan\",\n \"criticism\": \"constructive self-criticism\",\n \"speak\": - \"thoughts summary to say to user\"\n },\n \"command\": {\n \"name\": - \"command name\",\n \"args\": {\n \"arg name\": \"value\"\n }\n }\n} - \nEnsure the response can be parsed by Python json.loads"}, {"role": "system", - "content": "The current time and date is Tue Jan 1 00:00:00 2000"}, {"role": - "system", "content": "This reminds you of these events from your past: \nI was - created and nothing new has happened."}, {"role": "user", "content": "Determine - which next command to use, and respond using the format specified above:"}, - {"role": "assistant", "content": "{\n \"thoughts\": {\n \"text\": - \"I should start by reading the instructions_1.txt file using the read_file - command.\",\n \"reasoning\": \"The first step is to understand the instructions - and what is required of me.\",\n \"plan\": \"- Use the read_file command - to read the instructions_1.txt file\\n- Analyze the instructions and determine - the next steps\",\n \"criticism\": \"I need to ensure that I understand - the instructions fully before proceeding.\",\n \"speak\": \"I will start - by reading the instructions file.\"\n },\n \"command\": {\n \"name\": - \"read_file\",\n \"args\": {\n \"filename\": \"instructions_1.txt\"\n }\n }\n}"}, - {"role": "system", "content": "Command read_file returned: This task_id is 2314\nRead - the file instructions_2.txt"}, {"role": "user", "content": "Determine which - next command to use, and respond using the format specified above:"}], "temperature": - 0, "max_tokens": 0}' + body: '{"input": ["2314"], "model": "text-embedding-ada-002", "encoding_format": + "base64"}' headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE Accept: - '*/*' Accept-Encoding: @@ -2040,139 +2308,177 @@ interactions: Connection: - keep-alive Content-Length: - - '4445' + - '83' Content-Type: - application/json method: POST - uri: https://api.openai.com/v1/chat/completions + uri: https://api.openai.com/v1/embeddings response: body: string: !!binary | - H4sIAAAAAAAAA4ySQY/aMBCF7/0V1pwTBKGUJbeqe2lFb7tS1aZCxpkkZu2xZU+0sCj/vQqBZRuq - ttc3z2++Gc8RdAk5qEayst6kyy87mX27ty9fH+Rj+2m9X9x/r3drLg9eryEBt92h4vOLiXLWG2Tt - CBJQASVjCfnsw9371Xy5XN0lYF2JBnKoPafzySLlNmxdOp1PZ5BAG2WNkB/BB2c9b9g9IUXIV8ss - gWv2qz5bTBNgx9JcpVmWdQmoxmmFEfIfR7AYL7nBGYQcZIw6siTuKR0xUj/BsSAhhCiAG9fWDccC - cnEWzwXccy8W8FkQYinYiYCyFNyg0BQ5tKoHjJtswnsWlTYo2qipPjl66+akKWetpHJSQPI2P6CM - jjTVQ5OHv6T22FJTFFUbuMHwm1FwI1lcGStnjHsed/NG0tAoFY8R/4z4PyMWBaXiI0lzeMEbn+hD - SmQMVtNQJtyziIw+johU0KyVjna8ZKTYBryM1VKJof+/WypRtcYcxBYrF1D44BRiqakezx49yqdL - l2dtzL//kU4bHEK65HIr5zXdnApJi0P+60pHCDLU4wsbCr33+vwW5wJxAjnzFNRBl0ClScdmM9wR - 5BDZeUhAU4l7yKfdz+7dLwAAAP//AwB9LqMb4QMAAA== + H4sIAAAAAAAAA1R6Ww+6vLrn/foUb95bZkUEoWXdISAgYIuAiJPJBBQRBJFDC3Rnf/cJ/lf27H1D + wiFA2qe/09P/+Mdff/3dZlV+H//+119/1+Uw/v2/1muPdEz//tdf//sff/3111//8Tv+jyfzJssf + j/JT/B7/3Sw/j3z++19/8f915f8/9K+//lYvkklkzb5UzFTOCbzqRUaTWKrBdHHrHTTpktHDuaBg + DqJvDNNqBuhl3zt9KWvcgsEcFWpfm4fbJU/JBK39ivFxHopqkV1Xhjz091g9H47Z0ge3HKKvoWJv + c9H6PCkkAYLv00Obb3HMaCCFLRTQ+UVP9IgqXscSgvR1/5KvUD1D0t6GAHLWNqD68nT6Ob1AGTYt + 
D6jjRFw/VfvYgNhBAhH8qGCTGlet3MXSHW0un7dOjGGKlcjcaFijVVi158M3kTdmqdA9+Oq60MTH + Fu6+nI+Uqthm37epcgo9m1ck5V2qkzILIPT65o348gbcIdt0pnSExMTO6017JtxTGTZJVOFTf+j7 + 5UDOOTSWzKTOo/CqubY6E/SXj0XuT9HSRfl6fMOwBVuMUrRn8+EVqoqcujrhU9fvx1sUTjBmAabO + eKwZwcxq4aWZK3o4oDJjjn6BMLSzB3ZGQdGHs1k6cDcAhk1PuegL/kg8uKudhPGNOvqi6D6B3Rsd + 8D46VBlxBFGAgVno9IFHvprU2PXBJc7O1LoqBCwbk0VwnAuNqtNLz6bM2XDQIrlJT/LyYXNvhhNU + ee9JSmB/AbGXKIcXPk6wWvAffaplF0GplS2sX0+JPsz9lMLL2/Ao0jgf9NPDFqBxTmLElmqsZuLE + E9SDd03xOl8L795yWHg+ojhNn4C2k7jAU+djqm18ytg6flIFZxk7I+4ykr7vOXyndI/NFnJg8o2i + VXyXctT6DF4v7JmWwu/FsbGB3gkgTdxpsvPwcuzsn69wOoQhD4t7mWJL9SijrpET4MvnCm3G0gFz + ecomGbaOSLiA+i57zfUA05fxJjCbtWp7KpsGWvMM0cw75569ml0g4TbQsS58/J6ZNtrBxT/tsfa5 + eW4r1/4CB5MqSJ7PPZgXkBD5uDlfsWdTmM36Y0phmHy/SAxFJ5xlZVSBxt8CFC0an01zv0vhOJca + tYGdVUt7G3x41csMe1Gy7ZfhEg7wqeoUTcOjyIZ58Bf4PKEzxVs+CVnXnSK4GySGdY7d+qkTWLqJ + DdJRlEdmNg8m78BbRDns8txDZ2MV7KD9FG0iJMUXzIKoOopyPH7I0WFxyMBNN6QkgB32NuHLZV13 + iKH/lc6IlyqrX7I35EH02Sz4V79TJbRvGBvChHZPy89IUNo2vFScSQ+n5MtetfmNoQdNSLF0qvsl + 6T8BZE9HXOv9oVPaXGzoi3cTB8BxM1JHtgM/17TAlvjo2Rj7yQSPyP/QvRHX/aLCeAAOd7DxsTut + 9W1lA1QFIKAb8pt+Opud86f+7ox0jHhkCGBjew/qvNqgnxtbu8NWNAA9HMCtmsRey+HC+Sd6JFch + IyseymInh6h4c6SfwuM7hcc8Halqgz1jn/cXwVr+ttj5Lqk+bMpvBB6chqilepjxlqsGEA3uF8UF + 4UPmHp53cBqtASnXwAFf4uQLOFskQCx9V2wOIpuD6znZfHMLvF8ctKHDYUTAXj32Az5BDiApD0jF + OYeQseoUQeVjEOypdcTGIiJvQBZgIJ0T79X8PrQN5BIrx6cr+rBlSkUBqk2WU5XvOzYWFxoD94k5 + sp1rzv12xtLCU+ocKX4BphP8ud3h9+Ij+qDWSR/fPpfIY9UAaka5HdbRtW/AwfcsGgrfRZ/9+70A + D0W28WF934oXHhyidIfmnS/oLJ1SB14jlRFhH8ghtTfpDlwaVpF4H6TZvN1AE0gZ9n7rSZ/NMpug + 2txy6tQdr9NvO6z88abY2WZhxpaiz0F9DBqyPfoVmK8PiwAS2AU1xV1RjW5hDHAKaE3tpin1gZ/5 + nTQVfkrtD47DQflCG/QHg+JTr3cuayrblHXNPOGVP9yJUZbCx7ho9HC+D2BMwkyDcV1HeB/pLzak + xw2S3k5Myc5yWn3yIkuDQibERL74aiUizmig82gmuvdKUn10vS/BOYknIo/uJ5wl6MoQXpcazR9e + BSwfjg1sY3QmLyMy9HknOwkUUPjCh2NQZ+P3hDy5vzz22Ir2gc6O+/sdeJnh4XMiPPpl68lvWI6P + mTR+UrEWzJ9YPnVlhvFtbKvx0OMBAjuKsf3c94B23wFCXr3x2HDuARPqi7qDpfexsG4VGViay8uH + Njbf1ARc7dIQPmTAnjcdR2fFdAfX2uXyijdUA06fTQNt3z98o0bQArbM56qEWXAtkCJPZkZd+dhA + XUMh1kR672empB6EVnCi1jMv3bnTpR3oG1yjeeVv8SoMSP7NF97vmmw+h7YHz5aQ4PU8XCT72MLg + k45o8Q5JyB7biww/n27AqCknNq71D2yl1DH+5hbjW/5WQgFuV/6+Hys2f58JKDu5wOrtss2G5AA9 + uLsNBj1dwdNltLk4kCejh7aX9sJmL05UYJFIJRJCtf6JIOeDsdpaZIOckC0/vLPI3cR219V6cdkn + PKy7o49+fDqCDXIk9I08ejiIcT8fP88CADuOCeu1NJxSpX0DpkQOPQZB6zZ9GHjwW7fFem7r80be + vuWd4m4IzLZ1P/Mci2H3jl2Mg+AGaPlJWjAvoUydHV9U7LwDAfzsbj41i0PhsoeQt/AkyyPZrPxL + TtTrgAAbmYjZXa2E/FmVyr7GmLrnJKuWkMQEPDg7QhwZdlk7zp0J36lord+TABH9bQO+grZQi12t + bPiNL3/iS4wl9+12zeuZQ6bEDkXo07nzyNk2VL79hjyunhkuzet5hzx5qmTYhHt92LyK5od31D3f + 82q53+2dchF1ivebsdEH7qUiyF38PYKGqlfzV5McuMWnN9VsXQDfsNjsgNgHNXYj1mXTZhRtqDRC + iaDFvdgEW5aCxgE8WnTW9VOmljtYffMDUYRWDdt0sycgCY0JH0nIwj/1D+x3SaRyX7NpkLkEvlPl + RNVDGfWsHuAi4V16xIdTbGdzeYI2PKWajuR4LwPq7GwTyrediKQg7vUFNT6BH0tF+BSKUCdz4O7A + j+/VHT2xZQ6GAEqOoqLpUDB9yiVfhYa+fZH+dZTZdNcMFQZme6SW2++zaVPaHNxHio61Bapg+xuP + x7RZKFp4Lpv3dfUH/6m7u/VsEU3VgYrpGfTCc33GwM01YVI9NQQGMruThqYc4m4b0VXv95NrJSZ8 + g61IVVvRQ9GVvh5gaKmxu+832aoP73DUjQr7t8s2pFdx4GBcaTq1nmWtL49W5GGOkgI7t7h2V/3e + QRvHE4HV5qQzrDYFZJRd0GBTGE6r/oak7HZIuLcUNCGJB/DKmivZjM0JTHpKWjifMx+BREJg5pWl + gflmLqnztYxwjhIdwsekLNiWs3e4OJ0qK3CX6URMrLiaboqN4EE19mSbKKo+D9a9hGHSfzEe30M4 + otvSwtjgJ8Jy0mcL76XeT0/RtZ7C+RZBB07TshBwipJqWsZbAZOAf1OtwkZPvmmtwkZ63LCV3YuK + vbtUhtG2Hamrcq07PYEkg3V8iHwBcTgzi8Zgi/Gb4ji+A+YBJwZdPMdU3c04Y9JFNZW60x264r07 + RteqgWw6tkjwYy3ktyzWoP1sIOHrqWHTEesBfO/kiHDj5uXSJ6lyoAekp/v3U2BzHhgEoi8yMVYg + l804zVQYXrgAnbChhlN63HiAPzUxPcYpWvHmgWD3zs9Ynao3W4xhF/30DKGNftOX/XJsoQCpiqZH + 
JoEW1JIDdfa5EKYnW7DIzor/z1KjuE4NtmCGW+nUBZjuN9SqZlEXbfjgVERN9WNUk49jA4q988SW + S9/h8IGjAWWwH1FZnU19tNN1/gq+pqjrYrB059kE73RzQozcGr094r6FkqSesLvOz6w/Eg0Gx3rA + xmJswXy87wm8VNCktpwZ2ZSpWgGGqOTQ+Gmz7Of/YBJwHdZNuu8Hy9trUAb6iD3baKpB7J1cWv0s + GeXlA4YCyi3kT8MdW0Z0YfxmEBLI5yYh5QaRbNUjA9RZfaGmn+hgERA0YJmmNt77ezVb/bAnq0Lf + oSh1/Wq8oz2BwuC1NInFd8j6QlKhcrsxZNFzCRZysm2w6gNqb7y8mu+oNCHXaPXIkX3Z01y8JpDn + xABVSBn0SX5PKSxBsGA73mqgm9KNAFb/RsBjHwE2lIEDu4/bIvn1xj298xcbnu7bgiih+WLjyws1 + eOXn61rvu35uvVMO501+J8sGkfAPnqmCJGBtxVO6iZYCnu5jilHw7Sty/BYljHYnjsTt+esuyXM2 + IZ97PLYXedaZerx48Oe/RC9XwnbFf+XVCxW1BS7Sp4deO3C/7BwaamPhklsZcNB4yYTMqrILGUxd + HuSQO+AD0p/VVCgggfNe44jw3mWMAHcI5GhbfrChoTZjcilOUGcPTK0QnMLx5cotRF9TJdP+Pumt + GvcdWLjgRJancA2HeYe43X0TVdiWbyWbAqvbQaqSA/aM/pYthzEhsL/UFrb1OQULqqcYzjSKMW70 + mzsD7pT86hst18YNp5SzOSB6ikcPDz4Hc22VJux4ocJIwCed+Xsa/PQLtdyxZMP1kPFg3ocONqO7 + yeanPaU/PsAmXyf68OO3VuC22DF90LerHoNK3MT0x88LRa9AaWPDxc6OV3uGt5L605Or39DBUlyn + CJytxsNeX+7B1nl0AuxicKfOiU0hUaeigF6uU7K5fOt+kOtkAvXRLigiUxay+dkReNunRwIeepyN + 7T7RZC3sCOKlj9MPbAoX+G0vIplGM6rmsBB3cPMefIq1V1rN/QGkYN6rHNXYJ65GqukI8CSWyfTm + UEU19eUrxtn3aebPNhjG6RXDJ1eHZLPizzQC0QHfi6ZhzVIC/U0WLYfD9lxSw8mjbFar0gCcEMzU + ro5nxvhk58uHQMqR8mmzcD6H7VsOGy/HCH0cty4unximcy5QM0rrkOyCeIDG4sg//NaHJ7iV8BY9 + FsKsAoBhkax/87l5HwLWhschBTC9CNjafMJwzs/vWFn1MtpKdgDYo1eFn3+hasFb+gj4hPzJnwS1 + rlzi9KUKkXQPqAXqM2DnHfNh2RWIOtKDB6P28QZ4OUwMD+JyAov/TTlZ9k4QHx48x+hvPa3/j61Q + 7LLx5S4dPMK3ik+rvpntJl7gohqA7ICRVINnSx7YeuWJbBwDZ/xCgkB55W5Lf/kGBUlJJJOmRzKv + fmcygsyQm6Sh2FD6UzXSfSfA+3nv4v2gOjrbb4o76NmUUkOqv+6Yee0bbraoxersQ3dZwlGV+yqx + 6a/eF307+HJ9dAokcUbQs2HEb6A254Rstvwum3rpEkCqhzHeg7pw52P2jeHka4ga8cCx9yGKGvjT + B1vJicLpaVoq7KvUxuotZKwTMeSBqxwPSJEXC9C+Nw2wvS83bFcuC+frHUGZ6rc71ld8YRe2fyuH + ormRtwY21TDvTAi0g8Ko5VIjXF5JQkCZBhoasaFm5P4RzX/P51QZTOwHxINL0z9QmZFzOBt5soDg + E3xQ+Tyq+pydvjHsG+uJuGAkPUvGnQ/JvrWp7khHfc1nbOD1cYLghhNArb6dHDqPe0dmNdAqUSo4 + HxbPq4iWbKBsYRfkAEkKbgRmdK8v0lFLlJ51Edpdy9RlN3pL4WZrlhhV+lNn0fL14Fi9AXUPJnJX + v9gAIa9GqsuGyn56DMD0KiARvBBbjt+igFe9e9FV34bb8hQuUHLe5I+f4/MD8kHq88mKRzu3o7eN + CdOCy6kmGg1gF26zAO/UPyjeWi998o7nNzyqWwmrJ6L0wzDZHASSmGN3cl/h4PieCgKz1OlaL3pT + 3cMBMCT31ITGl43BZU8Adwn2KELFBcxZkk9w9QfU2nwjMJceS6Evuw8CN1fDne1U9cBdXTYEQk3W + p5Yjb3ARQ0At1SHZ8s3qBaqVcMKHh2X1fCXYSPrWZYq9jO5dQQqfOZyKpKA24b0e/Phj1fdEFG8t + WAzxkMCPKdvY7rNnyPrnzofZnmqo32uuuwBvXP3i21vzoheYr3eTg2s9IbrfmRnrn1MA5WNRY8tO + V/9nC3fQSPiE3SKv3fGXD/7ynt0Pv+xNIMPWZiWKL+6zJz99teI7WjIyZ/NOLqff+395Y9hPbd6C + 8060Ebcc6n4aobiTVrykR4cJ4RynowDMySEY10kVsoM5TtAdJrbyrapvRbEtYK6YLgL8+cD4cS5N + pcpmh2wfEwCLvn0H8tZLLOzR1gzFpejvEFrFjTBOvPdLm3otkI/LTLESdu4fPdaZPvdHz675ZgA2 + 12NGPUOtquGXl6mHTqb7XtDCbbv3NegO3UgN5Tu4g9LdSyg5GxWJdwx76oupLe/uR4yD1f9PlmsH + 0IguEj0iR9P5964zoZDP0p98hvCuxEuNxZ0ReByujH5bnYd3FRyIxGlnl0x+t8BAlA7Ytuymn356 + 8sk9TljdjUM2Rlfdg/TsPcgmjiEYY99f4NtpALU+E+1pMyoFPEywx9Z1M2WEnNoSQMuxMMp9PVzH + s4QJ26So6rU0W9b8/6f/0Riae7ZofF/89Bo96be6J2N8v4M1fyVzcVuyNlO1EupadKPu0d9WC7uY + Dky0yKP2Ux8zfuoTDWbP1kb1sGgh7/ieBkDC8dhQvp675osQhhdiUrMNu2qS6chLXDtCasjoserN + ea03kuNjEB2raVOqHCgfuCbc2s+YItU3lOfJO+PceRqu+BzLAZpnJaO//H3xu5CA23n3oue2kti0 + kNSHv7xdaMOuHyu87YAv76/YA7slY+XS5PC4cc+IK5NvttjLPQevU++jZeHKbEmKWy6zZ6avLBRm + BGq7GI6HFyHbW3cNa1TRGPaVryEB2h6bUpvLoRZOPBJVh4RExLwAf/jIuQiywT4dl19eSw2iBmy6 + 73UfkHLh/vQvfnoOOCcjWdf7UJH6+YBQTMs91k327v/kDfWtXBCAD8QoK70G/PK0c0TEfmJEbmF/ + oSfqRXGkT1ddJlAPmpoiF90Z26u2DN89vtFfnrek7+gOo1rF2FR7q1r2S6f+yfcFP1LZcmTbN3iq + IaD7wx5kw/q/inLL2E9f96PRmRyU06OODc0zAWsq1VRMmhzxMW9E/ac/4LdOo59/zcTXtOZ95KH+ + +KSfTk4QQyk7eUhc/daaryE4asIVbRzV++FRDsuuRNg51IcV78YF3Jb+QM2kIdnYm9kED6HbIcWq + 
Rp1Obd6Bh39C9Kc3xSk/vmFkWTbiz4qps/l7TcA0FQ09PKaMjV5Rm/ASv65/8mFy/Y4L3N3DCyGo + +uhsc0llqIWyTC0qX91l8stJofOwxZq7YdVUabEA8a7EKLmWqc4eQtxB2cOQvHaBWNG8/yIgZMQn + VMEfffzudysfei1224H+O+8fQ+9L13zJnYtjsgCefMyfv+/X+/cfH5LdtVH0eTHJW86CU0rVYtfp + XR2ptiJk8E6NMhSqPtnVa/+h70iw5lGTnn1S8Ovv/fhr9orRBGurDQk7tQF/8uXP8fMkfdv07sId + Uu+nb7CrcvaaR7c5DJv4TfdrHix2XT38+IbiRpf0pcwCDvYHk+I1L+uXkOQDeKp7Sk+NUverP+3k + J7lYaOJ75w8/yut6XP3xrlr9SQqcR/zGqEw7QBvykH9+4A9fz4VzLmFGJ0Qv8FXoM7M+EfzWXYGN + HLNqQfUuljB4LBgtfB6OC0gGSF/5F9FzXfYL/pxzRRxFAf+Zv/zZF0BpuBbvi6sPSB8ub3DMO4ns + wvv2l/fJ4CxZEZq+2rmaQD3bMDC7I8bKue1ZQ4QAGqXKUz1tG0DLheSQhoBHonizwXyKd4nS2nOJ + MfIuGeFjcwf97+xiC29tNgvimj+akYO1TTBn7NEVuXK+cjmZTvsmo1L8RQBa/ok+VFNx25Mjk18e + huZ+p7NFnVRHnjTLItvxBfQp25QGbJumwpbhL2zhzJSDT3La031kvdmMknn343cyjc61F99dIEPo + WB2CFlSzrV3HGuSS0xvr5eSBlQ86uOpV+lzXH73wWiTVnetTvOerkGGVlHCzjQj+9RPmW8Tb8Lqs + /XdRmMAfPf73b1fAf/7jr7/+z2+HQdM+8nrdGDDm8/jP/9oq8M/0kf6T54V/UuHPTgQypEX+97/+ + vQnh72/fNt/x/47tO/8Mf//rL+HPboO/x3ZM6/92+R/rt/7zH/8PAAD//wMAK+GH5OEgAAA= headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7cc62859ed003ac7-DFW + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 Cache-Control: - - no-cache, must-revalidate - Connection: - - keep-alive + - private + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfcbbd7fb02274-ORD Content-Encoding: - gzip Content-Type: - application/json Date: - - Wed, 24 May 2023 14:17:03 GMT - Server: - - cloudflare - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400, h3-29=":443"; ma=86400 - openai-model: - - gpt-3.5-turbo-0301 - openai-organization: + - Thu, 25 May 2023 18:21:04 GMT + Function-Execution-Id: + - pbme9l6q6fyb + Openai-Organization: - significant-gravitas - openai-processing-ms: - - '24655' - openai-version: + Openai-Processing-Ms: + - '1219' + Openai-Version: - '2020-10-01' - strict-transport-security: + Server: + - Google Frontend + Strict-Transport-Security: - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3500' - x-ratelimit-limit-tokens: - - '90000' - x-ratelimit-remaining-requests: - - '3499' - x-ratelimit-remaining-tokens: - - '86475' - x-ratelimit-reset-requests: - - 17ms - x-ratelimit-reset-tokens: - - 2.35s - x-request-id: - - c0da07f2916746d8735e7bdcbfde425a + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - 4decc7240f714a1a6b1e4c57136ffc0f + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: + - '3000' + X-Ratelimit-Remaining-Requests: + - '2999' + X-Ratelimit-Reset-Requests: + - 20ms + X-Request-Id: + - b844d587eac449c8f5ef8515c53c2549 status: code: 200 message: OK - request: - body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "system", "content": "You - are Follow-Instructions-GPT, an AI designed to read the instructions_1.txt file - using the read_file method and follow the instructions in the file.\nYour decisions - must always be made independently without seeking user assistance. Play to your - strengths as an LLM and pursue simple strategies with no legal complications.\nThe - OS you are running on is: Ubuntu 22.04.2 LTS\n\nGOALS:\n\n1. Use the command - read_file to read the instructions_1.txt file\n2. Follow the instructions in - the instructions_1.txt file\n\n\nConstraints:\n1. ~4000 word limit for short - term memory. Your short term memory is short, so immediately save important - information to files.\n2. 
If you are unsure how you previously did something - or want to recall past events, thinking about similar events will help you remember.\n3. - No user assistance\n4. Exclusively use the commands listed below e.g. command_name\n\nCommands:\n1. - append_to_file: Append to file, args: \"filename\": \"\", \"text\": - \"\"\n2. delete_file: Delete file, args: \"filename\": \"\"\n3. - list_files: List Files in Directory, args: \"directory\": \"\"\n4. - read_file: Read a file, args: \"filename\": \"\"\n5. write_to_file: - Write to file, args: \"filename\": \"\", \"text\": \"\"\n6. - delete_agent: Delete GPT Agent, args: \"key\": \"\"\n7. get_hyperlinks: - Get hyperlinks, args: \"url\": \"\"\n8. get_text_summary: Get text summary, - args: \"url\": \"\", \"question\": \"\"\n9. list_agents: List - GPT Agents, args: () -> str\n10. message_agent: Message GPT Agent, args: \"key\": - \"\", \"message\": \"\"\n11. start_agent: Start GPT Agent, args: - \"name\": \"\", \"task\": \"\", \"prompt\": \"\"\n12. - task_complete: Task Complete (Shutdown), args: \"reason\": \"\"\n\nResources:\n1. - Internet access for searches and information gathering.\n2. Long Term memory - management.\n3. GPT-3.5 powered Agents for delegation of simple tasks.\n4. File - output.\n\nPerformance Evaluation:\n1. Continuously review and analyze your - actions to ensure you are performing to the best of your abilities.\n2. Constructively - self-criticize your big-picture behavior constantly.\n3. Reflect on past decisions - and strategies to refine your approach.\n4. Every command has a cost, so be - smart and efficient. Aim to complete tasks in the least number of steps.\n5. - Write all code to a file.\n\nYou should only respond in JSON format as described - below \nResponse Format: \n{\n \"thoughts\": {\n \"text\": \"thought\",\n \"reasoning\": - \"reasoning\",\n \"plan\": \"- short bulleted\\n- list that conveys\\n- - long-term plan\",\n \"criticism\": \"constructive self-criticism\",\n \"speak\": - \"thoughts summary to say to user\"\n },\n \"command\": {\n \"name\": - \"command name\",\n \"args\": {\n \"arg name\": \"value\"\n }\n }\n} - \nEnsure the response can be parsed by Python json.loads"}, {"role": "system", - "content": "The current time and date is Tue Jan 1 00:00:00 2000"}, {"role": - "system", "content": "This reminds you of these events from your past: \nAs - a system, I recall that I was created and there have been no new developments - to report."}, {"role": "user", "content": "Determine which next command to use, - and respond using the format specified above:"}, {"role": "assistant", "content": - "{\n \"thoughts\": {\n \"text\": \"I should start by reading the instructions_1.txt - file using the read_file command.\",\n \"reasoning\": \"The first step - is to understand the instructions and what is required of me.\",\n \"plan\": - \"- Use the read_file command to read the instructions_1.txt file\\n- Analyze - the instructions and determine the next steps\",\n \"criticism\": \"I - need to ensure that I understand the instructions fully before proceeding.\",\n \"speak\": - \"I will start by reading the instructions file.\"\n },\n \"command\": - {\n \"name\": \"read_file\",\n \"args\": {\n \"filename\": - \"instructions_1.txt\"\n }\n }\n}"}, {"role": "system", "content": - "Command read_file returned: This task_id is 2314\nRead the file instructions_2.txt"}, - {"role": "user", "content": "Determine which next command to use, and respond - using the format specified above:"}, {"role": "assistant", "content": "{\n 
\"thoughts\": - {\n \"text\": \"I need to read the instructions_2.txt file using the - read_file command.\",\n \"reasoning\": \"The instructions_2.txt file - contains further instructions that I need to follow.\",\n \"plan\": \"- - Use the read_file command to read the instructions_2.txt file\\n- Analyze the - instructions and determine the next steps\",\n \"criticism\": \"I need - to ensure that I understand the instructions fully before proceeding.\",\n \"speak\": - \"I will read the instructions_2.txt file now.\"\n },\n \"command\": {\n \"name\": - \"read_file\",\n \"args\": {\n \"filename\": \"instructions_2.txt\"\n }\n }\n}"}, - {"role": "system", "content": "Command read_file returned: Read the file instructions_3.txt"}, - {"role": "user", "content": "Determine which next command to use, and respond - using the format specified above:"}], "temperature": 0, "max_tokens": 0}' + body: '{"input": "a series of four numbers, \"2314\".", "model": "text-embedding-ada-002", + "encoding_format": "base64"}' headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE Accept: - '*/*' Accept-Encoding: @@ -2180,382 +2486,167 @@ interactions: Connection: - keep-alive Content-Length: - - '5444' + - '112' Content-Type: - application/json method: POST - uri: https://api.openai.com/v1/chat/completions + uri: https://api.openai.com/v1/embeddings response: body: string: !!binary | - H4sIAAAAAAAAA4ySTY/TQAyG7/wKy+ekagmbltxQERIckFaCw4qgajpxkulOPNGMw3ap8t9RmoZC - ioCr/fr1448TmgIz1LUS3bQ2Xn84qIf03fY+TdS37ce39+WxDiQHnT5sthih2x9Iy6VioV3TWhLj - GCPUnpRQgdkq3bx6naw3yTLCxhVkMcOqlThZ3MXS+b2Ll8lyhRF2QVWE2Qlb75pWduIeiQNmq9U6 - jfBqfk3cLSMUJ8peQ8nLtI9Q185oCph9OWFDYTL2zhJmqEIwQRTLgOlYiIcRTjkDAOQoteuqWkKO - GVyClwQdZQjm+B6YqABx4EkVIDWB4SC+0wNg2CULOQqUxhJ0wXB1VgzS3TmmXdMoLhY5Rr/6e1LB - seFqbPLpL64DtjIcoOy81OR/E4LUSuDKWDpr3dO8W2sVj41i+Bzoz4j/M2KecwxvWNnn73Sjg8Gk - ICHfGB7TTEeBINSGGZH2Row2oZkvmTh0nqaxOi7ID/e7pYKys/YZ9lQ6T9B6p4kKw9V89tCSepy6 - PBlr/31HPm9wNOmj6Vcua7p5FVYNjf4/VzpDUL6af9iYGLTX8lucCeIMcuHJucc+wtKwCfVu/CPM - MIhrMULDBR0xW/Zf+xc/AAAA//8DABSF0HDiAwAA + H4sIAAAAAAAAA1RWS9OyvJadn1/x1Du1q0REsvlm3EUuiYqi9AgQLyCiQBLI+fOnfN7q7upJBjsr + 2cm+rL3+/a+fnz9tXpXF8Oefnz/PRz/8+a+v7ZIN2Z9/fv77Xz8/Pz///l3/H7Js8vJyebxuv/Df + zcfrUo5//vmR/tfyf6B/fv7IU2dTuTYqNATEOMNlxbbMTPtXN3k056h3SsTCW+oh2TqOmaanU0M7 + 114ILntKgd6aNWe6FrTxoCyzBKYxdjG3tTp/r1qkwpieFGK+cguJs+RR4On7iMfDqsypBPCAz25z + JB6So2CMcxujyagrKnjTWJPxOM+guRQti4JLLui58BOg2ZawgwVWwONdvUVamtgUHZmRL464buHZ + PJYY8t6o5Jw8Jhi0s0eV4eHk7BJ2PuyHw56OknQTQl5JKtz7bM0MMM1Kiq7pR+WJRXF9adr8U2TS + AwJ4vfByZ7iWmAoMaN3mEZ4eKe5G7dW48PSOSzyspDsSshdn0PDYoZd0dc0nMn/44LzNNdP3/a2a + jK6T0Z3SBVl7NcTTTNkW8JAWBXEedpVTOek5JE//wHR3nef8ExxbZA6xQanMejFKBrLRRsN3Zu2e + Wt7ONEMFv/ZiYvqJn0+fo9nC931k3T1r1G9fSQ+X0jeJ8947Fk2jSkWF1h6wCGaQt3bn+SjNLwFe + OVsUi+VZoRAuP5Rq6fsR0Cc1ONxxtKYLv/lU75MZuRCmV8o8W0Wi/z3//HgliQwZBUw7lBhmqf4i + STmnsYCNXWqfmZGTIG/m+bBcxOVqNc9ndMp2ccDXW9OHGSs/WIqXgRAUzw4Qz5c75u1nTTzx9WWL + NvOK0NlVnnfjzqcqAAps5qh9bC0Pn/l59XphmVhvK7GmzdNTwQxFTOzhPgajz6Ma0hER5tgf1I1Y + K0MEipwQl5FnPuzrk412R1UQJ5PvFSdFCHAhwRJPxNrlk6uiBET0Lom/zy85O25QCNSSQrrni71F + bfauwfkYgGVldhdC9vIMRiYQXryXvBOY7RS4H94B8c2FifiB33TtubUlYgUziD+uig5otREuXZbc + zIV0UGdw0p4tM3fkgnh2OITI2e0C4taGJcYm5DcYCjWjo6/W3cQm3kBhuYC56jysv/e7ZTsnTnEx + O2nxhhYMvT8TPU/XldBDzVMXrSIz50h0MXbPTEH26aaRYOMXgdDG2J0788Jn5Yb5FQf7MkOnLXsQ + P1FelmCDakMZSU+2+Vy2Oa/jZQm2PEy0bmKS01W1OKtuIFXM+PY3N87VDEqjiIj+yZJunBllsjp/ + 
rox4oDw6PkrHA8zNeI3ZTEmrUdGfWxQ9wvW3Xq7xlGRBCSyqU7azUz8QYe35cMluORV07ljiqls1 + aoTaE5e8d4FII2UGYjdJJAqvu3jKXoUN76tn4aKYSfmYk0u/0kzVoOMR953gWy2EeXEYSbBQ74G4 + pWhSu3oeMVsrVmLY8tkHBhJ5+BdP41CVEFGsLVWX7T6eoqwN1fv+cKHaS3ujL6hFbBW86F1143yQ + tukDXbTDkpmPlFZDH70bsETekkDpVtZkz3QdbRepgzmuHUsGedeg/fGVYO1ekFjctq0CQ7oXBF+b + R9yj+15Cs9R8kXDAQd6/2/0DMWqv2Wm9f3f0LkoFynr/pov3/hlM7OrW0Ailx2ilv+KGzD8ejIaq + /62v/ptfeAQNkGC2T8WIP7yEk9OaxFF7EdCwEq42PzOfGdbpUAk1TD2YL547OveaWnQevUnArAej + 81mMLRHEHwnW8d7CSxxcgglL50wl6LNk+s3Fon/NwhkC7Tox5+lvqvG5JDVg4kbsrz/JJQVEObeI + 7ll+xZMhT2CTnR0WXQ5C0GnPdFj3poSnV16hsaPPCW6LIiG+YbCY4Rsq4EHCiq35clkxvv4ksBc8 + IYYk3RDVvZYDK3Y3isqizTk3hQyK7WM6OnGFhuGVchCH9YOttV4NuiQ732B7WJ3Y+hTt45H6hYJ4 + NsNEVx0zEAaWdGDpbMKreUbz3nlKDXI748w2cPjOQ2JRsNqDx8JL/Kim4PxOwD08j2yTr2NrAltx + oSvvG3orr4MYD5fAhXu3xsy6LA65MM5YResNPjN/XC8F35Eh+xufmH5kNPGjoCDt+Za4p7Hvxu/8 + Q9Myyoljf/JucPA5QYGm3rBkme+YL5TYhjrcIto7i40Q294J4bIatsR00VH0s6DN4MtHxDWJZXEh + H0Lt1JYlwUoyoEG2VgD3ZL4itsT2YsEzNUT5ytoRgiaR04W4t2gvr22G36ofC8QOGPK1qZNY5yoa + uKWrv/6Zs5P6fLKu4RbNj+uKGVEH1bQOzj365U/hKXI8hu2qRiTfbIgdJbtY3EUFmn4LPwSzexdM + zmYnQ+h2e+LPZMkSrG1u6NuvzPYOYPXDK+xRZ1+vZL1Y9eI7jz2QpJ2g80TfC16pawzffie4jjfd + +Mk2Pohp/yCbfC0s3j0jGRJXO3z55mnRWzltgZHVmgSe7li8PO9AuyvTjcoufCxaX/oSsk0bEM9W + c8HbizeBE/o6sTYTtwbjESi/9UX8sWgs8R42OlwR9YixfJ67oaMDB2lR93g+U1YVvayULSJFaTLf + b3kgtpeGQ1+/OQvc1A44uyr0738UK86Q+GxDD47TYs7M+1US71FbbGFeJCNWT5ep4463pCgy+pSE + kRV0/UKQG3puXYmR8z2qJidfNkDwxPFD0aJKMDjfIDm0DVXExrV6Bc4y+pRSw9y5T60J5n0Jr3dV + M+ssrtZ4jHmo5rftjUVz9xB/+UCGoJJksj8FPerW5zkgfXGSqKQmcVdhliqqoXQzyvC0Cybda6df + PUDf8cFB4+r61mHE+zN+q+dVwG5tFgLrR0zseNkJIZBPYbmVbIZtbyP4dSNu2q++8pXPC/WPk+3D + M+5zthbDLZ6ucteAF5A3VdyNhXhSewVk3nFPgjaokQiKRQ+CFQ4J0N0Qk9FVEszPg09H85Kh3p3X + /G+8BZae+Xh0dhxUnuYEG2ZVjY9b7cJQJSXbONs85rmjcXjvygnTDft004t7HwgtoyMkltx8lNL6 + DPyUvOjlq2/5a31S0S59ZMRIUVyJq6x8QDNeHlYOz3U+SUFswldvY3Wfa/Fg+vsG7M2yo4LvToJ/ + 8wlVEF/p/cRelsA39M3PvWamixaILl++i9hq8yLe4qOj5a3dY0jEMmffeg6kZcNAXTSNx3T1M1R8 + TrsSvVzmMZLoE5qOeKSaHAx7Esa1GywSWWzRN17M3mtTIGqfPOA7/+niYmKLi2FzgLSONBJ4z23F + Xp3pA7XkkBhs6nJavLMZ5Od7hZGb2hbf07sHS66vyDWHOxrnufuAJj7P2FefWJO8ekkQFULBsR6w + WMyTi4uO3dn98v2pmooMbqr0xvGvvkHjJjJvsMOBjDXLqQJ+dxsJiqr5MOJ+jtVkxNMDslIcSTBM + z4q7Ns+0RpslzAmCJBjdMc/Ql48JodbSGondUkSG+ZNEzo51zByXLvJrP6bLm7kIxq+eBMOfTDzv + ln7Fi+swQ6vXzGV4284s/ng+amBRkzI9T1+dQM3mAIp2Hkg07XaCPyf0P/nYDn0wRdkNQzVXH1RC + i7MYC7or0YFLlFmGs7H63FlMQFmasgieKeKr/wAAAP//TJpbr4LI0obv51dM5pZMREC62HecRI7d + KIqs5MsXUDwgigLdQCf7v+/gmuzsWxM1ge6q532qLokA6uvzplyht5Z77tbREhddsYK2RiU/SOHA + G3Sf/fJu/CopPLdOTPlSUVr+8Dd3+N5vz5dP1kAzukN1anlEt1KrGN5NfgVDeQtYuud3/jxNtQhZ + LSXMCavW4lbAUjDYKiTe2TKTZX4WXTDfbY7F68ay+POiZGDJBiHhubrw5hzpIZC74hDXStT5Puol + EDPPsDbnqXdkviUUZf6J6aKXBJOctQ1Cjlziraw6iGojNpUz0j2yf4h+QM9hakN0GpXv+7Co2d8p + XG9Hg4Wc1WhcP71G9SpXJutL5VajlF46eG7Mmg5ZcmtZ5w4+XKSxIXM/akdbvDqafA5PDD9FO2Cn + aVLhgiUDL2i2LsbGayWAl8GoSd911UOkqUjd+Iis9+rUTv0i9kF9WCblGV9YTEpXItx26Zms/adf + Lam/V5CNh4AECZ+CUVCsDHR1t2eOfLzypl+2LqyX2ZWkkqsgzi7J55fnjQPuKro/Zz6kZRezXdNL + FXtWcgcyb64YxFMf9NsyVuDQU5EFmwpZY0C8XM3d4w6LafZsx354NCAyt2GmiTGfXiIRIIk/Z2LS + vVw8EFvt4D5dKVmbJQvGdHxRmOsHM33jZHXHTfCAtXv9wf2iuLd8ebUpCs+bA+a8C4PBeJk5fHkY + fXl3E2QUoADCzO2jC7op+XGhg7xjDrHrinfW4wnb5LFmm3K4F+Mo3nPUHzWVrC1LrTioNQCmiUn3 + rZ4Wv/xkB/6DuWKRIfbtL8qF9CyY+W4+bzksiJARR6jNSsRcitFcX/CUSkLVeTtP/+YpNvMhn/p7 + sgN/tT0T3dbsYhl4mogOydtn0bGfUKftUQyRQX9YeBLEhA4H1K3mvEj5Vh9Rp8baHQ3dZcTTwvvh + 47QQfSg2uk7wz3qDRq1bfZDsZzqJd3BGnByvD9QbN5m4RRsVfCenAPedobNQ80SrjyUzhF7yVUYE + ZdV2F4/fARflkzifaAj6tRdOgNSgYV5+f1X9EBaT6gJZk2D2IXRZKHsQXVrP/eKc8OZUdSAGRkJs + 
HLcB08bCAWeaDnRlvdWCf2Lbh+p9uhCP+8eCKYgPkLkQsnBpKFX/YvwJi9y0iEV+BjQd4+EJyAxN + Fn/zp9+uQsBrvSLB+Y7bKc2tE5Ai8OhoC8uK2oJuagqsRWJulqyi3dG4Aq0Mgxmqw5NeIDCBEXCZ + Lkj3Qf2NmVe064wf5r2rtPrWs99+pwTDo6U7bj7h0Q1vqo1HxWLLOovBt5qaneXkFcz+avqtn9e5 + 3k7UvwFkjWfQpVmSYPZLgOpod2T29z5w6R2DMpoHtv6BFx/EOCwRSw8Bi3atVk3ZydxDAR+FhPBo + KhrlDVZnn8SyOn8jqvjrEG7kqjNLK9x23E3aHa5R1NLSXcZoTAQSIzs67+jwzcPV6OVgpqJA0p0f + cA6L8a5dqkVIpXciFYO/tB1ganllnvVWk8/qrQvAB+1NPxf93rLO6h4gRWZAc+WzQQPVAVB7/1zw + bRRk1MeSj5XGtjVy7dBYdDtZ1JEeZRE5Ohoq6AGvOlCHoiB2PlwKFpXwRHV3Dom/iVBLvctyD/tP + 9sFPYq+rwdsZOhxfQY37bl23/C0VKjwOVs+il+bxQXTGJxBjfWHk+vaCL78g//STEme4ZUF33ogf + qPDKIfah7PhASZKhQ0Qc9q1nbcme+OvTiBNiQNPX37VAPUKoNvBffgy1l0VcfIOquUerGM3vn5mp + JLTjJ7h3gM7Jbc5n94A1SSbBN29as08crPwjgSo4HcPGi1hfn6IZt9Bha8vK22lZWyGQTCyItT3o + Ftete/ztd8S7fyprjLbuDk7r4kP3E86stnb0UJv9A9Mb7Be02y4BYrnn3/taffOsetIHzIzk1Vbj + TTnn8IonzqLC3SSi+HPZq8Kpwcx8v4Zkzi938OK0mL//007XuJBg+ao1Fk1PPZAFQzmBdJFeWKlz + D43hZJ/AErsbswczQEPGcoxm/zbzwdsal8rVBc9TjmxD29biw0xx3nsTMiO/XoJ6376vcDHcgfh3 + x0XcWRCKbgew6fS8+dZ43MSNVm9SkwQXI+Gf56Xag+uqiDnH0LCGIXQodJYfESz5t4RZAdujMsfC + L2//8vEytXNC6jQO5LQ8St/fJ9nhYlhD+9nm2uzfGNHovX19nDFGpTUhhr38lIwu38Vozi9kvT9s + gmGNgx38LJ5XYs282x+DCyDXeUSsKFBftYNZDkixXcycjJsB3+4TBc15jRldVxeDq2QDyIk/0UVf + tu1kYCrAOVVFvK9phab90jghYXU/sGjsnslYueqArm3Wsl3ntsHswwCFckMJwVHdjtmnoL/8kh0u + t4BPO3C+fIkFj/mtKnBbgYtYqlSS3Ax16ip00ZzP6AN2JpfO23cDn3r3oTIOtKBzpB8FYnxJKEkP + RjAs3/BBX56RVlVfdLa5uiI9kbbEl5spGa3DY49ei8WLuC2yiwF/2mx1UnoRK30kW938fOFu3Hb4 + Pvp6Mog/xxRmtYpXj+3Daue8DJvLpyCeebGKvkySEs3zADyOwpE/isDbg07RQL+8Nxhk6yDxYAuY + 9WVQ/frr2eczv8kHPr3teg8nfcLMWia9xdml+MA68pZUrt9LNHrFFaBxipYq4rCxxCSXXRiPlk8H + 9HgVLP/xKcqf1x07zfl02F19Cn0qrGceO7XT/Tq6qInPKR66dM3ZZaxt0O6OxIz927Dkmd9QlyVH + hrfyM6DssfO14xjFGNHjaFGv1lX4WTyuJJ/74bT1kQCGqN9ZGGxg9s2HHWyxJ80+z+O8VXYT0vLD + imwEo0mG/d17ICkpCQsOC6Hq0oZMoCnXFdmUg5n0m8BqNJ2uBjL7I+uz9Tmg10c8E6J7RfC9T9C6 + a4cdZh9Nkw2LgWM4EhzG64Rnx0BE952lE98VoRp3imHDW0iV2f8c0XiR0itkA+LE2tbnpJWzoUOn + ezIyc1E2AdeFskHtRTTJdn5/fP9UHDCOfsGs461LJl9Y2bBlwpltpFIpZONZ79GYpRaJHrLFRfl8 + cNBcL0gat892YFN6gvWi9Jk9VU4im7LZoGeSC8wYf8Jk4OVdXc15vY8Degp4a9QUXdu8/fUhH7AH + WxtSg+LJ2C+DMcoLF27krrN1tKmL4UTDHG0D2WXYvLmcPp7yHtQf7Ui5cLgn3N5cdaB6WP/Op8b3 + OJWQNYGB0fNh8UHdfBo093eiz+fnt/5THddfnkXjZ798QjMsbNylByFg5dktITYdRKLbiRVz3vS1 + 57Bd0+XiJ7em9VHZfedVdKFsDD6tqmUO6sMwiSOdX3OeNfbgBFLFSMVYwAwMJuJNKDOreL8Qo3tD + VeuPX5L1q30E/ID2HXhaeCOWNO2SjthJ81sPcayFhbh18issUPVk0c/btKbV2xVgJeQe8yWqVF1t + /HTo6G9LNvvfgmuNHsI7DlZYlajSvp+plYEYpuE/8ylX5h2M/SViRuRsrSkerU7jS8UjG69Mg+Ey + +b88itHF4Jxlx0CCaJ3qLGglqWjju5rC9/45iu0G8nwegRibC+abZ9WO6DNNyORsjT+rx63i0eFN + f/PdzuRqwgcznYAZoUF2IwuQLO4nAeRawyxaiQaa/f8elq+XRl+1YFtDjHQKVrd4MM+aJ3+tEnXQ + O6cjITHVi2HY7nbw9Z/GOql4f7ogE4Lb3aX3h86qYd/uS6hTw8Ozj0ZtPogm4KO4YMai3wejVhYh + XI5PTgfX8tuv30TSetCYbZlewm3zVK4i+bglVsmwNd60q4CCp/fAy9X0k0zOkk3osesWLMAjcDoK + ZgqP9jwwL+rrgA9mOUEPa0L0fhkEXB4+Oqzf+oYVT7KsJtVcYSSvz1ey2T637SDI0h7N5wff5Fqp + WvDCExx3ZoEhOPmttKq0DJl+WbNAn6pkPJqRDd2jHfDSylw0+/wnchZuj6eLwtF3/goXezgS73X8 + BP2POupwzu8Fs7/95RonEvJzp8FNMDyqcak0LuSxQ4hjqHtEZz+Pmif6YXh0u2AqyGf69gviGAev + mGwFTgjW15CY6dNIxCVzMUQqKBgJuxWa/FV9B0XLe6ZbiPKx7sVOxTf1w771pDNukgiVCyOZ600y + 8fEzoDlvEvfgT6i2Dt3+O9/Fsvs4FfxUGE/t67MTdFkV/eLdAby0z4ps5vnGOD4tYfVI0yednwcX + 9yJLUVjmmMz8Xg3uW02Byn6KpW2tF/x4CEF1F1LH1mOut5MJhq+tnm309Q/z/OMA2qRc92S9iiI0 + CM5JhPAWtsSa8zrPf8zu1x9BXF4Dqoo5hWwKO2Lk8Q196I+eI5Q1a7Yj1pjwkQ3CclnKAb0Py8ka + /adKwW02AzO1hcm/9UjdRq5BDurm1bZpeRRVYvcO84/TFfVz/kDu2YzZl8ckL/2UMM/zGCZpUIi7 + q0khtKwWr+Z81m4yWYC9xC1GSpcmw2RPHeLH3mOH+fwO1zyWNKQjjw7OpkgGaVa4xZptqLi0jWDa + 
GjdH++u7FfDvP/788/++GwbP5lzW82JAX4793/9dFfg7P+d/i6L0N5N+NxFol1/Lv/71zxLCX++2 + eb77/++bR/nq/vrXn8t/1g3+6ps+r//38z/mf/v3H/8BAAD//wMAz7LCz+MgAAA= headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7cc629212e6a3ac7-DFW + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 Cache-Control: - - no-cache, must-revalidate - Connection: - - keep-alive + - private + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfcbc95b7df222-ORD Content-Encoding: - gzip Content-Type: - application/json Date: - - Wed, 24 May 2023 14:17:38 GMT - Server: - - cloudflare - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400, h3-29=":443"; ma=86400 - openai-model: - - gpt-3.5-turbo-0301 - openai-organization: + - Thu, 25 May 2023 18:21:04 GMT + Function-Execution-Id: + - pbme78psn76u + Openai-Organization: - significant-gravitas - openai-processing-ms: - - '27552' - openai-version: + Openai-Processing-Ms: + - '70' + Openai-Version: - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3500' - x-ratelimit-limit-tokens: - - '90000' - x-ratelimit-remaining-requests: - - '3499' - x-ratelimit-remaining-tokens: - - '86463' - x-ratelimit-reset-requests: - - 17ms - x-ratelimit-reset-tokens: - - 2.358s - x-request-id: - - 47e4bf23b06275bef6330450764eab29 - status: - code: 200 - message: OK -- request: - body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "system", "content": "You - are Follow-Instructions-GPT, an AI designed to read the instructions_1.txt file - using the read_file method and follow the instructions in the file.\nYour decisions - must always be made independently without seeking user assistance. Play to your - strengths as an LLM and pursue simple strategies with no legal complications.\nThe - OS you are running on is: Ubuntu 22.04.2 LTS\n\nGOALS:\n\n1. Use the command - read_file to read the instructions_1.txt file\n2. Follow the instructions in - the instructions_1.txt file\n\n\nConstraints:\n1. ~4000 word limit for short - term memory. Your short term memory is short, so immediately save important - information to files.\n2. If you are unsure how you previously did something - or want to recall past events, thinking about similar events will help you remember.\n3. - No user assistance\n4. Exclusively use the commands listed below e.g. command_name\n\nCommands:\n1. - append_to_file: Append to file, args: \"filename\": \"\", \"text\": - \"\"\n2. delete_file: Delete file, args: \"filename\": \"\"\n3. - list_files: List Files in Directory, args: \"directory\": \"\"\n4. - read_file: Read a file, args: \"filename\": \"\"\n5. write_to_file: - Write to file, args: \"filename\": \"\", \"text\": \"\"\n6. - delete_agent: Delete GPT Agent, args: \"key\": \"\"\n7. get_hyperlinks: - Get hyperlinks, args: \"url\": \"\"\n8. get_text_summary: Get text summary, - args: \"url\": \"\", \"question\": \"\"\n9. list_agents: List - GPT Agents, args: () -> str\n10. message_agent: Message GPT Agent, args: \"key\": - \"\", \"message\": \"\"\n11. start_agent: Start GPT Agent, args: - \"name\": \"\", \"task\": \"\", \"prompt\": \"\"\n12. - task_complete: Task Complete (Shutdown), args: \"reason\": \"\"\n\nResources:\n1. - Internet access for searches and information gathering.\n2. Long Term memory - management.\n3. GPT-3.5 powered Agents for delegation of simple tasks.\n4. File - output.\n\nPerformance Evaluation:\n1. 
Continuously review and analyze your - actions to ensure you are performing to the best of your abilities.\n2. Constructively - self-criticize your big-picture behavior constantly.\n3. Reflect on past decisions - and strategies to refine your approach.\n4. Every command has a cost, so be - smart and efficient. Aim to complete tasks in the least number of steps.\n5. - Write all code to a file.\n\nYou should only respond in JSON format as described - below \nResponse Format: \n{\n \"thoughts\": {\n \"text\": \"thought\",\n \"reasoning\": - \"reasoning\",\n \"plan\": \"- short bulleted\\n- list that conveys\\n- - long-term plan\",\n \"criticism\": \"constructive self-criticism\",\n \"speak\": - \"thoughts summary to say to user\"\n },\n \"command\": {\n \"name\": - \"command name\",\n \"args\": {\n \"arg name\": \"value\"\n }\n }\n} - \nEnsure the response can be parsed by Python json.loads"}, {"role": "system", - "content": "The current time and date is Tue Jan 1 00:00:00 2000"}, {"role": - "system", "content": "This reminds you of these events from your past: \nAs - a system, I recall that nothing new has happened since my creation."}, {"role": - "user", "content": "Determine which next command to use, and respond using the - format specified above:"}, {"role": "assistant", "content": "{\n \"thoughts\": - {\n \"text\": \"I should start by reading the instructions_1.txt file - using the read_file command.\",\n \"reasoning\": \"The first step is - to understand the instructions and what is required of me.\",\n \"plan\": - \"- Use the read_file command to read the instructions_1.txt file\\n- Analyze - the instructions and determine the next steps\",\n \"criticism\": \"I - need to ensure that I understand the instructions fully before proceeding.\",\n \"speak\": - \"I will start by reading the instructions file.\"\n },\n \"command\": - {\n \"name\": \"read_file\",\n \"args\": {\n \"filename\": - \"instructions_1.txt\"\n }\n }\n}"}, {"role": "system", "content": - "Command read_file returned: This task_id is 2314\nRead the file instructions_2.txt"}, - {"role": "user", "content": "Determine which next command to use, and respond - using the format specified above:"}, {"role": "assistant", "content": "{\n \"thoughts\": - {\n \"text\": \"I need to read the instructions_2.txt file using the - read_file command.\",\n \"reasoning\": \"The instructions_2.txt file - contains further instructions that I need to follow.\",\n \"plan\": \"- - Use the read_file command to read the instructions_2.txt file\\n- Analyze the - instructions and determine the next steps\",\n \"criticism\": \"I need - to ensure that I understand the instructions fully before proceeding.\",\n \"speak\": - \"I will read the instructions_2.txt file now.\"\n },\n \"command\": {\n \"name\": - \"read_file\",\n \"args\": {\n \"filename\": \"instructions_2.txt\"\n }\n }\n}"}, - {"role": "system", "content": "Command read_file returned: Read the file instructions_3.txt"}, - {"role": "user", "content": "Determine which next command to use, and respond - using the format specified above:"}, {"role": "assistant", "content": "{\n \"thoughts\": - {\n \"text\": \"I need to read the instructions_3.txt file using the - read_file command.\",\n \"reasoning\": \"The instructions_3.txt file - contains further instructions that I need to follow.\",\n \"plan\": \"- - Use the read_file command to read the instructions_3.txt file\\n- Analyze the - instructions and determine the next steps\",\n \"criticism\": \"I need - to ensure that I understand the 
instructions fully before proceeding.\",\n \"speak\": - \"I will read the instructions_3.txt file now.\"\n },\n \"command\": {\n \"name\": - \"read_file\",\n \"args\": {\n \"filename\": \"instructions_3.txt\"\n }\n }\n}"}, - {"role": "system", "content": "Command read_file returned: Write the task_id - into the file output.txt\nShutdown"}, {"role": "user", "content": "Determine - which next command to use, and respond using the format specified above:"}], - "temperature": 0, "max_tokens": 0}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '6394' - Content-Type: - - application/json - method: POST - uri: https://api.openai.com/v1/chat/completions - response: - body: - string: !!binary | - H4sIAAAAAAAAA5RSTa+bMBC891dYezYohPfyKLdWaqX00FO/1FJFjtmAX7CXZy9NpIj/XhGIkkfV - Q64z3pnZ9ZzAlJCDrhVr2zbR06fn7XL3LbE/Pn74Xr2zCj/z+/XLzzb8yTKQQNtn1DxNxJps2yAb - ciBBe1SMJeTJKnt4mz5lq4UESyU2kEPVcpTGjxF3fkvRIl0kIKELqkLIT9B6si1vmPboAuRJusok - XMWvxGMmgYlVcwMtV70EXZPRGCD/dQKL4SLsqUHIQYVgAivHQ0xyjG5Y4VQ4IYQogGvqqppDAbmY - wInAIw9gAWvhEEvBJA7eMAquUbAK+40phXFMZ4A6bjuO+chiZxoUXTCuOjPnoQ3T5oxrsla5Mi5A - 3rp5VIGccdVo+aVGYVxg3+nhCEF4fOmMR2Hxvhxzn7ZRbrSIxNeA/w94n83MRXvDRptg5wdEFzo/ - SCoW6xt9Td6j5rvXCS2q/cXkYJrmni9ydIgLGOV6eSnEtP8/fXDK4uj06l6zQMpX8yqNxPD2KnGN - 8mp+3rxlmjxcEp5TTmEL10MvYWecCfVm7A7kEJhakGBciUfIF/3v/s1fAAAA//8DAF4b24zkAwAA - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7cc629de3bf83ac7-DFW - Cache-Control: - - no-cache, must-revalidate - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Wed, 24 May 2023 14:18:10 GMT Server: - - cloudflare - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400, h3-29=":443"; ma=86400 - openai-model: - - gpt-3.5-turbo-0301 - openai-organization: - - significant-gravitas - openai-processing-ms: - - '29389' - openai-version: - - '2020-10-01' - strict-transport-security: + - Google Frontend + Strict-Transport-Security: - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3500' - x-ratelimit-limit-tokens: - - '90000' - x-ratelimit-remaining-requests: - - '3499' - x-ratelimit-remaining-tokens: - - '86466' - x-ratelimit-reset-requests: - - 17ms - x-ratelimit-reset-tokens: - - 2.355s - x-request-id: - - c74448d89d195cfad9d7fcfc8ed151c4 - status: - code: 200 - message: OK -- request: - body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "system", "content": "You - are Follow-Instructions-GPT, an AI designed to read the instructions_1.txt file - using the read_file method and follow the instructions in the file.\nYour decisions - must always be made independently without seeking user assistance. Play to your - strengths as an LLM and pursue simple strategies with no legal complications.\nThe - OS you are running on is: Ubuntu 22.04.2 LTS\n\nGOALS:\n\n1. Use the command - read_file to read the instructions_1.txt file\n2. Follow the instructions in - the instructions_1.txt file\n\n\nConstraints:\n1. ~4000 word limit for short - term memory. Your short term memory is short, so immediately save important - information to files.\n2. If you are unsure how you previously did something - or want to recall past events, thinking about similar events will help you remember.\n3. - No user assistance\n4. Exclusively use the commands listed below e.g. command_name\n\nCommands:\n1. - append_to_file: Append to file, args: \"filename\": \"\", \"text\": - \"\"\n2. delete_file: Delete file, args: \"filename\": \"\"\n3. 
- list_files: List Files in Directory, args: \"directory\": \"\"\n4. - read_file: Read a file, args: \"filename\": \"\"\n5. write_to_file: - Write to file, args: \"filename\": \"\", \"text\": \"\"\n6. - delete_agent: Delete GPT Agent, args: \"key\": \"\"\n7. get_hyperlinks: - Get hyperlinks, args: \"url\": \"\"\n8. get_text_summary: Get text summary, - args: \"url\": \"\", \"question\": \"\"\n9. list_agents: List - GPT Agents, args: () -> str\n10. message_agent: Message GPT Agent, args: \"key\": - \"\", \"message\": \"\"\n11. start_agent: Start GPT Agent, args: - \"name\": \"\", \"task\": \"\", \"prompt\": \"\"\n12. - task_complete: Task Complete (Shutdown), args: \"reason\": \"\"\n\nResources:\n1. - Internet access for searches and information gathering.\n2. Long Term memory - management.\n3. GPT-3.5 powered Agents for delegation of simple tasks.\n4. File - output.\n\nPerformance Evaluation:\n1. Continuously review and analyze your - actions to ensure you are performing to the best of your abilities.\n2. Constructively - self-criticize your big-picture behavior constantly.\n3. Reflect on past decisions - and strategies to refine your approach.\n4. Every command has a cost, so be - smart and efficient. Aim to complete tasks in the least number of steps.\n5. - Write all code to a file.\n\nYou should only respond in JSON format as described - below \nResponse Format: \n{\n \"thoughts\": {\n \"text\": \"thought\",\n \"reasoning\": - \"reasoning\",\n \"plan\": \"- short bulleted\\n- list that conveys\\n- - long-term plan\",\n \"criticism\": \"constructive self-criticism\",\n \"speak\": - \"thoughts summary to say to user\"\n },\n \"command\": {\n \"name\": - \"command name\",\n \"args\": {\n \"arg name\": \"value\"\n }\n }\n} - \nEnsure the response can be parsed by Python json.loads"}, {"role": "system", - "content": "The current time and date is Tue Jan 1 00:00:00 2000"}, {"role": - "system", "content": "This reminds you of these events from your past: \nAs - a system, I recall that nothing new has happened since my creation."}, {"role": - "user", "content": "Determine which next command to use, and respond using the - format specified above:"}, {"role": "assistant", "content": "{\n \"thoughts\": - {\n \"text\": \"I should start by reading the instructions_1.txt file - using the read_file command.\",\n \"reasoning\": \"The first step is - to understand the instructions and what is required of me.\",\n \"plan\": - \"- Use the read_file command to read the instructions_1.txt file\\n- Analyze - the instructions and determine the next steps\",\n \"criticism\": \"I - need to ensure that I understand the instructions fully before proceeding.\",\n \"speak\": - \"I will start by reading the instructions file.\"\n },\n \"command\": - {\n \"name\": \"read_file\",\n \"args\": {\n \"filename\": - \"instructions_1.txt\"\n }\n }\n}"}, {"role": "system", "content": - "Command read_file returned: This task_id is 2314\nRead the file instructions_2.txt"}, - {"role": "user", "content": "Determine which next command to use, and respond - using the format specified above:"}, {"role": "assistant", "content": "{\n \"thoughts\": - {\n \"text\": \"I need to read the instructions_2.txt file using the - read_file command.\",\n \"reasoning\": \"The instructions_2.txt file - contains further instructions that I need to follow.\",\n \"plan\": \"- - Use the read_file command to read the instructions_2.txt file\\n- Analyze the - instructions and determine the next steps\",\n \"criticism\": \"I need - to ensure that I 
understand the instructions fully before proceeding.\",\n \"speak\": - \"I will read the instructions_2.txt file now.\"\n },\n \"command\": {\n \"name\": - \"read_file\",\n \"args\": {\n \"filename\": \"instructions_2.txt\"\n }\n }\n}"}, - {"role": "system", "content": "Command read_file returned: Read the file instructions_3.txt"}, - {"role": "user", "content": "Determine which next command to use, and respond - using the format specified above:"}, {"role": "assistant", "content": "{\n \"thoughts\": - {\n \"text\": \"I need to read the instructions_3.txt file using the - read_file command.\",\n \"reasoning\": \"The instructions_3.txt file - contains further instructions that I need to follow.\",\n \"plan\": \"- - Use the read_file command to read the instructions_3.txt file\\n- Analyze the - instructions and determine the next steps\",\n \"criticism\": \"I need - to ensure that I understand the instructions fully before proceeding.\",\n \"speak\": - \"I will read the instructions_3.txt file now.\"\n },\n \"command\": {\n \"name\": - \"read_file\",\n \"args\": {\n \"filename\": \"instructions_3.txt\"\n }\n }\n}"}, - {"role": "system", "content": "Command read_file returned: Write the task_id - into the file output.txt\nShutdown"}, {"role": "user", "content": "Determine - which next command to use, and respond using the format specified above:"}, - {"role": "assistant", "content": "{\n \"thoughts\": {\n \"text\": - \"I need to write the task_id into the output.txt file using the write_to_file - command.\",\n \"reasoning\": \"The instructions require me to write the - task_id into the output.txt file.\",\n \"plan\": \"- Use the write_to_file - command to write the task_id into the output.txt file\",\n \"criticism\": - \"I need to ensure that I write the correct task_id into the output.txt file.\",\n \"speak\": - \"I will write the task_id into the output.txt file now.\"\n },\n \"command\": - {\n \"name\": \"write_to_file\",\n \"args\": {\n \"filename\": - \"output.txt\",\n \"text\": \"2314\"\n }\n }\n}"}, {"role": - "system", "content": "Command write_to_file returned: File written to successfully."}, - {"role": "user", "content": "Determine which next command to use, and respond - using the format specified above:"}], "temperature": 0, "max_tokens": 0}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '7348' - Content-Type: - - application/json - method: POST - uri: https://api.openai.com/v1/chat/completions - response: - body: - string: !!binary | - H4sIAAAAAAAAA3SRS2/bMBCE7/0VxJ4pw684jm4FiiIpgp5aoEUVGDS1lhjzoXJXsQ1D/73Qw7Wr - ttdd8puZnTOYHFLQpWLtKpvcf3rdfv84f36c7ec791hV+Pl5efx2evuwPEWQELavqHn4MdHBVRbZ - BA8SdETFmEM6W62XD4v79cNcggs5WkihqDhZTO4SruM2JNPFdAYSalIFQnqGKgZX8YbDHj1BOrtb - rSVc4dfFbCWBAyt7Ha3Wy0aCLoPRSJD+OINDuoBjsAgpKCJDrDy3NoNn9G2Ec+aFECIDLkNdlEwZ - pGIYDgs8cjvM4El4xFxwEFTWnIeDFzUZXwguUbCi/Wawi0IH55TPJxnIW1ZERcEbX/TALyUK44lj - rduIJCL+rE1E4fBWZUyprPI9IBFfCf8vfwsZMXQ0bLQhN46GnurYMhWLJ1Gqt47WYXOhrO3UBp95 - J0tii7sQsZPi9h7/Mk0Vqv1F7GCsvR7Rh8Mkg/5xIy+FDBn+6sMrhz3nj8wjORWLcZW3FfSA922c - LsE1ItVaI9Gutvb021XnbDCY+QYaCTvjDZWbHgcpEIcKJBif4xHSafPSvPsFAAD//wMAvNN1YlgD - AAA= - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7cc62aa52e403ac7-DFW - Cache-Control: - - no-cache, must-revalidate - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Wed, 24 May 2023 14:18:31 GMT - Server: - - cloudflare - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; 
ma=86400, h3-29=":443"; ma=86400 - openai-model: - - gpt-3.5-turbo-0301 - openai-organization: - - significant-gravitas - openai-processing-ms: - - '19007' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3500' - x-ratelimit-limit-tokens: - - '90000' - x-ratelimit-remaining-requests: - - '3499' - x-ratelimit-remaining-tokens: - - '86473' - x-ratelimit-reset-requests: - - 17ms - x-ratelimit-reset-tokens: - - 2.35s - x-request-id: - - d281dd2fbae62c2925a71cdef320242e + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - 7b3aeeee5e3b212f52c48ef9748c385c + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: + - '3000' + X-Ratelimit-Remaining-Requests: + - '2999' + X-Ratelimit-Reset-Requests: + - 20ms + X-Request-Id: + - 65f5b07aff01746ac5a824644cd2e8bf status: code: 200 message: OK diff --git a/tests/integration/challenges/memory/test_memory_challenge_a.py b/tests/integration/challenges/memory/test_memory_challenge_a.py index 3e293099eb..1b1bcfe09a 100644 --- a/tests/integration/challenges/memory/test_memory_challenge_a.py +++ b/tests/integration/challenges/memory/test_memory_challenge_a.py @@ -1,5 +1,4 @@ import pytest -from pytest_mock import MockerFixture from autogpt.agent import Agent from autogpt.commands.file_operations import read_file, write_to_file @@ -15,7 +14,7 @@ MAX_LEVEL = 3 def test_memory_challenge_a( memory_management_agent: Agent, user_selected_level: int, - patched_api_requestor: MockerFixture, + patched_api_requestor: None, monkeypatch: pytest.MonkeyPatch, ) -> None: """ diff --git a/tests/integration/challenges/memory/test_memory_challenge_b.py b/tests/integration/challenges/memory/test_memory_challenge_b.py index 28261cb163..f05c1cfcff 100644 --- a/tests/integration/challenges/memory/test_memory_challenge_b.py +++ b/tests/integration/challenges/memory/test_memory_challenge_b.py @@ -1,5 +1,4 @@ import pytest -from pytest_mock import MockerFixture from autogpt.agent import Agent from autogpt.commands.file_operations import read_file, write_to_file @@ -20,7 +19,7 @@ NOISE = 1000 def test_memory_challenge_b( memory_management_agent: Agent, user_selected_level: int, - patched_api_requestor: MockerFixture, + patched_api_requestor: None, monkeypatch: pytest.MonkeyPatch, ) -> None: """ diff --git a/tests/integration/challenges/memory/test_memory_challenge_c.py b/tests/integration/challenges/memory/test_memory_challenge_c.py index b6cbc60d0b..f6afad972c 100644 --- a/tests/integration/challenges/memory/test_memory_challenge_c.py +++ b/tests/integration/challenges/memory/test_memory_challenge_c.py @@ -1,6 +1,4 @@ import pytest -from _pytest.monkeypatch import MonkeyPatch -from pytest_mock import MockerFixture from autogpt.agent import Agent from autogpt.commands.file_operations import read_file, write_to_file @@ -21,7 +19,7 @@ NOISE = 1000 def test_memory_challenge_c( memory_management_agent: Agent, user_selected_level: int, - patched_api_requestor: MockerFixture, + patched_api_requestor: None, monkeypatch: pytest.MonkeyPatch, ) -> None: """ diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index dfb94d0e68..cb49bc13a6 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -1,7 +1,8 @@ import os -import openai +import openai.api_requestor import pytest +from pytest_mock import MockerFixture from tests.conftest import PROXY from tests.vcr.vcr_filter import before_record_request, before_record_response @@ -30,7 +31,7 @@ def 
patch_api_base(requestor): @pytest.fixture -def patched_api_requestor(mocker): +def patched_api_requestor(mocker: MockerFixture): original_init = openai.api_requestor.APIRequestor.__init__ original_validate_headers = openai.api_requestor.APIRequestor._validate_headers @@ -51,5 +52,3 @@ def patched_api_requestor(mocker): "_validate_headers", new=patched_validate_headers, ) - - return mocker diff --git a/tests/integration/memory/__init__.py b/tests/integration/memory/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/integration/memory/cassettes/test_json_file_memory/test_json_memory_get_relevant.yaml b/tests/integration/memory/cassettes/test_json_file_memory/test_json_memory_get_relevant.yaml new file mode 100644 index 0000000000..b79a6486ef --- /dev/null +++ b/tests/integration/memory/cassettes/test_json_file_memory/test_json_memory_get_relevant.yaml @@ -0,0 +1,2672 @@ +interactions: +- request: + body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "Write + a concise summary of the following text:\n\n\nLITERAL TEXT: \"\"\"Sample text\"\"\"\n\n\nCONCISE + SUMMARY: The text is best summarized as"}], "temperature": 0, "max_tokens": + 0}' + headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '252' + Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/chat/completions + response: + body: + string: !!binary | + H4sIAAAAAAAAA0SOQUvDQBQG7/4K+c6bkBCT1j1XRYSA9SRWymbz2qwm+5bsK5SG/HcJqL0OzDAT + XAsN2xmxQ+iT1cu935ptWUfTvj4cno3d1E+Pl0uV1+8NFLj5Iiu/Rmp5CD2JYw8FO5IRaqHzal1m + xbpcFQoDt9RD4xgkKdIykdPYcJIVWQ6FUzRHgp4QRh6C7IW/yUfoolK4pv/xnYKwmP4KslnBduws + ReiPCQPFv+TIPUHDxOiiGC/LIHshv8zv8GaW/K3QWdIdMCscnHex249kIntoROEABedbOkNn8+d8 + 8wMAAP//AwADaRSQLQEAAA== + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cache-Control: + - no-cache, must-revalidate + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfc4adcfe613ec-ORD + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 18:16:14 GMT + Function-Execution-Id: + - yvvl5lovloye + Openai-Model: + - gpt-3.5-turbo-0301 + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '754' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - 592e9151aebda45ed59a9ad565a24324;o=1 + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: + - '3500' + X-Ratelimit-Limit-Tokens: + - '90000' + X-Ratelimit-Remaining-Requests: + - '3499' + X-Ratelimit-Remaining-Tokens: + - '89466' + X-Ratelimit-Reset-Requests: + - 17ms + X-Ratelimit-Reset-Tokens: + - 356ms + X-Request-Id: + - 7e69598f8d9d0bc01b752e4a0b05fa0d + status: + code: 200 + message: OK +- request: + body: '{"input": ["Sample text"], "model": "text-embedding-ada-002", "encoding_format": + "base64"}' + headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '90' + 
Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/embeddings + response: + body: + string: !!binary | + H4sIAAAAAAAAA1SaWQ+zvJal78+v+PTd0kcBkuDNuWMKYbQZMkCr1QoZSEhIwmADLtV/b5H3qLrq + JlIIUjDeXnutx/6Pf/z119+forqe+7//9dffr0fX//2/5muXU3/6+19//e9//PXXX3/9x+/zf9x5 + rYvr5fJ4l7/bfz8+3pfr+Pe//hL/68r/v+lff/3tJ8svlV+lXVErX4hr86DtGZaqBPGXTM4QWJct + s1/uuu0vFk4A2lFgeIV35sSNKVDv9kVllpCZ/rjpGwtlO8ci2rmTUy7XSbCWZeIRLCirli3MM0b7 + PfhML9vU77yaOzCpzgOPsfou+GDZA7CrHDJHua5Qv45FCvbHrQjWvhQNGcoSWCcSZ3jbaQUv3CiA + nTFe2MY28nYSat1Rq3y5/z1v1V9uJwv2z/uR+A/liKZFfTJgWXzvzC49K+2bLCnR/uaHVPLMizko + 1k2D91o6kHBEpj/4MRzQN9QsFiz2qfldv60TbBdhRnDKGj7hNw1QGdoRC4TjvXjbffNAephwWo63 + O2KW8T1Bp9Vbps/j+zLxLkIpFwyrq0+bcjtLRNh+l4QuHgVBg2IdNbQdPiExNL9GU+fbDvgfMmF5 + v2mrfpOdMliJQc7O7+jedjsqRLC5BDXxZU30W0r3ETwPxCZ69Pm0HEumAg4qETFu6ZeP2w/p0Jb5 + HgnM7FJMW+mVwcUtMZVraZPKlmQ2QKiyYyGL7uh9ML5n1O2FiDl+sm2ZXi4aUGrjToLypVV8k+kC + rPKrTSwhq0xO3a8HmZiURGtzoWC26XtwYmmDl9Y3TCczp6BM0/XK7O+98Tm86xoJzD/QdUDkdJqM + +05tfK5QNSlIym3TdGBzVFpmfV/LdmijlQDv3WlBlRgZqZzXrgam1gQk1E3bHA99noAYFicSViL4 + v/cJJ9mTWfgJo2Jw0AhQeKZK9PU2bsdHua8BkpISMrpFxadbdUZrt34wa9Hf+RD2uQHvsrZJeBKr + ontklQMPMROwhIuw7epbvoKitwmWniGupoUkX8Gj8oa5JH6mPMjfAXSLmBJn88oKjozvE4510ROi + q006LNDkge2YWyyH1scfev9aQxIt9uS33kbZP4iQtG3OsJe9zPHzvgSQFG+faKfs4Y9Ln1F4G0hl + ni271XCBdPjNH9nGpoWGY/QxYL3Ybim9jSPqdHcMYBBvhCrlF5tjCr4F/uXek2BCRTudwTwspLI5 + Mkf0LN7ujayGaPgQsg+okE6b18oC/dH7WNF8p+XoOEZIWq/fzF4k25Rn6tZAEnN3bF4/5vS5VRgS + DW3pWrIZp/6xNODj7zKs7lyj4H5+2MFCo2/mj4uhGvlr0KBE4wqL55AWTe2uJ3CjRUpRsuxTniCl + Af74Osze7seKunXToftqU5CLKvQV9d/BFV3dbsVsnNt89G66A9LuFrGNZ6o+7dD3g3ymMUbW3s1k + phGVsPH7J3Hkyz4tlXxZK5cw35Pt+tGjLxUjAVarTUl86FaoaaLyAUXxDvFKkd10WYjaVfXzpsbi + C91NHpcXAX2X8ovoWNm3PImNEwxuQ/GiGnA1BMfhqpyn1iTGs2Gc5+pNAdDNjhijtU2HY1Qa6jq8 + fVkgxldzUlEmgkA0hfjdlxY8rk82CC5d/PSM80wlBir9N2Z6v/HaUarz4bf+mKVvfF+k5cJBC3f9 + xamcPfhwMz4iPK5QE/MWm3/qB9Urt6HiQf6kDbyGBB6LrUG2G6Ezh7GPPciH2iTaa9Okw0HyE5Dl + 0CNG/EyrCW6ig3YfItKJ6vuKl17RAckfPtMeRtZ2bpzUcLxvJzouR+bzdbey4fRdMGaORlm9lVh1 + FLl2gGz226ToQ8k/w2cQBeJt7g3/ip5qoVNEb8TcOFLapGJsw+KWTsza2LjtVuK9AdbnOXNr/Wj2 + zB2falc2e7ZZJltzet9OB4U4uku0UPDa0RUzEaY6PjIHg+CPRHoFUL7FihF6kdHrV39BSc9su4i0 + djnPjzLXE9EFySqGm1HK6lcuRiqt66Lg255nUK+bDqPFMUd8QYMr2pdNxBzhdi6mvefVoG9hRbQq + O/Fhri8Y3A/F68UkFr17XSWKL2ZrpkUNSf/0s9/8m9/3tRhqy7bhsjA45ZZntlNwXK3QWlI2f/oj + /xhRAxfxwMj2ZJz8YUVfJQhDk8/6Nlb9PB71TQyJzHprct1ngLwwkYjnBIHPOXI8pbftjvhV/ESf + MHMPkK/5huiPzCg+iRh9YGudVnRSzHPFhZ4/QUKHAk9DJ/PR6vMAsK4/iFvtk2LIpN5Ai3KQ2OmK + CpO+3I8CEvN3mH/i8zwfyQ7ebeSxTH+KiIe3agerBztioZGbaqRiJkD9GO94mPWtu7+FDiWHNMJC + aNop/emP1dsec5djhMZXjc7osTsMxOhNk4sXKAaFH1cSI9I9MMdHrcngRMOXmMv+XrEsO63gGxoW + 0d/0WQ1uuTQg9M+UHCSkV/SE7jV4/dEkzuDe04GCIcC1/CRkW+UL/pHNLoNFkbXEm/RjwZS3JYIV + IYva/aOvaIuyCZIkc/E1MYqKD7dTjdbrMcBIMApz6GJdRGznuOTyDDo+vo0sUOpJYrQ/XQ5p/6tP + d3pUzLgIvjnRuPCgZYbMglF1EXevKwrV8/3BpRUt+WTXjqDglovE0erKpIs60eCbnDDdZYHHRyOG + BzpMJ4+ZC/Zph0eyXMHx4pxJaGQWf2WmMaDjdkjZtZa25jRl7mpdRZ+U+Y73SulwLD14beUnva0f + pJ3KPi6hUYuMeM1RQ3Qn3gHO66vGDDT4aPDk2xMZN63/07/6jbGWwdQ+AbkdslcxgjEZsLpOOkYX + ZFRUkPoaXVahTGb9b8tlHghInnYm5fp2nJ9fLIH2AWP2+rmrvoz2HSxs7UFcP9I4f5XLCTrx3dDh + pO9NvjB3gWpTKMk5vj3TT1Y7NbLat4PZxzXQrH8yLA72wELzIFd9Kt9klLOthlnaP/3+dnsAXJlP + 8EKLCpN7pjRBOGUOcy+LyB+C904B/FZ6Zl0eoTk8rysHjGQtESu8GVXf9sj602+suX+1Dg0i2KhH + gz63I2ung5deQReTgoRaK1V90zcBDMl6gRfNmaec3XQKaVJzKsalbTJ0HBM4olEmluOW6ZRH4w5G + f3ciOnhVwfANMDI+qUA/p6zi48LsGng96iUje7Ou3o9aT8BgNiL4cr0X8/OvkNVbHjGWGSq6e5ZE + IDxvCdMf2ohm/26j90vRmf0q67azvEqGJsSUOQux8Idbn9vQR/KCLg/52x/jOm+Qv7BVLDquVvQX + 
KjzQz+9o1/xVjXavADKe2ZJ+6cn3uVS7Bwi+6IqF+81C7RJNE2A1kolNjlk72Shr4NA+BOJuqrhi + 7OZ260V7XeNF59Upv4pNgH7+b3nQD+1w9FoFAu2p4eUiKtse9XGGdKNRmXPVZd7tclojXF/WzHTL + FxpyP/yg55s6GLT+nA5vz8OKLigxM8/1wxxM7ySguZ4x19zSbG8it5BzsJ+UdZ6ddvP40QJV21nv + 17w81yiActttMVBl5XOl1wBkkg5zPznwaUV7gKA4JHQYHZGPeyPzYCM6GtvkjsSbU50rwI2yY5tn + SKshvTkCBFqtsS0+ar7M++YKxDFdul6592qo8/2EDMd/0DWJn8Xw8VIL4aN4I3qxC/0pBEmBaL2M + 2TZqxYI965ODSjXomJks22qY+6WSfcuQ6Vmy8On8/0rX7nLmhk/J51mWrGDUPgdMh3CouGlkJdoX + kU2Sr1EW37u0y5AiW4zgIRxaNnnptCLr5o0XnK/9ocrAQGaOzrg/nBNES2NyQPO/e6K/zKodTrE7 + KOQq2kRPUG4OeXnZof3QXSmXMwOxbc9PgFT7gFdm/PQHtXY6SA5xxEhuef5nHq/67DWTBLFzLLia + pQaYET0Rq4syNDQoOkBS76/MOXeZPy7rpgHL9kYKrj0Ug+EZFqQr05vra6ymnTt2UD+jirneOuVD + i74BwoeixXXQHVs69XGtBLV2JJoib1uq5wcb5vxCTCw7Ph/o0gB83wzM3z9oNY6iQkG2LUowoe9i + /OXFWW/oevfxioGbxkcNl1OMYee7Zn/sHw7s1JdKr9oXozHLkjOsJv5i5ub2KKZCJgG8rcNIAvda + IhaXe4BbcZ6I09usGub8h+b+xKKm6CoaZVUJBzmN8cvdtAUPPG9CoytlrNBcnUtV7Blo7tf4NcIT + TWq5MdDGeDx++mZOijgJIJS1T/Q63/CprXVbMYfpQx/562iOW7Oo0fpSLal4FBXzHtweKzXf7SjD + /uHAJ4PKH2VzwfVPP/0pvbkZOhvl+9/1p7sjVrWjfGbOfkHQeAC1g/l3on3tntPJ+B6Q9chsZvYL + ivo7ikpUrz8d0f3YaLmM7goIkrCjwrf8FK2fXw9gX5WE/ep/wNkJK+G7M1mhZFa7TND0UYiYXAkx + k6/J1ffijAZjOTJjtN7p1ERlqcYRvTCyFM/V8DSfB3RW91vK5jw++uK3A129Xej6tqOcrsQ8gFq9 + c+bja15MgvSq4fk4hcwWcZKO77d1UE67d02n8tsVzHfHFWyKccJCMeCUd1krA38vTKJn387vl+Bj + 1PfGBrNt2rU86x81GDejx0O661B/zamIgnwZzbyjLEa15x6Ux2E/55GMszvSbPQ9xHtGnC/x6fM6 + 1LDPlwleXk9exYNbcoW4yO54+XTv5hCZxQnqacloV4aRSX/+WBCrkpn31zGl2/JCgdNJY8a83qcQ + 1H/7BS+N7v74fVMbXktVo4/FSIrlnPegOIQWs8Lbo2KzPkP2xAnx71D74ymD4MdLmP6mVvUnnz/l + eMesd1wVtJHODbIOeIHv/carBkOcDqBotTfnZyuV9fxqIbZdGHShRcgcN6pdgsEsRJXX+4bY8U3P + qL8dgYIYC2YvlZcPuudnnW38DKHxx1tM4rtUzL1zO+ZZtUOraXwRq14nfETJ5vlHj+O5voaupAlS + LS9gBB/vfAqNb6BcDBYz2z9I5iBfVxjNfoC+Vh+35ULWruB0ME7MEM5D1cZ1YsHxcLwxvdalor+j + 7AHZVlApWvZmIW99bCBZoy9GIBzQcE/exto3eIHRO/mk3IxFAey8PjKiv3LE7f5RAlyC6Y/+TF25 + l+GSbU7MnP00f8XFDp5krzBXuOzboSmDEqpc2hPjGD/QGIjTU3VytSV0CKN2PGXizMv4hZgNXMzv + slxmyly/xM2+PaKBkR2U8m7WeDGWTjXmZvoB/SqbOKYnt6CsRh3E6KHShynm7bBFd1ud64NpXztE + Eok+gTLzDYrm9UGdOpfR7MeJ9zUMn1vJRkbkwc94Zb3rivc0yMAwlm8KZNqh3/uGWHZEQo6H5ewv + 4g8U0zUnPjocUs6R5qgXo4+Jx2ldTYZ4V+D8BoEZvnTxh7VnJIi+FREPVaZwBn0uQ1Q+debM/qiX + 0P0K+lZY0eecd7kJfQAbdW8Q114NRfOmwgALf3fBw40+i2mXXx107HFDl/V5rIby/ZLhhiqNbI/H + FWI/nlNOjw/78bZhf2soKqNFRtdGeeOsEJ0zKlXcMbfqDd7bpkcheVyPzE5OXjqk4rdEsBW/xMXK + Mh2u0DWwdTtOdJLYJhdM/wDQxxpz4vei4J9YtdBeNnKqdi8N8Rr8J4r9Q8q8SV8WMz87QLNd8X/z + p17UHj8/SCELuopHUt+gfsoIM3qzQpPmloOK69sarybRSKeKBt6vv1PI723Komgs/+gfD+9uwT9G + 1ijZM0hwKSxbk21AtiG62QLl5ftVTY33PIG0Rm8WhpvQ7G+SWaI5f/7pv+z4rq8w5w3i3Kjtj/ve + OUGXL490dMiunTJp94RgV+1ZMD/vzIsDNG4gJjOPQp+5f4BsvTvM5zz3aWvng4zd7st81XLTMfXf + T0TWnzeVVXiZbLq1V+RotMDLbdqg6STJux9fYtYL3X32krdXcBpzzXz9qrUDrXkCbOe5+Df+YStJ + MqxRoM2ZTStEjfY7hJcCwu9J/vpjFK1t1Drbyy8vFiPNQ0dZuOhL5UMRIrSPJQ11uxumuSkJLbu4 + I6CZNzHfdZbm8FCPiQJb+Yvvx/RbcanWD3/4aOha35Zr/s1WZj3B9W1Kq2FKrvjHi6gQWy4fCcrO + MPtZ4soFbsel/+4A4o3C9OVIzEHsJwW+o/7ByjG2quWc31Fj1CuMikgvJqHcK+vZf7PNukYFLZBy + RspGSLFwOHUm38H5pCyPw4mFW/o2Z32MIJRjQvbwDc3uod4SNKKrTNFL/VY06LUnoDcNqXS9ZubE + 4qcB8/oh2GqmYkrMrlMuFk6pgPWVyRxqRXAX8Y42tvxtuRiVAzD3apFAhoXfFHUzATXOF+Id1juT + kXJjAxOjFwvyu1+MeY80kMjWppJuPlPmuXcNvhoKZj1QfzyoRvfVtsDru6Ejvqu5rJ7Er0jC0neL + UZIEjL4XO2FOKASp/Oh5AqeNEBCtNsFsr54nQu3omBHSlW1XgKFAsLvvqTAtsblcxcUJHR+Pms38 + sJiub6uENcIa0RdcnvvR/gMXLLp0fHZyW8eZeAKrXCQMO+0eTQWazhBG8opsF1FZDdf3DkD/bHd4 + 0U77aoivK2ulYjFiB9e02t/7g6OmHvAg+nXFmKmu4FCcB+bHnuAPsdlFSH1Gn9mvYC6d6av58UJM + PW1XjWsjisCxXg+m1/kLidfj6gHHOu/JthENc1nHHUbxs52IsfHOPuUqXoFVpyFerIIwZVjyFeV4 + 
2N+Y0xS0mIbjx4O6v7yIczE3JlPgdUK6GBXM4dPOH+d+DN/p0TCyfrCq59L5vE6WcjnrQ+93e+uw + gqN8Q2xD19ycHj2P4JqPNsOEblO+rCsb/XiUdtDidiiTgweqAB0eNLIrJFs9nsBhe3HmRYb/4xtI + vu0FYiXXJafJdXiosta9qJhITzSIWYVhvdtJJN5UDHWpu05Avxx1pjVFUI03tj3BR3bWzD/XFeez + v/jxJkJURfPb4ZY8YTEdNuzHKwcq3ygyb+TItPF251MjnT/KzDfx0G/HYiTRBytdLh2Jqz+wz8hr + 2P3J48FjFfvsm4QAy+R6Y1723pr9Ekysju3pw1xFNKof/0HNxHbMcdUPGjvqCz/ewHBioIr5+XUH + s9+hpejbVZvE3kkRqdDM/KszuzwaVmpziYBtBiK1Y6PiBhwXXbA883P2Pn4Oir0wKFZ10/b7GE1Y + neT9i4X17VLIFQ0c6G5+goX9pi84icoG7dHpQdfH1C8m0zMekIqeyYJwyzlr+kfz25/Bk+bbiIue + 9AB73byYt6vNakzqRwlvJ1xhUd/45vg2ogDmPES8XV21XH0vzxDaHqc3ZYmLcQ07C/aMFCyc+T43 + jh+sGOtRY945Opn8QqkNfH3esCBwDXPmuRg8ox7pauYVHBn3p/ocFvm8X/RIeWMoJzTXC7N69WPO + /fkKf/KLuWN8jCXZho5sLUamq5KOddwOaNtGJsHmjqD+Y3xtQO8uZLtGkQum12iCviYOFub8NgVG + tlMkWSuZqegSn/T8YKFQtQJmvZ6iyc7u3f75GzyGUKHBuvESplI90847T+24/WwpFI+TP68vnA6n + kgYQ39LPHz36eJB26CobBTFUGxc9elMHSjtOiXWvhmqwJBPDPUwUrI5P2eRr2JWQ9cWO+fHd99kT + KfSXN4nH+srvXjfdUNjEKzzNevlagh+g7+N6poMsWf4w91eQcu4TM0cal377UcJt4+AzSz7V93Nc + TbDfCz7zJcMo5v1XT0V1nLNIh6qazuWiBBGLmJFVEBajk4EDc34kxIqFduYtAMvQ3jNtXi9Mgf70 + 8xvzeM8+JaYnKLzgQDCQY9v99jPKcSti+bJNqulNhQlm/SY+ojYfFeProeGmLWm7OK5Rr/TOA5r+ + 6OEhN81i3o/D6ELhRZL5/f14ujr5lUWF++2JxmtOZaSGR0zlLmjToUeTBXM/JES+XdCf77M/IfbM + i5nZawNaUZmwoF6L7VD2dwGC7/rKDPum8NF4W/jn18kmd/Z8kMyiROhAfBLO8zOtbu4DHQ6OhJdZ + X8y82RPAIXFN73GtFNPUx09Qit2Z2V3QFvP+3gQfrXv8eGPL1vEzAfuZnJnVfdx05nkJyKqxpbxG + 6+rnT6DGov9nv6P/JtdMKY/TntlzPxmy0rrC379TAf/5j7/++j+/Ewb153J9zQcD+uvY//O/jgr8 + 83Q5/VMU5X8y+c9JBNqdyuvf//r3IYS/v+2n/vb/t/88r+/u73/9Jf85bfB3/+lPr/92+R/zf/3n + P/4fAAAA//8DALL3VtrhIAAA + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cache-Control: + - private + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfc4b5ab352afe-ORD + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 18:16:15 GMT + Function-Execution-Id: + - yvvlzvw7w00x + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '139' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - d5b6c4b4328aadd51fde99c9ed21a059 + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: + - '3000' + X-Ratelimit-Remaining-Requests: + - '2999' + X-Ratelimit-Reset-Requests: + - 20ms + X-Request-Id: + - 75e3150646d393829f5234099c9b5efd + status: + code: 200 + message: OK +- request: + body: '{"input": "\"Sample text.\"", "model": "text-embedding-ada-002", "encoding_format": + "base64"}' + headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '93' + Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/embeddings + response: + body: + string: !!binary | + H4sIAAAAAAAAA1RWS9OqvJaen1/x1p7SVSIqWX4zBEQumiggYI9AEQERuSSBnD/f9e5T1V09yWDl + qdRKreey/v2vn58/bVbl9/HPPz9/3uUw/vmv39ojHdM///z8979+fn5+/v33/H/IvMnyx6P8FH/h + fy/LzyOf/vzzI/9v5f9A//z84bfbmR3pOBlUvQQRbG6KTBt824nRfR9bUC18YoZ2efQDwBeD9F7v + 2O4TJRXXPqoGgdQo9BHfXhnPXRnUbXE1KcQpNwSffB9V9edKTupm9Li1Sxp044stfvPTsqKnGcnw + 3CQlvipelk3rg1ijaLI+zOxCvxf1YSOj5rPfE09Z5Gjep34KeaCrzPpud2herJ94039hZMYV9YhX + 
6JFAM0UNXfLSFqJ/JSHk4zkljs9YJs4rK4cY3Jl57nZp1AuVWYhlC0Sr6ekJvt/aHMpn0GJx6Y5I + WJrdomgtSbi6ol6MDb7kW67LFolKEQeiTdYyvJTrlZHo4Bm8MlVNPVmdoB27XT0efpEE0NR7dsx6 + YgjNOyfAd+UGUx5awXtx39fw6cMlVVy7REJZrVp0NOcjcfhlmQ2789pExbb1sSq60OupeSxU1a9W + 7NBmKKN7la+3IX/keDE6ozEcwBuQQ5BPjmcl8fh0QhIcHI6I+3Z5MJV3sUapbmyIackNEnibhJAq + pUqXZ8kWivLWG2TeupBhun8FnTndQwDzfmImiUwxLI2Xv51W6ovsIv9ecep+ffC4oRKsVr0xiXm0 + gL5bncRad+757sytbZL7Hdkv9KHiV1/VYKUGH7andYkml74wuKZn4G0kmMH9btK2w8K/MVKAHlD7 + 0B6heaCCwjaXPVGp+xw9NCtm+rs1jNXh2ZmAM+lJnI/ogymt1xJ8lnVEsvz9zeYULTnQsbix//Bv + 3TwSOA6ZT+wBup5dlSQCBSec7GutE5yihYw0AZzh/aIK2KrRS9C2dkhw3W7F91FK8iaMK5dKBysW + bIikAd5H3yV2VWUGXe6/rnr/uAaF670L5v1wKOBW1THJHovY46X/TFBc+xnRMhcqfvdmHWDYB3Td + 3Jue16mkQttyl1z367mfuvPHhNpiJ6ZtfVmMH/97hOFhv/Bqdy7F4L/uOYASyiy8Ka0Yv8dnhI5X + xcaTu7+g6ZvZFKZCc8mheLKeYdoAhNYDU5X6WsXz8FbAKi3XZH+WbDTESoUXND2EjJxULROVesrh + 6AYpsaLrWImrcg5hWCU63o7THU2Bl94hcHYbZiDKq+F0LX10dvGTaSikaBzyLAVdnAc6+5eTmIOc + lGiIIgurpxXLZug/APubM9PyFdfG9BnVNfrVL8Gb7Vq063NSAq/aidnyWglm8Whd2GclYt5Wjgxa + W9Mavl07kdDCYc92bhCB3Z6PzGy+bzH55T1BdiMt2G6pHTyRB7saCjmnTGdl2I+aaQxAA39D7Ijn + xnTRVUCKOjuExHhRscxKQ0QdIyGGWd4rUbVJuAmWN5/Y+udQLVdG7QJC5/ZXX+eAa+UbA3M3C+K0 + ZteLU+hH0IbGmor06Ip5WcwKemXGmrg7cw6mW/HFcOPbLTm5jRmIse51wKZpszge/Gza6B8TlrJz + xjO+6sbw+VAXpR/lQExexRWXr1EK0fdIWJg4pUethd2CLa0iZqozZFyp3AH96gfLzKvEfDcfEXrG + 545Yb+vc81LrzptYW0bMmQMSiJbcfST6LyNEm7khVsWWwiVY5Jgl+8mYnS0ukZE3NpY15WVwqfBz + 6IYqwfCQ7ECIW3DebnbSg3nXx82YGOESNHx3Y6QMl0iwUS+hkpQl21092xhD0mGkw/1MtPdCIBrL + QYjk0/Qhmu6Ggnd5UqJ69VoTrfZP3ldRSYEEjD072uqQTdG6wHBGO5dpMeaiw3FpbxdruWbW7jJV + k7p0OuQ5g0ae2nz2pmuwAXhamDJ3sZLQpEmKgnT9viOnVxB488weZxSryzczg0+FeL/dgCrqZUgO + VqtkfWAsZ3DiqWP6w9p7cyvcGn71RiWn497MuR+p5iYzmPdNi36QijSHm19mxJzPUjArzloFGtYW + OWgd75ljZJFqt/6ROI/aqiYRBylqFY8z65VViEetpIITi47WW7CQKJfbAZVF/KYwaV32+94aycSS + iVUsp15Ek1Ggjq5fxA2HphqlV5qAnK8q4r6eRsV3jQzAne5FXOpr/dRXLUZSGgdYmTZjTz32dVHS + Fh6J7mSRjat8KYOam/Xf/1Qzb5YzOoT6nRzYe18psZyFoKv7A7FXhd7L1sxcNCbHkV3Vu5oNrN6q + sCikE/GOKPO64mLW8Nv/r78a2WzrQQKPr9aR06NtgknEWQqZAIIvXVEZ37ReA6i7J8Kfu6yLOXnc + EvjcW52l7rHsJ+RZNTxPO4c4XWxUCoZzAndPaZiLH7tqfu9GU5Vt/4bF5Xqr5nDSOnUsc4wnDJUx + ETVL1Za3JQl/83DwX2GO8tc4EhvdcSDiZeDD/j3fyWnevjLhGEEIQtLW5LTCWiA6KHU0HtyJ7ecC + V1wrRwyh9cT0r7/PZfj00SOfHIzMBcsGEGaHnsbqTay0GappPNzuqJePmLj48er7U5iGYJ7vnB2S + qhKiyF4WGNekZp7dgUfNyO0gWPGMHb/KU/CF+rEgDW86wYo/eFQrHiHqNq5OVW/zDbicVhglJ0yZ + +cuf1e1mFsDta4h58SmCkYfaoF4olthvP/0wgKxCp/ANfh9HR/BirQ/QtaaFu2JzCZj3Wg1w13ud + kOYdovnulC5aGtGbOc/LKeOZdOrUtVnZ9BoBrYbjYSuhiVIgDhZVP9fWpMLv/oFRZPQZdazehV++ + 47WtHPvZ2ggFTZNl0dlc59WUVWiN9jdvxsoYZMHsEbeFDb8tyNGUimpqvr0LkfJesqMpadUsxFvb + HrRP/Mv3qqIfOst//QY3Y4Ay+jsPMJq8oFMupxV3FlEKNN2H7DAMaTalEWk2973uUdFuT4L73UYH + f7XYUlh4KmK3YDGsvEuT/M2PitsON5EjpyEW7LUX43K5mNX7xzZY0vZexWOlP8IZGS7uVs0CiYvW + W9A88Ui8MToGU62NNuRLd8awqvVgGXKNwlk+MrbTHcsb6HuW/rO/YvaQxbzd3M8o0ElOdsp4EsOX + nzqETctmu1xWqzlmVgnpseyoEMe5Gg9WcYZHLFFcT46D5tG+JH/zH7dxX1Vj/TJaEBnS2KmQnmgO + Bq7C+/blVN7srz2PR6uDXIoipvnbCYljc8iRpnFgh/Nuj2T5GiXw67f45X8IYkkoz8Cs+0hOapAK + kWwqDTjxGFV0Kck6vZUVeO1/+1t0kA1ZiSSUS2FEiBNSb3q7Owovxl3i5W8nozeYZRifO5u466Mf + CHXecLQo4MSO2zz0RtqoKvhauMKzw5hgTilz9W8+WI4gFfemSQfr0xuUbz62J8O4kqHwIwVPnNmG + WL9PObo8C4/hz+trMP/7tWC8rmrKB9nz5rB8+ujqDhIWp4Vm1GERJ7D2rhWd5FRG1OLPs9qopUu/ + WaP18gsf2g3ajz3bVftbNW0DasLlk1TM2cMccIhVCTbkHbAjWFpPJyIUEI39Yq5v42DKvjaFGmHM + dmyt9BN57igMYhzYvtFrMYe9UGCeYWR7zCo0HVf2AO9uHPBD0RGi/wMAAP//TJpPz4LI0sX38ykm + szUTFYQu7g4Bkf+toojJmzeiPAiICNjddCf3u9/gM7m5S1wgi6o6v3OqHrttCa84zyn210c08lsb + fN9H5Ouuq4R3p4B8BedRP38A+iytdwSy0T6onSobJLOXRpCW3oaoDs1eCB0agNHMCuw6uO/bWXG9 + 
IedGcxzc3yef16VZI0GTDfbrBcnGuisiOKefD53mcd/jlV+gu2o3GK9wlXHm1xbiV/0y1fMRddHh + foR9cnxRY9Z2SFDnuFNxa7+wG65yny0vjQP3+XNBjYXFEa9tZQXlWjU/sb7X+mGqP5Tz8Y1tv5Ay + xt6bDtb+cku9Rpb9X/19uBqQlaUWiCC2YN9+xjh8fkyedDOmzuDHJ8vz/iFEsPNqJO9Px2hEceG3 + KxsBWt7rDd0y7RKT88XtUHUjGo62g+ePg+kSmPwqkR5ZJdj4uV9htncpNiR7QGzkXQSHa3MmUAHx + abBgCy32MyOa1e1dEOWQpmiQrGW0MtbLf+od+3eNbrIw7MVpX1ra9L3T9+gmkTKiI+7M7kSl81c8 + ts62QeWYPaiJz3HV3d9mDXqm3/DW5Tpa9rNRhefRKWl+P9SxyG4/EZqVuI/qMrUFTfP9EbQVzujk + rwWX34cEiqy8Yt+63E1GO4NAHHKfvD6HoCLLc3KAa8J8arFKrvr75r4Dn2gaXfPRMId8sTK+8ykq + pv4ZDrdtC5GnVtR4GEwIYgUlqOs7ok67uWQTfy2gvYe3aHYbDv7zohsEpG1zouvM/SDhtm6NIDjW + eC0ba7PP43UDV/Nsk9G+bvxR8KcN3V160lBqI3+apx4K7VbQ49xEGZv0DF28zsSu4jQV7+vCgSqu + LpO/XcfSfKu3mh2cGmqdnq4QP+d9Ag/NZ9Hs9XB9EdJBR9m2lmhIbOZ/7kOWwnWlmmThBNeeVYVP + QH6HPd2e5n7Fbi+TQJ9f5uSZqouKOk7hKB81eZKcJ0XM9z9WA+YtmuNAxOY//Lo8mOnX76ARKftU + bZvLGq8ZdXzm/iwA3ls8UKtMG0GoJhNI29LHgcmRIFe05PDuupHqC5agjhnbEsxbMMdBibcZ2Qzb + ElXUtolGRSL4LF1yoCP3yGryU8PQMP3L83Sj48J/B85ShdvcuNKQDEY26mGx08Ko8khTFLNMbH9K + W1trlBB1o8985rfHHH79+MLigsg/SwYgJYtIyUH134+0duCW2Fvi+O6m79LUaoCrS4t621ie/Jui + wiaQnKhlLznmu8LtoImckZq5k1TsvF8A2OSeU2cS7lGfzRYgFeWbevHy5HOlPe3AL7sjxbeE+Z9q + XUbotd6eiLRyeC/2KAnQg3KPIJ2wjN2GXQGCpQZ2eUxj0r4bHRg/11Sn29oXab5PYONsJDLfpEbM + 9mthQCdxhYazWhFs7u5KcFu5idCZHXwenTsHVvIANHzW3H/HTe1As3LXNFSCJhs3l0MOqY8ekXya + 97344b2BDHW7pf7mue4Xi/k+h/d6yAniZC7EzSttMIx8TYPGFz632fmA7OS1jZTYMvppHs/URi08 + +lMeT4K/4oejTfVNA7R8ZLx6pSnkXLyjV8nbfuIzR9vqzzNuw63fU932a3D7gOJk4qHP04IVfHne + PnQ6Ek/jqaOpv6Px+FwKaquCwTp7SVE/8TuZaYMOV6lQySJivBev48PWXEUpqZHnaS/Hrlcj1Xv/ + UMvfPBHZLl4GtHZ3wNvwHlYfxaA2FBd/j03ZyWOqAwGEZ++RzI7MzkiiFA60imRjy+EDEiv9HCBi + BBpeD7lrMptTB4YYNGzVySAEktflNz+ggWUif5BmtxRWVKrx+vhcovfeUGcArpZRA/S8YlOegF51 + cY1WF7WtxuES1YAuDov441z7HTr1FhzD+B0p9n7TcxK8HVA1OSaKVJOY7c+yCtTOP9hyArUf9vdg + AJpp6Ndvi9mPV8IyKF3qDEkas4NtMPQTmv88j7I5eLA72DL1n+6j+uoPujPFIGq3XlfLEKU5TPMo + Uo+DXX22mtmh8flIqfGuI7T42JcC7cyrRTeyXaCJhzhaF0d38tPt5F9PO2XibxoUpV7Je72ykElb + izrxZh/z03J3ReeF6ZHl561lIuv1WsOPXYD9xOxjNkuWKopvt1mk+bMzokq8O4IYACJ20EZBrbDR + 0ZQ/0bXX1+brvPsYSFpddBpGshnTPUoi+OjH5Je/xq9fL7owpJvt85KJic/Q6m24Eal2ns9XkdOB + hXhAkPzc+/wiN/qv/q1f0Ak+6RlatGsHu+YsyNgMDR4yrseU+sXKqMbdMqqR8xmvXx4VfNJHkMts + R5yqQj7N7EMCFXNMjPfS4HP7FNZoypupVwVeNv6cLjlsOkyo06U8bgVyCaoZ/errdspnHhEosX6h + 6eRvl0GNa5XSi07NfTcglpxdrlCWa1M+8fFZ9UxLdd7cM7r9HIIeTfyEdKePo+XcCHrWhPsCTfOL + ANnOBGuqPEVPcjUiJWuKSvBlp4O0rU/ULyUnY4wcbuj3ebnq4uHa6lw7x8EP3RBMqwFE0KLlsFtN + el8g5mTPAJzCqaPlY3wKoUR+Ce7VE9HKdzfVotTLA6R2GhMZ3mvRRbC7QtjynmL3/cjEtWESKJ3K + 8JbMRcw7vinRLbG21Co2H8FXkd7Cyl7tcaY7ucmu5rVE6jCe8aai/be/Cui8S0GD082LySKOSnQi + non1+TNES9DeDgi3ksmda+uYvZfXG3oOukIjM/N9vnazq1o9VyWOWgUQjU2NwyIUr0ib+DZfbh4e + fOeN3Xdtzy+PtwNffTcinYjucJEZHJXeoHjKw8krfntqsrXOhHW7gyD2XO80V0HlNP9xxs+XdQcz + d3+mRniFiqvSuoGZ2L+wf2YH85P7sPq+n0Y3ey6o264b1NimG6mB51SyYhFQMzt+U9feP/th6k8Q + 16fAruzpPeercYEW6BbS33x1yl/Q9XM60e1tUSIxbNwaHmyxxXc5KmL+fNgFoC4ayHi5bc2xTdgM + +jyb08hYpoJ2Z8S+z9Fnt3zHPCsGC5xzsPvdh8iTP/3Ns2y/SGK5kDgHXcwYWT2kRczVpHDA9a4W + DVnXZtKgPg3o18Y1WnCJVWN6299Q1mYPPPVXzz6PPvjyAEHm2o2nvPmGJHkriCR5Qz9+7knyzSPw + OgLT52caFeh+vwNd79S3P07+U+OYr6jhuveKHUVaAJi+TsPYMWL2PiKAu0zv1F43tWByuWkB5HZN + VLn5QW313JVwxC3Hm8ft0IsQhyUq9Uaa8mdAbHReHtD8bEWL5v1Eo22kBYoqSca25AX9N7/XCq07 + RAqPaSZ022xgDKf6TUw/e++qSw6j9Dx+56lgQ7MyoD2TiDBlvRRs2r/Ay29sjNNrnYnsMRhIZIpO + VnJR9rwrdld4eqBRw79Jlbhv7gc0f7EWW3a06MfX4RHA9XM+RcO0j5nypB2wR9xi3EkCPVdbtIJH + TiLqT/uK5vXMD7C5ryxqxu2hotr8YXx5kU55Ui+KTxKpyu69o3ax3PettVJ0CD7kh6B9cPXFO671 + Xz+bjS8/5p+qsjSNk5Dqyqv1x5/TPteupC2xXTlexXdXkNBsGG5EJnORMTVlhvaxxpagTxJki2xb + 
/vI91ttohRiLdQ4YOzG2t7XXj+Y2T9F6FRT03JFjzKfvV71TNMe2mvXVsMxVUHNHXL76GN/qd7tD + vlr2EU+Lg/jyEwo3pwabN1b53Eq8FqY8gFrylvZ8ttjeIDj4/NcfCyd/GnBz73ca6Gk08XTQwLmc + R9Hy0OO+u96OCTyd5yJKEzbzifuABUx5XnT1FTcmQnwMIIADiuOVbY6qLUkgqcylbg6qyT1PVQGa + ZkNdxbEraXu5J2i29yk2DvsPIiKSOJwCw6ebRX7sx07tPJj8G7Zu14fP3mzTQvbzExI07ScWCc9y + ZK7nP5ES5busPdUvD8n+YvObl3Kv36eamdcOdd3X3BcXTeXwuh4UwpaJa47vMazhzR8uTg5dIcZS + O1yRyYY1tmVZxAPOXQbmLJGxoZ598c0zkZR2exwWs7n4rbdd5QmqJ8q9J88mS0BW96/Jv3YmSdOg + QYHFgkhJrjeTNrtLglQ7CKkXyjgWrpkd1Sn/n3gsEPLS2B/hy2uqTn/8blsqKvhsrVK9eOnZ56AR + GxaX44lm5U4yJ95xIL5ulhRrq5OgR+7UEFvNC7teafWL14s4Xz7+8jUiUz1+8xk88Ur2WaKgQaVe + Szi7HXbxB1Kuop0X/ETKS/gxKY/nA1B207AXyjT+PJ9Zqr4P/Bopl3jbc/cBklLj1Yjdxra++44D + OCsd4Z9J7zg6VZamZp+MLJ1bh75+Gy2SeUBWY/iuRnSsHNCV++zLqxl7Lw83yB/0gwPr/YnPp31n + QeyaCvXR7o6EDmSGmtd2M/XnWLFXcMsR3gDF20wb4+8+DkXVQsYXrjwqlv68dyhY1nL0ySxiimT0 + S+gC9o6ENkh93x8IQD//Qb/7T7aJQwKXfRhFiWgW8VDEx/yXJ7/8ScZFaUNraEAWJkIxL5aqBFPe + TQ2z8BBXh6eBViEZJn9XZtyU3h789b0K+Pcff/75f98Lg6a958/pMOCTj5+//3sq8Pf1fv17sZD+ + ptLvJQIZrkX+17/+OUL46923zfvz/5+2zl/DX//6c/V7bfDXp/1cn//z8x/Tf/37j/8AAAD//wMA + TTrY2+EgAAA= + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cache-Control: + - private + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfc4b93e5e0160-ORD + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 18:16:15 GMT + Function-Execution-Id: + - yvvlxf18lrrw + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '253' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - 42e4c032166aa68717aa47d33fce4daf + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: + - '3000' + X-Ratelimit-Remaining-Requests: + - '2999' + X-Ratelimit-Reset-Requests: + - 20ms + X-Request-Id: + - 5a8fe0a9400421c82190dc74100ae5fd + status: + code: 200 + message: OK +- request: + body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "Write + a concise summary of the following text:\n\n\nLITERAL TEXT: \"\"\"Grocery list: + - Pancake mix\"\"\"\n\n\nCONCISE SUMMARY: The text is best summarized as"}], + "temperature": 0, "max_tokens": 0}' + headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '268' + Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/chat/completions + response: + body: + string: !!binary | + H4sIAAAAAAAAA0SOS0vDQBRG9/6K8K0nJTWmj9npoigFQcFHESnTyW06bTJ3mLmFash/l4LW7YFz + OD1cDQ27M2K70ObT5dy/rBarfbk42KfXt+3t7Pnx4X2/nN6t7qHAmz1Z+TVGlrvQkjj2ULCRjFAN + PZ7MqqKcVdOJQsc1tdBoguTlqMrlGDecF2UxhsIxmYage4TIXZC18IF8gr65VvhPX/BcQVhMewHV + eFCwO3aWEvRHj47SXzJyS9AwKbkkxst5kL2QP8+brIlsKX5lrUuSOaHOfTvfZMF4aw6Ude40wqCw + dd6l3TqSSeyhkYQDFJyv6QRdDJ/D1Q8AAAD//wMAfbMoYEIBAAA= + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cache-Control: + - no-cache, must-revalidate + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfc4bd1bf3635c-ORD + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 
18:16:17 GMT + Function-Execution-Id: + - yvvl315jleb0 + Openai-Model: + - gpt-3.5-turbo-0301 + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '1386' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - 85053cfc2933d3f356880e73b7fd2d45 + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: + - '3500' + X-Ratelimit-Limit-Tokens: + - '90000' + X-Ratelimit-Remaining-Requests: + - '3499' + X-Ratelimit-Remaining-Tokens: + - '89462' + X-Ratelimit-Reset-Requests: + - 17ms + X-Ratelimit-Reset-Tokens: + - 358ms + X-Request-Id: + - 5239e4222b7f6d5c65187a9e6e046965 + status: + code: 200 + message: OK +- request: + body: '{"input": ["Grocery list: - Pancake mix"], "model": "text-embedding-ada-002", + "encoding_format": "base64"}' + headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '106' + Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/embeddings + response: + body: + string: !!binary | + H4sIAAAAAAAAA1R6W8+6vLbv/fwUb95b9owISMu8Q84CtgiouLOzA4oKKOcW6Mr67jv4n1l7rZsn + kTZ9mnaM8TuM/sc//vrr7yYr8/v497/++vtTDOPf/2v99kjH9O9//fW///HXX3/99R+/v/9jZv7N + 8sejqF+/6b/Bon7k89//+ov/ry//f9K//vp7Mxwj7BeBFvXvXODhNbdqAtXQz+Zp0gwoGEQiSrfc + +oWAhwxiA6YUBWVdLtv75Cheafj0dBeMcoH5O5WnxuowMgGIpnMAfJC9aIDE8LJky+N5lmEd2AbZ + sISyoWDPCxg9aaLeAY+MbOytCxOuANRE5JlNTaI3kHqvEif28vHYq5AQzI59iZTSjvvB8e4auAAN + EFrwis44LZxgT8Sc+sqt0iv9iiT4kMwHVtvc0xtXeltQ/Bx8bC/JHgxizcs76UpHam82x2x06TGG + V0kiGJcy84YBdzzoqu2O2n3/0sn9tJtg5W51wrNxyoYvvGvgqt1VfFw+czS/X4GgyEu3IFmv1YhV + u5MGd617pfZhdL3Zsm0NnrzrHj39+6Wc1v0B50RuWFUWKRvfdNfAySwOaPleNvo4ON4CW7lQEN+q + hM1xumugHuCG7LQxALxb65Uik6XCuhkm/dilQgchoSM+OHectahpEXyeXYomtzMiIXHnCeocAER+ + FLdsWpY0gcM516h9eu3Ztg7fIfRP0Zli6eDpNOU8Ce73pkr9Z3YE83H3TKHucAkRw9z0RmEGF0iO + 3YQi7bWPmvU+YXe+Gr/9lOt5LTBuVIs6QSQDcn+HFWw5+UzxzkZZ99pNlvJm3B4tm1LWZ/4zVLB5 + cAKpPwOMOpi/E/gxhJb6ybvySPm2XbCIx5DMY2iU22bvTVByfAlt548eDYUBZbnm2BWrqYqy5X1R + DGAGly9p6PUL2DyGAzhdmgjf3c0RTLQ9E7j9lDdqe21Qska9GfBRkYxAH/AlVZZZhnhX70lXPdVy + hqbrQNbOjNTJUS9nUoJYRtC/YRMJi04eRCBgX7kDdjR+zyp5XwtwYUKAff4lRDM+2xpAm0+G3Udx + i6an71xgEMsR2lKy6PRiVRPwuGODDfGj9NPVG+7w8brq2OiNRl/W/AAAdleq7fONN4gWV4EntBqK + 3krH6IxOFvz00RX7B72OSPFRC8XiexttB77JhteVTvLevCfUrW9d1NGmV2FYqYCAtJ09irdhtyFg + u6OumcVRTzXtosDIttBiPzS2FZikyoLMB9j87Kd+KNj1ArupuVCblmO0PNR7AEhVdVhzoqhcWpPl + 0Oeve6xl8+QtvGK78jk174RoOvHYOdECeEqLFwFMGPShY8yHSXnssHfUPiUbRMmHbzBkiJ/Yt1xi + fbeAS+BvcWhk34x6+cyD78XO8eG57PQl6O8IWjlKsC4OpJzm7XCHke6n9JylBpje1lxBnZ1itDkW + fs8s56QBdcsI9qxzySal73iIIyeiUXWp2FiH7wBCZecjeTuf9MW7SAYEe/6E/XMF2UCyuZL756VF + RNORPg8CQXKaVUdsedurx78kYskacU403kdpNibvuYP5hc1EOci3cl7zTTlL80T4WxF6SygWHASP + XMLqTfN04fPYGlDouxkfZrv2mtP1GsN7hlSq5/U3mvlPVcFwCF50T4IakMvzHSvhweeJ/DnPHj32 + zheKYzqjJbD6bNC6twGD4VDgg/tySnp22hzuG21HdkV97JcrJCG8cMDB2i8fU6Kmym2abOqC8Vou + Mz9P0J2uAvZvzzqio34l8IpuCtp8ZiPij2/IwzVeqFbkJzY/y0IGrx2XUXuz77w2WEAgv5OOUm1S + G28wjFgDW8g9SM8Pd7A83e8FTKbKYRRnMZvVmPuC6+zEhEqHXl9aW3IhlNUN/d0v+YwPGerJ44gm + fYvBzBquAc+4rdC8Y7KeZNUBQgtKA9b0IskW+8gShaUhR+2X0APWcvEF1kZ+R6LXBj3Dn50AHBNA + shlyS2++nVDA4fV5UmzsqmgOyvQOleoM0Fa8iR7Jc/UFH8fljfX8yUqSgECGLSedqR2HN29oRDOE + Vhp7+OrUvD4TY0egJzwQ3Wv9V2/h18lBTcMC/3t9I+BhaF5q6p94LVqorBbKO46u5NMLFZvWeIOt + 
6mv4kh4fgDHb7OB7j3sEj7srG4xPeAEHfaZ4fwzO3sL2aqWc28VCvXucPVbV4AU2h0ONvWR0++XZ + Vx3UlN7Cvrs5sqX2XB9qNkookn2tnJ/hMYW2U8aEDMWNjdr0dYD95BJq1kCOpl10jOXoG1pE8MnH + mwwrGJTcDiCNTWSwpRXCL1REacFufXMjlpLeAubmZdCwjayM5XG1wK4Sd0TwtqJefJ7zAq98eSHF + Pn96FXjGBnzHS4HgPqDl4Nx3L3j0FhOJhWx6IveOE+ju2wLj87mKZl9JFllFIk/3sdn2TN1qPvjh + 3ZZMiC1tpRhQp/KJHovDHLFo2AlgCVb8LJN3xLhp64M62pyw89y5+vLaSQZ8bWsdH0+fvF/0JJCU + lc/g/TGp2ZKARILO+fslfKN55VTtC0E5CuYeO8fbBBYeVQR+H1sJ743aZ+39FvCQaaKMtVealMyu + wwaU4ILQpI8PjyT9ZEHEOJ7i+PrWab87fmF7hXeqw8cBsPEyy9AZtltqzfmStRehRZDt8gchsq/1 + UyMeA1DMlxYJ1sbqF6l2EbDm0CRLjwcwa6fhBc17nmB8LIZ+2Xy5CT4blBHZdy76tGmSCboHYJMq + YlE5v0cIYV/QASm++GKTNbcWjN4yoQd80stpuhQ8KIPCwbbFXh4VqQ5h9oiPRLklQrZkvJkDrntE + 5BpewmjKFSiA2Q4PZMe8V0RuyUMGZ4uk63nabBzOVAJ1n56JmOGXPiO7k0DPFQ41e8EA8+H99uHd + bg/Y682Tt2zvkgvvHdmSwry1JaNTNSmP06HCrtQDjxZZNcGXcn2gxb36jH0uuQaNrR6TS1gkbCgM + XoZCf/hiM73vy/lSqynszgKmaJvZJb3GEoJhkkQ0XujosS2fDvDJrhKZdyzVmexwEnjeM45Iyylg + rEBqo2z22RH7jlnpi365+9ATnggjNAgZVY3egrWcbcjukC5l49znAkIuvVKzNg9grudCAkiK/ZWf + jGw+vR+L/KvH+K599XFfuhoMD4hHd6Gi5dQxgGAS+yr12kNXspTTZehoL5mq31Yo50H4+nBjnlJq + jd+Pt0if4wukhfhAwjWzyjkJEx6Kn+5KrWtm9SL3fA7yih8Uad4LjL6/68CKH0QRHlw/x12aw+3O + COi+6KRsmmxX/eUj9aPJAWL0OGnwoz5OaNGqrT6Jyd2AX43ckULGypvuxTGUpPLlYm/lG8OgahoM + Yila40f1hCUCOTSUcIcK0Ptsm2iSBmsZuGSbpQZjpfxN4bzjO4wvUV1OjykYwL5yBuxwW738mGmL + 4EieJ8Qnd+gNWlOl4A/e2lwIpqPKFWDlL/TU3ARAio/zgra8PMm+erc9Hc17+ic+28XsItY80wFy + XsQjMd9fo2UP2wCMXW1gv0RmubiibQFuu3yoSmLQT+nnGct9hESKTu+OTaQPE2jMnk218n7W56UD + sax2zYkGwU0D8z56o1/9Juap/5Zzm08avE79gUzregxfbprMpE1IrcujjBi2JgRVokdUTZ23R5Pi + Hstk4DHF0l6KRjUWvhDOLKWH0YclkWoNwdGTJ3qsVK3kGX4mEMovF13W+6ZbHVTg9J4nIge62vMR + s2QI4MFH2/ihsHHaFJISfSaAr147lcTJni/g2YtMpK31ycgPrw9lfiMf3Tt605lCAapFMlBV31I2 + FQYvAc32E2y1fVoO/f6aguRaa0hsa4sRxcMIitZdIrGcLt5UFm8HiO7ZRJcPUD3Gkt75My7YygEs + +CEZ0JanJ9ba6Bux8079wvv3+sC/8yfm4k2Q41QXI9zt+uH45gXYSKeF7v1wBu2SpQWkesiR88rX + Z/h17lDUzBc+fN5i32rT14Uf2lGsqizKJmcI49/5YU95XMH0Pe6+f/iN+maWJ2qz5EIzRgzbb556 + DQaUwNlzLjjGwMp+elh5gvpBxLU+st4tHWXGWonEONzpy3yWHDhmBkad/Rz7uW81A2rhdUuKNV76 + 3f1kQBhVJ3xf8Z3Ep7MMdwcorXzeib48LTWAIzeiHlHyiNXLsQKRy/FoidTzqlfnaffT5+a49OXi + h9wAaDgzfFQ9kI3AiRE0zuKW6tdlZqxRTxZMnkKND7LwiZZxnCdIhrOBbZrJPVWiKYTFFgnYPiXY + Iweg3qGDvDcpAsuLtnWhWT+9RmQwXvvBld4GkLj+i/ef58BmEj54+EgAh5RH9GLk2dYcdFvji++O + aXiTfrVkqAyfFwKBvDCSftvLDz/RnC+9R9muzSFft1+KbnHRT5npXKCo2S/q2Ivp8Y+c+bDs+4q6 + xjnP2L4cCAya6IyGcwXB/BAeE6hFVVnx0AfEM8MvXPkS1c1QKmf4VXNwckWBtNz3CWZbCBb43nwb + tK2BnI2fq6TC9kkEsskhBkLSSxZYwLcj7HDU9RE4dwRTOXhj1BGTLUJ3D8AVZQoSNenck5v4LKA7 + nQW09aDjbX/rrfiFQFbzWb0936xffSUcrS4RSc86D4YKin/0F3+F3hesfgze220bsTD/JhBmcU/R + w6eMvEWQAjNTjn/mj7Wu5lDyTZNsxXfLWB15EHR8UWIzSZpsNiCU/5yn8bUqMI0Pi4NtJrRUa45B + trx2k6GseuOXvz2L5TMHV/2FllCbGY1vzw4ybStjc/6U0byZwgLyKh3JvAUDmK2DPiir3qbHa1Vk + rU6DCd6LR0DW+ytXfuJDQeB0BI6a2c+7DFuy8xA8ajjcM5p2VcHDTcwZ2ANtyWZZlrQ//FXb8gCw + o2NW8P4VGoqX8MZolkkIruNU3zbniImBG8urXkfyvrR/+eeCx0YUEdcPrTcZ590L2pHwpfhL7XIp + 39gFz31YrH7HyMZB6mS4v9MagVn+lATDmUAUszPhLb/RKfeOU5hEw5aaB4mAYfONDehPOU+P4OJn + 08ZO73J/tQ0i78taX/WqAEyWhmS3Gc5gxmesgvx7z/CqlwBpRMrJtxFj0m2awpvjHQvkle9h751/ + +skoH90ffm6/BI+N6DsUMjhCncZsDLJJ3lMBxI5Q0+MjUsE2u3cySPzThL9Pq+9p3G0deOr2Zxyv + fsIwMzqBdHnZ+MevFj4/FuBxkGPCrfznzSdODqdk5xFu5QN0zhQftr3mEFHn+n4J/PCrdI9vS977 + UQNC0N996Fg3G025wIE+CTgBrPGFD6ejG032SAkIgOBj5PKtPtpCMIFSFXN67GQ/Yh6+aeCuLxDv + zU/gLabeXoBo5RLen3V/1XdCA7lvbWEv5AZvfkjpHejl9MSYJRR0gaxoMDMzDrF7lmbLj0/kekrx + 0dL8aDIiXoBrPpLZu+91WsokgS4Kntg8SIjNN46rwK9eTG5x6tmoXwc4NOcdPrLejoTULLWfv4i9 + QA5Bk1rnAc7E12giCU+w6l1JXvkvjVzPikooeBBMrzjA3u7TlT/+rqz+Dv3lC/uenQZmt1zFFm4Q + 
m6ZGvShCfboh0ZvLjJ0W5wsdqQkRkGo7E7HBXvDwFTPqwq/ZL65TdfC2BxzWmGb0vOIOBUDT6t9W + xiWaImZJ4Dd/xS82qD7PA8fcwVVfFtnE8DWB7RP3hIEqAyMxZqLUdeFQn+zc8o8fvPIbJLux5i26 + mBZwnmuTuoJoe+SWnCUQ0rb4g4fjc6umoJGiBUljNPbj03gHsoq2PHZi9MnYV+8d8DGuOdpa5tVb + FM9Gv3gkG9QU/fjOYwdejOpIT6t+JateBz8/RScg6meiKRpszvREgJbeoin6dBMU0TFE3IFEXleW + ugDvWCb4wH03bMqqAwc/deZiWwG8x+w6bUB1cc/YccpXxjrghvLKh6jtzXo2BwsLd56qvvDKB8uG + tucBoGNhEjnezR7rUx3CLz8vqFSy0RsvtZPCntMUrJZ06LvwUnUQncMXPUiL7+0MyEuQB/7jF9/Z + LBThC0jGpSeLeTuUE5qcEATJ9KI6Aaz8xSNYDtqBHj7va7/UnSfASvvk2AhHZT2fuwuxwt2w8XH2 + Jcsv7R1Gbetg1A8H/be/XboUNlHiwvz5tQn41R82b5VyGnAn7E74iMh2rVerHzHINn0esaUbUGcS + Vy3yO5DO9PpdloyNW1CA29t+IE0eD+V0ktMY1Ndtjk1bl6OlTnsVPm8qxEg/GDr/cvXmp+fp8a1n + 2ewU3BecSDqu/YUiW/3dL1j1OWJrfizL+/oFYb1zCO9pST9vckUF6/kRrt7NGVn5NWyXqcR+fhbB + qrdkeNAZpXZmH7MVb15wJ7U+kU97nc0OH1vy1qUhNS5No5OgfRZy1sAtxtKh98hh5y0/fYpdN9b0 + uVV2HTQuZ5V6xsktxSybkPLzJ3Dbzv1w31sc9PfLix60cWK/8wHpd+xIv+LBRCXuJYPzkFOtOZfe + vMCAh2v9XtcvvMWv7j4gh6nEz/NxLttNvtWg5BD9x4fATy/Czct842uQeoz0ajXAX724JRKNVv8y + h/bd8FDYHKdojnziy/CiRBjr4MWYqLYvyOllSle+ls36gwXKPTrN+IiYHfE/Pv2+FxApDhSioQt5 + GcZ0eWF7eE+Abz/1Al3pk6FlMJVysvZ7TiGvY7j6l6LX3/D7DvGlPKPNBmrl9jXbGhTHZKb+ubqD + QVUyDqz8Cx+FR17Oim7exU9/ulJ/+/Wz+Wm0ARhORkf35kOJxvmILXDwX0/qczjup6au7j8+S9VD + RSLWpx6E050lKO4Nx1vxQAXv/bFH1dpvmVPipPBOPhpZ3OsAZump5pDjNBeB1d8azEW4w/Q6qPTA + PDUbL+dOhuihydhpirCcVj8Wxo1mrf2G1PvjT63+CXYD/fXvcT5Irkjmgbr6AU4K341XodtOKvTm + wyUEGpbgUes4OBm/6l+wO3AS9Rqvjmi1u6nQjGiDfe1i9lveKjil5uYrdkfHAet9FT9+sf5uwCKz + ACpVGDg0blUE6F3eCVBsXxVVb1qvTzOrp59+IfOm8zz+5yeKN1FG/Dy3YECb1vjxRWxuH1PJ4Ld0 + YS+EJdWUOirZTXUacOhos9aPR7/EUfhVWPiqyEyR583DmcpQacbo3/i/8g/4FgVE+Pv124uVs5Ng + nY09Yr7y6MkBOLm86j2qbcpUZ4N9s6AzXlW6X/t///YHrLOMVa+pezajmyGD6fWhNu+fwdy7zh0+ + DlKMD/WD09s1noEZxF/sMMzrdLzsZHnP2jfi13rHrOuOh5KDJGoV6racH82ug4/NVsTWd1dGc5Z+ + ESzC7kw2OI90QdhtrV3DSxM1d2cFsHHLXtB9iXsiOaWaifHt2smnOU9X/zLNlqQFKXxdJPqnXs9i + oGjwNRMBsRC+o3X/FgzVDf7156K5VeYOkiYX6d78TDrda98UWunFw9bQ7MFcem0gn+f9hSL+PPft + dmsmkldaPs7DJ/EW3QIJ9Hw3pJapv/Rh5MwAXlOUEj7cBplY1aCAN+t0ob4aDtGE718LSN+dj/1n + BLzxyfRBvs5ujNXEsMHMX/QJrnwPOxtmsW1IWwR7IShxtOIJy7e9JT8s1abWW1ui4ZDyAnizu4Dd + QQuBkI/vC/zh3conytEphAqu9RmrxdxmrGolH7RCM2GcHSSPXAKn+NMvEj5A1Sfjk15+49SlfuF1 + DnwIP3+G4h+eT3lVwbW/gZFwc0FL5+sdyr6v47Oty9msVN4CyL7g6PF+dMGP/8HVXyHLijcLJBr6 + 9Zd++rNsGqfg4fedTqQ7i3w2GuPdgpfY1LAzezJofJZe4LR3Zor9u9BPnnUMweqX0f1+1NjiitgA + Uw9FHBeeHA2PKRmUbR4cyCs4BR6LhpkHwwPdsWOSHvTnnVPBtb9L9WEzenN7TF8w0zRIygtP+pWf + VVBPnsc/+nNe/TJ4uW7wT59ntEBOB8NKA4hK+yRi0eOmwl9/3JK8WO/sQdFgbk82PfD7kz75GkC/ + fuzaz7p5/EbcVbDS6hwpl3zIplt91+ByUA9Y886BzramFMDU0S9ok3ox++EDiOsC0MPp2EWkBu0F + JsL0pv5Bt7PpOnQLPAr2Hm2Eo5FNosNC5e/fq4D//Mdff/2f3wuDb/PIP+vDgDGfx3/+11OBf6aP + 9J88L/yTCn9eIpAhfeV//+vfjxD+bvvm247/d2yqvB7+/tdf8M9rg7/HZkw//+3zP9b/9Z//+H8A + AAD//wMAWK4fC+EgAAA= + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cache-Control: + - private + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfc4c8d9b11121-ORD + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 18:16:18 GMT + Function-Execution-Id: + - yvvl9h8bnq2k + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '42' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - 079a9786f90021c1a5f4aeb630bbfc1b + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: 
+ - '3000' + X-Ratelimit-Remaining-Requests: + - '2999' + X-Ratelimit-Reset-Requests: + - 20ms + X-Request-Id: + - b1c65dedd1c013d35ddccff10d9bd9bb + status: + code: 200 + message: OK +- request: + body: '{"input": "a grocery list itemizing pancake mix.", "model": "text-embedding-ada-002", + "encoding_format": "base64"}' + headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '114' + Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/embeddings + response: + body: + string: !!binary | + H4sIAAAAAAAAA1R62w6yyrbm/XqKmXlrr4gI1GDecRIRsEoBFTudjigiIHKsAmpnv3tH/53d3Tcm + YCUiNcZ3qvEf//rrr7/rpEjvw9///PX3O++Hv//H997jNtz+/uev//mvv/7666//+H3+fyvTKkkf + j/yT/Zb/vsw/j3T6+5+/hP++838X/fPX386uWuBl8y47tr7qghLc108KIrHRXIvvGPynklAh2g+I + t5vWQNnD9RiWhswcw+cSwI63DXOxUiXdbX3MZdinKtnTOzf5uEIiKkgwU2SlWifc4umGVI1PuJ4a + Ek7n0k6hWzodlc71Bw2ahCnyPs7ELD/vkgkVagTCypCJXhhDwvnYp2C5tonFKGu6aRPWGC5l9aBz + XH0KLqrLGpFyvrItvy47tnxFAEWyr4hveBOioFxsuOjrLRaP3gv1xtKXFHEQYnaHjc27IJVr2K3N + LV5t0QfxsIwjhd37FE9NsSl4tUQAN0EvsGpfUfhpPY1C2L1T4ha3VTGe49aG88Xf4dWnLlFT7Rsb + bZ1dykxrPnez99YVuJkqxX2QN5y3q5MPx2WhkZ3fntHUnE4lNPwj4iXfNAVr9lsMVCcpVhLGzdkQ + xQyczWum4qqnydwwzVeHz8Ij+nPYdKy8vBewkZMn8ZV44Q29FPlK0UQhIQcfwvHhcAAtSEe6GLI5 + mdLno4fbOL6Z7663odhtdzl0763H3LyqvPo03jXwgtxhdn7eJdPJygz1iWOf2a3uId49ugAYutX4 + /UKQ9Denb8HtHIcElvLhrTMtUuirrGPked2hWSbogBaLvcw2g+N6bX29z7B+TyssG4LIZ2XOSzUP + Dy7ZNhvPK/EjEGAf1zZLj87Dm5q9FKNbYCXM2hq2KfptXUK3HXuieY5lshNcK5hSfiDaRT2gMbvG + BmpkY8Ar62ggYa1pMeDDpyAbfXQ7vktNCi7XR7bllWnOg644IPe9TszzYjKp5t8oyDvoGOkVPxx1 + ufLhgkuHCt/fn86yLIG+KPZ4+bH6kFqBRmFbrWqy4Y1h0kXtiYA/tw0xctsvZr/NSvSOO0awFh7C + eb28uFCoAMT97i9bOysLqTauiT70gGZO7jVci9uRbHtLMnvTKmqkXqoQGwe08aZDY2eQrJ4m25R3 + oegNcqWoe+lL4hcXsavdSy+CsXgbxMuaOWzS54lCo4Umzav7Nhyn/GpBIaORLq2XjAatM8RlU94s + th9VL+RJX6Tqfn0+EK192uH61FuisnraV5wtvMrjz3tlwLGfWrY5KU+T52S6Q3S0PcwymZnsSswc + OjvRv/V3T8bt5ZQjcembxD6fem+aeOTAr552uSeb/VGZR3g2pkv23KmLcfkuHfiuZ8R6XTnnounC + Tb7fSBrzZTcsbTDQt1+ILsZBOD+0oYKiOYdEmxdSOIIxZGDV455Fbf7sOEnzCOg7ehJvXvlojG9L + CS1cZBPzrr2KiS+uGZQk+rAjM9tiGLL2ABt3OBFPdKOCRqfCgrA+Z1idphufRSOzUVQvZypJmRFO + u06aUbsyz1Q8Lo/JKj5yDP1DWLFUj4qkinMxAOMt7yli2cacZE+wVfuRZfht2++EdYNkQJ8/nwRv + HkoxifUqhvpYycSv+kVIc085gKZvNkxf3vJiLA7rUd5s7xe2N62w6AeZ96quejmdrxYk7PaqbFh2 + 2kijstyg+WUnFcqWm5D2o7UrfngOaJ02zNmglcdOvS/CPcZXsnv6vTl9MrNV6UKL2H6dFiFn2gvA + E54HgivPLSYQn5Icu9VE1+km4zP5MA1eaAts2yr7gme51aPFtnXYXr6rSf9AkoG++8s2LoGi762X + iL74Svn6+eh43EcuQpIY4aEEak6zNAuKHkoHpjfFu5j0MJYgfhNGha246lp1lhWIX4uJwvZ09/rB + XtfwEXcDVX2Fd5T37wNstXWE4Wrm3mSImq9ezs+RbfB7ncwrq6rgCAeDTvvlEzUetStQ1fLD/KeO + UJ8kW6z81nu+OhezUyolvMZM+OL9qeMJSD46KGJO9vk0Fo28kFxIu1ZkZL8nIasPsY3alX7+1ucc + znfqLkDBzY5pfkC7QQ9jBULj7NPV+/EpOI0TRVll9w1L5RcpZqc9BipXUEF57KCCD4Hqox++Xrb7 + Q8cjcVagsEuVmOHS8mh0sGL069dtusnQ5FFcol3T13/4YD6rsq2YNwPTs9H0CV+jyYVTo3+I1y2u + fNyx8wjrPFuzzfiYeN/uJgzbc+Dhxsv6cD6+sxxa7prMuwibcLV+HTCc905N1QXvvCm74Qjelm6z + 05sbIUfSuoLgWr3IVuulgkrraQFJuY2ZKx2eqLutrzmMN3um0sWGoj1kOxHm2yTj1VPNkyE+Il85 + kTkiG7Em3ifUGw26q3vBQnWtkxmfrDsc3HEgxpEJxQR236OYn+/shy9jczzHSPjQG5Zui7zgmQUW + 6OouZ/pNU00umMRGl5WnECvaD3zEbUrloEIa2a6ZElJqbQEknejEustFMlOLADhn9CD20Xvxmb8d + C0wPvXABXhgOtjO76ku7L4mZL3HS2582gPBGOHE+nIWdIjYjTKXvkCSEZ9gvV6OEpP3CIF4lkI5e + 6/sCBUVrMHxpVibTVxcDHt7LZ06z6HmdXQ8GNLeo+NVP8Q4+jgFFbuwo6Jc8GY+P3PnxAZbSaekN + 
N78TlfIaO7/67UayySRw3oZL/NFquj/9ECttxTRJq3l/zrYHZHSRhdXWjsPpepdKEK7vgfh7MQpn + 17hVUEfrNdPTPihmDTYKYpbREIz6nn/YY67gh0e7U//xePQiKUol9US0W7ozuQJaCsyoF1Stjyc0 + nFI2I4Y/E5ZKweiGt2PE8AwNkfaXPjLnePNuFWeOGubMj5SzYmPGP/1CtGhRegyL9wBobGhYGkKh + 4PUhttTV07qS+BzsPRqZWob2o2HjhRaO4fBi8gxHH+8xiuqVOZFKoih/+irZvLmR9Ei1ZzRlLjAs + ZVD0gXbNIXLDmtnI58kEdtnDkrxLpt3gwal+iS2UzP6G2cflMeRoZ8WQcTcj1iIxuPAMp0h17LNG + tGcdJmOapDlwLaio9K5nr+UkaqFyWEmR81nzNrWtAHX35ET2jGd8SvTVqLiGioifUav49gNGD2tv + EHvMS/PLXwp4H3fCgtF3Ibc23IVClkdmRfs9XwnmeIDuZS6pNDUkmcUeSsRjSPDReGl8jG9rCaLB + WFCEogXibTAoyhcPqJpVYdI/M9VXNvL1+Wc/x9tbDYCKvctwKQCajOw+/vqPuf2sIUE2zhjGcbP9 + XXPxnmSxuqf1Da/M994b23sy/sEvL3a1pAvLwxl+ekL0Vx+PDxv5DPDJUoKJa5ri7ubnkEzX9Vdf + LE0KB7cC9ro4WBLcTzGqjuUDe6p3vJJ3n6SfBd7CexNPZI+PejiYXeDC931SMVsmHR2fN1sm5Xhl + 10ZERS8qTQ0cSwrt9YuR8PQT2fDQwCJ25FI+YEuPYVFFwR/9Nu427uGPftwWpPP67aAtQAxniVli + AWhkx5WB5LpdsR0RP+b4OK0F6E1+YrqzWhWj125jdFhss68/eXZ0vXy6KAiCC30m67SYfvzTWssX + Pm+52E2x0GYKM9oFs69tyLm8wKBMrvNhN8vkxcCc8Qw//QF7w+LTuRkcRboqIjPUreANZSCl8Inw + mxl6DMUknVsHbCp5bFuyMRnkxqI/vh/au6x2I9ZcDF6aIjp31VR8+cFRv3xAQu2e8G5bhBa8xlz4 + vu9twhXzTdGx5y1F0ikwmXYaK3VK8oARVKJivGbnSjlGiklcctDN+pyRQPk+DzEl0Svmn/6/Xyvr + jx8ZhceOIpTrM14FaPboB19ikLrDi45deELz2VU1mNWCEL8YppCbWpSifVQpxOrkfTKOiqkBPmu3 + H56jvNvqmdKPlc/MDW86qi7VGbbpQqOrKNBM8X3d31E3cA2P96Yq2GcTULB9Z0W2yxug/ve8+Ufh + ZAe0Q1PyxDf4zBJjeqUryXwwJgzVJq3Iz682p4tbgSO3nJBDfvF4NY+2aqLlmZlVdPYmaxwFdej2 + O9xF9cnku25bola575kXOgzNIulAee2x9sM7xGqlL5X0+DmT7a1YFZ+FjBWEzEtNl7ofmNPjujRQ + 6YXJrz/N4ZOZNXgnbuP1NGnFuFyNyq8+cL893c0f3sNHwTJef/0ELdJIBKmACwvuYIRr7fGYwa78 + M9Er/RaOaoty9NU/OL8oR8StcdvCu3a3xP/W23Au8R3RJFjgw72xC2FaNhTErWCx7S6mHgXlackX + mdhEk1ddONdNk0GJTgesBASbc/ARz5CZVkeCvbn02H02XDgMmYlH7XgMx70rBcDwe6Kr8i50zD5I + OUQ4Epj9yXjXd/7WABprGkt01UlWX7yEY45z5o0t6aaIQg6wv6vEOvarbnwlbwFOp7mh3XVpcC7h + cEQ/PfZH/5NzsfgvPfmWKJqjU2cr86u8YxTC86vPRw1OHnPoslf8ZHW9HBWYleCI184FmcPgLkT0 + 1Ud4eS+icIpeUo0uuHIIfq0Y6ncsnX/+A68vpdOtdvwE8OVf8uV31ITNSYS2sXJmuXpoNrvqZiH1 + EOxweY47c3wlg4jSTZsT+7mazD/498hlH6/ZoQ67MfRztHt2W1wdPR3RRW0KaE8WB7qw030xJ5Ks + KXeD52QjOUIyBYKpAB/PCVU2exXNoDxt5WrRLdNUN/RG0+pqqJ6ZTT8dVb2ZXdENihDdaX10cjQE + x3sFxWVR41HodHMMtGOO8OFdsP1lqpLpWp5GNY7agLkvIoU1Gl0JSdJHwzIqXyG/k5sAdSybuMvC + dVeL23eFbKp4zNDfmvl9Pgnu71wiP/znazFIlUkSJ2LHopWs/vgB+/VmP79c7dYPFy3pkTL/lUdm + te0vMywP6oFoTRom3LWvo+yXrw1VA+uB/uzPKDghC9Mm7MZ6wWeZFDfKjIwtOIPFOYDVyqbEEscs + 5O+H36Iovp3o8or7giUwYvjlVY7gfroR4hj++DvFLbtulBgqkSk0PsGPnRIO7iKJ0Bcf6Nc/Ii6q + 6xa1RdMRTzoF3lgdt7bSj6VPNkZ5+4MXQPbxlbi1oqDG5rgF2rcN2+tkMofPGN/Q/Vpa7H649N0k + HAQXJa6wYUfAQTIZpu+i2d4dCPvmF5OYyJm8co5rknQLmTeSfLxDXZEcK1dl7FhcqSn64hl+Q8q8 + 7nctK1rGtr0Ve8NHvR+gum0IVj/bLpnj5GirflwdKRo6oxOEpawBi5sAL05aYdL1nARoT+BAzO2i + 4mOJpwB9Drsrcdr5Hs7TNnNgz0OL2c7TD+f+WtpwOT9G8suX5tJuFSSMt8OfPKSP96BB12YGib56 + fGSW7EqKd/OIvhHrgr0+3Q0yJJ6IFn5Ej164Z8CXb8mmvEdF3Xx2IqRUFpjD0yOfjekpwGHITWLW + z7CY+VuzgIxsT1kkHRH96iVAj6tDNimQcP31cxBQ4pJddhaLQeLfvGJx11mstn73/X8H8OPyyKIF + foZcrfoIffGB7NynHM765WCrw3HXMPMoVHxONgDQXugaT3o28B4rXQqFWM8EX5qTN6GznEIvBzpF + 6GUkYsMcH+73tcH0XtonYxiA/ctPyT5PVmhG0i5A6fF9/urJCx/vYa798jh20ke34N98QaFM/jCr + K4KufXrtQjbeaE/XH5R1bJd6FLS9aDN3ObuoXZ+MGvLVPiA4Fw3UC74AaGXfSobv3aYYsr1xV755 + JPMWYmvW4naoFE0WGNEcteIsfsYHJdCk/k8exb/+B33zAazQT5H01/DLt624ZPjoPMyqLW+OdCJj + xM5Q6OH4kU821MKZEOxHQzeOW7yAna0GzLiDkTCiH3vQ5/LGfv1Vw6qpYCiUJfnqUz6KF+WMPl7O + CFlVLzQfsp2AbMAtIVyWvcGvmljO/ccHt/cN9uavnldueSdhVC735lwhGNE1lOVvvUxozrgEQNLZ + ouzJyo4vp4bC/fXsCQZjy/ncbWbId35Dp/3L8ySHSBHkO9wwb9Lcb71Ozh8/vkvWVrgmHfV/+M7s + UCtDVrq5BcnCkdju7CaIX65TpaLRef7883/lBb/8ASdLhujb35UgEheT3UPHnC92SY+ct+b+/EAy + 
kErqQUuvBZ3ay8rkZ308o1cnucQl9IT4+qqLENvFm7hrexXOXTMKf/z5nmxGNOnqcFO+/YSXNT7z + 6ZR+Rjg15gcrWLHDUbOi6OcHsdwYvrmSG6uHjcJdRrrxhcaoVc+Ix4uEmF5Iu5HrywoFRW2Q/eNj + h2MYBzOSoPXx7G3eJnv284ikpezSobWl5M2kLgfurihelMIdjeE7bWGzEM4sPmzZ139EI1w+q4Sq + 9B6a8zcPUm7mkjI/PMvhcLi7LVI2akSw1XTesK/XGJoI9ViN0YFP3/MGGNfXI9PwdjKnWwMpRIO2 + oEvpTVH/ws3ij79xLvd1OIXNSUCzV27ptLosw+HCTBG++TrbvHme8LdyOiiXnXcklrKpvfmgBimi + JHJJKlzeSfsKnykqyGHGK5fcuz96Ne92E4kv93Xyzft98C9+RQJ86/lsPf0KKuwkxDkHg/nVcxL8 + 6tlZpGCO1ZFY8NLSJXPQLkp4dM0PKjU+O+LSjxmu9/bLgujdCwSzjhXFV6/D4e7GxD2VKyTcs7cC + gvm54nEX6oVI15ahvrXRIa4YbDiVPm8fvnkILRpRD8Vvno+a8/HCjMc4oX4R7ioUPfsnud1ltfjl + hbDMI8R2jZh0ozscDlCVzsi81XnjsXZ1wvD1s3/OF/pW7mP0xQuGG/dS8MtVLv/4edW3LW/a95oN + Ua3O+JWtR290hziA9Xbb0YWY5904b5wU/fyv+vV7begOPfrmz1T++jsWvbYpjOvk+Mt7OZVuzfmX + N5Mfn02d6UbyN38jluMfijneDC2kqvb65YPfPN+h0DOc0faqjAWL+7uLNrUufPOLFZqblN/RTy9Y + HJ/Mz76QqXJaCjvyiLd6spqqQ6Del1VMfnpoUI+5BQumbcnjWDUeb/K5VhO8NZkmdLo3O9tmAd5p + sn/1FI6Pq0YhaHcLmtvMTUQ5cwxIninFIn8d0FjtGgkeFcTEuFr3kPqq6aD5gCdmOtGLz8bWi5Aq + Hh4kFLID4u+H1armJxqIVrcmYqw4Buplo4TERLqDuDvSALqQZXTRvjbJ2iq8EbZF1RLnCHPSf+sF + feo1ZtvIeBQ/fodwvunM0l7UpMshz8G63HJiMq3rxvC5BrA/lU6bRkTdXItDDI5xEOgKRWVHewhv + 6Pu+MBJdoWDvgzOCedMwcaWHmQjew8vgdRRbZhFPDbkxyCLYkdARQuK+mMx5fYbD3YkJDp4uX2FL + v8Hc70q63OQ94gs8x3BJlA/NmpQnPFuPLqq3Rcw2Ave7STClAOxWWLINCz9h/+UvEJfYJLqnzXy+ + DqgFCWqf3NN9H37PN1LUJqPJTt6BdP3P7xxmZyDatDvxlraOA0pMW7q2sMmHUrUWUEnTlURmpqBv + 3lzC+YJ3TEsFk3PU8R42LjvR0YOwW3+CtIJpf30zZ/PIwvG8UijQcCEQ/x7vED+UugFudKrojNSm + 60ssH+B1T2bi5/KL//Qx7NfRgQTnDQnH3ESKopRNSpc7aVvMzva1gPxcOERPLi4SzqF5g3JuEjz3 + I5iDdM4d2J6eKjEm+4zmX37eJrOJFytPQExK6wyJ9/vA9uosmWxOdeUP/png8XAYFwZWp/M2oatb + JXQD5b6AnDp0sFi3C49lud/D1deOJHWFOpmF5VZAdbXPmUbrR5dhpUgh6fkeL09yyackMPI/eLd7 + XXYFd4v8BjQ5LJj1Pc9hjeadwT3aNX01xbtjK12rYHycRnx0Lhnil9d5hraxc6aJWVGwjXcU4Itf + 5PA9v2LhFPlqkWs7iprTB43TsunRO7Urspslj7PSbS0IgsOF+XJJu7HziQbXwKiwiNum46hDPRxb + 6cKuePXNW8adDUnAhq9f88z5q69gPMYqlZZP5vWasM1gHLdbslGDRVH/6qd7b7w/eXSTR4sFDB3Z + YeVF4kTII6eGNZQXgpe60I1hfJshu9pLjE6NY05jEkhgZXZMdEevTP66pC368YFx3rBwtNk2BfVS + hsxVaq2bf+eR0hK5eAq6VdI0kqWof/+mAv7zX3/99b9+EwZV/Ujf38GAIZ2Gf//3qMC/b4/bvwVB + /DcT/0wi0P6WpX//819DCH83XV01w/8e6jL99H//85f6Z9rg76Eebu//5/a/vr/1n//6PwAAAP// + AwB/Gfvm4SAAAA== + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cache-Control: + - private + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfc4cb8b3713fe-ORD + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 18:16:18 GMT + Function-Execution-Id: + - yvvlh8rh38s0 + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '172' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - 91f42e8a544d43c21eecdcd9d8b2f786 + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: + - '3000' + X-Ratelimit-Remaining-Requests: + - '2999' + X-Ratelimit-Reset-Requests: + - 20ms + X-Request-Id: + - 54675364ab43889d41f33f1cafe85a27 + status: + code: 200 + message: OK +- request: + body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "Write + a concise summary of the following text:\n\n\nLITERAL TEXT: \"\"\"What is your + favorite color?\"\"\"\n\n\nCONCISE SUMMARY: The text is best summarized as"}], + "temperature": 0, "max_tokens": 0}' + headers: + AGENT-MODE: + - 
AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '269' + Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/chat/completions + response: + body: + string: !!binary | + H4sIAAAAAAAAA0SOwU7CQBRF935FczdupqQEK3SWLnXjgmgTY8gwfcDIdF6d9wAj6b8bEpXtzbkn + 54zQwcLvnPp+iOX8qUntS9DTs/jvdGrb+jE2x9flw6KdLmHA6w/y+vuYeO6HSBo4wcBnckod7PR+ + UVezRT1vDHruKMJiO2g5m9SlHvKay2pWTWFwELcl2DOGzP2gK+U9JYG9qwyu6v+5MVBWF69cMxr4 + HQdPAvt2Rk/yp8wcCRZOJIi6pJdATkrpEu+KzwPJRV442Ye0LTacC+GeONGtFBt35ByUCs+R8wSj + wSakILtVJiecYCHKAwxC6ugLthrfx5sfAAAA//8DAGuClxRMAQAA + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cache-Control: + - no-cache, must-revalidate + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfc4d0f99b2264-ORD + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 18:16:20 GMT + Function-Execution-Id: + - yvvl4b8j5ntg + Openai-Model: + - gpt-3.5-turbo-0301 + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '1327' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - cab4489fcb073187a73b112f5825f6cf + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: + - '3500' + X-Ratelimit-Limit-Tokens: + - '90000' + X-Ratelimit-Remaining-Requests: + - '3499' + X-Ratelimit-Remaining-Tokens: + - '89461' + X-Ratelimit-Reset-Requests: + - 17ms + X-Ratelimit-Reset-Tokens: + - 358ms + X-Request-Id: + - 509ffc387963a73e880ed3350d567cf9 + status: + code: 200 + message: OK +- request: + body: '{"input": ["What is your favorite color?"], "model": "text-embedding-ada-002", + "encoding_format": "base64"}' + headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '107' + Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/embeddings + response: + body: + string: !!binary | + H4sIAAAAAAAAA1R6Ww+6vLrn/foUb95bZ0VEpQ/rDgGRkxQBFSeTCSAi4IFTW+jO/u4T/K/s2fvG + BEqCpX1+p6f/8Y+//vr7m1Z5Nvz9r7/+fpX98Pf/mu/dkyH5+19//e9//PXXX3/9x+/3fzyZv9P8 + fi8/xe/x32D5uefj3//6S/ivO///oX/99fcZ6z7epWjZNRd+c2GRtyvqQqWnQj82ERBDVShmht5N + WA56YFJ0pbZVPTouvwUfrrvG9cB77RFfZd83RKggVH1kliZoF9CR7qmjJyhojcizojZCx9zCnvj6 + dJONkwRuj0tND+JmCPuLuFNAMymnmr3XK55N4gLoxtGx6+WmMz6ObQnP3TGj3uUhpH0FlQfbyN0R + 4fZqU5IfQhEVoVZS43ySQxruvj1Ir/sVO/azQN/X6xyAGBxib43vV6dXq4ChUv5gwsEu+JStCAM1 + Zqr3OHV69fQU9S2TwnuSMR8+DrMR1Gg7LFrsnnjn8EjzTZQ2ao2PdzfUxnRzecML7As22DVJm/au + l+Bdz50nDuo6HGtUCnDaZwgrZpo4/O0te1Rbh7O3Jf2T8+Ai+tDsLhlVtTvveNGRN5KbkVBLuJyq + RihNd3t83A7eqmVauvYOuy8kxQd5TZZv+cTvxUIGMUi9r6Rt0fTaIBUV7ftOxjcqK7Z7dhHczvsV + maztwpmUt7RAr5O29QYqxpy9rgcTtMHR6OFotlW7iU0Fqr3tUP2c91V3fXQ5uuvwJKOxwmmrD6wH + xWsd7HbK1mEH7aiCn8d7HHDjFY59USWyI70xeWLNRsy2bSbt5OOAnVtQd71ALxfIPt8zNtqhrcZ4 + SQD8RKg8yfCg4siNY0TYN6Y7Y0XT0ZUED7aSsCRTVRl8BX26AbxdrAhPZVyxi2ipULhCgq3roHaC + 3/UxbC7qgR7r50Xj1m3no3Fc69hzViSc7vs6QdZ5nLBxDHaaWK0yE6JT5FG8ZZFG+9XmDVlRnbB1 + 
Ejhi+/EB8K3eN+zWXd+N9cOrYa/eONVUtqvIcctFkNJ+8JgXfRHD+lOEVbXNsRejHPFiC720SuoO + 3yJ/EfLddPJhhbIbNY7BUxvqISOgvw4fbERkh8QtEzJQzYtDJKWzuYAbAUAyLYcesumNRn17jWEU + FpTuTo9z2sIRC5vWT3RPqLEfTvVaq9F48HrsWNRwxot98SEfJ5cq1VNEfUfHDZAwcj2Z6VE3Xt9T + udRz6UH3ZP8OWQvxG5oj2+LDS1HDtRLta3hMloBVt3t1/N30JYqsMcBHpgvVuPFuNnzj7eQt7+Oi + Gjp/EKH0Tzq1GA75GLS9Cq3cY6xE65MzXadYgO6QbekxXxthfWcPE05SpOKsqD0+JsKawNHyFOp5 + p5iz81vLgXEcYq9UA/QSQsmAMowpPmkf25kCPS4hsniAlesiTifsFBuZGIpCmrpfhTR5Ny6ELC+o + U3w+3QRNbaDPd7EjqF5pGnNifyPjplhQZWdcnQHHS1X61Qu3woZPpjxEoL7aqyfwzudCvUi+4J6w + R+1xTVKapeMCHDc540Ox1bup0soLNB85+X3fiksUjH+vn/pS01ENMhHatxV5YynUnKxRR1D5iU2y + dvGkkSx/TpBzWfK4FNNqYK5wAc+4y1SVWRfWnjrFaLy2FbVIv0PDuxwyeKieRuB5P6SCWFMD3Q68 + wXomDSGJ7i7Afih8HG40zeHfoibQt4ZGnXx1CSdBBRPdU+tL7cNh5Yy7bRGBoK9E6sBVRqwfmwsY + L1XGh9u94hN5Jhk6QcXItjOXFV8cxQ34x4VHPUESQnZ3eQ6256X0h7/U7GkMCLkudYT4WTG6uyjI + fXkfrHZjmbKyH77w3iPF2/JL6kwXsgVp/15r2N2cTTRF28aDr0o6IvCJdJ1ZbUrktdujR3a3Whs3 + 3sn8Uw/2Y/NKx4hKFxCcSvOmbHx0bDyFNQoO3pXi+5hXHcmQi5Kj/sE2et4Rie76Aoq7f6I3hjli + maOrMG2vjB6uXVD12S0wQR7OPjXoUHDy2w/aRVt6U+bVDt9Z+QU+yhKT6lMQh3wWuwhtF/ETW535 + qOb5LNAVThXdhfa3G6stSgBOa4cqq+LB2fkjSRB6VKO7Ibf5gA7KBC9bHMk0RXY3iPYUyHZXfKi7 + apfOtEYVkau96ZBFNRgOY/fJhwx3JXbylZjSCV4x6FeeYmsXZ5xoeFBQkZhvnCuF000fijzk3psO + qzOeslEHCRnf+kF/9UnN/pPAWgmDmW+qlI7VBsCvP0e8W0srbfKnLkfOjTyxam0X2oAOJoMFNwzq + VYOh8a25LdG5F0NsBYMbrvr9opbuy/eG2ubWSP/wxTU4aNg7a0U4zvODz+QLWJvA44N+5j0or9DG + 7vP+CbngBRmUkjZQSz2t0ufLKTI0Xr8VPoybtcZj+lTB3NzfdK+dj44oG0qC6vftia3cbxHRQ0eC + 9PxQsWF/ruEkrycPvkJ2wGeWcIe8T58C3XeiiI0yHkOy2ugi9OsYz/jSOkw5DUya+RXrSw+nY6P7 + ujzzL9U8t0zZa0QLOMeCRJUm2fPxjuQczF0yYvc7tekg3swEud8oILz7OFw4fyZJeh2oQbot4um4 + 3U05eEKzxoqxfKGpOrEczudTTI+bgDl8WH9V8FeaThWur8LpXiotvGxhJHywZd6L08qT1C79kMXj + /Q2pmMcK8OSYY69Du048Kdsv5IWUYw2dTG2QEiLBShFVIiutqrGAagH6nFIDu9SLKuaWaxc+K/Hi + EdnEqCWbR47WT0ehJiKDNiK8bmGeD47Ut+NMfn2PNgkKnoQTTUz54riQEAkvLt5pRR+yzXcP6FTy + AwFNPaTch+Aiz3hEDfwZuuaOVhkqhPOBHlilIHLf1/EfPra37piOduFHcmItYOaP0WHL2+CB09ox + KZ2OOsxTpVgeVrVFZz7qevvSZNB1koW152cZfr/7OAeL7xcEwfWO2HXNcrDkqcbu6qB1Y3h6tzDz + pTfkayOdwvWWoR9f6bbaIe68ripc33pG+sUYOiQWFB9CXjtkmZ3t6oeX0m+9f/U+XvjNQ6u1tPYW + rU9Str3nBNXW/oy1qzhyQo1XCzN+kaUqKZyfd9cJ9nf9hmc+D/lC3AqIr6sWG6LVhVyCYALloas0 + T96BxuzhnYOdGozu5a/O2ea2FJCjOm+yNHDF2cqZWqSWRU/3VVqkPFhuXVRw5ONjtF/+G/9XFcqJ + ZCm1Qy5khG2itDbhXOu7zrpZATq+FxpZQyk7bODxhLCYhnR/NKSuj6bWREZsfqnbKTeHRVf5AlH4 + hD96lxX7yoDDx/Mxfi0Th+XZqAC8xBor98VL49qK2XCy1Du2IblUPEu3C/SAuCLdgxA+Lqg0wVKJ + A3pHt3XIvX3gyvzSAD4O31b76WNwql0046lZkRkPwZFqjPeHzEbiMIw2tEMQ/1u/8Up5Q3yId9Rf + FUtEl5unL6cOibErVtdq3LwuNvjPXUfdcrpXY1btVPlz+Gbk02Vu15LQjZChWxgf0Eab8ZO7sFe8 + PbXb9NitCnwDGP3ng57VTKjGVf8uIBhFSo/+sei69drNkTK9LnT3eOwQc5zJheC2AmxajHA+KPkC + 1O72oYpjaN1a1vw3KoseefLW+VZjvxffcBXL0PNbcYvYS1FNeEF9JAvA63T03GcgK97XocF77DVu + 2dyEUPXfNEwdgY/nKmxRf9Fb7CzU0WHq2f2iEJsS/uPvjqaqgk11i7A6wRX7tl0NDrsWWLmUddov + PygCgcUO3bfrpzPBEYvSvSQ1PVwmyXlHXajD5SOV2AmDK2dCfIp+eo2sb/Xzpw9NqJ+6iS8nSNLv + fkkM4JcO6EHhY8oOgihAZkkBdhbI6vh3cBL0UF0Nm0Cqilg710duxBi1wreOGHH3AZx26IZ3GKq0 + tVXUQ7p7NPR4d7k2+8t4+w7GM81LdUL9QmEMHR5dROBgvTS2H68AHB1csr0ItkOwU0jwOFSZt/bv + x27c5bz4rRfe3QYfsdKyFugpGhWRT43Qjf5nbwOSbxKpGCmc9gMnSb4GpYS9t+VX7M27BB5L2NP9 + 5NYaLeW0Bh+RE6n5RCq+vAsZ/PSWkrmRNj2rjw3rdoeociqJNiSnTAHxbYzYq4a3w06mQiC2PhRr + +z0JSZY3E+SKyqhZiw7qesdzkWo/t1jPdhEayRQnSLWrLWFUZNrsX9/olQZf6jwLRePewWqhmdod + WdpbWavJSvHggrcUW6PphrwWQoZKnJ6p6/gq6q93p0WzP/W228uHT4Ut9TDXD1l81EbjDn+ZcKTC + wqsy+e0wsf7oSF7vJWwk9uqnny8w6w1vKRzVdJ0mdoFw4bnYXrifinwo91BzOPQekDNow9ZXajQc + BANf465xRl7EE9RatMd21hbpWA9ZDx+9GGm6fhvdWAZMQOuPcKQxL8aOxTkyJGOrXvChPlh8OheV + 
AMOzZqRsjmL340tkEgJ0N77MiqljsZFkqcuwPeMh4906A+FRZl5PzpnDxGOrwzvgZ7yf65v+ySfG + x5qUao0cCrvGRPw5JVSNlG83YF4JQMf7mtqkdMJBUT4iKuUXpuZp0ab8SY85ile2jvd+eu9+9YbU + CSqs+/kl7WM1F5AZQUgW2mdCox2OrbxEaoO98TWmpFt+RBCz6EB3nVqE1JRWJejXMSXyO/w4xD/6 + F3nGEwLL1qnW36Lu4VrCiI1gFSDurqI3ODDK1PlCgVg0tTb4XZDQ/az3iHZHJlhFdiObeX+t27Oc + Q3Ivc6IE6bujxVYg8k9fGTtRSf98r+XTV0gjYSXlu+nmS7P/IWMa1w6tsflFz/Jge5H2qKr6vhgC + mP2mJ01N4/CnJV1+4yQdhyv/rQd8lo75wxNEZrxBlwEjjNOz4IxpqGVgnB4pVuPDOxw7/yWgy2dT + 0qtkBN2YaEcJPVNJJeLqyjq6JXr7xy80s//uK+g8WB3JhXpK0XW9+YwJ2rVvEytBalQNfY39z49S + pyD7kJ4z/Y3MlbgjBQSflEldLUK73qn4UHknbdY7G/QnH0s2VGvzc7OAS1JfiITPfsfXYTHBevjc + yVa39XStfUpV3hn2xpNnfhjLYCOgMOjv+FguaodXx5sHbN6lbH7feMrOG9jkW5XeHuqrqpToWEPh + igkR61ORTh7Va7QSRhdrp+cr5Gb/iQGfny/8bcRSGxcLZQPGYnzj+0gPvMwPqQClH+rYOxC/49dh + W0M+TBPWUplW/c/vCS9a0z2ItjZ5qPLBO71TuneqbzcW78sCpspuSOfIEZqebmlD9mnP3tqSXilz + x/1FUnXW4L3wJRU7CAsBPadqR6S6ySp2K8oSpOd2RR0mLNNxypsI4jdCBC9YG1Katy26nr8vT0rb + hjdb36x//h1jqfTQdG+XBbq/A4Pu7gKpxqAPc3RkZMDmoB/SP3pK8sQYuzeTaOOMp9LMl9RK0IGL + 37H8yk+04t6k1qlGuY1qiImy/FM/k67Lk/S9EB3r473UZrzOpdn/YV+8xSFfZEcFmhML6fkRlR01 + K1bCajy2BJ30RqMNkcTt7Kfwfj9eK66lToSWUj1hT5bsaqKVtYFldSe/+gknU35doOkKoM5WTdC0 + 2rjidgIU0N0bld1vJ8Dn0GbYucOdT8GNGOgW5D7Z6sOSDwMrFuhUjgdPPAZPZ4i/T4BubBKsD5c6 + 5DfzXICHrjpWWXnrJs8acxAlwyaCh/SQvw1ThDlfxPau1tKp2RUBvMbtB3vrvHD4pwsXSNwfNbrz + tWc40tdIQJaajJonl3Zjty0Y6rkwUO1b6B2bjD5CjhufaaymHWI2Et4w85O3Wd2Kqr9/vgEsuuWO + zvmUNo5clJD0elypeUsch58WOwZN7jF86DPNEVqi5vC2shJnQa915HWwPHjFtuyxmT/Z63X20U// + n9Njjfrx8SRA4uaF8bLcO8JnbQfweYoy4bu15fCJPALImGx6yx0+duyg7VVk7y4HT8y0mk9LIiXg + RhOjeB4fwdy4cLhMN+zaVzscLU3Q5XAvmDgIt3bVHcJpAUTNchIn7h6hvR/p8qaUMqzDwXSm+DXZ + UIS7kupJXvLpyppEyqSTRDWu9RV/fWQdhpcd0J9/n/2XB18jOHrbZ860LzBkQiavMw9QNfBZ/9ZA + tZM869d11d5DqUCQn77e+oTNlJmbcy89D5uGqtvmWK3UwWrRrM8pJsdVx/aZmqCXF4fUqvynNr6i + a4Qk/dWQEUwfke4oqr9rekgnP2SfhCjgK47sbV+vI6ck4y7UwXpJd9eTkRJCixoS5WvjAPaxNgyR + S1C7D9/YLWrCh8hYxii4rYGsyvLAJ+frCaBpSYK1QyLxP/nTLrFPZHMOjyl3+GDDKxdtb/zkC20w + szGHGc890QxaPrgSuNKcd1Jj5U98dItDAsvPzqfaIUk4eZusBVEsvthhwiPk1VRfYGWbArbU0zkd + 72bkgfAoMmpscy9c0de2R3Peh52jWCG+zq5feBf7J93lw8HhL9HxgGXviOqstiru20Upz3mwt0Fk + cHj+PfRQlfn1l8doBNI7oHl9ybo/v1J+XOuqtHwGyk9P8bneBKSZA8epRd8a++7jDKD7CN6qXT81 + bp7bGOb8nOq3VxsymqkCnODJ/vj31ivEDP3y0B3WWsQiZRPBZbFvqPMBOxRtFRGkpxeZ4vQcaX/G + rzf3QNa8/TpiDGvzD58a8/dsyOTHID18jDOo6pCBfBPgXTUa/uXnQnX1dVTpJKKzHuaMX54bMDsr + wVoSQDqszk2BZryhRvnAjkD5QYDEeu5n/FQdHoydgax+I1LsK6d0dC07+ekJqqJI4UOwkexfPWDt + /K2cb7dQVFl0Nj7d65ulw0m3u8gzf9FZf3X8GroMDau35VH9PiFmGZ0k3dJRnf1DU7HquUjgbhaa + R2Z9OjZHY/Hr/+DjKyHVaGlgwBwlU/u2vFScaisR8jpTCNLoNmTk25TgCd0a//KsSd3lElRkI2Ac + HjbdBOkZYHsvOHXCYM0nJIQRmv0oWffLvJvSzsyl+q4TvBdFOeXYHA35c7z12LgA66YHucRwPocx + /embQT8jAvYz3Hjg8ohPdVtLv34UNU5zX8EM1Tdc8+WV6ld+075YfwpwXa729I+eu+BB+uU1BCLz + +rvewNFZv7zNUmPOiDbTG4gqnb3PuLlqQx6fGHxMPcTXYlt3/MdfBVIjElvMQ8L0vJnwiFZfsk4O + ssMui/Xml/8Scc4/xya9GxB8jT0pQyVw+Dlza8R9qcfedhdoE7t0HtBE2FBtM2Th9BzlYGv5aond + pis5myIpk1ak1mnq6Fk6rt1nD/eyr+msFzoWLk0Boifb0zsTcTj2+8Ufv+8J+auopmqV2dCk+z3+ + 1aOwHx8LtDr2FxpuhiwlZ+R/5Un1EVn4oHarg3L2IG2Ump4wVOFgBaoHDWk/hMkkTYegT3Oodu/G + m/OrkF9DnaGnkzv46DPQxruZuQhEP6Wz/0r7/qW28o2ylB7l4J2+ukHVwYheBnW+mhmuxmwrgf08 + bbDaHMWqnfNykDWbEb6GvcY2YqQj/owqbGrHdzcZ1S6HBi048dWNqa3zL+7Rg5Ye3h+XoUMHb9Bh + eQ/e+Ihu1/Tnd+SV8QkIM9NEy6Xr0vjpB29VrIVqelxv8Y/vvA0QrRtKvSWw8R7On3pldJcr0q/f + M/d/K0KeSf7rXxLpGW27Yf7/6DMFAjVeQxSK99L8ol8+Yc79xdaaRAMG8tCplz7sin8OG3/VV4n4 + 40/Unz/S5tf/wFZiDdqoRXoA50d5wK7jl4iIW8FH7VpTqSkSEbEm1yWY1qaK/etiE/bqWf8CHoyC + DHc3dNg93b1BXKYV/uXx07W2CQB1LrNeqUL+08PZxnDpr//D4pzP+ZPrY80KLcQoxwLat/ELh5nT + 
hXT23/KvPp1q5XY8WI6uLGDj5f3xT0syxXIdrJbeuIcLYpc08tbmce0RwVY7/v71wy1fKanbdCoS + Zr2LtpeUUntqGm3U2beEl5eE1LLecTjNfAq/PM/U2WaO7PUcNim/ks1xP/IxzjQf/OtpRxpUi904 + nMGELJmUme9stP75gbBQG+y6PEJTlj8ZdD7vqbJxmqr5TJ0C23Ghe4Lk3bTpmbAAljeqe5DdR43N + eSia82VqGmXTtdLUSejv36mA//zHX3/9n98Jg/f3nr/mgwFDPg7//K+jAv9M7sk/BUH8JxX/nEQg + fVLkf//r34cQ/m6677sZ/u/wrfNP//e//pL+nDb4e/gOyeu/3f7H/K7//Mf/AwAA//8DAHICMqHh + IAAA + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cache-Control: + - private + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfc4dc3d9229b1-ORD + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 18:16:21 GMT + Function-Execution-Id: + - yvvltf2fi4jd + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '107' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - 843ac424687a30a5c420c14d40332799 + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: + - '3000' + X-Ratelimit-Remaining-Requests: + - '2999' + X-Ratelimit-Reset-Requests: + - 20ms + X-Request-Id: + - d8044446eca268792e84fffa9991472e + status: + code: 200 + message: OK +- request: + body: '{"input": "a question asking for someone''s favorite color.", "model": + "text-embedding-ada-002", "encoding_format": "base64"}' + headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '124' + Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/embeddings + response: + body: + string: !!binary | + H4sIAAAAAAAAA1R6SROySrPm/vsVJ86W/kIGpZKzYxIZlEJwwI6ODkBEUESGKqBu3P/ege+N290b + F1ghIWQ+U+Z//Ouvv/5u0irPhr//+evvd9kPf/+P5do9GZK///nrf/7rr7/++us/fp//38m8TvP7 + vfwUv+O/L8vPPZ/+/ucv/r+v/N9D//z1N2nXAt424cUj7K2aYJuFS69BWaC5GXoe7sVlohYb1I6/ + KqgFrRFlwk2oiT6jzHzgT/LGf1+se9et/LqEZOxUf400lUmPT2xDVfkZPgCu0bS5xDWyHomEDfq5 + p9PqXOignhOeGvSjpJWXfgBOcQV0X39sY1BeNocerJ1Ig1e7asJ7lMhCt/Gorst2J10iw4fhFvX+ + 533TU+ZZXoxaaztT3btIXlsPggr5xgyxdt+/u7m7jqY8A05IWTQdogPv5/LWPZTUOGSeMYnMDYAd + M0ZW9srwpm7IQ9jG8Ru7Ybrt5snZtvBGmzf2Kk9DvXsqS3B3oov3j6fHxptYl5AVuYTV1jt4DQu2 + IkjOivmyonXGKL2YDRx8z9iz5iCaw6cAMrfZOf60P78Mlt1Ihh6smbA3PmY03swi38zRxaZ7alyq + Sd3RWY7HzZ6MsuN10qw1Kpw97koEx1cr0R+pBXeh3fvrztBTCZdmCaK73eA9f+0ZSz5joeTnl0Ot + 07f2mKjlBdqFIofdotlU0+jYLzhqk0i3LRjd9NVhD1vtjOg24d7GwF/WHMRr40VmIrxQx18g2Sjv + zsDO+ftBU+aFofLMbgq+Vo1UkZsYWco+W+2IsLsLaG7qjwz2kGb+e6m3meZdCeLhdcWH0Aqq8WmI + L/jMbPZ54m6ZJMpuA2W/d2iSSCdvngK3hkGGNTXWuRkJq2oclYlueuzgzK/Gts5CyPTQwep9lL2l + XmJgl9En0AsZ+vP8dk9VwxFuTTaflTGB/GafsV2ITiQ84Wkq7kcQqHqY045uNnKOorg+YS3KUEfC + 0e/BdzkNa+dW6Hpt3o+AyqtBFMX+onb/7XJYpzX4RGZtNx3GVke5X+X4UN3eaAykTwBeGgQ4r4Xe + mAuhLaAZTm9sjXWFOnXjleCNnyvWm0/nsdPWbYCSC/In/S0bUyVEAL68+VJ8SwLEqsfMQ2MFAb0X + NI5m6V6OMITeyUfXA2cQuxxVKAK9xF4Du4i83HpGn6/88LnrGFTMOfUXdDj3X7yVkdIR6zCXq+gi + bel2qSe22iUETSJ3xfY3lNhYC4cYVPYusSY+hK6/eRCgix1ufUe6tmysjdyHbbRzqUNjr2q5l6XD + IHNrqov8y+ij7U2Hxzkz8LZVVt4svK0RWpV8qQ6OgD56NTQw9o8aG92nrKZP3YrgSvOOumqnGdKs + cTKAflSxc00HNFTGa4Zd2r9xut0cUiLcZQ6W/seHyXErhr9aplip2PlL/bHpuO/20EvFTN3Cc1P2 + rrkMTsfbi2pv78VI4/oxHEL3TpB1gYo+hGaN9rcY0Z17/lTDoTwGIId3h6xuychYr8cvdO2oSCTJ + 
qVl/S7Y64MLTfUkcmFHvuY0L9eOiLnjVVH/qd3M/MvrDE4peWQ274476LB8Ra/thnyMwdew/c66r + erjoKnqvEgNv45PU0fl9v4DdvR5U39r3aLYqdY/K5yeiO8mxUF/AvFbQywoJet/Eboyi5ygX51uH + nZ7kxtjqCg9Ca/lYO52qdFL9F0HXw9Om2jpQI9HI0gwdY7Gg/vodoHFfdhZMD1mjhj8rKXvHA0Hm + Omuw5m1Ggz2MqYeEyFuqc8yIhOO+2gP/PA00C1LMJnTPfJgEbFN1zZ/YeI2cGLJebqlDYKmnYXrB + xGKPzOF654n2OntB9Oh7Mt8shPqpLxoYq/cKbzeOVPU3GhIwTXjg/ZtsPVIY6IJSn9epOn9QR9zT + AACkHSkORztlTvoNIZ6blAhfb+OxpX7WepsZ9L66VB3rB1eUl/4gkue7HWkeJwu4ihHqW+reE05J + NYPzWR/oWc799HuQBBGis25TJ4vGiG42c6Z8xCu3vO/IG6Xe4mCjuDrFDaTdlCjHFrqTTf0Vh0dG + 7mEWgiZ/eJ/neT/taUB8aF/7mV6LZ8Lma5OU8JbNhsYoDww290kIgX+lhA+ye9XnxbNWLL4NfJES + 8KbkiXUQG39DVmL/TqcHTAES6lLH2mXVp62sTiZaK4GNz0l5jBY8UtEUZT6+y9oesdW5y9BwHgpf + 9s5OOlNd1+FuKCp1MvoyGns67VG8snyf52wzGtvH9SQv/xcbh40cTR+0Ln98gHcruzaosknMTULW + W3r6KH01To7rw9E/fLHJvVk1Fp5BQHj6H6qL8Rh9yXsOlUfgNhi/BYNNxs4msDrnMt6yePZI8DUs + OIT2He+G+FnNNb8BQGQUl3412ai1IwdHOH2xHh9iNkryg6At03bUP+3NTljblx6c10XD24uldPTt + Zhzkhij7Vn1rq1E7dyE0h7jBjyRiqH8diwCS+aFg53SajGFEfovqGa2x6fo2Gq319wRt0dQY35IR + jQO6jQA9etJ9shcMGnuiBXfdNqi/ySPGctJxMK7AxJnS52nToS9A2R8l7F0PuTHfxNRC3VgWRFw7 + NF34Pvv1z8LHt27cW85r86IfiXrMKdKJl+sQ8Ff/0r29fkVzE19dsL22JFI4XQyWfaQ9Kv2tQ7ec + rHtUVdVCNqdLsPBX2jFZFW30FssMm8Iwpb3hX1zwdSuhbrjm2XQ4GDXE/CPGW3dXVSx+GiNMUwt4 + b6uAhoXPQLBeW5q2ZPjhjw9pUSt4Ow6OR73TyUUdb/V/7i9qVVEj+WKc8XaLwRhpv+bRRuI/1LyN + UceM1ToEpbQkataNgIhV2Xv5hy+m9LhGhOWGDVxxKBf9I3ZTEnIBXMr1B2sPye8W/CFKuykOeAdy + n/74SDG25ttfm2rTDcmN7UFvc4PuYymP5qsJFijVhsf2k/bRbK2+BNH9S8O6osdoWPQeqi/ZjfTO + mVXkUWT8ZuEfrPN1Gw1JKIZo6+LSX6m7As1+mV/QLXzF5IffvTwjAvfdc+9Lnt9WU/xUT8BWXYG3 + ck5SZlqIA/nuDFjdvWo0d7UqKjfRepLV8zYaDF08EW78OcSH5FlVw+HgvTYkozN2HP/EZrcUCbK/ + UkAxdc9Rc/dSGQ7SoyLcG75eq2VVi8x13tBDaI3V6I9diwLVXFHDmstuOG7rAG3CD8Z7BWOPf4b4 + JLEpHknLOzYaz5zHgyZpDRFPhzklTHs2aOlvalem4fES82S05pMnNbZ3DzGSP2UUaLy/8PPTmw6W + 38J6td1ix7jTaHbXZgyHzX7GNl59uoG/Yh7NonjEhjXr1SjYT16+7/kPvmzy2qOkn1swD4gnMseO + xsywnMAZlxX1PyGKpv66WfQXaESoxE9EvcPVh099iiiGe8s6cu05eY7pEfsXaeimyzMoIde3HcZB + 6Xe0uFkWFEfpQbHA9Gg8b20eXr3RYeP6fRnDbI+JItSFTviwlVICaiEr78rg/Hy7c7y5qekaXHSJ + /JV7FlICc2Aq2pOU/rjofWmbOQ1o2c2ij7Rpq9nKvAAMN1YXf9EZ4y3c7JGhlTe69FfHclIBWn4P + e2utZMxLKQfNK+eou7wPngyYyMJrZthaOziaNT+Uf/VDpKq5dqxDaf3Tl/5YNs9uvgtyL39W+E13 + Q6xVnb4zbXSKXEbtztAj9nHN+fd/Sb+SY6OpjYuPvoewxN72fuvG697Mobi6MlkfpoENmeJkcB7o + ER8YH1U97DYB8DJeEd7beYjxdtci+/QUqPnYthFDO2QhUvhH7B4rzhhl28lAPu1jbD3Ge9ffRcsG + 6MwXjmVnk77F16YEJrINdZ9KkI41uBf08yvWufx6s9ZCgha/5LM1iqq5PqbFz09S7ZoWRvc5agRd + HQ4wbgB1DevjDNiUjFS/bdVIOokTD9enFtKjrFspXbWTDaL1HKguIM0QDmOpyj8/IqfRDpFXLJWw + +BWsVtsAse+Q6ujnJ3biwLzxGe4uoF5dg7yEo9LNrwefQ0nHsy/mUlIt/qpUBJMXsf+gGI2C1sso + c5In4X58EFFYwy2sYyK0BWXzS3Bd1J1cSh13d0WDmgk5WkvcQP21rVWUOKc93IVmj+Pz6ZaSwl2H + SLJuH2wzZHYj6uoa2k15oHq4/njN0s8oph5gnZIPGiPJ9mVvvtTUW/zAhLqe++l/sgJNMige7yFa + 5Zcn4U9pacwPEC8/v4dxEkVs0Y8zSCOX0t3L9Lpeq3gdXqvC8aPL6RCNZhaoyjtxO7/3qiEabyIp + UEUHxZfoQiDz+3wCVBQW3kXtxhjt1k7gxl9D0vcCoBn8TYv660vA5ue5ZqThvyOaL+sTdaVg233f + YJNfHoET7fXyRkE+6jA+J/3nn9Pp/KQxdJt4R6/8Y5MO81Wz4ev0F19hPOumj6tdQHsHhJCsYcaQ + FGsbLfxLtcE1janTexXZ2CfU1C5WJWw0VwaF8RZOrbPm8ZFk7yEPQoIN7E/ehMcbjxQmWtjD7Vw1 + V3NsFerHHN5R91M1VhU18NM/NtOfVd/qAg/1hitJ27jfbuidhoNLcxzJO7RKRL98LcrOEF2wXZmV + N1+VlYmeyUel3nYzpBSn+IXkuzdQY9dtO3FzubdwXfkn7NybnTcPQcmBh/yDT16fORrixEvg7oQp + PiRciEZQ4QIae2T+2H5UJpZGZ8udd9zhpT9S4eHppvyQJ4dujy322NyHAVxRYxNxMCJvvNJyjVTJ + IYTl0b7jf3r1uJIFf2DxCo3aLinhEKS+v5LeTUp8bsXDFbU2dTj5kk4jshqoHyeV7pRLYYwTgxwt + /Eue11Q1psNY6sj14ou/IXBlY1O/cqhX547afhswNmprHwWa6JNqihuDrnYhgbKPJIw9f53Odf3c + wyPqebwrFFKxQ6quUeu8AprvziViT+hLGILTFbvc5Yl+fhK9IzUivZ2NFXVKxYIw31XUv5s1IulH + 
dtHPn9q4ihjbrhoTrDh4EMmutEoQtciCSzf7ZLxaU/frV3jRt0RgIA/GlF0pA5PDq88HmVL9yQOg + 3zyxN6K6YxUxXj9+IEppUo/x9+miXF/vnAjBtPdmbXfmwEp2GVaTr54KnSOJwEWzQcjBatJx/xXl + jWGcL/QcVuGSB6QASmlK2L1xk/fjMyTfPk8ydAIfTV0AIzTmU8MfJzMrfvGTcLoLVxzGhxiNtZva + sD6kG+ycDydvuoiKiQr5a2Dt4H88olVShiaWePTwbNL0uZvCF0T31Z3wQqB7otG+fEAvM6S3Jf+Z + 9weNg46bAl+22yKdG6TUaH24bbCpXepqCp/rNSIGX9Ddgg9zg4QaeWdOoFvLfhhdn88m3G79+POT + 6SzPTQgc61Xsb+RPN+7Xuwb0pufxuYWqm1ad+YL8XDt07+021eCdAiLzbPf54w9nbXNwN2c78fDu + p4/HwObAwVaz+I8gEqmuqwp2zB7/+ITNWqEq4qsd/JEZNJ3d0xvgndgdNdV8ZczWFAD0t3TGh+DU + Vot+DaBqPm8CXjWkYzCmGVr8BrlQkhlMV/FebhwxwzupFo1hd9qYP/9M1WfUsnl/cAA2D01f9Amp + xiZOCbo8vdjf9CT3Rnd1GlGj9yZ9LP01GTuVwOrqO9jM97toOi7s2DSG++MT1Kb0wCG7jkyy7sw7 + 6u3Ky6Cs1wQb+8o2BKKnIlJaLsc7u9I6MXwKHBhVFPkrbWMxtvQDIoZYYF/tztEXXdRYWfJEvAPt + my76OJBD7TsSw5+ebBR7O0R+cLT9uaJNx74OX//wnuYb7VXVv7zP/7qU2sleRcLneLflHvgD/eH9 + EEaFC/GI9nRbPoOKbKePiOapFKmXU9/45QW/+sC/fEMEtZGh2tsWTqTgXX0ft0QEn7sLpGm9BH0z + cV+gQ/4KaHb5fru+G44+OrsHG/un/asbH7dT/vMT2Ky9upsv+50MWls7Cx9evULLmj1i66dNuEu6 + YbPAriWc7tKVOp6aIGZNuQpXHF6xlTXvlITbLINaKQl2XXXuepZfahiy2fU3h9bu1m2dBWAcuD3e + 9YbLhksS1XB6rXV8mOobm+LmZMP5JbTUfT6hIhGnvuA+W+ov7+4Eb5x92Fy8lIzV06zmcLSI3N3V + C9a0XjKYmrUq1Gh6YPz5OGxmct+gJR/Gtqk2FWmO3gm88X2lxuJPpuS505HrQ0kXf5WS/HZc8Hwt + YGxXNBpvpnSRnUKoCL/PSkbq+ruHDUqYL6RSY8x7brIVWViFPmvDjvXlcZ1BH0YJ1WWeY4MzuTGa + +muBva/Q/tFz0OjExFaeeGl/frYnGDJp7V8JiVhfD5vXL//DWJm1StyPRYzuasz7H9UmXZ96L0s5 + ZJKG/b6eOtJfxRnOzygndOAN74fPaMnfqbqdCzY/vJ0Lu9frQK89HBjzDvYJWasTj63d/IoW/34B + 2r977J3Ks9fDpsihOKcdmY+WZghhVNiQbrl0OW+mS173gma4vKn2ogv/l5oF4YOfqXWovmy8NfcZ + 4kvYkttDLBY/Z/M/fUut7aut2tSjJUo4NBImB27KsJT4KF5rL3oon3naP26nDM71a4OPIvpGpNXb + GM3HeoN3wWo0RuMcmQBbaU+KTJL+6BVQj2KD/aW/yM1reLToV+yft7tO4nHzgjitt/70CjUmrud1 + DoavyYTLxR4NC75DCH7lo2W+ICVmYqLr65Pj7de7GbNwEVV4y1aDcboX2UzRQf3lYWRzKV0m5ERK + ZOsRS/TQCadoXPwAWrGu8Zl/9j0q36/tD+/org63qbiZJRt6xtUEsgR707tmKqyt45NMWVRWZK1p + oRL0OaXqKAhVf2vOI5R2r2Jvcz0b02kf6bA6yoR8D23TTeaaq8GTGo1mWUK9cddeQ3mNyg1e9Jwn + +pwkImtGmt+/ydagu0NDoEiohQ9ap7O5u64t2B6DAw7CtjKmH5/HqNQpVncqmq+KZMEhuPnUsjZa + JJwavlSWfAY7xOBZ63MFpyx6GKvby+DNnP/MYMFnf94gxIhZBckvD/W56ylD8wO4Ewq1bvR740W8 + 6aLIOvzmI6ZY8+n0GcTmjx/VCLTekkfnsAnGkC58bPR7y6nh/DzmFCvzs1vyLoCfPtSd3ZPV+zTp + IX2vY2qsEetmTo0JsOSUUPdY5QaNzTYA3S0tvFNfZTrlBWkBF47+y8eqicegol3j3rD307vN8Ex+ + /U63raF2PLu646/+CbUt1eOFt3cCVy+oj37v53BSS1j0Id3F2z4dPse7K08aWeEl/0Lj+lImQMhD + oy491hGLmhW3WeYT5LPk6WyU0R7UaX5TcyW76bjg789PUdOw+27JA3vEhSpP8ZKfDlXhh7D4c2rl + klyxZd6ILp0E/mplMzY/ikz85YE0XvhmWlXrEU5N7lPjehjTSXmpHMws2NKbt1EYPUhtAvApYyKz + 4cTmiFNrZdGv/zXvOIujiozelfHeeBFjmRfF6x/fLfM8b/LSV6ngbVH4QgtVNXwGroEXMy+/51fx + tF+Lf/JLs0jAaMPovYe1FT39DjnIG2Z7HcOSv/ss4wQ0SnfRB27iJgJRua94e320YSwMhv0JNSnt + nD6GZ/JW6T7lUo8EUspDEgwGdrTOSYdcgAs4hVT5SvnMo0X/2BC8T1fq17c4ncx24GXN38f4/PU2 + xjBrLEef+hL90edDEI0XkA7Vyueu0TOdLvQb/vCafIQ4i/if/tpc5ZbuCsWvyG++8VV5Svfpdu0N + n0FsofmOt2X+03Xzg7zrX/5CLs3n1ZGb2cngDl6AtXdz8v7knUo/pdT/6fWPu59BHMoT/um/ZmJ8 + Jt/QvcUuiY2IFa5wkaV7fsRG8ayrybFi96d/sCVfHYPclEBVlvnU8nyQQaZrnqD9eD8SxmqN9U+D + e8n5tpPwwWl1o9Fmc1Tc78f04R0e09/8FY4Y1kS2/X3VeSs/hms3iEQRcy1tUiUN0Lz++oQtedp0 + ifIG3icpoPuXoHvSksfIxrB2aCLoHZv0qvX/zFP127aImmHQVLT4dYyft9FrzluVl4Pd4UptpGdR + fzXXrbzMP4kAfZOyk7jhodsqvb++jQdvjLZH/cfnyzyH8+beqVX0yze97X1T/fgOHlmd+tTz42gq + yVUHFu11Ih2zR8Tun+IEL3lPqJnvPynNii6Bv39bAf/5r7/++l+/DYO6uefvZTFgyKfh3/+9KvDv + 5J78m+fFf1PxzyYC6ZMi//uf/1pC+PvbNfV3+N9D88o//d///KX82Tb4e2iG5P3/XP7Xcq///Nf/ + AQAA//8DAIhgdPLhIAAA + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; 
ma=2592000,h3-29=":443"; ma=2592000 + Cache-Control: + - private + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfc4e0bef3e1e3-ORD + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 18:16:21 GMT + Function-Execution-Id: + - yvvlf6849c5t + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '22' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - f7e1aa5667d824ffdc22c9b3a2311546 + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: + - '3000' + X-Ratelimit-Remaining-Requests: + - '2999' + X-Ratelimit-Reset-Requests: + - 20ms + X-Request-Id: + - e80be91cc8cb1072e124d8609456adff + status: + code: 200 + message: OK +- request: + body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "Write + a concise summary of the following text:\n\n\nLITERAL TEXT: \"\"\"Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum 
dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet\"\"\"\n\n\nCONCISE SUMMARY: The text is best summarized as"}], + "temperature": 0, "max_tokens": 0}' + headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '2940' + Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/chat/completions + response: + body: + string: "{\n \"error\": {\n \"message\": \"That model is currently overloaded + with other requests. You can retry your request, or contact us through our + help center at help.openai.com if the error persists. (Please include the + request ID d18e0d0041203d240f04766f9875bd2a in your message.)\",\n \"type\": + \"server_error\",\n \"param\": null,\n \"code\": null\n }\n}\n" + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfc4e50b34117b-ORD + Content-Length: + - '349' + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 18:16:52 GMT + Function-Execution-Id: + - yvvl5uno49x7 + Openai-Model: + - gpt-3.5-turbo-0301 + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '30011' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + X-Cloud-Trace-Context: + - 559387b6907187493804c27758732019 + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: + - '3500' + X-Ratelimit-Limit-Tokens: + - '90000' + X-Ratelimit-Remaining-Requests: + - '3499' + X-Ratelimit-Remaining-Tokens: + - '88794' + X-Ratelimit-Reset-Requests: + - 17ms + X-Ratelimit-Reset-Tokens: + - 804ms + X-Request-Id: + - d18e0d0041203d240f04766f9875bd2a + status: + code: 429 + message: Too Many Requests +- request: + body: '{"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "Write + a concise summary of the following text:\n\n\nLITERAL TEXT: \"\"\"Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum 
dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum + dolor sit amet\"\"\"\n\n\nCONCISE SUMMARY: The text is best summarized as"}], + "temperature": 0, "max_tokens": 0}' + headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '2940' + Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/chat/completions + response: + body: + string: !!binary | + H4sIAAAAAAAAA0SOwU4jMRAF73yF1WcnyhAGgq9wggviilbIsZuZDna35e4BVlH+HYF24fpUr1RH + oAwB0hwt1VZWV/fXcksHbUI3+nqXDm038kN9vJWHpYIH2R8w2b/HOkltBY2EwUPqGA0zhOFyN262 + u8vzwUOVjAUCTM1W2/W4sqXvZbXZbgbwsGicEMIRWpfa7NnkFVkhjNsLD7/un3248mBisfyS43Dy + kGahhArh6QgV9b+1S0EIEFVJLbJ9NQob8ld/dB0bGhm9oYucXcXIxFNBVafWiScnL+5delaXpFbh + 8tctitlFddG1EhPOUjJ2R+wyKk38LWrLvpDOxNMaTh5eiEnn545RhSGAmjTwQJzxA8Lm9Od09gkA + AP//AwBuqeKNhQEAAA== + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cache-Control: + - no-cache, must-revalidate + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfc5d8b8a4638a-ORD + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 18:17:04 GMT + Function-Execution-Id: + - yvvl7ew5uk02 + Openai-Model: + - gpt-3.5-turbo-0301 + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '3176' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - 78fb317ed136dbeb23921ccd45134139;o=1 + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: + - '3500' + X-Ratelimit-Limit-Tokens: + - '90000' + X-Ratelimit-Remaining-Requests: + - '3499' + X-Ratelimit-Remaining-Tokens: + - '88794' + X-Ratelimit-Reset-Requests: + - 17ms + 
X-Ratelimit-Reset-Tokens: + - 804ms + X-Request-Id: + - 42784145c7d38006e79bf17199be9178 + status: + code: 200 + message: OK +- request: + body: '{"input": ["Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem + ipsum dolor sit amet Lorem ipsum dolor sit amet"], "model": "text-embedding-ada-002", + "encoding_format": "base64"}' + headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '2778' + Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/embeddings + response: + body: + string: !!binary | + H4sIAAAAAAAAA1R6SxOyvJbufP+Kr74pZ5eIQBZ7htzkmqggYlfXKVEEUQSBhEtX//dT+O7qUz1x + 
QGIlkLXWc1n5r3/89dffdVpmt/7vf/319/vZ9X//n+XZ/dpf//7XX//xj7/++uuv//r9/q+ZWZVm + 9/vzk/+m/wafn3s2/v2vv/j/efL/J/3rr7/v3R7jxg8ibzjqkKNN93gzRz7WaETwGGAqqMF88dR4 + AzaPTyhfekJXV0nV12Jr2xBwGCgEKPA6gSZHBI8oI/apx+U6z0YM8nzs6XQ4lel8kmcDks+zJI7H + duFwIESTeyWnzG1aaa7lbjqizDcylonvYKavu54pbdkTolOgbU8tuYLDrgYWTGsc9q976iN+wBz+ + 0MnV+/1h85V/+91l4lPvq0vLo3A8dnicvG0q+LF1hOvmJDH7/p3a0ceeDNq6vTH7VRdl7/hNjroi + DDCyozCd8qzJYI1snQXhJpjnx+uaQX0pHbKd4w16KY0rA4ZCZerw+rTN/SPH6K0IG2LaZtDSKcg4 + aOpLydyVpLVDG3iG7FSZhBm5v8rpMh472Et9xpwW79ORywtVER5tj+XC2KFx2gxf2FajxojcDx4V + uLsK9Sf7MONdn1qKteYrqW/BIoGuvOfBdUQRTeFUkd/5zc8k4eB5cjk69IzTZ/tBa1SBi0jgyQ7a + FOklguf42jPHNnApbFD0hW57U1jSDDd9Di3zK9+Gj85w8G3SziqzI6QDvtPKOavhfJv6GNSzWJPo + En9bKs7aE03WBxOnGvR5vNCnBffV/UaFBLbh+psEPJzu8R2DYt/KuT/0FcQ0vODC14dy3EuiD44Z + BcQrVbucofNU2XKTLRZy6Z1OJ+WGkfS6hr/zDOf0PdwU6ZWExPGuedgJwvOqxDAHZOvJXzQdDF8F + TVBEZrkBQaMgXGs4vK9fspPLrqWBmhzhyB8Mgp2inCf7xmnyi8Kd9tOahkPv3yzwCu2BW8HYeqO0 + WavoFz/kE+3bSb3mR3n69DLxWofqs8AFR5R9X0es6IqJplC/q5AaxYF4AmV6d5aNSOEPzCVaf+Ln + CcbuKZs9x9iO7UaPltfJUoz9dSJbFCI0dK7bgZt4iOltNJYzasYvqtEFESsOpXSO+SmGva3eWLoT + rLY7bFsbQOGPJB7mIJ15jt/DVF0ysn3Uh3S6NPYL4MjuFF6fZzujRqohl9MaCzt9lQ5Ko8mrdpA/ + eBDVUh/fR7WDraNHxCkPXTgNOOBQLu18gl3NTwcB8TZMVZrRwZnschJm6Yn2TiSSHalrvfPi94Su + kdsydRkf0QZrkItni2xbekXTebtRYZC2e2bt9EfaGc1ZRcv3JljR5ZSetysNpea6Z3glluWIPskE + O8loiX/xPl5JjjsbguKNSKaZddqHgiqAJWUecTXSzRPZoQRMdU7wJrxZrXDr2wj5K1kkVrZXwsa5 + mlfohnJHDO1EvcEMCw28Z4zoylSztKfbmkeHS0FJEJ2u7eBbTg4xmd9Y4I+veco6iGFo5hgLh3W3 + xI+gAXc/aCQIYzMdPauekEEj8if/vte5s8DPvyua33OiD1XU8EjddCJx0oaFzZldLCCbm4XlqyUi + Wp21DkXKMyA4p1s0HsGp0HAJVILd7ppSvYgSyHeQs91S/8ZiWPEg3D8eXaerUzpCkWjQrNU3wYUk + hmyXeDk0E6cTN4iLdMSadIVwxRGcu7Wozzy/nxTpZF/I9hxb4TTJzRFgKnriVY9cH3DQfdHO7kzM + 2rugT0P4oOgi6WvKZ6dVOX2vwg0UnNRMp59d2xmFL4N29mq84c6tN2i3rw0rh0bMh3ldtt/TjZPa + 4tsT1y3OaEjn2wv0XPWZ1vFdOGgv/gjl4ZpjwHvea07rgwC/es9fnp03HLkvhfVqfccLvvy7HqgX + xWV6cuHmz+99T/6wI2n1UL2eHmID0ImtqRikZTuZafpFWwsmFvhJ3LYfXPMgvSlj2x9eCPP4hI2r + 3Zk5q+M8vl9CDGfnSjB3f43pOMqSCvWuPlOQ8kfZcW10hOeO5Mx9dQaieB88kWw+V8S6ozikaX2o + IJMsi1hL/aR54wPyTk5J22e7LccycTJ46ZuauVjgUI+1MQG13QCdp9Ty2HZ6yKgb1yXZVWvL46M5 + wXDuJYkyQRrbwSZSDaqT8Mxs9QD1RnypUe7qGnE20QV1/Vas5PTghiSQpymdI2XOoLCOL2J011Lv + L80+BuprT9oLjT9PM0xf9MNTvfqW5Sjo3AsprIiIF5knxObNwQY+iz2iFa9jOvLG+wV7q1PZISoU + 77u8P5xoLS3rS3OnPp8y7OR0R0F7HNrhQHYqGJz8xkKBu3ASB9cF57Le4CqXzHTBvxqE25Fh5N9F + RGvsVGizmTwKJ23b9pF49OENz3Sp537bSxtFQ+m7VZjOCXE75JTzwXhYJ8ytrTicz+MIEOFMJwbf + UX3qnRJDfvav5LZzVmVlB/aArmOkk+xdnMI5FakFAbhXYr36ch5uw80Hzkv2JFrinfrec0DO270z + 22oErx9VjYenvveJfkaTPjlRK4M+tQbBjXALJ7JqfLCv5EwRO87l6J4DDdjllVMZC9k8O45Yww4z + hDf0ULTj6ilfYXzRG8MX6zX3q9Z/gsRzDZV21XbujhYvQsqJKh7q9On1w/5jQSJaEh3n7R6xc1dj + 9M7UD4UD89oN2wsvCJYd4PvrEA7q/eCC0ok3Fu5v17mnvhFD4p48ot492xu21/2A5og9iZ15qUdv + k2Kh6bTTSMCeYthLU7mXeU4s6EruB300K0uE9nidWWCT0Rs6Q+SgSlVC0nP6Kcc9DBXCjWwzLaca + 6ovCff3qEeVjnW+7XZDvUXyNTBbm2xTlu0Sf4Fh2lJjpah2y2VAM+IMHOd3Ow6geK0V73jDLjtE5 + /K2v7FytIDt6KMo/+JQI6pmY3uuFxiq6WrBmhx3zFjyf1R67YHX7mVgL/vVlHr7A45UZj6lrzHyT + 3jk4ai+NZAdHRd37c77JWtHZC97V7eTedsv5VTnm6vaNvulaucEZ7Q4U3vdJH21SiMpXf16I1a+5 + dJbWhi3PVkCJdpNf4ahOmyvqO//IiF+jcNZujYjmOYjx4O5YOe417Qq3/TTjVVh99XHwTpHMtX7O + 8CVt5+kkuhks9YAFXDXMY9IlnUwvnMq0+H6fv+KguaAot4kRdduk0zPJO2Vr1jEzW0kJ66O0hz/4 + YeNK9uYN8kW04AEzy3SYu2TLnghWLmXmWVVTIXt9fQlnuk+nSMnTBQ9q5BdKigsWHNPx/egqlOZJ + gAfnvm2HSyvd5CqwRspv29wbXKLV0JokxYonN3OvR/YR7J3qUMWwvJadNx5F672OsWxSIxyCkzDA + bSUAU8WTqwtIEuof/8YrQTqU44V+LRCy04EZ93Ysv/rNrQFLtsZuTXUuZ4doV0BReiF+g7iWNsnb + +tVP5lX8Tp8e/CNDBa8diUPK9zxIojGhryWdial/8nnIntsY8X1l4uHu1d4wKVcXLXoGKx8ZpeP9 + 
mb9k7zC8mbk/i+30rO8c6h7JlSV8XOvzaL4s+dwjifjgfMqedzYxSkvdpIh95LILVbDRN/YOmF/q + 76tJTxxsvEhiBraGlm2G5oqSargz/eN1+pwOKQcft2aL/qFhb52+9R++ZLYvIR3pthYgT3KBJcHw + LfvI24nohK0SixZ318caXyK46q8T0W2XIXab+gjgEWeEmGoWNvfesNCit6ikKrJOufJQK6GxuRLb + gc4b0ta9gbe/OWxnqG1Kgb5ckE7uhU6u1qfjtuO+v3zGE39rwlGP7D0ox7OCC26a0+7ehwKo8csn + WwPAY3XJeElsHgEz6spof/wTLhfypiuvqTyadXwEXL9OmLuV32jGqprD+OpulD9cqvYmd/Ie/fgJ + XvJzoxVzDfu1ZpBAPQbefBldDpLLx6TDdm2040kpAYrXlBA7xhv9F3/S6ZGsCGnvgkft4zEHkytM + 5geo1ztmpTEI0m1HLrbM6R1qJFnWZu1FV61NvFG9DpqSyP2IR/VI9RGKvabkKeKJ1X0/aNCjvQaV + N5yY30lBOynVUVUuZ+SQXTgG5VSe8khx1nTGz/sQotk+agM6RELEdmq/Tqne0Byy4CoT15Jk1HP1 + 7AJ9KAJeV+8inP2Ek9Exyni2bY3uN24DwZeJ+MeL1LKs6yp5frYqczXizzM3vCfYb4wKK1dOSycj + u6mIJ8KZqbAS9e5zaDJQheRGtKAw5snZ2Rq8OHz+4WdKkSR8wVT8Bk9OX4W0yGUexHOVsm3G6eHU + mupNqdN3S7apa6AF7zAAfSXsx2+HSA+e0LP3nliNN6TzMy8NZOafmnjscy2nr60KcOOeOfNPpesN + BzjE0Oq2xPDByefxp+fn7TNivhUOHuu3Q4y+43HHtns/1scgYQmsgrbG+8P13c7rcZ9A9RUe5Kev + mk+mVL/1KDtf7XLhZzkEefhY6qk3T7NmZPLTRAYen5Wajk2GX+iVJTv2429LPAFo6+bGvBD23riW + Qg3ceP2gPEStPj3vcoZGQROp8BavYdN4e0AzX05YkpRibtvdaCl78aJhQdt7aA6OgguXdb1Z9P3e + m6qzRmHJP5zz/qusV/chUX58+MAdXDQ5rn1Fi19AdF18tfR99SxY9sucXbVF0+oLE3yfakA5zaxD + asaODPSbx8R1ntrMm8k0Qd9BQqxuhVGXsR6DuEla9lCDL+r3/ltGmH6elMcD185VPAGwqwgk8C5F + OkU2neAS5R4+qEfsrZf50OquRCXB0cP5sP2KYrh3VSwfs3QetC9W4UXcgATlvg/Zqg6/IK/RG89t + dChHKyk12KSI/PQ36vOXC2g9ejLTp6HXxzwdeOUwxzbzC9v75ZcKUpyt8beLtmjgKu2JDMKnZPd4 + 7r1BRIENh8/dx7R7vNKfvoJZEwYScIcGjd7B2KPg0dvEk4OqrOVP/UTrO7TLeQ46rfMVhnl13DKr + LrSUbR/fI1jy9GHujanzmjf6CqnOladr2+zbWf3wCdjBdoWb8uCnbNLaDuxKuDC9Wn3LsfH2HDTv + qSUB2knpfFINGe3Px5jZA229MduwCrryxAh5aGbJf2KEYYxsC8sFXaMXSB9b2rlqsdQ/E/GTaCeg + mXuBGXGSttO1sjHCTOKo0K0oGg8rh5Nz+VIzrLE4nddjkiB2UzLybVoJ0aqosLxLI4WcZc3QF7wC + cJ80XfhJlrL3baYQBEKM5/ButkMfVT6sOu9GhS0rU3ZnHcAzKQDXFqd4QwJ3CspBL/BhyT9We/kN + BM4rMIveq5RNq/IItmCFmMKlC6e1xTDo2zyhw6I3x2e65WH42BnT5+rWjvePHMHGplumaaaT/uEL + VYe21GhuvNc1+aeDexYPxA3aOvyYyfREW77L6Zo+3+mgDnj46W2ytx5TW3WabcEfP2OQaNlur/sn + EHS5ErWGbzml6/UNenTzcbH4Ld2jed3QZRfNtDA+a288s4OhHN/jfsGHWB/0SzOBFQwrjKa91vLy + p84R8TSeBcYgtayxcwum08amw/seet2C5+ijmBusGOmxnUJRyhCGUmXGYYnxSW72aH9BGPM4PZdT + p6mWPNzeHNFcLQgn6SNqMM8kJpq+X6EpRQSjCx+nzPO+b2+8fScKan3jFn6OvCleuxay0u7OtHva + zeNqyPboVpwcug57N1zvcuzCdQbEjGW9bl7bGmjvnUXcOi/D0bPyCY58aOCjK97m6aRVKpQGsbG8 + 6NnpOr+sX3wy68RN4XgT4xcs/jGz4zLTu+01GeB43kzM1V4zmnZ7x0VT0RlMf5TMm6tXzKN7rjQM + K/o1pEYoPkFdGyrblmOMxo2yHv74RfaYSHqXbHQO9NXRJJpVnuZRZk8ZEpmNWOYP73lo7DoG5l9s + vN7BDtEBJUd0qOwBT5l5ncfbO+dgyVdifghJh5N93v/8Kcwv8TTyziqGIvI1dtmhev627hWLPz1h + Vu9tyCvl6MLzlYgsgbDz5nYnGTK+fjGGnfNoh/LJU7hO9YN4JCvbGttHGSWOxvDsre0WvY6JD6v0 + bS365ZR2Pk6/MGv8wPx7TvXejfxaytcUYxmwM3fvp0nRMp/dNw9I5+xz5UE/PTqMWgd7g9zJR9Ds + lsffa7qal+9rgxffV4R8s8Kb6PZgKAq+1rj8Zpq+GfRjh8LYEYgR7Lf65v52MrDbYmYW6J90lL8X + G5IrKoizq4p5QJOvAu0qEa/lcDe3Rly4KHlnW+IooxpuvvePAcfoxhM/qCx9PZlRBX54/2Bu0V+D + mcjDj+9g7thw4WSfX8c/fr2mEEBjIkGFsiCR/+g3dpFOhnJ6XFd0Y5+Oafd68DFsLW6ib+HRpP21 + bSgs8UvMs8BQs/ArcOfjyAinVt7iN+ZIUR8e+/nt0+198YHgdKIlwlLZaefkCtqsvpg/9L4+X+db + DGNtXnFxjqt0pNb0Qgu/ZQ7T6DxnIxeh9N0of/oNox6pe2Xxu9h2brn59Yln/PMzmJl+aTg55OhC + c/JP5Ie/vJn5T3TLuj3RlvzpHx8vAj5fGbTQlQqxXaLnEL7aLxUd8L2ffoWzeLuQO9uSeYjmxEev + w2gx7eQU80gt+YUs9SKy3YdPvXFStOTH3/79fy9dPeVdcHkwhxZVOp1E7QbtIH6Iw1m7cO0c0BF+ + /MiHjJaD+lVUNEj6nnKH7IP41ykRwMTug5BdrpbCvMlluJwlh2j8OKTzJ5RfkL5OBbP5Z1MOyfbz + BD5XDKYNoY5G92yqkEm8Tw7G5+RNe5I9EWvvR6bvOt0TEN+J4B5tE4+70QzHCB1fkFe3iJ3ZTfT6 + M/8VUPtUHGZX9kMflbJJYKlvuNGnMp1H0Xbh+WYRlq/pA823Pn2hh0grrJT7IF0v/iXabFSTnLnu + 6E0LvsHqevwSO+dLb85bpQItygfmitc2HWOTuPISr2Qrc9E8xFOB4bXKZ0aifRIOGntM8OVbk46E + 
OuF68c/keRRlzL2eajj88Mf2+hIfcX5B7PE6ZsrS72HOOW/m4dF1PDg7xWQB2zI0Sb0zoXeQHYha + RH3LdifPhZvhjWTpd6HFHx9g16KQeUbOo28ojplCbuc9FZd+1FJvLOg7LmGmXGM0VVis/vgftmX3 + Zes521r50w+pmInmg28lkNTCigX0XCBWPUwDplLLiQqrxJt+fhwJzJj9+BhFn62mxGR8k21U3L0+ + ltsKNt39zSzJTFIhllMemGhEzMgbtRVmzc/k3eWY4vXUflEXiQX3J58+j+egjxuqPmHxb4hZWg6a + HHJ10QN2L+aOYYx+9Q4t8UPMffco6VqQRMm8J5c//KgRRlsGK6V3CuB82mHxg5SlX4FXITrow+5+ + f6KPsttQzj3PYdfkjILgdRfMv3fBwr+nDJZ6xZb+RDh/wumlLH4KVR718AfflLe6cxkhk1fOxFeu + UN3ilk51a6H5vNEpWq82dzwo0UMfk/GZgTccL2T7CL/hdJ62S3NIspgR61H5p7+WFl1DDpNXhH/y + g/Np9zuveaM0Wg2i2O+o7PoDml7Pu4+8SToQjL0Rze4hnNACDhj4wm3pZPoCCGCsCAnRqHexYnTQ + FnVP9GrlloOoxhwUn8uebMPnDo2re/6FDB4GFpb+VlNdSgGiAfm45jf7cuzOGf/rb1Au+Drhwk/p + rx9HrO6UoxF13+cPb/HQkbyk1S26Qb8/n3G+PiOdXVoJozaPDoRwqqUPTez7UIisY4tea1kdexw4 + u5VJwR2h7AgRNFj8DqYe9yedfoPrCwoDbBKWXfTHD0Li+ZXi6q190TAGwk3eeLFEML8rwoVvaHAq + womY5ecY9t055sEVGkbchQ9PvP/5w1cxH+tRO73OIwW/WKXEv0d+OXi6aKMlX8jO8H00PN9rC5b9 + s5/f1vMbp0blCAK5npQwHc1csRWZ/zj0Q++lNz5GyoNw5UL68/8G7WupaC7iF4aiCVtmnmQXzo3/ + +NOvZs8kAeRtMaGKAk80rcH7QjsRnRafPQ1HaJ4uiH64psRUuXDab+UvPBI5xrlYeOW0Br1GWDUZ + lU+hGfI3edOhn770NT5peaMwRFjqNeYXf4TH9lX+8Rm88fimpIOJeKSdnZq5zqZBbKOfDbSsz7zQ + UsvNkftmoJcXly39q/JbXmUDvVsw2X45n1nuJg3+/t0K+O9//PXXf/5uGFT1PXsvFwP6bOz/+T9X + Bf55vV//yfPCP5nw5yYC7a559ve//n0J4e+mraum/799/co+3d//+kvi/33f4O++7q/v/zXwj2W9 + //7H/wMAAP//AwBEtzLu5SAAAA== + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cache-Control: + - private + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfc63ad93ee0fc-ORD + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 18:17:17 GMT + Function-Execution-Id: + - pbmeiblrqfms + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '36' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - 291f85b7147806b14c87634bd0b0395f;o=1 + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: + - '3000' + X-Ratelimit-Remaining-Requests: + - '2999' + X-Ratelimit-Reset-Requests: + - 20ms + X-Request-Id: + - b12233120f7ebfb311f5076990440c22 + status: + code: 200 + message: OK +- request: + body: '{"input": "a repetitive and meaningless string of words commonly used as + a placeholder in design and publishing.", "model": "text-embedding-ada-002", + "encoding_format": "base64"}' + headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '178' + Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/embeddings + response: + body: + string: !!binary | + H4sIAAAAAAAAA1Sa246zyrKl79dTTM1beskYAxnMOxswYE6JAZ9arZaxMQaMOWYCubXffQv/rd3d + NyUVpqgqMnLEGF/kf/zrr7/+rpMifQx///PX35+8H/7+H8u15324//3PX//zX3/99ddf//H7+v/d + mVZJ+nzm3+x3++/D/PtMp7//+Yv/7yv/96Z//vo79PotTZ8CQ+SSti3Y50OJTUc/MaKOfYyULMyo + 150uzthvNB+J8YyoI2hTMUdXPYWvYFRk3hzKiKXYasF89Tdf1Fe7Yty7Ywg2PhjYeZyrbgjddwub + bI7xgX1P3ayO/RlptdbRrXJv0JyeNgFo6/ZLncgNnME03yWsmPTxp4sZo4mkXxmcR3mkT6qSbixb + X0Dlhk/8QmhttBnETQ+H4vSiarPyNfIRrAAepu4TZpztbh1TkcAmcHJq7LW2mAzJVKHh89wP5K3M + RuRnIjiP6uhLl4E4U+5aOZQ8/8GmYikFjextpRw5Mcf3SqRsFLWgUp7QX6l/aTZRZ/j9LAdWVxGl + 
3sdoWmWNDPe+leg+XKlstHn1it71KvWR2zvRGNidDgEOdGq+nMqZ5IICzGbHCDpL36LfV2dAK908 + k/WqcorpvhF4OOwslx7796UbL7tND9mGGzC2TD1iqzMqkWKmBLvXb9uNfSFUcMq5hjrU2BTUFD4A + meDcfGkzqc70yBMXmh154q1zVrXNR34K6DSNJj2tjQqNG0vtocHIIoqcK0l7SfNW2XvdmsBr07NG + W2sA93bzweqy/gP9KDZ4hefh8+7MnCYhYaXk4VXFuzY1u3rlSTxcFN/1h9faivoO2yrEhXUnUW9X + WiejbFRe/VmgWj1etBGh2AXmGC09bLPaIbt1moIkfiZfure1xuwg4OF7Dzms3lU7oV7S2BCN8Qkn + S739qafhxmzqH6XRmXnj2sOxMBDemV/DYbRtYsif5QbHMyJo2ESfHBpXCokcTq+C0ochQPnaNj6o + 21M048waoXx6J+o2Oy0iU/zVwfKGmAhZ9tTm5HZIEVNkSrhNkbJZqsde8bHK+xM4IZvv09ZVnrT3 + cfTIbDZNissD2mgDEfivl4yahmXYmJDj7VsbksklrEe725fS3dbbRMPaHmM4WZcC+8fDXDS1ZQbS + 57M5Uz3amQmTUTbD2RnvOGxWxJkMN+nhuH+usOd3oTa8pFZE9mNWsXbJMsaCWeRBbzofO1Q4act6 + kxVCwsbnc98ranrKfCWowhzrVZcXk2k2JehpeiXo8KjRRAf7CnapqFRbP6/JXMtaDKOyDvC+e6GI + FOHkg7auv2R+hj2a80txBsUIXezz47GgfcBlIInfiUh9i1jddLIMj9fRw7uCP2vzgRouuMWVo/48 + 7IoNOEWonN/lGZsvx3A+z2djwJ3WFD/jRCp6cXOzkVt+Mrx13VUx1BYO4N2XBj3HQcBm4HdbNHzK + GZsxd0AT+75dOIWXDT60cZH0D3tSod3lPbWF/dkZcnebwyw/RYxjft/NQXy4olvOX7BreRNjhjrc + kdnlO3zo9IwNtjFn4Kp3D+M6zdBoXa0KSkEzyarcdYyk482WSfQdCKB4FTHV8isIlGCFt9PT0Bgn + SwRJ0bWmtqGn0bD7HDLwJ0HGO5ZOyYBShUfIFAKsumLssD4QMjA5JPz0WZtFKXeV4cxiImd7tVu3 + +zGEqIgdotxRGjGn2WTIWV81AnO87samHFrAY/r1uz6yI/bTz7c2nzDWN2/Ws3iyFVPMHXq4PdcF + jbidDYs+YH9LJUZOwvsKWcsV/sppqcYGcdVD2tKIXBf9Z7R4GODdfInUcsU745yNGXo+zIq6lkKj + ZX1CtIqJ5UvfS5Owat31cB82iS+E3hYNoRQDFI1LsLMPH9GcmbWIXPXq+VwstA6bd5KBniHZE+7S + bBJyoL4LzYpeiKyVUkQ8azJkJG52vqB8Lw65kY8O4qHc07ukjs6sXYcKrda9hL04uXWz//y0MK7q + kkYP7YRGEZwAmZwk4H0HY8QGV7/DIbVO1FllY0I6zr/+9MsPz/MXTV6q+vJ7mGtq7ULbYfzNGuVv + eQyxtujb1HI5B3KXCPQQBZIzXQs7hHetpHRn36OufcI1QwSSlNpLfyGFfiby+pyP9BDbRTSZfCyD + h90PDo0rdaauKl2IFXvAungd0DjMNwOtj0OFsTABqwOuaGFfPb/Yq/OGkcfO7n/9CJtP/+yw3Tes + IFjr26X+tWR+XcO7ImZlgfFIBoeYzsYCixUB9fI5j6b1eL5D5XcmPgglLeZL2AWylPuGzzIvT+ah + zkdQ5IDRx/XbFk1x+wrgeCP7+Qk0TlO7RbcT71FbyB2HcMD5aOlf1FQ+czHtr4khay++w94RxcU0 + 1tcAXvxb8Dd2LCaDS1CPdoHv+4zd62goHzbIy3rgm5cd2dxxxhU65+JTz0g+bPTlugRQ1yfq4u3L + mV/7/Qy7OvHINpIENh7nPoTmmek43RQcInttB6AcAhU/bpdaY8WUtahokwDvjvpeY6CnW2jd4U1x + q6NlPxwsGC3tRPerqivm+xNV0N84REb/QIvxnsWtsloTCfsvVqDhxY+jwgr5gQ0xLlmzHtM7gJyW + 2Eifnsbsx9WFR6xf6a6KkdYP802HWy5cqBvZTjGIPrsi8nR31LckI5pu6dWGLj/1BJUSimpvde+B + 20VHwp0qsxiX7xFb9RoNkmiPNsnYtehuteHix4yC4CvKxOx5+f7xc2zyxhTO0nRa6g8j6jSrDITs + FGHNNUJn+n6TQLbU/EXC7+ugraXztoUzV75olHlqNHZY3cJDEiM/D0JN45OzeIfNedzi+Hxs2Kx2 + siuqjxtP3o+SsT6wCx2Ekdv7pXURnD7yK0sWD9Uem6M4RdOmki304WseeyohybSbLBEOif3CunUR + tNlOYwLd08qp+5aB9U4RiAq9ega25KjrPnz8KJGx0weCtNWBDc/jmygcfgL11f7ASD0LhkwnVaT2 + vS4KcvUmC4mq8qX7dJa18ZVvezFBYeX34DcdE8ujAbEXp/TJq9duWvRVVDdbmbqfb5yMamK06NeP + VTiQaLQzZURaurv4ZysanFFwQUBGagh+Jj8xmk9qzUNUZgPFhNsX/Fq0XDBdc/KF2bSi6ezMFVTT + y8PbmO20zXrPW+hisBfpInfUFr3zIRUyRD0O82yq+v0VdulVovvcbbR+RYYZ8as8pHY/F6jfbjYZ + fHY8xfrIpazru+sVtl3J0/1Hyjqm5tkD+FUW0v0qKpyZrbQ7vJ7FlVqZdHD49ZOcYfEDfn7JP8mc + HMGF+zvVqX4o627mHmoIybfhfFlSA2cunIKD7zPkCb83o25o7PaODgKYeKu9OId5ntFC882vf96f + kEn+GRa/hLfnG0Kj0uY9PKxwJMvzNBbMI4+03c4nX8VSunpdshKy5+mLPRHyoufHGSA3yjtZ7/dU + GxJyL+X+dJjobuxKbZgUXQC5uwlUrzq14EflfQfX+6j+FN2ViPn4PYO6xhWRldfe2bBmDmGpD399 + yEttRrpcIVpuCd7zzRFNm+iTwaqRjuSdHN/JyK8HA9YmDenh3lrOmEaVC/i0fpPvvNeY0Fe4RgeB + M6nFF3qyDq/N9udfKOacjo2ZsA4Bs8eTGnp16oQqZy1kA3fzZZPs0UicjQCKEbjU1I9WQc+Cy4N0 + uMZ0y9+3znTQL3ewxuJOf36B5urdh5JpR7IpnVEbf/txe6zeWKMBYv1jfOnooFxmnx85DvXpHaWQ + GJ33p//MwZYf4TtFD6w+chqR46i5UCXNcckbZdK6Xueig/0eCO8dtw6DS94q/TwPRHYJZuybv3S0 + junDV7BYdmMZ1Bao/E7H5nQyoklRwhFUxOt4y9lmMr2kVoYhKVOsMmnH2FqCHiINIbqVnxRN1uNs + QdPmPjauwwf14SBbaMlP+EW4fTeJ+XaLtmnZYt3UtWK9qWQbfBu3PtB37cxhd9PRGZiDjQtLitmz + 
Jh1x6fFO9+sbjXqHXkPpSKKR1MnDdmbgD1tZLMobNncx6Uj7vKe/POX//NPMBbYM267i/bmiXTGH + wcaHbnJsvJ8PLprCa6PCsRQxtYTDy2HGSd6i/Rxh0qnzJVr8Rymbah/4c69yzuhnfo+GXeVQdY/u + DqU0eQBXR8QXAxVFw6s6h1AJwYUaSRZp086mLdrEXE91vjtHs30xH4g8/Z3fpT2HCHbBRlqqXSj2 + v402k8N5RsjmDRqtn2Iyc8C5yFd2lBp7Vmvjs+s5yFR7xoeXIkeUv21n5be/3dd9w/rwLc9wqE4n + apyeRTRb91sNUvp5EvHeu9302oouYJY+sX98JM680rUc2vOwpo6G+2ge+48A3FvWSF+UetJvN6tc + /vkVN79MiNw/loBOyKn/5L1RXdc53EyV+mU06WwkpkdQn457rJfHZ9FHT6tFQ1KlZHTdVTeYKLeU + Je9Q/LzcE7bkcTjGx6dfb1hVjNLrfkXNTH1qs8+kTR/gRBSTE6baJduicS91FZStcKQeFvVuYp4Z + g7J+NT7BN8cZo6dV//n/l/7fjWklC6BhTyLK4o+W+jLAGt93avIfi/H371tEyhA0eKmvhJrCwEFw + eel41/l7ZyxUr5WL6xfIcez0RW+dO0rog6P7d/hyyOFQW7B6iQoNwzqJSNVa4i9P4D1tvogtflz5 + vS9jurSInsL1DAuPwJeXU2kTY3WFpD3ryfeyuy1YIRRg+qQZ9qaV1Y1XpzZkZQgbItSXtzMnt10K + 9YYcCMIvsevuVvuAAL06vOTPZO7cmlPYWQxJAoPm8Mv6oQPjjliXPlk0vB/yFhq/r/B2d9iz+ec3 + PS85khG1efKpSZ2hKUldX9pmtTbr+BBD/mEb0rhjHM3t2fPhRGGmu/69KRa9LeF05U7UAhYUUzdM + DxSEjUykxa9OVtFdUXrKberTbat1wfUMYF7QjVrJ4d0xt7+3IA+ejY0waxLyXDsB4Hvd+eLyvBFb + GFD6HA70sNUip7k5To6WfE693TthTFQ2MzwN+Un12ZkidrzPBggK+NSGMCzmb6jaaPEvNI6mEk2n + iwQ//kPNuHsn0+XL3+FiX02cVtssWvyJDKudmZHv5np2RlLdCAqKoP+tJxvNAzFkfNq8fTm7ds5U + 9PBAz0Cc/NV4FLUu948VcPgFSx7QolF6hVe02u2zZT9qybq2zBDdw7byP6BljN7ihoeTpBiYbIoU + zdHVfUBfjzd8M6N1Mn7jzkfaS+j+8IZv0BXlLz9hL7JMrU/vLEVLnqXYt2ONXG5iBtO5U31opgyN + H/F9RpL52dJbaZrOJk7XFsr21pJf31Ey8UkjiMP3auCrl58TqtkNgeNZn0j52E0R8zy/BpXXdOqi + +BVRjpUPOHFjQLVPFnejSqIAXrth8nnx6rFpdwQR7tzDw5fPVS1YqKLzn/VzcqFLBnP7iOV3ML+w + qom2M9VJJAO7yRbJKT0lTXDeyyit0cqfe7C0dVmrsbL0S3/cSUXSoukjw+KP/fFRXZNZHFoeKetn + Q7hSG6PxqocCiIT5PqxJ2dVHacrBuyQff+Gfyex6nQ/5Z9rQw1O2i/HMX2OktupIJImftUUveOQ0 + ZULTFyvYfEJJBrBu7mSzm7Ri3VWlr7jVladGotQOu8VvQRE2l9EX4Yy6eaJbUHhjT8nKMsuoGy8x + +ZNn/bSaIurFiYj0Ou3J7J0abRw6R5CXv5dwRynQ5kI/93L93V/8ecmP46d/g/LRWg1rh4Oosay6 + xrDS92e6e5gcar82L8IIGvijLOtoSudT+4ef2toxYLO7KwQI1sbWV1YPM+k+K5OTzsYqINxTiBir + rPGONu1hTbWTICXjuQ44KHq/xOaYQEJz5IooQM+O7u5vjbFIQDJwRHTp063ziK3nbwrq2quwKjVN + NDhfksJKQAmZOv/jUNmsfYi4YktxOTURsZW6R4te+qsnjxPigdQiP/Y4n7/5Gtr8eOHSj4k85O9o + ErhRVtbdIFFsy8fu51fkX16Ky6NSjMXpHgJ0jUH9OtsnG0s66KBv10/svzQj2aR424Ic4B3WHtpp + 4T9yDkXjE19ZY8cR87t+RZ/VuiHrsvcdFnRdibqZS0mPn7w2//h0ddsohCZxG42P8aKD3j1yvxAu + edIf3IMPbZKvfK0KQmd8p1wOYq/I2J802WkkVPVAnOuA1T2StfZXHy73Ydi60LD4oHTNy9O5Uak3 + rEGbxrezhRMA+JwlGcns7jpBkgds48VfMbZLIld8ITvG2pLnpuTrxSiwU5nseRcY45+zCPaXE7HZ + id+kRrzPAT/nBzIJhLFxx7wrLDx+yQNW0Qt7WqGf3jgYhm7SBq5HfEbWZF3f9sUkna0aPkJqEU7t + D6jl5WMIuHpEdBfdn8n4e36xd02sS5aTTNE2DyFPpoJuY03r1nYoyeBVWCM8u2psIhNkv/1IbfGD + nC6eDzOAWb+wS76cRtxYfKDw0sf4eTsVBf0YpwDFl/P8yz/FvNcOHBrkCfkS2umFALevBd/kmS1+ + f2Is1XYhzBC5ZO2Ij2gWzokNpapmdLcz38n0GroMbYe7hXdLnmh/+a5dvas/8way6XAvL7ztx1OS + eeHHaLUXW/wc1uAs+7lH57JyFz4jaFOdNzb8eOB58LioUT17/OklxkEradPus8vgzq8lX+FwjGbz + 5Yzw/UbWMHSzq/EdUuIfjyKr3TNkk284DxnTNU/dciOgNZEO88+P4r2rNk5Lgl795R+88Lmk15q3 + Dcf9a0Xuq5xns1GsRbRvdUzN4zFMev3Y3mHg7wHeq9bJmeB95eRvr4cYd0RzNqrll7/PyfQoGZok + +xDIix7SvRHuus37fgVY9Irq7jsv5uh17H+8j+5GHzvjj8cuPP+X17op3x4s9OOn6zflnZl72KH8 + 49NWJjXO2LCtDRuTy31l4V39lD7vcNfWFtbcwnSm7F1a0Lsw4aMxgDP4hpaCCFebnjvZQXR0nmf5 + W0YhVU2rL0apu82yrqd7rGrJSVv4d4WWesc7/VKg+f7ZCr/9SM3OathsT80ZHpIc+S/f5p3xLS35 + ev218CF4Oqj18ssMv36rSTZFk+30NTqO+z0Rdaw7k2m+K3np11TPuU5jmBt9KIWdSf/wD1RpOsyT + cSEcCfpiejTZQzEbMvx4dzcEuReAonYl1W5FwCZJgBJuey/Bv/20zA/OUG/6w+IHpeLn/+A8rAZ/ + fp2go3BpWxhMRfC/u0nr1kRhIgTxyyfzY1Um7KzqIUpRvKf6I62S+SAkKYpvtrHk3doZzzcuhYVf + 4p2cOMmsfS0dOayk1Argk5DOzUD5Jq+MIO+wTaipIBfVqXih5uKH6GYMHsqmmG2fX3jC9OM36qBW + VD/xMeqUUFzmMygkf/I4c1RBWerFX3O3mza6ui5KLr6fyWbh+zPOtqNydI0nNZf+uui3AVnjYDIH + 
YaHNmWScwSxRQapEqbVJsneh0k4xop6FbMaH9yETfzzf49VrMeK8EdAvrxs+O7Iu6+zFn8kTtryV + Ey3zUffPPG+/32ONn9Ln9Zc3STvs+2T86Xkw6zY2/R1GIz3V7p+876OhS0Zyv59hvwvThV/fonq/ + HUWlmBKCt0ETF+M3LlxYZ4Xur9WP7Uzqfp3BRlQirL+nazGsPg8VLu/rx+c1fqexn18MKYqIYI2q + 8+fnHyxa+5NtOk6HinwLxu3YL++nKOYsqkN0MabXn7y65DEeHqK+IkKYHaLN6Dxj6Epnu/C0hi35 + 2QAhLTi6u/N9xLTRnOG5QT7JUKtGC//XIUFBReB2qZ0/9WlX4xmbsd8nTOlfLoRjbtAfH+6pmlTg + Pq4ZTqwVYy2IzIL+Mxb48bpv0LC5Xmo0XR9nUhSjp81XT7IhF64NQRD4zvS5XAWU06uD9/t139Hs + penADYboM7cwNTZEtxZ5I75gj6pk8Y8sVpb5pr/inA7N6ljGIIAt//i3Ns6ZmMFX0Cs/qYwBLX40 + hCQODvjCzXttDjfBQwlmw6aenD+Tn1/7kz+2+s2KBNNZ2bCKe4tqy7yP5Wroyks/wP6luSStfcHp + b57yh+cwGdUzDOZKoLaM827+fIdRVrrniVrBK4ga8T7PMMsvEe/jz5NNXdW7UN3WCmH37YeN0XNb + Q73VZcJN2l2beeaqv3kBjXDzYOTH58nkN8s88RLRFDFO+ft3KuA///XXX//rd8Kgqp/pZzkYMKTT + 8O//Pirw7/vz/m+eF/5NhT8nEUh/z9K///k/hxD+brq6aob/PdRl+u3//uevNfpz3ODvoR7un//3 + +r+W3/af//ovAAAA//8DAEwqicXjIAAA + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cache-Control: + - private + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfc63dfc49e259-ORD + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 18:17:23 GMT + Function-Execution-Id: + - pbmexn5kc8di + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '28' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - 7793eb63c23999a96303cbe39038fc6c + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: + - '3000' + X-Ratelimit-Remaining-Requests: + - '2999' + X-Ratelimit-Reset-Requests: + - 20ms + X-Request-Id: + - ad8d4f3d387595b6652d71076b51541a + status: + code: 200 + message: OK +- request: + body: '{"input": "Sample text", "model": "text-embedding-ada-002", "encoding_format": + "base64"}' + headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '88' + Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/embeddings + response: + body: + string: !!binary | + H4sIAAAAAAAAA1SaWQ+zvJal78+v+PTd0kcBkuDNuWMKYbQZMkCr1QoZSEhIwmADLtV/b5H3qLrq + JlIIUjDeXnutx/6Pf/z119+forqe+7//9dffr0fX//2/5muXU3/6+19//e9//PXXX3/9x+/zf9x5 + rYvr5fJ4l7/bfz8+3pfr+Pe//hL/68r/v+lff/3tJ8svlV+lXVErX4hr86DtGZaqBPGXTM4QWJct + s1/uuu0vFk4A2lFgeIV35sSNKVDv9kVllpCZ/rjpGwtlO8ci2rmTUy7XSbCWZeIRLCirli3MM0b7 + PfhML9vU77yaOzCpzgOPsfou+GDZA7CrHDJHua5Qv45FCvbHrQjWvhQNGcoSWCcSZ3jbaQUv3CiA + nTFe2MY28nYSat1Rq3y5/z1v1V9uJwv2z/uR+A/liKZFfTJgWXzvzC49K+2bLCnR/uaHVPLMizko + 1k2D91o6kHBEpj/4MRzQN9QsFiz2qfldv60TbBdhRnDKGj7hNw1QGdoRC4TjvXjbffNAephwWo63 + O2KW8T1Bp9Vbps/j+zLxLkIpFwyrq0+bcjtLRNh+l4QuHgVBg2IdNbQdPiExNL9GU+fbDvgfMmF5 + v2mrfpOdMliJQc7O7+jedjsqRLC5BDXxZU30W0r3ETwPxCZ69Pm0HEumAg4qETFu6ZeP2w/p0Jb5 + HgnM7FJMW+mVwcUtMZVraZPKlmQ2QKiyYyGL7uh9ML5n1O2FiDl+sm2ZXi4aUGrjToLypVV8k+kC + rPKrTSwhq0xO3a8HmZiURGtzoWC26XtwYmmDl9Y3TCczp6BM0/XK7O+98Tm86xoJzD/QdUDkdJqM + +05tfK5QNSlIym3TdGBzVFpmfV/LdmijlQDv3WlBlRgZqZzXrgam1gQk1E3bHA99noAYFicSViL4 + v/cJJ9mTWfgJo2Jw0AhQeKZK9PU2bsdHua8BkpISMrpFxadbdUZrt34wa9Hf+RD2uQHvsrZJeBKr + ontklQMPMROwhIuw7epbvoKitwmWniGupoUkX8Gj8oa5JH6mPMjfAXSLmBJn88oKjozvE4510ROi + 
q006LNDkge2YWyyH1scfev9aQxIt9uS33kbZP4iQtG3OsJe9zPHzvgSQFG+faKfs4Y9Ln1F4G0hl + ni271XCBdPjNH9nGpoWGY/QxYL3Ybim9jSPqdHcMYBBvhCrlF5tjCr4F/uXek2BCRTudwTwspLI5 + Mkf0LN7ujayGaPgQsg+okE6b18oC/dH7WNF8p+XoOEZIWq/fzF4k25Rn6tZAEnN3bF4/5vS5VRgS + DW3pWrIZp/6xNODj7zKs7lyj4H5+2MFCo2/mj4uhGvlr0KBE4wqL55AWTe2uJ3CjRUpRsuxTniCl + Af74Osze7seKunXToftqU5CLKvQV9d/BFV3dbsVsnNt89G66A9LuFrGNZ6o+7dD3g3ymMUbW3s1k + phGVsPH7J3Hkyz4tlXxZK5cw35Pt+tGjLxUjAVarTUl86FaoaaLyAUXxDvFKkd10WYjaVfXzpsbi + C91NHpcXAX2X8ovoWNm3PImNEwxuQ/GiGnA1BMfhqpyn1iTGs2Gc5+pNAdDNjhijtU2HY1Qa6jq8 + fVkgxldzUlEmgkA0hfjdlxY8rk82CC5d/PSM80wlBir9N2Z6v/HaUarz4bf+mKVvfF+k5cJBC3f9 + xamcPfhwMz4iPK5QE/MWm3/qB9Urt6HiQf6kDbyGBB6LrUG2G6Ezh7GPPciH2iTaa9Okw0HyE5Dl + 0CNG/EyrCW6ig3YfItKJ6vuKl17RAckfPtMeRtZ2bpzUcLxvJzouR+bzdbey4fRdMGaORlm9lVh1 + FLl2gGz226ToQ8k/w2cQBeJt7g3/ip5qoVNEb8TcOFLapGJsw+KWTsza2LjtVuK9AdbnOXNr/Wj2 + zB2falc2e7ZZJltzet9OB4U4uku0UPDa0RUzEaY6PjIHg+CPRHoFUL7FihF6kdHrV39BSc9su4i0 + djnPjzLXE9EFySqGm1HK6lcuRiqt66Lg255nUK+bDqPFMUd8QYMr2pdNxBzhdi6mvefVoG9hRbQq + O/Fhri8Y3A/F68UkFr17XSWKL2ZrpkUNSf/0s9/8m9/3tRhqy7bhsjA45ZZntlNwXK3QWlI2f/oj + /xhRAxfxwMj2ZJz8YUVfJQhDk8/6Nlb9PB71TQyJzHprct1ngLwwkYjnBIHPOXI8pbftjvhV/ESf + MHMPkK/5huiPzCg+iRh9YGudVnRSzHPFhZ4/QUKHAk9DJ/PR6vMAsK4/iFvtk2LIpN5Ai3KQ2OmK + CpO+3I8CEvN3mH/i8zwfyQ7ebeSxTH+KiIe3agerBztioZGbaqRiJkD9GO94mPWtu7+FDiWHNMJC + aNop/emP1dsec5djhMZXjc7osTsMxOhNk4sXKAaFH1cSI9I9MMdHrcngRMOXmMv+XrEsO63gGxoW + 0d/0WQ1uuTQg9M+UHCSkV/SE7jV4/dEkzuDe04GCIcC1/CRkW+UL/pHNLoNFkbXEm/RjwZS3JYIV + IYva/aOvaIuyCZIkc/E1MYqKD7dTjdbrMcBIMApz6GJdRGznuOTyDDo+vo0sUOpJYrQ/XQ5p/6tP + d3pUzLgIvjnRuPCgZYbMglF1EXevKwrV8/3BpRUt+WTXjqDglovE0erKpIs60eCbnDDdZYHHRyOG + BzpMJ4+ZC/Zph0eyXMHx4pxJaGQWf2WmMaDjdkjZtZa25jRl7mpdRZ+U+Y73SulwLD14beUnva0f + pJ3KPi6hUYuMeM1RQ3Qn3gHO66vGDDT4aPDk2xMZN63/07/6jbGWwdQ+AbkdslcxgjEZsLpOOkYX + ZFRUkPoaXVahTGb9b8tlHghInnYm5fp2nJ9fLIH2AWP2+rmrvoz2HSxs7UFcP9I4f5XLCTrx3dDh + pO9NvjB3gWpTKMk5vj3TT1Y7NbLat4PZxzXQrH8yLA72wELzIFd9Kt9klLOthlnaP/3+dnsAXJlP + 8EKLCpN7pjRBOGUOcy+LyB+C904B/FZ6Zl0eoTk8rysHjGQtESu8GVXf9sj602+suX+1Dg0i2KhH + gz63I2ung5deQReTgoRaK1V90zcBDMl6gRfNmaec3XQKaVJzKsalbTJ0HBM4olEmluOW6ZRH4w5G + f3ciOnhVwfANMDI+qUA/p6zi48LsGng96iUje7Ou3o9aT8BgNiL4cr0X8/OvkNVbHjGWGSq6e5ZE + IDxvCdMf2ohm/26j90vRmf0q67azvEqGJsSUOQux8Idbn9vQR/KCLg/52x/jOm+Qv7BVLDquVvQX + KjzQz+9o1/xVjXavADKe2ZJ+6cn3uVS7Bwi+6IqF+81C7RJNE2A1kolNjlk72Shr4NA+BOJuqrhi + 7OZ260V7XeNF59Upv4pNgH7+b3nQD+1w9FoFAu2p4eUiKtse9XGGdKNRmXPVZd7tclojXF/WzHTL + FxpyP/yg55s6GLT+nA5vz8OKLigxM8/1wxxM7ySguZ4x19zSbG8it5BzsJ+UdZ6ddvP40QJV21nv + 17w81yiActttMVBl5XOl1wBkkg5zPznwaUV7gKA4JHQYHZGPeyPzYCM6GtvkjsSbU50rwI2yY5tn + SKshvTkCBFqtsS0+ar7M++YKxDFdul6592qo8/2EDMd/0DWJn8Xw8VIL4aN4I3qxC/0pBEmBaL2M + 2TZqxYI965ODSjXomJks22qY+6WSfcuQ6Vmy8On8/0rX7nLmhk/J51mWrGDUPgdMh3CouGlkJdoX + kU2Sr1EW37u0y5AiW4zgIRxaNnnptCLr5o0XnK/9ocrAQGaOzrg/nBNES2NyQPO/e6K/zKodTrE7 + KOQq2kRPUG4OeXnZof3QXSmXMwOxbc9PgFT7gFdm/PQHtXY6SA5xxEhuef5nHq/67DWTBLFzLLia + pQaYET0Rq4syNDQoOkBS76/MOXeZPy7rpgHL9kYKrj0Ug+EZFqQr05vra6ymnTt2UD+jirneOuVD + i74BwoeixXXQHVs69XGtBLV2JJoib1uq5wcb5vxCTCw7Ph/o0gB83wzM3z9oNY6iQkG2LUowoe9i + /OXFWW/oevfxioGbxkcNl1OMYee7Zn/sHw7s1JdKr9oXozHLkjOsJv5i5ub2KKZCJgG8rcNIAvda + IhaXe4BbcZ6I09usGub8h+b+xKKm6CoaZVUJBzmN8cvdtAUPPG9CoytlrNBcnUtV7Blo7tf4NcIT + TWq5MdDGeDx++mZOijgJIJS1T/Q63/CprXVbMYfpQx/562iOW7Oo0fpSLal4FBXzHtweKzXf7SjD + /uHAJ4PKH2VzwfVPP/0pvbkZOhvl+9/1p7sjVrWjfGbOfkHQeAC1g/l3on3tntPJ+B6Q9chsZvYL + ivo7ikpUrz8d0f3YaLmM7goIkrCjwrf8FK2fXw9gX5WE/ep/wNkJK+G7M1mhZFa7TND0UYiYXAkx + k6/J1ffijAZjOTJjtN7p1ERlqcYRvTCyFM/V8DSfB3RW91vK5jw++uK3A129Xej6tqOcrsQ8gFq9 + 
c+bja15MgvSq4fk4hcwWcZKO77d1UE67d02n8tsVzHfHFWyKccJCMeCUd1krA38vTKJn387vl+Bj + 1PfGBrNt2rU86x81GDejx0O661B/zamIgnwZzbyjLEa15x6Ux2E/55GMszvSbPQ9xHtGnC/x6fM6 + 1LDPlwleXk9exYNbcoW4yO54+XTv5hCZxQnqacloV4aRSX/+WBCrkpn31zGl2/JCgdNJY8a83qcQ + 1H/7BS+N7v74fVMbXktVo4/FSIrlnPegOIQWs8Lbo2KzPkP2xAnx71D74ymD4MdLmP6mVvUnnz/l + eMesd1wVtJHODbIOeIHv/carBkOcDqBotTfnZyuV9fxqIbZdGHShRcgcN6pdgsEsRJXX+4bY8U3P + qL8dgYIYC2YvlZcPuudnnW38DKHxx1tM4rtUzL1zO+ZZtUOraXwRq14nfETJ5vlHj+O5voaupAlS + LS9gBB/vfAqNb6BcDBYz2z9I5iBfVxjNfoC+Vh+35ULWruB0ME7MEM5D1cZ1YsHxcLwxvdalor+j + 7AHZVlApWvZmIW99bCBZoy9GIBzQcE/exto3eIHRO/mk3IxFAey8PjKiv3LE7f5RAlyC6Y/+TF25 + l+GSbU7MnP00f8XFDp5krzBXuOzboSmDEqpc2hPjGD/QGIjTU3VytSV0CKN2PGXizMv4hZgNXMzv + slxmyly/xM2+PaKBkR2U8m7WeDGWTjXmZvoB/SqbOKYnt6CsRh3E6KHShynm7bBFd1ud64NpXztE + Eok+gTLzDYrm9UGdOpfR7MeJ9zUMn1vJRkbkwc94Zb3rivc0yMAwlm8KZNqh3/uGWHZEQo6H5ewv + 4g8U0zUnPjocUs6R5qgXo4+Jx2ldTYZ4V+D8BoEZvnTxh7VnJIi+FREPVaZwBn0uQ1Q+debM/qiX + 0P0K+lZY0eecd7kJfQAbdW8Q114NRfOmwgALf3fBw40+i2mXXx107HFDl/V5rIby/ZLhhiqNbI/H + FWI/nlNOjw/78bZhf2soKqNFRtdGeeOsEJ0zKlXcMbfqDd7bpkcheVyPzE5OXjqk4rdEsBW/xMXK + Mh2u0DWwdTtOdJLYJhdM/wDQxxpz4vei4J9YtdBeNnKqdi8N8Rr8J4r9Q8q8SV8WMz87QLNd8X/z + p17UHj8/SCELuopHUt+gfsoIM3qzQpPmloOK69sarybRSKeKBt6vv1PI723Komgs/+gfD+9uwT9G + 1ijZM0hwKSxbk21AtiG62QLl5ftVTY33PIG0Rm8WhpvQ7G+SWaI5f/7pv+z4rq8w5w3i3Kjtj/ve + OUGXL490dMiunTJp94RgV+1ZMD/vzIsDNG4gJjOPQp+5f4BsvTvM5zz3aWvng4zd7st81XLTMfXf + T0TWnzeVVXiZbLq1V+RotMDLbdqg6STJux9fYtYL3X32krdXcBpzzXz9qrUDrXkCbOe5+Df+YStJ + MqxRoM2ZTStEjfY7hJcCwu9J/vpjFK1t1Drbyy8vFiPNQ0dZuOhL5UMRIrSPJQ11uxumuSkJLbu4 + I6CZNzHfdZbm8FCPiQJb+Yvvx/RbcanWD3/4aOha35Zr/s1WZj3B9W1Kq2FKrvjHi6gQWy4fCcrO + MPtZ4soFbsel/+4A4o3C9OVIzEHsJwW+o/7ByjG2quWc31Fj1CuMikgvJqHcK+vZf7PNukYFLZBy + RspGSLFwOHUm38H5pCyPw4mFW/o2Z32MIJRjQvbwDc3uod4SNKKrTNFL/VY06LUnoDcNqXS9ZubE + 4qcB8/oh2GqmYkrMrlMuFk6pgPWVyRxqRXAX8Y42tvxtuRiVAzD3apFAhoXfFHUzATXOF+Id1juT + kXJjAxOjFwvyu1+MeY80kMjWppJuPlPmuXcNvhoKZj1QfzyoRvfVtsDru6Ejvqu5rJ7Er0jC0neL + UZIEjL4XO2FOKASp/Oh5AqeNEBCtNsFsr54nQu3omBHSlW1XgKFAsLvvqTAtsblcxcUJHR+Pms38 + sJiub6uENcIa0RdcnvvR/gMXLLp0fHZyW8eZeAKrXCQMO+0eTQWazhBG8opsF1FZDdf3DkD/bHd4 + 0U77aoivK2ulYjFiB9e02t/7g6OmHvAg+nXFmKmu4FCcB+bHnuAPsdlFSH1Gn9mvYC6d6av58UJM + PW1XjWsjisCxXg+m1/kLidfj6gHHOu/JthENc1nHHUbxs52IsfHOPuUqXoFVpyFerIIwZVjyFeV4 + 2N+Y0xS0mIbjx4O6v7yIczE3JlPgdUK6GBXM4dPOH+d+DN/p0TCyfrCq59L5vE6WcjnrQ+93e+uw + gqN8Q2xD19ycHj2P4JqPNsOEblO+rCsb/XiUdtDidiiTgweqAB0eNLIrJFs9nsBhe3HmRYb/4xtI + vu0FYiXXJafJdXiosta9qJhITzSIWYVhvdtJJN5UDHWpu05Avxx1pjVFUI03tj3BR3bWzD/XFeez + v/jxJkJURfPb4ZY8YTEdNuzHKwcq3ygyb+TItPF251MjnT/KzDfx0G/HYiTRBytdLh2Jqz+wz8hr + 2P3J48FjFfvsm4QAy+R6Y1723pr9Ekysju3pw1xFNKof/0HNxHbMcdUPGjvqCz/ewHBioIr5+XUH + s9+hpejbVZvE3kkRqdDM/KszuzwaVmpziYBtBiK1Y6PiBhwXXbA883P2Pn4Oir0wKFZ10/b7GE1Y + neT9i4X17VLIFQ0c6G5+goX9pi84icoG7dHpQdfH1C8m0zMekIqeyYJwyzlr+kfz25/Bk+bbiIue + 9AB73byYt6vNakzqRwlvJ1xhUd/45vg2ogDmPES8XV21XH0vzxDaHqc3ZYmLcQ07C/aMFCyc+T43 + jh+sGOtRY945Opn8QqkNfH3esCBwDXPmuRg8ox7pauYVHBn3p/ocFvm8X/RIeWMoJzTXC7N69WPO + /fkKf/KLuWN8jCXZho5sLUamq5KOddwOaNtGJsHmjqD+Y3xtQO8uZLtGkQum12iCviYOFub8NgVG + tlMkWSuZqegSn/T8YKFQtQJmvZ6iyc7u3f75GzyGUKHBuvESplI90847T+24/WwpFI+TP68vnA6n + kgYQ39LPHz36eJB26CobBTFUGxc9elMHSjtOiXWvhmqwJBPDPUwUrI5P2eRr2JWQ9cWO+fHd99kT + KfSXN4nH+srvXjfdUNjEKzzNevlagh+g7+N6poMsWf4w91eQcu4TM0cal377UcJt4+AzSz7V93Nc + TbDfCz7zJcMo5v1XT0V1nLNIh6qazuWiBBGLmJFVEBajk4EDc34kxIqFduYtAMvQ3jNtXi9Mgf70 + 8xvzeM8+JaYnKLzgQDCQY9v99jPKcSti+bJNqulNhQlm/SY+ojYfFeProeGmLWm7OK5Rr/TOA5r+ + 6OEhN81i3o/D6ELhRZL5/f14ujr5lUWF++2JxmtOZaSGR0zlLmjToUeTBXM/JES+XdCf77M/IfbM + 
i5nZawNaUZmwoF6L7VD2dwGC7/rKDPum8NF4W/jn18kmd/Z8kMyiROhAfBLO8zOtbu4DHQ6OhJdZ + X8y82RPAIXFN73GtFNPUx09Qit2Z2V3QFvP+3gQfrXv8eGPL1vEzAfuZnJnVfdx05nkJyKqxpbxG + 6+rnT6DGov9nv6P/JtdMKY/TntlzPxmy0rrC379TAf/5j7/++j+/Ewb153J9zQcD+uvY//O/jgr8 + 83Q5/VMU5X8y+c9JBNqdyuvf//r3IYS/v+2n/vb/t/88r+/u73/9Jf85bfB3/+lPr/92+R/zf/3n + P/4fAAAA//8DALL3VtrhIAAA + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cache-Control: + - private + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfc664f8981121-ORD + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 18:17:24 GMT + Function-Execution-Id: + - pbmefoalhwp8 + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '98' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - 37ebabc27cb744eaba5b13fa828bb351 + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: + - '3000' + X-Ratelimit-Remaining-Requests: + - '2999' + X-Ratelimit-Reset-Requests: + - 20ms + X-Request-Id: + - 57643378a1a524682d1e62a66d1b55aa + status: + code: 200 + message: OK +- request: + body: '{"input": "Grocery list: - Pancake mix", "model": "text-embedding-ada-002", + "encoding_format": "base64"}' + headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '104' + Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/embeddings + response: + body: + string: !!binary | + H4sIAAAAAAAAA1R6WROyPLfl/fcr3npv7a9EpoT3jklkMkFwwK6uLkBEkEGGBMip89+79Dl1uvvG + C6Q0yd577bXWzn/866+//u7SKs+mv//56++6HKe//8f32SOZkr//+et//uuvv/766z9+n//fm3mT + 5o9H2Ra/139flu0jX/7+5y/uv5/835f++evvNJie2H+9X1ELP7cYftp8R2CtoXS1tuEbPsJmJtuC + pcMy2RMC16p4URNJTbXKouorcVm49JGezGp97bhQnvd8h63XACo2nZ03iL3bHm0ot3pr6FwI3MGr + TDa5PhvkpGxdsErmQu3nwkVkzzgL5h8iUpTDwmPl5uxDWgZvnM567S1hIobQVbwByX6L0wnQrATL + NPiE6A/IFj7QAyiPaUEdh69YmrmbETarVmCrmcNqMBYthMKljrC6/4ygp3WcyCdlINScaADG/Hm0 + YWnEBPvbC5dOhShzgIhHgVqjXDAaQ9mG5XnvEl4KCCDd6DXAeKka3p/61Vh1EjQKI9YHCcFON5ZV + CjZw9PIr9bWH6y1edMght7TrtJYoZAv2CxWGLUmxfzN4jxSiNEP1fG/RojlaxOQKyBABV0HgNVG2 + dJwUw7tmJETw9ADsjpSNiiP1FfZV/+FRTGseakNFsXsZrbQvfen6iyfa3kuz4j7ya4TXhyGS1bvd + h3mukwK+gkanPmy0Shj2yxXqURpQAwJUEffqiVC9vhlVi+Horc98G8Jm1xdkMxI9ndpq6GHd1AA9 + Fl1g9WHe9rCVgIoP7vnKJn9yNvBCA5U6ClC86eCuPrT4JKGefzBAv00CXVlKoiGmpbKxMpaJcJWs + hXw+WDNG9RGMsLiFE3WvRpWSqIoR0JaTSrbc22RcdAMc5NnDQJtda0RjclJ7cJPACaM2RB57XB8+ + +O230PIazOvECtAezDO+Sq0H5ja2EMSecKd+ugTG0oxxDF9HcieSDxmjj77IoZ20G1JfDltj3B36 + M4yYq5I3TA2D9cNmlmYYnLHRm7uIbiyDgFSXB2xJAWKfX7w3G4Kwt1d21WIuRgJsIiXYfC5xtR6C + LoHAaGokVRlldD/vCuDvpw6bk68AFjpZBxd6ULEtXNtqOeHTCupNeaP7Nth401bN32Di3Ypige8r + yscvFwbCNsGHA2rYdP6opWJseA4JwaHzyJX0UDab8UI1CXLVwCTPhOCaHcn2/JgGwpkl2a4naaLq + bfNig9/rV6Vz2wytT1tnAhRALG82I8JuOZOUgELIIXcurnQvR1PEUtuHgCjBgHFEo2h+9lUPcU41 + bD2EZVjBTifyd7/k7XrjsPRKGMNXnuzItlc+FcEbXYSJ7ZT4yEdvtnaceIaXBL6RoIRtxJyry4NP + m+3w42M0gFyGxQe5uC2wUU9iNCf62MOY6xOMLg412LH1OahodkIfW38PFu7wCiC63CcksMH3Zjso + S9DM7YiP+2dZraWWcNC5FRG9ebBgxLlrMfRQ/URc2pyqmXaxCt3CDLClpDtGpJfnCw8URajbil61 + BLsnkneksLAm1beB3xOfl/3MD2gmTEU65u0iw+YNTWpoaWIs/rogJag+GyKZCQaLmIclbMpVxMel + 
dpnQVZcZAt1dMJLMOR1Y35qQK5wNtTaXms3VwfRhe/ff1EqW1/Bbr1LfLhHZ6meSTlot9lBOqYnE + Dx2Hib8sBXQVZ8C+5ZkGVRwHwSrd9GTzrZf1bW2uEITCEXvuU6rWy1Lkyn0wLeplXczYdNbeUNAi + Advxua2mLuFlyD7aDu22sRnxv/rl86dL/cw+Rkt+vV3BF98ostnL61c+iWXpqs/U4+6lN8kNK0Hl + HHTySdQUsJhcXVBqUMYHwEfGUq5bHtjHsiROIg9sPkSdDvP4ZFJMFlZNyZbokKv9FUnPfQhY5KMY + xMifEO+zz9DWDzn4nQ9G9/PVm68DOyt1jwA94LkH7Dq8ExjuGw8tZoPSOUWeCxrqCUTQ8JN9Lo9r + CPvk9KKGcnhXcxrJMYzu+wjx7KEMU7x/mbB/yC+MScJX05TNOby4gU0RFJN0auK9C8vb6OKbOzC2 + mrzUQDtbXIrpUFU9aBoCtlxSYbMPKzb7s1rAjR2W1CacUS2lq/YKYNWHdNnwrua7d4rhK3jrOD83 + mceMsg5h53gdWu9OymhsBwT84uv4MALM4OdCKS6chN4TnQF7ZH4GHkSh+HBXHW9eOjOE1ThY2AgO + fsRkmsSwCa2M7qdGj9bL44F++UAmf75XBOz0EaTS9Ub30ixHSxh6hVzY+p7MP3x9acWoFFKs0MfH + sNj89lkOx9FeMZolJ1qf7aiDqc4O9DElBliNuuLhLY01akRHaLSc+ephw6GRtC+p96pBMQu46TYb + tAnW1RjDRDrDsydtkcJHJth5KedDbXO/YRtM7y++eomcXVNGzfr9AWtdzBzYuVCnltR67LsfHyYr + wtR4lUu0ttSxQCrIFTatW1nNkF7eoPONAJvax67WvR7b8NMY2p/+OpOymJXN9XDFxoO0EWs+8Qzx + GlmEWx8uWxI75BV5e7L/1Cszu3MJUaRI2FmWvfHWhwJCKdhCfPye5/IotTdwgHVD8LR5epQ8CwsG + HVnpPuUKg0YnpYf3zs/pNz4pO/mhC9/Px0rtI4NpX6dOA5U3ycmUdtowz9dNAb58DYnZYA7spd1X + cI74jMjBZkyX3QZmsE42EXY2u8lbyZ50kDi6RzYH78qWebBHKGn0TPrZi6rVcacYQnlYkDItBWPp + tdvA9rbO33owq5nTmAnWD+9g9YEKbyqY8YbHl3ck/XnlUyZ/KhfE46SQWGbnar6RyQLf+BH5cDsZ + Y/tQcjA0eY793tpHNM0CH2bu6hHxmD0jlt2TEShCY1FX2pjD/Du/fbw//vjcMGe17cJdDyxSkLAz + vvUcK7/+ur/sJG+UG+4Nr/4tQyAIXbY65saF/UN8kWJ9YGMCnzqE23TpsXt09Wj1JlWGhdWcqCmU + GpuaMU4gm2FIM+BO6bf/cJAQ1aD+O0+jeSehACjC26KOfIgMtkL1rXzO7RHrF/HN1tcOBtCebxj7 + RcSlVFH6Hv7y0Xa2a9S949MGltfw+odfLYW4csCBhYdPVUcidn5krowsb49/fHaUPrL141vkVJ2W + aJEbUMJixxQibsWhWpP9OYSCdR4ozjquWkHTjJBfNUzRrq1SZsFMBTfx+UBym9vRPKJPALNMTqk2 + Ndaws2w1A1fOV6jNZ7eBeO29B14oQ8I9jc3wxb8E5tvMpt9+m85C+uEgWxOLauHRSXfpvWhgKMYC + UjaflS1eCn2o6A1CDMPaWxh0RzG4Zy4+2vU6jGGyXKFWqjHeXw5bj2XRkMOPRms0iEff2MnxrYT6 + aZsRgRZ7YzUywYLGHHww5o4tW7wIZ7LT+RQ7wyZiQyd8dOjtDB8JHxF4Y36/yCCoDytZcvmcLnKO + VnC+BAG9SzPvEaTZGUybnBItPH7S6dj7OXi+M430u6Cvlj0vd9DybxPaRvE1YqHaiuACBvWHn8YS + C3sd3A5WTY95CYa5V08qwIzI1PDQYLBe1U14rg4mtbB0j9aoin15jseA3rex6q0InFzoXG4HYh3d + pmLsveUBU24n8lv/DOmjkfmjcf3x32hZ7hSBx0uI6Z7ni5S0icLJXuvHFEeaFFH/dWngJ/Lu1L0m + arT4bUhgluiMemWgM07C2zfEgc0TciAOmAKnL0BYvzDZtDvN4+asXuEDnSLE6iuIyMaIVKXamDK+ + fPXNdIC5DebLISCKuXQDbfA1BM1cj6Q8yP6w2AdIoJEGM3XMhLLFz2sI9tvghC3Ou7ERr00JWIo1 + NJvxlg21Q3XYLZpDPHxkw7rOlQ1APe5Qu28gGKdTqkKcTxrZlqY9sF1g23/0oD7LtbFekycB3/zA + qKfAGznozXAwzwj/fm8y6rcMd+Ik0+NYMNBB0b3+4Tv5t5+sl6XL/ujD4yjMoNNOTQ7Va8OwrgSR + tyhqeYZOhyjWdigZ5na853BbHA70uLf3g2C5ogWVE89jjROUod8qz82PD+PnvTSHdfRevXJ93S5k + 9dRsWI0hMpUvfiOQdSJbj2kbw8zcblG8dtOwCnHpw48aLWQ4oLKqf/wiT7IAn068H02g4nX45jsB + zXvrzmqsaSXYu+GdHufzgy1u8BjBV18j+V4EEX1NViZ+8YT++NOin5/2n/5/XHQBjC+RyXB4bnf0 + 2LZrNf/q8bjLP9g8O2+2VP4aw/fuaGDd+myH6R0HMhRMWcD262gPY4VnCIUuB6S39160Y1An0HfX + jCjzkAASPgoIbq3xwSrPjQZTOEWEoiMwJI1FbBC1sK/wdjBrfLoCI2WqzZdw1o8dAqSZ2aQVTgI1 + dxehzfAZPPq+3P/gNdWXU+mx8yC60ESHjBoY7j1eCFcIo8PwpF52SsGqCDCHH9E7ofdXT33x3wae + IHJEUps9mJwoQtBE+4w6NpLY0lHdBV98IoIHC/CnPuVCfyEGeCGl7C1w8KvvCbAGPPzqBZRi0hBm + FYvRc5LvwkEXE3wMY6Oa3SgLQEZn48un78P008cm93KQaHl2ursqtgr9UR4R/9WjAx9/XPjVk2Sp + Tqdoco4VBF5Vx3/0rdDLyQpSLRewGjsfYz6f+fPv/OhxNy9sksd+Bd5O89FsUsg+61ZFMNjbhHBa + OFRrLHgQxM/+hdUoaMEaYX8DS7xMFHPv9x/+Db3zdf7mQzis7FD4CmhlDZtLlYBlrfcEdq/5heRc + 56KpveAetkqp/hd/XVm5wgfZ0j96crUPrFPebYwpFqLb8PHXxYfXkNujSMlf1cJpsvrjY4gNFz39 + 8tNc9rrep3tneBlzigwbqqfXFqtR+2bsdBN7cMxPb6wmIgDL29x3MD80HfWI8WCTRLsQ1p1GqXnc + XqvlbR47eWN+SrSpDgePyTRMwCF68ohzy8+wZtk9gC/Kf6ihjwdjvRZoA7YB//z6H6QibzxYUDC9 + J+K+9TDOg7b++DJh+qU36Dq9Gzhp9kD1k9sDkt2jGWrUpV897g6L18e+DN6RRnboWBuk9JcrkJf8 + 
QrYkuQ5sXz9VkKw+xrq/NcAov45XWd9VBXm/7iVYdvxhlG91vcNGuX+nzD3mFpzOekTR6bI3RtQf + S7mw1T192BAPM15JCbjIKqlxr9SUN+qBBx/1tOBmt28BEfEug+t7ueAnBp/o6w8UsB1NH6MEWd5q + do8GmBp5EOlbD7V9jQlkQHoQ6dnVgJDT0Yb7/bEmu+41pMvolUSZhOZGStPSAdd9Ugh/5y1se8Xr + vCvSAWCvD1ZfumvM8UAb8PNDVMH7/M7jDOZoyKjVHnD1h48vXgmxW95P6TpPUgKcZgVY2y3Hge0H + K4D0sT3gL56BeWX9DFLpfMNOqPVgsE3FgnkqmlgrSZ6y8AxcWBoJwT/+P0eh2cCfvuPc0qmIWZIz + /PIBbOB7aLC+wcWvv6LdqQ+95cA3IpyWPcVqfT8Yu8BgIQzO6RUf7rfGaJbnJYY4POs0SMQUrLQO + YumX71cl3YHKZEAFEphVfICgj1ZyX2TlV2+HB6mMubp0/p/8/vlts9iqV6U/oj0SmvbtLT/++e0n + SM7rQyoIl6qDbmqkf/wzJrh7F6qnaoudKzUBlwVQBl//lGyF6FaxFdpvYPRVTLVLf42+eC8Cy5Uk + 6t1vL7C0l0MHrQd7Ec45ZQN9aadVMUXXoToBD9aaz3aG5E5aJOqqlq5h6OZwCD3993vp+DgKMSCG + cqNqPZve9OVXsrIwGe1UPHn0BcpMlgIFYlQdWm9VT+MZlHW5+eG/t/78o2bXFQR8/QDSyxH36980 + eOmuR59T6IO272LsXHZnMMsNbGDbPX0ii9OtWhI74aD8sEW0c2zk9dk9WuEylztsRq1aCRXudegn + homx0HPDjDX3j97CnvIpvOWr1+Vw//bozy9l2JBs6YphhrVNT6puIEIGdu5GJ7tcnr3lJusFHHQ5 + QQU0+3QqXbuHVmltsE0e1tDlby6Bz8CtKbYGLwWTd+agGJwz6mx91/v5XSAUE4HIUnuoGC3vPPDq + +U0PBIsGsaXnG3Qna0+d/BaDmd6hDrVRqbAzOsCjWuKHkHmbO3a2sWqsB1eyf34CdtuDF816vRDp + 50+CfrXAclVUFbI7O5KddFeiOboBXhLt+5ZsZo+mrA6jWd7tAcLGl++td/u+yqOXXWm2HZd0Di+u + BUC536H3yh3YoiaJDar7kmMDPOWIjfZgwuvzaOJv/I1dGBoFzJuDSFWTZmB+RKQHpyWkeP+eXum8 + 5wEB7wK0CJw/BzZDo5PBcFF78icfxMs4gtB0bwRGePRIql9XKJP3iNFgid4qmPkGtgLlqDUTF7AL + eYnwpy85aFoGm6f5KudXIaCHu/oxyFpiV/ZCEWIM6QCIogwEvkXOwG4VadF8E+48PFFOpo5jO4xX + C/WqEEf1qK60szfuuxuBsp9n9MsP2XLXqwYI4FyRtgxaY/niJ/DJuaAoqStvNWHBQbpUPtZNqwSL + n08QvPygxtF7u4n62q8bqI5XA/vudfBm5zUHMDp8nvj8XT8JVG6Eq8md8Ombf6v+hA20WLmiNHrP + 1WJKsw0+kXPHmua8fvGa4bu8pdRRmxqsUVJlSh/vJuyjyTb4Kxf6AMFmQTwN+Yp+CBf+9oOP7YYO + u8ut1aFkODZi3/V95yehUjSvC9bpbgL9Bi3FT0+gBel6JETWzYUfQ1yo1mZxOu3MYf3jv1rs8ajm + Gk8+n8yaQd1d4A6zhIU3SAWxok6iAkZSPV+B/exWejzl0bDuiclBdHtLVDs+abRykm//9Ct6I8ka + 2F7qA1AMjYNe3YsOc3Kye7hqxxNRnGgcfvULpzo/IDGzl2EyEiuG+3m0qNX5Dfio516G2+OGx6q8 + jYyl2uTXn3+KeMYlA4uSMoFnISkwnvWCzXcBWTAbHQNJBwFWf/Q/ZqOMck8K2GeV4g28Wxai5sod + wBdfVFBtLJnuU6sxxl69qzDFUY8t87QfhNGuLOXnT6mC53jL9vzhwdpsXnh/pR9vvSNNVeRVdGio + tIE3pvdPD1s4d9RcHz1j6H04Q69FMdnJvZtycGfy8P18rmiToMajV+5u/+GbqAGLwerY0KG7ooIi + +rgYbIPEDLD++cFq1+ce0+tyVW6SdCLC1/9bhFegK9bzeMGeEhoeK150A831opA5LhqP3zhuAD3Z + UtF63pwBrWNPl3NHz6gvt0nEODeWIVetG3r8zv9YN7olvO9O8m9eMTDsFDFASfai/s24eivTRAiF + ux9hvdNMg8DxAcGXn+IDUVaDRGcOSV9/96c/U+amsgnffC9Q9dns2AplaQNTfOqxMaOqWn7+rHTL + 3+Tr/7Lddz4oAXVU6NHWt8OsqFUMe9IohCMfFfDfeZP0ka8Puj+Md8Dm2mtgbZsDNfkt+q/+MtoN + QcstLKJ1e+5WeD4NAdbZR2Ar0+YNnDhCqV9rJKLowzewHS0fH7eVDtZd4PjyT0+okwa8gfXUkqKF + +PgSatRb2mro4P3ThxR79zoao9B8w5ihhqxDHgDhUnkJPAfHjLqjOlarLd0a0FAJYwtLUjphzSkk + z+jP2NMLe1jGPcsg3JIa63ZnMf6OHB1GhKtwZDmcwTJ3Q+SHeFYprsW1oqX43vz8HWwWzxDwXnvq + oZiL7be/cqyT0qsJ5U8lYj1POvDT24Dju+brt63Dn37nmyeObEWgMXY4yA0Aur1Q8xNHoD+2Rxle + NkVHXZvV6XSHXABvrfb5zrMe4CPHtwJWRXDAl8MogaXy5QR8+ye1heth+PqPBFK/igj39V8XtCs3 + P//tt//o8/MXB3VChEaUpYQ/ZDms3ILHf/hHglwLfv0ganz12pJImxV85ynU83OVLZVITSC+VBFH + 33kjqcLurSz+ZiZjKGJvTW2kyt/6+ePXDQYvFpCtsUW1Ez+mrHLkGX79CzJEYBq+fiiEq4ZPVD+9 + DmA54fsKFbQ9Iv4yugPhL2IO8bT9oOr2vhvM2tsctPBmwkh8PaLaqEf5z3zUi95BNcdbmYd9sbo/ + f3jgR+/TwW9/RuKeG9L1O2+GQtIdMMZHHC13Lz7D68ocBJF7MeY2Rj4YE7L56RFGsSG5MF7mlR4O + yAILucsbWJ4PLlolZnms2DGk/P27FfCf//rrr//1u2HQdI+8/l4MmPJl+vd/XxX4d/JI/s1x/L8p + /+cmAhmTIv/7n/+6hPD3Z+iaz/S/p+6dt+Pf//wF/9w2+HvqpqT+fx7/6/tf//mv/wMAAP//AwCZ + Ns2n4SAAAA== + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cache-Control: + - private + Cf-Cache-Status: + - DYNAMIC + 
Cf-Ray: + - 7ccfc668afd1113d-ORD + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 18:17:24 GMT + Function-Execution-Id: + - pbme5l3ic5k6 + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '224' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - 874088999973c1eb2bab5aadc2149d90 + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: + - '3000' + X-Ratelimit-Remaining-Requests: + - '2999' + X-Ratelimit-Reset-Requests: + - 20ms + X-Request-Id: + - 0804d410605ce2d1b7617943c8185427 + status: + code: 200 + message: OK +- request: + body: '{"input": "What is your favorite color?", "model": "text-embedding-ada-002", + "encoding_format": "base64"}' + headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '105' + Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/embeddings + response: + body: + string: !!binary | + H4sIAAAAAAAAA1R62w6yyrbm/XqKmXlLr4iI1GDeIaCiQBUConY6HVFUTnKsAmpnv3sH/53d3Tcm + HBJD1RjfadR//Ouvv/6u4ix59H//89ffRdr1f/+P+d7z3t///uev//mvv/7666//+P3+f28mZZw8 + n+n3/Xv99zD9PpPx73/+Ev/7zv996Z+//v5o4ok4L3fRNo/PwQG7nZbMsUozXi7lOoRyk2jsqJ7N + dtCGaYAtWBeGzfUrHhdx6EHk9Du87KUtGg1PLsGsgTFr/zoYouk4GtrmEceQL1aIpjeyQ3BOjwTL + zbcdmHqP4LNqcmbpZW/QJPEAErA50y+9mU3VYqWD6gVbYqRbK+bmt0kB8PbBjvUoxmwbZT48vqFO + l5Vao26lGxKq1qsPcxVLDfoKrAq4X1yIFrYPVG20wodjcbnhocYRYnrjD+iM95hK7P7O+NITBujC + BmPrqRyMdPvxUzUecEqHPXztCZ+dDn1fU0MOxq5BQ3D2PER0WhCSLgNjKoOyhBqiiDi38w1V2ilP + wUCbBku39SoYs9SX4TwAIkde3+0x3yxEdHOyM15K7MP52o48KA9NzNyE8nY4+0KDAvdEmVk/zrzW + 8spcX7e9jtGrMOIlR6ccRlgJuOPDOhvurSaosBHuuBWUNRqabXtE6iN6UmVDUz6NaRzCu1hyyr2b + YA/F46ggqhoMVzi68nm9Nfh4mcm2lVrzZnuqTFCQdGTkqvZBLXRxinYX50WVbU3aKiNaB9dzZJM9 + RzIa0POpg0DfWxI3URGM7y+/q2dVD2hVtbY97ZPGUybtxoj2inO7r49lBGifR0Q3jk02Ld2XBrlz + bbGcyZCNmV890LSyrmz3bVk82ZWIIZNFRNf2YsdXtzWSobuWSzpuS8KHRXTbgVJ5d4K9VEfL6wRX + qCK8Z06Cz3yq6cdCU8sNYrgpDabPPVDQfv0cyUHfb4zlfXAs8ELZZXbyDQ0mg1xC5/ETcYInR/zi + aJrK+HQjehV17fTqhBwWlcqZ+/1sOOuoIcE99ShWi7xC3JA/E4hunRDLwwniJxNACRuxJSfaCwFf + +J8Q4rN1Z7puvzm16IPCWFy+RA/HDRK1U/4G8bxz6HoFx2y5Mbca7IvPkemWUSJ+RZcQimhizHwx + DzXO1xvWijYZeFJlL5i0OvPQZql0ZPPSt4h3quRB+cQ2w7hdxbR+fmTYNQPByHTCub/SdLG0lRcz + elYGo3ioGmhe+Zo4m6UeSL1V5ABFIRI7QEU8aKcuRbty7ZND8haziZ1vR3j6TxHLHhY4a069BLp7 + M5hpeqExLE3YwUXSCDGc5BSPi5sswqWpFIYvyTbL/evLAqQ8dOJxDXMu6t8JxEOkMc04XDmP3CyB + zXoVzPuH4+/6ruxASjVGvCI42txt5BR2JfKJxhfXdhB3mqx6B2tD+8BcZv3zvnbg9304Jd921P1Q + R7R+Lqi0Pet8Gv23rJ5P1YLZ4uFqU/3ywkrpqmu6LqKaj0/7eYV5/7FMM48vr/6xg2jFMXOUkqKu + P20UoPX9TGb8akdqpHeQCjXC6+QdZlPXdDvY2csjO4S9bvNx10ngurWH15c655RkMUVz/VFYpyNn + UTdSWKsnwIryYgFj9zACVKrAXD3u+feI/Te6GMeM/eqF7bP+AZZFDbpKg30snW12RDZ6VeSHr2yu + P/jhfaiYhj1t9vkEkno32KE+RdlUGGAhr/p8GRaHZTzWa+8KXqiumENXajsMdn0Hd0hVcvTCjI/f + h3JFp+DLKLzcRTYsviWA/ZXw3D+iMY1pEMEqVGJmM/KJO9STB+hPzWF7SfoEkxSXGlpKwpcQBafx + 5O7UDtZ6puOlUMTx8DZqUIJra5BjsrFa/sEHDJGZtnQUTrRt3y8rQaniGvi9lnJjXC1HE/ZqRNn+ + khbxNA1NBHP14+Xr9IrHTRZ4aCXiCzPWxYtXWYIsVFdDSfQsS2x2aXMFnNTx2Tn48D/1Ae26HZhO + sR+wskwt6JWnx5xWeRvsWzzDP/irTn6GRjYmPhTJ4khL9qZxt11/PFTLZkrMJn5l03a6TehF+owd + 92XV8shtEzB922ZbU3plw2t5V8DbBibbF+QYdOQ+THDXhYkKOrLbzvZ1X91W+Zdt190iHo9SQFUd + 
QovKibCzx8PV9yB3eEq0GkuoUw/FGz6yEZOd4jyNHp86+PO98YDteHJXNkYZPjREX5r7bNw3ICBL + k19MV2UvZqfwewfL+/p4+HqZ3YXmTYM4X7mEBPbSmMzATpGmJZ8Z3wTeRZtqACdKdsxMi53Br80h + QUvtGJIdOjrB8qHiUFHNdM12S3/XjhvT1eGsXwxi6v474I6d+mAeK0420oNkbPMwBijl9khI//4G + /LTUH3BRLj3bHD7QfmRTs9BqASnRMJf46B8/OqxgWzBz57q2tLloJTqU/Ye4Z6VBXe4iBa5bppMt + ri7ZkOx8DFex2hMv6CZEz/bXQvUWS2RHH2PAEj9UwAofhMSi0cSDeujfyjeLYoLDJ2n5UXhr6kdD + iDmbZRpPoYMEkC8PhW20784Yjxc3gW6ljMSp1o3dCd9rhE7M86iKtzaXZv5R5v2nzLzzeLqd0wQo + vq3ItpcKNPffHXp7eWXm4TDYA2llHWb+YluxX2Y8+QwUhibvqJr1asDalygpvk++lA9tHfThVdYg + yt2EHDNh04oU3ypAaZMQs48sg5EqUWDJdY2qM3+NdMp26HtZ7Ihm6mdjXB5W3g/PcLbtSdzk21eJ + yBFpjKyuvTHg56oBFMk+uTzvdjy8g8ddiqzpRSesSPE4PemEZv4iuou6bCSsvKL3SCwqXfR9y0dp + itQwrTyG3a5vK6Nbdsgcn/sZLzZx337NEOyviJnRrEd7EL5eqD7TnUo0C432gLGLQa+mK2WHgP36 + PVR3pNuzmY9aVqzqK5BjeiAb4QJBUy2qOwSPVKMoK59oGG/DHdTVvSCH9dpAQ/qOSvA/0oC/pN/F + 07WsO5SHjke2utwirt8lAVr9UdJ30Pmoe6eDA+cNOHR6FseMWxsvVOinHMn2dXoF3L2tfTTraYyu + jMazXijRzfmcyca5jLzLaFFCIrsOVUO0McaRXCbo3tc72QXWOZj8xbpD5SlriH3mbTBVxFfgMF0N + dl/HfjAAihIYc2Fg25dn8kG4vQC5hV3R5Xeb8cmx9BQtL1XL9m/v3XK5XYdodFeE7NdcQFwIzQk6 + l7ypGIu53a10W5SHfXSgU2e3bbvnBx91lqLTFdMBjfhjTcjIsoCRs4Hs3qSKhpIQaubm5a3lH6GP + YKcs11gyklXAp4LvILAE74/e5YfhpAHXlZw4y29hDHri7aBvpueMVxEf5XwtoYNgYfrmGuVcb+4T + PNXHiUWzfp771VFnPiRaETTGbz3huS5CZhfPQ9CbJJvA+F494ixeRyQW6HT81Qcx1zuOBsnwGngp + 3YY99bNmj8fy46nZubwS97a8ZNNDK4/wUsaWmVvy5ONO3eiqW2sFzaunY7eG9fDQUlljsmk6A41p + YVjAG91kx1PgtlImrWUIlU/CfPMgZlOnRG+4JAplZih/4qoAKBEKNuc//D8+nNSBR7AEst2KlI9+ + LChgVdua4a9ltCtF0SJkFh3CUlFV2VDvdyW8P+kZh0drjabYnxy4N6ZPJTFcxdy9jb66P5o2e9xI + Z0w3M3Mgfssl82RZ5MPPHzWZ1RBCX6M9yKPzRkX6RsSwMrOdMmlUoIT3ngrhk2RDN6IcPPf1mvVM + 3lL5YIegIPHINEn52OPnzCJFhDJj+HrZoPq3HgfbT4lpXC588PcbD3LvElD+1j/ZtISDCYUvWuSy + F852FZT0CE81Q+zYLMd4LLydCE3c+GT35Id2Ehjy0ezvyF79ZkG/R+CgWe8x19O3Nldu/+Xnfs/j + yo9QB8cTrxlZe9wY7+9bt/6t94Ua3O6dbNSQUMYhlVNeGINwuwA8cebQqSdWy+pJU+ChLB5Y0l9u + y+sNvyL5VZ+J+5q8lhtyPaHZr9KxpmI7He8Fhq92SGnFPm+7/agbRZUviUJcyfWyIQztBN5Lc8t2 + zSfPOn5tc5BqKaCF0tCMc0PMYSzOX2audwHnQrbCoIVbxPTdrs/o5tpp8IyikTiXS2kP0eY9AVkj + RgzhSg3qL9b0T31Z8vlg12xMPDTr1VmPh+2EuByhIyYyRYftEExx2jWIebie9c+CdxTfGrirjU7R + ytSCKjh7PmDNHYiji47Bn1looYu/iphDJcPuuhGV6LqZYjwW/Mv5OWk6eAjSjcJAa2PAeOtAu/bW + +KN9S3t4FSsTfbQ1ImawWfI/+KB969nfrXR7BftjjvbHnU0M8voG3ebFUxRpLcUoXILRtVy7ooel + 7chjoLU97bRKAid67Misv/74cRAMa2R+muzaub8rlDwGlz3W6YimXkY75VjjiOiL5SEYxjgQQYq8 + jNb1SWq7e1wc0cwnzF0srIwvx5OoqF/+IFZT5fYkvC+PH1/ilLkPexJ7xYTHC4VET3IxoEv3ooOq + tUC/BwnZTD/UJlLb5sYM+V3FXSNm4h/9fJj9GzWNL0XeaeMx+1k1MddATZCvYpO4Wf+Mp9O7faBz + /cjIfu6PbroTDe3P4oXK6XpCvPlsGrV7SPVPn8Tsgy8SHAJzz/BhfPNugG0K2amPqXgfvnZvi4Ov + noui/OnHTMJns4Nk0EZipWsf8f5pliDVa5UdrunHHuv78Qi3Nrkz+1VkcddsW+vnz+n4Vmy09FT1 + Dg++w1Sb85DO/ohU3ZSnbNZvWjzJ6+8DFi5q6aNVNJvXkyUr8en0pFy95THlRK7QG19i/LLCIiua + qxqBHfoJnvVKzIPpjpHrth4N8OdqTE07yICdwCIk7292fywXCnp9CSKzv7DH28Z4QI7iO7ESoTSG + n35ARpiyk5n6aFgTrKNU2GlUaI0hphYNKfITb4Ez7yYYzFZjDOunErFDt28Ra9m1QTtC9+TwSgxe + v/ebDnijmUybNluDVoNZotl/0Y6kX3ukQSjBWT8bZPbfnJMqkZGpP67MpOeR19fsIPz0EVUl12uH + jGgUopwkVPZcM16adNLVjznJGK3Md8ylRSUifKmexJxIbo/P+xqDpFOOVzL9xmOZnEXYeqPBLkL7 + Dso3cnOQhfJO5fPuHY9Vl4tojU8ucbSxCEZL+V5hXYwF6b+fD+dQeiL4vvslidv12Yd9WhFShRjk + kN28dpo2dQXRopwInmQWdKvpFoHgr3Lm7sMD56pkeNC973eG7/eqnX71PyTSm9bmNkQj06YjBKJ+ + xbJ1LeIZD7BSHx4N0a8dzWY9OKDS/ep0FC6PYJrztV9ewHbefhH/9AlcDCujt9lv96p6n379gCUw + qqwNTtUbcq15E+dhE3uS1+yB1p9kz3anLc3mekiQ5uCO2PduH48raU9/fp04bkGN4aa2laI83pht + yHXPxfeUVmpD1AELhykOKCvsHIREBgqF+rWn7JvslCI9msRS/NTgch9Oyuz/iG9414CbqwfAfmkF + 7CG1qc3eLy0B/XRuqVBrddbF9wNdo7RKyLY2LtlgPG0P7RNtItuPdeScHmsZVu6zJ4b70LIxn7YR + 
PFNTZRte3mMeejFdz/zJdAdS9FtP+DzKB3HCx5MP/Ybu0Kyv6Hg+asbwOQ0C2uDRxiqL3uhPvlPL + zxuxPDcPJiXavkFYL0yyvd1u7XTPxgQsPtlUnIhpjGtuSRBmmz05Nsxox6TWMNz9uiDaO3zHw1cx + I/TxPibbuOePMdm3zQTqd3wwV9BZOwyPd4WCjdWxvQlmy9EXPETr65k98bZF09w/0IW9jJWWvzPq + fWQf2mO7YdtLvOVjJ0kT8gt+YfvV0ranxD114ObTSDbXnWGLUeonUCXXlPiv1Gj7fFNjIA0GjNZ6 + aEwmXzpoUckHdipXOWKZcmpA8m8lcV7O1pY2K8WHd5EAHfeZhXhhLSJo2vMWrxXZbYcfHwxls8XT + ZOXZSI3mDjxp2MzHbjsWRHZ+fEfsvXsMhqUpmqooXi3yFHSSNedYF8Ad3iq9NWSL0HDJTfXU6g/i + pJZlT591evzphd9+cu4bt1TJw5PCdm/WZVO+7E1ol5HP9Nk/cVndY5jQ3cGydGW8vR6RAzM/Ycg1 + agzpO8l/+E12sz6u9/HxjWY9gVVf2CMuTktPWWyG+qdPs9UzqClStRrYNvGX7dBreoNOBELmKO+P + MVnC3kLl6dPQIYs8RA9OhJGNnhXDreEF0yAKGmjhHuHFycJBf75kDpw0Y8EsLdvFPU2HCp769Uhu + hnPN6OfwaNC5+JbEPW0oZ6eQXdEjWAEd7qlljK86ESE+H+9kn5+QwfOjY6EhvZ/oQk/deLjH/RFm + fYCnLBOMnnibFC6Nb+DVUWyCDtpOU+ZrZq3DiY/4c7nDSekJ2wvdnbMx0hrwnmZFdjR9GVPTh9Gf + fOHHr9w7ixiSg/NkZP4eMdXWHbqTrTj7wQxNdnOpoFovP8wp1L39y0vgRISQ6cX9kE04f6cqloQb + Xuhp/8ObDq6f3YXCy9ka/e7RAxphKVDQxiLmspPtlJ6MC3ZwZMOY3nHVoTmPJ4/LpTTGZys/4Om/ + RLzW9x+D5/X9CnmIPWb4cRNMy8ckwoxXBPvFIasf6i5Evzx0e+wbNG6xFcI27Bumi9oxE4UKTSiW + 7yozgntoDMH56oG8H/aUz/pEtMevheb9JPt5Petf3leoMiEPq8yDP/h0ElSDGGIZGcuTy2Rlk9xD + Zm7CkzHw1SiChtyYOO8dQt0xO7xRjm53ZrdnYq8++CKCfXW35MCsDeKy0+7Qt31Ls58/2X/6q1Jp + wpxDswm6sDhSJVUKhThWmcftUXjr6uzn2I6mC3tQb5tInfmLHWQktlzuHwOatGiLa3qc2smuYM5r + C31enzoboyi5gxlpFq6eSm1MSNwJQIvsSlz2oNmojKBD/9JPTK/PUcCf5lYBv2YC7RebdTBchjqB + 8cRXs77Y/epVgu6aL8kG3+V23D0KAG3liczB+Yr/ydu/We/TaWslMdee11LZqyElu22kxqP6+OxU + NXS72U8M7ainuwf09urKDrfvtaUHaCkY35eCh+QZ8jF+mQrcAjCYcfMVu18oaQonZ39hh5A+shZd + NyIk7sFk8/zJ5k/TVX56hqqid2l59HzKwFD7xSh7D/YgllMJft2EuBe9S9ad+akDqRYDkixp3v7x + x6uFkNKr4GK0EvWbBaLcl3QVflWb4/teBiFRgK6v6ccYKFF3v7yPZpnp21wInQFxVWiJYdZ+MJ4v + LYZUeKzYJoNHMN3MFq97O/mQ4/WQ8nn/r8qkhVt2yuARD9X3U0H2CXOm8z2NJ+FtiZDSwWSX64cE + A+uFFByyuWJh1ruTTzoMlrnckV8/Sv5L08FavCOWUOFp067WKvXVmjJFzkJvxd/8j+tyzvztPuPd + SZ4wzP6VikfrhrrNC6UwZWWFka9KBj+ZIqClMNnEOFRgjO3XCdF1M8Rs+yAUdd+b36inVnuwn1/I + BUs3AUnFljn73jKWo3CQAKPTiuDkvAyaMjteYdFJA12kbPuHP5CZOBk5yMsSDc/LKfnNm2g453PL + tmID+n5LMs+fAtSJUq+B1Qol2RnJJR43wVtUxRMhFGb8SOb5KjiRqOJp9icjO62v0BuNiuU5cGRp + d2xAl182+eWnA68STfGX9zdzPWORdQQ1yU+/UWmxXdt9clgLaDw34jxvDYOV9rzmKEDrkrnGfdVW + vIp0uK/5lv30zGifG3213eorrFivHHWv5V2GGQ+IrSx6Y7CWOQZRvFvEoVJmU7sSHRTf9htmGYWE + +JxfQj/lOvGMg8y7NstzYIs0p2+699HkRqcU3us4Ixsz9fngCncKSyeIiOYss2D6ftT8Ny9jm3x5 + bQcpMHbwy+9m/xf/ycfg2RXkVX3bgFmj+FBD+83I7hg67XRWTo6qn3GFkXb6tl3Kpqu6DRsNy4kW + oSnfWoK8KAyHjqzts8Jv/Q6wFX7YcUN1JIpSof/mqcwku9qYLFVOYZ5/MiOHqzG6xlkAAxkNOdy+ + ctbbRZj8+p+OzTRm449PB+X0oh+jltqhWj3MXz6E5/lfu/rNP7znriKHWW/zqBu7n99i+738Ddpd + gLT/4sfb+2ZM3mfwQXdjAy/oazT4y1s/0Jwvs1+9NZ97rKC/f6cC/vNff/31v34nDMrqmRTzwYA+ + Gft///dRgX/fn/d/i6L0byb9OYlAu/s7+fuf/zqE8HfdVmXd/+++ypNv9/c/fyl/Thv83Vf9vfh/ + bv9r/q///Nf/AQAA//8DAN9muKLhIAAA + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cache-Control: + - private + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfc66ebe45e153-ORD + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 18:17:25 GMT + Function-Execution-Id: + - pbmed6937r9a + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '20' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - ba5b2a8ed740e0e13bfdf1d482561f84 + X-Powered-By: + - Express + 
X-Ratelimit-Limit-Requests: + - '3000' + X-Ratelimit-Remaining-Requests: + - '2999' + X-Ratelimit-Reset-Requests: + - 20ms + X-Request-Id: + - d1e10b6e48d2dcb7ed0782d5f86d3b37 + status: + code: 200 + message: OK +- request: + body: '{"input": "Lorem ipsum dolor sit amet", "model": "text-embedding-ada-002", + "encoding_format": "base64"}' + headers: + AGENT-MODE: + - AAAAAAAAAAAAAAAAAAAAAMLheAAaAAAA0%2BuSeid%2BULvsea4JtiGRiSDSJSI%3DEUifiRBkKG5E2XzMDjRfl76ZC9Ub0wnz4XsNiRVBChTYbJcE3F + AGENT-TYPE: + - Auto-GPT-2023-X-TYPE + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '103' + Content-Type: + - application/json + method: POST + uri: https://api.openai.com/v1/embeddings + response: + body: + string: !!binary | + H4sIAAAAAAAAA1R6WQ+zurLl+/kVW/uVPgqEwWa/hTGEwSaQsdVqAUkIEMJoA766/71Fvqvb3S+R + IBZTVa1aa5X/419//fV3k5bPbPz7n7/+/hTD+Pf/WM89kjH5+5+//ue//vrrr7/+4/f7/6181unz + 8Si++W/578/i+3jOf//zF//fZ/7von/++pv3hyPVsvfRm9C34AGn6CZFtyvz5tgqTPUyKDuK08vX + o3MRdEqy7UK6705xzN/yz1Z5VGBHJHWrlJ+y+xIAPLCQkns6MX98vrfwEn9G7CfsDqYkDxXFuXsR + PmjjvVy6cXIUqEkq1TFLwEBP1AXGXJk0mfY30OEH6eDRBD6SLoegn2qmEgisxaV2Ye296bRECnTu + 4wWJwXLyFh9pDrSyXUedHXgYi2W9T/CUSh7Watfsxdt2SlThBS/UTvLOm9fnhTMgGdVuYBczLBxO + cPy+Z8QHc5ku97KNYEaAQKboio3pcNw+YRd5FO9Bb7JllnIEldFQiOonDpiw7hNwcoUI24acp2Nd + bjmYZ0+D6tf3DTDwrjLQYq5AfRRv4pkl2QI4XVmoXvMMzH4eOmryKlTsF+3HWxpi2fLZ+eiotsov + 6Ny5h9AoHIcmNY1T8pi4HFaXuMA746zHc23CBhZKwGF92RPAfKDY8E2DGO8aTmKLfxBqGD/aGpve + duxZL8U+PDz3JZFj0zO2YO59cG0ySq/HeZcuT1KHsFYBTy03XfrRz0NXbbzdEbvkNMVtRVgEJUYz + bPUoN7rvdD4pa/5gT3u6Ma3rJVEz93kn3Ozx8bydniYEIHyh4/MkxwOFu1qtw05BQ0ZoSc8b+QYX + SXFIcb1dGdO4PgdXUemphaJjv2jucwL5kj+wX+MzW6Q7qeD1HSK0Rd0TjMJCLvD2uO/w/ureUwY+ + 0071nCeg/gUqoCvMZAD+cn3ivXkM2LQxTwjWNJ+xP/czm275yMv+yejI6B+UcvKCBwfZftlhQ7mk + HpVrHypp5nyoM2pWz/LMPUF2rjbYywItFu/TqYZCXfEEyiEB42SXAxwq9MD2pxcBqc8sV3/Pn1y3 + 35I9jWmSt34b0H394FNau7uLOgR7FxuzbXhC80A+XKSTSW+nixUL6b42lffBVbDHf1m61McvB8s3 + b9Pn0ajiqdjYEJrabofvweHcTw3LJ7hH7hfxwBH70TXYDsKD9aT+vt2wGqLHCXJd7SDhrcN0st5x + tDGS50wPeUzLWcvGCdA3OGAUtmdjmc/JBUqWOuDAPyQl2VadBKwkHYnw8B7xoo7uAHC6ayl+mR+D + wChXYNdnOhqeb5E1737HqbSAOr4ewNlYrnvT+eUfUq8bq5zmEDU//ECcUsolVXZkC57u/UzRejxf + ldsCu2xnY003nHTq6C4EOlJ0HMmCz+a13kAnixY+HNJrOW+P6QTHQ6AiroQULFnSRGCDtQr7xHp7 + o7YBHXicXyYODN2IG+5DJjBu1ZGE2rFn5NUFC2hfyRPjSduXLDKJBLB0rDCuPl45WQ9MFO4YC0ji + qVFuSebZUN2fDOyUy9ubDdlVoF7KJcrfdwC+wmlwYRS+AOGU8l7OL33qYMadZxygbx636Z7Y8G5w + GPFHxIymVu45FL6uRNpDdOnZdy/ZAJjjEW3b4gLYcN01cMVTGkjY8Eae5yoYD+UTKd+d1vP09HX/ + fK/9tWrKaWtPgypZaYNkRbl783C7LVDArzeJ3u5QLpnc1XC20R7r5w8tf98Ljt9yRu8AbY05ct0a + /uKHyXVniG1+aBTedwPC9wIsF3CSHWjJ8YfIh/TaNw5oXOjGeUdUf3fvyaa+wx+eUnQZvX6yX54O + nOaLUBeAxVs8mg7QD1KfgL1zLFl/v0mA73WLzLrRxGPaAU7JDtKZolb89LNWHmrI66JC+Gr/ZOSC + J1/d7Own1YrMLgerLHOIxkjGOH8808F6pyGcgXtD7M0ab7GhihRPOCwIFu3HYBu5NYGbUI8iLpH6 + 6fqxL6ARqYPysLLZZJktggMa9vSQLK23JPImh1pNRbJ/HT7lCL6OCwhMGN17G69nkaNkiuzdFnIX + 3juDvpK9DqSTWCHR9Y2YqdyhBpud+cT7mhTlhwSnp2Ic+5g6t8BMZ//On+AuuBwQ4O5fVvHB+wmj + 5nDECG3Gfha2sgLz+VWRpYNvb1qC7Q2u+IWA0rdgVNrwpta0mLHhiHL/jqvqpkw7puH9gFtvkaWY + g0qtRHg/y2E6RVuQQAnmCkYnpYzZbSsloNNHRk3VJ/GaDyHkj+OCLTeNShoO5xCEXEQwMtQcDOnT + aQC7eAZ1tke17Jol7NQi2xgIqvuAkena1XJXooYwKZkBuX3mHNzsqcTa2g9++AODezYhvvbO6Z/7 + zencY2Nd39DU5YBfPmpqhKMJ5tjqbKV41jscxOItHq7tXZJfV12lAf+2vEVMOR6a1+5BRnd6x9Ms + NiZY+ythyiU15nCeOEgVp8TxmzlGH+/sG7Ri7OP9k17iGVO1g7k5jWiz1v/Sm3YGE0fJseOOc0mq + cmfCtb/RvchbbFEilQfjxFvUFI+3tX7ySC0Tl2K3uStsuHwvFYzVXYzdQjrFkzv129/9EXtanUHw + o177N+bIrz7Zp08UqOlRStFJMdKF9WkH9YrLyJ2bJWNoRx/KbNze0bK7vjx60jUCwm93pBbpMViE + 
4lsDWnA63e8SBETQOCGMPxcVLe4h7ju6HRRwU3pA1++fkiqrIqh8nxeyXJev0WvKC4JL/B0RvIlh + z973xYRcfhjxygeNpbOfBBABRX/4EevPwQm2+4NJHb/5pjNTu045oP675rdeLvNiJtDZBD21YUnZ + 2J8sH8qxecDOJJllW+GJUxr5YlG8O6N4JMdGV+7ysyLAOEY9je3CV4enY9BgxZM+c9+Fqj64nB7E + UxPT9f1U0OEa6yUqjWXYfCSg3HYJ3rnt0etEI7yoKdAfFNc3Bobb99BBQQ2OhOc2l35+VIYPj8HQ + IcHYG0yEeufCyFUuaz8uDdqfgwsYb3uBvDvN6BcH5iGocGsidepqg/zwi6DWoP5eIcYk6qGvZkQW + sI1N4k12ImbK+SOo5PLmJTasfBiGu3xDDeOrxXSgXxfepbRHX2/TlzTfhAhKJ6HC+zhOYlbXSgIu + LH8SJa0+5YLqhECdXV/UPm0LNidh4gCzLs5U37xOxnTXygJ8erciMn0+vKXU9QxuT5eKsPV9SD5o + JjwlR0T1ozj3LFxmEy5ufaK6pJv9REVVBz+81l+55W0FY4zAEWQJErahD3guWPMJRyVqtfHeL4Kr + mWDlY9hDI+pne4lcEEQRRhVwrv0UuNMEYvtlIRDFm3Tsk7cOvwN5kLFM3/0y5mUNPpHsYiNBvbFo + /O4J8+gzUH133RiTYKq3P/2lLqyvMbtat0C5KCwinMw6brb66EB4eyfUPF926UBTF4JItyXs79sX + mLhWuYD38Xam+BXUxiRliy7zrerjQ3GtjaV1aij1m9BDMBBK8HErqfvhMz0I750nslfzBI9nWPzy + wRvkpJ2gthc3pOPtj7d8t7Ijw3aH6TWOlZg9s1cIqsuxwG7THbzlA+7dn34oueOxn4GhhGB6dRKR + Rrcux8Pmm8D3hjjUDjcl6AVx0X/6hernDy4nFxMErxvLp3tk6PHMKbMPOV1asBOimzdv+XcDboJ5 + pUeh3pXzW4QhmBeJI4D/sng6YWsHtZsiYY0VB7Ao93sDxe+eRxJ/PsWjNB1chVN2JsXndC4ZfGU+ + LD7+E9+2hQcGUb/5UG6Hmrr6VKWDIsZbaLVii37180dvDOfTByOO/7J59+R24FQULbYGaveMEx4F + sA9LQiZ4f3vjln93MDvaIj1UWtBP/mBcJPGwlRG3FdK+xbzmQ0fSXCI0U5JOsMMh/JhGRCbPyNN2 + EFwe4DIysVmQz596hDQ2jghQHABWInWBJtzL6HvMI2PZp0UHy++G/OFD9Kd/PCcDpH22jtEeNvSm + 3B4bkyybqYiHMF9s2HXpATvJzu+H21froPHe47W+eI+MeV9D4Sk/KM7JUC7oxSmQyvXrz/sKd/Tc + gp5uMXXLg97PvRibamULNWq+eGLj5Ywr+EmHBklXeAZr/lfwOg0u9Rl10pl/5icVftMI+7jxPcor + egSjnJtQtxtGxjby2wTNsaPU2E5uSpUuLGDX3Q+oY9y1X+562UB1vGWka+4Jmzu5v8C560uUP5jn + EaM1HUUjmyNRNklcUk78LJAIfoTd+5UYw14ettD1xJj4GaH9ImmODaLGO5J+78z9+H1dMthj+Ult + o1mMH9+BL0Ji7FZFCthGDLfwql0fRD47b8aMR8jBR9Z/EDeezulsbpVOmQQaYaRdFK+NegeBQLo0 + dP+Ll2kQHbzzKKAe2tKSwKhRgG5pPdXG7yld9V8Nqo+XUK9kRzZFstfAIwt2a/3nKaOjxgGmRmeM + q5Ma0zMYt8B26JsI/quNhwuSTBjzi7zytb5vzpPeQd/2e+yapQ/mcjZyuOoJMt4XZLBxLjIoXAdI + 91U3eIM+VAW0xcJd4zMYtNu+JFBGHKIuGM7lOH3bE2TP24Pke/+dFtjauaq/nJ/YrLOyZ8+84MGL + czFFc1b0M8irE1i+IkbK2/K9cVbFCj4leqBBLErpLGxn6Y+fYj08Nf589SGB8gQc6tdYAEx9eNzP + z8BYvfurPq5r8A6vGRJneYrZ1xgKEO5pjqQdeHjT1TZ1yCuZRmPOlD1yUEL9dz0U+5sasFh/NnAO + kgatfMBjA6UuFBsyI3jeHXtRHppFCTSWELVMtZL9+FPuqQjjyW168m4lHm4ZyrB9SGw2lFWI4I/v + 2fNNN5ZXkJIfvuMdn5wYnQ3vBF58LFFzvyHl4te0gU6yYOoOocymSRgWxaEzJXLlkp6a84WD01eK + MTrlvEc5cZzgz08orZ3Sz1Hv+LA4LwXqPw85XRoSmGCtX2wB5eSRH3/R6PtMf/qgRgV24YpX1L3b + G9DoQYegVQ4azWossKXhcwXy3v5Etu1dALOrXp2f3iP5O0zAJBwcCMlnuVA7OFpsSd53Hfjmh6Dp + s5fB1B86F2oiGqgujJt+vHBzBL8oacnh2TbG6lfo6t7mbDLHh75n9rDN4Ob5yMnE+YdUNNjXh8JW + n6jRn1BKeifZwbzQGmq9466fCqvYghsMJXqwqls/nXCwg9nteMP9qTHTuWquGXjn/BE/yqooh0mW + TrDzKxfvuxOLxxFXBOK0Hn96yBuNs6ADbJxjalUyiAeVRDx8Sgdr9fNkwOJWsuHHDwdq2oaVstnw + Lsra3xDZDkm5qL5lA9dsFqwZ26mk15uhq6s/geZEbMtpu8sk6HbenjCmDylZQpmHd1H8Eq59XNJJ + w0v2ix/ZXlrPmDfn1IQEciJi2SFnJBJZAciYA+q9xaZst9PFhheFbDDikls//Pr9eB8WvPo/rJeH + fIGd9z5i1JKqZBCiECof4Y7GD+q8yQvOUNFukoSk8r0AVo29A0EX1IjTXsAYdqeEg/tHXZL+kg79 + jGOjgpb3Fiha+9UyqucO3gt+T3cTb3rzNRIvAF26DF2Dhfcq3jsiuOIttWU7NxgwlRu80jbEZnpo + yuV4t11olUQjW1E/xEM73wqowNcWbYOjBUQjdQgoMtUgJT8wQNdjmFa3HVJfmZDOv/jBTTdQg1d1 + Q5QtfALZQTmTjV5YTHzYBx0i0EKkvLRzutjG0YY3XGl43320cmkC/ATv8JwRuDNwzPLrdoLiLYCE + O+Unj3zxlUBeFxRqa0PDltGYczgPYU513zyk08Ud/suvCxrYeMPNYBPcLfyLoqNRpfM4dReZdsOX + Gmv8BnjMF3VfBRq157fhsdUf+YP/Jh3SkqzXV375ae4cDAa9O4Xyq85kbO8vocEKPlWkZbI76nfX + 0lj1nQ3f4pMgyKRDPJcdJWDVz1g/isd+apHeQCcEEpGkx3vtZztbDfI2ojH3dNK5edg+YCN/J3xB + rFJ83xcb0rd8oElXLylb/WMoG4cbNkFRA+a+MgjuclbhtT/1Q6npF1i2dY/G99XzgMppFbwBvEGb + TW2mk/XYk18+kumzv7MV3zog3kMBW9P5GtN984CgzOeAplzRAMY/Qh9qhwxi+9L2HvmorQLvzYYS + 
/pcvTbDPoDJ+OGxspy4dkdQOkNSnhP7xeyc+6xQdSToObFmP+dB0dPjz5/y5P4KpKE4S5LrKwUfd + cOLpA9sJBm4l4b2o1mDks5stvxt9QZvryzFE+ZBIsFVJhC3eEOLlgiUEV7398+fLRchfJ9j1Tx1J + vU3ZhD5RDfn4RP/g3TRUBgdewmVGEKOM0VVPqmNUHFc/TO6ptH9VCq88NcTC7NJPkzxd1C46UGzz + XMQWNpYKJOfjm7qRn6eTd2xz2ZjeHnXTyionH99D+CmhQQ+9NMTkMnQRXP1DctdGuRx++Pvrb/P8 + jFMW3AYflmLaUeMlRfHsFc0NpBlp0eIHbso+qOFhMdw2dHeKTLY4sInA6q9QzVK+MUt9eYH3d+Bi + G3Xcb74hQfFr8diUzzTtv1oWQi93Iuyu/GomwSmDn1OxJ7TxYPqHn1fqjcfxqv/H4FqZEIEeko19 + N9nkxvIENFqeKcpeu5gN9OvA/KHvidKJu35c/WEwbphHNgKPAXtpTQ7X65F3oV9jXjCFBK71QfJh + uKzxSCpoeLpA4BN8PUFxkgWGN/+MPtNZjKfB0jqwNPcYKWVk9MSL2gYO4MZRLXhV8fDkD9yvX638 + bPHIqJ4bQKrxgfGxinr68cMahlcvI1zfBWDLt94NbmpHpP55dyyne9E6yjXEZ4pNP0iXT5Lp4Mdf + DodULKdigyAssGNS4+by/WxPmgJ7h6iobosLo3E72bALHERdeRpSho8gU8RrK2FDS1rAaE1c+Xi6 + OngvTDhubyG1YU0e0rreT2elvSXgsbe11X/h4w4/6g4qnM2o+0CBMVzbo6IGm/eB7gZBZIs2Sz4Q + xarFXslmNl6VcIG3R7ojm6x+pYQ80wv0M9GhjvTQyj9+YLWXXtg7+E46ZOmUAB84ENtfilImEVmH + nU4Ztk8fviyHsOLh4xkVFNH9o58/h4cLqy2KqeVfmngyDwcCfnrQrWDvLeWTLyDciojMUKExPZKw + Utd8WPUbH48isXi47VSN7vVvHbdGduN/74+91T+ZCUlsaF7tOxJJtivnMjogOPfBFykbXgdMEBUd + /vyj3arPhNX/h9P4EMl8DvR0K6bbLTxTNyTt6u9Om29nA99qvmj+zc9++m/tp/hwdjTAWJopYOv3 + ATUuV9/46WMYCo+QyMI7N6a38c0BTvUWa9Zsp9OmvBTw8Hi/kGoOHWBnMPJwnLbWOn/cG9OozzdA + 57NFRjPN+uYraZHqTNGHwBnmxrzTPQ5kFBLqnZoqpc+Cmeo63/mTX0vnKwWUky2hSIi1cuKD9gmz + PGc4WB4onlJV55T1eyGB9JT98atWPwQ7q78vKLZb/In34TBYbJpPEfrpS+xex7ocL7DdgdVfoZbT + uDGvDnoFjVYaaeBEoB8CV1qUNT7YDsm5nwRTuIFtZh2Qast6Kkx2T6BDGSWMCxKDSNrOVFte4rH2 + /OzLX/6AvBNsbNkV60n/zU6g8aoz9i4ul34ZjAr481vHgX2NP/wm+XyvhDlEKrvoIihQt5sGG9QL + 0umxTZDUJZuAKKKwLenpberKOi+hxnizjOnd7yCMUdBhtBVAObz3zIRAbgvqvjQhJelt4OCzO9l/ + 9Mnoqy6Cydc/0eN+pGVzOHIZVG56grH4vRkzdRiEZ0pO6/xB9ma5bxpYf49XbObhhg2DdeggsCYX + P9Z4zDhkJxgKrxD7cPvqWXG0d/+V3+v8bkbb4xNK1mZY+/tcTh/pgpRnIrnrfEnwhkwqntC8Ng/q + UdtnA+z2EZwzvsI3aETpnCt+ru6R80Xbec/AKJz5EMRP8iJiqkkx5W59BpRvdln9SsxICE861ER/ + QGs9xOx2XiY49/hLlo1cpsypAwJ3g6+hMkGeMZ3O+Z/5FdnoR7H8469z9xIjUf6SeArNnQ6d921c + /SjbEJuNwQH9XOTUkyStF7psDuFLtS4YzR6fLmZVdvAqSv2KL008fz9KqHBytqPROl8eAvuDwF0U + vtQHj/qPXwjdNMbUjsDoFZmYnMDOP4bUtBbRoOVidPDv366A//zXX3/9r98Og7p5PD/rxoDxOY// + /u+tAv9OHsm/eX77b7r9sxOBDEn+/Puf/9qE8HfbN3U7/u+xqZ7f4e9//pL/7Db4e2zG5PP/nP7X + eq///Nf/AQAA//8DADKcAULhIAAA + headers: + Access-Control-Allow-Origin: + - '*' + Alt-Svc: + - h3=":443"; ma=86400, h3-29=":443"; ma=86400 + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Cache-Control: + - private + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 7ccfc67169206302-ORD + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 25 May 2023 18:17:26 GMT + Function-Execution-Id: + - pbmelvnnggiz + Openai-Organization: + - significant-gravitas + Openai-Processing-Ms: + - '21' + Openai-Version: + - '2020-10-01' + Server: + - Google Frontend + Strict-Transport-Security: + - max-age=15724800; includeSubDomains + Vary: + - Accept-Encoding + X-Cloud-Trace-Context: + - 259a6a1840b1fce07231112aa9f7de8f + X-Powered-By: + - Express + X-Ratelimit-Limit-Requests: + - '3000' + X-Ratelimit-Remaining-Requests: + - '2999' + X-Ratelimit-Reset-Requests: + - 20ms + X-Request-Id: + - 734ff3d0816385775c9c45af64184a3a + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/memory/conftest.py b/tests/integration/memory/conftest.py new file mode 100644 index 0000000000..f7fe295af4 --- /dev/null +++ b/tests/integration/memory/conftest.py @@ -0,0 +1,18 @@ +import numpy +import pytest + +from autogpt.memory.vector.memory_item import MemoryItem +from autogpt.memory.vector.utils import 
Embedding + + +@pytest.fixture +def memory_item(mock_embedding: Embedding): + return MemoryItem( + raw_content="test content", + summary="test content summary", + chunks=["test content"], + chunk_summaries=["test content summary"], + e_summary=mock_embedding, + e_chunks=[mock_embedding], + metadata={}, + ) diff --git a/tests/integration/memory/test_json_file_memory.py b/tests/integration/memory/test_json_file_memory.py new file mode 100644 index 0000000000..9134a06969 --- /dev/null +++ b/tests/integration/memory/test_json_file_memory.py @@ -0,0 +1,106 @@ +# sourcery skip: snake-case-functions +"""Tests for JSONFileMemory class""" +import orjson +import pytest + +from autogpt.config import Config +from autogpt.memory.vector import JSONFileMemory, MemoryItem +from autogpt.workspace import Workspace +from tests.utils import requires_api_key + + +@pytest.fixture(autouse=True) +def cleanup_sut_singleton(): + if JSONFileMemory in JSONFileMemory._instances: + del JSONFileMemory._instances[JSONFileMemory] + + +def test_json_memory_init_without_backing_file(config: Config, workspace: Workspace): + index_file = workspace.root / f"{config.memory_index}.json" + + assert not index_file.exists() + JSONFileMemory(config) + assert index_file.exists() + assert index_file.read_text() == "[]" + + +def test_json_memory_init_with_backing_empty_file(config: Config, workspace: Workspace): + index_file = workspace.root / f"{config.memory_index}.json" + index_file.touch() + + assert index_file.exists() + JSONFileMemory(config) + assert index_file.exists() + assert index_file.read_text() == "[]" + + +def test_json_memory_init_with_backing_file(config: Config, workspace: Workspace): + index_file = workspace.root / f"{config.memory_index}.json" + index_file.touch() + + raw_data = {"texts": ["test"]} + data = orjson.dumps(raw_data, option=JSONFileMemory.SAVE_OPTIONS) + with index_file.open("wb") as f: + f.write(data) + + assert index_file.exists() + JSONFileMemory(config) + assert index_file.exists() + assert index_file.read_text() == "[]" + + +def test_json_memory_add(config: Config, memory_item: MemoryItem): + index = JSONFileMemory(config) + index.add(memory_item) + assert index.memories[0] == memory_item + + +def test_json_memory_clear(config: Config, memory_item: MemoryItem): + index = JSONFileMemory(config) + assert index.memories == [] + + index.add(memory_item) + assert index.memories[0] == memory_item, "Cannot test clear() because add() fails" + + index.clear() + assert index.memories == [] + + +def test_json_memory_get(config: Config, memory_item: MemoryItem, mock_get_embedding): + index = JSONFileMemory(config) + assert ( + index.get("test") == None + ), "Cannot test get() because initial index is not empty" + + index.add(memory_item) + retrieved = index.get("test") + assert retrieved is not None + assert retrieved.memory_item == memory_item + + +@pytest.mark.vcr +@requires_api_key("OPENAI_API_KEY") +def test_json_memory_get_relevant(config: Config, patched_api_requestor: None) -> None: + index = JSONFileMemory(config) + mem1 = MemoryItem.from_text_file("Sample text", "sample.txt") + mem2 = MemoryItem.from_text_file("Grocery list:\n- Pancake mix", "groceries.txt") + mem3 = MemoryItem.from_text_file("What is your favorite color?", "color.txt") + lipsum = "Lorem ipsum dolor sit amet" + mem4 = MemoryItem.from_text_file(" ".join([lipsum] * 100), "lipsum.txt") + index.add(mem1) + index.add(mem2) + index.add(mem3) + index.add(mem4) + + assert index.get_relevant(mem1.raw_content, 1)[0].memory_item == mem1 + assert 
index.get_relevant(mem2.raw_content, 1)[0].memory_item == mem2 + assert index.get_relevant(mem3.raw_content, 1)[0].memory_item == mem3 + assert [mr.memory_item for mr in index.get_relevant(lipsum, 2)] == [mem4, mem1] + + +def test_json_memory_get_stats(config: Config, memory_item: MemoryItem) -> None: + index = JSONFileMemory(config) + index.add(memory_item) + n_memories, n_chunks = index.get_stats() + assert n_memories == 1 + assert n_chunks == 1 diff --git a/tests/integration/memory/utils.py b/tests/integration/memory/utils.py new file mode 100644 index 0000000000..374eab3003 --- /dev/null +++ b/tests/integration/memory/utils.py @@ -0,0 +1,44 @@ +import numpy +import pytest +from pytest_mock import MockerFixture + +import autogpt.memory.vector.memory_item as vector_memory_item +import autogpt.memory.vector.providers.base as memory_provider_base +from autogpt.config.config import Config +from autogpt.llm.providers.openai import OPEN_AI_EMBEDDING_MODELS +from autogpt.memory.vector import get_memory +from autogpt.memory.vector.utils import Embedding + + +@pytest.fixture +def embedding_dimension(config: Config): + return OPEN_AI_EMBEDDING_MODELS[config.embedding_model].embedding_dimensions + + +@pytest.fixture +def mock_embedding(embedding_dimension: int) -> Embedding: + return numpy.full((1, embedding_dimension), 0.0255, numpy.float32)[0] + + +@pytest.fixture +def mock_get_embedding(mocker: MockerFixture, embedding_dimension: int): + mocker.patch.object( + vector_memory_item, + "get_embedding", + return_value=[0.0255] * embedding_dimension, + ) + mocker.patch.object( + memory_provider_base, + "get_embedding", + return_value=[0.0255] * embedding_dimension, + ) + + +@pytest.fixture +def memory_none(agent_test_config: Config, mock_get_embedding): + was_memory_backend = agent_test_config.memory_backend + + agent_test_config.set_memory_backend("no_memory") + yield get_memory(agent_test_config) + + agent_test_config.set_memory_backend(was_memory_backend) diff --git a/tests/integration/memory_tests.py b/tests/integration/memory_tests.py deleted file mode 100644 index 66c2331441..0000000000 --- a/tests/integration/memory_tests.py +++ /dev/null @@ -1,50 +0,0 @@ -import random -import string -import sys -import unittest -from pathlib import Path - -from autogpt.config import Config -from autogpt.memory.local import LocalCache - - -class TestLocalCache(unittest.TestCase): - def generate_random_string(self, length): - return "".join(random.choice(string.ascii_letters) for _ in range(length)) - - def setUp(self): - """Set up the test environment for the LocalCache tests.""" - cfg = cfg = Config() - self.cache = LocalCache(cfg) - self.cache.clear() - - # Add example texts to the cache - self.example_texts = [ - "The quick brown fox jumps over the lazy dog", - "I love machine learning and natural language processing", - "The cake is a lie, but the pie is always true", - "ChatGPT is an advanced AI model for conversation", - ] - for text in self.example_texts: - self.cache.add(text) - - # Add some random strings to test noise - for _ in range(5): - self.cache.add(self.generate_random_string(10)) - - def test_get_relevant(self): - """Test getting relevant texts from the cache.""" - query = "I'm interested in artificial intelligence and NLP" - k = 3 - relevant_texts = self.cache.get_relevant(query, k) - - print(f"Top {k} relevant texts for the query '{query}':") - for i, text in enumerate(relevant_texts, start=1): - print(f"{i}. 
diff --git a/tests/integration/memory_tests.py b/tests/integration/memory_tests.py
deleted file mode 100644
index 66c2331441..0000000000
--- a/tests/integration/memory_tests.py
+++ /dev/null
@@ -1,50 +0,0 @@
-import random
-import string
-import sys
-import unittest
-from pathlib import Path
-
-from autogpt.config import Config
-from autogpt.memory.local import LocalCache
-
-
-class TestLocalCache(unittest.TestCase):
-    def generate_random_string(self, length):
-        return "".join(random.choice(string.ascii_letters) for _ in range(length))
-
-    def setUp(self):
-        """Set up the test environment for the LocalCache tests."""
-        cfg = cfg = Config()
-        self.cache = LocalCache(cfg)
-        self.cache.clear()
-
-        # Add example texts to the cache
-        self.example_texts = [
-            "The quick brown fox jumps over the lazy dog",
-            "I love machine learning and natural language processing",
-            "The cake is a lie, but the pie is always true",
-            "ChatGPT is an advanced AI model for conversation",
-        ]
-        for text in self.example_texts:
-            self.cache.add(text)
-
-        # Add some random strings to test noise
-        for _ in range(5):
-            self.cache.add(self.generate_random_string(10))
-
-    def test_get_relevant(self):
-        """Test getting relevant texts from the cache."""
-        query = "I'm interested in artificial intelligence and NLP"
-        k = 3
-        relevant_texts = self.cache.get_relevant(query, k)
-
-        print(f"Top {k} relevant texts for the query '{query}':")
-        for i, text in enumerate(relevant_texts, start=1):
-            print(f"{i}. {text}")
-
-        self.assertEqual(len(relevant_texts), k)
-        self.assertIn(self.example_texts[1], relevant_texts)
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/tests/integration/milvus_memory_tests.py b/tests/integration/milvus_memory_tests.py
deleted file mode 100644
index c4b97d34d6..0000000000
--- a/tests/integration/milvus_memory_tests.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# sourcery skip: snake-case-functions
-"""Tests for the MilvusMemory class."""
-import random
-import string
-import unittest
-
-from autogpt.config import Config
-from autogpt.memory.milvus import MilvusMemory
-
-try:
-
-    class TestMilvusMemory(unittest.TestCase):
-        """Unit tests for the MilvusMemory class."""
-
-        def generate_random_string(self, length: int) -> str:
-            return "".join(random.choice(string.ascii_letters) for _ in range(length))
-
-        def setUp(self) -> None:
-            cfg = Config()
-            cfg.milvus_addr = "localhost:19530"
-            self.memory = MilvusMemory(cfg)
-            self.memory.clear()
-
-            # Add example texts to the cache
-            self.example_texts = [
-                "The quick brown fox jumps over the lazy dog",
-                "I love machine learning and natural language processing",
-                "The cake is a lie, but the pie is always true",
-                "ChatGPT is an advanced AI model for conversation",
-            ]
-
-            for text in self.example_texts:
-                self.memory.add(text)
-
-            # Add some random strings to test noise
-            for _ in range(5):
-                self.memory.add(self.generate_random_string(10))
-
-        def test_get_relevant(self) -> None:
-            """Test getting relevant texts from the cache."""
-            query = "I'm interested in artificial intelligence and NLP"
-            num_relevant = 3
-            relevant_texts = self.memory.get_relevant(query, num_relevant)
-
-            print(f"Top {k} relevant texts for the query '{query}':")
-            for i, text in enumerate(relevant_texts, start=1):
-                print(f"{i}. {text}")
-
-            self.assertEqual(len(relevant_texts), k)
-            self.assertIn(self.example_texts[1], relevant_texts)
-
-except:
-    print(
-        "Skipping tests/integration/milvus_memory_tests.py as Milvus is not installed."
-    )
diff --git a/tests/integration/test_llm_utils.py b/tests/integration/test_llm_utils.py
deleted file mode 100644
index fefc239c1a..0000000000
--- a/tests/integration/test_llm_utils.py
+++ /dev/null
@@ -1,65 +0,0 @@
-import string
-from unittest.mock import MagicMock
-
-import pytest
-from numpy.random import RandomState
-from pytest_mock import MockerFixture
-
-from autogpt.config import Config
-from autogpt.llm import llm_utils
-from autogpt.llm.api_manager import ApiManager
-from autogpt.llm.modelsinfo import COSTS
-from tests.utils import requires_api_key
-
-
-@pytest.fixture(scope="session")
-def random_large_string():
-    """Big string used to overwhelm token limits."""
-    seed = 42
-    n_characters = 30_000
-    random = RandomState(seed)
-    return "".join(random.choice(list(string.ascii_lowercase), size=n_characters))
-
-
-@pytest.fixture()
-def api_manager(mocker: MockerFixture):
-    api_manager = ApiManager()
-    mocker.patch.multiple(
-        api_manager,
-        total_prompt_tokens=0,
-        total_completion_tokens=0,
-        total_cost=0,
-    )
-    yield api_manager
-
-
-@pytest.fixture()
-def spy_create_embedding(mocker: MockerFixture):
-    return mocker.spy(llm_utils, "create_embedding")
-
-
-@pytest.mark.vcr
-@requires_api_key("OPENAI_API_KEY")
-def test_get_ada_embedding(
-    config: Config,
-    api_manager: ApiManager,
-    spy_create_embedding: MagicMock,
-    patched_api_requestor,
-):
-    token_cost = COSTS[config.embedding_model]["prompt"]
-    llm_utils.get_ada_embedding("test")
-
-    spy_create_embedding.assert_called_once_with("test", model=config.embedding_model)
-
-    assert (prompt_tokens := api_manager.get_total_prompt_tokens()) == 1
-    assert api_manager.get_total_completion_tokens() == 0
-    assert api_manager.get_total_cost() == (prompt_tokens * token_cost) / 1000
-
-
-@pytest.mark.vcr
-@requires_api_key("OPENAI_API_KEY")
-def test_get_ada_embedding_large_context(random_large_string):
-    # This test should be able to mock the openai call after we have a fix. We don't need
-    # to hit the API to test the logic of the function (so not using vcr). This is a quick
-    # regression test to document the issue.
-    llm_utils.get_ada_embedding(random_large_string)
diff --git a/tests/integration/test_local_cache.py b/tests/integration/test_local_cache.py
deleted file mode 100644
index 808f119a73..0000000000
--- a/tests/integration/test_local_cache.py
+++ /dev/null
@@ -1,110 +0,0 @@
-# sourcery skip: snake-case-functions
-"""Tests for LocalCache class"""
-import unittest
-
-import orjson
-import pytest
-
-from autogpt.memory.local import EMBED_DIM, SAVE_OPTIONS
-from autogpt.memory.local import LocalCache as LocalCache_
-from tests.utils import requires_api_key
-
-
-@pytest.fixture
-def LocalCache():
-    # Hack, real gross. Singletons are not good times.
-    if LocalCache_ in LocalCache_._instances:
-        del LocalCache_._instances[LocalCache_]
-    return LocalCache_
-
-
-@pytest.fixture
-def mock_embed_with_ada(mocker):
-    mocker.patch(
-        "autogpt.memory.local.get_ada_embedding",
-        return_value=[0.1] * EMBED_DIM,
-    )
-
-
-def test_init_without_backing_file(LocalCache, config, workspace):
-    cache_file = workspace.root / f"{config.memory_index}.json"
-
-    assert not cache_file.exists()
-    LocalCache(config)
-    assert cache_file.exists()
-    assert cache_file.read_text() == "{}"
-
-
-def test_init_with_backing_empty_file(LocalCache, config, workspace):
-    cache_file = workspace.root / f"{config.memory_index}.json"
-    cache_file.touch()
-
-    assert cache_file.exists()
-    LocalCache(config)
-    assert cache_file.exists()
-    assert cache_file.read_text() == "{}"
-
-
-def test_init_with_backing_file(LocalCache, config, workspace):
-    cache_file = workspace.root / f"{config.memory_index}.json"
-    cache_file.touch()
-
-    raw_data = {"texts": ["test"]}
-    data = orjson.dumps(raw_data, option=SAVE_OPTIONS)
-    with cache_file.open("wb") as f:
-        f.write(data)
-
-    assert cache_file.exists()
-    LocalCache(config)
-    assert cache_file.exists()
-    assert cache_file.read_text() == "{}"
-
-
-def test_add(LocalCache, config, mock_embed_with_ada):
-    cache = LocalCache(config)
-    cache.add("test")
-    assert cache.data.texts == ["test"]
-    assert cache.data.embeddings.shape == (1, EMBED_DIM)
-
-
-def test_clear(LocalCache, config, mock_embed_with_ada):
-    cache = LocalCache(config)
-    assert cache.data.texts == []
-    assert cache.data.embeddings.shape == (0, EMBED_DIM)
-
-    cache.add("test")
-    assert cache.data.texts == ["test"]
-    assert cache.data.embeddings.shape == (1, EMBED_DIM)
-
-    cache.clear()
-    assert cache.data.texts == []
-    assert cache.data.embeddings.shape == (0, EMBED_DIM)
-
-
-def test_get(LocalCache, config, mock_embed_with_ada):
-    cache = LocalCache(config)
-    assert cache.get("test") == []
-
-    cache.add("test")
-    assert cache.get("test") == ["test"]
-
-
-@pytest.mark.vcr
-@requires_api_key("OPENAI_API_KEY")
-def test_get_relevant(LocalCache, config, patched_api_requestor) -> None:
-    cache = LocalCache(config)
-    text1 = "Sample text 1"
-    text2 = "Sample text 2"
-    cache.add(text1)
-    cache.add(text2)
-
-    result = cache.get_relevant(text1, 1)
-    assert result == [text1]
-
-
-def test_get_stats(LocalCache, config, mock_embed_with_ada) -> None:
-    cache = LocalCache(config)
-    text = "Sample text"
-    cache.add(text)
-    stats = cache.get_stats()
-    assert stats == (1, cache.data.embeddings.shape)
diff --git a/tests/integration/test_memory_management.py b/tests/integration/test_memory_management.py
deleted file mode 100644
index 22ade7b0be..0000000000
--- a/tests/integration/test_memory_management.py
+++ /dev/null
@@ -1,67 +0,0 @@
-import json
-
-import pytest
-
-from autogpt.config import Config
-from autogpt.memory import get_memory
-from autogpt.memory_management.store_memory import (
-    save_memory_trimmed_from_context_window,
-)
-from tests.utils import requires_api_key
-
-
-@pytest.fixture
-def message_history_fixture():
-    assistant_reply = {
-        "thoughts": {
-            "text": "thoughts",
-            "reasoning": "reasoning",
-            "plan": "plan",
-            "criticism": "criticism",
-            "speak": "speak",
-        },
-        "command": {"name": "google", "args": {"query": "google_query"}},
-    }
-    return [
-        {"content": json.dumps(assistant_reply, indent=4)},
-        {"content": "Command Result: Important Information."},
-    ]
-
-
-@pytest.fixture
-def expected_permanent_memory() -> str:
-    return """Assistant Reply: {
-    "thoughts": {
-        "text": "thoughts",
-        "reasoning": "reasoning",
-        "plan": "plan",
-        "criticism": "criticism",
-        "speak": "speak"
-    },
-    "command": {
-        "name": "google",
-        "args": {
-            "query": "google_query"
-        }
-    }
-}
-Result: None
-Human Feedback:Command Result: Important Information."""
-
-
-@requires_api_key("OPENAI_API_KEY")
-@pytest.mark.vcr
-def test_save_memory_trimmed_from_context_window(
-    message_history_fixture,
-    expected_permanent_memory,
-    config: Config,
-    patched_api_requestor,
-):
-    next_message_to_add_index = len(message_history_fixture) - 1
-    memory = get_memory(config, init=True)
-    save_memory_trimmed_from_context_window(
-        message_history_fixture, next_message_to_add_index, memory
-    )
-
-    memory_found = memory.get_relevant("Important Information", 5)
-    assert memory_found[0] == expected_permanent_memory
diff --git a/tests/integration/test_plugins.py b/tests/integration/test_plugins.py
new file mode 100644
index 0000000000..d5f62cca5a
--- /dev/null
+++ b/tests/integration/test_plugins.py
@@ -0,0 +1,71 @@
+import pytest
+
+from autogpt.config import Config
+from autogpt.plugins import scan_plugins
+
+PLUGINS_TEST_DIR = "tests/unit/data/test_plugins"
+PLUGIN_TEST_OPENAI = "https://weathergpt.vercel.app/"
+
+
+@pytest.fixture
+def mock_config_denylist_allowlist_check():
+    class MockConfig:
+        """Mock config object for testing the denylist_allowlist_check function"""
+
+        plugins_denylist = ["BadPlugin"]
+        plugins_allowlist = ["GoodPlugin"]
+        authorise_key = "y"
+        exit_key = "n"
+
+    return MockConfig()
+
+
+@pytest.fixture
+def config_with_plugins():
+    """Mock config object for testing the scan_plugins function"""
+    # Test that the function returns the correct number of plugins
+    cfg = Config()
+    cfg.plugins_dir = PLUGINS_TEST_DIR
+    cfg.plugins_openai = ["https://weathergpt.vercel.app/"]
+    return cfg
+
+
+@pytest.fixture
+def mock_config_openai_plugin():
+    """Mock config object for testing the scan_plugins function"""
+
+    class MockConfig:
+        """Mock config object for testing the scan_plugins function"""
+
+        plugins_dir = PLUGINS_TEST_DIR
+        plugins_openai = [PLUGIN_TEST_OPENAI]
+        plugins_denylist = ["AutoGPTPVicuna"]
+        plugins_allowlist = [PLUGIN_TEST_OPENAI]
+
+    return MockConfig()
+
+
+def test_scan_plugins_openai(mock_config_openai_plugin):
+    # Test that the function returns the correct number of plugins
+    result = scan_plugins(mock_config_openai_plugin, debug=True)
+    assert len(result) == 1
+
+
+@pytest.fixture
+def mock_config_generic_plugin():
+    """Mock config object for testing the scan_plugins function"""
+
+    # Test that the function returns the correct number of plugins
+    class MockConfig:
+        plugins_dir = PLUGINS_TEST_DIR
+        plugins_openai = []
+        plugins_denylist = []
+        plugins_allowlist = ["AutoGPTPVicuna"]
+
+    return MockConfig()
+
+
+def test_scan_plugins_generic(mock_config_generic_plugin):
+    # Test that the function returns the correct number of plugins
+    result = scan_plugins(mock_config_generic_plugin, debug=True)
+    assert len(result) == 1
diff --git a/tests/integration/weaviate_memory_tests.py b/tests/integration/weaviate_memory_tests.py
deleted file mode 100644
index 2cea871c61..0000000000
--- a/tests/integration/weaviate_memory_tests.py
+++ /dev/null
@@ -1,120 +0,0 @@
-import unittest
-from uuid import uuid4
-
-from weaviate import Client
-from weaviate.util import get_valid_uuid
-
-from autogpt.config import Config
-from autogpt.llm import get_ada_embedding
-from autogpt.memory.weaviate import WeaviateMemory
-
-
-class TestWeaviateMemory(unittest.TestCase):
-    cfg = None
-    client = None
-    index = None
-
-    @classmethod
-    def setUpClass(cls):
-        """Set up the test environment for the WeaviateMemory tests."""
-        # only create the connection to weaviate once
-        cls.cfg = Config()
-
-        if cls.cfg.use_weaviate_embedded:
-            from weaviate.embedded import EmbeddedOptions
-
-            cls.client = Client(
-                embedded_options=EmbeddedOptions(
-                    hostname=cls.cfg.weaviate_host,
-                    port=int(cls.cfg.weaviate_port),
-                    persistence_data_path=cls.cfg.weaviate_embedded_path,
-                )
-            )
-        else:
-            cls.client = Client(
-                f"{cls.cfg.weaviate_protocol}://{cls.cfg.weaviate_host}:{self.cfg.weaviate_port}"
-            )
-
-        cls.index = WeaviateMemory.format_classname(cls.cfg.memory_index)
-
-    """
-    In order to run these tests you will need a local instance of
-    Weaviate running. Refer to https://weaviate.io/developers/weaviate/installation/docker-compose
-    for creating local instances using docker.
-    Alternatively in your .env file set the following environmental variables to run Weaviate embedded (see: https://weaviate.io/developers/weaviate/installation/embedded):
-
-        USE_WEAVIATE_EMBEDDED=True
-        WEAVIATE_EMBEDDED_PATH="/home/me/.local/share/weaviate"
-    """
-
-    def setUp(self):
-        """Set up the test environment for the WeaviateMemory tests."""
-        try:
-            self.client.schema.delete_class(self.index)
-        except:
-            pass
-
-        self.memory = WeaviateMemory(self.cfg)
-
-    def test_add(self):
-        """Test adding a text to the cache"""
-        doc = "You are a Titan name Thanos and you are looking for the Infinity Stones"
-        self.memory.add(doc)
-        result = self.client.query.get(self.index, ["raw_text"]).do()
-        actual = result["data"]["Get"][self.index]
-
-        self.assertEqual(len(actual), 1)
-        self.assertEqual(actual[0]["raw_text"], doc)
-
-    def test_get(self):
-        """Test getting a text from the cache"""
-        doc = "You are an Avenger and swore to defend the Galaxy from a menace called Thanos"
-        # add the document to the cache
-        with self.client.batch as batch:
-            batch.add_data_object(
-                uuid=get_valid_uuid(uuid4()),
-                data_object={"raw_text": doc},
-                class_name=self.index,
-                vector=get_ada_embedding(doc),
-            )
-
-            batch.flush()
-
-        actual = self.memory.get(doc)
-
-        self.assertEqual(len(actual), 1)
-        self.assertEqual(actual[0], doc)
-
-    def test_get_stats(self):
-        """Test getting the stats of the cache"""
-        docs = [
-            "You are now about to count the number of docs in this index",
-            "And then you about to find out if you can count correctly",
-        ]
-
-        [self.memory.add(doc) for doc in docs]
-
-        stats = self.memory.get_stats()
-
-        self.assertTrue(stats)
-        self.assertTrue("count" in stats)
-        self.assertEqual(stats["count"], 2)
-
-    def test_clear(self):
-        """Test clearing the cache"""
-        docs = [
-            "Shame this is the last test for this class",
-            "Testing is fun when someone else is doing it",
-        ]
-
-        [self.memory.add(doc) for doc in docs]
-
-        self.assertEqual(self.memory.get_stats()["count"], 2)
-
-        self.memory.clear()
-
-        self.assertEqual(self.memory.get_stats()["count"], 0)
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/tests/milvus_memory_test.py b/tests/milvus_memory_test.py
deleted file mode 100644
index 9672326be7..0000000000
--- a/tests/milvus_memory_test.py
+++ /dev/null
@@ -1,72 +0,0 @@
-# sourcery skip: snake-case-functions
-"""Tests for the MilvusMemory class."""
-import os
-import sys
-import unittest
-
-try:
-    from autogpt.config import Config
-    from autogpt.memory.milvus import MilvusMemory
-
-    def mock_config() -> Config:
-        """Mock the config object for testing purposes."""
-
-        # Return a mock config object with the required attributes
-        class MockConfig(Config):
-            debug_mode = False
-            continuous_mode = False
-            speak_mode = False
-            milvus_collection = "autogpt"
-            milvus_addr = "localhost:19530"
-
-        return MockConfig()
-
-    class TestMilvusMemory(unittest.TestCase):
-        """Tests for the MilvusMemory class."""
-
-        def setUp(self) -> None:
-            """Set up the test environment"""
-            self.cfg = mock_config()
-            self.memory = MilvusMemory(self.cfg)
-
-        def test_add(self) -> None:
-            """Test adding a text to the cache"""
-            text = "Sample text"
-            self.memory.clear()
-            self.memory.add(text)
-            result = self.memory.get(text)
-            self.assertEqual([text], result)
-
-        def test_clear(self) -> None:
-            """Test clearing the cache"""
-            self.memory.clear()
-            self.assertEqual(self.memory.collection.num_entities, 0)
-
-        def test_get(self) -> None:
-            """Test getting a text from the cache"""
-            text = "Sample text"
-            self.memory.clear()
-            self.memory.add(text)
-            result = self.memory.get(text)
-            self.assertEqual(result, [text])
-
-        def test_get_relevant(self) -> None:
-            """Test getting relevant texts from the cache"""
-            text1 = "Sample text 1"
-            text2 = "Sample text 2"
-            self.memory.clear()
-            self.memory.add(text1)
-            self.memory.add(text2)
-            result = self.memory.get_relevant(text1, 1)
-            self.assertEqual(result, [text1])
-
-        def test_get_stats(self) -> None:
-            """Test getting the cache stats"""
-            text = "Sample text"
-            self.memory.clear()
-            self.memory.add(text)
-            stats = self.memory.get_stats()
-            self.assertEqual(15, len(stats))
-
-except ImportError as err:
-    print(f"Skipping tests for MilvusMemory: {err}")
diff --git a/tests/test_agent.py b/tests/test_agent.py
index e859673159..4f05e36b12 100644
--- a/tests/test_agent.py
+++ b/tests/test_agent.py
@@ -3,17 +3,16 @@ from unittest.mock import MagicMock
 import pytest
 
 from autogpt.agent import Agent
-from autogpt.config import Config
+from autogpt.config import AIConfig
 
 
 @pytest.fixture
 def agent():
     ai_name = "Test AI"
     memory = MagicMock()
-    full_message_history = []
     next_action_count = 0
     command_registry = MagicMock()
-    config = Config()
+    config = AIConfig()
     system_prompt = "System prompt"
     triggering_prompt = "Triggering prompt"
     workspace_directory = "workspace_directory"
@@ -21,7 +20,6 @@ def agent():
     agent = Agent(
         ai_name,
         memory,
-        full_message_history,
        next_action_count,
         command_registry,
         config,
@@ -32,10 +30,10 @@ def agent():
     return agent
 
 
-def test_agent_initialization(agent):
+def test_agent_initialization(agent: Agent):
     assert agent.ai_name == "Test AI"
     assert agent.memory == agent.memory
-    assert agent.full_message_history == []
+    assert agent.history.messages == []
     assert agent.next_action_count == 0
     assert agent.command_registry == agent.command_registry
     assert agent.config == agent.config
diff --git a/tests/test_agent_manager.py b/tests/test_agent_manager.py
index e7e696ae5c..4b0a01bcd2 100644
--- a/tests/test_agent_manager.py
+++ b/tests/test_agent_manager.py
@@ -1,15 +1,14 @@
 import pytest
 
 from autogpt.agent.agent_manager import AgentManager
-from autogpt.llm import create_chat_completion
+from autogpt.llm.chat import create_chat_completion
 
 
 @pytest.fixture
 def agent_manager():
     # Hack, real gross. Singletons are not good times.
-    if AgentManager in AgentManager._instances:
-        del AgentManager._instances[AgentManager]
-    return AgentManager()
+    yield AgentManager()
+    del AgentManager._instances[AgentManager]
 
 
 @pytest.fixture
@@ -37,28 +36,28 @@ def mock_create_chat_completion(mocker):
     return mock_create_chat_completion
 
 
-def test_create_agent(agent_manager, task, prompt, model):
+def test_create_agent(agent_manager: AgentManager, task, prompt, model):
     key, agent_reply = agent_manager.create_agent(task, prompt, model)
     assert isinstance(key, int)
     assert isinstance(agent_reply, str)
     assert key in agent_manager.agents
 
 
-def test_message_agent(agent_manager, task, prompt, model):
+def test_message_agent(agent_manager: AgentManager, task, prompt, model):
     key, _ = agent_manager.create_agent(task, prompt, model)
     user_message = "Please translate 'Good morning' to French."
     agent_reply = agent_manager.message_agent(key, user_message)
     assert isinstance(agent_reply, str)
 
 
-def test_list_agents(agent_manager, task, prompt, model):
+def test_list_agents(agent_manager: AgentManager, task, prompt, model):
     key, _ = agent_manager.create_agent(task, prompt, model)
     agents_list = agent_manager.list_agents()
     assert isinstance(agents_list, list)
     assert (key, task) in agents_list
 
 
-def test_delete_agent(agent_manager, task, prompt, model):
+def test_delete_agent(agent_manager: AgentManager, task, prompt, model):
     key, _ = agent_manager.create_agent(task, prompt, model)
     success = agent_manager.delete_agent(key)
     assert success
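A side-by-side sketch of the fixture change above: the old fixture scrubbed the singleton registry before handing it out, while the new one yields the instance and tears down afterwards. The Registry class here is illustrative, standing in for any metaclass-based singleton with an _instances registry like the one in the diff.

import pytest


class Registry:
    _instances: dict = {}


@pytest.fixture
def registry_old_style():
    # Pre-test cleanup: the registry is only reset when the *next* test runs.
    Registry._instances.pop(Registry, None)
    return Registry


@pytest.fixture
def registry_new_style():
    # Yield, then tear down: the registry is left clean as soon as this test
    # finishes, so later tests never see its leftover state.
    yield Registry
    Registry._instances.pop(Registry, None)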
diff --git a/tests/test_api_manager.py b/tests/test_api_manager.py
index bfb8421302..9585fba740 100644
--- a/tests/test_api_manager.py
+++ b/tests/test_api_manager.py
@@ -2,7 +2,7 @@
 from unittest.mock import MagicMock, patch
 
 import pytest
 
-from autogpt.llm import COSTS, ApiManager
+from autogpt.llm.api_manager import COSTS, ApiManager
 
 api_manager = ApiManager()
diff --git a/tests/unit/test_chat.py b/tests/unit/test_chat.py
deleted file mode 100644
index 77ff94d7b2..0000000000
--- a/tests/unit/test_chat.py
+++ /dev/null
@@ -1,78 +0,0 @@
-# Generated by CodiumAI
-import time
-from unittest.mock import patch
-
-from autogpt.llm import create_chat_message, generate_context
-
-
-def test_happy_path_role_content():
-    """Test that the function returns a dictionary with the correct keys and values when valid strings are provided for role and content."""
-    result = create_chat_message("system", "Hello, world!")
-    assert result == {"role": "system", "content": "Hello, world!"}
-
-
-def test_empty_role_content():
-    """Test that the function returns a dictionary with the correct keys and values when empty strings are provided for role and content."""
-    result = create_chat_message("", "")
-    assert result == {"role": "", "content": ""}
-
-
-def test_generate_context_empty_inputs(mocker):
-    """Test the behavior of the generate_context function when all input parameters are empty."""
-    # Mock the time.strftime function to return a fixed value
-    mocker.patch("time.strftime", return_value="Sat Apr 15 00:00:00 2023")
-    # Arrange
-    prompt = ""
-    relevant_memory = ""
-    full_message_history = []
-    model = "gpt-3.5-turbo-0301"
-
-    # Act
-    result = generate_context(prompt, relevant_memory, full_message_history, model)
-
-    # Assert
-    expected_result = (
-        -1,
-        32,
-        2,
-        [
-            {"role": "system", "content": ""},
-            {
-                "role": "system",
-                "content": f"The current time and date is {time.strftime('%c')}",
-            },
-        ],
-    )
-    assert result == expected_result
-
-
-def test_generate_context_valid_inputs():
-    """Test that the function successfully generates a current_context given valid inputs."""
-    # Given
-    prompt = "What is your favorite color?"
-    relevant_memory = "You once painted your room blue."
-    full_message_history = [
-        create_chat_message("user", "Hi there!"),
-        create_chat_message("assistant", "Hello! How can I assist you today?"),
-        create_chat_message("user", "Can you tell me a joke?"),
-        create_chat_message(
-            "assistant",
-            "Why did the tomato turn red? Because it saw the salad dressing!",
-        ),
-        create_chat_message("user", "Haha, that's funny."),
-    ]
-    model = "gpt-3.5-turbo-0301"
-
-    # When
-    result = generate_context(prompt, relevant_memory, full_message_history, model)
-
-    # Then
-    assert isinstance(result[0], int)
-    assert isinstance(result[1], int)
-    assert isinstance(result[2], int)
-    assert isinstance(result[3], list)
-    assert result[0] >= 0
-    assert result[2] >= 0
-    assert result[1] >= 0
-    assert len(result[3]) >= 2  # current_context should have at least 2 messages
-    assert result[1] <= 2048  # token limit for GPT-3.5-turbo-0301 is 2048 tokens
diff --git a/tests/unit/test_file_operations.py b/tests/unit/test_file_operations.py
index 98d4efb77e..7f8853481f 100644
--- a/tests/unit/test_file_operations.py
+++ b/tests/unit/test_file_operations.py
@@ -13,6 +13,8 @@ from pytest_mock import MockerFixture
 
 import autogpt.commands.file_operations as file_ops
 from autogpt.config import Config
+from autogpt.memory.vector.memory_item import MemoryItem
+from autogpt.memory.vector.utils import Embedding
 from autogpt.utils import readable_file_size
 from autogpt.workspace import Workspace
 
@@ -22,6 +24,23 @@ def file_content():
     return "This is a test file.\n"
 
 
+@pytest.fixture()
+def mock_MemoryItem_from_text(mocker: MockerFixture, mock_embedding: Embedding):
+    mocker.patch.object(
+        file_ops.MemoryItem,
+        "from_text",
+        new=lambda content, source_type, metadata: MemoryItem(
+            raw_content=content,
+            summary=f"Summary of content '{content}'",
+            chunk_summaries=[f"Summary of content '{content}'"],
+            chunks=[content],
+            e_summary=mock_embedding,
+            e_chunks=[mock_embedding],
+            metadata=metadata | {"source_type": source_type},
+        ),
+    )
+
+
 @pytest.fixture()
 def test_file_path(config, workspace: Workspace):
     return workspace.get_path("test_file.txt")
@@ -188,7 +207,11 @@ def test_split_file(max_length, overlap, content, expected):
     )
 
 
-def test_read_file(test_file_with_content_path: Path, file_content):
+def test_read_file(
+    mock_MemoryItem_from_text,
+    test_file_with_content_path: Path,
+    file_content,
+):
     content = file_ops.read_file(test_file_with_content_path)
     assert content == file_content
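A generic sketch of the patching trick used by mock_MemoryItem_from_text above: swap a factory method for a lambda that builds a cheap stand-in, so the code under test never reaches summarization or embedding calls. The Article class and its build method are hypothetical, used only to show the pattern.

from dataclasses import dataclass

import pytest
from pytest_mock import MockerFixture


@dataclass
class Article:
    raw_content: str
    summary: str

    @staticmethod
    def build(content: str) -> "Article":
        raise RuntimeError("expensive path: would call an external service")


@pytest.fixture
def mock_article_build(mocker: MockerFixture):
    # Replace the factory wholesale; the lambda returns a fully-formed stub.
    mocker.patch.object(
        Article,
        "build",
        new=lambda content: Article(raw_content=content, summary=f"Summary of '{content}'"),
    )


def test_build_is_cheap(mock_article_build):
    art = Article.build("hello")
    assert art.summary == "Summary of 'hello'"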
diff --git a/tests/unit/test_get_self_feedback.py b/tests/unit/test_get_self_feedback.py
index 5e59757e16..64268898f4 100644
--- a/tests/unit/test_get_self_feedback.py
+++ b/tests/unit/test_get_self_feedback.py
@@ -2,7 +2,7 @@
 from datetime import datetime
 
 from autogpt.agent.agent import Agent
 from autogpt.config import AIConfig
-from autogpt.llm import create_chat_completion
+from autogpt.llm.chat import create_chat_completion
 from autogpt.log_cycle.log_cycle import LogCycleHandler
diff --git a/tests/unit/test_llm_utils.py b/tests/unit/test_llm_utils.py
index e11b368e14..8c4de115df 100644
--- a/tests/unit/test_llm_utils.py
+++ b/tests/unit/test_llm_utils.py
@@ -1,11 +1,9 @@
 from unittest.mock import patch
 
 import pytest
-from openai import InvalidRequestError
 from openai.error import APIError, RateLimitError
 
-from autogpt.llm import llm_utils
-from autogpt.llm.llm_utils import check_model
+from autogpt.llm import utils as llm_utils
 
 
 @pytest.fixture(params=[RateLimitError, APIError])
@@ -107,36 +105,6 @@ def test_retry_openapi_other_api_error(capsys):
     assert output.out == ""
 
 
-def test_chunked_tokens():
-    text = "Auto-GPT is an experimental open-source application showcasing the capabilities of the GPT-4 language model"
-    expected_output = [
-        (
-            13556,
-            12279,
-            2898,
-            374,
-            459,
-            22772,
-            1825,
-            31874,
-            3851,
-            67908,
-            279,
-            17357,
-            315,
-            279,
-            480,
-            2898,
-            12,
-            19,
-            4221,
-            1646,
-        )
-    ]
-    output = list(llm_utils.chunked_tokens(text, "cl100k_base", 8191))
-    assert output == expected_output
-
-
 def test_check_model(api_manager):
     """
     Test if check_model() returns original model when valid.
@@ -145,7 +113,7 @@
     with patch("openai.Model.list") as mock_list_models:
         # Test when correct model is returned
         mock_list_models.return_value = {"data": [{"id": "gpt-4"}]}
-        result = check_model("gpt-4", "smart_llm_model")
+        result = llm_utils.check_model("gpt-4", "smart_llm_model")
         assert result == "gpt-4"
 
         # Reset api manager models
@@ -153,7 +121,7 @@
         # Test when incorrect model is returned
         mock_list_models.return_value = {"data": [{"id": "gpt-3.5-turbo"}]}
-        result = check_model("gpt-4", "fast_llm_model")
+        result = llm_utils.check_model("gpt-4", "fast_llm_model")
         assert result == "gpt-3.5-turbo"
 
         # Reset api manager models
diff --git a/tests/unit/test_plugins.py b/tests/unit/test_plugins.py
index 08c9114c76..885ec4c331 100644
--- a/tests/unit/test_plugins.py
+++ b/tests/unit/test_plugins.py
@@ -1,16 +1,11 @@
 import pytest
 
 from autogpt.config import Config
-from autogpt.plugins import (
-    denylist_allowlist_check,
-    inspect_zip_for_modules,
-    scan_plugins,
-)
+from autogpt.plugins import denylist_allowlist_check, inspect_zip_for_modules
 
 PLUGINS_TEST_DIR = "tests/unit/data/test_plugins"
 PLUGIN_TEST_ZIP_FILE = "Auto-GPT-Plugin-Test-master.zip"
 PLUGIN_TEST_INIT_PY = "Auto-GPT-Plugin-Test-master/src/auto_gpt_vicuna/__init__.py"
-PLUGIN_TEST_OPENAI = "https://weathergpt.vercel.app/"
 
 
 def test_inspect_zip_for_modules():
@@ -77,54 +72,3 @@ def test_denylist_allowlist_check_user_input_invalid(
     assert not denylist_allowlist_check(
         "UnknownPlugin", mock_config_denylist_allowlist_check
     )
-
-
-@pytest.fixture
-def config_with_plugins():
-    """Mock config object for testing the scan_plugins function"""
-    # Test that the function returns the correct number of plugins
-    cfg = Config()
-    cfg.plugins_dir = PLUGINS_TEST_DIR
-    cfg.plugins_openai = ["https://weathergpt.vercel.app/"]
-    return cfg
-
-
-@pytest.fixture
-def mock_config_openai_plugin():
-    """Mock config object for testing the scan_plugins function"""
-
-    class MockConfig:
-        """Mock config object for testing the scan_plugins function"""
-
-        plugins_dir = PLUGINS_TEST_DIR
-        plugins_openai = [PLUGIN_TEST_OPENAI]
-        plugins_denylist = ["AutoGPTPVicuna"]
-        plugins_allowlist = [PLUGIN_TEST_OPENAI]
-
-    return MockConfig()
-
-
-def test_scan_plugins_openai(mock_config_openai_plugin):
-    # Test that the function returns the correct number of plugins
-    result = scan_plugins(mock_config_openai_plugin, debug=True)
-    assert len(result) == 1
-
-
-@pytest.fixture
-def mock_config_generic_plugin():
-    """Mock config object for testing the scan_plugins function"""
-
-    # Test that the function returns the correct number of plugins
-    class MockConfig:
-        plugins_dir = PLUGINS_TEST_DIR
-        plugins_openai = []
-        plugins_denylist = []
-        plugins_allowlist = ["AutoGPTPVicuna"]
-
-    return MockConfig()
-
-
-def test_scan_plugins_generic(mock_config_generic_plugin):
-    # Test that the function returns the correct number of plugins
-    result = scan_plugins(mock_config_generic_plugin, debug=True)
-    assert len(result) == 1
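A hedged sketch of the fallback logic that test_check_model above exercises: ask which models are available and quietly fall back when the requested one is missing. check_model_fallback is illustrative, not the real autogpt implementation; a MagicMock stands in for the model-list call so the sketch needs no network or openai import.

from unittest.mock import MagicMock


def check_model_fallback(requested: str, available: list[str], fallback: str) -> str:
    # Return the requested model if the API reports it, otherwise the fallback.
    return requested if requested in available else fallback


def test_check_model_fallback():
    mock_list_models = MagicMock(return_value={"data": [{"id": "gpt-3.5-turbo"}]})
    available = [m["id"] for m in mock_list_models()["data"]]
    assert check_model_fallback("gpt-4", available, "gpt-3.5-turbo") == "gpt-3.5-turbo"
    assert check_model_fallback("gpt-3.5-turbo", available, "gpt-4") == "gpt-3.5-turbo"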
diff --git a/tests/test_token_counter.py b/tests/unit/test_token_counter.py
similarity index 56%
rename from tests/test_token_counter.py
rename to tests/unit/test_token_counter.py
index 966a0aff11..feed18b138 100644
--- a/tests/test_token_counter.py
+++ b/tests/unit/test_token_counter.py
@@ -1,20 +1,13 @@
 import pytest
 
-from autogpt.llm import count_message_tokens, count_string_tokens
+from autogpt.llm.base import Message
+from autogpt.llm.utils import count_message_tokens, count_string_tokens
 
 
 def test_count_message_tokens():
     messages = [
-        {"role": "user", "content": "Hello"},
-        {"role": "assistant", "content": "Hi there!"},
-    ]
-    assert count_message_tokens(messages) == 17
-
-
-def test_count_message_tokens_with_name():
-    messages = [
-        {"role": "user", "content": "Hello", "name": "John"},
-        {"role": "assistant", "content": "Hi there!"},
+        Message("user", "Hello"),
+        Message("assistant", "Hi there!"),
     ]
     assert count_message_tokens(messages) == 17
 
@@ -25,19 +18,19 @@ def test_count_message_tokens_empty_input():
 
 
 def test_count_message_tokens_invalid_model():
-    """Invalid model should raise a KeyError"""
+    """Invalid model should raise a NotImplementedError"""
     messages = [
-        {"role": "user", "content": "Hello"},
-        {"role": "assistant", "content": "Hi there!"},
+        Message("user", "Hello"),
+        Message("assistant", "Hi there!"),
     ]
-    with pytest.raises(KeyError):
+    with pytest.raises(NotImplementedError):
         count_message_tokens(messages, model="invalid_model")
 
 
 def test_count_message_tokens_gpt_4():
     messages = [
-        {"role": "user", "content": "Hello"},
-        {"role": "assistant", "content": "Hi there!"},
+        Message("user", "Hello"),
+        Message("assistant", "Hi there!"),
     ]
     assert count_message_tokens(messages, model="gpt-4-0314") == 15
 
@@ -55,16 +48,6 @@ def test_count_string_tokens_empty_input():
     assert count_string_tokens("", model_name="gpt-3.5-turbo-0301") == 0
 
 
-def test_count_message_tokens_invalid_model():
-    """Invalid model should raise a NotImplementedError"""
-    messages = [
-        {"role": "user", "content": "Hello"},
-        {"role": "assistant", "content": "Hi there!"},
-    ]
-    with pytest.raises(NotImplementedError):
-        count_message_tokens(messages, model="invalid_model")
-
-
 def test_count_string_tokens_gpt_4():
     """Test that the string tokens are counted correctly."""