Mirror of https://github.com/Significant-Gravitas/AutoGPT.git, synced 2026-04-08 03:00:28 -04:00.
Commit message: "Removed test scripts". This commit is contained in:
@@ -1,44 +0,0 @@
|
||||
"""Smoke test for an agent-protocol HTTP API.

Creates a task, uploads a small text file as an artifact for that task,
then downloads the artifact back. Run against a locally hosted agent.
"""
import json
import os

import requests

# Define the base URL of the API
base_url = "http://localhost:8000"  # Replace with your actual API base URL

# --- Create a new task --------------------------------------------------
task_request = {
    "input": "Write the words you receive to the file 'output.txt'.",
    "additional_input": {"type": "python/code"},
}
response = requests.post(f"{base_url}/agent/tasks", json=task_request)
# Fail fast on HTTP errors instead of crashing later on .json() decode.
response.raise_for_status()
task = response.json()
print(f"Created task: {task}")

# --- Upload a file as an artifact for the task --------------------------
task_id = task["task_id"]
test_file_content = "This is a test file for testing."
relative_path = "./relative/path/to/your/file"  # Add your relative path here
file_path = "test_file.txt"
with open(file_path, "w") as f:
    f.write(test_file_content)
try:
    with open(file_path, "rb") as f:
        # relative_path is passed as a query parameter, matching the API route.
        response = requests.post(
            f"{base_url}/agent/tasks/{task_id}/artifacts?relative_path={relative_path}",
            files={"file": f},
        )
    response.raise_for_status()
    artifact = response.json()
    print(f"Uploaded artifact: {response.text}")
finally:
    # Don't leave the scratch upload file behind.
    os.remove(file_path)

# --- Download the artifact ----------------------------------------------
artifact_id = artifact["artifact_id"]
response = requests.get(f"{base_url}/agent/tasks/{task_id}/artifacts/{artifact_id}")
if response.status_code == 200:
    with open("downloaded_file.txt", "wb") as f:
        f.write(response.content)
    print("Downloaded artifact.")
else:
    print(f"Error downloading artifact: {response.content}")
|
||||
@@ -1,23 +0,0 @@
|
||||
"""Sanity check against a local OpenAI-compatible completion server."""
import openai

# Point the OpenAI client at the locally running inference server;
# a real key is not required for a local endpoint.
openai.api_base = "http://localhost:4891/v1"
openai.api_key = "not needed for a local LLM"

model = "ggml-llama-2-13b-chat.ggmlv3.q4_0.bin"
prompt = "Who is Michael Jordan?"

# Collect the request parameters in one place, then fire the completion.
completion_params = {
    "model": model,
    "prompt": prompt,
    "max_tokens": 50,
    "temperature": 0.28,
    "top_p": 0.95,
    "n": 1,
    "echo": True,
    "stream": False,
}
response = openai.Completion.create(**completion_params)

# With echo=True the returned text includes the prompt, so a real
# completion must be strictly longer than the prompt alone.
assert len(response["choices"][0]["text"]) > len(prompt)

print(f"Model: {response['model']}")
print(f"Usage: {response['usage']}")
print(f"Answer: {response['choices'][0]['text']}")
|
||||
Reference in New Issue
Block a user