AutoGPT/autogpt_platform/backend/agents/agent_516d813b-d1bc-470f-add7-c63a4b2c2bad.json
Swifty 8be3c88711 feat(backend): add default store agents for seeding test databases (#11552)
This PR adds a collection of pre-built store agents that can be loaded
into test databases for development and testing purposes.

### Changes 🏗️

- Add 17 exported agent JSON files in the `backend/agents/` directory
- Add `StoreAgent_rows.csv` containing store listing metadata (titles,
descriptions, categories, images)
- Add `load_store_agents.py` script to load agents into the test
database (a simplified sketch of the loading flow follows this list)
- Add `load-store-agents` Makefile target for easy execution
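
For orientation, here is a minimal sketch of that loading flow, assuming the script simply reads the exported graphs and the CSV rows before handing them to the backend's database layer. The paths and function names below are illustrative, not the actual contents of `load_store_agents.py`.

```python
# Illustrative sketch only -- the real load_store_agents.py presumably uses
# the backend's own DB client; this just shows how the exported JSON graphs
# and StoreAgent_rows.csv fit together. Paths are assumptions.
import csv
import json
from pathlib import Path

AGENTS_DIR = Path("backend/agents")                 # exported agent graphs
LISTINGS_CSV = AGENTS_DIR / "StoreAgent_rows.csv"   # store listing metadata


def load_agent_graphs() -> list[dict]:
    """Parse every exported agent_*.json file in backend/agents/."""
    return [
        json.loads(path.read_text())
        for path in sorted(AGENTS_DIR.glob("agent_*.json"))
    ]


def load_store_listings() -> list[dict]:
    """Read the store listing rows (titles, descriptions, categories, images)."""
    with LISTINGS_CSV.open(newline="") as f:
        return list(csv.DictReader(f))


if __name__ == "__main__":
    graphs = load_agent_graphs()
    listings = load_store_listings()
    print(f"Loaded {len(graphs)} agent graphs and {len(listings)} store listing rows")
    # A real loader would now insert these into the test database,
    # which is what the `make load-store-agents` target wraps.
```

In practice the Makefile target is the intended entry point; the snippet above only mirrors the data flow it drives.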

**Included Agents:**
- Flux AI Image Generator
- YouTube Transcription Scraper  
- Decision Maker Lead Finder
- Smart Meeting Prep
- Automated Support Agent
- Unspirational Poster Maker
- AI Video Generator
- Automated SEO Blog Writer
- Lead Finder (Local Businesses)
- LinkedIn Post Generator
- YouTube to LinkedIn Post Converter
- Personal Newsletter
- Email Scout - Contact Finder Assistant
- YouTube Video to SEO Blog Writer
- AI Webpage Copy Improver
- Domain Name Finder
- AI Function

### Checklist 📋

#### For code changes:
- [x] I have clearly listed my changes in the PR description
- [x] I have made a test plan
- [x] I have tested my changes according to the test plan:
  - [x] Run `make load-store-agents` and verify agents are loaded into the database
  - [x] Verify store listings appear correctly with metadata from CSV
  - [x] Confirm no sensitive information (API keys, secrets) is included in the exported agents (see the sketch after this checklist)
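
The secrets check in the last item can also be approximated mechanically. The sketch below is a hedged illustration (not part of this PR) that flags any exported input field marked `"secret": true` that still carries a value, plus strings that look like API keys.

```python
# Hypothetical helper, not part of this PR: scan exported agent JSON files
# for anything that looks like leaked credentials.
import json
import re
from pathlib import Path

# Catches OpenAI-style keys and "api_key = ..." fragments; adjust as needed.
SUSPICIOUS = re.compile(r"sk-[A-Za-z0-9]{20,}|api[_-]?key\s*[:=]\s*\S+", re.IGNORECASE)


def find_leaks(path: Path) -> list[str]:
    leaks: list[str] = []

    def walk(node, trail: str = "") -> None:
        if isinstance(node, dict):
            # Exported inputs carry a "secret" flag; a secret field that
            # still holds a value would be a leak.
            if node.get("secret") is True and node.get("value"):
                leaks.append(f"{trail or '<root>'}: secret field has a value")
            for key, val in node.items():
                walk(val, f"{trail}.{key}" if trail else key)
        elif isinstance(node, list):
            for i, item in enumerate(node):
                walk(item, f"{trail}[{i}]")
        elif isinstance(node, str) and SUSPICIOUS.search(node):
            leaks.append(f"{trail}: string looks like an API key")

    walk(json.loads(path.read_text()))
    return leaks


if __name__ == "__main__":
    for path in sorted(Path("backend/agents").glob("agent_*.json")):
        for leak in find_leaks(path):
            print(f"{path.name}: {leak}")
```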

#### For configuration changes:
- [x] `.env.default` is updated or already compatible with my changes
- [x] `docker-compose.yml` is updated or already compatible with my
changes
- [x] I have included a list of my configuration changes in the PR
description (under **Changes**)

No configuration changes are required; this PR only adds test data and a
loading script.
2025-12-05 16:08:37 +01:00

{
"id": "622849a7-5848-4838-894d-01f8f07e3fad",
"version": 18,
"is_active": true,
"name": "AI Function",
"description": "## AI-Powered Function Magic: Never code again!\nProvide a description of a python function and your inputs and AI will provide the results.",
"instructions": null,
"recommended_schedule_cron": null,
"nodes": [
{
"id": "26ff2973-3f9a-451d-b902-d45e5da0a7fe",
"block_id": "363ae599-353e-4804-937e-b2ee3cef3da4",
"input_default": {
"name": "return",
"title": null,
"value": null,
"format": "",
"secret": false,
"advanced": false,
"description": "The value returned by the function"
},
"metadata": {
"position": {
"x": 1598.8622921127233,
"y": 291.59140862204725
}
},
"input_links": [
{
"id": "caecc1de-fdbc-4fd9-9570-074057bb15f9",
"source_id": "c5d16ee4-de9e-4d93-bf32-ac2d15760d5b",
"sink_id": "26ff2973-3f9a-451d-b902-d45e5da0a7fe",
"source_name": "response",
"sink_name": "value",
"is_static": false
}
],
"output_links": [],
"graph_id": "622849a7-5848-4838-894d-01f8f07e3fad",
"graph_version": 18,
"webhook_id": null,
"webhook": null
},
{
"id": "c5d16ee4-de9e-4d93-bf32-ac2d15760d5b",
"block_id": "1f292d4a-41a4-4977-9684-7c8d560b9f91",
"input_default": {
"model": "o3-mini",
"retry": 3,
"prompt": "{{ARGS}}",
"sys_prompt": "You are now the following python function:\n\n```\n# {{DESCRIPTION}}\n{{FUNCTION}}\n```\n\nThe user will provide your input arguments.\nOnly respond with your `return` value.\nDo not include any commentary or additional text in your response. \nDo not include ``` backticks or any other decorators.",
"ollama_host": "localhost:11434",
"prompt_values": {}
},
"metadata": {
"position": {
"x": 995,
"y": 290.50000000000006
}
},
"input_links": [
{
"id": "dc7cb15f-76cc-4533-b96c-dd9e3f7f75ed",
"source_id": "4eab3a55-20f2-4c1d-804c-7377ba8202d2",
"sink_id": "c5d16ee4-de9e-4d93-bf32-ac2d15760d5b",
"source_name": "result",
"sink_name": "prompt_values_#_FUNCTION",
"is_static": true
},
{
"id": "093bdca5-9f44-42f9-8e1c-276dd2971675",
"source_id": "844530de-2354-46d8-b748-67306b7bbca1",
"sink_id": "c5d16ee4-de9e-4d93-bf32-ac2d15760d5b",
"source_name": "result",
"sink_name": "prompt_values_#_ARGS",
"is_static": true
},
{
"id": "6c63d8ee-b63d-4ff6-bae0-7db8f99bb7af",
"source_id": "0fd6ef54-c1cd-478d-b764-17e40f882b99",
"sink_id": "c5d16ee4-de9e-4d93-bf32-ac2d15760d5b",
"source_name": "result",
"sink_name": "prompt_values_#_DESCRIPTION",
"is_static": true
}
],
"output_links": [
{
"id": "caecc1de-fdbc-4fd9-9570-074057bb15f9",
"source_id": "c5d16ee4-de9e-4d93-bf32-ac2d15760d5b",
"sink_id": "26ff2973-3f9a-451d-b902-d45e5da0a7fe",
"source_name": "response",
"sink_name": "value",
"is_static": false
}
],
"graph_id": "622849a7-5848-4838-894d-01f8f07e3fad",
"graph_version": 18,
"webhook_id": null,
"webhook": null
},
{
"id": "4eab3a55-20f2-4c1d-804c-7377ba8202d2",
"block_id": "7fcd3bcb-8e1b-4e69-903d-32d3d4a92158",
"input_default": {
"name": "Function Definition",
"title": null,
"value": "def fake_people(n: int) -> list[dict]:",
"secret": false,
"advanced": false,
"description": "The function definition (text). This is what you would type on the first line of the function when programming.\n\ne.g \"def fake_people(n: int) -> list[dict]:\"",
"placeholder_values": []
},
"metadata": {
"position": {
"x": -672.6908629664215,
"y": 302.42044359789116
}
},
"input_links": [],
"output_links": [
{
"id": "dc7cb15f-76cc-4533-b96c-dd9e3f7f75ed",
"source_id": "4eab3a55-20f2-4c1d-804c-7377ba8202d2",
"sink_id": "c5d16ee4-de9e-4d93-bf32-ac2d15760d5b",
"source_name": "result",
"sink_name": "prompt_values_#_FUNCTION",
"is_static": true
}
],
"graph_id": "622849a7-5848-4838-894d-01f8f07e3fad",
"graph_version": 18,
"webhook_id": null,
"webhook": null
},
{
"id": "844530de-2354-46d8-b748-67306b7bbca1",
"block_id": "7fcd3bcb-8e1b-4e69-903d-32d3d4a92158",
"input_default": {
"name": "Arguments",
"title": null,
"value": "20",
"secret": false,
"advanced": false,
"description": "The function's inputs\n\ne.g \"20\"",
"placeholder_values": []
},
"metadata": {
"position": {
"x": -158.1623599617334,
"y": 295.410856928333
}
},
"input_links": [],
"output_links": [
{
"id": "093bdca5-9f44-42f9-8e1c-276dd2971675",
"source_id": "844530de-2354-46d8-b748-67306b7bbca1",
"sink_id": "c5d16ee4-de9e-4d93-bf32-ac2d15760d5b",
"source_name": "result",
"sink_name": "prompt_values_#_ARGS",
"is_static": true
}
],
"graph_id": "622849a7-5848-4838-894d-01f8f07e3fad",
"graph_version": 18,
"webhook_id": null,
"webhook": null
},
{
"id": "0fd6ef54-c1cd-478d-b764-17e40f882b99",
"block_id": "90a56ffb-7024-4b2b-ab50-e26c5e5ab8ba",
"input_default": {
"name": "Description",
"title": null,
"value": "Generates n examples of fake data representing people, each with a name, DoB, Job title, and an age.",
"secret": false,
"advanced": false,
"description": "Describe what the function does.\n\ne.g \"Generates n examples of fake data representing people, each with a name, DoB, Job title, and an age.\"",
"placeholder_values": []
},
"metadata": {
"position": {
"x": 374.4548658057796,
"y": 290.3779121974126
}
},
"input_links": [],
"output_links": [
{
"id": "6c63d8ee-b63d-4ff6-bae0-7db8f99bb7af",
"source_id": "0fd6ef54-c1cd-478d-b764-17e40f882b99",
"sink_id": "c5d16ee4-de9e-4d93-bf32-ac2d15760d5b",
"source_name": "result",
"sink_name": "prompt_values_#_DESCRIPTION",
"is_static": true
}
],
"graph_id": "622849a7-5848-4838-894d-01f8f07e3fad",
"graph_version": 18,
"webhook_id": null,
"webhook": null
}
],
"links": [
{
"id": "caecc1de-fdbc-4fd9-9570-074057bb15f9",
"source_id": "c5d16ee4-de9e-4d93-bf32-ac2d15760d5b",
"sink_id": "26ff2973-3f9a-451d-b902-d45e5da0a7fe",
"source_name": "response",
"sink_name": "value",
"is_static": false
},
{
"id": "6c63d8ee-b63d-4ff6-bae0-7db8f99bb7af",
"source_id": "0fd6ef54-c1cd-478d-b764-17e40f882b99",
"sink_id": "c5d16ee4-de9e-4d93-bf32-ac2d15760d5b",
"source_name": "result",
"sink_name": "prompt_values_#_DESCRIPTION",
"is_static": true
},
{
"id": "093bdca5-9f44-42f9-8e1c-276dd2971675",
"source_id": "844530de-2354-46d8-b748-67306b7bbca1",
"sink_id": "c5d16ee4-de9e-4d93-bf32-ac2d15760d5b",
"source_name": "result",
"sink_name": "prompt_values_#_ARGS",
"is_static": true
},
{
"id": "dc7cb15f-76cc-4533-b96c-dd9e3f7f75ed",
"source_id": "4eab3a55-20f2-4c1d-804c-7377ba8202d2",
"sink_id": "c5d16ee4-de9e-4d93-bf32-ac2d15760d5b",
"source_name": "result",
"sink_name": "prompt_values_#_FUNCTION",
"is_static": true
}
],
"forked_from_id": null,
"forked_from_version": null,
"sub_graphs": [],
"user_id": "",
"created_at": "2025-04-19T17:10:48.857Z",
"input_schema": {
"type": "object",
"properties": {
"Function Definition": {
"advanced": false,
"anyOf": [
{
"format": "short-text",
"type": "string"
},
{
"type": "null"
}
],
"secret": false,
"title": "Function Definition",
"description": "The function definition (text). This is what you would type on the first line of the function when programming.\n\ne.g \"def fake_people(n: int) -> list[dict]:\"",
"default": "def fake_people(n: int) -> list[dict]:"
},
"Arguments": {
"advanced": false,
"anyOf": [
{
"format": "short-text",
"type": "string"
},
{
"type": "null"
}
],
"secret": false,
"title": "Arguments",
"description": "The function's inputs\n\ne.g \"20\"",
"default": "20"
},
"Description": {
"advanced": false,
"anyOf": [
{
"format": "long-text",
"type": "string"
},
{
"type": "null"
}
],
"secret": false,
"title": "Description",
"description": "Describe what the function does.\n\ne.g \"Generates n examples of fake data representing people, each with a name, DoB, Job title, and an age.\"",
"default": "Generates n examples of fake data representing people, each with a name, DoB, Job title, and an age."
}
},
"required": []
},
"output_schema": {
"type": "object",
"properties": {
"return": {
"advanced": false,
"secret": false,
"title": "return",
"description": "The value returned by the function"
}
},
"required": [
"return"
]
},
"has_external_trigger": false,
"has_human_in_the_loop": false,
"trigger_setup_info": null,
"credentials_input_schema": {
"properties": {
"openai_api_key_credentials": {
"credentials_provider": [
"openai"
],
"credentials_types": [
"api_key"
],
"properties": {
"id": {
"title": "Id",
"type": "string"
},
"title": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"default": null,
"title": "Title"
},
"provider": {
"const": "openai",
"title": "Provider",
"type": "string"
},
"type": {
"const": "api_key",
"title": "Type",
"type": "string"
}
},
"required": [
"id",
"provider",
"type"
],
"title": "CredentialsMetaInput[Literal[<ProviderName.OPENAI: 'openai'>], Literal['api_key']]",
"type": "object",
"discriminator": "model",
"discriminator_mapping": {
"Llama-3.3-70B-Instruct": "llama_api",
"Llama-3.3-8B-Instruct": "llama_api",
"Llama-4-Maverick-17B-128E-Instruct-FP8": "llama_api",
"Llama-4-Scout-17B-16E-Instruct-FP8": "llama_api",
"Qwen/Qwen2.5-72B-Instruct-Turbo": "aiml_api",
"amazon/nova-lite-v1": "open_router",
"amazon/nova-micro-v1": "open_router",
"amazon/nova-pro-v1": "open_router",
"claude-3-7-sonnet-20250219": "anthropic",
"claude-3-haiku-20240307": "anthropic",
"claude-haiku-4-5-20251001": "anthropic",
"claude-opus-4-1-20250805": "anthropic",
"claude-opus-4-20250514": "anthropic",
"claude-opus-4-5-20251101": "anthropic",
"claude-sonnet-4-20250514": "anthropic",
"claude-sonnet-4-5-20250929": "anthropic",
"cohere/command-r-08-2024": "open_router",
"cohere/command-r-plus-08-2024": "open_router",
"deepseek/deepseek-chat": "open_router",
"deepseek/deepseek-r1-0528": "open_router",
"dolphin-mistral:latest": "ollama",
"google/gemini-2.0-flash-001": "open_router",
"google/gemini-2.0-flash-lite-001": "open_router",
"google/gemini-2.5-flash": "open_router",
"google/gemini-2.5-flash-lite-preview-06-17": "open_router",
"google/gemini-2.5-pro-preview-03-25": "open_router",
"google/gemini-3-pro-preview": "open_router",
"gpt-3.5-turbo": "openai",
"gpt-4-turbo": "openai",
"gpt-4.1-2025-04-14": "openai",
"gpt-4.1-mini-2025-04-14": "openai",
"gpt-4o": "openai",
"gpt-4o-mini": "openai",
"gpt-5-2025-08-07": "openai",
"gpt-5-chat-latest": "openai",
"gpt-5-mini-2025-08-07": "openai",
"gpt-5-nano-2025-08-07": "openai",
"gpt-5.1-2025-11-13": "openai",
"gryphe/mythomax-l2-13b": "open_router",
"llama-3.1-8b-instant": "groq",
"llama-3.3-70b-versatile": "groq",
"llama3": "ollama",
"llama3.1:405b": "ollama",
"llama3.2": "ollama",
"llama3.3": "ollama",
"meta-llama/Llama-3.2-3B-Instruct-Turbo": "aiml_api",
"meta-llama/Llama-3.3-70B-Instruct-Turbo": "aiml_api",
"meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo": "aiml_api",
"meta-llama/llama-4-maverick": "open_router",
"meta-llama/llama-4-scout": "open_router",
"microsoft/wizardlm-2-8x22b": "open_router",
"mistralai/mistral-nemo": "open_router",
"moonshotai/kimi-k2": "open_router",
"nousresearch/hermes-3-llama-3.1-405b": "open_router",
"nousresearch/hermes-3-llama-3.1-70b": "open_router",
"nvidia/llama-3.1-nemotron-70b-instruct": "aiml_api",
"o1": "openai",
"o1-mini": "openai",
"o3-2025-04-16": "openai",
"o3-mini": "openai",
"openai/gpt-oss-120b": "open_router",
"openai/gpt-oss-20b": "open_router",
"perplexity/sonar": "open_router",
"perplexity/sonar-deep-research": "open_router",
"perplexity/sonar-pro": "open_router",
"qwen/qwen3-235b-a22b-thinking-2507": "open_router",
"qwen/qwen3-coder": "open_router",
"v0-1.0-md": "v0",
"v0-1.5-lg": "v0",
"v0-1.5-md": "v0",
"x-ai/grok-4": "open_router",
"x-ai/grok-4-fast": "open_router",
"x-ai/grok-4.1-fast": "open_router",
"x-ai/grok-code-fast-1": "open_router"
},
"discriminator_values": [
"o3-mini"
]
}
},
"required": [
"openai_api_key_credentials"
],
"title": "AIFunctionCredentialsInputSchema",
"type": "object"
}
}
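
For readers skimming the export above: the graph wires three Agent Input blocks (Function Definition, Arguments, Description) into a single LLM node whose system prompt impersonates the described Python function, then routes the model's response to the `return` output. The snippet below is a rough reconstruction of the prompt that LLM node (`c5d16ee4-...`) would assemble from the defaults stored in this file; the block performs the `{{NAME}}` substitution server-side, and `str.format` is used here purely for illustration.

```python
# Mirrors the prompt assembly of the LLM node using the default values in
# this export. Not the block's actual implementation.
SYS_PROMPT_TEMPLATE = (
    "You are now the following python function:\n\n"
    "```\n"
    "# {DESCRIPTION}\n"
    "{FUNCTION}\n"
    "```\n\n"
    "The user will provide your input arguments.\n"
    "Only respond with your `return` value.\n"
    "Do not include any commentary or additional text in your response.\n"
    "Do not include ``` backticks or any other decorators."
)

defaults = {
    "DESCRIPTION": (
        "Generates n examples of fake data representing people, "
        "each with a name, DoB, Job title, and an age."
    ),
    "FUNCTION": "def fake_people(n: int) -> list[dict]:",
    "ARGS": "20",
}

system_prompt = SYS_PROMPT_TEMPLATE.format(
    DESCRIPTION=defaults["DESCRIPTION"], FUNCTION=defaults["FUNCTION"]
)
user_prompt = defaults["ARGS"]  # the node's "prompt" field is just "{{ARGS}}"

print(system_prompt)
print("---")
print(user_prompt)
```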