Mirror of https://github.com/Significant-Gravitas/AutoGPT.git (synced 2026-01-12 00:28:31 -05:00)

Compare commits: gitbook...swiftyos/d (7 commits)

| Author | SHA1 | Date |
|---|---|---|
| | a86d750cf5 | |
| | 13bd648731 | |
| | 3d7ee7cc29 | |
| | 1ea52934cd | |
| | 7b6db6e260 | |
| | 2c9563353e | |
| | fb2a70e2d8 | |
@@ -1,4 +1,4 @@
-.PHONY: start-core stop-core logs-core format lint migrate run-backend run-frontend
+.PHONY: start-core stop-core logs-core format lint migrate run-backend run-frontend load-store-agents
 
 # Run just Supabase + Redis + RabbitMQ
 start-core:
@@ -42,7 +45,10 @@ run-frontend:
 
 test-data:
     cd backend && poetry run python test/test_data_creator.py
 
+load-store-agents:
+    cd backend && poetry run python test/load_store_agents.py
+
 help:
     @echo "Usage: make <target>"
     @echo "Targets:"
@@ -54,4 +57,5 @@ help:
     @echo " migrate - Run backend database migrations"
     @echo " run-backend - Run the backend FastAPI server"
     @echo " run-frontend - Run the frontend Next.js development server"
     @echo " test-data - Run the test data creator"
+    @echo " load-store-agents - Load store agents from agents/ folder into test database"
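The new target is invoked as make load-store-agents and, per the recipe above, runs test/load_store_agents.py from the backend directory via Poetry. That script itself is not shown in this diff, so the snippet below is only a minimal sketch of the parsing it would need for the StoreAgent_rows.csv file added further down: the relative path and the column names are taken from this commit, while the function name and everything else are assumptions.

```python
# Sketch only: a plausible reader for backend/agents/StoreAgent_rows.csv.
# The real test/load_store_agents.py is not part of this diff; column names
# and the relative path come from the commit, the rest is assumed.
import csv
import json
from pathlib import Path

AGENTS_DIR = Path("agents")  # the Makefile target runs from backend/


def load_store_agent_rows(csv_path: Path) -> list[dict]:
    """Parse the StoreAgent_rows.csv export into plain dicts."""
    rows = []
    with csv_path.open(newline="", encoding="utf-8") as f:
        for row in csv.DictReader(f):
            # agent_image and categories are JSON arrays stored as CSV text.
            row["agent_image"] = json.loads(row["agent_image"]) if row["agent_image"] else []
            row["categories"] = json.loads(row["categories"]) if row["categories"] else []
            # featured / useForOnboarding / is_available are "true"/"false" strings.
            for flag in ("featured", "useForOnboarding", "is_available"):
                row[flag] = row[flag].strip().lower() == "true"
            rows.append(row)
    return rows


if __name__ == "__main__":
    for agent in load_store_agent_rows(AGENTS_DIR / "StoreAgent_rows.csv"):
        print(agent["slug"], "->", agent["agent_name"])
```

In the actual target this data would presumably be written into the test database rather than printed, but the insertion API is not visible in this diff.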
242  autogpt_platform/backend/agents/StoreAgent_rows.csv  (Normal file)
@@ -0,0 +1,242 @@
listing_id,storeListingVersionId,slug,agent_name,agent_video,agent_image,featured,sub_heading,description,categories,useForOnboarding,is_available
|
||||
6e60a900-9d7d-490e-9af2-a194827ed632,d85882b8-633f-44ce-a315-c20a8c123d19,flux-ai-image-generator,Flux AI Image Generator,,"[""https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/ca154dd1-140e-454c-91bd-2d8a00de3f08.jpg"",""https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/577d995d-bc38-40a9-a23f-1f30f5774bdb.jpg"",""https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/415db1b7-115c-43ab-bd6c-4e9f7ef95be1.jpg""]",false,Transform ideas into breathtaking images,"Transform ideas into breathtaking images with this AI-powered Image Generator. Using cutting-edge Flux AI technology, the tool crafts highly detailed, photorealistic visuals from simple text prompts. Perfect for artists, marketers, and content creators, this generator produces unique images tailored to user specifications. From fantastical scenes to lifelike portraits, users can unleash creativity with professional-quality results in seconds. Easy to use and endlessly versatile, bring imagination to life with the AI Image Generator today!","[""creative""]",false,true
|
||||
f11fc6e9-6166-4676-ac5d-f07127b270c1,c775f60d-b99f-418b-8fe0-53172258c3ce,youtube-transcription-scraper,YouTube Transcription Scraper,https://youtu.be/H8S3pU68lGE,"[""https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/65bce54b-0124-4b0d-9e3e-f9b89d0dc99e.jpg""]",false,Fetch the transcriptions from the most popular YouTube videos in your chosen topic,"Effortlessly gather transcriptions from multiple YouTube videos with this agent. It scrapes and compiles video transcripts into a clean, organized list, making it easy to extract insights, quotes, or content from various sources in one go. Ideal for researchers, content creators, and marketers looking to quickly analyze or repurpose video content.","[""writing""]",false,true
|
||||
17908889-b599-4010-8e4f-bed19b8f3446,6e16e65a-ad34-4108-b4fd-4a23fced5ea2,business-ownerceo-finder,Decision Maker Lead Finder,,"[""https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/1020d94e-b6a2-4fa7-bbdf-2c218b0de563.jpg""]",false,Contact CEOs today,"Find the key decision-makers you need, fast.
|
||||
|
||||
This agent identifies business owners or CEOs of local companies in any area you choose. Simply enter what kind of businesses you’re looking for and where, and it will:
|
||||
|
||||
* Search the area and gather public information
|
||||
* Return names, roles, and contact details when available
|
||||
* Provide smart Google search suggestions if details aren’t found
|
||||
|
||||
Perfect for:
|
||||
|
||||
* B2B sales teams seeking verified leads
|
||||
* Recruiters sourcing local talent
|
||||
* Researchers looking to connect with business leaders
|
||||
|
||||
Save hours of manual searching and get straight to the people who matter most.","[""business""]",true,true
|
||||
72beca1d-45ea-4403-a7ce-e2af168ee428,415b7352-0dc6-4214-9d87-0ad3751b711d,smart-meeting-brief,Smart Meeting Prep,https://youtu.be/9ydZR2hkxaY,"[""https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/2f116ce1-63ae-4d39-a5cd-f514defc2b97.png"",""https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/0a71a60a-2263-4f12-9836-9c76ab49f155.png"",""https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/95327695-9184-403c-907a-a9d3bdafa6a5.png"",""https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/2bc77788-790b-47d4-8a61-ce97b695e9f5.png""]",true,Business meeting briefings delivered daily,"Never walk into a meeting unprepared again. Every day at 4 pm, the Smart Meeting Prep Agent scans your calendar for tomorrow's external meetings. It reviews your past email exchanges, researches each participant's background and role, and compiles the insights into a concise briefing, so you can close your workday ready for tomorrow's calls.
|
||||
|
||||
How It Works
|
||||
1. At 4 pm, the agent scans your calendar and identifies external meetings scheduled for the next day.
|
||||
2. It reviews recent email threads with each participant to surface key relationship history and communication context.
|
||||
3. It conducts online research to gather publicly available information on roles, company backgrounds, and relevant professional data.
|
||||
4. It produces a unified briefing for each participant, including past exchange highlights, profile notes, and strategic conversation points.","[""personal""]",true,true
|
||||
9fa5697a-617b-4fae-aea0-7dbbed279976,b8ceb480-a7a2-4c90-8513-181a49f7071f,automated-support-ai,Automated Support Agent,https://youtu.be/nBMfu_5sgDA,"[""https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/ed56febc-2205-4179-9e7e-505d8500b66c.png""]",true,Automate up to 80 percent of inbound support emails,"Overview:
|
||||
Support teams spend countless hours on basic tickets. This agent automates repetitive customer support tasks. It reads incoming requests, researches your knowledge base, and responds automatically when confident. When unsure, it escalates to a human for final resolution.
|
||||
|
||||
How it Works:
|
||||
New support emails are routed to the agent.
|
||||
The agent checks internal documentation for answers.
|
||||
It measures confidence in the answer found and either replies directly or escalates to a human.
|
||||
|
||||
Business Value:
|
||||
Automating the easy 80 percent of support tickets allows your team to focus on high-value, complex customer issues, improving efficiency and response times.","[""business""]",false,true
|
||||
2bdac92b-a12c-4131-bb46-0e3b89f61413,31daf49d-31d3-476b-aa4c-099abc59b458,unspirational-poster-maker,Unspirational Poster Maker,,"[""https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/6a490dac-27e5-405f-a4c4-8d1c55b85060.jpg"",""https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/d343fbb5-478c-4e38-94df-4337293b61f1.jpg""]",false,Because adulting is hard,"This witty AI agent generates hilariously relatable ""motivational"" posters that tackle the everyday struggles of procrastination, overthinking, and workplace chaos with a blend of absurdity and sarcasm. From goldfish facing impossible tasks to cats in existential crises, The Unspirational Poster Maker designs tongue-in-cheek graphics and captions that mock productivity clichés and embrace our collective struggles to ""get it together."" Perfect for adding a touch of humour to the workday, these posters remind us that sometimes, all we can do is laugh at the chaos.","[""creative""]",false,true
|
||||
9adf005e-2854-4cc7-98cf-f7103b92a7b7,a03b0d8c-4751-43d6-a54e-c3b7856ba4e3,ai-shortform-video-generator-create-viral-ready-content,AI Video Generator,,"[""https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/8d2670b9-fea5-4966-a597-0a4511bffdc3.png"",""https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/aabe8aec-0110-4ce7-a259-4f86fe8fe07d.png""]",false,Create Viral-Ready Shorts Content in Seconds,"OVERVIEW
|
||||
Transform any trending headline or broad topic into a polished, vertical short-form video in a single run.
|
||||
The agent automates research, scriptwriting, metadata creation, and Revid.ai rendering, returning one ready-to-publish MP4 plus its title, script and hashtags.
|
||||
|
||||
HOW IT WORKS
|
||||
1. Input a topic or an exact news headline.
|
||||
2. The agent fetches live search results and selects the most engaging related story.
|
||||
3. Key facts are summarised into concise research notes.
|
||||
4. Claude writes a 30–35 second script with visual cues, a three-second hook, tension loops, and a call-to-action.
|
||||
5. GPT-4o generates an eye-catching title and one or two discoverability hashtags.
|
||||
6. The script is sent to a state-of-the-art AI video generator to render a single 9:16 MP4 (default: 720 p, 30 fps, voice “Brian”, style “movingImage”, music “Bladerunner 2049”).
|
||||
– All voice, style and resolution settings can be adjusted in the Builder before you press ""Run"".
|
||||
7. Output delivered: Title, Script, Hashtags, Video URL.
|
||||
|
||||
KEY USE CASES
|
||||
- Broad-topic explainers (e.g. “Artificial Intelligence” or “Climate Tech”).
|
||||
- Real-time newsjacking with a specific breaking headline.
|
||||
- Product-launch spotlights and quick event recaps while interest is high.
|
||||
|
||||
BUSINESS VALUE
|
||||
- One-click speed: from idea to finished video in minutes.
|
||||
- Consistent brand look: Revid presets keep voice, style and aspect ratio on spec.
|
||||
- No-code workflow: marketers create social video without design or development queues.
|
||||
- Cloud convenience: Auto-GPT Cloud users are pre-configured with all required keys.
|
||||
Self-hosted users simply add OpenAI, Anthropic, Perplexity (OpenRouter/Jina) and Revid keys once.
|
||||
|
||||
IMPORTANT NOTES
|
||||
- The agent outputs exactly one video per execution. Run it again for additional shorts.
|
||||
- Video rendering time varies; AI-generated footage may take several minutes.","[""writing""]",false,true
|
||||
864e48ef-fee5-42c1-b6a4-2ae139db9fc1,55d40473-0f31-4ada-9e40-d3a7139fcbd4,automated-blog-writer,Automated SEO Blog Writer,https://youtu.be/nKcDCbDVobs,"[""https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/2dd5f95b-5b30-4bf8-a11b-bac776c5141a.jpg""]",true,"Automate research, writing, and publishing for high-ranking blog posts","Scale your blog with a fully automated content engine. The Automated SEO Blog Writer learns your brand voice, finds high-demand keywords, and creates SEO-optimized articles that attract organic traffic and boost visibility.
|
||||
|
||||
How it works:
|
||||
|
||||
1. Share your pitch, website, and values.
|
||||
2. The agent studies your site and uncovers proven SEO opportunities.
|
||||
3. It spends two hours researching and drafting each post.
|
||||
4. You set the cadence—publishing runs on autopilot.
|
||||
|
||||
Business value: Consistently publish research-backed, optimized posts that build domain authority, rankings, and thought leadership while you focus on what matters most.
|
||||
|
||||
Use cases:
|
||||
• Founders: Keep your blog active with no time drain.
|
||||
• Agencies: Deliver scalable SEO content for clients.
|
||||
• Strategists: Automate execution, focus on strategy.
|
||||
• Marketers: Drive steady organic growth.
|
||||
• Local businesses: Capture nearby search traffic.","[""writing""]",false,true
|
||||
6046f42e-eb84-406f-bae0-8e052064a4fa,a548e507-09a7-4b30-909c-f63fcda10fff,lead-finder-local-businesses,Lead Finder,,"[""https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/abd6605f-d5f8-426b-af36-052e8ba5044f.webp""]",false,Auto-Prospect Like a Pro,"Turbo-charge your local lead generation with the AutoGPT Marketplace’s top Google Maps prospecting agent. “Lead Finder: Local Businesses” delivers verified, ready-to-contact prospects in any niche and city—so you can focus on closing, not searching.
|
||||
|
||||
**WHAT IT DOES**
|
||||
• Searches Google Maps via the official API (no scraping)
|
||||
• Prompts like “dentists in Chicago” or “coffee shops near me”
|
||||
• Returns: Name, Website, Rating, Reviews, **Phone & Address**
|
||||
• Exports instantly to your CRM, sheet, or outreach workflow
|
||||
|
||||
**WHY YOU’LL LOVE IT**
|
||||
✓ Hyper-targeted leads in minutes
|
||||
✓ Unlimited searches & locations
|
||||
✓ Zero CAPTCHAs or IP blocks
|
||||
✓ Works on AutoGPT Cloud or self-hosted (with your API key)
|
||||
✓ Cut prospecting time by 90%
|
||||
|
||||
**PERFECT FOR**
|
||||
— Marketers & PPC agencies
|
||||
— SEO consultants & designers
|
||||
— SaaS founders & sales teams
|
||||
|
||||
Stop scrolling directories—start filling your pipeline. Start now and let AI prospect while you profit.
|
||||
|
||||
→ Click *Add to Library* and own your market today.","[""business""]",true,true
|
||||
f623c862-24e9-44fc-8ce8-d8282bb51ad2,eafa21d3-bf14-4f63-a97f-a5ee41df83b3,linkedin-post-generator,LinkedIn Post Generator,,"[""https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/297f6a8e-81a8-43e2-b106-c7ad4a5662df.png"",""https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/fceebdc1-aef6-4000-97fc-4ef587f56bda.png""]",false,Auto‑craft LinkedIn gold,"Create research‑driven, high‑impact LinkedIn posts in minutes. This agent searches YouTube for the best videos on your chosen topic, pulls their transcripts, and distils the most valuable insights into a polished post ready for your company page or personal feed.
|
||||
|
||||
FEATURES
|
||||
• Automated YouTube research – discovers and analyses top‑ranked videos so you don’t have to
|
||||
• AI‑curated synthesis – combines multiple transcripts into one authoritative narrative
|
||||
• Full creative control – adjust style, tone, objective, opinion, clarity, target word count and number of videos
|
||||
• LinkedIn‑optimised output – hook, 2‑3 key points, CTA, strategic line breaks, 3‑5 hashtags, no markdown
|
||||
• One‑click publish – returns a ready‑to‑post text block (≤1 300 characters)
|
||||
|
||||
HOW IT WORKS
|
||||
1. Enter a topic and your preferred writing parameters.
|
||||
2. The agent builds a YouTube search, fetches the page, and extracts the top N video URLs.
|
||||
3. It pulls each transcript, then feeds them—plus your settings—into Claude 3.5 Sonnet.
|
||||
4. The model writes a concise, engaging post designed for maximum LinkedIn engagement.
|
||||
|
||||
USE CASES
|
||||
• Thought‑leadership updates backed by fresh video research
|
||||
• Rapid industry summaries after major events, webinars, or conferences
|
||||
• Consistent LinkedIn content for busy founders, marketers, and creators
|
||||
|
||||
WHY YOU’LL LOVE IT
|
||||
Save hours of manual research, avoid surface‑level hot‑takes, and publish posts that showcase real expertise—without the heavy lift.","[""writing""]",true,true
|
||||
7d4120ad-b6b3-4419-8bdb-7dd7d350ef32,e7bb29a1-23c7-4fee-aa3b-5426174b8c52,youtube-to-linkedin-post-converter,YouTube to LinkedIn Post Converter,,"[""https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/f084b326-a708-4396-be51-7ba59ad2ef32.png""]",false,Transform Your YouTube Videos into Engaging LinkedIn Posts with AI,"WHAT IT DOES:
|
||||
This agent converts YouTube video content into a LinkedIn post by analyzing the video's transcript. It provides you with a tailored post that reflects the core ideas, key takeaways, and tone of the original video, optimizing it for engagement on LinkedIn.
|
||||
|
||||
HOW IT WORKS:
|
||||
- You provide the URL to the YouTube video (required)
|
||||
- You can choose the structure for the LinkedIn post (e.g., Personal Achievement Story, Lesson Learned, Thought Leadership, etc.)
|
||||
- You can also select the tone (e.g., Inspirational, Analytical, Conversational, etc.)
|
||||
- The transcript of the video is analyzed by the GPT-4 model and the Claude 3.5 Sonnet model
|
||||
- The models extract key insights, memorable quotes, and the main points from the video
|
||||
- You’ll receive a LinkedIn post, formatted according to your chosen structure and tone, optimized for professional engagement
|
||||
|
||||
INPUTS:
|
||||
- Source YouTube Video – Provide the URL to the YouTube video
|
||||
- Structure – Choose the post format (e.g., Personal Achievement Story, Thought Leadership, etc.)
|
||||
- Content – Specify the main message or idea of the post (e.g., Hot Take, Key Takeaways, etc.)
|
||||
- Tone – Select the tone for the post (e.g., Conversational, Inspirational, etc.)
|
||||
|
||||
OUTPUT:
|
||||
- LinkedIn Post – A well-crafted, AI-generated LinkedIn post with a professional tone, based on the video content and your specified preferences
|
||||
|
||||
Perfect for content creators, marketers, and professionals who want to repurpose YouTube videos for LinkedIn and boost their professional branding.","[""writing""]",false,true
|
||||
c61d6a83-ea48-4df8-b447-3da2d9fe5814,00fdd42c-a14c-4d19-a567-65374ea0e87f,personalized-morning-coffee-newsletter,Personal Newsletter,,"[""https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/f4b38e4c-8166-4caf-9411-96c9c4c82d4c.png""]",false,Start your day with personalized AI newsletters that deliver credibility and context for every interest or mood.,"This Personal Newsletter Agent provides a bespoke daily digest on your favorite topics and tone. Whether you prefer industry insights, lighthearted reads, or breaking news, this agent crafts your own unique newsletter to keep you informed and entertained.
|
||||
|
||||
|
||||
How It Works
|
||||
1. Enter your favorite topics, industries, or areas of interest.
|
||||
2. Choose your tone—professional, casual, or humorous.
|
||||
3. Set your preferred delivery cadence: daily or weekly.
|
||||
4. The agent scans top sources and compiles 3–5 engaging stories, insights, and fun facts into a conversational newsletter.
|
||||
|
||||
Skip the morning scroll and enjoy a thoughtfully curated newsletter designed just for you. Stay ahead of trends, spark creative ideas, and enjoy an effortless, informed start to your day.
|
||||
|
||||
|
||||
Use Cases
|
||||
• Executives: Get a daily digest of market updates and leadership insights.
|
||||
• Marketers: Receive curated creative trends and campaign inspiration.
|
||||
• Entrepreneurs: Stay updated on your industry without information overload.","[""research""]",true,true
|
||||
e2e49cfc-4a39-4d62-a6b3-c095f6d025ff,fc2c9976-0962-4625-a27b-d316573a9e7f,email-address-finder,Email Scout - Contact Finder Assistant,,"[""https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/da8a690a-7a8b-4c1d-b6f8-e2f840c0205d.jpg"",""https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/6a2ac25c-1609-4881-8140-e6da2421afb3.jpg"",""https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/26179263-fe06-45bd-b6a0-0754660a0a46.jpg""]",false,Find contact details from name and location using AI search,"Finding someone's professional email address can be time-consuming and frustrating. Manual searching across multiple websites, social profiles, and business directories often leads to dead ends or outdated information.
|
||||
|
||||
Email Scout automates this process by intelligently searching across publicly available sources when you provide a person's name and location. Simply input basic information like ""Tim Cook, USA"" or ""Sarah Smith, London"" and let the AI assistant do the work of finding potential contact details.
|
||||
|
||||
Key Features:
|
||||
- Quick search from just name and location
|
||||
- Scans multiple public sources
|
||||
- Automated AI-powered search process
|
||||
- Easy to use with simple inputs
|
||||
|
||||
Perfect for recruiters, business development professionals, researchers, and anyone needing to establish professional contact.
|
||||
|
||||
Note: This tool searches only publicly available information. Search results depend on what contact information people have made public. Some searches may not yield results if the information isn't publicly accessible.","[""""]",false,true
|
||||
81bcc372-0922-4a36-bc35-f7b1e51d6939,e437cc95-e671-489d-b915-76561fba8c7f,ai-youtube-to-blog-converter,YouTube Video to SEO Blog Writer,,"[""https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/239e5a41-2515-4e1c-96ef-31d0d37ecbeb.webp"",""https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/c7d96966-786f-4be6-ad7d-3a51c84efc0e.png"",""https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/0275a74c-e2c2-4e29-a6e4-3a616c3c35dd.png""]",false,One link. One click. One powerful blog post.,"Effortlessly transform your YouTube videos into high-quality, SEO-optimized blog posts.
|
||||
|
||||
Your videos deserve a second life—in writing.
|
||||
Make your content work twice as hard by repurposing it into engaging, searchable articles.
|
||||
|
||||
Perfect for content creators, marketers, and bloggers, this tool analyzes video content and generates well-structured blog posts tailored to your tone, audience, and word count. Just paste a YouTube URL and let the AI handle the rest.
|
||||
|
||||
FEATURES
|
||||
|
||||
• CONTENT ANALYSIS
|
||||
Extracts key points from the video while preserving your message and intent.
|
||||
|
||||
• CUSTOMIZABLE OUTPUT
|
||||
Select a tone that fits your audience: casual, professional, educational, or formal.
|
||||
|
||||
• SEO OPTIMIZATION
|
||||
Automatically creates engaging titles and structured subheadings for better search visibility.
|
||||
|
||||
• USER-FRIENDLY
|
||||
Repurpose your videos into written content to expand your reach and improve accessibility.
|
||||
|
||||
Whether you're looking to grow your blog, boost SEO, or simply get more out of your content, the AI YouTube-to-Blog Converter makes it effortless.
|
||||
","[""writing""]",true,true
|
||||
5c3510d2-fc8b-4053-8e19-67f53c86eb1a,f2cc74bb-f43f-4395-9c35-ecb30b5b4fc9,ai-webpage-copy-improver,AI Webpage Copy Improver,,"[""https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/d562d26f-5891-4b09-8859-fbb205972313.jpg""]",false,Boost Your Website's Search Engine Performance,"Elevate your web content with this powerful AI Webpage Copy Improver. Designed for marketers, SEO specialists, and web developers, this tool analyses and enhances website copy for maximum impact. Using advanced language models, it optimizes text for better clarity, SEO performance, and increased conversion rates. The AI examines your existing content, identifies areas for improvement, and generates refined copy that maintains your brand voice while boosting engagement. From homepage headlines to product descriptions, transform your web presence with AI-driven insights. Improve readability, incorporate targeted keywords, and craft compelling calls-to-action - all with the click of a button. Take your digital marketing to the next level with the AI Webpage Copy Improver.","[""marketing""]",true,true
|
||||
94d03bd3-7d44-4d47-b60c-edb2f89508d6,b6f6f0d3-49f4-4e3b-8155-ffe9141b32c0,domain-name-finder,Domain Name Finder,,"[""https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/28545e09-b2b8-4916-b4c6-67f982510a78.jpeg""]",false,Instantly generate brand-ready domain names that are actually available,"Overview:
|
||||
Finding a domain name that fits your brand shouldn’t take hours of searching and failed checks. The Domain Name Finder Agent turns your pitch into hundreds of creative, brand-ready domain ideas—filtered by live availability so every result is actionable.
|
||||
|
||||
How It Works
|
||||
1. Input your product pitch, company name, or core keywords.
|
||||
2. The agent analyzes brand tone, audience, and industry context.
|
||||
3. It generates a list of unique, memorable domains that match your criteria.
|
||||
4. All names are pre-filtered for real-time availability, so you can register immediately.
|
||||
|
||||
|
||||
Business Value
|
||||
Save hours of guesswork and eliminate dead ends. Accelerate brand launches, startup naming, and campaign creation with ready-to-claim domains.
|
||||
|
||||
|
||||
Key Use Cases
|
||||
• Startup Founders: Quickly find brand-ready domains for MVP launches or rebrands.
|
||||
• Marketers: Test name options across campaigns with instant availability data.
|
||||
• Entrepreneurs: Validate ideas faster with instant domain options.","[""business""]",false,true
|
||||
7a831906-daab-426f-9d66-bcf98d869426,516d813b-d1bc-470f-add7-c63a4b2c2bad,ai-function,AI Function,,"[""https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/620e8117-2ee1-4384-89e6-c2ef4ec3d9c9.webp"",""https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/476259e2-5a79-4a7b-8e70-deeebfca70d7.png""]",false,Never Code Again,"AI FUNCTION MAGIC
|
||||
Your AI‑powered assistant for turning plain‑English descriptions into working Python functions.
|
||||
|
||||
HOW IT WORKS
|
||||
1. Describe what the function should do.
|
||||
2. Specify the inputs it needs.
|
||||
3. Receive the generated Python code.
|
||||
|
||||
FEATURES
|
||||
- Effortless Function Generation: convert natural‑language specs into complete functions.
|
||||
- Customizable Inputs: define the parameters that matter to you.
|
||||
- Versatile Use Cases: simulate data, automate tasks, prototype ideas.
|
||||
- Seamless Integration: add the generated function directly to your codebase.
|
||||
|
||||
EXAMPLE
|
||||
Request: “Create a function that generates 20 examples of fake people, each with a name, date of birth, job title, and age.”
|
||||
Input parameter: number_of_people (default 20)
|
||||
Result: a list of dictionaries such as
|
||||
[
|
||||
{ ""name"": ""Emma Martinez"", ""date_of_birth"": ""1992‑11‑03"", ""job_title"": ""Data Analyst"", ""age"": 32 },
|
||||
{ ""name"": ""Liam O’Connor"", ""date_of_birth"": ""1985‑07‑19"", ""job_title"": ""Marketing Manager"", ""age"": 39 },
|
||||
…18 more entries…
|
||||
]","[""development""]",false,true
File diff suppressed because it is too large
@@ -0,0 +1,590 @@
{
|
||||
"id": "7b2e2095-782a-4f8d-adda-e62b661bccf5",
|
||||
"version": 29,
|
||||
"is_active": false,
|
||||
"name": "Unspirational Poster Maker",
|
||||
"description": "This witty AI agent generates hilariously relatable \"motivational\" posters that tackle the everyday struggles of procrastination, overthinking, and workplace chaos with a blend of absurdity and sarcasm. From goldfish facing impossible tasks to cats in existential crises, The Unspirational Poster Maker designs tongue-in-cheek graphics and captions that mock productivity clich\u00e9s and embrace our collective struggles to \"get it together.\" Perfect for adding a touch of humour to the workday, these posters remind us that sometimes, all we can do is laugh at the chaos.",
|
||||
"instructions": null,
|
||||
"recommended_schedule_cron": null,
|
||||
"nodes": [
|
||||
{
|
||||
"id": "5ac3727a-1ea7-436b-a902-ef1bfd883a30",
|
||||
"block_id": "363ae599-353e-4804-937e-b2ee3cef3da4",
|
||||
"input_default": {
|
||||
"name": "Generated Image",
|
||||
"description": "The resulting generated image ready for you to review and post."
|
||||
},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": 2329.937006807125,
|
||||
"y": 80.49068076698347
|
||||
}
|
||||
},
|
||||
"input_links": [
|
||||
{
|
||||
"id": "c6c511e8-e6a4-4969-9bc8-f67d60c1e229",
|
||||
"source_id": "86665e90-ffbf-48fb-ad3f-e5d31fd50c51",
|
||||
"sink_id": "5ac3727a-1ea7-436b-a902-ef1bfd883a30",
|
||||
"source_name": "result",
|
||||
"sink_name": "value",
|
||||
"is_static": false
|
||||
},
|
||||
{
|
||||
"id": "20845dda-91de-4508-8077-0504b1a5ae03",
|
||||
"source_id": "28bda769-b88b-44c9-be5c-52c2667f137e",
|
||||
"sink_id": "5ac3727a-1ea7-436b-a902-ef1bfd883a30",
|
||||
"source_name": "result",
|
||||
"sink_name": "value",
|
||||
"is_static": false
|
||||
},
|
||||
{
|
||||
"id": "6524c611-774b-45e9-899d-9a6aa80c549c",
|
||||
"source_id": "e7cdc1a2-4427-4a8a-a31b-63c8e74842f8",
|
||||
"sink_id": "5ac3727a-1ea7-436b-a902-ef1bfd883a30",
|
||||
"source_name": "result",
|
||||
"sink_name": "value",
|
||||
"is_static": false
|
||||
},
|
||||
{
|
||||
"id": "714a0821-e5ba-4af7-9432-50491adda7b1",
|
||||
"source_id": "576c5677-9050-4d1c-aad4-36b820c04fef",
|
||||
"sink_id": "5ac3727a-1ea7-436b-a902-ef1bfd883a30",
|
||||
"source_name": "result",
|
||||
"sink_name": "value",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"output_links": [],
|
||||
"graph_id": "7b2e2095-782a-4f8d-adda-e62b661bccf5",
|
||||
"graph_version": 29,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
},
|
||||
{
|
||||
"id": "7e026d19-f9a6-412f-8082-610f9ba0c410",
|
||||
"block_id": "c0a8e994-ebf1-4a9c-a4d8-89d09c86741b",
|
||||
"input_default": {
|
||||
"name": "Theme",
|
||||
"value": "Cooking"
|
||||
},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": -1219.5966324967521,
|
||||
"y": 80.50339731789956
|
||||
}
|
||||
},
|
||||
"input_links": [],
|
||||
"output_links": [
|
||||
{
|
||||
"id": "8c2bd1f7-b17b-4835-81b6-bb336097aa7a",
|
||||
"source_id": "7e026d19-f9a6-412f-8082-610f9ba0c410",
|
||||
"sink_id": "7543b9b0-0409-4cf8-bc4e-e0336273e2c4",
|
||||
"source_name": "result",
|
||||
"sink_name": "prompt_values_#_THEME",
|
||||
"is_static": true
|
||||
}
|
||||
],
|
||||
"graph_id": "7b2e2095-782a-4f8d-adda-e62b661bccf5",
|
||||
"graph_version": 29,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
},
|
||||
{
|
||||
"id": "28bda769-b88b-44c9-be5c-52c2667f137e",
|
||||
"block_id": "6ab085e2-20b3-4055-bc3e-08036e01eca6",
|
||||
"input_default": {
|
||||
"upscale": "No Upscale"
|
||||
},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": 1132.373897280427,
|
||||
"y": 88.44610377514573
|
||||
}
|
||||
},
|
||||
"input_links": [
|
||||
{
|
||||
"id": "54588c74-e090-4e49-89e4-844b9952a585",
|
||||
"source_id": "7543b9b0-0409-4cf8-bc4e-e0336273e2c4",
|
||||
"sink_id": "28bda769-b88b-44c9-be5c-52c2667f137e",
|
||||
"source_name": "response",
|
||||
"sink_name": "prompt",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"output_links": [
|
||||
{
|
||||
"id": "20845dda-91de-4508-8077-0504b1a5ae03",
|
||||
"source_id": "28bda769-b88b-44c9-be5c-52c2667f137e",
|
||||
"sink_id": "5ac3727a-1ea7-436b-a902-ef1bfd883a30",
|
||||
"source_name": "result",
|
||||
"sink_name": "value",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"graph_id": "7b2e2095-782a-4f8d-adda-e62b661bccf5",
|
||||
"graph_version": 29,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
},
|
||||
{
|
||||
"id": "e7cdc1a2-4427-4a8a-a31b-63c8e74842f8",
|
||||
"block_id": "6ab085e2-20b3-4055-bc3e-08036e01eca6",
|
||||
"input_default": {
|
||||
"upscale": "No Upscale"
|
||||
},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": 590.7543882245375,
|
||||
"y": 85.69546832466654
|
||||
}
|
||||
},
|
||||
"input_links": [
|
||||
{
|
||||
"id": "66646786-3006-4417-a6b7-0158f2603d1d",
|
||||
"source_id": "7543b9b0-0409-4cf8-bc4e-e0336273e2c4",
|
||||
"sink_id": "e7cdc1a2-4427-4a8a-a31b-63c8e74842f8",
|
||||
"source_name": "response",
|
||||
"sink_name": "prompt",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"output_links": [
|
||||
{
|
||||
"id": "6524c611-774b-45e9-899d-9a6aa80c549c",
|
||||
"source_id": "e7cdc1a2-4427-4a8a-a31b-63c8e74842f8",
|
||||
"sink_id": "5ac3727a-1ea7-436b-a902-ef1bfd883a30",
|
||||
"source_name": "result",
|
||||
"sink_name": "value",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"graph_id": "7b2e2095-782a-4f8d-adda-e62b661bccf5",
|
||||
"graph_version": 29,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
},
|
||||
{
|
||||
"id": "576c5677-9050-4d1c-aad4-36b820c04fef",
|
||||
"block_id": "6ab085e2-20b3-4055-bc3e-08036e01eca6",
|
||||
"input_default": {
|
||||
"upscale": "No Upscale"
|
||||
},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": 60.48904654237981,
|
||||
"y": 86.06183359510214
|
||||
}
|
||||
},
|
||||
"input_links": [
|
||||
{
|
||||
"id": "201d3e03-bc06-4cee-846d-4c3c804d8857",
|
||||
"source_id": "7543b9b0-0409-4cf8-bc4e-e0336273e2c4",
|
||||
"sink_id": "576c5677-9050-4d1c-aad4-36b820c04fef",
|
||||
"source_name": "response",
|
||||
"sink_name": "prompt",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"output_links": [
|
||||
{
|
||||
"id": "714a0821-e5ba-4af7-9432-50491adda7b1",
|
||||
"source_id": "576c5677-9050-4d1c-aad4-36b820c04fef",
|
||||
"sink_id": "5ac3727a-1ea7-436b-a902-ef1bfd883a30",
|
||||
"source_name": "result",
|
||||
"sink_name": "value",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"graph_id": "7b2e2095-782a-4f8d-adda-e62b661bccf5",
|
||||
"graph_version": 29,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
},
|
||||
{
|
||||
"id": "86665e90-ffbf-48fb-ad3f-e5d31fd50c51",
|
||||
"block_id": "6ab085e2-20b3-4055-bc3e-08036e01eca6",
|
||||
"input_default": {
|
||||
"prompt": "A cat sprawled dramatically across an important-looking document during a work-from-home meeting, making direct eye contact with the camera while knocking over a coffee mug in slow motion. Text Overlay: \"Chaos is a career path. Be the obstacle everyone has to work around.\"",
|
||||
"upscale": "No Upscale"
|
||||
},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": 1668.3572666956795,
|
||||
"y": 89.69665262457966
|
||||
}
|
||||
},
|
||||
"input_links": [
|
||||
{
|
||||
"id": "509b7587-1940-4a06-808d-edde9a74f400",
|
||||
"source_id": "7543b9b0-0409-4cf8-bc4e-e0336273e2c4",
|
||||
"sink_id": "86665e90-ffbf-48fb-ad3f-e5d31fd50c51",
|
||||
"source_name": "response",
|
||||
"sink_name": "prompt",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"output_links": [
|
||||
{
|
||||
"id": "c6c511e8-e6a4-4969-9bc8-f67d60c1e229",
|
||||
"source_id": "86665e90-ffbf-48fb-ad3f-e5d31fd50c51",
|
||||
"sink_id": "5ac3727a-1ea7-436b-a902-ef1bfd883a30",
|
||||
"source_name": "result",
|
||||
"sink_name": "value",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"graph_id": "7b2e2095-782a-4f8d-adda-e62b661bccf5",
|
||||
"graph_version": 29,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
},
|
||||
{
|
||||
"id": "7543b9b0-0409-4cf8-bc4e-e0336273e2c4",
|
||||
"block_id": "1f292d4a-41a4-4977-9684-7c8d560b9f91",
|
||||
"input_default": {
|
||||
"model": "gpt-4o",
|
||||
"prompt": "<example_output>\nA photo of a sloth lounging on a desk, with its head resting on a keyboard. The keyboard is on top of a laptop with a blank spreadsheet open. A to-do list is placed beside the laptop, with the top item written as \"Do literally anything\". There is a text overlay that says \"If you can't outwork them, outnap them.\".\n</example_output>\n\nCreate a relatable satirical, snarky, user-deprecating motivational style image based on the theme: \"{{THEME}}\".\n\nOutput only the image description and caption, without any additional commentary or formatting.",
|
||||
"prompt_values": {}
|
||||
},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": -561.1139207164056,
|
||||
"y": 78.60434452403524
|
||||
}
|
||||
},
|
||||
"input_links": [
|
||||
{
|
||||
"id": "8c2bd1f7-b17b-4835-81b6-bb336097aa7a",
|
||||
"source_id": "7e026d19-f9a6-412f-8082-610f9ba0c410",
|
||||
"sink_id": "7543b9b0-0409-4cf8-bc4e-e0336273e2c4",
|
||||
"source_name": "result",
|
||||
"sink_name": "prompt_values_#_THEME",
|
||||
"is_static": true
|
||||
}
|
||||
],
|
||||
"output_links": [
|
||||
{
|
||||
"id": "54588c74-e090-4e49-89e4-844b9952a585",
|
||||
"source_id": "7543b9b0-0409-4cf8-bc4e-e0336273e2c4",
|
||||
"sink_id": "28bda769-b88b-44c9-be5c-52c2667f137e",
|
||||
"source_name": "response",
|
||||
"sink_name": "prompt",
|
||||
"is_static": false
|
||||
},
|
||||
{
|
||||
"id": "201d3e03-bc06-4cee-846d-4c3c804d8857",
|
||||
"source_id": "7543b9b0-0409-4cf8-bc4e-e0336273e2c4",
|
||||
"sink_id": "576c5677-9050-4d1c-aad4-36b820c04fef",
|
||||
"source_name": "response",
|
||||
"sink_name": "prompt",
|
||||
"is_static": false
|
||||
},
|
||||
{
|
||||
"id": "509b7587-1940-4a06-808d-edde9a74f400",
|
||||
"source_id": "7543b9b0-0409-4cf8-bc4e-e0336273e2c4",
|
||||
"sink_id": "86665e90-ffbf-48fb-ad3f-e5d31fd50c51",
|
||||
"source_name": "response",
|
||||
"sink_name": "prompt",
|
||||
"is_static": false
|
||||
},
|
||||
{
|
||||
"id": "66646786-3006-4417-a6b7-0158f2603d1d",
|
||||
"source_id": "7543b9b0-0409-4cf8-bc4e-e0336273e2c4",
|
||||
"sink_id": "e7cdc1a2-4427-4a8a-a31b-63c8e74842f8",
|
||||
"source_name": "response",
|
||||
"sink_name": "prompt",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"graph_id": "7b2e2095-782a-4f8d-adda-e62b661bccf5",
|
||||
"graph_version": 29,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
}
|
||||
],
|
||||
"links": [
|
||||
{
|
||||
"id": "66646786-3006-4417-a6b7-0158f2603d1d",
|
||||
"source_id": "7543b9b0-0409-4cf8-bc4e-e0336273e2c4",
|
||||
"sink_id": "e7cdc1a2-4427-4a8a-a31b-63c8e74842f8",
|
||||
"source_name": "response",
|
||||
"sink_name": "prompt",
|
||||
"is_static": false
|
||||
},
|
||||
{
|
||||
"id": "c6c511e8-e6a4-4969-9bc8-f67d60c1e229",
|
||||
"source_id": "86665e90-ffbf-48fb-ad3f-e5d31fd50c51",
|
||||
"sink_id": "5ac3727a-1ea7-436b-a902-ef1bfd883a30",
|
||||
"source_name": "result",
|
||||
"sink_name": "value",
|
||||
"is_static": false
|
||||
},
|
||||
{
|
||||
"id": "6524c611-774b-45e9-899d-9a6aa80c549c",
|
||||
"source_id": "e7cdc1a2-4427-4a8a-a31b-63c8e74842f8",
|
||||
"sink_id": "5ac3727a-1ea7-436b-a902-ef1bfd883a30",
|
||||
"source_name": "result",
|
||||
"sink_name": "value",
|
||||
"is_static": false
|
||||
},
|
||||
{
|
||||
"id": "20845dda-91de-4508-8077-0504b1a5ae03",
|
||||
"source_id": "28bda769-b88b-44c9-be5c-52c2667f137e",
|
||||
"sink_id": "5ac3727a-1ea7-436b-a902-ef1bfd883a30",
|
||||
"source_name": "result",
|
||||
"sink_name": "value",
|
||||
"is_static": false
|
||||
},
|
||||
{
|
||||
"id": "8c2bd1f7-b17b-4835-81b6-bb336097aa7a",
|
||||
"source_id": "7e026d19-f9a6-412f-8082-610f9ba0c410",
|
||||
"sink_id": "7543b9b0-0409-4cf8-bc4e-e0336273e2c4",
|
||||
"source_name": "result",
|
||||
"sink_name": "prompt_values_#_THEME",
|
||||
"is_static": true
|
||||
},
|
||||
{
|
||||
"id": "201d3e03-bc06-4cee-846d-4c3c804d8857",
|
||||
"source_id": "7543b9b0-0409-4cf8-bc4e-e0336273e2c4",
|
||||
"sink_id": "576c5677-9050-4d1c-aad4-36b820c04fef",
|
||||
"source_name": "response",
|
||||
"sink_name": "prompt",
|
||||
"is_static": false
|
||||
},
|
||||
{
|
||||
"id": "714a0821-e5ba-4af7-9432-50491adda7b1",
|
||||
"source_id": "576c5677-9050-4d1c-aad4-36b820c04fef",
|
||||
"sink_id": "5ac3727a-1ea7-436b-a902-ef1bfd883a30",
|
||||
"source_name": "result",
|
||||
"sink_name": "value",
|
||||
"is_static": false
|
||||
},
|
||||
{
|
||||
"id": "54588c74-e090-4e49-89e4-844b9952a585",
|
||||
"source_id": "7543b9b0-0409-4cf8-bc4e-e0336273e2c4",
|
||||
"sink_id": "28bda769-b88b-44c9-be5c-52c2667f137e",
|
||||
"source_name": "response",
|
||||
"sink_name": "prompt",
|
||||
"is_static": false
|
||||
},
|
||||
{
|
||||
"id": "509b7587-1940-4a06-808d-edde9a74f400",
|
||||
"source_id": "7543b9b0-0409-4cf8-bc4e-e0336273e2c4",
|
||||
"sink_id": "86665e90-ffbf-48fb-ad3f-e5d31fd50c51",
|
||||
"source_name": "response",
|
||||
"sink_name": "prompt",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"forked_from_id": null,
|
||||
"forked_from_version": null,
|
||||
"sub_graphs": [],
|
||||
"user_id": "",
|
||||
"created_at": "2024-12-20T19:58:34.390Z",
|
||||
"input_schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"Theme": {
|
||||
"advanced": false,
|
||||
"secret": false,
|
||||
"title": "Theme",
|
||||
"default": "Cooking"
|
||||
}
|
||||
},
|
||||
"required": []
|
||||
},
|
||||
"output_schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"Generated Image": {
|
||||
"advanced": false,
|
||||
"secret": false,
|
||||
"title": "Generated Image",
|
||||
"description": "The resulting generated image ready for you to review and post."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"Generated Image"
|
||||
]
|
||||
},
|
||||
"has_external_trigger": false,
|
||||
"has_human_in_the_loop": false,
|
||||
"trigger_setup_info": null,
|
||||
"credentials_input_schema": {
|
||||
"properties": {
|
||||
"ideogram_api_key_credentials": {
|
||||
"credentials_provider": [
|
||||
"ideogram"
|
||||
],
|
||||
"credentials_types": [
|
||||
"api_key"
|
||||
],
|
||||
"properties": {
|
||||
"id": {
|
||||
"title": "Id",
|
||||
"type": "string"
|
||||
},
|
||||
"title": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"default": null,
|
||||
"title": "Title"
|
||||
},
|
||||
"provider": {
|
||||
"const": "ideogram",
|
||||
"title": "Provider",
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"const": "api_key",
|
||||
"title": "Type",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"provider",
|
||||
"type"
|
||||
],
|
||||
"title": "CredentialsMetaInput[Literal[<ProviderName.IDEOGRAM: 'ideogram'>], Literal['api_key']]",
|
||||
"type": "object",
|
||||
"discriminator_values": []
|
||||
},
|
||||
"openai_api_key_credentials": {
|
||||
"credentials_provider": [
|
||||
"openai"
|
||||
],
|
||||
"credentials_types": [
|
||||
"api_key"
|
||||
],
|
||||
"properties": {
|
||||
"id": {
|
||||
"title": "Id",
|
||||
"type": "string"
|
||||
},
|
||||
"title": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"default": null,
|
||||
"title": "Title"
|
||||
},
|
||||
"provider": {
|
||||
"const": "openai",
|
||||
"title": "Provider",
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"const": "api_key",
|
||||
"title": "Type",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"provider",
|
||||
"type"
|
||||
],
|
||||
"title": "CredentialsMetaInput[Literal[<ProviderName.OPENAI: 'openai'>], Literal['api_key']]",
|
||||
"type": "object",
|
||||
"discriminator": "model",
|
||||
"discriminator_mapping": {
|
||||
"Llama-3.3-70B-Instruct": "llama_api",
|
||||
"Llama-3.3-8B-Instruct": "llama_api",
|
||||
"Llama-4-Maverick-17B-128E-Instruct-FP8": "llama_api",
|
||||
"Llama-4-Scout-17B-16E-Instruct-FP8": "llama_api",
|
||||
"Qwen/Qwen2.5-72B-Instruct-Turbo": "aiml_api",
|
||||
"amazon/nova-lite-v1": "open_router",
|
||||
"amazon/nova-micro-v1": "open_router",
|
||||
"amazon/nova-pro-v1": "open_router",
|
||||
"claude-3-7-sonnet-20250219": "anthropic",
|
||||
"claude-3-haiku-20240307": "anthropic",
|
||||
"claude-haiku-4-5-20251001": "anthropic",
|
||||
"claude-opus-4-1-20250805": "anthropic",
|
||||
"claude-opus-4-20250514": "anthropic",
|
||||
"claude-opus-4-5-20251101": "anthropic",
|
||||
"claude-sonnet-4-20250514": "anthropic",
|
||||
"claude-sonnet-4-5-20250929": "anthropic",
|
||||
"cohere/command-r-08-2024": "open_router",
|
||||
"cohere/command-r-plus-08-2024": "open_router",
|
||||
"deepseek/deepseek-chat": "open_router",
|
||||
"deepseek/deepseek-r1-0528": "open_router",
|
||||
"dolphin-mistral:latest": "ollama",
|
||||
"google/gemini-2.0-flash-001": "open_router",
|
||||
"google/gemini-2.0-flash-lite-001": "open_router",
|
||||
"google/gemini-2.5-flash": "open_router",
|
||||
"google/gemini-2.5-flash-lite-preview-06-17": "open_router",
|
||||
"google/gemini-2.5-pro-preview-03-25": "open_router",
|
||||
"google/gemini-3-pro-preview": "open_router",
|
||||
"gpt-3.5-turbo": "openai",
|
||||
"gpt-4-turbo": "openai",
|
||||
"gpt-4.1-2025-04-14": "openai",
|
||||
"gpt-4.1-mini-2025-04-14": "openai",
|
||||
"gpt-4o": "openai",
|
||||
"gpt-4o-mini": "openai",
|
||||
"gpt-5-2025-08-07": "openai",
|
||||
"gpt-5-chat-latest": "openai",
|
||||
"gpt-5-mini-2025-08-07": "openai",
|
||||
"gpt-5-nano-2025-08-07": "openai",
|
||||
"gpt-5.1-2025-11-13": "openai",
|
||||
"gryphe/mythomax-l2-13b": "open_router",
|
||||
"llama-3.1-8b-instant": "groq",
|
||||
"llama-3.3-70b-versatile": "groq",
|
||||
"llama3": "ollama",
|
||||
"llama3.1:405b": "ollama",
|
||||
"llama3.2": "ollama",
|
||||
"llama3.3": "ollama",
|
||||
"meta-llama/Llama-3.2-3B-Instruct-Turbo": "aiml_api",
|
||||
"meta-llama/Llama-3.3-70B-Instruct-Turbo": "aiml_api",
|
||||
"meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo": "aiml_api",
|
||||
"meta-llama/llama-4-maverick": "open_router",
|
||||
"meta-llama/llama-4-scout": "open_router",
|
||||
"microsoft/wizardlm-2-8x22b": "open_router",
|
||||
"mistralai/mistral-nemo": "open_router",
|
||||
"moonshotai/kimi-k2": "open_router",
|
||||
"nousresearch/hermes-3-llama-3.1-405b": "open_router",
|
||||
"nousresearch/hermes-3-llama-3.1-70b": "open_router",
|
||||
"nvidia/llama-3.1-nemotron-70b-instruct": "aiml_api",
|
||||
"o1": "openai",
|
||||
"o1-mini": "openai",
|
||||
"o3-2025-04-16": "openai",
|
||||
"o3-mini": "openai",
|
||||
"openai/gpt-oss-120b": "open_router",
|
||||
"openai/gpt-oss-20b": "open_router",
|
||||
"perplexity/sonar": "open_router",
|
||||
"perplexity/sonar-deep-research": "open_router",
|
||||
"perplexity/sonar-pro": "open_router",
|
||||
"qwen/qwen3-235b-a22b-thinking-2507": "open_router",
|
||||
"qwen/qwen3-coder": "open_router",
|
||||
"v0-1.0-md": "v0",
|
||||
"v0-1.5-lg": "v0",
|
||||
"v0-1.5-md": "v0",
|
||||
"x-ai/grok-4": "open_router",
|
||||
"x-ai/grok-4-fast": "open_router",
|
||||
"x-ai/grok-4.1-fast": "open_router",
|
||||
"x-ai/grok-code-fast-1": "open_router"
|
||||
},
|
||||
"discriminator_values": [
|
||||
"gpt-4o"
|
||||
]
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"ideogram_api_key_credentials",
|
||||
"openai_api_key_credentials"
|
||||
],
|
||||
"title": "UnspirationalPosterMakerCredentialsInputSchema",
|
||||
"type": "object"
|
||||
}
|
||||
}
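For orientation, each agent JSON file added in this commit is a graph export: a nodes array (each node referencing a block_id, default inputs, and a canvas position) plus a flat links array wiring one node's output pin (source_name) to another node's input pin (sink_name). The sketch below shows one way to read such a file and print its wiring; only the field names are taken from the JSON above, while the file path and the summarise_graph helper are illustrative and not part of the platform.

```python
# Illustrative only: walk the exported graph JSON and list its edges.
# Field names (nodes, links, source_id, sink_id, ...) match the file above;
# the path and helper name below are hypothetical.
import json
from collections import defaultdict
from pathlib import Path


def summarise_graph(path: Path) -> None:
    graph = json.loads(path.read_text())
    blocks = {node["id"]: node["block_id"] for node in graph["nodes"]}

    # Group the flat "links" list by consuming node so each edge reads as
    # "<source output> feeds <sink input>".
    incoming = defaultdict(list)
    for link in graph["links"]:
        incoming[link["sink_id"]].append(link)

    print(graph["name"], f"v{graph['version']}")
    for node_id, links in incoming.items():
        for link in links:
            print(
                f"  {link['source_id'][:8]}.{link['source_name']}"
                f" -> {node_id[:8]}.{link['sink_name']}"
                f" (block {blocks[node_id][:8]})"
            )


if __name__ == "__main__":
    summarise_graph(Path("agents/unspirational_poster_maker.json"))  # hypothetical filename
```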
File diff suppressed because it is too large
@@ -0,0 +1,447 @@
{
|
||||
"id": "622849a7-5848-4838-894d-01f8f07e3fad",
|
||||
"version": 18,
|
||||
"is_active": true,
|
||||
"name": "AI Function",
|
||||
"description": "## AI-Powered Function Magic: Never code again!\nProvide a description of a python function and your inputs and AI will provide the results.",
|
||||
"instructions": null,
|
||||
"recommended_schedule_cron": null,
|
||||
"nodes": [
|
||||
{
|
||||
"id": "26ff2973-3f9a-451d-b902-d45e5da0a7fe",
|
||||
"block_id": "363ae599-353e-4804-937e-b2ee3cef3da4",
|
||||
"input_default": {
|
||||
"name": "return",
|
||||
"title": null,
|
||||
"value": null,
|
||||
"format": "",
|
||||
"secret": false,
|
||||
"advanced": false,
|
||||
"description": "The value returned by the function"
|
||||
},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": 1598.8622921127233,
|
||||
"y": 291.59140862204725
|
||||
}
|
||||
},
|
||||
"input_links": [
|
||||
{
|
||||
"id": "caecc1de-fdbc-4fd9-9570-074057bb15f9",
|
||||
"source_id": "c5d16ee4-de9e-4d93-bf32-ac2d15760d5b",
|
||||
"sink_id": "26ff2973-3f9a-451d-b902-d45e5da0a7fe",
|
||||
"source_name": "response",
|
||||
"sink_name": "value",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"output_links": [],
|
||||
"graph_id": "622849a7-5848-4838-894d-01f8f07e3fad",
|
||||
"graph_version": 18,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
},
|
||||
{
|
||||
"id": "c5d16ee4-de9e-4d93-bf32-ac2d15760d5b",
|
||||
"block_id": "1f292d4a-41a4-4977-9684-7c8d560b9f91",
|
||||
"input_default": {
|
||||
"model": "o3-mini",
|
||||
"retry": 3,
|
||||
"prompt": "{{ARGS}}",
|
||||
"sys_prompt": "You are now the following python function:\n\n```\n# {{DESCRIPTION}}\n{{FUNCTION}}\n```\n\nThe user will provide your input arguments.\nOnly respond with your `return` value.\nDo not include any commentary or additional text in your response. \nDo not include ``` backticks or any other decorators.",
|
||||
"ollama_host": "localhost:11434",
|
||||
"prompt_values": {}
|
||||
},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": 995,
|
||||
"y": 290.50000000000006
|
||||
}
|
||||
},
|
||||
"input_links": [
|
||||
{
|
||||
"id": "dc7cb15f-76cc-4533-b96c-dd9e3f7f75ed",
|
||||
"source_id": "4eab3a55-20f2-4c1d-804c-7377ba8202d2",
|
||||
"sink_id": "c5d16ee4-de9e-4d93-bf32-ac2d15760d5b",
|
||||
"source_name": "result",
|
||||
"sink_name": "prompt_values_#_FUNCTION",
|
||||
"is_static": true
|
||||
},
|
||||
{
|
||||
"id": "093bdca5-9f44-42f9-8e1c-276dd2971675",
|
||||
"source_id": "844530de-2354-46d8-b748-67306b7bbca1",
|
||||
"sink_id": "c5d16ee4-de9e-4d93-bf32-ac2d15760d5b",
|
||||
"source_name": "result",
|
||||
"sink_name": "prompt_values_#_ARGS",
|
||||
"is_static": true
|
||||
},
|
||||
{
|
||||
"id": "6c63d8ee-b63d-4ff6-bae0-7db8f99bb7af",
|
||||
"source_id": "0fd6ef54-c1cd-478d-b764-17e40f882b99",
|
||||
"sink_id": "c5d16ee4-de9e-4d93-bf32-ac2d15760d5b",
|
||||
"source_name": "result",
|
||||
"sink_name": "prompt_values_#_DESCRIPTION",
|
||||
"is_static": true
|
||||
}
|
||||
],
|
||||
"output_links": [
|
||||
{
|
||||
"id": "caecc1de-fdbc-4fd9-9570-074057bb15f9",
|
||||
"source_id": "c5d16ee4-de9e-4d93-bf32-ac2d15760d5b",
|
||||
"sink_id": "26ff2973-3f9a-451d-b902-d45e5da0a7fe",
|
||||
"source_name": "response",
|
||||
"sink_name": "value",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"graph_id": "622849a7-5848-4838-894d-01f8f07e3fad",
|
||||
"graph_version": 18,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
},
|
||||
{
|
||||
"id": "4eab3a55-20f2-4c1d-804c-7377ba8202d2",
|
||||
"block_id": "7fcd3bcb-8e1b-4e69-903d-32d3d4a92158",
|
||||
"input_default": {
|
||||
"name": "Function Definition",
|
||||
"title": null,
|
||||
"value": "def fake_people(n: int) -> list[dict]:",
|
||||
"secret": false,
|
||||
"advanced": false,
|
||||
"description": "The function definition (text). This is what you would type on the first line of the function when programming.\n\ne.g \"def fake_people(n: int) -> list[dict]:\"",
|
||||
"placeholder_values": []
|
||||
},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": -672.6908629664215,
|
||||
"y": 302.42044359789116
|
||||
}
|
||||
},
|
||||
"input_links": [],
|
||||
"output_links": [
|
||||
{
|
||||
"id": "dc7cb15f-76cc-4533-b96c-dd9e3f7f75ed",
|
||||
"source_id": "4eab3a55-20f2-4c1d-804c-7377ba8202d2",
|
||||
"sink_id": "c5d16ee4-de9e-4d93-bf32-ac2d15760d5b",
|
||||
"source_name": "result",
|
||||
"sink_name": "prompt_values_#_FUNCTION",
|
||||
"is_static": true
|
||||
}
|
||||
],
|
||||
"graph_id": "622849a7-5848-4838-894d-01f8f07e3fad",
|
||||
"graph_version": 18,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
},
|
||||
{
|
||||
"id": "844530de-2354-46d8-b748-67306b7bbca1",
|
||||
"block_id": "7fcd3bcb-8e1b-4e69-903d-32d3d4a92158",
|
||||
"input_default": {
|
||||
"name": "Arguments",
|
||||
"title": null,
|
||||
"value": "20",
|
||||
"secret": false,
|
||||
"advanced": false,
|
||||
"description": "The function's inputs\n\ne.g \"20\"",
|
||||
"placeholder_values": []
|
||||
},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": -158.1623599617334,
|
||||
"y": 295.410856928333
|
||||
}
|
||||
},
|
||||
"input_links": [],
|
||||
"output_links": [
|
||||
{
|
||||
"id": "093bdca5-9f44-42f9-8e1c-276dd2971675",
|
||||
"source_id": "844530de-2354-46d8-b748-67306b7bbca1",
|
||||
"sink_id": "c5d16ee4-de9e-4d93-bf32-ac2d15760d5b",
|
||||
"source_name": "result",
|
||||
"sink_name": "prompt_values_#_ARGS",
|
||||
"is_static": true
|
||||
}
|
||||
],
|
||||
"graph_id": "622849a7-5848-4838-894d-01f8f07e3fad",
|
||||
"graph_version": 18,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
},
|
||||
{
|
||||
"id": "0fd6ef54-c1cd-478d-b764-17e40f882b99",
|
||||
"block_id": "90a56ffb-7024-4b2b-ab50-e26c5e5ab8ba",
|
||||
"input_default": {
|
||||
"name": "Description",
|
||||
"title": null,
|
||||
"value": "Generates n examples of fake data representing people, each with a name, DoB, Job title, and an age.",
|
||||
"secret": false,
|
||||
"advanced": false,
|
||||
"description": "Describe what the function does.\n\ne.g \"Generates n examples of fake data representing people, each with a name, DoB, Job title, and an age.\"",
|
||||
"placeholder_values": []
|
||||
},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": 374.4548658057796,
|
||||
"y": 290.3779121974126
|
||||
}
|
||||
},
|
||||
"input_links": [],
|
||||
"output_links": [
|
||||
{
|
||||
"id": "6c63d8ee-b63d-4ff6-bae0-7db8f99bb7af",
|
||||
"source_id": "0fd6ef54-c1cd-478d-b764-17e40f882b99",
|
||||
"sink_id": "c5d16ee4-de9e-4d93-bf32-ac2d15760d5b",
|
||||
"source_name": "result",
|
||||
"sink_name": "prompt_values_#_DESCRIPTION",
|
||||
"is_static": true
|
||||
}
|
||||
],
|
||||
"graph_id": "622849a7-5848-4838-894d-01f8f07e3fad",
|
||||
"graph_version": 18,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
}
|
||||
],
|
||||
"links": [
|
||||
{
|
||||
"id": "caecc1de-fdbc-4fd9-9570-074057bb15f9",
|
||||
"source_id": "c5d16ee4-de9e-4d93-bf32-ac2d15760d5b",
|
||||
"sink_id": "26ff2973-3f9a-451d-b902-d45e5da0a7fe",
|
||||
"source_name": "response",
|
||||
"sink_name": "value",
|
||||
"is_static": false
|
||||
},
|
||||
{
|
||||
"id": "6c63d8ee-b63d-4ff6-bae0-7db8f99bb7af",
|
||||
"source_id": "0fd6ef54-c1cd-478d-b764-17e40f882b99",
|
||||
"sink_id": "c5d16ee4-de9e-4d93-bf32-ac2d15760d5b",
|
||||
"source_name": "result",
|
||||
"sink_name": "prompt_values_#_DESCRIPTION",
|
||||
"is_static": true
|
||||
},
|
||||
{
|
||||
"id": "093bdca5-9f44-42f9-8e1c-276dd2971675",
|
||||
"source_id": "844530de-2354-46d8-b748-67306b7bbca1",
|
||||
"sink_id": "c5d16ee4-de9e-4d93-bf32-ac2d15760d5b",
|
||||
"source_name": "result",
|
||||
"sink_name": "prompt_values_#_ARGS",
|
||||
"is_static": true
|
||||
},
|
||||
{
|
||||
"id": "dc7cb15f-76cc-4533-b96c-dd9e3f7f75ed",
|
||||
"source_id": "4eab3a55-20f2-4c1d-804c-7377ba8202d2",
|
||||
"sink_id": "c5d16ee4-de9e-4d93-bf32-ac2d15760d5b",
|
||||
"source_name": "result",
|
||||
"sink_name": "prompt_values_#_FUNCTION",
|
||||
"is_static": true
|
||||
}
|
||||
],
|
||||
"forked_from_id": null,
|
||||
"forked_from_version": null,
|
||||
"sub_graphs": [],
|
||||
"user_id": "",
|
||||
"created_at": "2025-04-19T17:10:48.857Z",
|
||||
"input_schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"Function Definition": {
|
||||
"advanced": false,
|
||||
"anyOf": [
|
||||
{
|
||||
"format": "short-text",
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"secret": false,
|
||||
"title": "Function Definition",
|
||||
"description": "The function definition (text). This is what you would type on the first line of the function when programming.\n\ne.g \"def fake_people(n: int) -> list[dict]:\"",
|
||||
"default": "def fake_people(n: int) -> list[dict]:"
|
||||
},
|
||||
"Arguments": {
|
||||
"advanced": false,
|
||||
"anyOf": [
|
||||
{
|
||||
"format": "short-text",
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"secret": false,
|
||||
"title": "Arguments",
|
||||
"description": "The function's inputs\n\ne.g \"20\"",
|
||||
"default": "20"
|
||||
},
|
||||
"Description": {
|
||||
"advanced": false,
|
||||
"anyOf": [
|
||||
{
|
||||
"format": "long-text",
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"secret": false,
|
||||
"title": "Description",
|
||||
"description": "Describe what the function does.\n\ne.g \"Generates n examples of fake data representing people, each with a name, DoB, Job title, and an age.\"",
|
||||
"default": "Generates n examples of fake data representing people, each with a name, DoB, Job title, and an age."
|
||||
}
|
||||
},
|
||||
"required": []
|
||||
},
|
||||
"output_schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"return": {
|
||||
"advanced": false,
|
||||
"secret": false,
|
||||
"title": "return",
|
||||
"description": "The value returned by the function"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"return"
|
||||
]
|
||||
},
|
||||
"has_external_trigger": false,
|
||||
"has_human_in_the_loop": false,
|
||||
"trigger_setup_info": null,
|
||||
"credentials_input_schema": {
|
||||
"properties": {
|
||||
"openai_api_key_credentials": {
|
||||
"credentials_provider": [
|
||||
"openai"
|
||||
],
|
||||
"credentials_types": [
|
||||
"api_key"
|
||||
],
|
||||
"properties": {
|
||||
"id": {
|
||||
"title": "Id",
|
||||
"type": "string"
|
||||
},
|
||||
"title": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"default": null,
|
||||
"title": "Title"
|
||||
},
|
||||
"provider": {
|
||||
"const": "openai",
|
||||
"title": "Provider",
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"const": "api_key",
|
||||
"title": "Type",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"provider",
|
||||
"type"
|
||||
],
|
||||
"title": "CredentialsMetaInput[Literal[<ProviderName.OPENAI: 'openai'>], Literal['api_key']]",
|
||||
"type": "object",
|
||||
"discriminator": "model",
|
||||
"discriminator_mapping": {
|
||||
"Llama-3.3-70B-Instruct": "llama_api",
|
||||
"Llama-3.3-8B-Instruct": "llama_api",
|
||||
"Llama-4-Maverick-17B-128E-Instruct-FP8": "llama_api",
|
||||
"Llama-4-Scout-17B-16E-Instruct-FP8": "llama_api",
|
||||
"Qwen/Qwen2.5-72B-Instruct-Turbo": "aiml_api",
|
||||
"amazon/nova-lite-v1": "open_router",
|
||||
"amazon/nova-micro-v1": "open_router",
|
||||
"amazon/nova-pro-v1": "open_router",
|
||||
"claude-3-7-sonnet-20250219": "anthropic",
|
||||
"claude-3-haiku-20240307": "anthropic",
|
||||
"claude-haiku-4-5-20251001": "anthropic",
|
||||
"claude-opus-4-1-20250805": "anthropic",
|
||||
"claude-opus-4-20250514": "anthropic",
|
||||
"claude-opus-4-5-20251101": "anthropic",
|
||||
"claude-sonnet-4-20250514": "anthropic",
|
||||
"claude-sonnet-4-5-20250929": "anthropic",
|
||||
"cohere/command-r-08-2024": "open_router",
|
||||
"cohere/command-r-plus-08-2024": "open_router",
|
||||
"deepseek/deepseek-chat": "open_router",
|
||||
"deepseek/deepseek-r1-0528": "open_router",
|
||||
"dolphin-mistral:latest": "ollama",
|
||||
"google/gemini-2.0-flash-001": "open_router",
|
||||
"google/gemini-2.0-flash-lite-001": "open_router",
|
||||
"google/gemini-2.5-flash": "open_router",
|
||||
"google/gemini-2.5-flash-lite-preview-06-17": "open_router",
|
||||
"google/gemini-2.5-pro-preview-03-25": "open_router",
|
||||
"google/gemini-3-pro-preview": "open_router",
|
||||
"gpt-3.5-turbo": "openai",
|
||||
"gpt-4-turbo": "openai",
|
||||
"gpt-4.1-2025-04-14": "openai",
|
||||
"gpt-4.1-mini-2025-04-14": "openai",
|
||||
"gpt-4o": "openai",
|
||||
"gpt-4o-mini": "openai",
|
||||
"gpt-5-2025-08-07": "openai",
|
||||
"gpt-5-chat-latest": "openai",
|
||||
"gpt-5-mini-2025-08-07": "openai",
|
||||
"gpt-5-nano-2025-08-07": "openai",
|
||||
"gpt-5.1-2025-11-13": "openai",
|
||||
"gryphe/mythomax-l2-13b": "open_router",
|
||||
"llama-3.1-8b-instant": "groq",
|
||||
"llama-3.3-70b-versatile": "groq",
|
||||
"llama3": "ollama",
|
||||
"llama3.1:405b": "ollama",
|
||||
"llama3.2": "ollama",
|
||||
"llama3.3": "ollama",
|
||||
"meta-llama/Llama-3.2-3B-Instruct-Turbo": "aiml_api",
|
||||
"meta-llama/Llama-3.3-70B-Instruct-Turbo": "aiml_api",
|
||||
"meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo": "aiml_api",
|
||||
"meta-llama/llama-4-maverick": "open_router",
|
||||
"meta-llama/llama-4-scout": "open_router",
|
||||
"microsoft/wizardlm-2-8x22b": "open_router",
|
||||
"mistralai/mistral-nemo": "open_router",
|
||||
"moonshotai/kimi-k2": "open_router",
|
||||
"nousresearch/hermes-3-llama-3.1-405b": "open_router",
|
||||
"nousresearch/hermes-3-llama-3.1-70b": "open_router",
|
||||
"nvidia/llama-3.1-nemotron-70b-instruct": "aiml_api",
|
||||
"o1": "openai",
|
||||
"o1-mini": "openai",
|
||||
"o3-2025-04-16": "openai",
|
||||
"o3-mini": "openai",
|
||||
"openai/gpt-oss-120b": "open_router",
|
||||
"openai/gpt-oss-20b": "open_router",
|
||||
"perplexity/sonar": "open_router",
|
||||
"perplexity/sonar-deep-research": "open_router",
|
||||
"perplexity/sonar-pro": "open_router",
|
||||
"qwen/qwen3-235b-a22b-thinking-2507": "open_router",
|
||||
"qwen/qwen3-coder": "open_router",
|
||||
"v0-1.0-md": "v0",
|
||||
"v0-1.5-lg": "v0",
|
||||
"v0-1.5-md": "v0",
|
||||
"x-ai/grok-4": "open_router",
|
||||
"x-ai/grok-4-fast": "open_router",
|
||||
"x-ai/grok-4.1-fast": "open_router",
|
||||
"x-ai/grok-code-fast-1": "open_router"
|
||||
},
|
||||
"discriminator_values": [
|
||||
"o3-mini"
|
||||
]
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"openai_api_key_credentials"
|
||||
],
|
||||
"title": "AIFunctionCredentialsInputSchema",
|
||||
"type": "object"
|
||||
}
|
||||
}
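Each agent file in this change is a serialized graph with the same shape: top-level metadata ("id", "version", "name", "description"), a "nodes" array in which every node references a reusable block through its "block_id", and a "links" array that wires a source node's output pin ("source_name") to a sink node's input pin ("sink_name"). A minimal sketch of inspecting one of these files follows; the filename is hypothetical, since the exact file names under agents/ are not shown in this diff.

import json
from pathlib import Path

# Hypothetical filename used only for illustration.
graph = json.loads(Path("agents/example_agent.json").read_text())

print(graph["name"], "version", graph["version"])
for node in graph["nodes"]:
    print("node", node["id"], "uses block", node["block_id"])
for link in graph["links"]:
    print(
        f'{link["source_id"]}.{link["source_name"]}'
        f' -> {link["sink_id"]}.{link["sink_name"]}'
        f' (static={link["is_static"]})'
    )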
@@ -0,0 +1,403 @@
|
||||
{
|
||||
"id": "ed2091cf-5b27-45a9-b3ea-42396f95b256",
|
||||
"version": 12,
|
||||
"is_active": true,
|
||||
"name": "Flux AI Image Generator",
|
||||
"description": "Transform ideas into breathtaking images with this AI-powered Image Generator. Using cutting-edge Flux AI technology, the tool crafts highly detailed, photorealistic visuals from simple text prompts. Perfect for artists, marketers, and content creators, this generator produces unique images tailored to user specifications. From fantastical scenes to lifelike portraits, users can unleash creativity with professional-quality results in seconds. Easy to use and endlessly versatile, bring imagination to life with the AI Image Generator today!",
|
||||
"instructions": null,
|
||||
"recommended_schedule_cron": null,
|
||||
"nodes": [
|
||||
{
|
||||
"id": "7482c59d-725f-4686-82b9-0dfdc4e92316",
|
||||
"block_id": "cc10ff7b-7753-4ff2-9af6-9399b1a7eddc",
|
||||
"input_default": {
|
||||
"text": "Press the \"Advanced\" toggle and input your replicate API key.\n\nYou can get one here:\nhttps://replicate.com/account/api-tokens\n"
|
||||
},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": 872.8268131538296,
|
||||
"y": 614.9436919065381
|
||||
}
|
||||
},
|
||||
"input_links": [],
|
||||
"output_links": [],
|
||||
"graph_id": "ed2091cf-5b27-45a9-b3ea-42396f95b256",
|
||||
"graph_version": 12,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
},
|
||||
{
|
||||
"id": "0d1dec1a-e4ee-4349-9673-449a01bbf14e",
|
||||
"block_id": "363ae599-353e-4804-937e-b2ee3cef3da4",
|
||||
"input_default": {
|
||||
"name": "Generated Image"
|
||||
},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": 1453.6844137728922,
|
||||
"y": 963.2466395125115
|
||||
}
|
||||
},
|
||||
"input_links": [
|
||||
{
|
||||
"id": "06665d23-2f3d-4445-8f22-573446fcff5b",
|
||||
"source_id": "50bc23e9-f2b7-4959-8710-99679ed9eeea",
|
||||
"sink_id": "0d1dec1a-e4ee-4349-9673-449a01bbf14e",
|
||||
"source_name": "result",
|
||||
"sink_name": "value",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"output_links": [],
|
||||
"graph_id": "ed2091cf-5b27-45a9-b3ea-42396f95b256",
|
||||
"graph_version": 12,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
},
|
||||
{
|
||||
"id": "6f24c45f-1548-4eda-9784-da06ce0abef8",
|
||||
"block_id": "c0a8e994-ebf1-4a9c-a4d8-89d09c86741b",
|
||||
"input_default": {
|
||||
"name": "Image Subject",
|
||||
"value": "Otto the friendly, purple \"Chief Automation Octopus\" helping people automate their tedious tasks.",
|
||||
"description": "The subject of the image"
|
||||
},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": -314.43009631839783,
|
||||
"y": 962.935949165938
|
||||
}
|
||||
},
|
||||
"input_links": [],
|
||||
"output_links": [
|
||||
{
|
||||
"id": "1077c61a-a32a-4ed7-becf-11bcf835b914",
|
||||
"source_id": "6f24c45f-1548-4eda-9784-da06ce0abef8",
|
||||
"sink_id": "0d1bca9a-d9b8-4bfd-a19c-fe50b54f4b12",
|
||||
"source_name": "result",
|
||||
"sink_name": "prompt_values_#_TOPIC",
|
||||
"is_static": true
|
||||
}
|
||||
],
|
||||
"graph_id": "ed2091cf-5b27-45a9-b3ea-42396f95b256",
|
||||
"graph_version": 12,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
},
|
||||
{
|
||||
"id": "50bc23e9-f2b7-4959-8710-99679ed9eeea",
|
||||
"block_id": "90f8c45e-e983-4644-aa0b-b4ebe2f531bc",
|
||||
"input_default": {
|
||||
"prompt": "dog",
|
||||
"output_format": "png",
|
||||
"replicate_model_name": "Flux Pro 1.1"
|
||||
},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": 873.0119949791526,
|
||||
"y": 966.1604399052493
|
||||
}
|
||||
},
|
||||
"input_links": [
|
||||
{
|
||||
"id": "a17ec505-9377-4700-8fe0-124ca81d43a9",
|
||||
"source_id": "0d1bca9a-d9b8-4bfd-a19c-fe50b54f4b12",
|
||||
"sink_id": "50bc23e9-f2b7-4959-8710-99679ed9eeea",
|
||||
"source_name": "response",
|
||||
"sink_name": "prompt",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"output_links": [
|
||||
{
|
||||
"id": "06665d23-2f3d-4445-8f22-573446fcff5b",
|
||||
"source_id": "50bc23e9-f2b7-4959-8710-99679ed9eeea",
|
||||
"sink_id": "0d1dec1a-e4ee-4349-9673-449a01bbf14e",
|
||||
"source_name": "result",
|
||||
"sink_name": "value",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"graph_id": "ed2091cf-5b27-45a9-b3ea-42396f95b256",
|
||||
"graph_version": 12,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
},
|
||||
{
|
||||
"id": "0d1bca9a-d9b8-4bfd-a19c-fe50b54f4b12",
|
||||
"block_id": "1f292d4a-41a4-4977-9684-7c8d560b9f91",
|
||||
"input_default": {
|
||||
"model": "gpt-4o-mini",
|
||||
"prompt": "Generate an incredibly detailed, photorealistic image prompt about {{TOPIC}}, describing the camera it's taken with and prompting the diffusion model to use all the best quality techniques.\n\nOutput only the prompt with no additional commentary.",
|
||||
"prompt_values": {}
|
||||
},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": 277.3057034159709,
|
||||
"y": 962.8382498113764
|
||||
}
|
||||
},
|
||||
"input_links": [
|
||||
{
|
||||
"id": "1077c61a-a32a-4ed7-becf-11bcf835b914",
|
||||
"source_id": "6f24c45f-1548-4eda-9784-da06ce0abef8",
|
||||
"sink_id": "0d1bca9a-d9b8-4bfd-a19c-fe50b54f4b12",
|
||||
"source_name": "result",
|
||||
"sink_name": "prompt_values_#_TOPIC",
|
||||
"is_static": true
|
||||
}
|
||||
],
|
||||
"output_links": [
|
||||
{
|
||||
"id": "a17ec505-9377-4700-8fe0-124ca81d43a9",
|
||||
"source_id": "0d1bca9a-d9b8-4bfd-a19c-fe50b54f4b12",
|
||||
"sink_id": "50bc23e9-f2b7-4959-8710-99679ed9eeea",
|
||||
"source_name": "response",
|
||||
"sink_name": "prompt",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"graph_id": "ed2091cf-5b27-45a9-b3ea-42396f95b256",
|
||||
"graph_version": 12,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
}
|
||||
],
|
||||
"links": [
|
||||
{
|
||||
"id": "1077c61a-a32a-4ed7-becf-11bcf835b914",
|
||||
"source_id": "6f24c45f-1548-4eda-9784-da06ce0abef8",
|
||||
"sink_id": "0d1bca9a-d9b8-4bfd-a19c-fe50b54f4b12",
|
||||
"source_name": "result",
|
||||
"sink_name": "prompt_values_#_TOPIC",
|
||||
"is_static": true
|
||||
},
|
||||
{
|
||||
"id": "06665d23-2f3d-4445-8f22-573446fcff5b",
|
||||
"source_id": "50bc23e9-f2b7-4959-8710-99679ed9eeea",
|
||||
"sink_id": "0d1dec1a-e4ee-4349-9673-449a01bbf14e",
|
||||
"source_name": "result",
|
||||
"sink_name": "value",
|
||||
"is_static": false
|
||||
},
|
||||
{
|
||||
"id": "a17ec505-9377-4700-8fe0-124ca81d43a9",
|
||||
"source_id": "0d1bca9a-d9b8-4bfd-a19c-fe50b54f4b12",
|
||||
"sink_id": "50bc23e9-f2b7-4959-8710-99679ed9eeea",
|
||||
"source_name": "response",
|
||||
"sink_name": "prompt",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"forked_from_id": null,
|
||||
"forked_from_version": null,
|
||||
"sub_graphs": [],
|
||||
"user_id": "",
|
||||
"created_at": "2024-12-20T18:46:11.492Z",
|
||||
"input_schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"Image Subject": {
|
||||
"advanced": false,
|
||||
"secret": false,
|
||||
"title": "Image Subject",
|
||||
"description": "The subject of the image",
|
||||
"default": "Otto the friendly, purple \"Chief Automation Octopus\" helping people automate their tedious tasks."
|
||||
}
|
||||
},
|
||||
"required": []
|
||||
},
|
||||
"output_schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"Generated Image": {
|
||||
"advanced": false,
|
||||
"secret": false,
|
||||
"title": "Generated Image"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"Generated Image"
|
||||
]
|
||||
},
|
||||
"has_external_trigger": false,
|
||||
"has_human_in_the_loop": false,
|
||||
"trigger_setup_info": null,
|
||||
"credentials_input_schema": {
|
||||
"properties": {
|
||||
"replicate_api_key_credentials": {
|
||||
"credentials_provider": [
|
||||
"replicate"
|
||||
],
|
||||
"credentials_types": [
|
||||
"api_key"
|
||||
],
|
||||
"properties": {
|
||||
"id": {
|
||||
"title": "Id",
|
||||
"type": "string"
|
||||
},
|
||||
"title": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"default": null,
|
||||
"title": "Title"
|
||||
},
|
||||
"provider": {
|
||||
"const": "replicate",
|
||||
"title": "Provider",
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"const": "api_key",
|
||||
"title": "Type",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"provider",
|
||||
"type"
|
||||
],
|
||||
"title": "CredentialsMetaInput[Literal[<ProviderName.REPLICATE: 'replicate'>], Literal['api_key']]",
|
||||
"type": "object",
|
||||
"discriminator_values": []
|
||||
},
|
||||
"openai_api_key_credentials": {
|
||||
"credentials_provider": [
|
||||
"openai"
|
||||
],
|
||||
"credentials_types": [
|
||||
"api_key"
|
||||
],
|
||||
"properties": {
|
||||
"id": {
|
||||
"title": "Id",
|
||||
"type": "string"
|
||||
},
|
||||
"title": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"default": null,
|
||||
"title": "Title"
|
||||
},
|
||||
"provider": {
|
||||
"const": "openai",
|
||||
"title": "Provider",
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"const": "api_key",
|
||||
"title": "Type",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"provider",
|
||||
"type"
|
||||
],
|
||||
"title": "CredentialsMetaInput[Literal[<ProviderName.OPENAI: 'openai'>], Literal['api_key']]",
|
||||
"type": "object",
|
||||
"discriminator": "model",
|
||||
"discriminator_mapping": {
|
||||
"Llama-3.3-70B-Instruct": "llama_api",
|
||||
"Llama-3.3-8B-Instruct": "llama_api",
|
||||
"Llama-4-Maverick-17B-128E-Instruct-FP8": "llama_api",
|
||||
"Llama-4-Scout-17B-16E-Instruct-FP8": "llama_api",
|
||||
"Qwen/Qwen2.5-72B-Instruct-Turbo": "aiml_api",
|
||||
"amazon/nova-lite-v1": "open_router",
|
||||
"amazon/nova-micro-v1": "open_router",
|
||||
"amazon/nova-pro-v1": "open_router",
|
||||
"claude-3-7-sonnet-20250219": "anthropic",
|
||||
"claude-3-haiku-20240307": "anthropic",
|
||||
"claude-haiku-4-5-20251001": "anthropic",
|
||||
"claude-opus-4-1-20250805": "anthropic",
|
||||
"claude-opus-4-20250514": "anthropic",
|
||||
"claude-opus-4-5-20251101": "anthropic",
|
||||
"claude-sonnet-4-20250514": "anthropic",
|
||||
"claude-sonnet-4-5-20250929": "anthropic",
|
||||
"cohere/command-r-08-2024": "open_router",
|
||||
"cohere/command-r-plus-08-2024": "open_router",
|
||||
"deepseek/deepseek-chat": "open_router",
|
||||
"deepseek/deepseek-r1-0528": "open_router",
|
||||
"dolphin-mistral:latest": "ollama",
|
||||
"google/gemini-2.0-flash-001": "open_router",
|
||||
"google/gemini-2.0-flash-lite-001": "open_router",
|
||||
"google/gemini-2.5-flash": "open_router",
|
||||
"google/gemini-2.5-flash-lite-preview-06-17": "open_router",
|
||||
"google/gemini-2.5-pro-preview-03-25": "open_router",
|
||||
"google/gemini-3-pro-preview": "open_router",
|
||||
"gpt-3.5-turbo": "openai",
|
||||
"gpt-4-turbo": "openai",
|
||||
"gpt-4.1-2025-04-14": "openai",
|
||||
"gpt-4.1-mini-2025-04-14": "openai",
|
||||
"gpt-4o": "openai",
|
||||
"gpt-4o-mini": "openai",
|
||||
"gpt-5-2025-08-07": "openai",
|
||||
"gpt-5-chat-latest": "openai",
|
||||
"gpt-5-mini-2025-08-07": "openai",
|
||||
"gpt-5-nano-2025-08-07": "openai",
|
||||
"gpt-5.1-2025-11-13": "openai",
|
||||
"gryphe/mythomax-l2-13b": "open_router",
|
||||
"llama-3.1-8b-instant": "groq",
|
||||
"llama-3.3-70b-versatile": "groq",
|
||||
"llama3": "ollama",
|
||||
"llama3.1:405b": "ollama",
|
||||
"llama3.2": "ollama",
|
||||
"llama3.3": "ollama",
|
||||
"meta-llama/Llama-3.2-3B-Instruct-Turbo": "aiml_api",
|
||||
"meta-llama/Llama-3.3-70B-Instruct-Turbo": "aiml_api",
|
||||
"meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo": "aiml_api",
|
||||
"meta-llama/llama-4-maverick": "open_router",
|
||||
"meta-llama/llama-4-scout": "open_router",
|
||||
"microsoft/wizardlm-2-8x22b": "open_router",
|
||||
"mistralai/mistral-nemo": "open_router",
|
||||
"moonshotai/kimi-k2": "open_router",
|
||||
"nousresearch/hermes-3-llama-3.1-405b": "open_router",
|
||||
"nousresearch/hermes-3-llama-3.1-70b": "open_router",
|
||||
"nvidia/llama-3.1-nemotron-70b-instruct": "aiml_api",
|
||||
"o1": "openai",
|
||||
"o1-mini": "openai",
|
||||
"o3-2025-04-16": "openai",
|
||||
"o3-mini": "openai",
|
||||
"openai/gpt-oss-120b": "open_router",
|
||||
"openai/gpt-oss-20b": "open_router",
|
||||
"perplexity/sonar": "open_router",
|
||||
"perplexity/sonar-deep-research": "open_router",
|
||||
"perplexity/sonar-pro": "open_router",
|
||||
"qwen/qwen3-235b-a22b-thinking-2507": "open_router",
|
||||
"qwen/qwen3-coder": "open_router",
|
||||
"v0-1.0-md": "v0",
|
||||
"v0-1.5-lg": "v0",
|
||||
"v0-1.5-md": "v0",
|
||||
"x-ai/grok-4": "open_router",
|
||||
"x-ai/grok-4-fast": "open_router",
|
||||
"x-ai/grok-4.1-fast": "open_router",
|
||||
"x-ai/grok-code-fast-1": "open_router"
|
||||
},
|
||||
"discriminator_values": [
|
||||
"gpt-4o-mini"
|
||||
]
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"replicate_api_key_credentials",
|
||||
"openai_api_key_credentials"
|
||||
],
|
||||
"title": "FluxAIImageGeneratorCredentialsInputSchema",
|
||||
"type": "object"
|
||||
}
|
||||
}
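In the graph above, the link into the AI text-generation block has sink_name "prompt_values_#_TOPIC", which supplies the value substituted for the {{TOPIC}} placeholder in that block's prompt. A simplified, hypothetical illustration of that substitution step (not the platform's actual implementation):

import re

def fill_prompt(prompt: str, prompt_values: dict[str, str]) -> str:
    # Replace each {{NAME}} placeholder with its value, leaving unknown names untouched.
    return re.sub(
        r"\{\{(\w+)\}\}",
        lambda m: prompt_values.get(m.group(1), m.group(0)),
        prompt,
    )

print(fill_prompt(
    "Generate an incredibly detailed, photorealistic image prompt about {{TOPIC}}.",
    {"TOPIC": "Otto the friendly, purple Chief Automation Octopus"},
))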
@@ -0,0 +1,505 @@
|
||||
{
|
||||
"id": "0d440799-44ba-4d6c-85b3-b3739f1e1287",
|
||||
"version": 12,
|
||||
"is_active": true,
|
||||
"name": "AI Webpage Copy Improver",
|
||||
"description": "Elevate your web content with this powerful AI Webpage Copy Improver. Designed for marketers, SEO specialists, and web developers, this tool analyses and enhances website copy for maximum impact. Using advanced language models, it optimizes text for better clarity, SEO performance, and increased conversion rates. The AI examines your existing content, identifies areas for improvement, and generates refined copy that maintains your brand voice while boosting engagement. From homepage headlines to product descriptions, transform your web presence with AI-driven insights. Improve readability, incorporate targeted keywords, and craft compelling calls-to-action - all with the click of a button. Take your digital marketing to the next level with the AI Webpage Copy Improver.",
|
||||
"instructions": null,
|
||||
"recommended_schedule_cron": null,
|
||||
"nodes": [
|
||||
{
|
||||
"id": "130ec496-f75d-4fe2-9cd6-8c00d08ea4a7",
|
||||
"block_id": "363ae599-353e-4804-937e-b2ee3cef3da4",
|
||||
"input_default": {
|
||||
"name": "Improved Webpage Copy"
|
||||
},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": 1039.5884372540172,
|
||||
"y": -0.8359099621230968
|
||||
}
|
||||
},
|
||||
"input_links": [
|
||||
{
|
||||
"id": "d4334477-3616-454f-a430-614ca27f5b36",
|
||||
"source_id": "c9924577-70d8-4ccb-9106-6f796df09ef9",
|
||||
"sink_id": "130ec496-f75d-4fe2-9cd6-8c00d08ea4a7",
|
||||
"source_name": "response",
|
||||
"sink_name": "value",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"output_links": [],
|
||||
"graph_id": "0d440799-44ba-4d6c-85b3-b3739f1e1287",
|
||||
"graph_version": 12,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
},
|
||||
{
|
||||
"id": "cefccd07-fe70-4feb-bf76-46b20aaa5d35",
|
||||
"block_id": "363ae599-353e-4804-937e-b2ee3cef3da4",
|
||||
"input_default": {
|
||||
"name": "Original Page Analysis",
|
||||
"description": "Analysis of the webpage as it currently stands."
|
||||
},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": 1037.7724103954706,
|
||||
"y": -606.5934325506903
|
||||
}
|
||||
},
|
||||
"input_links": [
|
||||
{
|
||||
"id": "f979ab78-0903-4f19-a7c2-a419d5d81aef",
|
||||
"source_id": "08612ce2-625b-4c17-accd-3acace7b6477",
|
||||
"sink_id": "cefccd07-fe70-4feb-bf76-46b20aaa5d35",
|
||||
"source_name": "response",
|
||||
"sink_name": "value",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"output_links": [],
|
||||
"graph_id": "0d440799-44ba-4d6c-85b3-b3739f1e1287",
|
||||
"graph_version": 12,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
},
|
||||
{
|
||||
"id": "375f8bc3-afd9-4025-ad8e-9aeb329af7ce",
|
||||
"block_id": "c0a8e994-ebf1-4a9c-a4d8-89d09c86741b",
|
||||
"input_default": {
|
||||
"name": "Homepage URL",
|
||||
"value": "https://agpt.co",
|
||||
"description": "Enter the URL of the homepage you want to improve"
|
||||
},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": -1195.1455674454749,
|
||||
"y": 0
|
||||
}
|
||||
},
|
||||
"input_links": [],
|
||||
"output_links": [
|
||||
{
|
||||
"id": "cbb12335-fefd-4560-9fff-98675130fbad",
|
||||
"source_id": "375f8bc3-afd9-4025-ad8e-9aeb329af7ce",
|
||||
"sink_id": "b40595c6-dba3-4779-a129-cd4f01fff103",
|
||||
"source_name": "result",
|
||||
"sink_name": "url",
|
||||
"is_static": true
|
||||
}
|
||||
],
|
||||
"graph_id": "0d440799-44ba-4d6c-85b3-b3739f1e1287",
|
||||
"graph_version": 12,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
},
|
||||
{
|
||||
"id": "b40595c6-dba3-4779-a129-cd4f01fff103",
|
||||
"block_id": "436c3984-57fd-4b85-8e9a-459b356883bd",
|
||||
"input_default": {
|
||||
"raw_content": false
|
||||
},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": -631.7330786555249,
|
||||
"y": 1.9638396496230826
|
||||
}
|
||||
},
|
||||
"input_links": [
|
||||
{
|
||||
"id": "cbb12335-fefd-4560-9fff-98675130fbad",
|
||||
"source_id": "375f8bc3-afd9-4025-ad8e-9aeb329af7ce",
|
||||
"sink_id": "b40595c6-dba3-4779-a129-cd4f01fff103",
|
||||
"source_name": "result",
|
||||
"sink_name": "url",
|
||||
"is_static": true
|
||||
}
|
||||
],
|
||||
"output_links": [
|
||||
{
|
||||
"id": "adfa6113-77b3-4e32-b136-3e694b87553e",
|
||||
"source_id": "b40595c6-dba3-4779-a129-cd4f01fff103",
|
||||
"sink_id": "c9924577-70d8-4ccb-9106-6f796df09ef9",
|
||||
"source_name": "content",
|
||||
"sink_name": "prompt_values_#_CONTENT",
|
||||
"is_static": false
|
||||
},
|
||||
{
|
||||
"id": "5d5656fd-4208-4296-bc70-e39cc31caada",
|
||||
"source_id": "b40595c6-dba3-4779-a129-cd4f01fff103",
|
||||
"sink_id": "08612ce2-625b-4c17-accd-3acace7b6477",
|
||||
"source_name": "content",
|
||||
"sink_name": "prompt_values_#_CONTENT",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"graph_id": "0d440799-44ba-4d6c-85b3-b3739f1e1287",
|
||||
"graph_version": 12,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
},
|
||||
{
|
||||
"id": "c9924577-70d8-4ccb-9106-6f796df09ef9",
|
||||
"block_id": "1f292d4a-41a4-4977-9684-7c8d560b9f91",
|
||||
"input_default": {
|
||||
"model": "gpt-4o",
|
||||
"prompt": "Current Webpage Content:\n```\n{{CONTENT}}\n```\n\nBased on the following analysis of the webpage content:\n\n```\n{{ANALYSIS}}\n```\n\nRewrite and improve the content to address the identified issues. Focus on:\n1. Enhancing clarity and readability\n2. Optimizing for SEO (suggest and incorporate relevant keywords)\n3. Improving calls-to-action for better conversion rates\n4. Refining the structure and organization\n5. Maintaining brand consistency while improving the overall tone\n\nProvide the improved content in HTML format inside a code-block with \"```\" backticks, preserving the original structure where appropriate. Also, include a brief summary of the changes made and their potential impact.",
|
||||
"prompt_values": {}
|
||||
},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": 488.37278423303917,
|
||||
"y": 0
|
||||
}
|
||||
},
|
||||
"input_links": [
|
||||
{
|
||||
"id": "adfa6113-77b3-4e32-b136-3e694b87553e",
|
||||
"source_id": "b40595c6-dba3-4779-a129-cd4f01fff103",
|
||||
"sink_id": "c9924577-70d8-4ccb-9106-6f796df09ef9",
|
||||
"source_name": "content",
|
||||
"sink_name": "prompt_values_#_CONTENT",
|
||||
"is_static": false
|
||||
},
|
||||
{
|
||||
"id": "6bcca45d-c9d5-439e-ac43-e4a1264d8f57",
|
||||
"source_id": "08612ce2-625b-4c17-accd-3acace7b6477",
|
||||
"sink_id": "c9924577-70d8-4ccb-9106-6f796df09ef9",
|
||||
"source_name": "response",
|
||||
"sink_name": "prompt_values_#_ANALYSIS",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"output_links": [
|
||||
{
|
||||
"id": "d4334477-3616-454f-a430-614ca27f5b36",
|
||||
"source_id": "c9924577-70d8-4ccb-9106-6f796df09ef9",
|
||||
"sink_id": "130ec496-f75d-4fe2-9cd6-8c00d08ea4a7",
|
||||
"source_name": "response",
|
||||
"sink_name": "value",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"graph_id": "0d440799-44ba-4d6c-85b3-b3739f1e1287",
|
||||
"graph_version": 12,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
},
|
||||
{
|
||||
"id": "08612ce2-625b-4c17-accd-3acace7b6477",
|
||||
"block_id": "1f292d4a-41a4-4977-9684-7c8d560b9f91",
|
||||
"input_default": {
|
||||
"model": "gpt-4o",
|
||||
"prompt": "Analyze the following webpage content and provide a detailed report on its current state, including strengths and weaknesses in terms of clarity, SEO optimization, and potential for conversion:\n\n{{CONTENT}}\n\nInclude observations on:\n1. Overall readability and clarity\n2. Use of keywords and SEO-friendly language\n3. Effectiveness of calls-to-action\n4. Structure and organization of content\n5. Tone and brand consistency",
|
||||
"prompt_values": {}
|
||||
},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": -72.66206703605442,
|
||||
"y": -0.58403945075381
|
||||
}
|
||||
},
|
||||
"input_links": [
|
||||
{
|
||||
"id": "5d5656fd-4208-4296-bc70-e39cc31caada",
|
||||
"source_id": "b40595c6-dba3-4779-a129-cd4f01fff103",
|
||||
"sink_id": "08612ce2-625b-4c17-accd-3acace7b6477",
|
||||
"source_name": "content",
|
||||
"sink_name": "prompt_values_#_CONTENT",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"output_links": [
|
||||
{
|
||||
"id": "f979ab78-0903-4f19-a7c2-a419d5d81aef",
|
||||
"source_id": "08612ce2-625b-4c17-accd-3acace7b6477",
|
||||
"sink_id": "cefccd07-fe70-4feb-bf76-46b20aaa5d35",
|
||||
"source_name": "response",
|
||||
"sink_name": "value",
|
||||
"is_static": false
|
||||
},
|
||||
{
|
||||
"id": "6bcca45d-c9d5-439e-ac43-e4a1264d8f57",
|
||||
"source_id": "08612ce2-625b-4c17-accd-3acace7b6477",
|
||||
"sink_id": "c9924577-70d8-4ccb-9106-6f796df09ef9",
|
||||
"source_name": "response",
|
||||
"sink_name": "prompt_values_#_ANALYSIS",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"graph_id": "0d440799-44ba-4d6c-85b3-b3739f1e1287",
|
||||
"graph_version": 12,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
}
|
||||
],
|
||||
"links": [
|
||||
{
|
||||
"id": "adfa6113-77b3-4e32-b136-3e694b87553e",
|
||||
"source_id": "b40595c6-dba3-4779-a129-cd4f01fff103",
|
||||
"sink_id": "c9924577-70d8-4ccb-9106-6f796df09ef9",
|
||||
"source_name": "content",
|
||||
"sink_name": "prompt_values_#_CONTENT",
|
||||
"is_static": false
|
||||
},
|
||||
{
|
||||
"id": "d4334477-3616-454f-a430-614ca27f5b36",
|
||||
"source_id": "c9924577-70d8-4ccb-9106-6f796df09ef9",
|
||||
"sink_id": "130ec496-f75d-4fe2-9cd6-8c00d08ea4a7",
|
||||
"source_name": "response",
|
||||
"sink_name": "value",
|
||||
"is_static": false
|
||||
},
|
||||
{
|
||||
"id": "5d5656fd-4208-4296-bc70-e39cc31caada",
|
||||
"source_id": "b40595c6-dba3-4779-a129-cd4f01fff103",
|
||||
"sink_id": "08612ce2-625b-4c17-accd-3acace7b6477",
|
||||
"source_name": "content",
|
||||
"sink_name": "prompt_values_#_CONTENT",
|
||||
"is_static": false
|
||||
},
|
||||
{
|
||||
"id": "f979ab78-0903-4f19-a7c2-a419d5d81aef",
|
||||
"source_id": "08612ce2-625b-4c17-accd-3acace7b6477",
|
||||
"sink_id": "cefccd07-fe70-4feb-bf76-46b20aaa5d35",
|
||||
"source_name": "response",
|
||||
"sink_name": "value",
|
||||
"is_static": false
|
||||
},
|
||||
{
|
||||
"id": "6bcca45d-c9d5-439e-ac43-e4a1264d8f57",
|
||||
"source_id": "08612ce2-625b-4c17-accd-3acace7b6477",
|
||||
"sink_id": "c9924577-70d8-4ccb-9106-6f796df09ef9",
|
||||
"source_name": "response",
|
||||
"sink_name": "prompt_values_#_ANALYSIS",
|
||||
"is_static": false
|
||||
},
|
||||
{
|
||||
"id": "cbb12335-fefd-4560-9fff-98675130fbad",
|
||||
"source_id": "375f8bc3-afd9-4025-ad8e-9aeb329af7ce",
|
||||
"sink_id": "b40595c6-dba3-4779-a129-cd4f01fff103",
|
||||
"source_name": "result",
|
||||
"sink_name": "url",
|
||||
"is_static": true
|
||||
}
|
||||
],
|
||||
"forked_from_id": null,
|
||||
"forked_from_version": null,
|
||||
"sub_graphs": [],
|
||||
"user_id": "",
|
||||
"created_at": "2024-12-20T19:47:22.036Z",
|
||||
"input_schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"Homepage URL": {
|
||||
"advanced": false,
|
||||
"secret": false,
|
||||
"title": "Homepage URL",
|
||||
"description": "Enter the URL of the homepage you want to improve",
|
||||
"default": "https://agpt.co"
|
||||
}
|
||||
},
|
||||
"required": []
|
||||
},
|
||||
"output_schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"Improved Webpage Copy": {
|
||||
"advanced": false,
|
||||
"secret": false,
|
||||
"title": "Improved Webpage Copy"
|
||||
},
|
||||
"Original Page Analysis": {
|
||||
"advanced": false,
|
||||
"secret": false,
|
||||
"title": "Original Page Analysis",
|
||||
"description": "Analysis of the webpage as it currently stands."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"Improved Webpage Copy",
|
||||
"Original Page Analysis"
|
||||
]
|
||||
},
|
||||
"has_external_trigger": false,
|
||||
"has_human_in_the_loop": false,
|
||||
"trigger_setup_info": null,
|
||||
"credentials_input_schema": {
|
||||
"properties": {
|
||||
"jina_api_key_credentials": {
|
||||
"credentials_provider": [
|
||||
"jina"
|
||||
],
|
||||
"credentials_types": [
|
||||
"api_key"
|
||||
],
|
||||
"properties": {
|
||||
"id": {
|
||||
"title": "Id",
|
||||
"type": "string"
|
||||
},
|
||||
"title": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"default": null,
|
||||
"title": "Title"
|
||||
},
|
||||
"provider": {
|
||||
"const": "jina",
|
||||
"title": "Provider",
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"const": "api_key",
|
||||
"title": "Type",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"provider",
|
||||
"type"
|
||||
],
|
||||
"title": "CredentialsMetaInput[Literal[<ProviderName.JINA: 'jina'>], Literal['api_key']]",
|
||||
"type": "object",
|
||||
"discriminator_values": []
|
||||
},
|
||||
"openai_api_key_credentials": {
|
||||
"credentials_provider": [
|
||||
"openai"
|
||||
],
|
||||
"credentials_types": [
|
||||
"api_key"
|
||||
],
|
||||
"properties": {
|
||||
"id": {
|
||||
"title": "Id",
|
||||
"type": "string"
|
||||
},
|
||||
"title": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"default": null,
|
||||
"title": "Title"
|
||||
},
|
||||
"provider": {
|
||||
"const": "openai",
|
||||
"title": "Provider",
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"const": "api_key",
|
||||
"title": "Type",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"provider",
|
||||
"type"
|
||||
],
|
||||
"title": "CredentialsMetaInput[Literal[<ProviderName.OPENAI: 'openai'>], Literal['api_key']]",
|
||||
"type": "object",
|
||||
"discriminator": "model",
|
||||
"discriminator_mapping": {
|
||||
"Llama-3.3-70B-Instruct": "llama_api",
|
||||
"Llama-3.3-8B-Instruct": "llama_api",
|
||||
"Llama-4-Maverick-17B-128E-Instruct-FP8": "llama_api",
|
||||
"Llama-4-Scout-17B-16E-Instruct-FP8": "llama_api",
|
||||
"Qwen/Qwen2.5-72B-Instruct-Turbo": "aiml_api",
|
||||
"amazon/nova-lite-v1": "open_router",
|
||||
"amazon/nova-micro-v1": "open_router",
|
||||
"amazon/nova-pro-v1": "open_router",
|
||||
"claude-3-7-sonnet-20250219": "anthropic",
|
||||
"claude-3-haiku-20240307": "anthropic",
|
||||
"claude-haiku-4-5-20251001": "anthropic",
|
||||
"claude-opus-4-1-20250805": "anthropic",
|
||||
"claude-opus-4-20250514": "anthropic",
|
||||
"claude-opus-4-5-20251101": "anthropic",
|
||||
"claude-sonnet-4-20250514": "anthropic",
|
||||
"claude-sonnet-4-5-20250929": "anthropic",
|
||||
"cohere/command-r-08-2024": "open_router",
|
||||
"cohere/command-r-plus-08-2024": "open_router",
|
||||
"deepseek/deepseek-chat": "open_router",
|
||||
"deepseek/deepseek-r1-0528": "open_router",
|
||||
"dolphin-mistral:latest": "ollama",
|
||||
"google/gemini-2.0-flash-001": "open_router",
|
||||
"google/gemini-2.0-flash-lite-001": "open_router",
|
||||
"google/gemini-2.5-flash": "open_router",
|
||||
"google/gemini-2.5-flash-lite-preview-06-17": "open_router",
|
||||
"google/gemini-2.5-pro-preview-03-25": "open_router",
|
||||
"google/gemini-3-pro-preview": "open_router",
|
||||
"gpt-3.5-turbo": "openai",
|
||||
"gpt-4-turbo": "openai",
|
||||
"gpt-4.1-2025-04-14": "openai",
|
||||
"gpt-4.1-mini-2025-04-14": "openai",
|
||||
"gpt-4o": "openai",
|
||||
"gpt-4o-mini": "openai",
|
||||
"gpt-5-2025-08-07": "openai",
|
||||
"gpt-5-chat-latest": "openai",
|
||||
"gpt-5-mini-2025-08-07": "openai",
|
||||
"gpt-5-nano-2025-08-07": "openai",
|
||||
"gpt-5.1-2025-11-13": "openai",
|
||||
"gryphe/mythomax-l2-13b": "open_router",
|
||||
"llama-3.1-8b-instant": "groq",
|
||||
"llama-3.3-70b-versatile": "groq",
|
||||
"llama3": "ollama",
|
||||
"llama3.1:405b": "ollama",
|
||||
"llama3.2": "ollama",
|
||||
"llama3.3": "ollama",
|
||||
"meta-llama/Llama-3.2-3B-Instruct-Turbo": "aiml_api",
|
||||
"meta-llama/Llama-3.3-70B-Instruct-Turbo": "aiml_api",
|
||||
"meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo": "aiml_api",
|
||||
"meta-llama/llama-4-maverick": "open_router",
|
||||
"meta-llama/llama-4-scout": "open_router",
|
||||
"microsoft/wizardlm-2-8x22b": "open_router",
|
||||
"mistralai/mistral-nemo": "open_router",
|
||||
"moonshotai/kimi-k2": "open_router",
|
||||
"nousresearch/hermes-3-llama-3.1-405b": "open_router",
|
||||
"nousresearch/hermes-3-llama-3.1-70b": "open_router",
|
||||
"nvidia/llama-3.1-nemotron-70b-instruct": "aiml_api",
|
||||
"o1": "openai",
|
||||
"o1-mini": "openai",
|
||||
"o3-2025-04-16": "openai",
|
||||
"o3-mini": "openai",
|
||||
"openai/gpt-oss-120b": "open_router",
|
||||
"openai/gpt-oss-20b": "open_router",
|
||||
"perplexity/sonar": "open_router",
|
||||
"perplexity/sonar-deep-research": "open_router",
|
||||
"perplexity/sonar-pro": "open_router",
|
||||
"qwen/qwen3-235b-a22b-thinking-2507": "open_router",
|
||||
"qwen/qwen3-coder": "open_router",
|
||||
"v0-1.0-md": "v0",
|
||||
"v0-1.5-lg": "v0",
|
||||
"v0-1.5-md": "v0",
|
||||
"x-ai/grok-4": "open_router",
|
||||
"x-ai/grok-4-fast": "open_router",
|
||||
"x-ai/grok-4.1-fast": "open_router",
|
||||
"x-ai/grok-code-fast-1": "open_router"
|
||||
},
|
||||
"discriminator_values": [
|
||||
"gpt-4o"
|
||||
]
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"jina_api_key_credentials",
|
||||
"openai_api_key_credentials"
|
||||
],
|
||||
"title": "AIWebpageCopyImproverCredentialsInputSchema",
|
||||
"type": "object"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,615 @@
|
||||
{
|
||||
"id": "4c6b68cb-bb75-4044-b1cb-2cee3fd39b26",
|
||||
"version": 29,
|
||||
"is_active": true,
|
||||
"name": "Email Address Finder",
|
||||
"description": "Input information of a business and find their email address",
|
||||
"instructions": null,
|
||||
"recommended_schedule_cron": null,
|
||||
"nodes": [
|
||||
{
|
||||
"id": "04cad535-9f1a-4876-8b07-af5897d8c282",
|
||||
"block_id": "c0a8e994-ebf1-4a9c-a4d8-89d09c86741b",
|
||||
"input_default": {
|
||||
"name": "Address",
|
||||
"value": "USA"
|
||||
},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": 1047.9357219838776,
|
||||
"y": 1067.9123910370954
|
||||
}
|
||||
},
|
||||
"input_links": [],
|
||||
"output_links": [
|
||||
{
|
||||
"id": "aac29f7b-3cd1-4c91-9a2a-72a8301c0957",
|
||||
"source_id": "04cad535-9f1a-4876-8b07-af5897d8c282",
|
||||
"sink_id": "28b5ddcc-dc20-41cc-ad21-c54ff459f694",
|
||||
"source_name": "result",
|
||||
"sink_name": "values_#_ADDRESS",
|
||||
"is_static": true
|
||||
}
|
||||
],
|
||||
"graph_id": "4c6b68cb-bb75-4044-b1cb-2cee3fd39b26",
|
||||
"graph_version": 29,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
},
|
||||
{
|
||||
"id": "a6e7355e-5bf8-4b09-b11c-a5e140389981",
|
||||
"block_id": "3146e4fe-2cdd-4f29-bd12-0c9d5bb4deb0",
|
||||
"input_default": {
|
||||
"group": 1,
|
||||
"pattern": "<email>(.*?)<\\/email>"
|
||||
},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": 3381.2821481740634,
|
||||
"y": 246.091098184158
|
||||
}
|
||||
},
|
||||
"input_links": [
|
||||
{
|
||||
"id": "9f8188ce-1f3d-46fb-acda-b2a57c0e5da6",
|
||||
"source_id": "510937b3-0134-4e45-b2ba-05a447bbaf50",
|
||||
"sink_id": "a6e7355e-5bf8-4b09-b11c-a5e140389981",
|
||||
"source_name": "response",
|
||||
"sink_name": "text",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"output_links": [
|
||||
{
|
||||
"id": "b15b5143-27b7-486e-a166-4095e72e5235",
|
||||
"source_id": "a6e7355e-5bf8-4b09-b11c-a5e140389981",
|
||||
"sink_id": "266b7255-11c4-4b88-99e2-85db31a2e865",
|
||||
"source_name": "negative",
|
||||
"sink_name": "values_#_Result",
|
||||
"is_static": false
|
||||
},
|
||||
{
|
||||
"id": "23591872-3c6b-4562-87d3-5b6ade698e48",
|
||||
"source_id": "a6e7355e-5bf8-4b09-b11c-a5e140389981",
|
||||
"sink_id": "310c8fab-2ae6-4158-bd48-01dbdc434130",
|
||||
"source_name": "positive",
|
||||
"sink_name": "value",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"graph_id": "4c6b68cb-bb75-4044-b1cb-2cee3fd39b26",
|
||||
"graph_version": 29,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
},
|
||||
{
|
||||
"id": "310c8fab-2ae6-4158-bd48-01dbdc434130",
|
||||
"block_id": "363ae599-353e-4804-937e-b2ee3cef3da4",
|
||||
"input_default": {
|
||||
"name": "Email"
|
||||
},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": 4525.4246310882,
|
||||
"y": 246.36913665010354
|
||||
}
|
||||
},
|
||||
"input_links": [
|
||||
{
|
||||
"id": "d87b07ea-dcec-4d38-a644-2c1d741ea3cb",
|
||||
"source_id": "266b7255-11c4-4b88-99e2-85db31a2e865",
|
||||
"sink_id": "310c8fab-2ae6-4158-bd48-01dbdc434130",
|
||||
"source_name": "output",
|
||||
"sink_name": "value",
|
||||
"is_static": false
|
||||
},
|
||||
{
|
||||
"id": "23591872-3c6b-4562-87d3-5b6ade698e48",
|
||||
"source_id": "a6e7355e-5bf8-4b09-b11c-a5e140389981",
|
||||
"sink_id": "310c8fab-2ae6-4158-bd48-01dbdc434130",
|
||||
"source_name": "positive",
|
||||
"sink_name": "value",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"output_links": [],
|
||||
"graph_id": "4c6b68cb-bb75-4044-b1cb-2cee3fd39b26",
|
||||
"graph_version": 29,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
},
|
||||
{
|
||||
"id": "4a41df99-ffe2-4c12-b528-632979c9c030",
|
||||
"block_id": "87840993-2053-44b7-8da4-187ad4ee518c",
|
||||
"input_default": {},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": 2182.7499999999995,
|
||||
"y": 242.00001144409185
|
||||
}
|
||||
},
|
||||
"input_links": [
|
||||
{
|
||||
"id": "2e411d3d-79ba-4958-9c1c-b76a45a2e649",
|
||||
"source_id": "28b5ddcc-dc20-41cc-ad21-c54ff459f694",
|
||||
"sink_id": "4a41df99-ffe2-4c12-b528-632979c9c030",
|
||||
"source_name": "output",
|
||||
"sink_name": "query",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"output_links": [
|
||||
{
|
||||
"id": "899cc7d8-a96b-4107-b3c6-4c78edcf0c6b",
|
||||
"source_id": "4a41df99-ffe2-4c12-b528-632979c9c030",
|
||||
"sink_id": "510937b3-0134-4e45-b2ba-05a447bbaf50",
|
||||
"source_name": "results",
|
||||
"sink_name": "prompt_values_#_WEBSITE_CONTENT",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"graph_id": "4c6b68cb-bb75-4044-b1cb-2cee3fd39b26",
|
||||
"graph_version": 29,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
},
|
||||
{
|
||||
"id": "9708a10a-8be0-4c44-abb3-bd0f7c594794",
|
||||
"block_id": "c0a8e994-ebf1-4a9c-a4d8-89d09c86741b",
|
||||
"input_default": {
|
||||
"name": "Business Name",
|
||||
"value": "Tim Cook"
|
||||
},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": 1049.9704155272595,
|
||||
"y": 244.49931152418344
|
||||
}
|
||||
},
|
||||
"input_links": [],
|
||||
"output_links": [
|
||||
{
|
||||
"id": "946b522c-365f-4ee0-96f9-28863d9882ea",
|
||||
"source_id": "9708a10a-8be0-4c44-abb3-bd0f7c594794",
|
||||
"sink_id": "28b5ddcc-dc20-41cc-ad21-c54ff459f694",
|
||||
"source_name": "result",
|
||||
"sink_name": "values_#_NAME",
|
||||
"is_static": true
|
||||
},
|
||||
{
|
||||
"id": "43e920a7-0bb4-4fae-9a22-91df95c7342a",
|
||||
"source_id": "9708a10a-8be0-4c44-abb3-bd0f7c594794",
|
||||
"sink_id": "510937b3-0134-4e45-b2ba-05a447bbaf50",
|
||||
"source_name": "result",
|
||||
"sink_name": "prompt_values_#_BUSINESS_NAME",
|
||||
"is_static": true
|
||||
}
|
||||
],
|
||||
"graph_id": "4c6b68cb-bb75-4044-b1cb-2cee3fd39b26",
|
||||
"graph_version": 29,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
},
|
||||
{
|
||||
"id": "28b5ddcc-dc20-41cc-ad21-c54ff459f694",
|
||||
"block_id": "db7d8f02-2f44-4c55-ab7a-eae0941f0c30",
|
||||
"input_default": {
|
||||
"format": "Email Address of {{NAME}}, {{ADDRESS}}",
|
||||
"values": {}
|
||||
},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": 1625.25,
|
||||
"y": 243.25001144409185
|
||||
}
|
||||
},
|
||||
"input_links": [
|
||||
{
|
||||
"id": "946b522c-365f-4ee0-96f9-28863d9882ea",
|
||||
"source_id": "9708a10a-8be0-4c44-abb3-bd0f7c594794",
|
||||
"sink_id": "28b5ddcc-dc20-41cc-ad21-c54ff459f694",
|
||||
"source_name": "result",
|
||||
"sink_name": "values_#_NAME",
|
||||
"is_static": true
|
||||
},
|
||||
{
|
||||
"id": "aac29f7b-3cd1-4c91-9a2a-72a8301c0957",
|
||||
"source_id": "04cad535-9f1a-4876-8b07-af5897d8c282",
|
||||
"sink_id": "28b5ddcc-dc20-41cc-ad21-c54ff459f694",
|
||||
"source_name": "result",
|
||||
"sink_name": "values_#_ADDRESS",
|
||||
"is_static": true
|
||||
}
|
||||
],
|
||||
"output_links": [
|
||||
{
|
||||
"id": "2e411d3d-79ba-4958-9c1c-b76a45a2e649",
|
||||
"source_id": "28b5ddcc-dc20-41cc-ad21-c54ff459f694",
|
||||
"sink_id": "4a41df99-ffe2-4c12-b528-632979c9c030",
|
||||
"source_name": "output",
|
||||
"sink_name": "query",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"graph_id": "4c6b68cb-bb75-4044-b1cb-2cee3fd39b26",
|
||||
"graph_version": 29,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
},
|
||||
{
|
||||
"id": "266b7255-11c4-4b88-99e2-85db31a2e865",
|
||||
"block_id": "db7d8f02-2f44-4c55-ab7a-eae0941f0c30",
|
||||
"input_default": {
|
||||
"format": "Failed to find email. \nResult:\n{{RESULT}}",
|
||||
"values": {}
|
||||
},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": 3949.7493830805934,
|
||||
"y": 705.209819698647
|
||||
}
|
||||
},
|
||||
"input_links": [
|
||||
{
|
||||
"id": "b15b5143-27b7-486e-a166-4095e72e5235",
|
||||
"source_id": "a6e7355e-5bf8-4b09-b11c-a5e140389981",
|
||||
"sink_id": "266b7255-11c4-4b88-99e2-85db31a2e865",
|
||||
"source_name": "negative",
|
||||
"sink_name": "values_#_Result",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"output_links": [
|
||||
{
|
||||
"id": "d87b07ea-dcec-4d38-a644-2c1d741ea3cb",
|
||||
"source_id": "266b7255-11c4-4b88-99e2-85db31a2e865",
|
||||
"sink_id": "310c8fab-2ae6-4158-bd48-01dbdc434130",
|
||||
"source_name": "output",
|
||||
"sink_name": "value",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"graph_id": "4c6b68cb-bb75-4044-b1cb-2cee3fd39b26",
|
||||
"graph_version": 29,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
},
|
||||
{
|
||||
"id": "510937b3-0134-4e45-b2ba-05a447bbaf50",
|
||||
"block_id": "1f292d4a-41a4-4977-9684-7c8d560b9f91",
|
||||
"input_default": {
|
||||
"model": "claude-sonnet-4-5-20250929",
|
||||
"prompt": "<business_website>\n{{WEBSITE_CONTENT}}\n</business_website>\n\nExtract the Contact Email of {{BUSINESS_NAME}}.\n\nIf no email that can be used to contact {{BUSINESS_NAME}} is present, output `N/A`.\nDo not share any emails other than the email for this specific entity.\n\nIf multiple present pick the likely best one.\n\nRespond with the email (or N/A) inside <email></email> tags.\n\nExample Response:\n\n<thoughts_or_comments>\nThere were many emails present, but luckily one was for {{BUSINESS_NAME}} which I have included below.\n</thoughts_or_comments>\n<email>\nexample@email.com\n</email>",
|
||||
"prompt_values": {}
|
||||
},
|
||||
"metadata": {
|
||||
"position": {
|
||||
"x": 2774.879259081777,
|
||||
"y": 243.3102035752969
|
||||
}
|
||||
},
|
||||
"input_links": [
|
||||
{
|
||||
"id": "43e920a7-0bb4-4fae-9a22-91df95c7342a",
|
||||
"source_id": "9708a10a-8be0-4c44-abb3-bd0f7c594794",
|
||||
"sink_id": "510937b3-0134-4e45-b2ba-05a447bbaf50",
|
||||
"source_name": "result",
|
||||
"sink_name": "prompt_values_#_BUSINESS_NAME",
|
||||
"is_static": true
|
||||
},
|
||||
{
|
||||
"id": "899cc7d8-a96b-4107-b3c6-4c78edcf0c6b",
|
||||
"source_id": "4a41df99-ffe2-4c12-b528-632979c9c030",
|
||||
"sink_id": "510937b3-0134-4e45-b2ba-05a447bbaf50",
|
||||
"source_name": "results",
|
||||
"sink_name": "prompt_values_#_WEBSITE_CONTENT",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"output_links": [
|
||||
{
|
||||
"id": "9f8188ce-1f3d-46fb-acda-b2a57c0e5da6",
|
||||
"source_id": "510937b3-0134-4e45-b2ba-05a447bbaf50",
|
||||
"sink_id": "a6e7355e-5bf8-4b09-b11c-a5e140389981",
|
||||
"source_name": "response",
|
||||
"sink_name": "text",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"graph_id": "4c6b68cb-bb75-4044-b1cb-2cee3fd39b26",
|
||||
"graph_version": 29,
|
||||
"webhook_id": null,
|
||||
"webhook": null
|
||||
}
|
||||
],
|
||||
"links": [
|
||||
{
|
||||
"id": "9f8188ce-1f3d-46fb-acda-b2a57c0e5da6",
|
||||
"source_id": "510937b3-0134-4e45-b2ba-05a447bbaf50",
|
||||
"sink_id": "a6e7355e-5bf8-4b09-b11c-a5e140389981",
|
||||
"source_name": "response",
|
||||
"sink_name": "text",
|
||||
"is_static": false
|
||||
},
|
||||
{
|
||||
"id": "b15b5143-27b7-486e-a166-4095e72e5235",
|
||||
"source_id": "a6e7355e-5bf8-4b09-b11c-a5e140389981",
|
||||
"sink_id": "266b7255-11c4-4b88-99e2-85db31a2e865",
|
||||
"source_name": "negative",
|
||||
"sink_name": "values_#_Result",
|
||||
"is_static": false
|
||||
},
|
||||
{
|
||||
"id": "d87b07ea-dcec-4d38-a644-2c1d741ea3cb",
|
||||
"source_id": "266b7255-11c4-4b88-99e2-85db31a2e865",
|
||||
"sink_id": "310c8fab-2ae6-4158-bd48-01dbdc434130",
|
||||
"source_name": "output",
|
||||
"sink_name": "value",
|
||||
"is_static": false
|
||||
},
|
||||
{
|
||||
"id": "946b522c-365f-4ee0-96f9-28863d9882ea",
|
||||
"source_id": "9708a10a-8be0-4c44-abb3-bd0f7c594794",
|
||||
"sink_id": "28b5ddcc-dc20-41cc-ad21-c54ff459f694",
|
||||
"source_name": "result",
|
||||
"sink_name": "values_#_NAME",
|
||||
"is_static": true
|
||||
},
|
||||
{
|
||||
"id": "23591872-3c6b-4562-87d3-5b6ade698e48",
|
||||
"source_id": "a6e7355e-5bf8-4b09-b11c-a5e140389981",
|
||||
"sink_id": "310c8fab-2ae6-4158-bd48-01dbdc434130",
|
||||
"source_name": "positive",
|
||||
"sink_name": "value",
|
||||
"is_static": false
|
||||
},
|
||||
{
|
||||
"id": "43e920a7-0bb4-4fae-9a22-91df95c7342a",
|
||||
"source_id": "9708a10a-8be0-4c44-abb3-bd0f7c594794",
|
||||
"sink_id": "510937b3-0134-4e45-b2ba-05a447bbaf50",
|
||||
"source_name": "result",
|
||||
"sink_name": "prompt_values_#_BUSINESS_NAME",
|
||||
"is_static": true
|
||||
},
|
||||
{
|
||||
"id": "2e411d3d-79ba-4958-9c1c-b76a45a2e649",
|
||||
"source_id": "28b5ddcc-dc20-41cc-ad21-c54ff459f694",
|
||||
"sink_id": "4a41df99-ffe2-4c12-b528-632979c9c030",
|
||||
"source_name": "output",
|
||||
"sink_name": "query",
|
||||
"is_static": false
|
||||
},
|
||||
{
|
||||
"id": "aac29f7b-3cd1-4c91-9a2a-72a8301c0957",
|
||||
"source_id": "04cad535-9f1a-4876-8b07-af5897d8c282",
|
||||
"sink_id": "28b5ddcc-dc20-41cc-ad21-c54ff459f694",
|
||||
"source_name": "result",
|
||||
"sink_name": "values_#_ADDRESS",
|
||||
"is_static": true
|
||||
},
|
||||
{
|
||||
"id": "899cc7d8-a96b-4107-b3c6-4c78edcf0c6b",
|
||||
"source_id": "4a41df99-ffe2-4c12-b528-632979c9c030",
|
||||
"sink_id": "510937b3-0134-4e45-b2ba-05a447bbaf50",
|
||||
"source_name": "results",
|
||||
"sink_name": "prompt_values_#_WEBSITE_CONTENT",
|
||||
"is_static": false
|
||||
}
|
||||
],
|
||||
"forked_from_id": null,
|
||||
"forked_from_version": null,
|
||||
"sub_graphs": [],
|
||||
"user_id": "",
|
||||
"created_at": "2025-01-03T00:46:30.244Z",
|
||||
"input_schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"Address": {
|
||||
"advanced": false,
|
||||
"secret": false,
|
||||
"title": "Address",
|
||||
"default": "USA"
|
||||
},
|
||||
"Business Name": {
|
||||
"advanced": false,
|
||||
"secret": false,
|
||||
"title": "Business Name",
|
||||
"default": "Tim Cook"
|
||||
}
|
||||
},
|
||||
"required": []
|
||||
},
|
||||
"output_schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"Email": {
|
||||
"advanced": false,
|
||||
"secret": false,
|
||||
"title": "Email"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"Email"
|
||||
]
|
||||
},
|
||||
"has_external_trigger": false,
|
||||
"has_human_in_the_loop": false,
|
||||
"trigger_setup_info": null,
|
||||
"credentials_input_schema": {
|
||||
"properties": {
|
||||
"jina_api_key_credentials": {
|
||||
"credentials_provider": [
|
||||
"jina"
|
||||
],
|
||||
"credentials_types": [
|
||||
"api_key"
|
||||
],
|
||||
"properties": {
|
||||
"id": {
|
||||
"title": "Id",
|
||||
"type": "string"
|
||||
},
|
||||
"title": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"default": null,
|
||||
"title": "Title"
|
||||
},
|
||||
"provider": {
|
||||
"const": "jina",
|
||||
"title": "Provider",
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"const": "api_key",
|
||||
"title": "Type",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"provider",
|
||||
"type"
|
||||
],
|
||||
"title": "CredentialsMetaInput[Literal[<ProviderName.JINA: 'jina'>], Literal['api_key']]",
|
||||
"type": "object",
|
||||
"discriminator_values": []
|
||||
},
|
||||
"anthropic_api_key_credentials": {
|
||||
"credentials_provider": [
|
||||
"anthropic"
|
||||
],
|
||||
"credentials_types": [
|
||||
"api_key"
|
||||
],
|
||||
"properties": {
|
||||
"id": {
|
||||
"title": "Id",
|
||||
"type": "string"
|
||||
},
|
||||
"title": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"default": null,
|
||||
"title": "Title"
|
||||
},
|
||||
"provider": {
|
||||
"const": "anthropic",
|
||||
"title": "Provider",
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"const": "api_key",
|
||||
"title": "Type",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"provider",
|
||||
"type"
|
||||
],
|
||||
"title": "CredentialsMetaInput[Literal[<ProviderName.ANTHROPIC: 'anthropic'>], Literal['api_key']]",
|
||||
"type": "object",
|
||||
"discriminator": "model",
|
||||
"discriminator_mapping": {
|
||||
"Llama-3.3-70B-Instruct": "llama_api",
|
||||
"Llama-3.3-8B-Instruct": "llama_api",
|
||||
"Llama-4-Maverick-17B-128E-Instruct-FP8": "llama_api",
|
||||
"Llama-4-Scout-17B-16E-Instruct-FP8": "llama_api",
|
||||
"Qwen/Qwen2.5-72B-Instruct-Turbo": "aiml_api",
|
||||
"amazon/nova-lite-v1": "open_router",
|
||||
"amazon/nova-micro-v1": "open_router",
|
||||
"amazon/nova-pro-v1": "open_router",
|
||||
"claude-3-7-sonnet-20250219": "anthropic",
|
||||
"claude-3-haiku-20240307": "anthropic",
|
||||
"claude-haiku-4-5-20251001": "anthropic",
|
||||
"claude-opus-4-1-20250805": "anthropic",
|
||||
"claude-opus-4-20250514": "anthropic",
|
||||
"claude-opus-4-5-20251101": "anthropic",
|
||||
"claude-sonnet-4-20250514": "anthropic",
|
||||
"claude-sonnet-4-5-20250929": "anthropic",
|
||||
"cohere/command-r-08-2024": "open_router",
|
||||
"cohere/command-r-plus-08-2024": "open_router",
|
||||
"deepseek/deepseek-chat": "open_router",
|
||||
"deepseek/deepseek-r1-0528": "open_router",
|
||||
"dolphin-mistral:latest": "ollama",
|
||||
"google/gemini-2.0-flash-001": "open_router",
|
||||
"google/gemini-2.0-flash-lite-001": "open_router",
|
||||
"google/gemini-2.5-flash": "open_router",
|
||||
"google/gemini-2.5-flash-lite-preview-06-17": "open_router",
|
||||
"google/gemini-2.5-pro-preview-03-25": "open_router",
|
||||
"google/gemini-3-pro-preview": "open_router",
|
||||
"gpt-3.5-turbo": "openai",
|
||||
"gpt-4-turbo": "openai",
|
||||
"gpt-4.1-2025-04-14": "openai",
|
||||
"gpt-4.1-mini-2025-04-14": "openai",
|
||||
"gpt-4o": "openai",
|
||||
"gpt-4o-mini": "openai",
|
||||
"gpt-5-2025-08-07": "openai",
|
||||
"gpt-5-chat-latest": "openai",
|
||||
"gpt-5-mini-2025-08-07": "openai",
|
||||
"gpt-5-nano-2025-08-07": "openai",
|
||||
"gpt-5.1-2025-11-13": "openai",
|
||||
"gryphe/mythomax-l2-13b": "open_router",
|
||||
"llama-3.1-8b-instant": "groq",
|
||||
"llama-3.3-70b-versatile": "groq",
|
||||
"llama3": "ollama",
|
||||
"llama3.1:405b": "ollama",
|
||||
"llama3.2": "ollama",
|
||||
"llama3.3": "ollama",
|
||||
"meta-llama/Llama-3.2-3B-Instruct-Turbo": "aiml_api",
|
||||
"meta-llama/Llama-3.3-70B-Instruct-Turbo": "aiml_api",
|
||||
"meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo": "aiml_api",
|
||||
"meta-llama/llama-4-maverick": "open_router",
|
||||
"meta-llama/llama-4-scout": "open_router",
|
||||
"microsoft/wizardlm-2-8x22b": "open_router",
|
||||
"mistralai/mistral-nemo": "open_router",
|
||||
"moonshotai/kimi-k2": "open_router",
|
||||
"nousresearch/hermes-3-llama-3.1-405b": "open_router",
|
||||
"nousresearch/hermes-3-llama-3.1-70b": "open_router",
|
||||
"nvidia/llama-3.1-nemotron-70b-instruct": "aiml_api",
|
||||
"o1": "openai",
|
||||
"o1-mini": "openai",
|
||||
"o3-2025-04-16": "openai",
|
||||
"o3-mini": "openai",
|
||||
"openai/gpt-oss-120b": "open_router",
|
||||
"openai/gpt-oss-20b": "open_router",
|
||||
"perplexity/sonar": "open_router",
|
||||
"perplexity/sonar-deep-research": "open_router",
|
||||
"perplexity/sonar-pro": "open_router",
|
||||
"qwen/qwen3-235b-a22b-thinking-2507": "open_router",
|
||||
"qwen/qwen3-coder": "open_router",
|
||||
"v0-1.0-md": "v0",
|
||||
"v0-1.5-lg": "v0",
|
||||
"v0-1.5-md": "v0",
|
||||
"x-ai/grok-4": "open_router",
|
||||
"x-ai/grok-4-fast": "open_router",
|
||||
"x-ai/grok-4.1-fast": "open_router",
|
||||
"x-ai/grok-code-fast-1": "open_router"
|
||||
},
|
||||
"discriminator_values": [
|
||||
"claude-sonnet-4-5-20250929"
|
||||
]
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"jina_api_key_credentials",
|
||||
"anthropic_api_key_credentials"
|
||||
],
|
||||
"title": "EmailAddressFinderCredentialsInputSchema",
|
||||
"type": "object"
|
||||
}
|
||||
}
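The graph above relies on a text-matching block with pattern <email>(.*?)<\/email> and group 1 to pull the address out of the model's tagged reply, and routes the negative match to a "Failed to find email" message instead. A small standalone illustration of that extraction step (not the block's actual implementation); DOTALL is an assumption so the tags may span multiple lines, as in the example response format shown in the prompt:

import re

EMAIL_TAG_PATTERN = r"<email>(.*?)</email>"

def extract_email(llm_response: str) -> str | None:
    match = re.search(EMAIL_TAG_PATTERN, llm_response, re.DOTALL)
    return match.group(1).strip() if match else None

reply = (
    "<thoughts_or_comments>Found one.</thoughts_or_comments>\n"
    "<email>\nexample@email.com\n</email>"
)
print(extract_email(reply))  # example@email.com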
156
autogpt_platform/backend/backend/integrations/embeddings.py
Normal file
@@ -0,0 +1,156 @@
|
||||
"""
|
||||
Embedding service for generating text embeddings using OpenAI.
|
||||
|
||||
Used for vector-based semantic search in the store.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
import openai
|
||||
|
||||
from backend.util.settings import Settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Model configuration
|
||||
# Using text-embedding-3-small (1536 dimensions) for compatibility with pgvector indexes
|
||||
# pgvector IVFFlat/HNSW indexes have dimension limits (2000 for IVFFlat, varies for HNSW)
|
||||
EMBEDDING_MODEL = "text-embedding-3-small"
|
||||
EMBEDDING_DIMENSIONS = 1536
|
||||
|
||||
# Input validation limits
|
||||
# OpenAI text-embedding-3 models accept up to 8191 input tokens (~32k chars)
|
||||
# We set a conservative limit to prevent abuse
|
||||
MAX_TEXT_LENGTH = 10000 # characters
|
||||
MAX_BATCH_SIZE = 100 # maximum texts per batch request
|
||||
|
||||
|
||||
class EmbeddingService:
|
||||
"""Service for generating text embeddings using OpenAI."""
|
||||
|
||||
def __init__(self, api_key: Optional[str] = None):
|
||||
settings = Settings()
|
||||
self.api_key = (
|
||||
api_key
|
||||
or settings.secrets.openai_internal_api_key
|
||||
or settings.secrets.openai_api_key
|
||||
)
|
||||
if not self.api_key:
|
||||
raise ValueError(
|
||||
"OpenAI API key not configured. "
|
||||
"Set OPENAI_API_KEY or OPENAI_INTERNAL_API_KEY environment variable."
|
||||
)
|
||||
self.client = openai.AsyncOpenAI(api_key=self.api_key)
|
||||
|
||||
async def generate_embedding(self, text: str) -> list[float]:
|
||||
"""
|
||||
Generate embedding for a single text string.
|
||||
|
||||
Args:
|
||||
text: The text to generate an embedding for.
|
||||
|
||||
Returns:
|
||||
A list of floats representing the embedding vector.
|
||||
|
||||
Raises:
|
||||
ValueError: If the text is empty or exceeds maximum length.
|
||||
openai.APIError: If the OpenAI API call fails.
|
||||
"""
|
||||
# Input validation
|
||||
if not text or not text.strip():
|
||||
raise ValueError("Text cannot be empty")
|
||||
if len(text) > MAX_TEXT_LENGTH:
|
||||
raise ValueError(
|
||||
f"Text exceeds maximum length of {MAX_TEXT_LENGTH} characters"
|
||||
)
|
||||
|
||||
try:
|
||||
response = await self.client.embeddings.create(
|
||||
model=EMBEDDING_MODEL,
|
||||
input=text,
|
||||
dimensions=EMBEDDING_DIMENSIONS,
|
||||
)
|
||||
return response.data[0].embedding
|
||||
except openai.APIError as e:
|
||||
logger.error(f"OpenAI API error generating embedding: {e}")
|
||||
raise
|
||||
|
||||
async def generate_embeddings(self, texts: list[str]) -> list[list[float]]:
|
||||
"""
|
||||
Generate embeddings for multiple texts (batch).
|
||||
|
||||
Args:
|
||||
texts: List of texts to generate embeddings for.
|
||||
|
||||
Returns:
|
||||
List of embedding vectors, one per input text.
|
||||
|
||||
Raises:
|
||||
ValueError: If any text is invalid or batch size exceeds limit.
|
||||
openai.APIError: If the OpenAI API call fails.
|
||||
"""
|
||||
# Input validation
|
||||
if not texts:
|
||||
raise ValueError("Texts list cannot be empty")
|
||||
if len(texts) > MAX_BATCH_SIZE:
|
||||
raise ValueError(f"Batch size exceeds maximum of {MAX_BATCH_SIZE} texts")
|
||||
for i, text in enumerate(texts):
|
||||
if not text or not text.strip():
|
||||
raise ValueError(f"Text at index {i} cannot be empty")
|
||||
if len(text) > MAX_TEXT_LENGTH:
|
||||
raise ValueError(
|
||||
f"Text at index {i} exceeds maximum length of {MAX_TEXT_LENGTH} characters"
|
||||
)
|
||||
|
||||
try:
|
||||
response = await self.client.embeddings.create(
|
||||
model=EMBEDDING_MODEL,
|
||||
input=texts,
|
||||
dimensions=EMBEDDING_DIMENSIONS,
|
||||
)
|
||||
# Sort by index to ensure correct ordering
|
||||
sorted_data = sorted(response.data, key=lambda x: x.index)
|
||||
return [item.embedding for item in sorted_data]
|
||||
except openai.APIError as e:
|
||||
logger.error(f"OpenAI API error generating embeddings: {e}")
|
||||
raise
|
||||
|
||||
|
||||
def create_search_text(name: str, sub_heading: str, description: str) -> str:
|
||||
"""
|
||||
Combine fields into searchable text for embedding.
|
||||
|
||||
This creates a single text string from the agent's name, sub-heading,
|
||||
and description, which is then converted to an embedding vector.
|
||||
|
||||
Args:
|
||||
name: The agent name.
|
||||
sub_heading: The agent sub-heading/tagline.
|
||||
description: The agent description.
|
||||
|
||||
Returns:
|
||||
A single string combining all non-empty fields.
|
||||
"""
|
||||
parts = [name or "", sub_heading or "", description or ""]
|
||||
return " ".join(filter(None, parts)).strip()
|
||||
|
||||
|
||||
# Singleton instance
|
||||
_embedding_service: Optional[EmbeddingService] = None
|
||||
|
||||
|
||||
async def get_embedding_service() -> EmbeddingService:
|
||||
"""
|
||||
Get or create the embedding service singleton.
|
||||
|
||||
Returns:
|
||||
The shared EmbeddingService instance.
|
||||
|
||||
Raises:
|
||||
ValueError: If OpenAI API key is not configured.
|
||||
"""
|
||||
global _embedding_service
|
||||
if _embedding_service is None:
|
||||
_embedding_service = EmbeddingService()
|
||||
return _embedding_service
|
||||
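For reference, a minimal usage sketch of the embedding service above (not part of the diff; it assumes the module import path shown in the file header and that an OpenAI key is configured in the environment):

import asyncio

from backend.integrations.embeddings import EmbeddingService, create_search_text


async def demo() -> None:
    # Raises ValueError if neither OPENAI_API_KEY nor OPENAI_INTERNAL_API_KEY is set.
    service = EmbeddingService()
    text = create_search_text("Test Agent", "A helpful agent", "Summarizes long documents")
    vector = await service.generate_embedding(text)
    print(len(vector))  # 1536, i.e. EMBEDDING_DIMENSIONS


if __name__ == "__main__":
    asyncio.run(demo())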
autogpt_platform/backend/backend/integrations/embeddings_test.py (new file, 231 lines)
@@ -0,0 +1,231 @@
|
||||
"""Tests for the embedding service.
|
||||
|
||||
This module tests:
|
||||
- create_search_text utility function
|
||||
- EmbeddingService input validation
|
||||
- EmbeddingService API interaction (mocked)
|
||||
"""
|
||||
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from backend.integrations.embeddings import (
|
||||
EMBEDDING_DIMENSIONS,
|
||||
MAX_BATCH_SIZE,
|
||||
MAX_TEXT_LENGTH,
|
||||
EmbeddingService,
|
||||
create_search_text,
|
||||
)
|
||||
|
||||
|
||||
class TestCreateSearchText:
|
||||
"""Tests for the create_search_text utility function."""
|
||||
|
||||
def test_combines_all_fields(self):
|
||||
result = create_search_text("Agent Name", "A cool agent", "Does amazing things")
|
||||
assert result == "Agent Name A cool agent Does amazing things"
|
||||
|
||||
def test_handles_empty_name(self):
|
||||
result = create_search_text("", "Sub heading", "Description")
|
||||
assert result == "Sub heading Description"
|
||||
|
||||
def test_handles_empty_sub_heading(self):
|
||||
result = create_search_text("Name", "", "Description")
|
||||
assert result == "Name Description"
|
||||
|
||||
def test_handles_empty_description(self):
|
||||
result = create_search_text("Name", "Sub heading", "")
|
||||
assert result == "Name Sub heading"
|
||||
|
||||
def test_handles_all_empty(self):
|
||||
result = create_search_text("", "", "")
|
||||
assert result == ""
|
||||
|
||||
def test_handles_none_values(self):
|
||||
# The function expects strings but should handle None gracefully
|
||||
result = create_search_text(None, None, None) # type: ignore
|
||||
assert result == ""
|
||||
|
||||
def test_preserves_content_strips_outer_whitespace(self):
|
||||
# The function joins parts and strips the outer result
|
||||
# Internal whitespace in each part is preserved
|
||||
result = create_search_text(" Name ", " Sub ", " Desc ")
|
||||
# Each part is joined with space, then outer strip applied
|
||||
assert result == "Name   Sub   Desc"
|
||||
|
||||
def test_handles_only_whitespace(self):
|
||||
# Parts that are only whitespace become empty after filter
|
||||
result = create_search_text(" ", " ", " ")
|
||||
assert result == ""
|
||||
|
||||
|
||||
class TestEmbeddingServiceValidation:
|
||||
"""Tests for EmbeddingService input validation."""
|
||||
|
||||
@pytest.fixture
|
||||
def mock_settings(self):
|
||||
"""Mock settings with a test API key."""
|
||||
with patch("backend.integrations.embeddings.Settings") as mock:
|
||||
mock_instance = MagicMock()
|
||||
mock_instance.secrets.openai_internal_api_key = "test-api-key"
|
||||
mock_instance.secrets.openai_api_key = ""
|
||||
mock.return_value = mock_instance
|
||||
yield mock
|
||||
|
||||
@pytest.fixture
|
||||
def service(self, mock_settings):
|
||||
"""Create an EmbeddingService instance with mocked settings."""
|
||||
with patch("backend.integrations.embeddings.openai.AsyncOpenAI"):
|
||||
return EmbeddingService()
|
||||
|
||||
def test_init_requires_api_key(self):
|
||||
"""Test that initialization fails without an API key."""
|
||||
with patch("backend.integrations.embeddings.Settings") as mock:
|
||||
mock_instance = MagicMock()
|
||||
mock_instance.secrets.openai_internal_api_key = ""
|
||||
mock_instance.secrets.openai_api_key = ""
|
||||
mock.return_value = mock_instance
|
||||
|
||||
with pytest.raises(ValueError, match="OpenAI API key not configured"):
|
||||
EmbeddingService()
|
||||
|
||||
def test_init_accepts_explicit_api_key(self):
|
||||
"""Test that explicit API key overrides settings."""
|
||||
with patch("backend.integrations.embeddings.Settings") as mock:
|
||||
mock_instance = MagicMock()
|
||||
mock_instance.secrets.openai_internal_api_key = ""
|
||||
mock_instance.secrets.openai_api_key = ""
|
||||
mock.return_value = mock_instance
|
||||
|
||||
with patch("backend.integrations.embeddings.openai.AsyncOpenAI"):
|
||||
service = EmbeddingService(api_key="explicit-key")
|
||||
assert service.api_key == "explicit-key"
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_generate_embedding_empty_text(self, service):
|
||||
"""Test that empty text raises ValueError."""
|
||||
with pytest.raises(ValueError, match="Text cannot be empty"):
|
||||
await service.generate_embedding("")
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_generate_embedding_whitespace_only(self, service):
|
||||
"""Test that whitespace-only text raises ValueError."""
|
||||
with pytest.raises(ValueError, match="Text cannot be empty"):
|
||||
await service.generate_embedding(" ")
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_generate_embedding_exceeds_max_length(self, service):
|
||||
"""Test that text exceeding max length raises ValueError."""
|
||||
long_text = "a" * (MAX_TEXT_LENGTH + 1)
|
||||
with pytest.raises(ValueError, match="exceeds maximum length"):
|
||||
await service.generate_embedding(long_text)
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_generate_embeddings_empty_list(self, service):
|
||||
"""Test that empty list raises ValueError."""
|
||||
with pytest.raises(ValueError, match="Texts list cannot be empty"):
|
||||
await service.generate_embeddings([])
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_generate_embeddings_exceeds_batch_size(self, service):
|
||||
"""Test that batch exceeding max size raises ValueError."""
|
||||
texts = ["text"] * (MAX_BATCH_SIZE + 1)
|
||||
with pytest.raises(ValueError, match="Batch size exceeds maximum"):
|
||||
await service.generate_embeddings(texts)
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_generate_embeddings_empty_text_in_batch(self, service):
|
||||
"""Test that empty text in batch raises ValueError with index."""
|
||||
with pytest.raises(ValueError, match="Text at index 1 cannot be empty"):
|
||||
await service.generate_embeddings(["valid", "", "also valid"])
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_generate_embeddings_long_text_in_batch(self, service):
|
||||
"""Test that long text in batch raises ValueError with index."""
|
||||
long_text = "a" * (MAX_TEXT_LENGTH + 1)
|
||||
with pytest.raises(ValueError, match="Text at index 2 exceeds maximum length"):
|
||||
await service.generate_embeddings(["short", "also short", long_text])
|
||||
|
||||
|
||||
class TestEmbeddingServiceAPI:
|
||||
"""Tests for EmbeddingService API interaction."""
|
||||
|
||||
@pytest.fixture
|
||||
def mock_openai_client(self):
|
||||
"""Create a mock OpenAI client."""
|
||||
mock_client = MagicMock()
|
||||
mock_client.embeddings = MagicMock()
|
||||
return mock_client
|
||||
|
||||
@pytest.fixture
|
||||
def service_with_mock_client(self, mock_openai_client):
|
||||
"""Create an EmbeddingService with a mocked OpenAI client."""
|
||||
with patch("backend.integrations.embeddings.Settings") as mock_settings:
|
||||
mock_instance = MagicMock()
|
||||
mock_instance.secrets.openai_internal_api_key = "test-key"
|
||||
mock_instance.secrets.openai_api_key = ""
|
||||
mock_settings.return_value = mock_instance
|
||||
|
||||
with patch(
|
||||
"backend.integrations.embeddings.openai.AsyncOpenAI"
|
||||
) as mock_openai:
|
||||
mock_openai.return_value = mock_openai_client
|
||||
service = EmbeddingService()
|
||||
return service, mock_openai_client
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_generate_embedding_success(self, service_with_mock_client):
|
||||
"""Test successful embedding generation."""
|
||||
service, mock_client = service_with_mock_client
|
||||
|
||||
# Create mock response
|
||||
mock_embedding = [0.1] * EMBEDDING_DIMENSIONS
|
||||
mock_response = MagicMock()
|
||||
mock_response.data = [MagicMock(embedding=mock_embedding)]
|
||||
mock_client.embeddings.create = AsyncMock(return_value=mock_response)
|
||||
|
||||
result = await service.generate_embedding("test text")
|
||||
|
||||
assert result == mock_embedding
|
||||
mock_client.embeddings.create.assert_called_once()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_generate_embeddings_success(self, service_with_mock_client):
|
||||
"""Test successful batch embedding generation."""
|
||||
service, mock_client = service_with_mock_client
|
||||
|
||||
# Create mock response with multiple embeddings
|
||||
mock_embeddings = [[0.1] * EMBEDDING_DIMENSIONS, [0.2] * EMBEDDING_DIMENSIONS]
|
||||
mock_response = MagicMock()
|
||||
mock_response.data = [
|
||||
MagicMock(embedding=mock_embeddings[0], index=0),
|
||||
MagicMock(embedding=mock_embeddings[1], index=1),
|
||||
]
|
||||
mock_client.embeddings.create = AsyncMock(return_value=mock_response)
|
||||
|
||||
result = await service.generate_embeddings(["text1", "text2"])
|
||||
|
||||
assert result == mock_embeddings
|
||||
mock_client.embeddings.create.assert_called_once()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_generate_embeddings_preserves_order(self, service_with_mock_client):
|
||||
"""Test that batch embeddings are returned in correct order even if API returns out of order."""
|
||||
service, mock_client = service_with_mock_client
|
||||
|
||||
# Create mock response with embeddings out of order
|
||||
mock_embeddings = [[0.1] * EMBEDDING_DIMENSIONS, [0.2] * EMBEDDING_DIMENSIONS]
|
||||
mock_response = MagicMock()
|
||||
# Return in reverse order
|
||||
mock_response.data = [
|
||||
MagicMock(embedding=mock_embeddings[1], index=1),
|
||||
MagicMock(embedding=mock_embeddings[0], index=0),
|
||||
]
|
||||
mock_client.embeddings.create = AsyncMock(return_value=mock_response)
|
||||
|
||||
result = await service.generate_embeddings(["text1", "text2"])
|
||||
|
||||
# Should be sorted by index
|
||||
assert result[0] == mock_embeddings[0]
|
||||
assert result[1] == mock_embeddings[1]
|
||||
@@ -273,6 +273,8 @@ async def list_providers(
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to load blocks: {e}")
|
||||
|
||||
from backend.sdk.registry import AutoRegistry
|
||||
|
||||
providers = []
|
||||
for name in get_all_provider_names():
|
||||
supports_oauth = name in HANDLERS_BY_NAME
|
||||
@@ -281,13 +283,27 @@ async def list_providers(
|
||||
getattr(handler_class, "DEFAULT_SCOPES", []) if handler_class else []
|
||||
)
|
||||
|
||||
# Check if provider has specific auth types from SDK registration
|
||||
sdk_provider = AutoRegistry.get_provider(name)
|
||||
if sdk_provider and sdk_provider.supported_auth_types:
|
||||
supports_api_key = "api_key" in sdk_provider.supported_auth_types
|
||||
supports_user_password = (
|
||||
"user_password" in sdk_provider.supported_auth_types
|
||||
)
|
||||
supports_host_scoped = "host_scoped" in sdk_provider.supported_auth_types
|
||||
else:
|
||||
# Fallback for legacy providers
|
||||
supports_api_key = True # All providers can accept API keys
|
||||
supports_user_password = name in ("smtp",)
|
||||
supports_host_scoped = name == "http"
|
||||
|
||||
providers.append(
|
||||
ProviderInfo(
|
||||
name=name,
|
||||
supports_oauth=supports_oauth,
|
||||
supports_api_key=True, # All providers can accept API keys
|
||||
supports_user_password=name in ("smtp",), # SMTP uses user/password
|
||||
supports_host_scoped=name == "http", # HTTP block uses host-scoped
|
||||
supports_api_key=supports_api_key,
|
||||
supports_user_password=supports_user_password,
|
||||
supports_host_scoped=supports_host_scoped,
|
||||
default_scopes=default_scopes,
|
||||
)
|
||||
)
|
||||
|
||||
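Read as a standalone helper, the auth-capability logic added above amounts to the following sketch (illustrative only; resolve_auth_support is not a function in the codebase, and the sdk_provider argument stands in for whatever AutoRegistry.get_provider returns):

def resolve_auth_support(name: str, sdk_provider) -> tuple[bool, bool, bool]:
    """Return (supports_api_key, supports_user_password, supports_host_scoped)."""
    if sdk_provider is not None and sdk_provider.supported_auth_types:
        auth_types = sdk_provider.supported_auth_types
        return (
            "api_key" in auth_types,
            "user_password" in auth_types,
            "host_scoped" in auth_types,
        )
    # Legacy fallback: every provider accepts API keys, SMTP uses user/password,
    # and only the HTTP block uses host-scoped credentials.
    return (True, name in ("smtp",), name == "http")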
@@ -0,0 +1,168 @@
|
||||
"""
|
||||
Script to backfill embeddings for existing store listing versions.
|
||||
|
||||
This script should be run after the migration to add the embedding column
|
||||
to populate embeddings for all existing store listing versions.
|
||||
|
||||
Usage:
|
||||
poetry run python -m backend.server.v2.store.backfill_embeddings
|
||||
poetry run python -m backend.server.v2.store.backfill_embeddings --dry-run
|
||||
poetry run python -m backend.server.v2.store.backfill_embeddings --batch-size 25
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import asyncio
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from backend.data.db import connect, disconnect, query_raw_with_schema
|
||||
from backend.integrations.embeddings import EmbeddingService, create_search_text
|
||||
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
format="%(asctime)s - %(levelname)s - %(message)s",
|
||||
)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Default batch size for processing
|
||||
DEFAULT_BATCH_SIZE = 50
|
||||
|
||||
# Delay between batches to avoid rate limits (seconds)
|
||||
BATCH_DELAY_SECONDS = 1.0
|
||||
|
||||
|
||||
async def backfill_embeddings(
|
||||
dry_run: bool = False,
|
||||
batch_size: int = DEFAULT_BATCH_SIZE,
|
||||
) -> tuple[int, int]:
|
||||
"""
|
||||
Backfill embeddings for all store listing versions without embeddings.
|
||||
|
||||
Args:
|
||||
dry_run: If True, don't make any changes, just report what would be done.
|
||||
batch_size: Number of versions to process in each batch.
|
||||
|
||||
Returns:
|
||||
Tuple of (processed_count, error_count)
|
||||
"""
|
||||
await connect()
|
||||
|
||||
try:
|
||||
embedding_service = EmbeddingService()
|
||||
|
||||
# Get all versions without embeddings
|
||||
versions = await query_raw_with_schema(
|
||||
"""
|
||||
SELECT id, name, "subHeading", description
|
||||
FROM {schema_prefix}"StoreListingVersion"
|
||||
WHERE embedding IS NULL
|
||||
ORDER BY "createdAt" DESC
|
||||
"""
|
||||
)
|
||||
|
||||
total = len(versions)
|
||||
logger.info(f"Found {total} versions without embeddings")
|
||||
|
||||
if dry_run:
|
||||
logger.info("Dry run mode - no changes will be made")
|
||||
return (0, 0)
|
||||
|
||||
if total == 0:
|
||||
logger.info("No versions need embeddings")
|
||||
return (0, 0)
|
||||
|
||||
processed = 0
|
||||
errors = 0
|
||||
|
||||
for i in range(0, total, batch_size):
|
||||
batch = versions[i : i + batch_size]
|
||||
batch_num = (i // batch_size) + 1
|
||||
total_batches = (total + batch_size - 1) // batch_size
|
||||
|
||||
logger.info(f"Processing batch {batch_num}/{total_batches}")
|
||||
|
||||
for version in batch:
|
||||
version_id = version["id"]
|
||||
try:
|
||||
search_text = create_search_text(
|
||||
version["name"] or "",
|
||||
version["subHeading"] or "",
|
||||
version["description"] or "",
|
||||
)
|
||||
|
||||
if not search_text:
|
||||
logger.warning(f"Skipping {version_id} - no searchable text")
|
||||
continue
|
||||
|
||||
embedding = await embedding_service.generate_embedding(search_text)
|
||||
embedding_str = "[" + ",".join(map(str, embedding)) + "]"
|
||||
|
||||
await query_raw_with_schema(
|
||||
"""
|
||||
UPDATE {schema_prefix}"StoreListingVersion"
|
||||
SET embedding = $1::vector
|
||||
WHERE id = $2
|
||||
""",
|
||||
embedding_str,
|
||||
version_id,
|
||||
)
|
||||
|
||||
processed += 1
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error processing {version_id}: {e}")
|
||||
errors += 1
|
||||
|
||||
logger.info(f"Progress: {processed}/{total} processed, {errors} errors")
|
||||
|
||||
# Rate limit: wait between batches to avoid hitting API limits
|
||||
if i + batch_size < total:
|
||||
await asyncio.sleep(BATCH_DELAY_SECONDS)
|
||||
|
||||
logger.info(f"Backfill complete: {processed} processed, {errors} errors")
|
||||
return (processed, errors)
|
||||
|
||||
finally:
|
||||
await disconnect()
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Backfill embeddings for store listing versions"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--dry-run",
|
||||
action="store_true",
|
||||
help="Don't make any changes, just report what would be done",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--batch-size",
|
||||
type=int,
|
||||
default=DEFAULT_BATCH_SIZE,
|
||||
help=f"Number of versions to process in each batch (default: {DEFAULT_BATCH_SIZE})",
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
try:
|
||||
processed, errors = asyncio.run(
|
||||
backfill_embeddings(dry_run=args.dry_run, batch_size=args.batch_size)
|
||||
)
|
||||
|
||||
if errors > 0:
|
||||
logger.warning(f"Completed with {errors} errors")
|
||||
sys.exit(1)
|
||||
else:
|
||||
logger.info("Completed successfully")
|
||||
sys.exit(0)
|
||||
|
||||
except KeyboardInterrupt:
|
||||
logger.info("Interrupted by user")
|
||||
sys.exit(130)
|
||||
except Exception as e:
|
||||
logger.error(f"Fatal error: {e}")
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -26,6 +26,7 @@ from backend.data.notifications import (
|
||||
AgentRejectionData,
|
||||
NotificationEventModel,
|
||||
)
|
||||
from backend.integrations.embeddings import create_search_text, get_embedding_service
|
||||
from backend.notifications.notifications import queue_notification_async
|
||||
from backend.util.exceptions import DatabaseError
|
||||
from backend.util.settings import Settings
|
||||
@@ -56,31 +57,40 @@ async def get_store_agents(
|
||||
)
|
||||
|
||||
try:
|
||||
# If search_query is provided, use full-text search
|
||||
# If search_query is provided, use vector similarity search
|
||||
if search_query:
|
||||
offset = (page - 1) * page_size
|
||||
|
||||
# Generate embedding for search query
|
||||
embedding_service = await get_embedding_service()
|
||||
query_embedding = await embedding_service.generate_embedding(search_query)
|
||||
# Convert embedding to PostgreSQL array format
|
||||
embedding_str = "[" + ",".join(map(str, query_embedding)) + "]"
|
||||
|
||||
# Whitelist allowed order_by columns
|
||||
# For vector search, we use similarity instead of rank
|
||||
ALLOWED_ORDER_BY = {
|
||||
"rating": "rating DESC, rank DESC",
|
||||
"runs": "runs DESC, rank DESC",
|
||||
"name": "agent_name ASC, rank ASC",
|
||||
"updated_at": "updated_at DESC, rank DESC",
|
||||
"rating": "rating DESC, similarity DESC",
|
||||
"runs": "runs DESC, similarity DESC",
|
||||
"name": "agent_name ASC, similarity DESC",
|
||||
"updated_at": "updated_at DESC, similarity DESC",
|
||||
}
|
||||
|
||||
# Validate and get order clause
|
||||
if sorted_by and sorted_by in ALLOWED_ORDER_BY:
|
||||
order_by_clause = ALLOWED_ORDER_BY[sorted_by]
|
||||
else:
|
||||
order_by_clause = "updated_at DESC, rank DESC"
|
||||
# Default: order by vector similarity (most similar first)
|
||||
order_by_clause = "similarity DESC, updated_at DESC"
|
||||
|
||||
# Build WHERE conditions and parameters list
|
||||
where_parts: list[str] = []
|
||||
params: list[typing.Any] = [search_query] # $1 - search term
|
||||
params: list[typing.Any] = [embedding_str] # $1 - query embedding
|
||||
param_index = 2 # Start at $2 for next parameter
|
||||
|
||||
# Always filter for available agents
|
||||
# Always filter for available agents and agents with embeddings
|
||||
where_parts.append("is_available = true")
|
||||
where_parts.append("embedding IS NOT NULL")
|
||||
|
||||
if featured:
|
||||
where_parts.append("featured = true")
|
||||
@@ -103,7 +113,9 @@ async def get_store_agents(
|
||||
limit_param = f"${param_index}"
|
||||
offset_param = f"${param_index + 1}"
|
||||
|
||||
# Execute full-text search query with parameterized values
|
||||
# Vector similarity search query using cosine distance
|
||||
# The <=> operator returns cosine distance (0 = identical, 2 = opposite)
|
||||
# We convert to similarity as 1 - distance, which recovers the cosine similarity
|
||||
sql_query = f"""
|
||||
SELECT
|
||||
slug,
|
||||
@@ -119,22 +131,18 @@ async def get_store_agents(
|
||||
featured,
|
||||
is_available,
|
||||
updated_at,
|
||||
ts_rank_cd(search, query) AS rank
|
||||
FROM {{schema_prefix}}"StoreAgent",
|
||||
plainto_tsquery('english', $1) AS query
|
||||
1 - (embedding <=> $1::vector) AS similarity
|
||||
FROM {{schema_prefix}}"StoreAgent"
|
||||
WHERE {sql_where_clause}
|
||||
AND search @@ query
|
||||
ORDER BY {order_by_clause}
|
||||
LIMIT {limit_param} OFFSET {offset_param}
|
||||
"""
|
||||
|
||||
# Count query for pagination - only uses search term parameter
|
||||
# Count query for pagination
|
||||
count_query = f"""
|
||||
SELECT COUNT(*) as count
|
||||
FROM {{schema_prefix}}"StoreAgent",
|
||||
plainto_tsquery('english', $1) AS query
|
||||
FROM {{schema_prefix}}"StoreAgent"
|
||||
WHERE {sql_where_clause}
|
||||
AND search @@ query
|
||||
"""
|
||||
|
||||
# Execute both queries with parameters
|
||||
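To make the distance-to-similarity conversion in the query above concrete, a small worked example in plain Python (illustration only, not part of the diff):

import math

# Stand-ins for two unit-normalized embedding vectors.
a = [1.0, 0.0]
b = [math.sqrt(0.5), math.sqrt(0.5)]

cos_sim = sum(x * y for x, y in zip(a, b))  # ~0.7071 for these vectors
distance = 1 - cos_sim                      # what pgvector's `a <=> b` returns (cosine distance)
similarity = 1 - distance                   # what the SQL aliases as "similarity"
assert abs(similarity - cos_sim) < 1e-9     # 1 - (1 - cos_sim) gives back the cosine similarity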
@@ -255,6 +263,56 @@ async def log_search_term(search_query: str):
|
||||
logger.error(f"Error logging search term: {e}")
|
||||
|
||||
|
||||
async def _generate_and_store_embedding(
|
||||
store_listing_version_id: str,
|
||||
name: str,
|
||||
sub_heading: str,
|
||||
description: str,
|
||||
) -> None:
|
||||
"""
|
||||
Generate and store embedding for a store listing version.
|
||||
|
||||
This creates a vector embedding from the agent's name, sub_heading, and
|
||||
description, which is used for semantic search.
|
||||
|
||||
Args:
|
||||
store_listing_version_id: The ID of the store listing version.
|
||||
name: The agent name.
|
||||
sub_heading: The agent sub-heading/tagline.
|
||||
description: The agent description.
|
||||
"""
|
||||
try:
|
||||
embedding_service = await get_embedding_service()
|
||||
search_text = create_search_text(name, sub_heading, description)
|
||||
|
||||
if not search_text:
|
||||
logger.warning(
|
||||
f"No searchable text for version {store_listing_version_id}, "
|
||||
"skipping embedding generation"
|
||||
)
|
||||
return
|
||||
|
||||
embedding = await embedding_service.generate_embedding(search_text)
|
||||
embedding_str = "[" + ",".join(map(str, embedding)) + "]"
|
||||
|
||||
await query_raw_with_schema(
|
||||
"""
|
||||
UPDATE {schema_prefix}"StoreListingVersion"
|
||||
SET embedding = $1::vector
|
||||
WHERE id = $2
|
||||
""",
|
||||
embedding_str,
|
||||
store_listing_version_id,
|
||||
)
|
||||
logger.debug(f"Generated embedding for version {store_listing_version_id}")
|
||||
except Exception as e:
|
||||
# Log error but don't fail the whole operation
|
||||
# Embeddings can be generated later via backfill
|
||||
logger.error(
|
||||
f"Failed to generate embedding for {store_listing_version_id}: {e}"
|
||||
)
|
||||
|
||||
|
||||
async def get_store_agent_details(
|
||||
username: str, agent_name: str
|
||||
) -> backend.server.v2.store.model.StoreAgentDetails:
|
||||
@@ -801,6 +859,12 @@ async def create_store_submission(
|
||||
else None
|
||||
)
|
||||
|
||||
# Generate embedding for semantic search
|
||||
if store_listing_version_id:
|
||||
await _generate_and_store_embedding(
|
||||
store_listing_version_id, name, sub_heading, description
|
||||
)
|
||||
|
||||
logger.debug(f"Created store listing for agent {agent_id}")
|
||||
# Return submission details
|
||||
return backend.server.v2.store.model.StoreSubmission(
|
||||
@@ -963,6 +1027,12 @@ async def edit_store_submission(
|
||||
|
||||
if not updated_version:
|
||||
raise DatabaseError("Failed to update store listing version")
|
||||
|
||||
# Regenerate embedding with updated content
|
||||
await _generate_and_store_embedding(
|
||||
store_listing_version_id, name, sub_heading, description
|
||||
)
|
||||
|
||||
return backend.server.v2.store.model.StoreSubmission(
|
||||
agent_id=current_version.agentGraphId,
|
||||
agent_version=current_version.agentGraphVersion,
|
||||
@@ -1093,6 +1163,12 @@ async def create_store_version(
|
||||
logger.debug(
|
||||
f"Created new version for listing {store_listing_id} of agent {agent_id}"
|
||||
)
|
||||
|
||||
# Generate embedding for semantic search
|
||||
await _generate_and_store_embedding(
|
||||
new_version.id, name, sub_heading, description
|
||||
)
|
||||
|
||||
# Return submission details
|
||||
return backend.server.v2.store.model.StoreSubmission(
|
||||
agent_id=agent_id,
|
||||
|
||||
@@ -405,3 +405,237 @@ async def test_get_store_agents_search_category_array_injection():
|
||||
# Verify the query executed without error
|
||||
# Category should be parameterized, preventing SQL injection
|
||||
assert isinstance(result.agents, list)
|
||||
|
||||
|
||||
# Vector search tests
|
||||
|
||||
|
||||
@pytest.mark.asyncio(loop_scope="session")
|
||||
async def test_get_store_agents_vector_search_mocked(mocker):
|
||||
"""Test vector search uses embedding service and executes query safely."""
|
||||
from backend.integrations.embeddings import EMBEDDING_DIMENSIONS
|
||||
|
||||
# Mock embedding service
|
||||
mock_embedding = [0.1] * EMBEDDING_DIMENSIONS
|
||||
mock_embedding_service = mocker.MagicMock()
|
||||
mock_embedding_service.generate_embedding = mocker.AsyncMock(
|
||||
return_value=mock_embedding
|
||||
)
|
||||
mocker.patch(
|
||||
"backend.server.v2.store.db.get_embedding_service",
|
||||
mocker.AsyncMock(return_value=mock_embedding_service),
|
||||
)
|
||||
|
||||
# Mock query_raw_with_schema to return empty results
|
||||
mocker.patch(
|
||||
"backend.server.v2.store.db.query_raw_with_schema",
|
||||
mocker.AsyncMock(side_effect=[[], [{"count": 0}]]),
|
||||
)
|
||||
|
||||
# Call function with search query
|
||||
result = await db.get_store_agents(search_query="test query")
|
||||
|
||||
# Verify embedding service was called
|
||||
mock_embedding_service.generate_embedding.assert_called_once_with("test query")
|
||||
|
||||
# Verify results
|
||||
assert isinstance(result.agents, list)
|
||||
assert len(result.agents) == 0
|
||||
|
||||
|
||||
@pytest.mark.asyncio(loop_scope="session")
|
||||
async def test_get_store_agents_vector_search_with_results(mocker):
|
||||
"""Test vector search returns properly formatted results."""
|
||||
from backend.integrations.embeddings import EMBEDDING_DIMENSIONS
|
||||
|
||||
# Mock embedding service
|
||||
mock_embedding = [0.1] * EMBEDDING_DIMENSIONS
|
||||
mock_embedding_service = mocker.MagicMock()
|
||||
mock_embedding_service.generate_embedding = mocker.AsyncMock(
|
||||
return_value=mock_embedding
|
||||
)
|
||||
mocker.patch(
|
||||
"backend.server.v2.store.db.get_embedding_service",
|
||||
mocker.AsyncMock(return_value=mock_embedding_service),
|
||||
)
|
||||
|
||||
# Mock query results
|
||||
mock_agents = [
|
||||
{
|
||||
"slug": "test-agent",
|
||||
"agent_name": "Test Agent",
|
||||
"agent_image": ["image.jpg"],
|
||||
"creator_username": "creator",
|
||||
"creator_avatar": "avatar.jpg",
|
||||
"sub_heading": "Test heading",
|
||||
"description": "Test description",
|
||||
"runs": 10,
|
||||
"rating": 4.5,
|
||||
"categories": ["test"],
|
||||
"featured": False,
|
||||
"is_available": True,
|
||||
"updated_at": datetime.now(),
|
||||
"similarity": 0.95,
|
||||
}
|
||||
]
|
||||
mock_count = [{"count": 1}]
|
||||
|
||||
mocker.patch(
|
||||
"backend.server.v2.store.db.query_raw_with_schema",
|
||||
mocker.AsyncMock(side_effect=[mock_agents, mock_count]),
|
||||
)
|
||||
|
||||
# Call function with search query
|
||||
result = await db.get_store_agents(search_query="test query")
|
||||
|
||||
# Verify results
|
||||
assert len(result.agents) == 1
|
||||
assert result.agents[0].slug == "test-agent"
|
||||
assert result.agents[0].agent_name == "Test Agent"
|
||||
assert result.pagination.total_items == 1
|
||||
|
||||
|
||||
@pytest.mark.asyncio(loop_scope="session")
|
||||
async def test_get_store_agents_vector_search_with_filters(mocker):
|
||||
"""Test vector search works correctly with additional filters."""
|
||||
from backend.integrations.embeddings import EMBEDDING_DIMENSIONS
|
||||
|
||||
# Mock embedding service
|
||||
mock_embedding = [0.1] * EMBEDDING_DIMENSIONS
|
||||
mock_embedding_service = mocker.MagicMock()
|
||||
mock_embedding_service.generate_embedding = mocker.AsyncMock(
|
||||
return_value=mock_embedding
|
||||
)
|
||||
mocker.patch(
|
||||
"backend.server.v2.store.db.get_embedding_service",
|
||||
mocker.AsyncMock(return_value=mock_embedding_service),
|
||||
)
|
||||
|
||||
# Mock query_raw_with_schema
|
||||
mock_query = mocker.patch(
|
||||
"backend.server.v2.store.db.query_raw_with_schema",
|
||||
mocker.AsyncMock(side_effect=[[], [{"count": 0}]]),
|
||||
)
|
||||
|
||||
# Call function with search query and filters
|
||||
await db.get_store_agents(
|
||||
search_query="test query",
|
||||
featured=True,
|
||||
creators=["creator1", "creator2"],
|
||||
category="AI",
|
||||
sorted_by="rating",
|
||||
)
|
||||
|
||||
# Verify query was called with parameterized values
|
||||
# First call is the main query, second is count
|
||||
assert mock_query.call_count == 2
|
||||
|
||||
# Check that the SQL query includes proper parameterization
|
||||
first_call_args = mock_query.call_args_list[0]
|
||||
sql_query = first_call_args[0][0]
|
||||
|
||||
# Verify key elements of the query
|
||||
assert "embedding <=> $1::vector" in sql_query
|
||||
assert "featured = true" in sql_query
|
||||
assert "creator_username = ANY($" in sql_query
|
||||
assert "= ANY(categories)" in sql_query
|
||||
|
||||
|
||||
@pytest.mark.asyncio(loop_scope="session")
|
||||
async def test_generate_and_store_embedding_success(mocker):
|
||||
"""Test that embedding generation and storage works correctly."""
|
||||
from backend.integrations.embeddings import EMBEDDING_DIMENSIONS
|
||||
|
||||
# Mock embedding service
|
||||
mock_embedding = [0.1] * EMBEDDING_DIMENSIONS
|
||||
mock_embedding_service = mocker.MagicMock()
|
||||
mock_embedding_service.generate_embedding = mocker.AsyncMock(
|
||||
return_value=mock_embedding
|
||||
)
|
||||
mocker.patch(
|
||||
"backend.server.v2.store.db.get_embedding_service",
|
||||
mocker.AsyncMock(return_value=mock_embedding_service),
|
||||
)
|
||||
|
||||
# Mock query_raw_with_schema
|
||||
mock_query = mocker.patch(
|
||||
"backend.server.v2.store.db.query_raw_with_schema",
|
||||
mocker.AsyncMock(return_value=[]),
|
||||
)
|
||||
|
||||
# Call the internal function
|
||||
await db._generate_and_store_embedding(
|
||||
store_listing_version_id="version-123",
|
||||
name="Test Agent",
|
||||
sub_heading="A test agent",
|
||||
description="Does testing",
|
||||
)
|
||||
|
||||
# Verify embedding service was called with combined text
|
||||
mock_embedding_service.generate_embedding.assert_called_once_with(
|
||||
"Test Agent A test agent Does testing"
|
||||
)
|
||||
|
||||
# Verify database update was called
|
||||
mock_query.assert_called_once()
|
||||
call_args = mock_query.call_args
|
||||
assert "UPDATE" in call_args[0][0]
|
||||
assert "embedding = $1::vector" in call_args[0][0]
|
||||
assert call_args[0][2] == "version-123"
|
||||
|
||||
|
||||
@pytest.mark.asyncio(loop_scope="session")
|
||||
async def test_generate_and_store_embedding_empty_text(mocker):
|
||||
"""Test that embedding is not generated for empty text."""
|
||||
# Mock embedding service
|
||||
mock_embedding_service = mocker.MagicMock()
|
||||
mock_embedding_service.generate_embedding = mocker.AsyncMock()
|
||||
mocker.patch(
|
||||
"backend.server.v2.store.db.get_embedding_service",
|
||||
mocker.AsyncMock(return_value=mock_embedding_service),
|
||||
)
|
||||
|
||||
# Mock query_raw_with_schema
|
||||
mock_query = mocker.patch(
|
||||
"backend.server.v2.store.db.query_raw_with_schema",
|
||||
mocker.AsyncMock(return_value=[]),
|
||||
)
|
||||
|
||||
# Call with empty fields
|
||||
await db._generate_and_store_embedding(
|
||||
store_listing_version_id="version-123",
|
||||
name="",
|
||||
sub_heading="",
|
||||
description="",
|
||||
)
|
||||
|
||||
# Verify embedding service was NOT called
|
||||
mock_embedding_service.generate_embedding.assert_not_called()
|
||||
|
||||
# Verify database was NOT updated
|
||||
mock_query.assert_not_called()
|
||||
|
||||
|
||||
@pytest.mark.asyncio(loop_scope="session")
|
||||
async def test_generate_and_store_embedding_handles_error(mocker):
|
||||
"""Test that embedding generation errors don't crash the operation."""
|
||||
# Mock embedding service to raise an error
|
||||
mock_embedding_service = mocker.MagicMock()
|
||||
mock_embedding_service.generate_embedding = mocker.AsyncMock(
|
||||
side_effect=Exception("API error")
|
||||
)
|
||||
mocker.patch(
|
||||
"backend.server.v2.store.db.get_embedding_service",
|
||||
mocker.AsyncMock(return_value=mock_embedding_service),
|
||||
)
|
||||
|
||||
# Call should not raise - errors are logged but not propagated
|
||||
await db._generate_and_store_embedding(
|
||||
store_listing_version_id="version-123",
|
||||
name="Test Agent",
|
||||
sub_heading="A test agent",
|
||||
description="Does testing",
|
||||
)
|
||||
|
||||
# Verify embedding service was called (and failed)
|
||||
mock_embedding_service.generate_embedding.assert_called_once()
|
||||
|
||||
@@ -0,0 +1,92 @@
|
||||
-- Migration: Replace full-text search with pgvector-based vector search
|
||||
-- This migration:
|
||||
-- 1. Enables the pgvector extension
|
||||
-- 2. Drops the StoreAgent view (depends on search column)
|
||||
-- 3. Removes the full-text search infrastructure (trigger, function, tsvector column)
|
||||
-- 4. Adds a vector embedding column for semantic search
|
||||
-- 5. Creates an index for fast vector similarity search
|
||||
-- 6. Recreates the StoreAgent view with the embedding column
|
||||
|
||||
-- Enable pgvector extension
|
||||
CREATE EXTENSION IF NOT EXISTS vector;
|
||||
|
||||
-- First drop the view that depends on the search column
|
||||
DROP VIEW IF EXISTS "StoreAgent";
|
||||
|
||||
-- Remove full-text search infrastructure
|
||||
DROP TRIGGER IF EXISTS "update_tsvector" ON "StoreListingVersion";
|
||||
DROP FUNCTION IF EXISTS update_tsvector_column();
|
||||
|
||||
-- Drop the tsvector search column
|
||||
ALTER TABLE "StoreListingVersion" DROP COLUMN IF EXISTS "search";
|
||||
|
||||
-- Add embedding column for vector search (1536 dimensions for text-embedding-3-small)
|
||||
ALTER TABLE "StoreListingVersion"
|
||||
ADD COLUMN IF NOT EXISTS "embedding" vector(1536);
|
||||
|
||||
-- Create IVFFlat index for fast similarity search
|
||||
-- Using cosine distance (vector_cosine_ops) which is standard for text embeddings
|
||||
-- lists = 100 is appropriate for datasets under 1M rows
|
||||
CREATE INDEX IF NOT EXISTS idx_store_listing_version_embedding
|
||||
ON "StoreListingVersion"
|
||||
USING ivfflat (embedding vector_cosine_ops)
|
||||
WITH (lists = 100);
|
||||
|
||||
-- Recreate StoreAgent view WITHOUT search column, WITH embedding column
|
||||
CREATE OR REPLACE VIEW "StoreAgent" AS
|
||||
WITH latest_versions AS (
|
||||
SELECT
|
||||
"storeListingId",
|
||||
MAX(version) AS max_version
|
||||
FROM "StoreListingVersion"
|
||||
WHERE "submissionStatus" = 'APPROVED'
|
||||
GROUP BY "storeListingId"
|
||||
),
|
||||
agent_versions AS (
|
||||
SELECT
|
||||
"storeListingId",
|
||||
array_agg(DISTINCT version::text ORDER BY version::text) AS versions
|
||||
FROM "StoreListingVersion"
|
||||
WHERE "submissionStatus" = 'APPROVED'
|
||||
GROUP BY "storeListingId"
|
||||
)
|
||||
SELECT
|
||||
sl.id AS listing_id,
|
||||
slv.id AS "storeListingVersionId",
|
||||
slv."createdAt" AS updated_at,
|
||||
sl.slug,
|
||||
COALESCE(slv.name, '') AS agent_name,
|
||||
slv."videoUrl" AS agent_video,
|
||||
COALESCE(slv."imageUrls", ARRAY[]::text[]) AS agent_image,
|
||||
slv."isFeatured" AS featured,
|
||||
p.username AS creator_username,
|
||||
p."avatarUrl" AS creator_avatar,
|
||||
slv."subHeading" AS sub_heading,
|
||||
slv.description,
|
||||
slv.categories,
|
||||
slv.embedding,
|
||||
COALESCE(ar.run_count, 0::bigint) AS runs,
|
||||
COALESCE(rs.avg_rating, 0.0)::double precision AS rating,
|
||||
COALESCE(av.versions, ARRAY[slv.version::text]) AS versions,
|
||||
COALESCE(sl."useForOnboarding", false) AS "useForOnboarding",
|
||||
slv."isAvailable" AS is_available
|
||||
FROM "StoreListing" sl
|
||||
JOIN latest_versions lv
|
||||
ON sl.id = lv."storeListingId"
|
||||
JOIN "StoreListingVersion" slv
|
||||
ON slv."storeListingId" = lv."storeListingId"
|
||||
AND slv.version = lv.max_version
|
||||
AND slv."submissionStatus" = 'APPROVED'
|
||||
JOIN "AgentGraph" a
|
||||
ON slv."agentGraphId" = a.id
|
||||
AND slv."agentGraphVersion" = a.version
|
||||
LEFT JOIN "Profile" p
|
||||
ON sl."owningUserId" = p."userId"
|
||||
LEFT JOIN "mv_review_stats" rs
|
||||
ON sl.id = rs."storeListingId"
|
||||
LEFT JOIN "mv_agent_run_counts" ar
|
||||
ON a.id = ar."agentGraphId"
|
||||
LEFT JOIN agent_versions av
|
||||
ON sl.id = av."storeListingId"
|
||||
WHERE sl."isDeleted" = false
|
||||
AND sl."hasApprovedVersion" = true;
|
||||
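For context, a rough sketch of how the new embedding column and IVFFlat index can be queried from the backend (not part of the migration; nearest_agents is a hypothetical helper, while query_raw_with_schema and the {schema_prefix} placeholder mirror the backend code elsewhere in this diff):

from backend.data.db import connect, disconnect, query_raw_with_schema


async def nearest_agents(query_embedding: list[float], limit: int = 10):
    """Return the store agents closest to the given query embedding."""
    # pgvector expects a literal like "[0.1,0.2,...]" when casting to ::vector.
    embedding_literal = "[" + ",".join(map(str, query_embedding)) + "]"
    await connect()
    try:
        return await query_raw_with_schema(
            """
            SELECT slug, agent_name, 1 - (embedding <=> $1::vector) AS similarity
            FROM {schema_prefix}"StoreAgent"
            WHERE embedding IS NOT NULL
            ORDER BY embedding <=> $1::vector
            LIMIT $2
            """,
            embedding_literal,
            limit,
        )
    finally:
        await disconnect()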
@@ -712,7 +712,7 @@ view StoreAgent {
|
||||
sub_heading String
|
||||
description String
|
||||
categories String[]
|
||||
search Unsupported("tsvector")? @default(dbgenerated("''::tsvector"))
|
||||
embedding Unsupported("vector(1536)")?
|
||||
runs Int
|
||||
rating Float
|
||||
versions String[]
|
||||
@@ -847,7 +847,8 @@ model StoreListingVersion {
|
||||
// Old versions can be made unavailable by the author if desired
|
||||
isAvailable Boolean @default(true)
|
||||
|
||||
search Unsupported("tsvector")? @default(dbgenerated("''::tsvector"))
|
||||
// Vector embedding for semantic search (replaces tsvector full-text search)
|
||||
embedding Unsupported("vector(1536)")?
|
||||
|
||||
// Version workflow state
|
||||
submissionStatus SubmissionStatus @default(DRAFT)
|
||||
|
||||
autogpt_platform/backend/test/load_store_agents.py (new file, 431 lines)
@@ -0,0 +1,431 @@
|
||||
"""
|
||||
Load Store Agents Script
|
||||
|
||||
This script loads the exported store agents from the agents/ folder into the test database.
|
||||
It creates:
|
||||
- A user and profile for the 'autogpt' creator
|
||||
- AgentGraph records from JSON files
|
||||
- StoreListing and StoreListingVersion records from CSV metadata
|
||||
- Approves agents that have is_available=true in the CSV
|
||||
|
||||
Usage:
|
||||
cd backend
|
||||
poetry run python test/load_store_agents.py
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import csv
|
||||
import json
|
||||
import re
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
import prisma.enums
|
||||
from prisma import Json, Prisma
|
||||
from prisma.types import (
|
||||
AgentBlockCreateInput,
|
||||
AgentGraphCreateInput,
|
||||
AgentNodeCreateInput,
|
||||
AgentNodeLinkCreateInput,
|
||||
ProfileCreateInput,
|
||||
StoreListingCreateInput,
|
||||
StoreListingVersionCreateInput,
|
||||
UserCreateInput,
|
||||
)
|
||||
|
||||
# Path to agents folder (relative to backend directory)
|
||||
AGENTS_DIR = Path(__file__).parent.parent / "agents"
|
||||
CSV_FILE = AGENTS_DIR / "StoreAgent_rows.csv"
|
||||
|
||||
# Fixed user ID for the autogpt creator (test data, not production)
|
||||
AUTOGPT_USER_ID = "79d96c73-e6f5-4656-a83a-185b41ee0d06"
|
||||
AUTOGPT_EMAIL = "autogpt-test@agpt.co"
|
||||
AUTOGPT_USERNAME = "autogpt"
|
||||
|
||||
|
||||
async def initialize_blocks(db: Prisma) -> set[str]:
|
||||
"""Initialize agent blocks in the database from the registered blocks.
|
||||
|
||||
Returns a set of block IDs that exist in the database.
|
||||
"""
|
||||
from backend.data.block import get_blocks
|
||||
|
||||
print(" Initializing agent blocks...")
|
||||
blocks = get_blocks()
|
||||
created_count = 0
|
||||
block_ids = set()
|
||||
|
||||
for block_cls in blocks.values():
|
||||
block = block_cls()
|
||||
block_ids.add(block.id)
|
||||
existing_block = await db.agentblock.find_first(
|
||||
where={"OR": [{"id": block.id}, {"name": block.name}]}
|
||||
)
|
||||
if not existing_block:
|
||||
await db.agentblock.create(
|
||||
data=AgentBlockCreateInput(
|
||||
id=block.id,
|
||||
name=block.name,
|
||||
inputSchema=json.dumps(block.input_schema.jsonschema()),
|
||||
outputSchema=json.dumps(block.output_schema.jsonschema()),
|
||||
)
|
||||
)
|
||||
created_count += 1
|
||||
elif block.id != existing_block.id or block.name != existing_block.name:
|
||||
await db.agentblock.update(
|
||||
where={"id": existing_block.id},
|
||||
data={
|
||||
"id": block.id,
|
||||
"name": block.name,
|
||||
"inputSchema": json.dumps(block.input_schema.jsonschema()),
|
||||
"outputSchema": json.dumps(block.output_schema.jsonschema()),
|
||||
},
|
||||
)
|
||||
|
||||
print(f" Initialized {len(blocks)} blocks ({created_count} new)")
|
||||
return block_ids
|
||||
|
||||
|
||||
async def ensure_block_exists(
|
||||
db: Prisma, block_id: str, known_blocks: set[str]
|
||||
) -> bool:
|
||||
"""Ensure a block exists in the database, create a placeholder if needed.
|
||||
|
||||
Returns True if the block exists (or was created), False otherwise.
|
||||
"""
|
||||
if block_id in known_blocks:
|
||||
return True
|
||||
|
||||
# Check if it already exists in the database
|
||||
existing = await db.agentblock.find_unique(where={"id": block_id})
|
||||
if existing:
|
||||
known_blocks.add(block_id)
|
||||
return True
|
||||
|
||||
# Create a placeholder block
|
||||
print(f" Creating placeholder block: {block_id}")
|
||||
try:
|
||||
await db.agentblock.create(
|
||||
data=AgentBlockCreateInput(
|
||||
id=block_id,
|
||||
name=f"Placeholder_{block_id[:8]}",
|
||||
inputSchema="{}",
|
||||
outputSchema="{}",
|
||||
)
|
||||
)
|
||||
known_blocks.add(block_id)
|
||||
return True
|
||||
except Exception as e:
|
||||
print(f" Warning: Could not create placeholder block {block_id}: {e}")
|
||||
return False
|
||||
|
||||
|
||||
def parse_image_urls(image_str: str) -> list[str]:
|
||||
"""Parse the image URLs from CSV format like ["url1","url2"]."""
|
||||
if not image_str or image_str == "[]":
|
||||
return []
|
||||
try:
|
||||
return json.loads(image_str)
|
||||
except json.JSONDecodeError:
|
||||
return []
|
||||
|
||||
|
||||
def parse_categories(categories_str: str) -> list[str]:
|
||||
"""Parse categories from CSV format like ["cat1","cat2"]."""
|
||||
if not categories_str or categories_str == "[]":
|
||||
return []
|
||||
try:
|
||||
return json.loads(categories_str)
|
||||
except json.JSONDecodeError:
|
||||
return []
|
||||
|
||||
|
||||
def sanitize_slug(slug: str) -> str:
|
||||
"""Ensure slug only contains valid characters."""
|
||||
return re.sub(r"[^a-z0-9-]", "", slug.lower())
|
||||
|
||||
|
||||
async def create_user_and_profile(db: Prisma) -> None:
|
||||
"""Create the autogpt user and profile if they don't exist."""
|
||||
# Check if user exists
|
||||
existing_user = await db.user.find_unique(where={"id": AUTOGPT_USER_ID})
|
||||
if existing_user:
|
||||
print(f"User {AUTOGPT_USER_ID} already exists, skipping user creation")
|
||||
else:
|
||||
print(f"Creating user {AUTOGPT_USER_ID}")
|
||||
await db.user.create(
|
||||
data=UserCreateInput(
|
||||
id=AUTOGPT_USER_ID,
|
||||
email=AUTOGPT_EMAIL,
|
||||
name="AutoGPT",
|
||||
metadata=Json({}),
|
||||
integrations="",
|
||||
)
|
||||
)
|
||||
|
||||
# Check if profile exists
|
||||
existing_profile = await db.profile.find_first(where={"userId": AUTOGPT_USER_ID})
|
||||
if existing_profile:
|
||||
print(
|
||||
f"Profile for user {AUTOGPT_USER_ID} already exists, skipping profile creation"
|
||||
)
|
||||
else:
|
||||
print(f"Creating profile for user {AUTOGPT_USER_ID}")
|
||||
await db.profile.create(
|
||||
data=ProfileCreateInput(
|
||||
userId=AUTOGPT_USER_ID,
|
||||
name="AutoGPT",
|
||||
username=AUTOGPT_USERNAME,
|
||||
description="Official AutoGPT agents and templates",
|
||||
links=["https://agpt.co"],
|
||||
avatarUrl="https://storage.googleapis.com/agpt-prod-website-artifacts/users/b3e41ea4-2f4c-4964-927c-fe682d857bad/images/4b5781a6-49e1-433c-9a75-65af1be5c02d.png",
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
async def load_csv_metadata() -> dict[str, dict]:
|
||||
"""Load CSV metadata and return a dict keyed by storeListingVersionId."""
|
||||
metadata = {}
|
||||
with open(CSV_FILE, "r", encoding="utf-8") as f:
|
||||
reader = csv.DictReader(f)
|
||||
for row in reader:
|
||||
version_id = row["storeListingVersionId"]
|
||||
metadata[version_id] = {
|
||||
"listing_id": row["listing_id"],
|
||||
"store_listing_version_id": version_id,
|
||||
"slug": sanitize_slug(row["slug"]),
|
||||
"agent_name": row["agent_name"],
|
||||
"agent_video": row["agent_video"] if row["agent_video"] else None,
|
||||
"agent_image": parse_image_urls(row["agent_image"]),
|
||||
"featured": row["featured"].lower() == "true",
|
||||
"sub_heading": row["sub_heading"],
|
||||
"description": row["description"],
|
||||
"categories": parse_categories(row["categories"]),
|
||||
"use_for_onboarding": row["useForOnboarding"].lower() == "true",
|
||||
"is_available": row["is_available"].lower() == "true",
|
||||
}
|
||||
return metadata
|
||||
|
||||
|
||||
async def load_agent_json(json_path: Path) -> dict:
|
||||
"""Load and parse an agent JSON file."""
|
||||
with open(json_path, "r", encoding="utf-8") as f:
|
||||
return json.load(f)
|
||||
|
||||
|
||||
async def create_agent_graph(
|
||||
db: Prisma, agent_data: dict, known_blocks: set[str]
|
||||
) -> tuple[str, int]:
|
||||
"""Create an AgentGraph and its nodes/links from JSON data."""
|
||||
graph_id = agent_data["id"]
|
||||
version = agent_data.get("version", 1)
|
||||
|
||||
# Check if graph already exists
|
||||
existing_graph = await db.agentgraph.find_unique(
|
||||
where={"graphVersionId": {"id": graph_id, "version": version}}
|
||||
)
|
||||
if existing_graph:
|
||||
print(f" Graph {graph_id} v{version} already exists, skipping")
|
||||
return graph_id, version
|
||||
|
||||
print(
|
||||
f" Creating graph {graph_id} v{version}: {agent_data.get('name', 'Unnamed')}"
|
||||
)
|
||||
|
||||
# Create the main graph
|
||||
await db.agentgraph.create(
|
||||
data=AgentGraphCreateInput(
|
||||
id=graph_id,
|
||||
version=version,
|
||||
name=agent_data.get("name"),
|
||||
description=agent_data.get("description"),
|
||||
instructions=agent_data.get("instructions"),
|
||||
recommendedScheduleCron=agent_data.get("recommended_schedule_cron"),
|
||||
isActive=agent_data.get("is_active", True),
|
||||
userId=AUTOGPT_USER_ID,
|
||||
forkedFromId=agent_data.get("forked_from_id"),
|
||||
forkedFromVersion=agent_data.get("forked_from_version"),
|
||||
)
|
||||
)
|
||||
|
||||
# Create nodes
|
||||
nodes = agent_data.get("nodes", [])
|
||||
for node in nodes:
|
||||
block_id = node["block_id"]
|
||||
# Ensure the block exists (create placeholder if needed)
|
||||
await ensure_block_exists(db, block_id, known_blocks)
|
||||
|
||||
await db.agentnode.create(
|
||||
data=AgentNodeCreateInput(
|
||||
id=node["id"],
|
||||
agentBlockId=block_id,
|
||||
agentGraphId=graph_id,
|
||||
agentGraphVersion=version,
|
||||
constantInput=Json(node.get("input_default", {})),
|
||||
metadata=Json(node.get("metadata", {})),
|
||||
)
|
||||
)
|
||||
|
||||
# Create links
|
||||
links = agent_data.get("links", [])
|
||||
for link in links:
|
||||
await db.agentnodelink.create(
|
||||
data=AgentNodeLinkCreateInput(
|
||||
id=link["id"],
|
||||
agentNodeSourceId=link["source_id"],
|
||||
agentNodeSinkId=link["sink_id"],
|
||||
sourceName=link["source_name"],
|
||||
sinkName=link["sink_name"],
|
||||
isStatic=link.get("is_static", False),
|
||||
)
|
||||
)
|
||||
|
||||
# Handle sub_graphs recursively
|
||||
sub_graphs = agent_data.get("sub_graphs", [])
|
||||
for sub_graph in sub_graphs:
|
||||
await create_agent_graph(db, sub_graph, known_blocks)
|
||||
|
||||
return graph_id, version
|
||||
|
||||
|
||||
async def create_store_listing(
|
||||
db: Prisma,
|
||||
graph_id: str,
|
||||
graph_version: int,
|
||||
metadata: dict,
|
||||
) -> None:
|
||||
"""Create StoreListing and StoreListingVersion for an agent."""
|
||||
listing_id = metadata["listing_id"]
|
||||
version_id = metadata["store_listing_version_id"]
|
||||
|
||||
# Check if listing already exists
|
||||
existing_listing = await db.storelisting.find_unique(where={"id": listing_id})
|
||||
if existing_listing:
|
||||
print(f" Store listing {listing_id} already exists, skipping")
|
||||
return
|
||||
|
||||
print(f" Creating store listing: {metadata['agent_name']}")
|
||||
|
||||
# Determine if this should be approved
|
||||
is_approved = metadata["is_available"]
|
||||
submission_status = (
|
||||
prisma.enums.SubmissionStatus.APPROVED
|
||||
if is_approved
|
||||
else prisma.enums.SubmissionStatus.PENDING
|
||||
)
|
||||
|
||||
# Create the store listing first (without activeVersionId - will update after)
|
||||
await db.storelisting.create(
|
||||
data=StoreListingCreateInput(
|
||||
id=listing_id,
|
||||
slug=metadata["slug"],
|
||||
agentGraphId=graph_id,
|
||||
agentGraphVersion=graph_version,
|
||||
owningUserId=AUTOGPT_USER_ID,
|
||||
hasApprovedVersion=is_approved,
|
||||
useForOnboarding=metadata["use_for_onboarding"],
|
||||
)
|
||||
)
|
||||
|
||||
# Create the store listing version
|
||||
await db.storelistingversion.create(
|
||||
data=StoreListingVersionCreateInput(
|
||||
id=version_id,
|
||||
version=1,
|
||||
agentGraphId=graph_id,
|
||||
agentGraphVersion=graph_version,
|
||||
name=metadata["agent_name"],
|
||||
subHeading=metadata["sub_heading"],
|
||||
videoUrl=metadata["agent_video"],
|
||||
imageUrls=metadata["agent_image"],
|
||||
description=metadata["description"],
|
||||
categories=metadata["categories"],
|
||||
isFeatured=metadata["featured"],
|
||||
isAvailable=metadata["is_available"],
|
||||
submissionStatus=submission_status,
|
||||
submittedAt=datetime.now() if is_approved else None,
|
||||
reviewedAt=datetime.now() if is_approved else None,
|
||||
storeListingId=listing_id,
|
||||
)
|
||||
)
|
||||
|
||||
# Update the store listing with the active version if approved
|
||||
if is_approved:
|
||||
await db.storelisting.update(
|
||||
where={"id": listing_id},
|
||||
data={"activeVersionId": version_id},
|
||||
)
|
||||
|
||||
|
||||
async def main():
|
||||
"""Main function to load all store agents."""
|
||||
print("=" * 60)
|
||||
print("Loading Store Agents into Test Database")
|
||||
print("=" * 60)
|
||||
|
||||
db = Prisma()
|
||||
await db.connect()
|
||||
|
||||
try:
|
||||
# Step 0: Initialize agent blocks
|
||||
print("\n[Step 0] Initializing agent blocks...")
|
||||
known_blocks = await initialize_blocks(db)
|
||||
|
||||
# Step 1: Create user and profile
|
||||
print("\n[Step 1] Creating user and profile...")
|
||||
await create_user_and_profile(db)
|
||||
|
||||
# Step 2: Load CSV metadata
|
||||
print("\n[Step 2] Loading CSV metadata...")
|
||||
csv_metadata = await load_csv_metadata()
|
||||
print(f" Found {len(csv_metadata)} store listing entries in CSV")
|
||||
|
||||
# Step 3: Find all JSON files and match with CSV
|
||||
print("\n[Step 3] Processing agent JSON files...")
|
||||
json_files = list(AGENTS_DIR.glob("agent_*.json"))
|
||||
print(f" Found {len(json_files)} agent JSON files")
|
||||
|
||||
# Build mapping from version_id to json file
|
||||
loaded_graphs = {} # graph_id -> (graph_id, version)
|
||||
|
||||
for json_file in json_files:
|
||||
# Extract the version ID from filename (agent_<version_id>.json)
|
||||
version_id = json_file.stem.replace("agent_", "")
|
||||
|
||||
if version_id not in csv_metadata:
|
||||
print(
|
||||
f" Warning: {json_file.name} not found in CSV metadata, skipping"
|
||||
)
|
||||
continue
|
||||
|
||||
metadata = csv_metadata[version_id]
|
||||
print(f"\nProcessing: {metadata['agent_name']}")
|
||||
|
||||
# Load and create the agent graph
|
||||
agent_data = await load_agent_json(json_file)
|
||||
graph_id, graph_version = await create_agent_graph(
|
||||
db, agent_data, known_blocks
|
||||
)
|
||||
loaded_graphs[graph_id] = (graph_id, graph_version)
|
||||
|
||||
# Create store listing
|
||||
await create_store_listing(db, graph_id, graph_version, metadata)
|
||||
|
||||
# Step 4: Refresh materialized views
|
||||
print("\n[Step 4] Refreshing materialized views...")
|
||||
try:
|
||||
await db.execute_raw("SELECT refresh_store_materialized_views();")
|
||||
print(" Materialized views refreshed successfully")
|
||||
except Exception as e:
|
||||
print(f" Warning: Could not refresh materialized views: {e}")
|
||||
|
||||
print("\n" + "=" * 60)
|
||||
print(f"Successfully loaded {len(loaded_graphs)} agents")
|
||||
print("=" * 60)
|
||||
|
||||
finally:
|
||||
await db.disconnect()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
asyncio.run(main())
|
||||