Compare commits

...

5 Commits

Author SHA1 Message Date
dependabot[bot]
cd64562e1b chore(libs/deps): bump the production-dependencies group across 1 directory with 8 updates (#11934)
Bumps the production-dependencies group with 8 updates in the
/autogpt_platform/autogpt_libs directory:

| Package | From | To |
| --- | --- | --- |
| [fastapi](https://github.com/fastapi/fastapi) | `0.116.1` | `0.128.0` |
| [google-cloud-logging](https://github.com/googleapis/python-logging) | `3.12.1` | `3.13.0` |
| [launchdarkly-server-sdk](https://github.com/launchdarkly/python-server-sdk) | `9.12.0` | `9.14.1` |
| [pydantic](https://github.com/pydantic/pydantic) | `2.11.7` | `2.12.5` |
| [pydantic-settings](https://github.com/pydantic/pydantic-settings) | `2.10.1` | `2.12.0` |
| [pyjwt](https://github.com/jpadilla/pyjwt) | `2.10.1` | `2.11.0` |
| [supabase](https://github.com/supabase/supabase-py) | `2.16.0` | `2.27.2` |
| [uvicorn](https://github.com/Kludex/uvicorn) | `0.35.0` | `0.40.0` |


Updates `fastapi` from 0.116.1 to 0.128.0
<details>
<summary>Release notes</summary>
<p><em>Sourced from <a
href="https://github.com/fastapi/fastapi/releases">fastapi's
releases</a>.</em></p>
<blockquote>
<h2>0.128.0</h2>
<h3>Breaking Changes</h3>
<ul>
<li> Drop support for <code>pydantic.v1</code>. PR <a
href="https://redirect.github.com/fastapi/fastapi/pull/14609">#14609</a>
by <a
href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li>
</ul>
<h3>Internal</h3>
<ul>
<li> Run performance tests only on Pydantic v2. PR <a
href="https://redirect.github.com/fastapi/fastapi/pull/14608">#14608</a>
by <a
href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li>
</ul>
<h2>0.127.1</h2>
<h3>Refactors</h3>
<ul>
<li>🔊 Add a custom <code>FastAPIDeprecationWarning</code>. PR <a
href="https://redirect.github.com/fastapi/fastapi/pull/14605">#14605</a>
by <a
href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li>
</ul>
<h3>Docs</h3>
<ul>
<li>📝 Add documentary to website. PR <a
href="https://redirect.github.com/fastapi/fastapi/pull/14600">#14600</a>
by <a
href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li>
</ul>
<h3>Translations</h3>
<ul>
<li>🌐 Update translations for de (update-outdated). PR <a
href="https://redirect.github.com/fastapi/fastapi/pull/14602">#14602</a>
by <a
href="https://github.com/nilslindemann"><code>@​nilslindemann</code></a>.</li>
<li>🌐 Update translations for de (update-outdated). PR <a
href="https://redirect.github.com/fastapi/fastapi/pull/14581">#14581</a>
by <a
href="https://github.com/nilslindemann"><code>@​nilslindemann</code></a>.</li>
</ul>
<h3>Internal</h3>
<ul>
<li>🔧 Update pre-commit to use local Ruff instead of hook. PR <a
href="https://redirect.github.com/fastapi/fastapi/pull/14604">#14604</a>
by <a
href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li>
<li> Add missing tests for code examples. PR <a
href="https://redirect.github.com/fastapi/fastapi/pull/14569">#14569</a>
by <a
href="https://github.com/YuriiMotov"><code>@​YuriiMotov</code></a>.</li>
<li>👷 Remove <code>lint</code> job from <code>test</code> CI workflow.
PR <a
href="https://redirect.github.com/fastapi/fastapi/pull/14593">#14593</a>
by <a
href="https://github.com/YuriiMotov"><code>@​YuriiMotov</code></a>.</li>
<li>👷 Update secrets check. PR <a
href="https://redirect.github.com/fastapi/fastapi/pull/14592">#14592</a>
by <a
href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li>
<li>👷 Run CodSpeed tests in parallel to other tests to speed up CI. PR
<a
href="https://redirect.github.com/fastapi/fastapi/pull/14586">#14586</a>
by <a
href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li>
<li>🔨 Update scripts and pre-commit to autofix files. PR <a
href="https://redirect.github.com/fastapi/fastapi/pull/14585">#14585</a>
by <a
href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li>
</ul>
<h2>0.127.0</h2>
<h3>Breaking Changes</h3>
<ul>
<li>🔊 Add deprecation warnings when using <code>pydantic.v1</code>. PR
<a
href="https://redirect.github.com/fastapi/fastapi/pull/14583">#14583</a>
by <a
href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li>
</ul>
<h3>Translations</h3>
<ul>
<li>🔧 Add LLM prompt file for Korean, generated from the existing
translations. PR <a
href="https://redirect.github.com/fastapi/fastapi/pull/14546">#14546</a>
by <a
href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li>
<li>🔧 Add LLM prompt file for Japanese, generated from the existing
translations. PR <a
href="https://redirect.github.com/fastapi/fastapi/pull/14545">#14545</a>
by <a
href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li>
</ul>
<h3>Internal</h3>
<ul>
<li>⬆️ Upgrade OpenAI model for translations to gpt-5.2. PR <a
href="https://redirect.github.com/fastapi/fastapi/pull/14579">#14579</a>
by <a
href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li>
</ul>
<h2>0.126.0</h2>
<h3>Upgrades</h3>
<ul>
<li> Drop support for Pydantic v1, keeping short temporary support for
Pydantic v2's <code>pydantic.v1</code>. PR <a
href="https://redirect.github.com/fastapi/fastapi/pull/14575">#14575</a>
by <a
href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li>
</ul>
</blockquote>
<p>... (truncated)</p>
</details>
<details>
<summary>Commits</summary>
<ul>
<li><a
href="8322a4445a"><code>8322a44</code></a>
🔖 Release version 0.128.0</li>
<li><a
href="4b2cfcfd34"><code>4b2cfcf</code></a>
📝 Update release notes</li>
<li><a
href="e300630551"><code>e300630</code></a>
 Drop support for <code>pydantic.v1</code> (<a
href="https://redirect.github.com/fastapi/fastapi/issues/14609">#14609</a>)</li>
<li><a
href="1b3bea8b6b"><code>1b3bea8</code></a>
📝 Update release notes</li>
<li><a
href="34e884156f"><code>34e8841</code></a>
 Run performance tests only on Pydantic v2 (<a
href="https://redirect.github.com/fastapi/fastapi/issues/14608">#14608</a>)</li>
<li><a
href="cd90c78391"><code>cd90c78</code></a>
🔖 Release version 0.127.1</li>
<li><a
href="93f4dfd88b"><code>93f4dfd</code></a>
📝 Update release notes</li>
<li><a
href="535b5daa31"><code>535b5da</code></a>
🔊 Add a custom <code>FastAPIDeprecationWarning</code> (<a
href="https://redirect.github.com/fastapi/fastapi/issues/14605">#14605</a>)</li>
<li><a
href="6b53786f62"><code>6b53786</code></a>
📝 Update release notes</li>
<li><a
href="d98f4eb56e"><code>d98f4eb</code></a>
🔧 Update pre-commit to use local Ruff instead of hook (<a
href="https://redirect.github.com/fastapi/fastapi/issues/14604">#14604</a>)</li>
<li>Additional commits viewable in <a
href="https://github.com/fastapi/fastapi/compare/0.116.1...0.128.0">compare
view</a></li>
</ul>
</details>
<br />
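
The headline change in this bump is FastAPI progressively retiring `pydantic.v1`: 0.126.0 drops Pydantic v1 support, 0.127.0 adds deprecation warnings for `pydantic.v1`, and 0.128.0 removes the shim entirely. A minimal migration sketch for code that still imports through the compatibility layer (the `Item` model is illustrative):

```python
# Before: models defined through Pydantic v2's compatibility shim.
# FastAPI 0.128.0 no longer accepts these in request/response models.
# from pydantic.v1 import BaseModel, validator

# After: native Pydantic v2 models.
from pydantic import BaseModel, field_validator


class Item(BaseModel):
    name: str
    price: float

    # v1's @validator becomes @field_validator in v2
    @field_validator("price")
    @classmethod
    def price_positive(cls, v: float) -> float:
        if v <= 0:
            raise ValueError("price must be positive")
        return v
```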

Updates `google-cloud-logging` from 3.12.1 to 3.13.0
<details>
<summary>Release notes</summary>
<p><em>Sourced from <a
href="https://github.com/googleapis/python-logging/releases">google-cloud-logging's
releases</a>.</em></p>
<blockquote>
<h2>google-cloud-logging 3.13.0</h2>
<h2><a
href="https://github.com/googleapis/python-logging/compare/v3.12.1...v3.13.0">3.13.0</a>
(2025-12-15)</h2>
<h3>Features</h3>
<ul>
<li>Add support for python 3.14 (<a
href="https://redirect.github.com/googleapis/python-logging/issues/1065">#1065</a>)
(<a
href="https://github.com/googleapis/python-logging/commit/6be3df6a">6be3df6a</a>)</li>
</ul>
<h3>Bug Fixes</h3>
<ul>
<li>remove setup.cfg configuration for creating universal wheels (<a
href="https://redirect.github.com/googleapis/python-logging/issues/981">#981</a>)
(<a
href="https://github.com/googleapis/python-logging/commit/70f612c3">70f612c3</a>)</li>
</ul>
</blockquote>
</details>
<details>
<summary>Changelog</summary>
<p><em>Sourced from <a
href="https://github.com/googleapis/python-logging/blob/main/CHANGELOG.md">google-cloud-logging's
changelog</a>.</em></p>
<blockquote>
<h2><a
href="https://github.com/googleapis/python-logging/compare/v3.12.1...v3.13.0">3.13.0</a>
(2025-12-15)</h2>
<h3>Features</h3>
<ul>
<li>Add support for python 3.14 (<a
href="https://redirect.github.com/googleapis/python-logging/issues/1065">#1065</a>)
(<a
href="6be3df6aa9">6be3df6aa94539cd2ab22a4fac55b343862228b2</a>)</li>
</ul>
<h3>Bug Fixes</h3>
<ul>
<li>remove setup.cfg configuration for creating universal wheels (<a
href="https://redirect.github.com/googleapis/python-logging/issues/981">#981</a>)
(<a
href="70f612c328">70f612c3281f1df13f3aba6b19bc4e9397297f3d</a>)</li>
</ul>
</blockquote>
</details>
<details>
<summary>Commits</summary>
<ul>
<li><a
href="1415883be0"><code>1415883</code></a>
chore: librarian release pull request: 20251215T134006Z (<a
href="https://redirect.github.com/googleapis/python-logging/issues/1066">#1066</a>)</li>
<li><a
href="6be3df6aa9"><code>6be3df6</code></a>
feat: Add support for python 3.14 (<a
href="https://redirect.github.com/googleapis/python-logging/issues/1065">#1065</a>)</li>
<li><a
href="36fb4270b3"><code>36fb427</code></a>
chore(librarian): onboard to librarian (<a
href="https://redirect.github.com/googleapis/python-logging/issues/1061">#1061</a>)</li>
<li><a
href="eb189bf712"><code>eb189bf</code></a>
chore: update Python generator version to 1.25.1 (<a
href="https://redirect.github.com/googleapis/python-logging/issues/1003">#1003</a>)</li>
<li><a
href="a7a28d1b93"><code>a7a28d1</code></a>
test: ignore DeprecationWarning for <code>credentials_file</code>
argument and Python ve...</li>
<li><a
href="70f612c328"><code>70f612c</code></a>
fix: remove setup.cfg configuration for creating universal wheels (<a
href="https://redirect.github.com/googleapis/python-logging/issues/981">#981</a>)</li>
<li><a
href="e4c445a856"><code>e4c445a</code></a>
chore: Update gapic-generator-python to 1.25.0 (<a
href="https://redirect.github.com/googleapis/python-logging/issues/985">#985</a>)</li>
<li><a
href="14364a534a"><code>14364a5</code></a>
test: Added cleanup of old sink storage buckets (<a
href="https://redirect.github.com/googleapis/python-logging/issues/991">#991</a>)</li>
<li>See full diff in <a
href="https://github.com/googleapis/python-logging/compare/v3.12.1...v3.13.0">compare
view</a></li>
</ul>
</details>
<br />
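
Nothing in 3.13.0 changes the public surface; the release adds Python 3.14 support and a packaging fix. A quick post-upgrade smoke test using the standard `logging` integration (assumes Application Default Credentials are configured):

```python
import logging

import google.cloud.logging

# Requires Application Default Credentials,
# e.g. via GOOGLE_APPLICATION_CREDENTIALS.
client = google.cloud.logging.Client()
client.setup_logging()  # attaches a Cloud Logging handler to the root logger

logging.info("google-cloud-logging 3.13.0 smoke test")
```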

Updates `launchdarkly-server-sdk` from 9.12.0 to 9.14.1
<details>
<summary>Release notes</summary>
<p><em>Sourced from <a
href="https://github.com/launchdarkly/python-server-sdk/releases">launchdarkly-server-sdk's
releases</a>.</em></p>
<blockquote>
<h2>v9.14.1</h2>
<h2><a
href="https://github.com/launchdarkly/python-server-sdk/compare/9.14.0...9.14.1">9.14.1</a>
(2025-12-15)</h2>
<h3>Bug Fixes</h3>
<ul>
<li>Remove all synchronizers in daemon mode (<a
href="https://redirect.github.com/launchdarkly/python-server-sdk/issues/388">#388</a>)
(<a
href="441a5ecb3d">441a5ec</a>)</li>
</ul>
<h2>v9.14.0</h2>
<h2><a
href="https://github.com/launchdarkly/python-server-sdk/compare/9.13.1...9.14.0">9.14.0</a>
(2025-12-04)</h2>
<h3>Features</h3>
<ul>
<li>adding data system option to create file datasource initializer (<a
href="e5b121f92a">e5b121f</a>)</li>
<li>adding file data source as an initializer (<a
href="https://redirect.github.com/launchdarkly/python-server-sdk/issues/381">#381</a>)
(<a
href="3700d1ddd9">3700d1d</a>)</li>
</ul>
<h3>Bug Fixes</h3>
<ul>
<li>Add warning if relying on Redis <code>max_connections</code>
parameter (<a
href="https://redirect.github.com/launchdarkly/python-server-sdk/issues/387">#387</a>)
(<a
href="e6395fa531">e6395fa</a>),
closes <a
href="https://redirect.github.com/launchdarkly/python-server-sdk/issues/386">#386</a></li>
<li>modified initializer behavior to spec (<a
href="064f65c761">064f65c</a>)</li>
</ul>
<h2>v9.13.1</h2>
<h2><a
href="https://github.com/launchdarkly/python-server-sdk/compare/9.13.0...9.13.1">9.13.1</a>
(2025-11-19)</h2>
<h3>Bug Fixes</h3>
<ul>
<li>Include ldclient.datasystem in docs (<a
href="https://redirect.github.com/launchdarkly/python-server-sdk/issues/379">#379</a>)
(<a
href="318c6fea07">318c6fe</a>)</li>
</ul>
<h2>v9.13.0</h2>
<h2><a
href="https://github.com/launchdarkly/python-server-sdk/compare/9.12.3...9.13.0">9.13.0</a>
(2025-11-19)</h2>
</blockquote>
<p>... (truncated)</p>
</details>
<details>
<summary>Changelog</summary>
<p><em>Sourced from <a
href="https://github.com/launchdarkly/python-server-sdk/blob/main/CHANGELOG.md">launchdarkly-server-sdk's
changelog</a>.</em></p>
<blockquote>
<h2><a
href="https://github.com/launchdarkly/python-server-sdk/compare/9.14.0...9.14.1">9.14.1</a>
(2025-12-15)</h2>
<h3>Bug Fixes</h3>
<ul>
<li>Remove all synchronizers in daemon mode (<a
href="https://redirect.github.com/launchdarkly/python-server-sdk/issues/388">#388</a>)
(<a
href="441a5ecb3d">441a5ec</a>)</li>
</ul>
<h2><a
href="https://github.com/launchdarkly/python-server-sdk/compare/9.13.1...9.14.0">9.14.0</a>
(2025-12-04)</h2>
<h3>Features</h3>
<ul>
<li>adding data system option to create file datasource initializer (<a
href="e5b121f92a">e5b121f</a>)</li>
<li>adding file data source as an initializer (<a
href="https://redirect.github.com/launchdarkly/python-server-sdk/issues/381">#381</a>)
(<a
href="3700d1ddd9">3700d1d</a>)</li>
</ul>
<h3>Bug Fixes</h3>
<ul>
<li>Add warning if relying on Redis <code>max_connections</code>
parameter (<a
href="https://redirect.github.com/launchdarkly/python-server-sdk/issues/387">#387</a>)
(<a
href="e6395fa531">e6395fa</a>),
closes <a
href="https://redirect.github.com/launchdarkly/python-server-sdk/issues/386">#386</a></li>
<li>modified initializer behavior to spec (<a
href="064f65c761">064f65c</a>)</li>
</ul>
<h2><a
href="https://github.com/launchdarkly/python-server-sdk/compare/9.13.0...9.13.1">9.13.1</a>
(2025-11-19)</h2>
<h3>Bug Fixes</h3>
<ul>
<li>Include ldclient.datasystem in docs (<a
href="https://redirect.github.com/launchdarkly/python-server-sdk/issues/379">#379</a>)
(<a
href="318c6fea07">318c6fe</a>)</li>
</ul>
<h2><a
href="https://github.com/launchdarkly/python-server-sdk/compare/9.12.3...9.13.0">9.13.0</a>
(2025-11-19)</h2>
<h3>Features</h3>
<ul>
<li><strong>experimental:</strong> Release EAP support for FDv2 data
system (<a
href="https://redirect.github.com/launchdarkly/python-server-sdk/issues/376">#376</a>)
(<a
href="0e7c32b4df">0e7c32b</a>)</li>
</ul>
<h2><a
href="https://github.com/launchdarkly/python-server-sdk/compare/9.12.2...9.12.3">9.12.3</a>
(2025-10-30)</h2>
<h3>Bug Fixes</h3>
<ul>
<li>Fix overly generic type hint on File data source (<a
href="https://redirect.github.com/launchdarkly/python-server-sdk/issues/365">#365</a>)
(<a
href="52a7499f7c">52a7499</a>),
closes <a
href="https://redirect.github.com/launchdarkly/python-server-sdk/issues/364">#364</a></li>
</ul>
<h2><a
href="https://github.com/launchdarkly/python-server-sdk/compare/9.12.1...9.12.2">9.12.2</a>
(2025-10-27)</h2>
<h3>Bug Fixes</h3>
<ul>
<li>Fix incorrect event count in failure message (<a
href="https://redirect.github.com/launchdarkly/python-server-sdk/issues/359">#359</a>)
(<a
href="91f416329b">91f4163</a>)</li>
</ul>
<h2><a
href="https://github.com/launchdarkly/python-server-sdk/compare/9.12.0...9.12.1">9.12.1</a>
(2025-09-30)</h2>
</blockquote>
<p>... (truncated)</p>
</details>
<details>
<summary>Commits</summary>
<ul>
<li><a
href="54e62cc706"><code>54e62cc</code></a>
chore(main): release 9.14.1 (<a
href="https://redirect.github.com/launchdarkly/python-server-sdk/issues/389">#389</a>)</li>
<li><a
href="441a5ecb3d"><code>441a5ec</code></a>
fix: Remove all synchronizers in daemon mode (<a
href="https://redirect.github.com/launchdarkly/python-server-sdk/issues/388">#388</a>)</li>
<li><a
href="7bb537827f"><code>7bb5378</code></a>
chore(main): release 9.14.0 (<a
href="https://redirect.github.com/launchdarkly/python-server-sdk/issues/382">#382</a>)</li>
<li><a
href="e6395fa531"><code>e6395fa</code></a>
fix: Add warning if relying on Redis <code>max_connections</code>
parameter (<a
href="https://redirect.github.com/launchdarkly/python-server-sdk/issues/387">#387</a>)</li>
<li><a
href="45786a9a7e"><code>45786a9</code></a>
chore: Expose flag change listeners from data system (<a
href="https://redirect.github.com/launchdarkly/python-server-sdk/issues/384">#384</a>)</li>
<li><a
href="2b7eedc836"><code>2b7eedc</code></a>
chore: Clean up unused _data_availability (<a
href="https://redirect.github.com/launchdarkly/python-server-sdk/issues/383">#383</a>)</li>
<li><a
href="3700d1ddd9"><code>3700d1d</code></a>
feat: adding file data source as an initializer (<a
href="https://redirect.github.com/launchdarkly/python-server-sdk/issues/381">#381</a>)</li>
<li><a
href="04a2c538e5"><code>04a2c53</code></a>
chore: PR comments</li>
<li><a
href="064f65c761"><code>064f65c</code></a>
fix: modified initializer behavior to spec</li>
<li><a
href="e5b121f92a"><code>e5b121f</code></a>
feat: adding data system option to create file datasource
initializer</li>
<li>Additional commits viewable in <a
href="https://github.com/launchdarkly/python-server-sdk/compare/9.12.0...9.14.1">compare
view</a></li>
</ul>
</details>
<br />
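
For daemon-mode deployments, the relevant items are the `max_connections` warning (#387) and the 9.14.1 fix that removes all synchronizers in daemon mode (#388). A sketch of a daemon-mode setup using the documented Redis feature store; the URL, prefix, and SDK key are placeholders:

```python
import ldclient
from ldclient.config import Config
from ldclient.integrations import Redis

# Feature store backed by Redis. Per #387, relying on the max_connections
# parameter now emits a warning, so this sketch sizes the pool via the URL.
store = Redis.new_feature_store(
    url="redis://localhost:6379/0",  # placeholder
    prefix="launchdarkly",
)

ldclient.set_config(
    Config(
        sdk_key="sdk-key-placeholder",
        feature_store=store,
        use_ldd=True,  # daemon mode: read flags from Redis, no streaming
    )
)
client = ldclient.get()
```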

Updates `pydantic` from 2.11.7 to 2.12.5
<details>
<summary>Release notes</summary>
<p><em>Sourced from <a
href="https://github.com/pydantic/pydantic/releases">pydantic's
releases</a>.</em></p>
<blockquote>
<h2>v2.12.5 (2025-11-26)</h2>
<p>This is the fifth 2.12 patch release, addressing an issue with the
<code>MISSING</code> sentinel and providing several documentation
improvements.</p>
<p>The next 2.13 minor release will be published in a couple weeks, and
will include a new <em>polymorphic serialization</em> feature addressing
the remaining unexpected changes to the <em>serialize as any</em>
behavior.</p>
<ul>
<li>Fix pickle error when using <code>model_construct()</code> on a
model with <code>MISSING</code> as a default value by <a
href="https://github.com/ornariece"><code>@​ornariece</code></a> in <a
href="https://redirect.github.com/pydantic/pydantic/pull/12522">#12522</a>.</li>
<li>Several updates to the documentation by <a
href="https://github.com/Viicos"><code>@​Viicos</code></a>.</li>
</ul>
<p><strong>Full Changelog</strong>: <a
href="https://github.com/pydantic/pydantic/compare/v2.12.4...v2.12.5">https://github.com/pydantic/pydantic/compare/v2.12.4...v2.12.5</a></p>
<h2>v2.12.4 (2025-11-05)</h2>
<p>This is the fourth 2.12 patch release, fixing more regressions, and
reverting a change in the <code>build()</code> method
of the <a
href="https://docs.pydantic.dev/latest/api/networks/"><code>AnyUrl</code>
and Dsn types</a>.</p>
<p>This patch release also fixes an issue with the serialization of IP
address types, when <code>serialize_as_any</code> is used. The next
patch release
will try to address the remaining issues with <em>serialize as any</em>
behavior by introducing a new <em>polymorphic serialization</em>
feature, that
should be used in most cases in place of <em>serialize as any</em>.</p>
<ul>
<li>
<p>Fix issue with forward references in parent <code>TypedDict</code>
classes by <a href="https://github.com/Viicos"><code>@​Viicos</code></a>
in <a
href="https://redirect.github.com/pydantic/pydantic/pull/12427">#12427</a>.</p>
<p>This issue is only relevant on Python 3.14 and greater.</p>
</li>
<li>
<p>Exclude fields with <code>exclude_if</code> from JSON Schema required
fields by <a href="https://github.com/Viicos"><code>@​Viicos</code></a>
in <a
href="https://redirect.github.com/pydantic/pydantic/pull/12430">#12430</a></p>
</li>
<li>
<p>Revert URL percent-encoding of credentials in the
<code>build()</code> method of the <a
href="https://docs.pydantic.dev/latest/api/networks/"><code>AnyUrl</code>
and Dsn types</a> by <a
href="https://github.com/davidhewitt"><code>@​davidhewitt</code></a> in
<a
href="https://redirect.github.com/pydantic/pydantic-core/pull/1833">pydantic-core#1833</a>.</p>
<p>This was initially considered as a bugfix, but caused regressions and
as such was fully reverted. The next release will include
an opt-in option to percent-encode components of the URL.</p>
</li>
<li>
<p>Add type inference for IP address types by <a
href="https://github.com/davidhewitt"><code>@​davidhewitt</code></a> in
<a
href="https://redirect.github.com/pydantic/pydantic-core/pull/1868">pydantic-core#1868</a>.</p>
<p>The 2.12 changes to the <code>serialize_as_any</code> behavior made
it so that IP address types could not properly serialize to JSON.</p>
</li>
<li>
<p>Avoid getting default values from defaultdict by <a
href="https://github.com/davidhewitt"><code>@​davidhewitt</code></a> in
<a
href="https://redirect.github.com/pydantic/pydantic-core/pull/1853">pydantic-core#1853</a>.</p>
<p>This fixes a subtle regression in the validation behavior of the <a
href="https://docs.python.org/3/library/collections.html#collections.defaultdict"><code>collections.defaultdict</code></a>
type.</p>
</li>
<li>
<p>Fix issue with field serializers on nested typed dictionaries by <a
href="https://github.com/davidhewitt"><code>@​davidhewitt</code></a> in
<a
href="https://redirect.github.com/pydantic/pydantic-core/pull/1879">pydantic-core#1879</a>.</p>
</li>
<li>
<p>Add more <code>pydantic-core</code> builds for the free-threaded
version of Python 3.14 by <a
href="https://github.com/davidhewitt"><code>@​davidhewitt</code></a> in
<a
href="https://redirect.github.com/pydantic/pydantic-core/pull/1864">pydantic-core#1864</a>.</p>
</li>
</ul>
<p><strong>Full Changelog</strong>: <a
href="https://github.com/pydantic/pydantic/compare/v2.12.3...v2.12.4">https://github.com/pydantic/pydantic/compare/v2.12.3...v2.12.4</a></p>
<h2>v2.12.3 (2025-10-17)</h2>
<h3>What's Changed</h3>
<p>This is the third 2.12 patch release, fixing issues related to the
<code>FieldInfo</code> class, and reverting a change to the supported <a
href="https://docs.pydantic.dev/latest/concepts/validators/#model-validators"><em>after</em>
model validator</a> function signatures.</p>
</blockquote>
<p>... (truncated)</p>
</details>
<details>
<summary>Changelog</summary>
<p><em>Sourced from <a
href="https://github.com/pydantic/pydantic/blob/main/HISTORY.md">pydantic's
changelog</a>.</em></p>
<blockquote>
<h2>v2.12.5 (2025-11-26)</h2>
<p><a
href="https://github.com/pydantic/pydantic/releases/tag/v2.12.5">GitHub
release</a></p>
<p>This is the fifth 2.12 patch release, addressing an issue with the
<code>MISSING</code> sentinel and providing several documentation
improvements.</p>
<p>The next 2.13 minor release will be published in a couple weeks, and
will include a new <em>polymorphic serialization</em> feature addressing
the remaining unexpected changes to the <em>serialize as any</em>
behavior.</p>
<ul>
<li>Fix pickle error when using <code>model_construct()</code> on a
model with <code>MISSING</code> as a default value by <a
href="https://github.com/ornariece"><code>@​ornariece</code></a> in <a
href="https://redirect.github.com/pydantic/pydantic/pull/12522">#12522</a>.</li>
<li>Several updates to the documentation by <a
href="https://github.com/Viicos"><code>@​Viicos</code></a>.</li>
</ul>
<h2>v2.12.4 (2025-11-05)</h2>
<p><a
href="https://github.com/pydantic/pydantic/releases/tag/v2.12.4">GitHub
release</a></p>
<p>This is the fourth 2.12 patch release, fixing more regressions, and
reverting a change in the <code>build()</code> method
of the <a
href="https://docs.pydantic.dev/latest/api/networks/"><code>AnyUrl</code>
and Dsn types</a>.</p>
<p>This patch release also fixes an issue with the serialization of IP
address types, when <code>serialize_as_any</code> is used. The next
patch release
will try to address the remaining issues with <em>serialize as any</em>
behavior by introducing a new <em>polymorphic serialization</em>
feature, that
should be used in most cases in place of <em>serialize as any</em>.</p>
<ul>
<li>
<p>Fix issue with forward references in parent <code>TypedDict</code>
classes by <a href="https://github.com/Viicos"><code>@​Viicos</code></a>
in <a
href="https://redirect.github.com/pydantic/pydantic/pull/12427">#12427</a>.</p>
<p>This issue is only relevant on Python 3.14 and greater.</p>
</li>
<li>
<p>Exclude fields with <code>exclude_if</code> from JSON Schema required
fields by <a href="https://github.com/Viicos"><code>@​Viicos</code></a>
in <a
href="https://redirect.github.com/pydantic/pydantic/pull/12430">#12430</a></p>
</li>
<li>
<p>Revert URL percent-encoding of credentials in the
<code>build()</code> method
of the <a
href="https://docs.pydantic.dev/latest/api/networks/"><code>AnyUrl</code>
and Dsn types</a> by <a
href="https://github.com/davidhewitt"><code>@​davidhewitt</code></a> in
<a
href="https://redirect.github.com/pydantic/pydantic-core/pull/1833">pydantic-core#1833</a>.</p>
<p>This was initially considered as a bugfix, but caused regressions and
as such was fully reverted. The next release will include
an opt-in option to percent-encode components of the URL.</p>
</li>
<li>
<p>Add type inference for IP address types by <a
href="https://github.com/davidhewitt"><code>@​davidhewitt</code></a> in
<a
href="https://redirect.github.com/pydantic/pydantic-core/pull/1868">pydantic-core#1868</a>.</p>
<p>The 2.12 changes to the <code>serialize_as_any</code> behavior made
it so that IP address types could not properly serialize to JSON.</p>
</li>
<li>
<p>Avoid getting default values from defaultdict by <a
href="https://github.com/davidhewitt"><code>@​davidhewitt</code></a> in
<a
href="https://redirect.github.com/pydantic/pydantic-core/pull/1853">pydantic-core#1853</a>.</p>
<p>This fixes a subtle regression in the validation behavior of the <a
href="https://docs.python.org/3/library/collections.html#collections.defaultdict"><code>collections.defaultdict</code></a>
type.</p>
</li>
<li>
<p>Fix issue with field serializers on nested typed dictionaries by <a
href="https://github.com/davidhewitt"><code>@​davidhewitt</code></a> in
<a
href="https://redirect.github.com/pydantic/pydantic-core/pull/1879">pydantic-core#1879</a>.</p>
</li>
<li>
<p>Add more <code>pydantic-core</code> builds for the free-threaded
version of Python 3.14 by <a
href="https://github.com/davidhewitt"><code>@​davidhewitt</code></a> in
<a
href="https://redirect.github.com/pydantic/pydantic-core/pull/1864">pydantic-core#1864</a>.</p>
</li>
</ul>
<h2>v2.12.3 (2025-10-17)</h2>
<p><a
href="https://github.com/pydantic/pydantic/releases/tag/v2.12.3">GitHub
release</a></p>
</blockquote>
<p>... (truncated)</p>
</details>
<details>
<summary>Commits</summary>
<ul>
<li><a
href="bd2d0dd013"><code>bd2d0dd</code></a>
Prepare release v2.12.5</li>
<li><a
href="7d0302ec7e"><code>7d0302e</code></a>
Document security implications when using
<code>create_model()</code></li>
<li><a
href="e9ef980def"><code>e9ef980</code></a>
Fix typo in Standard Library Types documentation</li>
<li><a
href="f2c20c00c2"><code>f2c20c0</code></a>
Add <code>pydantic-docs</code> dev dependency, make use of versioning
blocks</li>
<li><a
href="a76c1aa26f"><code>a76c1aa</code></a>
Update documentation about JSON Schema</li>
<li><a
href="8cbc72ca48"><code>8cbc72c</code></a>
Add documentation about custom <code>__init__()</code></li>
<li><a
href="99eba59906"><code>99eba59</code></a>
Add additional test for <code>FieldInfo.get_default()</code></li>
<li><a
href="c71076988e"><code>c710769</code></a>
Special case <code>MISSING</code> sentinel in
<code>smart_deepcopy()</code></li>
<li><a
href="20a9d771c2"><code>20a9d77</code></a>
Do not delete mock validator/serializer in
<code>rebuild_dataclass()</code></li>
<li><a
href="c86515a3a8"><code>c86515a</code></a>
Update parts of the model and <code>revalidate_instances</code>
documentation</li>
<li>Additional commits viewable in <a
href="https://github.com/pydantic/pydantic/compare/v2.11.7...v2.12.5">compare
view</a></li>
</ul>
</details>
<br />
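
The fix most likely to matter for this upgrade is #12522: a pickle error when `model_construct()` was used on a model whose default was the `MISSING` sentinel. A minimal sketch of the `model_construct()` plus pickle pattern the fix exercises, without the experimental sentinel itself (its import path is not assumed here):

```python
import pickle

from pydantic import BaseModel


class User(BaseModel):
    id: int
    name: str = "anonymous"


# model_construct() skips validation and fills unset fields from defaults;
# 2.12.5 fixes a pickle error when such a default is the MISSING sentinel.
u = User.model_construct(id=1)
assert pickle.loads(pickle.dumps(u)) == u
```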

Updates `pydantic-settings` from 2.10.1 to 2.12.0
<details>
<summary>Release notes</summary>
<p><em>Sourced from <a
href="https://github.com/pydantic/pydantic-settings/releases">pydantic-settings's
releases</a>.</em></p>
<blockquote>
<h2>v2.12.0</h2>
<h2>What's Changed</h2>
<ul>
<li>Support for enum kebab case. by <a
href="https://github.com/kschwab"><code>@​kschwab</code></a> in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/686">pydantic/pydantic-settings#686</a></li>
<li>Apply source order: init &gt; env &gt; dotenv &gt; secrets &gt;
defaults and pres… by <a
href="https://github.com/chbndrhnns"><code>@​chbndrhnns</code></a> in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/688">pydantic/pydantic-settings#688</a></li>
<li>Add NestedSecretsSettings source by <a
href="https://github.com/makukha"><code>@​makukha</code></a> in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/690">pydantic/pydantic-settings#690</a></li>
<li>Strip non-explicit default values. by <a
href="https://github.com/kschwab"><code>@​kschwab</code></a> in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/692">pydantic/pydantic-settings#692</a></li>
<li>Coerce env vars if strict is True. by <a
href="https://github.com/kschwab"><code>@​kschwab</code></a> in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/693">pydantic/pydantic-settings#693</a></li>
<li>Restore init kwarg names before returning final state dictionary. by
<a href="https://github.com/kschwab"><code>@​kschwab</code></a> in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/700">pydantic/pydantic-settings#700</a></li>
<li>Drop Python3.9 support by <a
href="https://github.com/hramezani"><code>@​hramezani</code></a> in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/699">pydantic/pydantic-settings#699</a></li>
<li>Adapt test_protected_namespace_defaults for dev. Pydantic by <a
href="https://github.com/musicinmybrain"><code>@​musicinmybrain</code></a>
in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/637">pydantic/pydantic-settings#637</a></li>
<li>Add Python 3.14 by <a
href="https://github.com/hramezani"><code>@​hramezani</code></a> in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/704">pydantic/pydantic-settings#704</a></li>
<li>Prepare release 2.12 by <a
href="https://github.com/hramezani"><code>@​hramezani</code></a> in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/705">pydantic/pydantic-settings#705</a></li>
</ul>
<h2>New Contributors</h2>
<ul>
<li><a
href="https://github.com/chbndrhnns"><code>@​chbndrhnns</code></a> made
their first contribution in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/688">pydantic/pydantic-settings#688</a></li>
</ul>
<p><strong>Full Changelog</strong>: <a
href="https://github.com/pydantic/pydantic-settings/compare/v2.11.0...v2.12.0">https://github.com/pydantic/pydantic-settings/compare/v2.11.0...v2.12.0</a></p>
<h2>v2.11.0</h2>
<h2>What's Changed</h2>
<ul>
<li>CLI Serialize Support by <a
href="https://github.com/kschwab"><code>@​kschwab</code></a> in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/643">pydantic/pydantic-settings#643</a></li>
<li>Inspect type aliases to determine if an annotation is complex by <a
href="https://github.com/tselepakis"><code>@​tselepakis</code></a> in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/644">pydantic/pydantic-settings#644</a></li>
<li>Revert &quot;fix: Respect 'cli_parse_args' from model_config with
settings_customise_sources (<a
href="https://redirect.github.com/pydantic/pydantic-settings/issues/611">#611</a>)&quot;
by <a href="https://github.com/hramezani"><code>@​hramezani</code></a>
in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/655">pydantic/pydantic-settings#655</a></li>
<li>Remove parsing of command line arguments from
<code>CliSettingsSource.__init__</code>. by <a
href="https://github.com/trygve-baerland"><code>@​trygve-baerland</code></a>
in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/656">pydantic/pydantic-settings#656</a></li>
<li>turn off allow_abbrev on subparsers by <a
href="https://github.com/mroch"><code>@​mroch</code></a> in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/658">pydantic/pydantic-settings#658</a></li>
<li>CLI Serialization Fixes by <a
href="https://github.com/kschwab"><code>@​kschwab</code></a> in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/649">pydantic/pydantic-settings#649</a></li>
<li>Fix PydanticModel type checking. by <a
href="https://github.com/kschwab"><code>@​kschwab</code></a> in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/659">pydantic/pydantic-settings#659</a></li>
<li>Avoid env_prefix falling back to env vars without prefix by <a
href="https://github.com/tselepakis"><code>@​tselepakis</code></a> in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/648">pydantic/pydantic-settings#648</a></li>
<li>Warn if model_config sets unused keys for missing settings sources
by <a href="https://github.com/HomerusJa"><code>@​HomerusJa</code></a>
in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/663">pydantic/pydantic-settings#663</a></li>
<li>Included endpoint_url kwarg in AWSSecretsManagerSettingsSource class
by <a href="https://github.com/adrianohrl"><code>@​adrianohrl</code></a>
in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/664">pydantic/pydantic-settings#664</a></li>
<li>Fix typo (&quot;Accesing&quot;) in the &quot;Adding sources&quot;
docs by <a
href="https://github.com/deepyaman"><code>@​deepyaman</code></a> in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/668">pydantic/pydantic-settings#668</a></li>
<li>CLI Windows Path Fix by <a
href="https://github.com/kschwab"><code>@​kschwab</code></a> in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/669">pydantic/pydantic-settings#669</a></li>
<li>Cli root model support by <a
href="https://github.com/kschwab"><code>@​kschwab</code></a> in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/677">pydantic/pydantic-settings#677</a></li>
<li>Snake case conversion in Azure Key Vault by <a
href="https://github.com/AndreuCodina"><code>@​AndreuCodina</code></a>
in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/680">pydantic/pydantic-settings#680</a></li>
<li>Make <code>InitSettingsSource</code> resolution deterministic by <a
href="https://github.com/enrico-stauss"><code>@​enrico-stauss</code></a>
in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/681">pydantic/pydantic-settings#681</a></li>
<li>Update deps by <a
href="https://github.com/hramezani"><code>@​hramezani</code></a> in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/683">pydantic/pydantic-settings#683</a></li>
</ul>
<h2>New Contributors</h2>
<ul>
<li><a
href="https://github.com/tselepakis"><code>@​tselepakis</code></a> made
their first contribution in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/644">pydantic/pydantic-settings#644</a></li>
<li><a
href="https://github.com/trygve-baerland"><code>@​trygve-baerland</code></a>
made their first contribution in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/656">pydantic/pydantic-settings#656</a></li>
<li><a href="https://github.com/mroch"><code>@​mroch</code></a> made
their first contribution in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/658">pydantic/pydantic-settings#658</a></li>
<li><a href="https://github.com/HomerusJa"><code>@​HomerusJa</code></a>
made their first contribution in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/663">pydantic/pydantic-settings#663</a></li>
<li><a
href="https://github.com/adrianohrl"><code>@​adrianohrl</code></a> made
their first contribution in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/664">pydantic/pydantic-settings#664</a></li>
<li><a href="https://github.com/deepyaman"><code>@​deepyaman</code></a>
made their first contribution in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/668">pydantic/pydantic-settings#668</a></li>
<li><a
href="https://github.com/enrico-stauss"><code>@​enrico-stauss</code></a>
made their first contribution in <a
href="https://redirect.github.com/pydantic/pydantic-settings/pull/681">pydantic/pydantic-settings#681</a></li>
</ul>
<p><strong>Full Changelog</strong>: <a
href="https://github.com/pydantic/pydantic-settings/compare/2.10.1...v2.11.0">https://github.com/pydantic/pydantic-settings/compare/2.10.1...v2.11.0</a></p>
</blockquote>
</details>
<details>
<summary>Commits</summary>
<ul>
<li><a
href="584983d253"><code>584983d</code></a>
Prepare release 2.12 (<a
href="https://redirect.github.com/pydantic/pydantic-settings/issues/705">#705</a>)</li>
<li><a
href="6b4d87e776"><code>6b4d87e</code></a>
Add Python 3.14 (<a
href="https://redirect.github.com/pydantic/pydantic-settings/issues/704">#704</a>)</li>
<li><a
href="02de5b622b"><code>02de5b6</code></a>
Adapt test_protected_namespace_defaults for dev. Pydantic (<a
href="https://redirect.github.com/pydantic/pydantic-settings/issues/637">#637</a>)</li>
<li><a
href="4239ea460a"><code>4239ea4</code></a>
Drop Python3.9 support (<a
href="https://redirect.github.com/pydantic/pydantic-settings/issues/699">#699</a>)</li>
<li><a
href="5008c694f6"><code>5008c69</code></a>
Restore init kwarg names before returning final state dictionary. (<a
href="https://redirect.github.com/pydantic/pydantic-settings/issues/700">#700</a>)</li>
<li><a
href="4433101fef"><code>4433101</code></a>
Coerce env vars if strict is True. (<a
href="https://redirect.github.com/pydantic/pydantic-settings/issues/693">#693</a>)</li>
<li><a
href="4d2ebfd543"><code>4d2ebfd</code></a>
Strip non-explicit default values. (<a
href="https://redirect.github.com/pydantic/pydantic-settings/issues/692">#692</a>)</li>
<li><a
href="4a6ffcaeae"><code>4a6ffca</code></a>
Add NestedSecretsSettings source (<a
href="https://redirect.github.com/pydantic/pydantic-settings/issues/690">#690</a>)</li>
<li><a
href="7a6e96ebfc"><code>7a6e96e</code></a>
Apply source order: init &gt; env &gt; dotenv &gt; secrets &gt; defaults
and pres… (<a
href="https://redirect.github.com/pydantic/pydantic-settings/issues/688">#688</a>)</li>
<li><a
href="68563eddc0"><code>68563ed</code></a>
Support for enum kebab case. (<a
href="https://redirect.github.com/pydantic/pydantic-settings/issues/686">#686</a>)</li>
<li>Additional commits viewable in <a
href="https://github.com/pydantic/pydantic-settings/compare/2.10.1...v2.12.0">compare
view</a></li>
</ul>
</details>
<br />
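
The behavioral change worth internalizing from 2.12.0 is #688, which enforces the documented source precedence: init kwargs beat environment variables, which beat dotenv files, which beat secrets files, which beat field defaults. A minimal sketch:

```python
from pydantic_settings import BaseSettings, SettingsConfigDict


class Settings(BaseSettings):
    model_config = SettingsConfigDict(env_file=".env")

    database_url: str = "sqlite:///default.db"  # lowest precedence: default
    debug: bool = False


# Precedence (highest first): init kwargs > env vars > .env > secrets > defaults.
# Even with DATABASE_URL exported in the environment, the init kwarg wins;
# drop it and the env var (then .env, then the default) takes over.
settings = Settings(database_url="postgres://localhost/app")
print(settings.database_url)  # "postgres://localhost/app"
```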

Updates `pyjwt` from 2.10.1 to 2.11.0
<details>
<summary>Release notes</summary>
<p><em>Sourced from <a
href="https://github.com/jpadilla/pyjwt/releases">pyjwt's
releases</a>.</em></p>
<blockquote>
<h2>2.11.0</h2>
<h2>What's Changed</h2>
<ul>
<li>Fixed type error in comment by <a
href="https://github.com/shuhaib-aot"><code>@​shuhaib-aot</code></a> in
<a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1026">jpadilla/pyjwt#1026</a></li>
<li>[pre-commit.ci] pre-commit autoupdate by <a
href="https://github.com/pre-commit-ci"><code>@​pre-commit-ci</code></a>[bot]
in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1018">jpadilla/pyjwt#1018</a></li>
<li>[pre-commit.ci] pre-commit autoupdate by <a
href="https://github.com/pre-commit-ci"><code>@​pre-commit-ci</code></a>[bot]
in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1033">jpadilla/pyjwt#1033</a></li>
<li>Make note of use of leeway with nbf by <a
href="https://github.com/djw8605"><code>@​djw8605</code></a> in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1034">jpadilla/pyjwt#1034</a></li>
<li>[pre-commit.ci] pre-commit autoupdate by <a
href="https://github.com/pre-commit-ci"><code>@​pre-commit-ci</code></a>[bot]
in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1035">jpadilla/pyjwt#1035</a></li>
<li>Fixes <a
href="https://redirect.github.com/jpadilla/pyjwt/issues/964">#964</a>:
Validate key against allowed types for Algorithm family by <a
href="https://github.com/pachewise"><code>@​pachewise</code></a> in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/985">jpadilla/pyjwt#985</a></li>
<li>Feat <a
href="https://redirect.github.com/jpadilla/pyjwt/issues/1024">#1024</a>:
Add iterator for PyJWKSet by <a
href="https://github.com/pachewise"><code>@​pachewise</code></a> in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1041">jpadilla/pyjwt#1041</a></li>
<li>Fixes <a
href="https://redirect.github.com/jpadilla/pyjwt/issues/1039">#1039</a>:
Add iss, issuer type checks by <a
href="https://github.com/pachewise"><code>@​pachewise</code></a> in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1040">jpadilla/pyjwt#1040</a></li>
<li>Fixes <a
href="https://redirect.github.com/jpadilla/pyjwt/issues/660">#660</a>:
Improve typing/logic for <code>options</code> in decode,
decode_complete; Improve docs by <a
href="https://github.com/pachewise"><code>@​pachewise</code></a> in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1045">jpadilla/pyjwt#1045</a></li>
<li>[pre-commit.ci] pre-commit autoupdate by <a
href="https://github.com/pre-commit-ci"><code>@​pre-commit-ci</code></a>[bot]
in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1042">jpadilla/pyjwt#1042</a></li>
<li>[pre-commit.ci] pre-commit autoupdate by <a
href="https://github.com/pre-commit-ci"><code>@​pre-commit-ci</code></a>[bot]
in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1052">jpadilla/pyjwt#1052</a></li>
<li>[pre-commit.ci] pre-commit autoupdate by <a
href="https://github.com/pre-commit-ci"><code>@​pre-commit-ci</code></a>[bot]
in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1053">jpadilla/pyjwt#1053</a></li>
<li>Fix <a
href="https://redirect.github.com/jpadilla/pyjwt/issues/1022">#1022</a>:
Map <code>algorithm=None</code> to &quot;none&quot; by <a
href="https://github.com/qqii"><code>@​qqii</code></a> in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1056">jpadilla/pyjwt#1056</a></li>
<li>[pre-commit.ci] pre-commit autoupdate by <a
href="https://github.com/pre-commit-ci"><code>@​pre-commit-ci</code></a>[bot]
in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1055">jpadilla/pyjwt#1055</a></li>
<li>[pre-commit.ci] pre-commit autoupdate by <a
href="https://github.com/pre-commit-ci"><code>@​pre-commit-ci</code></a>[bot]
in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1058">jpadilla/pyjwt#1058</a></li>
<li>[pre-commit.ci] pre-commit autoupdate by <a
href="https://github.com/pre-commit-ci"><code>@​pre-commit-ci</code></a>[bot]
in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1060">jpadilla/pyjwt#1060</a></li>
<li>[pre-commit.ci] pre-commit autoupdate by <a
href="https://github.com/pre-commit-ci"><code>@​pre-commit-ci</code></a>[bot]
in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1061">jpadilla/pyjwt#1061</a></li>
<li>Fixes <a
href="https://redirect.github.com/jpadilla/pyjwt/issues/1047">#1047</a>:
Correct <code>PyJWKClient.get_signing_key_from_jwt</code> annotation by
<a href="https://github.com/khvn26"><code>@​khvn26</code></a> in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1048">jpadilla/pyjwt#1048</a></li>
<li>[pre-commit.ci] pre-commit autoupdate by <a
href="https://github.com/pre-commit-ci"><code>@​pre-commit-ci</code></a>[bot]
in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1062">jpadilla/pyjwt#1062</a></li>
<li>Fixed doc string typo in _validate_jti() function <a
href="https://redirect.github.com/jpadilla/pyjwt/issues/1063">#1063</a>
by <a
href="https://github.com/kuldeepkhatke"><code>@​kuldeepkhatke</code></a>
in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1064">jpadilla/pyjwt#1064</a></li>
<li>[pre-commit.ci] pre-commit autoupdate by <a
href="https://github.com/pre-commit-ci"><code>@​pre-commit-ci</code></a>[bot]
in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1065">jpadilla/pyjwt#1065</a></li>
<li>Update SECURITY.md by <a
href="https://github.com/auvipy"><code>@​auvipy</code></a> in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1057">jpadilla/pyjwt#1057</a></li>
<li>Typing fix: use <code>float</code> instead of <code>int</code> for
<code>lifespan</code> and <code>timeout</code> by <a
href="https://github.com/nikitagashkov"><code>@​nikitagashkov</code></a>
in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1068">jpadilla/pyjwt#1068</a></li>
<li>[pre-commit.ci] pre-commit autoupdate by <a
href="https://github.com/pre-commit-ci"><code>@​pre-commit-ci</code></a>[bot]
in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1067">jpadilla/pyjwt#1067</a></li>
<li>[pre-commit.ci] pre-commit autoupdate by <a
href="https://github.com/pre-commit-ci"><code>@​pre-commit-ci</code></a>[bot]
in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1071">jpadilla/pyjwt#1071</a></li>
<li>[pre-commit.ci] pre-commit autoupdate by <a
href="https://github.com/pre-commit-ci"><code>@​pre-commit-ci</code></a>[bot]
in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1076">jpadilla/pyjwt#1076</a></li>
<li>Fix TYP header documentation by <a
href="https://github.com/fobiasmog"><code>@​fobiasmog</code></a> in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1046">jpadilla/pyjwt#1046</a></li>
<li>doc: Document claims sub and jti by <a
href="https://github.com/cleder"><code>@​cleder</code></a> in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1088">jpadilla/pyjwt#1088</a></li>
<li>[pre-commit.ci] pre-commit autoupdate by <a
href="https://github.com/pre-commit-ci"><code>@​pre-commit-ci</code></a>[bot]
in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1077">jpadilla/pyjwt#1077</a></li>
<li>Bump actions/setup-python from 5 to 6 by <a
href="https://github.com/dependabot"><code>@​dependabot</code></a>[bot]
in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1089">jpadilla/pyjwt#1089</a></li>
<li>Bump actions/stale from 8 to 10 by <a
href="https://github.com/dependabot"><code>@​dependabot</code></a>[bot]
in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1090">jpadilla/pyjwt#1090</a></li>
<li>Bump actions/checkout from 4 to 5 by <a
href="https://github.com/dependabot"><code>@​dependabot</code></a>[bot]
in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1083">jpadilla/pyjwt#1083</a></li>
<li>[pre-commit.ci] pre-commit autoupdate by <a
href="https://github.com/pre-commit-ci"><code>@​pre-commit-ci</code></a>[bot]
in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1091">jpadilla/pyjwt#1091</a></li>
<li>[pre-commit.ci] pre-commit autoupdate by <a
href="https://github.com/pre-commit-ci"><code>@​pre-commit-ci</code></a>[bot]
in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1093">jpadilla/pyjwt#1093</a></li>
<li>[pre-commit.ci] pre-commit autoupdate by <a
href="https://github.com/pre-commit-ci"><code>@​pre-commit-ci</code></a>[bot]
in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1096">jpadilla/pyjwt#1096</a></li>
<li>Resolve package build warnings by <a
href="https://github.com/kurtmckee"><code>@​kurtmckee</code></a> in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1105">jpadilla/pyjwt#1105</a></li>
<li>Support Python 3.14, and test against PyPy 3.10+ by <a
href="https://github.com/kurtmckee"><code>@​kurtmckee</code></a> in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1104">jpadilla/pyjwt#1104</a></li>
<li>Fix a <code>SyntaxWarning</code> caused by invalid escape sequences
by <a href="https://github.com/kurtmckee"><code>@​kurtmckee</code></a>
in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1103">jpadilla/pyjwt#1103</a></li>
<li>Standardize CHANGELOG links to PRs by <a
href="https://github.com/kurtmckee"><code>@​kurtmckee</code></a> in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1110">jpadilla/pyjwt#1110</a></li>
<li>Migrate from <code>pep517</code>, which is deprecated, to
<code>build</code> by <a
href="https://github.com/kurtmckee"><code>@​kurtmckee</code></a> in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1108">jpadilla/pyjwt#1108</a></li>
<li>Fix incorrectly-named test suite function by <a
href="https://github.com/kurtmckee"><code>@​kurtmckee</code></a> in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1116">jpadilla/pyjwt#1116</a></li>
<li>Fix Read the Docs builds by <a
href="https://github.com/kurtmckee"><code>@​kurtmckee</code></a> in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1111">jpadilla/pyjwt#1111</a></li>
<li>Bump actions/download-artifact from 4 to 6 by <a
href="https://github.com/dependabot"><code>@​dependabot</code></a>[bot]
in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1118">jpadilla/pyjwt#1118</a></li>
<li>Escalate test suite warnings to errors by <a
href="https://github.com/kurtmckee"><code>@​kurtmckee</code></a> in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1107">jpadilla/pyjwt#1107</a></li>
<li>Add pyupgrade as a pre-commit hook by <a
href="https://github.com/kurtmckee"><code>@​kurtmckee</code></a> in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1109">jpadilla/pyjwt#1109</a></li>
<li>Simplify the test suite decorators by <a
href="https://github.com/kurtmckee"><code>@​kurtmckee</code></a> in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1113">jpadilla/pyjwt#1113</a></li>
<li>Improve coverage config and eliminate unused test suite code by <a
href="https://github.com/kurtmckee"><code>@​kurtmckee</code></a> in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1115">jpadilla/pyjwt#1115</a></li>
<li>Build a shared wheel once in the test suite by <a
href="https://github.com/kurtmckee"><code>@​kurtmckee</code></a> in <a
href="https://redirect.github.com/jpadilla/pyjwt/pull/1114">jpadilla/pyjwt#1114</a></li>
</ul>
</blockquote>
<p>... (truncated)</p>
</details>
<details>
<summary>Changelog</summary>
<p><em>Sourced from <a
href="https://github.com/jpadilla/pyjwt/blob/master/CHANGELOG.rst">pyjwt's
changelog</a>.</em></p>
<blockquote>
<h2><a
href="https://github.com/jpadilla/pyjwt/compare/2.10.1...2.11.0">v2.11.0</a></h2>
<p>Fixed</p>
<ul>
<li>Enforce ECDSA curve validation per RFC 7518 Section 3.4.</li>
<li>Fix build system warnings by <code>@kurtmckee</code> in <a
href="https://github.com/jpadilla/pyjwt/pull/1105">#1105</a></li>
<li>Validate key against allowed types for Algorithm family in <a
href="https://github.com/jpadilla/pyjwt/pull/964">#964</a></li>
<li>Add iterator for JWKSet in <a
href="https://github.com/jpadilla/pyjwt/pull/1041">#1041</a></li>
<li>Validate <code>iss</code> claim is a string during encoding and decoding
by <code>@pachewise</code> in <a
href="https://github.com/jpadilla/pyjwt/pull/1040">#1040</a></li>
<li>Improve typing/logic for <code>options</code> in decode, decode_complete
by <code>@pachewise</code> in <a
href="https://github.com/jpadilla/pyjwt/pull/1045">#1045</a></li>
<li>Declare float supported type for lifespan and timeout by
<code>@nikitagashkov</code> in <a
href="https://github.com/jpadilla/pyjwt/pull/1068">#1068</a></li>
<li>Fix <code>SyntaxWarning</code>s/<code>DeprecationWarning</code>s caused by
invalid escape sequences by <code>@kurtmckee</code> in <a
href="https://github.com/jpadilla/pyjwt/pull/1103">#1103</a></li>
<li>Development: Build a shared wheel once to speed up test suite setup times
by <code>@kurtmckee</code> in <a
href="https://github.com/jpadilla/pyjwt/pull/1114">#1114</a></li>
<li>Development: Test type annotations across all supported Python versions,
increase the strictness of the type checking, and remove the mypy pre-commit
hook by <code>@kurtmckee</code> in <a
href="https://github.com/jpadilla/pyjwt/pull/1112">#1112</a></li>
</ul>
<p>Added</p>
<ul>
<li>Support Python 3.14, and test against PyPy 3.10 and 3.11 by <a
href="https://github.com/kurtmckee"><code>@kurtmckee</code></a> in <a
href="https://github.com/jpadilla/pyjwt/pull/1104">#1104</a></li>
<li>Development: Migrate to <code>build</code> to test package building in CI
by <a href="https://github.com/kurtmckee"><code>@kurtmckee</code></a> in <a
href="https://github.com/jpadilla/pyjwt/pull/1108">#1108</a></li>
<li>Development: Improve coverage config and eliminate unused test suite code
by <a href="https://github.com/kurtmckee"><code>@kurtmckee</code></a> in <a
href="https://github.com/jpadilla/pyjwt/pull/1115">#1115</a></li>
<li>Docs: Standardize CHANGELOG links to PRs by <a
href="https://github.com/kurtmckee"><code>@kurtmckee</code></a> in <a
href="https://github.com/jpadilla/pyjwt/pull/1110">#1110</a></li>
<li>Docs: Fix Read the Docs builds by <a
href="https://github.com/kurtmckee"><code>@kurtmckee</code></a> in <a
href="https://github.com/jpadilla/pyjwt/pull/1111">#1111</a></li>
<li>Docs: Add example of using leeway with nbf by <a
href="https://github.com/djw8605"><code>@djw8605</code></a> in <a
href="https://github.com/jpadilla/pyjwt/pull/1034">#1034</a></li>
<li>Docs: Refactored docs with <code>autodoc</code>; added <code>PyJWS</code>
and <code>jwt.algorithms</code> docs by <a
href="https://github.com/pachewise"><code>@pachewise</code></a> in <a
href="https://github.com/jpadilla/pyjwt/pull/1045">#1045</a></li>
<li>Docs: Documentation improvements for &quot;sub&quot; and &quot;jti&quot;
claims by <a href="https://github.com/cleder"><code>@cleder</code></a> in <a
href="https://github.com/jpadilla/pyjwt/pull/1088">#1088</a></li>
<li>Development: Add pyupgrade as a pre-commit hook by <a
href="https://github.com/kurtmckee"><code>@kurtmckee</code></a> in <a
href="https://github.com/jpadilla/pyjwt/pull/1109">#1109</a></li>
<li>Add minimum key length validation for HMAC and RSA keys (CWE-326).
Warns by default via <code>InsecureKeyLengthWarning</code> when keys are below
minimum recommended lengths per RFC 7518 Section 3.2 (HMAC) and
NIST SP 800-131A (RSA). Pass <code>enforce_minimum_key_length=True</code> in
options to <code>PyJWT</code> or <code>PyJWS</code> to raise
<code>InvalidKeyError</code> instead.</li>
<li>Refactor <code>PyJWT</code> to own an internal <code>PyJWS</code> instance
instead of calling global <code>api_jws</code> functions.</li>
</ul>
</blockquote>
</details>
<details>
<summary>Commits</summary>
<ul>
<li><a
href="697344d259"><code>697344d</code></a>
bump up version</li>
<li><a
href="e4d0aec024"><code>e4d0aec</code></a>
fix: pre-commit</li>
<li><a
href="df9a6a0c44"><code>df9a6a0</code></a>
fix: failing test</li>
<li><a
href="2b2e53cd23"><code>2b2e53c</code></a>
fix: docs</li>
<li><a
href="635c8d89dd"><code>635c8d8</code></a>
fix: failing mypy</li>
<li><a
href="96ae3563b9"><code>96ae356</code></a>
feat: add minimum key length validation for HMAC and RSA</li>
<li><a
href="5b86227733"><code>5b86227</code></a>
fix: enforce ECDSA curve validation per RFC 7518 Section 3.4</li>
<li><a
href="04947d75dc"><code>04947d7</code></a>
Bump actions/download-artifact from 6 to 7 (<a
href="https://redirect.github.com/jpadilla/pyjwt/issues/1125">#1125</a>)</li>
<li><a
href="dd448344c3"><code>dd44834</code></a>
Fix leeway value in usage documentation (<a
href="https://redirect.github.com/jpadilla/pyjwt/issues/1124">#1124</a>)</li>
<li><a
href="407f0bde99"><code>407f0bd</code></a>
Thoroughly test type annotations, and resolve errors (<a
href="https://redirect.github.com/jpadilla/pyjwt/issues/1112">#1112</a>)</li>
<li>Additional commits viewable in <a
href="https://github.com/jpadilla/pyjwt/compare/2.10.1...2.11.0">compare
view</a></li>
</ul>
</details>
<br />
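A minimal sketch of the new key-length validation described above, based only on the wording of the release note (the exact option plumbing in the released 2.11.0 API may differ):

```python
import jwt

payload = {"sub": "1234567890"}
short_key = "too-short"  # HS256 wants >= 32 bytes (256 bits) per RFC 7518 3.2

# Default behaviour per the release note: encoding proceeds but emits
# InsecureKeyLengthWarning for the undersized key.
token = jwt.encode(payload, short_key, algorithm="HS256")

# Opt-in strict mode, per the release note: raise instead of warn.
strict = jwt.PyJWT(options={"enforce_minimum_key_length": True})
try:
    strict.encode(payload, short_key, algorithm="HS256")
except jwt.InvalidKeyError:
    print("rejected: HMAC key below the RFC 7518 minimum")
```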

Updates `supabase` from 2.16.0 to 2.27.2
<details>
<summary>Release notes</summary>
<p><em>Sourced from <a
href="https://github.com/supabase/supabase-py/releases">supabase's
releases</a>.</em></p>
<blockquote>
<h2>v2.27.2</h2>
<h2><a
href="https://github.com/supabase/supabase-py/compare/v2.27.1...v2.27.2">2.27.2</a>
(2026-01-14)</h2>
<h3>Bug Fixes</h3>
<ul>
<li><strong>ci:</strong> generate new token for release-please (<a
href="https://redirect.github.com/supabase/supabase-py/issues/1348">#1348</a>)
(<a
href="c2ad37f9dc">c2ad37f</a>)</li>
<li><strong>ci:</strong> run CI when .github files change (<a
href="https://redirect.github.com/supabase/supabase-py/issues/1349">#1349</a>)
(<a
href="a221aac029">a221aac</a>)</li>
<li><strong>realtime:</strong> amend reconnect logic to not unsubscribe
(<a
href="https://redirect.github.com/supabase/supabase-py/issues/1346">#1346</a>)
(<a
href="cfbe5943cb">cfbe594</a>)</li>
</ul>
<h2>v2.27.1</h2>
<h2><a
href="https://github.com/supabase/supabase-py/compare/v2.27.0...v2.27.1">2.27.1</a>
(2026-01-06)</h2>
<h3>Bug Fixes</h3>
<ul>
<li><strong>realtime:</strong> use 'event' instead of 'events' in
postgres_changes protocol (<a
href="https://redirect.github.com/supabase/supabase-py/issues/1339">#1339</a>)
(<a
href="c1e7986c5e">c1e7986</a>)</li>
<li><strong>storage:</strong> catch bad responses from server (<a
href="https://redirect.github.com/supabase/supabase-py/issues/1344">#1344</a>)
(<a
href="ddb50547db">ddb5054</a>)</li>
</ul>
<h2>v2.27.0</h2>
<h2><a
href="https://github.com/supabase/supabase-py/compare/v2.26.0...v2.27.0">2.27.0</a>
(2025-12-16)</h2>
<h3>Features</h3>
<ul>
<li><strong>auth:</strong> add X (OAuth 2.0) provider (<a
href="https://redirect.github.com/supabase/supabase-py/issues/1335">#1335</a>)
(<a
href="f600f96b52">f600f96</a>)</li>
</ul>
<h3>Bug Fixes</h3>
<ul>
<li><strong>storage:</strong> replace deprecated pydantic Extra with
literal values (<a
href="https://redirect.github.com/supabase/supabase-py/issues/1334">#1334</a>)
(<a
href="6df3545785">6df3545</a>)</li>
</ul>
<h2>v2.26....

_Description has been truncated_

---------

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: claude[bot] <41898282+claude[bot]@users.noreply.github.com>
Co-authored-by: Nicholas Tindle <ntindle@users.noreply.github.com>
Co-authored-by: Nicholas Tindle <nicholas.tindle@agpt.co>
Co-authored-by: Nick Tindle <nick@ntindle.com>
2026-02-07 02:17:38 +00:00
Reinier van der Leer
8fddc9d71f fix(backend): Reduce GET /api/graphs expense + latency (#11986)
[SECRT-1896: Fix crazy `GET /api/graphs` latency (P95 =
107s)](https://linear.app/autogpt/issue/SECRT-1896)

These changes should decrease latency of this endpoint by ~~60-65%~~ a
lot.

### Changes 🏗️

- Make `Graph.credentials_input_schema` cheaper by avoiding constructing
a new `BlockSchema` subclass
- Strip down `GraphMeta` - drop all computed fields
- Replace with either `GraphModel` or `GraphModelWithoutNodes` wherever
those computed fields are used
- Simplify usage in `list_graphs_paginated` and
`fetch_graph_from_store_slug`
- Refactor and clarify relationships between the different graph models
  - Split `BaseGraph` into `GraphBaseMeta` + `BaseGraph`
- Strip down `Graph` - move `credentials_input_schema` and
`aggregate_credentials_inputs` to `GraphModel`
- Refactor to eliminate double `aggregate_credentials_inputs()` call in
`credentials_input_schema` call tree
  - Add `GraphModelWithoutNodes` (similar to current `GraphMeta`; see the sketch below)
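
The `GraphModelWithoutNodes` approach leans on pydantic's per-field `exclude` flag: the data stays on the instance (so computed fields keep working) but is dropped from serialized output. A minimal self-contained sketch of the pattern, with simplified stand-in types rather than the actual models:

```python
from pydantic import BaseModel, Field

class GraphModel(BaseModel):
    name: str
    nodes: list[dict] = Field(default_factory=list)

class GraphModelWithoutNodes(GraphModel):
    # Same field, but excluded from model_dump()/JSON output while
    # remaining fully usable on the instance itself.
    nodes: list[dict] = Field(default_factory=list, exclude=True)

g = GraphModelWithoutNodes(name="demo", nodes=[{"id": "n1"}])
assert g.nodes                        # still available internally
assert "nodes" not in g.model_dump()  # hidden from API responses
```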

### Checklist 📋

#### For code changes:
- [x] I have clearly listed my changes in the PR description
- [x] I have made a test plan
- [x] I have tested my changes according to the test plan:
  - [x] `GET /api/graphs` works as it should
  - [x] Running a graph succeeds
  - [x] Adding a sub-agent in the Builder works as it should
2026-02-06 19:13:21 +00:00
Ubbe
3d1cd03fc8 ci(frontend): disable chromatic for this month (#11994)
### Changes 🏗️

- we reached the max snapshots quota and don't want to upgrade
- make it run (when re-enabled) on `src/components` changes only to
reduce snapshots

### Checklist 📋

#### For code changes:
- [x] I have clearly listed my changes in the PR description
- [x] I have made a test plan
- [x] I have tested my changes according to the test plan:
  - [x] CI (hope for the best)
2026-02-06 19:17:25 +07:00
Swifty
e7ebe42306 fix(frontend): Revert ThinkingMessage progress bar delay to original values (#11993) 2026-02-06 12:23:32 +01:00
Otto
e0fab7e34e fix(frontend): Improve clarification answer message formatting (#11985)
## Summary

Improves the auto-generated message format when users submit
clarification answers in the agent generator.

## Before

```
I have the answers to your questions:

keyword_1: User answer 1
keyword_2: User answer 2

Please proceed with creating the agent.
```
<img width="748" height="153" alt="image"
src="https://github.com/user-attachments/assets/7231aaab-8ea4-406b-ba31-fa2b6055b82d"
/>

## After

```
**Here are my answers:**

> What is the primary purpose?

User answer 1

> What is the target audience?

User answer 2

Please proceed with creating the agent.
```
<img width="619" height="352" alt="image"
src="https://github.com/user-attachments/assets/ef8c1fbf-fb60-4488-b51f-407c1b9e3e44"
/>


## Changes

- Use human-readable question text instead of machine-readable keywords
- Use blockquote format for questions (natural "quote and reply"
pattern; see the sketch after this list)
- Use double newlines for proper Markdown paragraph breaks
- Iterate over `message.questions` array to preserve original question
order
- Move handler inside conditional block for proper TypeScript type
narrowing
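
The formatting logic itself is small. The actual change lives in the TypeScript frontend; sketched in Python for illustration (the `question`/`keyword` field names are assumptions), it amounts to:

```python
def format_clarification_answers(
    questions: list[dict[str, str]], answers: dict[str, str]
) -> str:
    parts = ["**Here are my answers:**"]
    for q in questions:                      # preserve original question order
        parts.append(f"> {q['question']}")   # blockquote the readable question
        parts.append(answers[q["keyword"]])  # the user's answer as plain text
    parts.append("Please proceed with creating the agent.")
    return "\n\n".join(parts)                # double newlines = paragraph breaks
```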

## Why

- The old format was ugly and hard to read (raw keywords, no line
breaks)
- The new format uses a natural "quoting and replying" pattern
- Better readability for both users and the LLM (verified: backend does
NOT parse keywords)

## Linear Ticket

Fixes [SECRT-1822](https://linear.app/autogpt/issue/SECRT-1822)

## Testing

- [ ] Trigger agent creation that requires clarifying questions
- [ ] Fill out the form and submit
- [ ] Verify message appears with new blockquote format
- [ ] Verify questions appear in original order
- [ ] Verify agent generation proceeds correctly

Co-authored-by: Toran Bruce Richards <toran.richards@gmail.com>
2026-02-06 08:41:06 +00:00
41 changed files with 5694 additions and 3720 deletions

View File

@@ -27,11 +27,20 @@ jobs:
     runs-on: ubuntu-latest
     outputs:
       cache-key: ${{ steps.cache-key.outputs.key }}
+      components-changed: ${{ steps.filter.outputs.components }}
     steps:
       - name: Checkout repository
         uses: actions/checkout@v4
+      - name: Check for component changes
+        uses: dorny/paths-filter@v3
+        id: filter
+        with:
+          filters: |
+            components:
+              - 'autogpt_platform/frontend/src/components/**'
       - name: Set up Node.js
         uses: actions/setup-node@v4
         with:
@@ -90,8 +99,11 @@ jobs:
   chromatic:
     runs-on: ubuntu-latest
     needs: setup
-    # Only run on dev branch pushes or PRs targeting dev
-    if: github.ref == 'refs/heads/dev' || github.base_ref == 'dev'
+    # Disabled: to re-enable, remove 'false &&' from the condition below
+    if: >-
+      false
+      && (github.ref == 'refs/heads/dev' || github.base_ref == 'dev')
+      && needs.setup.outputs.components-changed == 'true'
     steps:
       - name: Checkout repository
File diff suppressed because it is too large

View File

@@ -11,15 +11,15 @@ python = ">=3.10,<4.0"
 colorama = "^0.4.6"
 cryptography = "^45.0"
 expiringdict = "^1.2.2"
-fastapi = "^0.116.1"
-google-cloud-logging = "^3.12.1"
-launchdarkly-server-sdk = "^9.12.0"
-pydantic = "^2.11.7"
-pydantic-settings = "^2.10.1"
-pyjwt = { version = "^2.10.1", extras = ["crypto"] }
+fastapi = "^0.128.0"
+google-cloud-logging = "^3.13.0"
+launchdarkly-server-sdk = "^9.14.1"
+pydantic = "^2.12.5"
+pydantic-settings = "^2.12.0"
+pyjwt = { version = "^2.11.0", extras = ["crypto"] }
 redis = "^6.2.0"
-supabase = "^2.16.0"
-uvicorn = "^0.35.0"
+supabase = "^2.27.2"
+uvicorn = "^0.40.0"

 [tool.poetry.group.dev.dependencies]
 pyright = "^1.1.404"

View File

@@ -6,7 +6,6 @@ from typing import Any
 from backend.api.features.library import db as library_db
 from backend.api.features.library import model as library_model
 from backend.api.features.store import db as store_db
-from backend.data import graph as graph_db
 from backend.data.graph import GraphModel
 from backend.data.model import (
     CredentialsFieldInfo,
@@ -44,14 +43,8 @@ async def fetch_graph_from_store_slug(
         return None, None

     # Get the graph from store listing version
-    graph_meta = await store_db.get_available_graph(
-        store_agent.store_listing_version_id
-    )
-    graph = await graph_db.get_graph(
-        graph_id=graph_meta.id,
-        version=graph_meta.version,
-        user_id=None,  # Public access
-        include_subgraphs=True,
+    graph = await store_db.get_available_graph(
+        store_agent.store_listing_version_id, hide_nodes=False
     )
     return graph, store_agent
@@ -128,7 +121,7 @@ def build_missing_credentials_from_graph(
     return {
         field_key: _serialize_missing_credential(field_key, field_info)
-        for field_key, (field_info, _node_fields) in aggregated_fields.items()
+        for field_key, (field_info, _, _) in aggregated_fields.items()
         if field_key not in matched_keys
     }
@@ -269,7 +262,8 @@ async def match_user_credentials_to_graph(
     # provider is in the set of acceptable providers.
     for credential_field_name, (
         credential_requirements,
-        _node_fields,
+        _,
+        _,
     ) in aggregated_creds.items():
         # Find first matching credential by provider, type, and scopes
         matching_cred = next(

View File

@@ -374,7 +374,7 @@ async def get_library_agent_by_graph_id(
 async def add_generated_agent_image(
-    graph: graph_db.BaseGraph,
+    graph: graph_db.GraphBaseMeta,
     user_id: str,
     library_agent_id: str,
 ) -> Optional[prisma.models.LibraryAgent]:

View File

@@ -1,7 +1,7 @@
 import asyncio
 import logging
 from datetime import datetime, timezone
-from typing import Any, Literal
+from typing import Any, Literal, overload

 import fastapi
 import prisma.enums
@@ -11,8 +11,8 @@ import prisma.types
 from backend.data.db import transaction
 from backend.data.graph import (
-    GraphMeta,
     GraphModel,
+    GraphModelWithoutNodes,
     get_graph,
     get_graph_as_admin,
     get_sub_graphs,
@@ -334,7 +334,22 @@ async def get_store_agent_details(
         raise DatabaseError("Failed to fetch agent details") from e

-async def get_available_graph(store_listing_version_id: str) -> GraphMeta:
+@overload
+async def get_available_graph(
+    store_listing_version_id: str, hide_nodes: Literal[False]
+) -> GraphModel: ...
+@overload
+async def get_available_graph(
+    store_listing_version_id: str, hide_nodes: Literal[True] = True
+) -> GraphModelWithoutNodes: ...
+async def get_available_graph(
+    store_listing_version_id: str,
+    hide_nodes: bool = True,
+) -> GraphModelWithoutNodes | GraphModel:
     try:
         # Get avaialble, non-deleted store listing version
         store_listing_version = (
@@ -344,7 +359,7 @@ async def get_available_graph(store_listing_version_id: str) -> GraphMeta:
                     "isAvailable": True,
                     "isDeleted": False,
                 },
-                include={"AgentGraph": {"include": {"Nodes": True}}},
+                include={"AgentGraph": {"include": AGENT_GRAPH_INCLUDE}},
             )
         )
@@ -354,7 +369,9 @@ async def get_available_graph(store_listing_version_id: str) -> GraphMeta:
                 detail=f"Store listing version {store_listing_version_id} not found",
             )

-        return GraphModel.from_db(store_listing_version.AgentGraph).meta()
+        return (GraphModelWithoutNodes if hide_nodes else GraphModel).from_db(
+            store_listing_version.AgentGraph
+        )
     except Exception as e:
         logger.error(f"Error getting agent: {e}")

View File

@@ -16,7 +16,7 @@ from backend.blocks.ideogram import (
     StyleType,
     UpscaleOption,
 )
-from backend.data.graph import BaseGraph
+from backend.data.graph import GraphBaseMeta
 from backend.data.model import CredentialsMetaInput, ProviderName
 from backend.integrations.credentials_store import ideogram_credentials
 from backend.util.request import Requests
@@ -34,14 +34,14 @@ class ImageStyle(str, Enum):
     DIGITAL_ART = "digital art"

-async def generate_agent_image(agent: BaseGraph | AgentGraph) -> io.BytesIO:
+async def generate_agent_image(agent: GraphBaseMeta | AgentGraph) -> io.BytesIO:
     if settings.config.use_agent_image_generation_v2:
         return await generate_agent_image_v2(graph=agent)
     else:
         return await generate_agent_image_v1(agent=agent)

-async def generate_agent_image_v2(graph: BaseGraph | AgentGraph) -> io.BytesIO:
+async def generate_agent_image_v2(graph: GraphBaseMeta | AgentGraph) -> io.BytesIO:
     """
     Generate an image for an agent using Ideogram model.

     Returns:
@@ -54,14 +54,17 @@ async def generate_agent_image_v2(graph: BaseGraph | AgentGraph) -> io.BytesIO:
     description = f"{name} ({graph.description})" if graph.description else name

     prompt = (
-        f"Create a visually striking retro-futuristic vector pop art illustration prominently featuring "
-        f'"{name}" in bold typography. The image clearly and literally depicts a {description}, '
-        f"along with recognizable objects directly associated with the primary function of a {name}. "
-        f"Ensure the imagery is concrete, intuitive, and immediately understandable, clearly conveying the "
-        f"purpose of a {name}. Maintain vibrant, limited-palette colors, sharp vector lines, geometric "
-        f"shapes, flat illustration techniques, and solid colors without gradients or shading. Preserve a "
-        f"retro-futuristic aesthetic influenced by mid-century futurism and 1960s psychedelia, "
-        f"prioritizing clear visual storytelling and thematic clarity above all else."
+        "Create a visually striking retro-futuristic vector pop art illustration "
+        f'prominently featuring "{name}" in bold typography. The image clearly and '
+        f"literally depicts a {description}, along with recognizable objects directly "
+        f"associated with the primary function of a {name}. "
+        f"Ensure the imagery is concrete, intuitive, and immediately understandable, "
+        f"clearly conveying the purpose of a {name}. "
+        "Maintain vibrant, limited-palette colors, sharp vector lines, "
+        "geometric shapes, flat illustration techniques, and solid colors "
+        "without gradients or shading. Preserve a retro-futuristic aesthetic "
+        "influenced by mid-century futurism and 1960s psychedelia, "
+        "prioritizing clear visual storytelling and thematic clarity above all else."
     )

     custom_colors = [
@@ -99,12 +102,12 @@ async def generate_agent_image_v2(graph: BaseGraph | AgentGraph) -> io.BytesIO:
     return io.BytesIO(response.content)

-async def generate_agent_image_v1(agent: BaseGraph | AgentGraph) -> io.BytesIO:
+async def generate_agent_image_v1(agent: GraphBaseMeta | AgentGraph) -> io.BytesIO:
     """
     Generate an image for an agent using Flux model via Replicate API.

     Args:
-        agent (Graph): The agent to generate an image for
+        agent (GraphBaseMeta | AgentGraph): The agent to generate an image for

     Returns:
         io.BytesIO: The generated image as bytes
@@ -114,7 +117,13 @@ async def generate_agent_image_v1(agent: BaseGraph | AgentGraph) -> io.BytesIO:
         raise ValueError("Missing Replicate API key in settings")

     # Construct prompt from agent details
-    prompt = f"Create a visually engaging app store thumbnail for the AI agent that highlights what it does in a clear and captivating way:\n- **Name**: {agent.name}\n- **Description**: {agent.description}\nFocus on showcasing its core functionality with an appealing design."
+    prompt = (
+        "Create a visually engaging app store thumbnail for the AI agent "
+        "that highlights what it does in a clear and captivating way:\n"
+        f"- **Name**: {agent.name}\n"
+        f"- **Description**: {agent.description}\n"
+        f"Focus on showcasing its core functionality with an appealing design."
+    )

     # Set up Replicate client
     client = ReplicateClient(api_token=settings.secrets.replicate_api_key)
View File

@@ -278,7 +278,7 @@ async def get_agent(
 )
 async def get_graph_meta_by_store_listing_version_id(
     store_listing_version_id: str,
-) -> backend.data.graph.GraphMeta:
+) -> backend.data.graph.GraphModelWithoutNodes:
     """
     Get Agent Graph from Store Listing Version ID.
     """

View File

@@ -478,7 +478,7 @@ class ExaCreateOrFindWebsetBlock(Block):
         aexa = AsyncExa(api_key=credentials.api_key.get_secret_value())

         try:
-            webset = aexa.websets.get(id=input_data.external_id)
+            webset = await aexa.websets.get(id=input_data.external_id)
             webset_result = Webset.model_validate(webset.model_dump(by_alias=True))
             yield "webset", webset_result
@@ -494,7 +494,7 @@ class ExaCreateOrFindWebsetBlock(Block):
                 count=input_data.search_count,
             )
-            webset = aexa.websets.create(
+            webset = await aexa.websets.create(
                 params=CreateWebsetParameters(
                     search=search_params,
                     external_id=input_data.external_id,
@@ -554,7 +554,7 @@ class ExaUpdateWebsetBlock(Block):
         if input_data.metadata is not None:
             payload["metadata"] = input_data.metadata

-        sdk_webset = aexa.websets.update(id=input_data.webset_id, params=payload)
+        sdk_webset = await aexa.websets.update(id=input_data.webset_id, params=payload)

         status_str = (
             sdk_webset.status.value
@@ -617,7 +617,7 @@ class ExaListWebsetsBlock(Block):
     ) -> BlockOutput:
         aexa = AsyncExa(api_key=credentials.api_key.get_secret_value())

-        response = aexa.websets.list(
+        response = await aexa.websets.list(
             cursor=input_data.cursor,
             limit=input_data.limit,
         )
@@ -678,7 +678,7 @@ class ExaGetWebsetBlock(Block):
     ) -> BlockOutput:
         aexa = AsyncExa(api_key=credentials.api_key.get_secret_value())

-        sdk_webset = aexa.websets.get(id=input_data.webset_id)
+        sdk_webset = await aexa.websets.get(id=input_data.webset_id)

         status_str = (
             sdk_webset.status.value
@@ -748,7 +748,7 @@ class ExaDeleteWebsetBlock(Block):
     ) -> BlockOutput:
         aexa = AsyncExa(api_key=credentials.api_key.get_secret_value())

-        deleted_webset = aexa.websets.delete(id=input_data.webset_id)
+        deleted_webset = await aexa.websets.delete(id=input_data.webset_id)

         status_str = (
             deleted_webset.status.value
@@ -798,7 +798,7 @@ class ExaCancelWebsetBlock(Block):
     ) -> BlockOutput:
         aexa = AsyncExa(api_key=credentials.api_key.get_secret_value())

-        canceled_webset = aexa.websets.cancel(id=input_data.webset_id)
+        canceled_webset = await aexa.websets.cancel(id=input_data.webset_id)

         status_str = (
             canceled_webset.status.value
@@ -968,7 +968,7 @@ class ExaPreviewWebsetBlock(Block):
             entity["description"] = input_data.entity_description
             payload["entity"] = entity

-        sdk_preview = aexa.websets.preview(params=payload)
+        sdk_preview = await aexa.websets.preview(params=payload)

         preview = PreviewWebsetModel.from_sdk(sdk_preview)
@@ -1051,7 +1051,7 @@ class ExaWebsetStatusBlock(Block):
     ) -> BlockOutput:
         aexa = AsyncExa(api_key=credentials.api_key.get_secret_value())

-        webset = aexa.websets.get(id=input_data.webset_id)
+        webset = await aexa.websets.get(id=input_data.webset_id)

         status = (
             webset.status.value
@@ -1185,7 +1185,7 @@ class ExaWebsetSummaryBlock(Block):
     ) -> BlockOutput:
         aexa = AsyncExa(api_key=credentials.api_key.get_secret_value())

-        webset = aexa.websets.get(id=input_data.webset_id)
+        webset = await aexa.websets.get(id=input_data.webset_id)

         # Extract basic info
         webset_id = webset.id
@@ -1211,7 +1211,7 @@
         total_items = 0
         if input_data.include_sample_items and input_data.sample_size > 0:
-            items_response = aexa.websets.items.list(
+            items_response = await aexa.websets.items.list(
                 webset_id=input_data.webset_id, limit=input_data.sample_size
             )
             sample_items_data = [
@@ -1362,7 +1362,7 @@ class ExaWebsetReadyCheckBlock(Block):
         aexa = AsyncExa(api_key=credentials.api_key.get_secret_value())

         # Get webset details
-        webset = aexa.websets.get(id=input_data.webset_id)
+        webset = await aexa.websets.get(id=input_data.webset_id)

         status = (
             webset.status.value

View File

@@ -202,7 +202,7 @@ class ExaCreateEnrichmentBlock(Block):
         # Use AsyncExa SDK
         aexa = AsyncExa(api_key=credentials.api_key.get_secret_value())

-        sdk_enrichment = aexa.websets.enrichments.create(
+        sdk_enrichment = await aexa.websets.enrichments.create(
             webset_id=input_data.webset_id, params=payload
         )
@@ -223,7 +223,7 @@ class ExaCreateEnrichmentBlock(Block):
             items_enriched = 0

             while time.time() - poll_start < input_data.polling_timeout:
-                current_enrich = aexa.websets.enrichments.get(
+                current_enrich = await aexa.websets.enrichments.get(
                     webset_id=input_data.webset_id, id=enrichment_id
                 )
                 current_status = (
@@ -234,7 +234,7 @@ class ExaCreateEnrichmentBlock(Block):
                 if current_status in ["completed", "failed", "cancelled"]:
                     # Estimate items from webset searches
-                    webset = aexa.websets.get(id=input_data.webset_id)
+                    webset = await aexa.websets.get(id=input_data.webset_id)
                     if webset.searches:
                         for search in webset.searches:
                             if search.progress:
@@ -329,7 +329,7 @@ class ExaGetEnrichmentBlock(Block):
         # Use AsyncExa SDK
         aexa = AsyncExa(api_key=credentials.api_key.get_secret_value())

-        sdk_enrichment = aexa.websets.enrichments.get(
+        sdk_enrichment = await aexa.websets.enrichments.get(
             webset_id=input_data.webset_id, id=input_data.enrichment_id
         )
@@ -474,7 +474,7 @@ class ExaDeleteEnrichmentBlock(Block):
         # Use AsyncExa SDK
         aexa = AsyncExa(api_key=credentials.api_key.get_secret_value())

-        deleted_enrichment = aexa.websets.enrichments.delete(
+        deleted_enrichment = await aexa.websets.enrichments.delete(
             webset_id=input_data.webset_id, id=input_data.enrichment_id
         )
@@ -525,13 +525,13 @@ class ExaCancelEnrichmentBlock(Block):
         # Use AsyncExa SDK
         aexa = AsyncExa(api_key=credentials.api_key.get_secret_value())

-        canceled_enrichment = aexa.websets.enrichments.cancel(
+        canceled_enrichment = await aexa.websets.enrichments.cancel(
             webset_id=input_data.webset_id, id=input_data.enrichment_id
         )

         # Try to estimate how many items were enriched before cancellation
         items_enriched = 0
-        items_response = aexa.websets.items.list(
+        items_response = await aexa.websets.items.list(
             webset_id=input_data.webset_id, limit=100
         )

View File

@@ -222,7 +222,7 @@ class ExaCreateImportBlock(Block):
     def _create_test_mock():
         """Create test mocks for the AsyncExa SDK."""
         from datetime import datetime
-        from unittest.mock import MagicMock
+        from unittest.mock import AsyncMock, MagicMock

         # Create mock SDK import object
         mock_import = MagicMock()
@@ -247,7 +247,7 @@ class ExaCreateImportBlock(Block):
         return {
             "_get_client": lambda *args, **kwargs: MagicMock(
                 websets=MagicMock(
-                    imports=MagicMock(create=lambda *args, **kwargs: mock_import)
+                    imports=MagicMock(create=AsyncMock(return_value=mock_import))
                 )
             )
         }
@@ -294,7 +294,7 @@ class ExaCreateImportBlock(Block):
         if input_data.metadata:
             payload["metadata"] = input_data.metadata

-        sdk_import = aexa.websets.imports.create(
+        sdk_import = await aexa.websets.imports.create(
             params=payload, csv_data=input_data.csv_data
         )
@@ -360,7 +360,7 @@ class ExaGetImportBlock(Block):
         # Use AsyncExa SDK
         aexa = AsyncExa(api_key=credentials.api_key.get_secret_value())

-        sdk_import = aexa.websets.imports.get(import_id=input_data.import_id)
+        sdk_import = await aexa.websets.imports.get(import_id=input_data.import_id)

         import_obj = ImportModel.from_sdk(sdk_import)
@@ -426,7 +426,7 @@ class ExaListImportsBlock(Block):
         # Use AsyncExa SDK
         aexa = AsyncExa(api_key=credentials.api_key.get_secret_value())

-        response = aexa.websets.imports.list(
+        response = await aexa.websets.imports.list(
             cursor=input_data.cursor,
             limit=input_data.limit,
         )
@@ -474,7 +474,9 @@ class ExaDeleteImportBlock(Block):
         # Use AsyncExa SDK
         aexa = AsyncExa(api_key=credentials.api_key.get_secret_value())

-        deleted_import = aexa.websets.imports.delete(import_id=input_data.import_id)
+        deleted_import = await aexa.websets.imports.delete(
+            import_id=input_data.import_id
+        )

         yield "import_id", deleted_import.id
         yield "success", "true"
@@ -573,14 +575,14 @@ class ExaExportWebsetBlock(Block):
             }
         )

-        # Create mock iterator
-        mock_items = [mock_item1, mock_item2]
+        # Create async iterator for list_all
+        async def async_item_iterator(*args, **kwargs):
+            for item in [mock_item1, mock_item2]:
+                yield item

         return {
             "_get_client": lambda *args, **kwargs: MagicMock(
-                websets=MagicMock(
-                    items=MagicMock(list_all=lambda *args, **kwargs: iter(mock_items))
-                )
+                websets=MagicMock(items=MagicMock(list_all=async_item_iterator))
             )
         }
@@ -602,7 +604,7 @@
             webset_id=input_data.webset_id, limit=input_data.max_items
         )

-        for sdk_item in item_iterator:
+        async for sdk_item in item_iterator:
             if len(all_items) >= input_data.max_items:
                 break
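
The mock changes in this file follow directly from the sync-to-async SDK migration: awaited methods need `AsyncMock`, and `async for` needs an async generator rather than a plain iterator. A condensed, runnable sketch of both patterns (the mock shapes are illustrative, not the real SDK):

```python
import asyncio
from unittest.mock import AsyncMock, MagicMock

client = MagicMock()
# AsyncMock makes the attribute awaitable, as `await client...create(...)` requires.
client.websets.imports.create = AsyncMock(return_value={"id": "imp_1"})

async def fake_list_all(*args, **kwargs):
    # An async generator function: calling it returns an async iterator
    # suitable for `async for`.
    for item in ("a", "b"):
        yield item

client.websets.items.list_all = fake_list_all

async def main():
    created = await client.websets.imports.create(params={})
    items = [i async for i in client.websets.items.list_all()]
    print(created, items)

asyncio.run(main())
```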

View File

@@ -178,7 +178,7 @@ class ExaGetWebsetItemBlock(Block):
     ) -> BlockOutput:
         aexa = AsyncExa(api_key=credentials.api_key.get_secret_value())

-        sdk_item = aexa.websets.items.get(
+        sdk_item = await aexa.websets.items.get(
             webset_id=input_data.webset_id, id=input_data.item_id
         )
@@ -269,7 +269,7 @@ class ExaListWebsetItemsBlock(Block):
             response = None
             while time.time() - start_time < input_data.wait_timeout:
-                response = aexa.websets.items.list(
+                response = await aexa.websets.items.list(
                     webset_id=input_data.webset_id,
                     cursor=input_data.cursor,
                     limit=input_data.limit,
@@ -282,13 +282,13 @@
                 interval = min(interval * 1.2, 10)

             if not response:
-                response = aexa.websets.items.list(
+                response = await aexa.websets.items.list(
                     webset_id=input_data.webset_id,
                     cursor=input_data.cursor,
                     limit=input_data.limit,
                 )
         else:
-            response = aexa.websets.items.list(
+            response = await aexa.websets.items.list(
                 webset_id=input_data.webset_id,
                 cursor=input_data.cursor,
                 limit=input_data.limit,
@@ -340,7 +340,7 @@ class ExaDeleteWebsetItemBlock(Block):
     ) -> BlockOutput:
         aexa = AsyncExa(api_key=credentials.api_key.get_secret_value())

-        deleted_item = aexa.websets.items.delete(
+        deleted_item = await aexa.websets.items.delete(
             webset_id=input_data.webset_id, id=input_data.item_id
         )
@@ -408,7 +408,7 @@ class ExaBulkWebsetItemsBlock(Block):
             webset_id=input_data.webset_id, limit=input_data.max_items
         )

-        for sdk_item in item_iterator:
+        async for sdk_item in item_iterator:
             if len(all_items) >= input_data.max_items:
                 break
@@ -475,7 +475,7 @@ class ExaWebsetItemsSummaryBlock(Block):
         # Use AsyncExa SDK
         aexa = AsyncExa(api_key=credentials.api_key.get_secret_value())

-        webset = aexa.websets.get(id=input_data.webset_id)
+        webset = await aexa.websets.get(id=input_data.webset_id)

         entity_type = "unknown"
         if webset.searches:
@@ -495,7 +495,7 @@
         # Get sample items if requested
         sample_items: List[WebsetItemModel] = []
         if input_data.sample_size > 0:
-            items_response = aexa.websets.items.list(
+            items_response = await aexa.websets.items.list(
                 webset_id=input_data.webset_id, limit=input_data.sample_size
             )
             # Convert to our stable models
@@ -569,7 +569,7 @@ class ExaGetNewItemsBlock(Block):
         aexa = AsyncExa(api_key=credentials.api_key.get_secret_value())

         # Get items starting from cursor
-        response = aexa.websets.items.list(
+        response = await aexa.websets.items.list(
             webset_id=input_data.webset_id,
             cursor=input_data.since_cursor,
             limit=input_data.max_items,

View File

@@ -233,7 +233,7 @@ class ExaCreateMonitorBlock(Block):
     def _create_test_mock():
         """Create test mocks for the AsyncExa SDK."""
         from datetime import datetime
-        from unittest.mock import MagicMock
+        from unittest.mock import AsyncMock, MagicMock

         # Create mock SDK monitor object
         mock_monitor = MagicMock()
@@ -263,7 +263,7 @@ class ExaCreateMonitorBlock(Block):
         return {
             "_get_client": lambda *args, **kwargs: MagicMock(
                 websets=MagicMock(
-                    monitors=MagicMock(create=lambda *args, **kwargs: mock_monitor)
+                    monitors=MagicMock(create=AsyncMock(return_value=mock_monitor))
                 )
             )
         }
@@ -320,7 +320,7 @@ class ExaCreateMonitorBlock(Block):
         if input_data.metadata:
             payload["metadata"] = input_data.metadata

-        sdk_monitor = aexa.websets.monitors.create(params=payload)
+        sdk_monitor = await aexa.websets.monitors.create(params=payload)

         monitor = MonitorModel.from_sdk(sdk_monitor)
@@ -384,7 +384,7 @@ class ExaGetMonitorBlock(Block):
         # Use AsyncExa SDK
         aexa = AsyncExa(api_key=credentials.api_key.get_secret_value())

-        sdk_monitor = aexa.websets.monitors.get(monitor_id=input_data.monitor_id)
+        sdk_monitor = await aexa.websets.monitors.get(monitor_id=input_data.monitor_id)

         monitor = MonitorModel.from_sdk(sdk_monitor)
@@ -476,7 +476,7 @@ class ExaUpdateMonitorBlock(Block):
         if input_data.metadata is not None:
             payload["metadata"] = input_data.metadata

-        sdk_monitor = aexa.websets.monitors.update(
+        sdk_monitor = await aexa.websets.monitors.update(
             monitor_id=input_data.monitor_id, params=payload
         )
@@ -522,7 +522,9 @@ class ExaDeleteMonitorBlock(Block):
         # Use AsyncExa SDK
         aexa = AsyncExa(api_key=credentials.api_key.get_secret_value())

-        deleted_monitor = aexa.websets.monitors.delete(monitor_id=input_data.monitor_id)
+        deleted_monitor = await aexa.websets.monitors.delete(
+            monitor_id=input_data.monitor_id
+        )

         yield "monitor_id", deleted_monitor.id
         yield "success", "true"
@@ -579,7 +581,7 @@ class ExaListMonitorsBlock(Block):
         # Use AsyncExa SDK
         aexa = AsyncExa(api_key=credentials.api_key.get_secret_value())

-        response = aexa.websets.monitors.list(
+        response = await aexa.websets.monitors.list(
             cursor=input_data.cursor,
             limit=input_data.limit,
             webset_id=input_data.webset_id,

View File

@@ -121,7 +121,7 @@ class ExaWaitForWebsetBlock(Block):
             WebsetTargetStatus.IDLE,
             WebsetTargetStatus.ANY_COMPLETE,
         ]:
-            final_webset = aexa.websets.wait_until_idle(
+            final_webset = await aexa.websets.wait_until_idle(
                 id=input_data.webset_id,
                 timeout=input_data.timeout,
                 poll_interval=input_data.check_interval,
@@ -164,7 +164,7 @@
         interval = input_data.check_interval
         while time.time() - start_time < input_data.timeout:
             # Get current webset status
-            webset = aexa.websets.get(id=input_data.webset_id)
+            webset = await aexa.websets.get(id=input_data.webset_id)
             current_status = (
                 webset.status.value
                 if hasattr(webset.status, "value")
@@ -209,7 +209,7 @@
         # Timeout reached
         elapsed = time.time() - start_time
-        webset = aexa.websets.get(id=input_data.webset_id)
+        webset = await aexa.websets.get(id=input_data.webset_id)
         final_status = (
             webset.status.value
             if hasattr(webset.status, "value")
@@ -345,7 +345,7 @@ class ExaWaitForSearchBlock(Block):
         try:
             while time.time() - start_time < input_data.timeout:
                 # Get current search status using SDK
-                search = aexa.websets.searches.get(
+                search = await aexa.websets.searches.get(
                     webset_id=input_data.webset_id, id=input_data.search_id
                 )
@@ -401,7 +401,7 @@
             elapsed = time.time() - start_time
             # Get last known status
-            search = aexa.websets.searches.get(
+            search = await aexa.websets.searches.get(
                 webset_id=input_data.webset_id, id=input_data.search_id
             )
             final_status = (
@@ -503,7 +503,7 @@ class ExaWaitForEnrichmentBlock(Block):
         try:
             while time.time() - start_time < input_data.timeout:
                 # Get current enrichment status using SDK
-                enrichment = aexa.websets.enrichments.get(
+                enrichment = await aexa.websets.enrichments.get(
                     webset_id=input_data.webset_id, id=input_data.enrichment_id
                 )
@@ -548,7 +548,7 @@
             elapsed = time.time() - start_time
             # Get last known status
-            enrichment = aexa.websets.enrichments.get(
+            enrichment = await aexa.websets.enrichments.get(
                 webset_id=input_data.webset_id, id=input_data.enrichment_id
             )
             final_status = (
@@ -575,7 +575,7 @@
     ) -> tuple[list[SampleEnrichmentModel], int]:
         """Get sample enriched data and count."""
         # Get a few items to see enrichment results using SDK
-        response = aexa.websets.items.list(webset_id=webset_id, limit=5)
+        response = await aexa.websets.items.list(webset_id=webset_id, limit=5)

         sample_data: list[SampleEnrichmentModel] = []
         enriched_count = 0

View File

@@ -317,7 +317,7 @@ class ExaCreateWebsetSearchBlock(Block):
         aexa = AsyncExa(api_key=credentials.api_key.get_secret_value())

-        sdk_search = aexa.websets.searches.create(
+        sdk_search = await aexa.websets.searches.create(
             webset_id=input_data.webset_id, params=payload
         )
@@ -350,7 +350,7 @@ class ExaCreateWebsetSearchBlock(Block):
             poll_start = time.time()

             while time.time() - poll_start < input_data.polling_timeout:
-                current_search = aexa.websets.searches.get(
+                current_search = await aexa.websets.searches.get(
                     webset_id=input_data.webset_id, id=search_id
                 )
                 current_status = (
@@ -442,7 +442,7 @@ class ExaGetWebsetSearchBlock(Block):
         # Use AsyncExa SDK
         aexa = AsyncExa(api_key=credentials.api_key.get_secret_value())

-        sdk_search = aexa.websets.searches.get(
+        sdk_search = await aexa.websets.searches.get(
             webset_id=input_data.webset_id, id=input_data.search_id
         )
@@ -523,7 +523,7 @@ class ExaCancelWebsetSearchBlock(Block):
         # Use AsyncExa SDK
         aexa = AsyncExa(api_key=credentials.api_key.get_secret_value())

-        canceled_search = aexa.websets.searches.cancel(
+        canceled_search = await aexa.websets.searches.cancel(
             webset_id=input_data.webset_id, id=input_data.search_id
         )
@@ -604,7 +604,7 @@ class ExaFindOrCreateSearchBlock(Block):
         aexa = AsyncExa(api_key=credentials.api_key.get_secret_value())

         # Get webset to check existing searches
-        webset = aexa.websets.get(id=input_data.webset_id)
+        webset = await aexa.websets.get(id=input_data.webset_id)

         # Look for existing search with same query
         existing_search = None
@@ -636,7 +636,7 @@
         if input_data.entity_type != SearchEntityType.AUTO:
             payload["entity"] = {"type": input_data.entity_type.value}

-        sdk_search = aexa.websets.searches.create(
+        sdk_search = await aexa.websets.searches.create(
             webset_id=input_data.webset_id, params=payload
         )

View File

@@ -596,10 +596,10 @@ def extract_openai_tool_calls(response) -> list[ToolContentBlock] | None:
 def get_parallel_tool_calls_param(
     llm_model: LlmModel, parallel_tool_calls: bool | None
-):
+) -> bool | openai.Omit:
     """Get the appropriate parallel_tool_calls parameter for OpenAI-compatible APIs."""
     if llm_model.startswith("o") or parallel_tool_calls is None:
-        return openai.NOT_GIVEN
+        return openai.omit
     return parallel_tool_calls
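
For context on the sentinel swap above: in current `openai` Python SDK versions, `omit` tells the client to leave the field out of the request body entirely rather than send an explicit null, which is why the helper returns it instead of `None`. A hypothetical call site for the helper (client setup and model name are assumptions for illustration):

```python
import openai

client = openai.AsyncOpenAI()

async def call_llm(llm_model, parallel_tool_calls: bool | None):
    # Returns either a concrete bool or `openai.omit`; omitted fields are
    # dropped from the outgoing JSON payload by the SDK.
    param = get_parallel_tool_calls_param(llm_model, parallel_tool_calls)
    return await client.chat.completions.create(
        model="gpt-4o",
        messages=[{"role": "user", "content": "hi"}],
        parallel_tool_calls=param,
    )
```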

View File

@@ -246,7 +246,9 @@ class BlockSchema(BaseModel):
f"is not of type {CredentialsMetaInput.__name__}" f"is not of type {CredentialsMetaInput.__name__}"
) )
credentials_fields[field_name].validate_credentials_field_schema(cls) CredentialsMetaInput.validate_credentials_field_schema(
cls.get_field_schema(field_name), field_name
)
elif field_name in credentials_fields: elif field_name in credentials_fields:
raise KeyError( raise KeyError(

View File

@@ -3,7 +3,7 @@ import logging
import uuid import uuid
from collections import defaultdict from collections import defaultdict
from datetime import datetime, timezone from datetime import datetime, timezone
from typing import TYPE_CHECKING, Annotated, Any, Literal, Optional, cast from typing import TYPE_CHECKING, Annotated, Any, Literal, Optional, Self, cast
from prisma.enums import SubmissionStatus from prisma.enums import SubmissionStatus
from prisma.models import ( from prisma.models import (
@@ -20,7 +20,7 @@ from prisma.types import (
AgentNodeLinkCreateInput, AgentNodeLinkCreateInput,
StoreListingVersionWhereInput, StoreListingVersionWhereInput,
) )
from pydantic import BaseModel, BeforeValidator, Field, create_model from pydantic import BaseModel, BeforeValidator, Field
from pydantic.fields import computed_field from pydantic.fields import computed_field
from backend.blocks.agent import AgentExecutorBlock from backend.blocks.agent import AgentExecutorBlock
@@ -30,7 +30,6 @@ from backend.data.db import prisma as db
from backend.data.dynamic_fields import is_tool_pin, sanitize_pin_name from backend.data.dynamic_fields import is_tool_pin, sanitize_pin_name
from backend.data.includes import MAX_GRAPH_VERSIONS_FETCH from backend.data.includes import MAX_GRAPH_VERSIONS_FETCH
from backend.data.model import ( from backend.data.model import (
CredentialsField,
CredentialsFieldInfo, CredentialsFieldInfo,
CredentialsMetaInput, CredentialsMetaInput,
is_credentials_field_name, is_credentials_field_name,
@@ -45,7 +44,6 @@ from .block import (
AnyBlockSchema, AnyBlockSchema,
Block, Block,
BlockInput, BlockInput,
BlockSchema,
BlockType, BlockType,
EmptySchema, EmptySchema,
get_block, get_block,
@@ -113,10 +111,12 @@ class Link(BaseDbModel):
class Node(BaseDbModel): class Node(BaseDbModel):
block_id: str block_id: str
input_default: BlockInput = {} # dict[input_name, default_value] input_default: BlockInput = Field( # dict[input_name, default_value]
metadata: dict[str, Any] = {} default_factory=dict
input_links: list[Link] = [] )
output_links: list[Link] = [] metadata: dict[str, Any] = Field(default_factory=dict)
input_links: list[Link] = Field(default_factory=list)
output_links: list[Link] = Field(default_factory=list)
@property @property
def credentials_optional(self) -> bool: def credentials_optional(self) -> bool:
@@ -221,18 +221,33 @@ class NodeModel(Node):
return result return result
class BaseGraph(BaseDbModel): class GraphBaseMeta(BaseDbModel):
"""
Shared base for `GraphMeta` and `BaseGraph`, with core graph metadata fields.
"""
version: int = 1 version: int = 1
is_active: bool = True is_active: bool = True
name: str name: str
description: str description: str
instructions: str | None = None instructions: str | None = None
recommended_schedule_cron: str | None = None recommended_schedule_cron: str | None = None
nodes: list[Node] = []
links: list[Link] = []
forked_from_id: str | None = None forked_from_id: str | None = None
forked_from_version: int | None = None forked_from_version: int | None = None
class BaseGraph(GraphBaseMeta):
"""
Graph with nodes, links, and computed I/O schema fields.
Used to represent sub-graphs within a `Graph`. Contains the full graph
structure including nodes and links, plus computed fields for schemas
and trigger info. Does NOT include user_id or created_at (see GraphModel).
"""
nodes: list[Node] = Field(default_factory=list)
links: list[Link] = Field(default_factory=list)
@computed_field @computed_field
@property @property
def input_schema(self) -> dict[str, Any]: def input_schema(self) -> dict[str, Any]:
@@ -361,44 +376,79 @@ class GraphTriggerInfo(BaseModel):
class Graph(BaseGraph): class Graph(BaseGraph):
sub_graphs: list[BaseGraph] = [] # Flattened sub-graphs """Creatable graph model used in API create/update endpoints."""
sub_graphs: list[BaseGraph] = Field(default_factory=list) # Flattened sub-graphs
class GraphMeta(GraphBaseMeta):
"""
Lightweight graph metadata model representing an existing graph from the database,
for use in listings and summaries.
Lacks `GraphModel`'s nodes, links, and expensive computed fields.
Use for list endpoints where full graph data is not needed and performance matters.
"""
id: str # type: ignore
version: int # type: ignore
user_id: str
created_at: datetime
@classmethod
def from_db(cls, graph: "AgentGraph") -> Self:
return cls(
id=graph.id,
version=graph.version,
is_active=graph.isActive,
name=graph.name or "",
description=graph.description or "",
instructions=graph.instructions,
recommended_schedule_cron=graph.recommendedScheduleCron,
forked_from_id=graph.forkedFromId,
forked_from_version=graph.forkedFromVersion,
user_id=graph.userId,
created_at=graph.createdAt,
)
class GraphModel(Graph, GraphMeta):
"""
Full graph model representing an existing graph from the database.
This is the primary model for working with persisted graphs. Includes all
graph data (nodes, links, sub_graphs) plus user ownership and timestamps.
Provides computed fields (input_schema, output_schema, etc.) used during
set-up (frontend) and execution (backend).
Inherits from:
- `Graph`: provides structure (nodes, links, sub_graphs) and computed schemas
- `GraphMeta`: provides user_id, created_at for database records
"""
nodes: list[NodeModel] = Field(default_factory=list) # type: ignore
@property
def starting_nodes(self) -> list[NodeModel]:
outbound_nodes = {link.sink_id for link in self.links}
input_nodes = {
node.id for node in self.nodes if node.block.block_type == BlockType.INPUT
}
return [
node
for node in self.nodes
if node.id not in outbound_nodes or node.id in input_nodes
]
@property
def webhook_input_node(self) -> NodeModel | None: # type: ignore
return cast(NodeModel, super().webhook_input_node)
@computed_field @computed_field
@property @property
def credentials_input_schema(self) -> dict[str, Any]: def credentials_input_schema(self) -> dict[str, Any]:
schema = self._credentials_input_schema.jsonschema()
# Determine which credential fields are required based on credentials_optional metadata
graph_credentials_inputs = self.aggregate_credentials_inputs() graph_credentials_inputs = self.aggregate_credentials_inputs()
required_fields = []
# Build a map of node_id -> node for quick lookup
all_nodes = {node.id: node for node in self.nodes}
for sub_graph in self.sub_graphs:
for node in sub_graph.nodes:
all_nodes[node.id] = node
for field_key, (
_field_info,
node_field_pairs,
) in graph_credentials_inputs.items():
# A field is required if ANY node using it has credentials_optional=False
is_required = False
for node_id, _field_name in node_field_pairs:
node = all_nodes.get(node_id)
if node and not node.credentials_optional:
is_required = True
break
if is_required:
required_fields.append(field_key)
schema["required"] = required_fields
return schema
@property
def _credentials_input_schema(self) -> type[BlockSchema]:
graph_credentials_inputs = self.aggregate_credentials_inputs()
logger.debug( logger.debug(
f"Combined credentials input fields for graph #{self.id} ({self.name}): " f"Combined credentials input fields for graph #{self.id} ({self.name}): "
f"{graph_credentials_inputs}" f"{graph_credentials_inputs}"
@@ -406,8 +456,8 @@ class Graph(BaseGraph):
# Warn if same-provider credentials inputs can't be combined (= bad UX) # Warn if same-provider credentials inputs can't be combined (= bad UX)
graph_cred_fields = list(graph_credentials_inputs.values()) graph_cred_fields = list(graph_credentials_inputs.values())
for i, (field, keys) in enumerate(graph_cred_fields): for i, (field, keys, _) in enumerate(graph_cred_fields):
for other_field, other_keys in list(graph_cred_fields)[i + 1 :]: for other_field, other_keys, _ in list(graph_cred_fields)[i + 1 :]:
if field.provider != other_field.provider: if field.provider != other_field.provider:
continue continue
if ProviderName.HTTP in field.provider: if ProviderName.HTTP in field.provider:
@@ -423,31 +473,78 @@ class Graph(BaseGraph):
f"keys: {keys} <> {other_keys}." f"keys: {keys} <> {other_keys}."
) )
fields: dict[str, tuple[type[CredentialsMetaInput], CredentialsMetaInput]] = { # Build JSON schema directly to avoid expensive create_model + validation overhead
agg_field_key: ( properties = {}
CredentialsMetaInput[ required_fields = []
Literal[tuple(field_info.provider)], # type: ignore
Literal[tuple(field_info.supported_types)], # type: ignore for agg_field_key, (
], field_info,
CredentialsField( _,
required_scopes=set(field_info.required_scopes or []), is_required,
discriminator=field_info.discriminator, ) in graph_credentials_inputs.items():
discriminator_mapping=field_info.discriminator_mapping, providers = list(field_info.provider)
discriminator_values=field_info.discriminator_values, cred_types = list(field_info.supported_types)
field_schema: dict[str, Any] = {
"credentials_provider": providers,
"credentials_types": cred_types,
"type": "object",
"properties": {
"id": {"title": "Id", "type": "string"},
"title": {
"anyOf": [{"type": "string"}, {"type": "null"}],
"default": None,
"title": "Title",
},
"provider": {
"title": "Provider",
"type": "string",
**(
{"enum": providers}
if len(providers) > 1
else {"const": providers[0]}
), ),
) },
for agg_field_key, (field_info, _) in graph_credentials_inputs.items() "type": {
"title": "Type",
"type": "string",
**(
{"enum": cred_types}
if len(cred_types) > 1
else {"const": cred_types[0]}
),
},
},
"required": ["id", "provider", "type"],
} }
return create_model( # Add other (optional) field info items
self.name.replace(" ", "") + "CredentialsInputSchema", field_schema.update(
__base__=BlockSchema, field_info.model_dump(
**fields, # type: ignore by_alias=True,
exclude_defaults=True,
exclude={"provider", "supported_types"}, # already included above
) )
)
# Ensure field schema is well-formed
CredentialsMetaInput.validate_credentials_field_schema(
field_schema, agg_field_key
)
properties[agg_field_key] = field_schema
if is_required:
required_fields.append(agg_field_key)
return {
"type": "object",
"properties": properties,
"required": required_fields,
}
def aggregate_credentials_inputs( def aggregate_credentials_inputs(
self, self,
) -> dict[str, tuple[CredentialsFieldInfo, set[tuple[str, str]]]]: ) -> dict[str, tuple[CredentialsFieldInfo, set[tuple[str, str]], bool]]:
""" """
Returns: Returns:
dict[aggregated_field_key, tuple( dict[aggregated_field_key, tuple(
@@ -455,13 +552,19 @@ class Graph(BaseGraph):
(now includes discriminator_values from matching nodes) (now includes discriminator_values from matching nodes)
set[(node_id, field_name)]: Node credentials fields that are set[(node_id, field_name)]: Node credentials fields that are
compatible with this aggregated field spec compatible with this aggregated field spec
bool: True if the field is required (any node has credentials_optional=False)
)] )]
""" """
# First collect all credential field data with input defaults # First collect all credential field data with input defaults
node_credential_data = [] # Track (field_info, (node_id, field_name), is_required) for each credential field
node_credential_data: list[tuple[CredentialsFieldInfo, tuple[str, str]]] = []
node_required_map: dict[str, bool] = {} # node_id -> is_required
for graph in [self] + self.sub_graphs: for graph in [self] + self.sub_graphs:
for node in graph.nodes: for node in graph.nodes:
# Track if this node requires credentials (credentials_optional=False means required)
node_required_map[node.id] = not node.credentials_optional
for ( for (
field_name, field_name,
field_info, field_info,
@@ -485,37 +588,21 @@ class Graph(BaseGraph):
) )
# Combine credential field info (this will merge discriminator_values automatically) # Combine credential field info (this will merge discriminator_values automatically)
return CredentialsFieldInfo.combine(*node_credential_data) combined = CredentialsFieldInfo.combine(*node_credential_data)
+        # Add is_required flag to each aggregated field
+        # A field is required if ANY node using it has credentials_optional=False
+        return {
+            key: (
+                field_info,
+                node_field_pairs,
+                any(
+                    node_required_map.get(node_id, True)
+                    for node_id, _ in node_field_pairs
+                ),
+            )
+            for key, (field_info, node_field_pairs) in combined.items()
+        }
-class GraphModel(Graph):
-    user_id: str
-    nodes: list[NodeModel] = []  # type: ignore
-    created_at: datetime
-
-    @property
-    def starting_nodes(self) -> list[NodeModel]:
-        outbound_nodes = {link.sink_id for link in self.links}
-        input_nodes = {
-            node.id for node in self.nodes if node.block.block_type == BlockType.INPUT
-        }
-        return [
-            node
-            for node in self.nodes
-            if node.id not in outbound_nodes or node.id in input_nodes
-        ]
-
-    @property
-    def webhook_input_node(self) -> NodeModel | None:  # type: ignore
-        return cast(NodeModel, super().webhook_input_node)
-
-    def meta(self) -> "GraphMeta":
-        """
-        Returns a GraphMeta object with metadata about the graph.
-        This is used to return metadata about the graph without exposing nodes and links.
-        """
-        return GraphMeta.from_graph(self)
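
A minimal sketch of consuming the new three-element tuple returned by `aggregate_credentials_inputs`; `graph` is a hypothetical `GraphModel` instance, and the printed fields follow the structures visible in the hunk above.

```python
# Sketch: iterate the aggregated credentials inputs, which now carry a
# required-ness flag as the third tuple element.
for field_key, (field_info, node_fields, is_required) in (
    graph.aggregate_credentials_inputs().items()
):
    print(
        f"{field_key}: providers={list(field_info.provider)}, "
        f"used by {len(node_fields)} node field(s), required={is_required}"
    )
```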
     def reassign_ids(self, user_id: str, reassign_graph_id: bool = False):
         """
@@ -799,13 +886,14 @@ class GraphModel(Graph):
         if is_static_output_block(link.source_id):
             link.is_static = True  # Each value block output should be static.
-    @staticmethod
-    def from_db(
+    @classmethod
+    def from_db(  # type: ignore[reportIncompatibleMethodOverride]
+        cls,
         graph: AgentGraph,
         for_export: bool = False,
         sub_graphs: list[AgentGraph] | None = None,
-    ) -> "GraphModel":
-        return GraphModel(
+    ) -> Self:
+        return cls(
             id=graph.id,
             user_id=graph.userId if not for_export else "",
             version=graph.version,
@@ -831,17 +919,28 @@ class GraphModel(Graph):
         ],
     )
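
The switch from `@staticmethod` to `@classmethod` with a `Self` return type means subclasses (like `GraphModelWithoutNodes` below) get correctly-typed construction for free. A standalone sketch of the pattern, with illustrative names not from the codebase (requires Python 3.11+ for `typing.Self`):

```python
from typing import Self

class Base:
    def __init__(self, raw: dict):
        self.raw = raw

    @classmethod
    def from_db(cls, raw: dict) -> Self:
        # `cls` is the actual subclass, so Sub.from_db(...) is typed as Sub
        return cls(raw)

class Sub(Base):
    pass

obj = Sub.from_db({"id": "x"})
assert type(obj) is Sub  # a @staticmethod returning Base could not do this
```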
+    def hide_nodes(self) -> "GraphModelWithoutNodes":
+        """
+        Returns a copy of the `GraphModel` with nodes, links, and sub-graphs hidden
+        (excluded from serialization). They are still present in the model instance
+        so all computed fields (e.g. `credentials_input_schema`) still work.
+        """
+        return GraphModelWithoutNodes.model_validate(self, from_attributes=True)
-class GraphMeta(Graph):
-    user_id: str
-    # Easy work-around to prevent exposing nodes and links in the API response
-    nodes: list[NodeModel] = Field(default=[], exclude=True)  # type: ignore
-    links: list[Link] = Field(default=[], exclude=True)
-
-    @staticmethod
-    def from_graph(graph: GraphModel) -> "GraphMeta":
-        return GraphMeta(**graph.model_dump())
+class GraphModelWithoutNodes(GraphModel):
+    """
+    GraphModel variant that excludes nodes, links, and sub-graphs from serialization.
+    Used in contexts like the store where exposing internal graph structure
+    is not desired. Inherits all computed fields from GraphModel but marks
+    nodes and links as excluded from JSON output.
+    """
+    nodes: list[NodeModel] = Field(default_factory=list, exclude=True)
+    links: list[Link] = Field(default_factory=list, exclude=True)
+    sub_graphs: list[BaseGraph] = Field(default_factory=list, exclude=True)
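
A self-contained sketch of the `Field(exclude=True)` mechanism this class relies on: excluded fields stay available on the instance (so computed fields can still read them) but drop out of serialized output. Model and field names here are illustrative.

```python
from pydantic import BaseModel, Field

class Item(BaseModel):
    name: str
    # Present on the instance, omitted from model_dump()/JSON output
    internals: list[str] = Field(default_factory=list, exclude=True)

item = Item(name="demo", internals=["a", "b"])
assert item.internals == ["a", "b"]           # still accessible in code
assert item.model_dump() == {"name": "demo"}  # excluded from serialization
```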
 class GraphsPaginated(BaseModel):
@@ -912,21 +1011,11 @@ async def list_graphs_paginated(
         where=where_clause,
         distinct=["id"],
         order={"version": "desc"},
-        include=AGENT_GRAPH_INCLUDE,
         skip=offset,
         take=page_size,
     )
-    graph_models: list[GraphMeta] = []
-    for graph in graphs:
-        try:
-            graph_meta = GraphModel.from_db(graph).meta()
-            # Trigger serialization to validate that the graph is well formed
-            graph_meta.model_dump()
-            graph_models.append(graph_meta)
-        except Exception as e:
-            logger.error(f"Error processing graph {graph.id}: {e}")
-            continue
+    graph_models = [GraphMeta.from_db(graph) for graph in graphs]
     return GraphsPaginated(
         graphs=graph_models,
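
The listing path no longer hydrates nodes (`include=AGENT_GRAPH_INCLUDE` is dropped) or round-trips every graph through full-model serialization. The lightweight `GraphMeta.from_db` lives in a part of the diff not shown here; the sketch below is only a plausible shape for it, assuming Prisma-style camelCase columns as seen in `GraphModel.from_db` above (e.g. `graph.userId`); the real implementation may differ.

```python
@classmethod
def from_db(cls, graph: AgentGraph) -> "GraphMeta":
    # Sketch only: copy scalar metadata straight off the DB record,
    # never touching nodes/links. Column names are assumptions.
    return cls(
        id=graph.id,
        version=graph.version,
        is_active=graph.isActive,    # assumed Prisma camelCase column
        name=graph.name or "",
        description=graph.description or "",
        user_id=graph.userId,
        created_at=graph.createdAt,  # assumed Prisma camelCase column
    )
```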

View File

@@ -163,7 +163,6 @@ class User(BaseModel):
 if TYPE_CHECKING:
     from prisma.models import User as PrismaUser
-    from backend.data.block import BlockSchema

 T = TypeVar("T")
 logger = logging.getLogger(__name__)
@@ -508,15 +507,13 @@ class CredentialsMetaInput(BaseModel, Generic[CP, CT]):
     def allowed_cred_types(cls) -> tuple[CredentialsType, ...]:
         return get_args(cls.model_fields["type"].annotation)
-    @classmethod
-    def validate_credentials_field_schema(cls, model: type["BlockSchema"]):
+    @staticmethod
+    def validate_credentials_field_schema(
+        field_schema: dict[str, Any], field_name: str
+    ):
         """Validates the schema of a credentials input field"""
-        field_name = next(
-            name for name, type in model.get_credentials_fields().items() if type is cls
-        )
-        field_schema = model.jsonschema()["properties"][field_name]
         try:
-            schema_extra = CredentialsFieldInfo[CP, CT].model_validate(field_schema)
+            field_info = CredentialsFieldInfo[CP, CT].model_validate(field_schema)
         except ValidationError as e:
             if "Field required [type=missing" not in str(e):
                 raise
@@ -526,11 +523,11 @@ class CredentialsMetaInput(BaseModel, Generic[CP, CT]):
                 f"{field_schema}"
             ) from e
-        providers = cls.allowed_providers()
+        providers = field_info.provider
         if (
             providers is not None
             and len(providers) > 1
-            and not schema_extra.discriminator
+            and not field_info.discriminator
         ):
             raise TypeError(
                 f"Multi-provider CredentialsField '{field_name}' "

View File

@@ -373,7 +373,7 @@ def make_node_credentials_input_map(
     # Get aggregated credentials fields for the graph
     graph_cred_inputs = graph.aggregate_credentials_inputs()
-    for graph_input_name, (_, compatible_node_fields) in graph_cred_inputs.items():
+    for graph_input_name, (_, compatible_node_fields, _) in graph_cred_inputs.items():
         # Best-effort map: skip missing items
         if graph_input_name not in graph_credentials_input:
             continue

File diff suppressed because it is too large

View File

@@ -21,7 +21,7 @@ cryptography = "^45.0"
 discord-py = "^2.5.2"
 e2b-code-interpreter = "^1.5.2"
 elevenlabs = "^1.50.0"
-fastapi = "^0.116.1"
+fastapi = "^0.128.0"
 feedparser = "^6.0.11"
 flake8 = "^7.3.0"
 google-api-python-client = "^2.177.0"
@@ -35,7 +35,7 @@ jinja2 = "^3.1.6"
 jsonref = "^1.1.0"
 jsonschema = "^4.25.0"
 langfuse = "^3.11.0"
-launchdarkly-server-sdk = "^9.12.0"
+launchdarkly-server-sdk = "^9.14.1"
 mem0ai = "^0.1.115"
 moviepy = "^2.1.2"
 ollama = "^0.5.1"
@@ -52,8 +52,8 @@ prometheus-client = "^0.22.1"
 prometheus-fastapi-instrumentator = "^7.0.0"
 psutil = "^7.0.0"
 psycopg2-binary = "^2.9.10"
-pydantic = { extras = ["email"], version = "^2.11.7" }
-pydantic-settings = "^2.10.1"
+pydantic = { extras = ["email"], version = "^2.12.5" }
+pydantic-settings = "^2.12.0"
 pytest = "^8.4.1"
 pytest-asyncio = "^1.1.0"
 python-dotenv = "^1.1.1"
@@ -65,11 +65,11 @@ sentry-sdk = {extras = ["anthropic", "fastapi", "launchdarkly", "openai", "sqlal
 sqlalchemy = "^2.0.40"
 strenum = "^0.4.9"
 stripe = "^11.5.0"
-supabase = "2.17.0"
+supabase = "2.27.2"
 tenacity = "^9.1.2"
 todoist-api-python = "^2.1.7"
 tweepy = "^4.16.0"
-uvicorn = { extras = ["standard"], version = "^0.35.0" }
+uvicorn = { extras = ["standard"], version = "^0.40.0" }
 websockets = "^15.0"
 youtube-transcript-api = "^1.2.1"
 yt-dlp = "2025.12.08"

View File

@@ -3,7 +3,6 @@
     "credentials_input_schema": {
       "properties": {},
       "required": [],
-      "title": "TestGraphCredentialsInputSchema",
       "type": "object"
     },
     "description": "A test graph",

View File

@@ -1,34 +1,14 @@
 [
   {
-    "credentials_input_schema": {
-      "properties": {},
-      "required": [],
-      "title": "TestGraphCredentialsInputSchema",
-      "type": "object"
-    },
+    "created_at": "2025-09-04T13:37:00",
     "description": "A test graph",
     "forked_from_id": null,
     "forked_from_version": null,
-    "has_external_trigger": false,
-    "has_human_in_the_loop": false,
-    "has_sensitive_action": false,
     "id": "graph-123",
-    "input_schema": {
-      "properties": {},
-      "required": [],
-      "type": "object"
-    },
     "instructions": null,
     "is_active": true,
     "name": "Test Graph",
-    "output_schema": {
-      "properties": {},
-      "required": [],
-      "type": "object"
-    },
     "recommended_schedule_cron": null,
-    "sub_graphs": [],
-    "trigger_setup_info": null,
     "user_id": "3e53486c-cf57-477e-ba2a-cb02dc828e1a",
     "version": 1
   }

View File

@@ -1,5 +1,5 @@
 import { CredentialsMetaInput } from "@/app/api/__generated__/models/credentialsMetaInput";
-import { GraphMeta } from "@/app/api/__generated__/models/graphMeta";
+import { GraphModel } from "@/app/api/__generated__/models/graphModel";
 import { CredentialsInput } from "@/components/contextual/CredentialsInput/CredentialsInput";
 import { useState } from "react";
 import { getSchemaDefaultCredentials } from "../../helpers";
@@ -9,7 +9,7 @@ type Credential = CredentialsMetaInput | undefined;
 type Credentials = Record<string, Credential>;
 type Props = {
-  agent: GraphMeta | null;
+  agent: GraphModel | null;
   siblingInputs?: Record<string, any>;
   onCredentialsChange: (
     credentials: Record<string, CredentialsMetaInput>,

View File

@@ -1,9 +1,9 @@
 import { CredentialsMetaInput } from "@/app/api/__generated__/models/credentialsMetaInput";
-import { GraphMeta } from "@/app/api/__generated__/models/graphMeta";
+import { GraphModel } from "@/app/api/__generated__/models/graphModel";
 import { BlockIOCredentialsSubSchema } from "@/lib/autogpt-server-api/types";
 export function getCredentialFields(
-  agent: GraphMeta | null,
+  agent: GraphModel | null,
 ): AgentCredentialsFields {
   if (!agent) return {};

View File

@@ -3,10 +3,10 @@ import type {
   CredentialsMetaInput,
 } from "@/lib/autogpt-server-api/types";
 import type { InputValues } from "./types";
-import { GraphMeta } from "@/app/api/__generated__/models/graphMeta";
+import { GraphModel } from "@/app/api/__generated__/models/graphModel";
 export function computeInitialAgentInputs(
-  agent: GraphMeta | null,
+  agent: GraphModel | null,
   existingInputs?: InputValues | null,
 ): InputValues {
   const properties = agent?.input_schema?.properties || {};
@@ -29,7 +29,7 @@ export function computeInitialAgentInputs(
 }
 type IsRunDisabledParams = {
-  agent: GraphMeta | null;
+  agent: GraphModel | null;
   isRunning: boolean;
   agentInputs: InputValues | null | undefined;
 };

View File

@@ -30,6 +30,8 @@ import {
 } from "@/components/atoms/Tooltip/BaseTooltip";
 import { GraphMeta } from "@/lib/autogpt-server-api";
 import jaro from "jaro-winkler";
+import { getV1GetSpecificGraph } from "@/app/api/__generated__/endpoints/graphs/graphs";
+import { okData } from "@/app/api/helpers";

 type _Block = Omit<Block, "inputSchema" | "outputSchema"> & {
   uiKey?: string;
@@ -107,6 +109,8 @@ export function BlocksControl({
     .filter((b) => b.uiType !== BlockUIType.AGENT)
     .sort((a, b) => a.name.localeCompare(b.name));
+  // Agent blocks are created from GraphMeta which doesn't include schemas.
+  // Schemas will be fetched on-demand when the block is actually added.
   const agentBlockList = flows
     .map((flow): _Block => {
       return {
@@ -116,8 +120,9 @@ export function BlocksControl({
           `Ver.${flow.version}` +
           (flow.description ? ` | ${flow.description}` : ""),
         categories: [{ category: "AGENT", description: "" }],
-        inputSchema: flow.input_schema,
-        outputSchema: flow.output_schema,
+        // Empty schemas - will be populated when block is added
+        inputSchema: { type: "object", properties: {} },
+        outputSchema: { type: "object", properties: {} },
         staticOutput: false,
         uiType: BlockUIType.AGENT,
         costs: [],
@@ -125,8 +130,7 @@ export function BlocksControl({
         hardcodedValues: {
           graph_id: flow.id,
           graph_version: flow.version,
-          input_schema: flow.input_schema,
-          output_schema: flow.output_schema,
+          // Schemas will be fetched on-demand when block is added
         },
       };
     })
@@ -182,6 +186,37 @@ export function BlocksControl({
     setSelectedCategory(null);
   }, []);
+  // Handler to add a block, fetching graph data on-demand for agent blocks
+  const handleAddBlock = useCallback(
+    async (block: _Block & { notAvailable: string | null }) => {
+      if (block.notAvailable) return;
+      // For agent blocks, fetch the full graph to get schemas
+      if (block.uiType === BlockUIType.AGENT && block.hardcodedValues) {
+        const graphID = block.hardcodedValues.graph_id as string;
+        const graphVersion = block.hardcodedValues.graph_version as number;
+        const graphData = okData(
+          await getV1GetSpecificGraph(graphID, { version: graphVersion }),
+        );
+        if (graphData) {
+          addBlock(block.id, block.name, {
+            ...block.hardcodedValues,
+            input_schema: graphData.input_schema,
+            output_schema: graphData.output_schema,
+          });
+        } else {
+          // Fallback: add without schemas (will be incomplete)
+          console.error("Failed to fetch graph data for agent block");
+          addBlock(block.id, block.name, block.hardcodedValues || {});
+        }
+      } else {
+        addBlock(block.id, block.name, block.hardcodedValues || {});
+      }
+    },
+    [addBlock],
+  );
   // Extract unique categories from blocks
   const categories = useMemo(() => {
     return Array.from(
@@ -303,10 +338,7 @@ export function BlocksControl({
       }),
     );
   }}
-  onClick={() =>
-    !block.notAvailable &&
-    addBlock(block.id, block.name, block?.hardcodedValues || {})
-  }
+  onClick={() => handleAddBlock(block)}
   title={block.notAvailable ?? undefined}
 >
   <div

View File

@@ -29,13 +29,17 @@ import "@xyflow/react/dist/style.css";
 import { ConnectedEdge, CustomNode } from "../CustomNode/CustomNode";
 import "./flow.css";
 import {
+  BlockIORootSchema,
   BlockUIType,
   formatEdgeID,
   GraphExecutionID,
   GraphID,
   GraphMeta,
   LibraryAgent,
+  SpecialBlockID,
 } from "@/lib/autogpt-server-api";
+import { getV1GetSpecificGraph } from "@/app/api/__generated__/endpoints/graphs/graphs";
+import { okData } from "@/app/api/helpers";
 import { IncompatibilityInfo } from "../../../hooks/useSubAgentUpdate/types";
 import { Key, storage } from "@/services/storage/local-storage";
 import { findNewlyAddedBlockCoordinates, getTypeColor } from "@/lib/utils";
@@ -687,8 +691,94 @@ const FlowEditor: React.FC<{
     [getNode, updateNode, nodes],
   );
+  /* Shared helper to create and add a node */
+  const createAndAddNode = useCallback(
+    async (
+      blockID: string,
+      blockName: string,
+      hardcodedValues: Record<string, any>,
+      position: { x: number; y: number },
+    ): Promise<CustomNode | null> => {
+      const nodeSchema = availableBlocks.find((node) => node.id === blockID);
+      if (!nodeSchema) {
+        console.error(`Schema not found for block ID: ${blockID}`);
+        return null;
+      }
+      // For agent blocks, fetch the full graph to get schemas
+      let inputSchema: BlockIORootSchema = nodeSchema.inputSchema;
+      let outputSchema: BlockIORootSchema = nodeSchema.outputSchema;
+      let finalHardcodedValues = hardcodedValues;
+      if (blockID === SpecialBlockID.AGENT) {
+        const graphID = hardcodedValues.graph_id as string;
+        const graphVersion = hardcodedValues.graph_version as number;
+        const graphData = okData(
+          await getV1GetSpecificGraph(graphID, { version: graphVersion }),
+        );
+        if (graphData) {
+          inputSchema = graphData.input_schema as BlockIORootSchema;
+          outputSchema = graphData.output_schema as BlockIORootSchema;
+          finalHardcodedValues = {
+            ...hardcodedValues,
+            input_schema: graphData.input_schema,
+            output_schema: graphData.output_schema,
+          };
+        } else {
+          console.error("Failed to fetch graph data for agent block");
+        }
+      }
+      const newNode: CustomNode = {
+        id: nodeId.toString(),
+        type: "custom",
+        position,
+        data: {
+          blockType: blockName,
+          blockCosts: nodeSchema.costs || [],
+          title: `${blockName} ${nodeId}`,
+          description: nodeSchema.description,
+          categories: nodeSchema.categories,
+          inputSchema: inputSchema,
+          outputSchema: outputSchema,
+          hardcodedValues: finalHardcodedValues,
+          connections: [],
+          isOutputOpen: false,
+          block_id: blockID,
+          isOutputStatic: nodeSchema.staticOutput,
+          uiType: nodeSchema.uiType,
+        },
+      };
+      addNodes(newNode);
+      setNodeId((prevId) => prevId + 1);
+      clearNodesStatusAndOutput();
+      history.push({
+        type: "ADD_NODE",
+        payload: { node: { ...newNode, ...newNode.data } },
+        undo: () => deleteElements({ nodes: [{ id: newNode.id }] }),
+        redo: () => addNodes(newNode),
+      });
+      return newNode;
+    },
+    [
+      availableBlocks,
+      nodeId,
+      addNodes,
+      deleteElements,
+      clearNodesStatusAndOutput,
+    ],
+  );
   const addNode = useCallback(
-    (blockId: string, nodeType: string, hardcodedValues: any = {}) => {
+    async (
+      blockId: string,
+      nodeType: string,
+      hardcodedValues: Record<string, any> = {},
+    ) => {
       const nodeSchema = availableBlocks.find((node) => node.id === blockId);
       if (!nodeSchema) {
         console.error(`Schema not found for block ID: ${blockId}`);
@@ -707,73 +797,42 @@
       // Alternative: We could also use D3 force, Intersection for this (React flow Pro examples)
       const { x, y } = getViewport();
-      const viewportCoordinates =
+      const position =
         nodeDimensions && Object.keys(nodeDimensions).length > 0
-          ? // we will get all the dimension of nodes, then store
-            findNewlyAddedBlockCoordinates(
+          ? findNewlyAddedBlockCoordinates(
               nodeDimensions,
               nodeSchema.uiType == BlockUIType.NOTE ? 300 : 500,
               60,
               1.0,
             )
-          : // we will get all the dimension of nodes, then store
-            {
+          : {
               x: window.innerWidth / 2 - x,
               y: window.innerHeight / 2 - y,
             };
-      const newNode: CustomNode = {
-        id: nodeId.toString(),
-        type: "custom",
-        position: viewportCoordinates, // Set the position to the calculated viewport center
-        data: {
-          blockType: nodeType,
-          blockCosts: nodeSchema.costs,
-          title: `${nodeType} ${nodeId}`,
-          description: nodeSchema.description,
-          categories: nodeSchema.categories,
-          inputSchema: nodeSchema.inputSchema,
-          outputSchema: nodeSchema.outputSchema,
-          hardcodedValues: hardcodedValues,
-          connections: [],
-          isOutputOpen: false,
-          block_id: blockId,
-          isOutputStatic: nodeSchema.staticOutput,
-          uiType: nodeSchema.uiType,
-        },
-      };
-      addNodes(newNode);
-      setNodeId((prevId) => prevId + 1);
-      clearNodesStatusAndOutput(); // Clear status and output when a new node is added
+      const newNode = await createAndAddNode(
+        blockId,
+        nodeType,
+        hardcodedValues,
+        position,
+      );
+      if (!newNode) return;
       setViewport(
         {
-          // Rough estimate of the dimension of the node is: 500x400px.
-          // Though we skip shifting the X, considering the block menu side-bar.
-          x: -viewportCoordinates.x * 0.8 + (window.innerWidth - 0.0) / 2,
-          y: -viewportCoordinates.y * 0.8 + (window.innerHeight - 400) / 2,
+          x: -position.x * 0.8 + (window.innerWidth - 0.0) / 2,
+          y: -position.y * 0.8 + (window.innerHeight - 400) / 2,
           zoom: 0.8,
         },
         { duration: 500 },
       );
-      history.push({
-        type: "ADD_NODE",
-        payload: { node: { ...newNode, ...newNode.data } },
-        undo: () => deleteElements({ nodes: [{ id: newNode.id }] }),
-        redo: () => addNodes(newNode),
-      });
     },
     [
-      nodeId,
       getViewport,
       setViewport,
      availableBlocks,
-      addNodes,
       nodeDimensions,
-      deleteElements,
-      clearNodesStatusAndOutput,
+      createAndAddNode,
     ],
   );
@@ -920,7 +979,7 @@
   }, []);
   const onDrop = useCallback(
-    (event: React.DragEvent) => {
+    async (event: React.DragEvent) => {
       event.preventDefault();
       const blockData = event.dataTransfer.getData("application/reactflow");
@@ -935,62 +994,17 @@
         y: event.clientY,
       });
-      // Find the block schema
-      const nodeSchema = availableBlocks.find((node) => node.id === blockId);
-      if (!nodeSchema) {
-        console.error(`Schema not found for block ID: ${blockId}`);
-        return;
-      }
-      // Create the new node at the drop position
-      const newNode: CustomNode = {
-        id: nodeId.toString(),
-        type: "custom",
-        position,
-        data: {
-          blockType: blockName,
-          blockCosts: nodeSchema.costs || [],
-          title: `${blockName} ${nodeId}`,
-          description: nodeSchema.description,
-          categories: nodeSchema.categories,
-          inputSchema: nodeSchema.inputSchema,
-          outputSchema: nodeSchema.outputSchema,
-          hardcodedValues: hardcodedValues,
-          connections: [],
-          isOutputOpen: false,
-          block_id: blockId,
-          uiType: nodeSchema.uiType,
-        },
-      };
-      history.push({
-        type: "ADD_NODE",
-        payload: { node: { ...newNode, ...newNode.data } },
-        undo: () => {
-          deleteElements({ nodes: [{ id: newNode.id } as any], edges: [] });
-        },
-        redo: () => {
-          addNodes([newNode]);
-        },
-      });
-      addNodes([newNode]);
-      clearNodesStatusAndOutput();
-      setNodeId((prevId) => prevId + 1);
+      await createAndAddNode(
+        blockId,
+        blockName,
+        hardcodedValues || {},
+        position,
+      );
     } catch (error) {
       console.error("Failed to drop block:", error);
     }
   },
-    [
-      nodeId,
-      availableBlocks,
-      nodes,
-      edges,
-      addNodes,
-      screenToFlowPosition,
-      deleteElements,
-      clearNodesStatusAndOutput,
-    ],
+    [screenToFlowPosition, createAndAddNode],
   );
 const buildContextValue: BuilderContextType = useMemo(

View File

@@ -4,13 +4,13 @@ import { AgentRunDraftView } from "@/app/(platform)/library/agents/[id]/componen
 import { Dialog } from "@/components/molecules/Dialog/Dialog";
 import type {
   CredentialsMetaInput,
-  GraphMeta,
+  Graph,
 } from "@/lib/autogpt-server-api/types";
 interface RunInputDialogProps {
   isOpen: boolean;
   doClose: () => void;
-  graph: GraphMeta;
+  graph: Graph;
   doRun?: (
     inputs: Record<string, any>,
     credentialsInputs: Record<string, CredentialsMetaInput>,

View File

@@ -9,13 +9,13 @@ import { CustomNodeData } from "@/app/(platform)/build/components/legacy-builder
 import {
   BlockUIType,
   CredentialsMetaInput,
-  GraphMeta,
+  Graph,
 } from "@/lib/autogpt-server-api/types";
 import RunnerOutputUI, { OutputNodeInfo } from "./RunnerOutputUI";
 import { RunnerInputDialog } from "./RunnerInputUI";
 interface RunnerUIWrapperProps {
-  graph: GraphMeta;
+  graph: Graph;
   nodes: Node<CustomNodeData>[];
   graphExecutionError?: string | null;
   saveAndRun: (

View File

@@ -1,5 +1,5 @@
 import { GraphInputSchema } from "@/lib/autogpt-server-api";
-import { GraphMetaLike, IncompatibilityInfo } from "./types";
+import { GraphLike, IncompatibilityInfo } from "./types";
 // Helper type for schema properties - the generated types are too loose
 type SchemaProperties = Record<string, GraphInputSchema["properties"][string]>;
@@ -36,7 +36,7 @@ export function getSchemaRequired(schema: unknown): SchemaRequired {
  */
 export function createUpdatedAgentNodeInputs(
   currentInputs: Record<string, unknown>,
-  latestSubGraphVersion: GraphMetaLike,
+  latestSubGraphVersion: GraphLike,
 ): Record<string, unknown> {
   return {
     ...currentInputs,

View File

@@ -1,7 +1,11 @@
-import type { GraphMeta as LegacyGraphMeta } from "@/lib/autogpt-server-api";
+import type {
+  Graph as LegacyGraph,
+  GraphMeta as LegacyGraphMeta,
+} from "@/lib/autogpt-server-api";
+import type { GraphModel as GeneratedGraph } from "@/app/api/__generated__/models/graphModel";
 import type { GraphMeta as GeneratedGraphMeta } from "@/app/api/__generated__/models/graphMeta";
-export type SubAgentUpdateInfo<T extends GraphMetaLike = GraphMetaLike> = {
+export type SubAgentUpdateInfo<T extends GraphLike = GraphLike> = {
   hasUpdate: boolean;
   currentVersion: number;
   latestVersion: number;
@@ -10,7 +14,10 @@ export type SubAgentUpdateInfo<T extends GraphMetaLike = GraphMetaLike> = {
   incompatibilities: IncompatibilityInfo | null;
 };
-// Union type for GraphMeta that works with both legacy and new builder
+// Union type for Graph (with schemas) that works with both legacy and new builder
+export type GraphLike = LegacyGraph | GeneratedGraph;
+// Union type for GraphMeta (without schemas) for version detection
 export type GraphMetaLike = LegacyGraphMeta | GeneratedGraphMeta;
 export type IncompatibilityInfo = {

View File

@@ -1,5 +1,11 @@
 import { useMemo } from "react";
-import { GraphInputSchema, GraphOutputSchema } from "@/lib/autogpt-server-api";
+import type {
+  GraphInputSchema,
+  GraphOutputSchema,
+} from "@/lib/autogpt-server-api";
+import type { GraphModel } from "@/app/api/__generated__/models/graphModel";
+import { useGetV1GetSpecificGraph } from "@/app/api/__generated__/endpoints/graphs/graphs";
+import { okData } from "@/app/api/helpers";
 import { getEffectiveType } from "@/lib/utils";
 import { EdgeLike, getSchemaProperties, getSchemaRequired } from "./helpers";
 import {
@@ -11,26 +17,38 @@ import {
 /**
  * Checks if a newer version of a sub-agent is available and determines compatibility
  */
-export function useSubAgentUpdate<T extends GraphMetaLike>(
+export function useSubAgentUpdate(
   nodeID: string,
   graphID: string | undefined,
   graphVersion: number | undefined,
   currentInputSchema: GraphInputSchema | undefined,
   currentOutputSchema: GraphOutputSchema | undefined,
   connections: EdgeLike[],
-  availableGraphs: T[],
-): SubAgentUpdateInfo<T> {
+  availableGraphs: GraphMetaLike[],
+): SubAgentUpdateInfo<GraphModel> {
   // Find the latest version of the same graph
-  const latestGraph = useMemo(() => {
+  const latestGraphInfo = useMemo(() => {
     if (!graphID) return null;
     return availableGraphs.find((graph) => graph.id === graphID) || null;
   }, [graphID, availableGraphs]);
-  // Check if there's an update available
+  // Check if there's a newer version available
   const hasUpdate = useMemo(() => {
-    if (!latestGraph || graphVersion === undefined) return false;
-    return latestGraph.version! > graphVersion;
-  }, [latestGraph, graphVersion]);
+    if (!latestGraphInfo || graphVersion === undefined) return false;
+    return latestGraphInfo.version! > graphVersion;
+  }, [latestGraphInfo, graphVersion]);
+  // Fetch full graph IF an update is detected
+  const { data: latestGraph } = useGetV1GetSpecificGraph(
+    graphID ?? "",
+    { version: latestGraphInfo?.version },
+    {
+      query: {
+        enabled: hasUpdate && !!graphID && !!latestGraphInfo?.version,
+        select: okData,
+      },
+    },
+  );
   // Get connected input and output handles for this specific node
   const connectedHandles = useMemo(() => {
@@ -152,8 +170,8 @@ export function useSubAgentUpdate<T extends GraphMetaLike>(
   return {
     hasUpdate,
     currentVersion: graphVersion || 0,
-    latestVersion: latestGraph?.version || 0,
-    latestGraph,
+    latestVersion: latestGraphInfo?.version || 0,
+    latestGraph: latestGraph || null,
     isCompatible: compatibilityResult.isCompatible,
     incompatibilities: compatibilityResult.incompatibilities,
   };

View File

@@ -18,7 +18,7 @@ interface GraphStore {
     outputSchema: Record<string, any> | null,
   ) => void;
-  // Available graphs; used for sub-graph updates
+  // Available graphs; used for sub-graph updated version detection
   availableSubGraphs: GraphMeta[];
   setAvailableSubGraphs: (graphs: GraphMeta[]) => void;

View File

@@ -10,8 +10,8 @@ import React, {
 import {
   CredentialsMetaInput,
   CredentialsType,
+  Graph,
   GraphExecutionID,
-  GraphMeta,
   LibraryAgentPreset,
   LibraryAgentPresetID,
   LibraryAgentPresetUpdatable,
@@ -69,7 +69,7 @@ export function AgentRunDraftView({
   className,
   recommendedScheduleCron,
 }: {
-  graph: GraphMeta;
+  graph: Graph;
   agentActions?: ButtonAction[];
   recommendedScheduleCron?: string | null;
   doRun?: (

View File

@@ -2,8 +2,8 @@
 import React, { useCallback, useMemo } from "react";
 import {
+  Graph,
   GraphExecutionID,
-  GraphMeta,
   Schedule,
   ScheduleID,
 } from "@/lib/autogpt-server-api";
@@ -35,7 +35,7 @@ export function AgentScheduleDetailsView({
   onForcedRun,
   doDeleteSchedule,
 }: {
-  graph: GraphMeta;
+  graph: Graph;
   schedule: Schedule;
   agentActions: ButtonAction[];
   onForcedRun: (runID: GraphExecutionID) => void;

View File

@@ -5629,7 +5629,9 @@
           "description": "Successful Response",
           "content": {
             "application/json": {
-              "schema": { "$ref": "#/components/schemas/GraphMeta" }
+              "schema": {
+                "$ref": "#/components/schemas/GraphModelWithoutNodes"
+              }
             }
           }
         },
@@ -6495,18 +6497,6 @@
         "anyOf": [{ "type": "string" }, { "type": "null" }],
         "title": "Recommended Schedule Cron"
       },
-      "nodes": {
-        "items": { "$ref": "#/components/schemas/Node" },
-        "type": "array",
-        "title": "Nodes",
-        "default": []
-      },
-      "links": {
-        "items": { "$ref": "#/components/schemas/Link" },
-        "type": "array",
-        "title": "Links",
-        "default": []
-      },
       "forked_from_id": {
         "anyOf": [{ "type": "string" }, { "type": "null" }],
         "title": "Forked From Id"
@@ -6514,11 +6504,22 @@
       "forked_from_version": {
         "anyOf": [{ "type": "integer" }, { "type": "null" }],
         "title": "Forked From Version"
+      },
+      "nodes": {
+        "items": { "$ref": "#/components/schemas/Node" },
+        "type": "array",
+        "title": "Nodes"
+      },
+      "links": {
+        "items": { "$ref": "#/components/schemas/Link" },
+        "type": "array",
+        "title": "Links"
       }
     },
     "type": "object",
     "required": ["name", "description"],
-    "title": "BaseGraph"
+    "title": "BaseGraph",
+    "description": "Graph with nodes, links, and computed I/O schema fields.\n\nUsed to represent sub-graphs within a `Graph`. Contains the full graph\nstructure including nodes and links, plus computed fields for schemas\nand trigger info. Does NOT include user_id or created_at (see GraphModel)."
   },
   "BaseGraph-Output": {
     "properties": {
@@ -6539,18 +6540,6 @@
       "anyOf": [{ "type": "string" }, { "type": "null" }],
       "title": "Recommended Schedule Cron"
     },
-      "nodes": {
-        "items": { "$ref": "#/components/schemas/Node" },
-        "type": "array",
-        "title": "Nodes",
-        "default": []
-      },
-      "links": {
-        "items": { "$ref": "#/components/schemas/Link" },
-        "type": "array",
-        "title": "Links",
-        "default": []
-      },
     "forked_from_id": {
       "anyOf": [{ "type": "string" }, { "type": "null" }],
       "title": "Forked From Id"
@@ -6559,6 +6548,16 @@
       "anyOf": [{ "type": "integer" }, { "type": "null" }],
       "title": "Forked From Version"
     },
+      "nodes": {
+        "items": { "$ref": "#/components/schemas/Node" },
+        "type": "array",
+        "title": "Nodes"
+      },
+      "links": {
+        "items": { "$ref": "#/components/schemas/Link" },
+        "type": "array",
+        "title": "Links"
+      },
     "input_schema": {
       "additionalProperties": true,
       "type": "object",
@@ -6605,7 +6604,8 @@
       "has_sensitive_action",
       "trigger_setup_info"
     ],
-    "title": "BaseGraph"
+    "title": "BaseGraph",
+    "description": "Graph with nodes, links, and computed I/O schema fields.\n\nUsed to represent sub-graphs within a `Graph`. Contains the full graph\nstructure including nodes and links, plus computed fields for schemas\nand trigger info. Does NOT include user_id or created_at (see GraphModel)."
   },
   "BlockCategoryResponse": {
     "properties": {
@@ -7399,18 +7399,6 @@
       "anyOf": [{ "type": "string" }, { "type": "null" }],
       "title": "Recommended Schedule Cron"
     },
-      "nodes": {
-        "items": { "$ref": "#/components/schemas/Node" },
-        "type": "array",
-        "title": "Nodes",
-        "default": []
-      },
-      "links": {
-        "items": { "$ref": "#/components/schemas/Link" },
-        "type": "array",
-        "title": "Links",
-        "default": []
-      },
     "forked_from_id": {
       "anyOf": [{ "type": "string" }, { "type": "null" }],
       "title": "Forked From Id"
@@ -7419,16 +7407,26 @@
       "anyOf": [{ "type": "integer" }, { "type": "null" }],
       "title": "Forked From Version"
     },
+      "nodes": {
+        "items": { "$ref": "#/components/schemas/Node" },
+        "type": "array",
+        "title": "Nodes"
+      },
+      "links": {
+        "items": { "$ref": "#/components/schemas/Link" },
+        "type": "array",
+        "title": "Links"
+      },
     "sub_graphs": {
       "items": { "$ref": "#/components/schemas/BaseGraph-Input" },
       "type": "array",
-      "title": "Sub Graphs",
-      "default": []
+      "title": "Sub Graphs"
     }
   },
   "type": "object",
   "required": ["name", "description"],
-  "title": "Graph"
+  "title": "Graph",
+  "description": "Creatable graph model used in API create/update endpoints."
 },
 "GraphExecution": {
   "properties": {
@@ -7778,6 +7776,52 @@
     "description": "Response schema for paginated graph executions."
   },
   "GraphMeta": {
+    "properties": {
+      "id": { "type": "string", "title": "Id" },
+      "version": { "type": "integer", "title": "Version" },
+      "is_active": {
+        "type": "boolean",
+        "title": "Is Active",
+        "default": true
+      },
+      "name": { "type": "string", "title": "Name" },
+      "description": { "type": "string", "title": "Description" },
+      "instructions": {
+        "anyOf": [{ "type": "string" }, { "type": "null" }],
+        "title": "Instructions"
+      },
+      "recommended_schedule_cron": {
+        "anyOf": [{ "type": "string" }, { "type": "null" }],
+        "title": "Recommended Schedule Cron"
+      },
+      "forked_from_id": {
+        "anyOf": [{ "type": "string" }, { "type": "null" }],
+        "title": "Forked From Id"
+      },
+      "forked_from_version": {
+        "anyOf": [{ "type": "integer" }, { "type": "null" }],
+        "title": "Forked From Version"
+      },
+      "user_id": { "type": "string", "title": "User Id" },
+      "created_at": {
+        "type": "string",
+        "format": "date-time",
+        "title": "Created At"
+      }
+    },
+    "type": "object",
+    "required": [
+      "id",
+      "version",
+      "name",
+      "description",
+      "user_id",
+      "created_at"
+    ],
+    "title": "GraphMeta",
+    "description": "Lightweight graph metadata model representing an existing graph from the database,\nfor use in listings and summaries.\n\nLacks `GraphModel`'s nodes, links, and expensive computed fields.\nUse for list endpoints where full graph data is not needed and performance matters."
+  },
+  "GraphModel": {
     "properties": {
       "id": { "type": "string", "title": "Id" },
       "version": { "type": "integer", "title": "Version", "default": 1 },
@@ -7804,13 +7848,27 @@
       "anyOf": [{ "type": "integer" }, { "type": "null" }],
       "title": "Forked From Version"
     },
+    "user_id": { "type": "string", "title": "User Id" },
+    "created_at": {
+      "type": "string",
+      "format": "date-time",
+      "title": "Created At"
+    },
+    "nodes": {
+      "items": { "$ref": "#/components/schemas/NodeModel" },
+      "type": "array",
+      "title": "Nodes"
+    },
+    "links": {
+      "items": { "$ref": "#/components/schemas/Link" },
+      "type": "array",
+      "title": "Links"
+    },
     "sub_graphs": {
       "items": { "$ref": "#/components/schemas/BaseGraph-Output" },
       "type": "array",
-      "title": "Sub Graphs",
-      "default": []
+      "title": "Sub Graphs"
     },
-    "user_id": { "type": "string", "title": "User Id" },
     "input_schema": {
       "additionalProperties": true,
       "type": "object",
@@ -7857,6 +7915,7 @@
     "name",
     "description",
     "user_id",
+    "created_at",
     "input_schema",
     "output_schema",
     "has_external_trigger",
@@ -7865,9 +7924,10 @@
     "trigger_setup_info",
     "credentials_input_schema"
   ],
-  "title": "GraphMeta"
+  "title": "GraphModel",
+  "description": "Full graph model representing an existing graph from the database.\n\nThis is the primary model for working with persisted graphs. Includes all\ngraph data (nodes, links, sub_graphs) plus user ownership and timestamps.\nProvides computed fields (input_schema, output_schema, etc.) used during\nset-up (frontend) and execution (backend).\n\nInherits from:\n- `Graph`: provides structure (nodes, links, sub_graphs) and computed schemas\n- `GraphMeta`: provides user_id, created_at for database records"
 },
-"GraphModel": {
+"GraphModelWithoutNodes": {
   "properties": {
     "id": { "type": "string", "title": "Id" },
     "version": { "type": "integer", "title": "Version", "default": 1 },
@@ -7886,18 +7946,6 @@
       "anyOf": [{ "type": "string" }, { "type": "null" }],
       "title": "Recommended Schedule Cron"
     },
-    "nodes": {
-      "items": { "$ref": "#/components/schemas/NodeModel" },
-      "type": "array",
-      "title": "Nodes",
-      "default": []
-    },
-    "links": {
-      "items": { "$ref": "#/components/schemas/Link" },
-      "type": "array",
-      "title": "Links",
-      "default": []
-    },
    "forked_from_id": {
      "anyOf": [{ "type": "string" }, { "type": "null" }],
      "title": "Forked From Id"
@@ -7906,12 +7954,6 @@
      "anyOf": [{ "type": "integer" }, { "type": "null" }],
      "title": "Forked From Version"
    },
-    "sub_graphs": {
-      "items": { "$ref": "#/components/schemas/BaseGraph-Output" },
-      "type": "array",
-      "title": "Sub Graphs",
-      "default": []
-    },
    "user_id": { "type": "string", "title": "User Id" },
    "created_at": {
      "type": "string",
@@ -7973,7 +8015,8 @@
     "trigger_setup_info",
     "credentials_input_schema"
   ],
-  "title": "GraphModel"
+  "title": "GraphModelWithoutNodes",
+  "description": "GraphModel variant that excludes nodes, links, and sub-graphs from serialization.\n\nUsed in contexts like the store where exposing internal graph structure\nis not desired. Inherits all computed fields from GraphModel but marks\nnodes and links as excluded from JSON output."
 },
 "GraphSettings": {
   "properties": {
@@ -8613,26 +8656,22 @@
     "input_default": {
       "additionalProperties": true,
       "type": "object",
-      "title": "Input Default",
-      "default": {}
+      "title": "Input Default"
     },
     "metadata": {
       "additionalProperties": true,
       "type": "object",
-      "title": "Metadata",
-      "default": {}
+      "title": "Metadata"
     },
     "input_links": {
       "items": { "$ref": "#/components/schemas/Link" },
       "type": "array",
-      "title": "Input Links",
-      "default": []
+      "title": "Input Links"
     },
     "output_links": {
       "items": { "$ref": "#/components/schemas/Link" },
       "type": "array",
-      "title": "Output Links",
-      "default": []
+      "title": "Output Links"
     }
   },
   "type": "object",
@@ -8712,26 +8751,22 @@
     "input_default": {
       "additionalProperties": true,
       "type": "object",
-      "title": "Input Default",
-      "default": {}
+      "title": "Input Default"
     },
     "metadata": {
       "additionalProperties": true,
       "type": "object",
-      "title": "Metadata",
-      "default": {}
+      "title": "Metadata"
     },
     "input_links": {
       "items": { "$ref": "#/components/schemas/Link" },
       "type": "array",
-      "title": "Input Links",
-      "default": []
+      "title": "Input Links"
    },
    "output_links": {
      "items": { "$ref": "#/components/schemas/Link" },
      "type": "array",
-      "title": "Output Links",
-      "default": []
+      "title": "Output Links"
    },
    "graph_id": { "type": "string", "title": "Graph Id" },
    "graph_version": { "type": "integer", "title": "Graph Version" },
@@ -12272,7 +12307,9 @@
       "title": "Location"
     },
     "msg": { "type": "string", "title": "Message" },
-    "type": { "type": "string", "title": "Error Type" }
+    "type": { "type": "string", "title": "Error Type" },
+    "input": { "title": "Input" },
+    "ctx": { "type": "object", "title": "Context" }
   },
   "type": "object",
   "required": ["loc", "msg", "type"],

View File

@@ -102,18 +102,6 @@ export function ChatMessage({
     }
   }
-  function handleClarificationAnswers(answers: Record<string, string>) {
-    if (onSendMessage) {
-      const contextMessage = Object.entries(answers)
-        .map(([keyword, answer]) => `${keyword}: ${answer}`)
-        .join("\n");
-      onSendMessage(
-        `I have the answers to your questions:\n\n${contextMessage}\n\nPlease proceed with creating the agent.`,
-      );
-    }
-  }
   const handleCopy = useCallback(
     async function handleCopy() {
       if (message.type !== "message") return;
@@ -162,6 +150,22 @@ export function ChatMessage({
     .slice(index + 1)
     .some((m) => m.type === "message" && m.role === "user");
+  const handleClarificationAnswers = (answers: Record<string, string>) => {
+    if (onSendMessage) {
+      // Iterate over questions (preserves original order) instead of answers
+      const contextMessage = message.questions
+        .map((q) => {
+          const answer = answers[q.keyword] || "";
+          return `> ${q.question}\n\n${answer}`;
+        })
+        .join("\n\n");
+      onSendMessage(
+        `**Here are my answers:**\n\n${contextMessage}\n\nPlease proceed with creating the agent.`,
+      );
+    }
+  };
   return (
     <ClarificationQuestionsWidget
       questions={message.questions}

View File

@@ -19,13 +19,13 @@ export function ThinkingMessage({ className }: ThinkingMessageProps) {
     if (timerRef.current === null) {
       timerRef.current = setTimeout(() => {
         setShowSlowLoader(true);
-      }, 3000);
+      }, 8000);
     }
     if (coffeeTimerRef.current === null) {
       coffeeTimerRef.current = setTimeout(() => {
         setShowCoffeeMessage(true);
-      }, 8000);
+      }, 10000);
     }
     return () => {

View File

@@ -362,25 +362,14 @@ export type GraphMeta = {
   user_id: UserID;
   version: number;
   is_active: boolean;
+  created_at: Date;
   name: string;
   description: string;
   instructions?: string | null;
   recommended_schedule_cron: string | null;
   forked_from_id?: GraphID | null;
   forked_from_version?: number | null;
-  input_schema: GraphInputSchema;
-  output_schema: GraphOutputSchema;
-  credentials_input_schema: CredentialsInputSchema;
-} & (
-  | {
-      has_external_trigger: true;
-      trigger_setup_info: GraphTriggerInfo;
-    }
-  | {
-      has_external_trigger: false;
-      trigger_setup_info: null;
-    }
-);
+};
 export type GraphID = Brand<string, "GraphID">;
@@ -447,11 +436,22 @@ export type GraphTriggerInfo = {
 /* Mirror of backend/data/graph.py:Graph */
 export type Graph = GraphMeta & {
-  created_at: Date;
   nodes: Node[];
   links: Link[];
   sub_graphs: Omit<Graph, "sub_graphs">[]; // Flattened sub-graphs
-};
+  input_schema: GraphInputSchema;
+  output_schema: GraphOutputSchema;
+  credentials_input_schema: CredentialsInputSchema;
+} & (
+  | {
+      has_external_trigger: true;
+      trigger_setup_info: GraphTriggerInfo;
+    }
+  | {
+      has_external_trigger: false;
+      trigger_setup_info: null;
+    }
+);
 export type GraphUpdateable = Omit<
   Graph,