Compare commits

..

197 Commits

Author SHA1 Message Date
Bentlybro
7cfc0a0c4f update blocks.md 2025-01-13 11:05:15 +00:00
Bently
c869fd0119 Update block docs for: slant3d/slicing.md 2025-01-13 10:24:59 +00:00
Bently
097348e2ba Update block docs for: slant3d/order.md 2025-01-13 10:24:57 +00:00
Bently
2f97724ba5 Update block docs for: slant3d/filament.md 2025-01-13 10:24:54 +00:00
Bently
805a988e21 Update block docs for: slant3d/base.md 2025-01-13 10:24:51 +00:00
Bently
6166e31942 Update block docs for: nvidia/deepfake.md 2025-01-13 10:24:48 +00:00
Bently
fa56b68071 Update block docs for: jina/search.md 2025-01-13 10:24:46 +00:00
Bently
4207dc2bcf Update block docs for: jina/fact_checker.md 2025-01-13 10:24:44 +00:00
Bently
73d032a937 Update block docs for: jina/embeddings.md 2025-01-13 10:24:42 +00:00
Bently
2011322511 Update block docs for: jina/chunking.md 2025-01-13 10:24:40 +00:00
Bently
09e1a4081f Update block docs for: hubspot/engagement.md 2025-01-13 10:24:39 +00:00
Bently
3831aa99e7 Update block docs for: hubspot/contact.md 2025-01-13 10:24:36 +00:00
Bently
f15633833c Update block docs for: hubspot/company.md 2025-01-13 10:24:29 +00:00
Bently
2352c50433 Update block docs for: helpers/http.md 2025-01-13 10:24:27 +00:00
Bently
00add5738f Update block docs for: google/sheets.md 2025-01-13 10:24:24 +00:00
Bently
00b163b0a5 Update block docs for: google/gmail.md 2025-01-13 10:24:23 +00:00
Bently
3c23fd5b1a Update block docs for: github/triggers.md 2025-01-13 10:24:21 +00:00
Bently
7c719d6835 Update block docs for: github/repo.md 2025-01-13 10:24:20 +00:00
Bently
0b0c810861 Update block docs for: github/pull_requests.md 2025-01-13 10:24:18 +00:00
Bently
1c1dda57e8 Update block docs for: github/issues.md 2025-01-13 10:24:17 +00:00
Bently
20e93f32ff Update block docs for: fal/ai_video_generator.md 2025-01-13 10:24:10 +00:00
Bently
8fc00dfe02 Update block docs for: exa/similar.md 2025-01-13 10:24:02 +00:00
Bently
185d103371 Update block docs for: exa/search.md 2025-01-13 10:23:59 +00:00
Bently
dfc024acc3 Update block docs for: exa/contents.md 2025-01-13 10:23:58 +00:00
Bently
791ab5d671 Update block docs for: compass/triggers.md 2025-01-13 10:23:57 +00:00
Bently
6dc88e3f21 Update block docs for: youtube.md 2025-01-13 10:23:55 +00:00
Bently
d3f5718cac Update block docs for: time_blocks.md 2025-01-13 10:23:51 +00:00
Bently
aef7b5db7d Update block docs for: text.md 2025-01-13 10:23:50 +00:00
Bently
df071ca5a5 Update block docs for: talking_head.md 2025-01-13 10:23:49 +00:00
Bently
2b192a0d20 Update block docs for: search.md 2025-01-13 10:23:48 +00:00
Bently
9572415b74 Update block docs for: sampling.md 2025-01-13 10:23:46 +00:00
Bently
ad1bf2f27f Update block docs for: rss.md 2025-01-13 10:23:45 +00:00
Bently
67991f7c6d Update block docs for: replicate_flux_advanced.md 2025-01-13 10:23:44 +00:00
Bently
c2aad7d2d9 Update block docs for: reddit.md 2025-01-13 10:23:43 +00:00
Bently
504a0a1250 Update block docs for: pinecone.md 2025-01-13 10:23:42 +00:00
Bently
69ae276bb8 Update block docs for: medium.md 2025-01-13 10:23:40 +00:00
Bently
66f2e2a77b Update block docs for: maths.md 2025-01-13 10:23:39 +00:00
Bently
c463d1022b Update block docs for: llm.md 2025-01-13 10:23:38 +00:00
Bently
88dc0bbe0b Update block docs for: iteration.md 2025-01-13 10:23:37 +00:00
Bently
5ea176e457 Update block docs for: ideogram.md 2025-01-13 10:23:35 +00:00
Bently
134163aa88 Update block docs for: http.md 2025-01-13 10:23:34 +00:00
Bently
ff494bee93 Update block docs for: google_maps.md 2025-01-13 10:23:33 +00:00
Bently
f02e2fd8bb Update block docs for: discord.md 2025-01-13 10:23:32 +00:00
Bently
2012213af5 Update block docs for: csv.md 2025-01-13 10:23:31 +00:00
Bently
94f4702f6b Update block docs for: code_executor.md 2025-01-13 10:23:29 +00:00
Bently
abf05d6407 Update block docs for: branching.md 2025-01-13 10:23:28 +00:00
Bently
58a8b0ddeb Update block docs for: basic.md 2025-01-13 10:23:27 +00:00
Bently
6a4a8f5a46 Update block docs for: ai_music_generator.md 2025-01-13 10:23:26 +00:00
Bentlybro
240ad756aa rm files 2025-01-13 10:10:59 +00:00
Bently
6c62a6a558 Update block docs for: slant3d/base.md 2025-01-13 10:06:26 +00:00
Bently
d36ac1471c Update block docs for: nvidia/deepfake.md 2025-01-13 10:06:25 +00:00
Bently
a2b34739c9 Update block docs for: jina/search.md 2025-01-13 10:06:24 +00:00
Bently
7d4775e3b7 Update block docs for: jina/fact_checker.md 2025-01-13 10:06:23 +00:00
Bently
d133b474a8 Update block docs for: jina/embeddings.md 2025-01-13 10:06:21 +00:00
Bently
14ef675784 Update block docs for: jina/chunking.md 2025-01-13 10:06:19 +00:00
Bently
2c9baa6966 Update block docs for: hubspot/engagement.md 2025-01-13 10:06:18 +00:00
Bently
43ca817a85 Update block docs for: hubspot/contact.md 2025-01-13 10:06:17 +00:00
Bently
5cf97dd296 Update block docs for: hubspot/company.md 2025-01-13 10:06:15 +00:00
Bently
c577758b4a Update block docs for: helpers/http.md 2025-01-13 10:06:14 +00:00
Bently
ed9d0b85b4 Update block docs for: google/sheets.md 2025-01-13 10:06:13 +00:00
Bently
cfb92dd4f9 Update block docs for: google/gmail.md 2025-01-13 10:06:12 +00:00
Bently
b892c2b272 Update block docs for: github/triggers.md 2025-01-13 10:06:10 +00:00
Bently
d4679bbae8 Update block docs for: github/repo.md 2025-01-13 10:06:09 +00:00
Bently
34954d8df3 Update block docs for: github/pull_requests.md 2025-01-13 10:06:08 +00:00
Bently
780c893c91 Update block docs for: github/issues.md 2025-01-13 10:06:07 +00:00
Bently
a113fdb134 Update block docs for: fal/ai_video_generator.md 2025-01-13 10:06:05 +00:00
Bently
b73cb9fc98 Update block docs for: exa/similar.md 2025-01-13 10:06:04 +00:00
Bently
223e3de073 Update block docs for: exa/search.md 2025-01-13 10:06:03 +00:00
Bently
abfbdd0934 Update block docs for: exa/helpers.md 2025-01-13 10:06:01 +00:00
Bently
d8e38b505c Update block docs for: exa/contents.md 2025-01-13 10:06:00 +00:00
Bently
4ebc34da62 Update block docs for: compass/triggers.md 2025-01-13 10:05:59 +00:00
Bently
056c539bde Update block docs for: youtube.md 2025-01-13 10:05:57 +00:00
Bently
7e2e8843c0 Update block docs for: time_blocks.md 2025-01-13 10:05:55 +00:00
Bently
a8cde7c3c5 Update block docs for: text_to_speech_block.md 2025-01-13 10:05:53 +00:00
Bently
64e59c5324 Update block docs for: text.md 2025-01-13 10:05:52 +00:00
Bently
0da1461a79 Update block docs for: talking_head.md 2025-01-13 10:05:51 +00:00
Bently
192ff65bbf Update block docs for: search.md 2025-01-13 10:05:50 +00:00
Bently
388003ff2d Update block docs for: sampling.md 2025-01-13 10:05:49 +00:00
Bently
1081b15c91 Update block docs for: rss.md 2025-01-13 10:05:48 +00:00
Bently
0e17f5757b Update block docs for: replicate_flux_advanced.md 2025-01-13 10:05:47 +00:00
Bently
80de45c610 Update block docs for: reddit.md 2025-01-13 10:05:45 +00:00
Bently
bd237c5b52 Update block docs for: pinecone.md 2025-01-13 10:05:44 +00:00
Bently
6f8f7ac716 Update block docs for: medium.md 2025-01-13 10:05:43 +00:00
Bently
cca71f99b9 Update block docs for: maths.md 2025-01-13 10:05:42 +00:00
Bently
e54a999ff4 Update block docs for: llm.md 2025-01-13 10:05:40 +00:00
Bently
36360b3ade Update block docs for: iteration.md 2025-01-13 10:05:39 +00:00
Bently
a1607a3b21 Update block docs for: ideogram.md 2025-01-13 10:05:38 +00:00
Bently
8804417c72 Update block docs for: http.md 2025-01-13 10:05:37 +00:00
Bently
6aeec36a3c Update block docs for: google_maps.md 2025-01-13 10:05:36 +00:00
Bently
6d09a46652 Update block docs for: email_block.md 2025-01-13 10:05:34 +00:00
Bently
be4b2b1ba1 Update block docs for: discord.md 2025-01-13 10:05:33 +00:00
Bently
d2d6346f59 Update block docs for: decoder_block.md 2025-01-13 10:05:32 +00:00
Bently
d83984bf38 Update block docs for: csv.md 2025-01-13 10:05:31 +00:00
Bently
146bf8e692 Update block docs for: count_words_and_char_block.md 2025-01-13 10:05:29 +00:00
Bently
015e7d2b10 Update block docs for: code_extraction_block.md 2025-01-13 10:05:28 +00:00
Bently
dac7e4aa57 Update block docs for: code_executor.md 2025-01-13 10:05:27 +00:00
Bently
1764cf9837 Update block docs for: branching.md 2025-01-13 10:05:26 +00:00
Bently
fbd1e26524 Update block docs for: 45e78db5-03e9-447f-9395-308d712f5f08.md 2025-01-13 10:05:25 +00:00
Bently
fb10bacfda Update block docs for: basic.md 2025-01-13 10:05:23 +00:00
Bently
f6a12828f0 Update block docs for: ai_shortform_video_block.md 2025-01-13 10:05:22 +00:00
Bently
fba186e5e1 Update block docs for: ai_music_generator.md 2025-01-13 10:05:21 +00:00
Bently
91b88b840e Update block docs for: ai_image_generator_block.md 2025-01-13 10:05:19 +00:00
Bentlybro
f6b00f07ce rm 2025-01-12 12:42:45 +00:00
Bently
a4bd7c9b58 Update block docs for: google_maps.md 2025-01-12 12:41:05 +00:00
Bently
bc643492e8 Update block docs for: email_block.md 2025-01-12 12:41:04 +00:00
Bently
ba64e05803 Update block docs for: discord.md 2025-01-12 12:41:03 +00:00
Bently
01c0284fd2 Update block docs for: text_decoder.md 2025-01-12 12:41:02 +00:00
Bently
a08a7bd1e1 Update block docs for: read_csv.md 2025-01-12 12:41:00 +00:00
Bently
25bab0eaa5 Update block docs for: word_character_count_block.md 2025-01-12 12:40:59 +00:00
Bently
bb646e865e Update block docs for: code_extraction_block.md 2025-01-12 12:40:57 +00:00
Bently
dc777ce89a Update block docs for: code_executor.md 2025-01-12 12:40:56 +00:00
Bently
a3955385ec Update block docs for: condition_block.md 2025-01-12 12:40:55 +00:00
Bently
88b03563f5 Update block docs for: block_installation.md 2025-01-12 12:40:54 +00:00
Bently
f57f6fc1c0 Update block docs for: basic.md 2025-01-12 12:40:53 +00:00
Bently
2f7ad767c2 Update block docs for: ai_shortform_video_block.md 2025-01-12 12:40:52 +00:00
Bently
e3e4bc8c96 Update block docs for: ai_music_generator.md 2025-01-12 12:40:51 +00:00
Bently
d3cb3c73d1 Update block docs for: ai_image_generator_block.md 2025-01-12 12:40:50 +00:00
Bently
cc75eea402 Update block docs for: agent.md 2025-01-12 12:40:49 +00:00
Bentlybro
40961cb9f1 rm 2025-01-12 12:23:46 +00:00
Bently
13be1a03b7 Update block docs for: youtube.md 2025-01-12 11:53:52 +00:00
Bently
757381c889 Update block docs for: time_blocks.md 2025-01-12 11:53:50 +00:00
Bently
154f0a2b83 Update block docs for: unreal_text_to_speech.md 2025-01-12 11:53:49 +00:00
Bently
bf91cc507f Update block docs for: text.md 2025-01-12 11:53:48 +00:00
Bently
940fef027a Update block docs for: talking_avatar_video.md 2025-01-12 11:53:47 +00:00
Bently
d1b51ad09b Update block docs for: search.md 2025-01-12 11:53:46 +00:00
Bently
4cb0bbe67e Update block docs for: sampling.md 2025-01-12 11:53:45 +00:00
Bently
ebbb7b07cf Update block docs for: rss.md 2025-01-12 11:53:44 +00:00
Bently
db3d86bce6 Update block docs for: replicate_flux_advanced.md 2025-01-12 11:53:43 +00:00
Bently
f1f6f87b25 Update block docs for: reddit.md 2025-01-12 11:53:42 +00:00
Bently
0d5ab20f14 Update block docs for: pinecone.md 2025-01-12 11:53:41 +00:00
Bently
4017c5be70 Update block docs for: medium.md 2025-01-12 11:53:40 +00:00
Bently
b7cc83854a Update block docs for: maths.md 2025-01-12 11:53:39 +00:00
Bently
0599083e0e Update block docs for: llm.md 2025-01-12 11:53:38 +00:00
Bently
9e36144b40 Update block docs for: step_through_items.md 2025-01-12 11:53:36 +00:00
Bently
ce3c86cb1d Update block docs for: ideogram.md 2025-01-12 11:53:35 +00:00
Bently
e33263c9fc Update block docs for: http.md 2025-01-12 11:53:34 +00:00
Bently
04f0f64fbb Update block docs for: google_maps.md 2025-01-12 11:53:33 +00:00
Bently
755a7b620d Update block docs for: email_block.md 2025-01-12 11:53:32 +00:00
Bently
d54c9d4e7e Update block docs for: discord.md 2025-01-12 11:53:31 +00:00
Bently
852af17294 Update block docs for: text_decoder.md 2025-01-12 11:53:29 +00:00
Bently
8889d029d4 Update block docs for: read_csv.md 2025-01-12 11:53:28 +00:00
Bently
e93c7dc89e Update block docs for: word_character_count_block.md 2025-01-12 11:53:27 +00:00
Bently
7791281b90 Update block docs for: code_extraction_block.md 2025-01-12 11:53:26 +00:00
Bently
cc9ff9e2bb Update block docs for: code_executor.md 2025-01-12 11:53:25 +00:00
Bently
6b82b9e73b Update block docs for: condition_block.md 2025-01-12 11:53:24 +00:00
Bently
61bfbeeb01 Update block docs for: block_installation.md 2025-01-12 11:53:23 +00:00
Bently
9518ff4f02 Update block docs for: basic.md 2025-01-12 11:53:21 +00:00
Bently
49490e899e Update block docs for: ai_shortform_video_block.md 2025-01-12 11:53:20 +00:00
Bently
d19866bdf5 Update block docs for: ai_music_generator.md 2025-01-12 11:53:19 +00:00
Bently
b6c946ac4f Update block docs for: ai_image_generator_block.md 2025-01-12 11:53:18 +00:00
Bently
d6f5dcd717 Update block docs for: agent.md 2025-01-12 11:53:17 +00:00
Bently
99779f52b2 Update block docs for: blocks_init.md 2025-01-12 11:53:15 +00:00
Bently
d4e5a48163 Update block docs for: code_execution_block.md 2025-01-12 11:07:29 +00:00
Bentlybro
a40bb6f6d6 rm file 2025-01-12 11:07:08 +00:00
Bently
a393f8bf9f Update block docs for: code_executor.md 2025-01-12 11:05:08 +00:00
Bentlybro
d56931f4cb rm files 2025-01-10 13:30:37 +00:00
Bently
03691329be Update block docs for: text_to_speech_block.md 2025-01-10 12:55:02 +00:00
Bently
9813012c12 Update block docs for: text.md 2025-01-10 12:55:01 +00:00
Bently
8f74e58ecc Update block docs for: talking_head.md 2025-01-10 12:55:00 +00:00
Bently
b1f5413dab Update block docs for: search.md 2025-01-10 12:54:59 +00:00
Bently
cc41b2f4ab Update block docs for: sampling.md 2025-01-10 12:54:58 +00:00
Bently
f45b4cc243 Update block docs for: rss.md 2025-01-10 12:54:56 +00:00
Bently
ed2e5e813d Update block docs for: replicate_flux_advanced.md 2025-01-10 12:54:55 +00:00
Bently
ab33d079e2 Update block docs for: reddit.md 2025-01-10 12:54:54 +00:00
Bently
a24c869a44 Update block docs for: pinecone.md 2025-01-10 12:54:52 +00:00
Bently
d6b1cf64ed Update block docs for: medium.md 2025-01-10 12:54:51 +00:00
Bently
e9982ba9bd Update block docs for: maths.md 2025-01-10 12:54:50 +00:00
Bently
58c1e050f2 Update block docs for: llm.md 2025-01-10 12:54:49 +00:00
Bently
f125bb658c Update block docs for: iteration.md 2025-01-10 12:54:48 +00:00
Bently
ba9c91d0b7 Update block docs for: ideogram.md 2025-01-10 12:54:46 +00:00
Bently
e6254f0e83 Update block docs for: http.md 2025-01-10 12:54:45 +00:00
Bently
3c8cf8bd1e Update block docs for: google_maps.md 2025-01-10 12:54:44 +00:00
Bently
79f3888d61 Update block docs for: email_block.md 2025-01-10 12:54:43 +00:00
Bently
ace5d34cc6 Update block docs for: discord.md 2025-01-10 12:54:42 +00:00
Bently
100ab90f44 Update block docs for: decoder_block.md 2025-01-10 12:54:40 +00:00
Bently
a0ad796432 Update block docs for: csv.md 2025-01-10 12:54:39 +00:00
Bently
0cc625ca15 Update block docs for: count_words_and_char_block.md 2025-01-10 12:54:38 +00:00
Bently
a43b329132 Update block docs for: code_extraction_block.md 2025-01-10 12:54:37 +00:00
Bently
beeadd16f1 Update block docs for: code_executor.md 2025-01-10 12:54:36 +00:00
Bently
b2d5b9efb4 Update block docs for: branching.md 2025-01-10 12:54:35 +00:00
Bently
c77f32b23f Update block docs for: block.md 2025-01-10 12:54:33 +00:00
Bently
b92223cf7b Update block docs for: basic.md 2025-01-10 12:54:32 +00:00
Bently
0d47b0ce38 Update block docs for: ai_shortform_video_block.md 2025-01-10 12:54:31 +00:00
Bently
c0409ba0b1 Update block docs for: ai_music_generator.md 2025-01-10 12:54:29 +00:00
Bently
41c8504bdc Update block docs for: ai_image_generator_block.md 2025-01-10 12:54:28 +00:00
Bently
c4d38e4ff3 Update block docs for: agent.md 2025-01-10 12:54:27 +00:00
Bently
b10a275676 Update block docs for: __init__.md 2025-01-10 12:54:26 +00:00
Bentlybro
8baabb0379 remove files 2025-01-10 12:47:54 +00:00
Bently
8bf977958a Update block docs for: pinecone.md 2025-01-10 11:10:33 +00:00
Bently
c73da1f79c Update block docs for: count_words_and_char_block.md 2025-01-10 11:10:29 +00:00
Bently
3c3c1ce90a Update block docs for: code_extraction_block.md 2025-01-10 11:10:27 +00:00
Bently
e9a198f5da Update block docs for: code_executor.md 2025-01-10 11:10:26 +00:00
Bently
706bf0578e Update block docs for: block.md 2025-01-10 11:10:25 +00:00
Bently
610b613367 Update block docs for: ai_music_generator.md 2025-01-10 11:10:22 +00:00
Bently
5f4a411b15 Update block docs for: ai_image_generator_block.md 2025-01-10 11:10:21 +00:00
Bently
7b004f07e7 Update block docs for: agent.md 2025-01-10 11:10:20 +00:00
Bently
c16d2f94a6 Update block docs for: __init__.md 2025-01-10 11:10:19 +00:00
273 changed files with 8664 additions and 17639 deletions

View File

@@ -1,18 +0,0 @@
version = 1
test_patterns = ["**/*.spec.ts","**/*_test.py","**/*_tests.py","**/test_*.py"]
exclude_patterns = ["classic/**"]
[[analyzers]]
name = "javascript"
[analyzers.meta]
plugins = ["react"]
environment = ["nodejs"]
[[analyzers]]
name = "python"
[analyzers.meta]
runtime_version = "3.x.x"
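
The deleted file above looks like a DeepSource-style analyzer configuration: `test_patterns` tells the analyzers which files to treat as tests, and `exclude_patterns` drops whole trees (here the legacy `classic/` code) from analysis. As a rough illustration of how those globs partition a repository, here is a minimal Python sketch using the standard library's `fnmatch`; the sample file paths are hypothetical, and `fnmatch` only approximates real `**` glob semantics (its `*` also crosses `/`, and `**/` is not optional at the repo root).

```python
from fnmatch import fnmatch

# Patterns copied from the deleted config above; everything else is illustrative.
TEST_PATTERNS = ["**/*.spec.ts", "**/*_test.py", "**/*_tests.py", "**/test_*.py"]
EXCLUDE_PATTERNS = ["classic/**"]

def classify(path: str) -> str:
    # Excluded paths are skipped before any other matching.
    if any(fnmatch(path, pattern) for pattern in EXCLUDE_PATTERNS):
        return "excluded"
    # Files matching a test pattern are analyzed as tests, not sources.
    if any(fnmatch(path, pattern) for pattern in TEST_PATTERNS):
        return "test"
    return "source"

for path in [
    "backend/test_server.py",      # hypothetical paths for demonstration
    "frontend/src/app.spec.ts",
    "classic/forge/test_agent.py",
    "backend/server.py",
]:
    print(f"{path} -> {classify(path)}")
```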

View File

@@ -37,25 +37,6 @@ jobs:
run: |
yarn lint
type-check:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: "21"
- name: Install dependencies
run: |
yarn install --frozen-lockfile
- name: Run tsc check
run: |
yarn type-check
test:
runs-on: ubuntu-latest
strategy:

View File

@@ -25,7 +25,7 @@ jobs:
close-issue-message: >
This issue was closed automatically because it has been stale for 10 days
with no activity.
days-before-stale: 100
days-before-stale: 50
days-before-close: 10
# Do not touch meta issues:
exempt-issue-labels: meta,fridge,project management
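
For reference, the two counters in this `actions/stale` configuration compose: an issue is marked stale after `days-before-stale` days without activity, then closed `days-before-close` days later if it stays inactive. A quick sketch of the resulting timeline, taking the 50/10 values from one side of the diff and a hypothetical last-activity date:

```python
from datetime import date, timedelta

# Counters from the stale-workflow diff above (one side of it: 50/10).
DAYS_BEFORE_STALE = 50
DAYS_BEFORE_CLOSE = 10

last_activity = date(2025, 1, 1)  # hypothetical
marked_stale = last_activity + timedelta(days=DAYS_BEFORE_STALE)
closed = marked_stale + timedelta(days=DAYS_BEFORE_CLOSE)

print(f"stale on {marked_stale}, closed on {closed}")
# -> stale on 2025-02-20, closed on 2025-03-02 (60 days of inactivity total)
```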

View File

@@ -22,7 +22,7 @@ To run the AutoGPT Platform, follow these steps:
2. Run the following command:
```
git submodule update --init --recursive --progress
git submodule update --init --recursive
```
This command will initialize and update the submodules in the repository. The `supabase` folder will be cloned to the root directory.

View File

@@ -18,7 +18,7 @@ ERROR_LOG_FILE = "error.log"
SIMPLE_LOG_FORMAT = "%(asctime)s %(levelname)s %(title)s%(message)s"
DEBUG_LOG_FORMAT = (
"%(asctime)s %(levelname)s %(filename)s:%(lineno)d %(title)s%(message)s"
"%(asctime)s %(levelname)s %(filename)s:%(lineno)d" " %(title)s%(message)s"
)
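
Two notes on this hunk. First, the change itself is cosmetic: the two adjacent string literals in one variant of DEBUG_LOG_FORMAT are implicitly concatenated by Python, so both variants produce the same format string. Second, both formats interpolate a non-standard `%(title)s` attribute, which plain `logging` records do not carry, so something upstream must inject it or formatting would fail. A minimal, hypothetical sketch of one way to supply it with a `logging.Filter` (the actual codebase may use a different mechanism):

```python
import logging

# Format string from the diff above; %(title)s is a custom record attribute.
SIMPLE_LOG_FORMAT = "%(asctime)s %(levelname)s %(title)s%(message)s"

class TitleFilter(logging.Filter):
    """Hypothetical filter: default `title` to "" so %(title)s always resolves."""

    def filter(self, record: logging.LogRecord) -> bool:
        if not hasattr(record, "title"):
            record.title = ""
        return True

handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter(SIMPLE_LOG_FORMAT))
handler.addFilter(TitleFilter())

logger = logging.getLogger("example")
logger.addHandler(handler)
logger.setLevel(logging.INFO)

logger.info("started", extra={"title": "[Server] "})  # titled record
logger.info("no title set on this one")               # filter fills in ""
```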

View File

@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand.
[[package]]
name = "aiohappyeyeballs"
@@ -6,7 +6,6 @@ version = "2.4.0"
description = "Happy Eyeballs for asyncio"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "aiohappyeyeballs-2.4.0-py3-none-any.whl", hash = "sha256:7ce92076e249169a13c2f49320d1967425eaf1f407522d707d59cac7628d62bd"},
{file = "aiohappyeyeballs-2.4.0.tar.gz", hash = "sha256:55a1714f084e63d49639800f95716da97a1f173d46a16dfcfda0016abb93b6b2"},
@@ -18,7 +17,6 @@ version = "3.10.5"
description = "Async http client/server framework (asyncio)"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "aiohttp-3.10.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:18a01eba2574fb9edd5f6e5fb25f66e6ce061da5dab5db75e13fe1558142e0a3"},
{file = "aiohttp-3.10.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:94fac7c6e77ccb1ca91e9eb4cb0ac0270b9fb9b289738654120ba8cebb1189c6"},
@@ -131,7 +129,6 @@ version = "1.3.1"
description = "aiosignal: a list of registered asynchronous callbacks"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"},
{file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"},
@@ -146,7 +143,6 @@ version = "0.7.0"
description = "Reusable constraint types to use with typing.Annotated"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},
{file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
@@ -158,7 +154,6 @@ version = "4.4.0"
description = "High level compatibility layer for multiple asynchronous event loop implementations"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"},
{file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"},
@@ -181,12 +176,10 @@ version = "4.0.3"
description = "Timeout context manager for asyncio programs"
optional = false
python-versions = ">=3.7"
groups = ["main", "dev"]
files = [
{file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"},
{file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"},
]
markers = {main = "python_version < \"3.11\"", dev = "python_full_version < \"3.11.3\""}
[[package]]
name = "attrs"
@@ -194,7 +187,6 @@ version = "24.2.0"
description = "Classes Without Boilerplate"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"},
{file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"},
@@ -214,7 +206,6 @@ version = "5.5.0"
description = "Extensible memoizing collections and decorators"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"},
{file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"},
@@ -226,7 +217,6 @@ version = "2024.8.30"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
{file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"},
{file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"},
@@ -238,7 +228,6 @@ version = "3.3.2"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
optional = false
python-versions = ">=3.7.0"
groups = ["main"]
files = [
{file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
{file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
@@ -338,7 +327,6 @@ version = "0.4.6"
description = "Cross-platform colored terminal text."
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
groups = ["main"]
files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
@@ -350,7 +338,6 @@ version = "1.2.14"
description = "Python @deprecated decorator to deprecate old python classes, functions or methods."
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
groups = ["main"]
files = [
{file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"},
{file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"},
@@ -368,7 +355,6 @@ version = "2.1.0"
description = "A library to handle automated deprecations"
optional = false
python-versions = "*"
groups = ["main"]
files = [
{file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"},
{file = "deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"},
@@ -383,8 +369,6 @@ version = "1.2.2"
description = "Backport of PEP 654 (exception groups)"
optional = false
python-versions = ">=3.7"
groups = ["main"]
markers = "python_version < \"3.11\""
files = [
{file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"},
{file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"},
@@ -399,7 +383,6 @@ version = "1.2.2"
description = "Dictionary with auto-expiring values for caching purposes"
optional = false
python-versions = "*"
groups = ["main"]
files = [
{file = "expiringdict-1.2.2-py3-none-any.whl", hash = "sha256:09a5d20bc361163e6432a874edd3179676e935eb81b925eccef48d409a8a45e8"},
{file = "expiringdict-1.2.2.tar.gz", hash = "sha256:300fb92a7e98f15b05cf9a856c1415b3bc4f2e132be07daa326da6414c23ee09"},
@@ -414,7 +397,6 @@ version = "1.4.1"
description = "A list-like structure which implements collections.abc.MutableSequence"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"},
{file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"},
@@ -501,7 +483,6 @@ version = "2.19.2"
description = "Google API client core library"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "google_api_core-2.19.2-py3-none-any.whl", hash = "sha256:53ec0258f2837dd53bbd3d3df50f5359281b3cc13f800c941dd15a9b5a415af4"},
{file = "google_api_core-2.19.2.tar.gz", hash = "sha256:ca07de7e8aa1c98a8bfca9321890ad2340ef7f2eb136e558cee68f24b94b0a8f"},
@@ -533,7 +514,6 @@ version = "2.34.0"
description = "Google Authentication Library"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "google_auth-2.34.0-py2.py3-none-any.whl", hash = "sha256:72fd4733b80b6d777dcde515628a9eb4a577339437012874ea286bca7261ee65"},
{file = "google_auth-2.34.0.tar.gz", hash = "sha256:8eb87396435c19b20d32abd2f984e31c191a15284af72eb922f10e5bde9c04cc"},
@@ -557,7 +537,6 @@ version = "1.4.5"
description = "Google Cloud Appengine Logging API client library"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "google_cloud_appengine_logging-1.4.5-py2.py3-none-any.whl", hash = "sha256:344e0244404049b42164e4d6dc718ca2c81b393d066956e7cb85fd9407ed9c48"},
{file = "google_cloud_appengine_logging-1.4.5.tar.gz", hash = "sha256:de7d766e5d67b19fc5833974b505b32d2a5bbdfb283fd941e320e7cfdae4cb83"},
@@ -575,7 +554,6 @@ version = "0.3.0"
description = "Google Cloud Audit Protos"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "google_cloud_audit_log-0.3.0-py2.py3-none-any.whl", hash = "sha256:8340793120a1d5aa143605def8704ecdcead15106f754ef1381ae3bab533722f"},
{file = "google_cloud_audit_log-0.3.0.tar.gz", hash = "sha256:901428b257020d8c1d1133e0fa004164a555e5a395c7ca3cdbb8486513df3a65"},
@@ -591,7 +569,6 @@ version = "2.4.1"
description = "Google Cloud API client core library"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073"},
{file = "google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = "sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61"},
@@ -606,14 +583,13 @@ grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"]
[[package]]
name = "google-cloud-logging"
version = "3.11.4"
version = "3.11.3"
description = "Stackdriver Logging API client library"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "google_cloud_logging-3.11.4-py2.py3-none-any.whl", hash = "sha256:1d465ac62df29fb94bba4d6b4891035e57d573d84541dd8a40eebbc74422b2f0"},
{file = "google_cloud_logging-3.11.4.tar.gz", hash = "sha256:32305d989323f3c58603044e2ac5d9cf23e9465ede511bbe90b4309270d3195c"},
{file = "google_cloud_logging-3.11.3-py2.py3-none-any.whl", hash = "sha256:b8ec23f2998f76a58f8492db26a0f4151dd500425c3f08448586b85972f3c494"},
{file = "google_cloud_logging-3.11.3.tar.gz", hash = "sha256:0a73cd94118875387d4535371d9e9426861edef8e44fba1261e86782d5b8d54f"},
]
[package.dependencies]
@@ -625,8 +601,7 @@ google-cloud-core = ">=2.0.0,<3.0.0dev"
grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev"
opentelemetry-api = ">=1.9.0"
proto-plus = [
{version = ">=1.25.0,<2.0.0dev", markers = "python_version >= \"3.13\""},
{version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\" and python_version < \"3.13\""},
{version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""},
{version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""},
]
protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev"
@@ -637,7 +612,6 @@ version = "1.65.0"
description = "Common protobufs used in Google APIs"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "googleapis_common_protos-1.65.0-py2.py3-none-any.whl", hash = "sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63"},
{file = "googleapis_common_protos-1.65.0.tar.gz", hash = "sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0"},
@@ -652,18 +626,17 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"]
[[package]]
name = "gotrue"
version = "2.11.1"
version = "2.10.0"
description = "Python Client Library for Supabase Auth"
optional = false
python-versions = "<4.0,>=3.9"
groups = ["main"]
files = [
{file = "gotrue-2.11.1-py3-none-any.whl", hash = "sha256:1b2d915bdc65fd0ad608532759ce9c72fa2e910145c1e6901f2188519e7bcd2d"},
{file = "gotrue-2.11.1.tar.gz", hash = "sha256:5594ceee60bd873e5f4fdd028b08dece3906f6013b6ed08e7786b71c0092fed0"},
{file = "gotrue-2.10.0-py3-none-any.whl", hash = "sha256:768e58207488e5184ffbdc4351b7280d913daf97962f4e9f2cca05c80004b042"},
{file = "gotrue-2.10.0.tar.gz", hash = "sha256:4edf4c251da3535f2b044e23deba221e848ca1210c17d0c7a9b19f79a1e3f3c0"},
]
[package.dependencies]
httpx = {version = ">=0.26,<0.29", extras = ["http2"]}
httpx = {version = ">=0.26,<0.28", extras = ["http2"]}
pydantic = ">=1.10,<3"
[[package]]
@@ -672,7 +645,6 @@ version = "0.13.1"
description = "IAM API client library"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "grpc-google-iam-v1-0.13.1.tar.gz", hash = "sha256:3ff4b2fd9d990965e410965253c0da6f66205d5a8291c4c31c6ebecca18a9001"},
{file = "grpc_google_iam_v1-0.13.1-py2.py3-none-any.whl", hash = "sha256:c3e86151a981811f30d5e7330f271cee53e73bb87755e88cc3b6f0c7b5fe374e"},
@@ -689,7 +661,6 @@ version = "1.66.1"
description = "HTTP/2-based RPC framework"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "grpcio-1.66.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:4877ba180591acdf127afe21ec1c7ff8a5ecf0fe2600f0d3c50e8c4a1cbc6492"},
{file = "grpcio-1.66.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:3750c5a00bd644c75f4507f77a804d0189d97a107eb1481945a0cf3af3e7a5ac"},
@@ -748,7 +719,6 @@ version = "1.66.1"
description = "Status proto mapping for gRPC"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "grpcio_status-1.66.1-py3-none-any.whl", hash = "sha256:cf9ed0b4a83adbe9297211c95cb5488b0cd065707e812145b842c85c4782ff02"},
{file = "grpcio_status-1.66.1.tar.gz", hash = "sha256:b3f7d34ccc46d83fea5261eea3786174459f763c31f6e34f1d24eba6d515d024"},
@@ -765,7 +735,6 @@ version = "0.14.0"
description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
{file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
@@ -777,7 +746,6 @@ version = "4.1.0"
description = "HTTP/2 State-Machine based protocol implementation"
optional = false
python-versions = ">=3.6.1"
groups = ["main"]
files = [
{file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"},
{file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"},
@@ -793,7 +761,6 @@ version = "4.0.0"
description = "Pure-Python HPACK header compression"
optional = false
python-versions = ">=3.6.1"
groups = ["main"]
files = [
{file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"},
{file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"},
@@ -805,7 +772,6 @@ version = "1.0.5"
description = "A minimal low-level HTTP client."
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"},
{file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"},
@@ -827,7 +793,6 @@ version = "0.27.2"
description = "The next generation HTTP client."
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"},
{file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"},
@@ -854,7 +819,6 @@ version = "6.0.1"
description = "HTTP/2 framing layer for Python"
optional = false
python-versions = ">=3.6.1"
groups = ["main"]
files = [
{file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"},
{file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"},
@@ -866,7 +830,6 @@ version = "3.8"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
{file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"},
{file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"},
@@ -878,7 +841,6 @@ version = "8.4.0"
description = "Read metadata from Python packages"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1"},
{file = "importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5"},
@@ -898,7 +860,6 @@ version = "2.0.0"
description = "brain-dead simple config-ini parsing"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
@@ -910,7 +871,6 @@ version = "6.1.0"
description = "multidict implementation"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"},
{file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"},
@@ -1015,7 +975,6 @@ version = "1.27.0"
description = "OpenTelemetry Python API"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "opentelemetry_api-1.27.0-py3-none-any.whl", hash = "sha256:953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7"},
{file = "opentelemetry_api-1.27.0.tar.gz", hash = "sha256:ed673583eaa5f81b5ce5e86ef7cdaf622f88ef65f0b9aab40b843dcae5bef342"},
@@ -1031,7 +990,6 @@ version = "24.1"
description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"},
{file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
@@ -1043,7 +1001,6 @@ version = "1.5.0"
description = "plugin and hook calling mechanisms for python"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
{file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
@@ -1055,32 +1012,30 @@ testing = ["pytest", "pytest-benchmark"]
[[package]]
name = "postgrest"
version = "0.19.1"
version = "0.18.0"
description = "PostgREST client for Python. This library provides an ORM interface to PostgREST."
optional = false
python-versions = "<4.0,>=3.9"
groups = ["main"]
files = [
{file = "postgrest-0.19.1-py3-none-any.whl", hash = "sha256:a8e7be4e1abc69fd8eee5a49d7dc3a76dfbffbd778beed0b2bd7accb3f4f3a2a"},
{file = "postgrest-0.19.1.tar.gz", hash = "sha256:d8fa88953cced4f45efa0f412056c364f64ece8a35b5b35f458a7e58c133fbca"},
{file = "postgrest-0.18.0-py3-none-any.whl", hash = "sha256:200baad0d23fee986b3a0ffd3e07bfe0cdd40e09760f11e8e13a6c0c2376d5fa"},
{file = "postgrest-0.18.0.tar.gz", hash = "sha256:29c1a94801a17eb9ad590189993fe5a7a6d8c1bfc11a3c9d0ce7ba146454ebb3"},
]
[package.dependencies]
deprecation = ">=2.1.0,<3.0.0"
httpx = {version = ">=0.26,<0.29", extras = ["http2"]}
httpx = {version = ">=0.26,<0.28", extras = ["http2"]}
pydantic = ">=1.9,<3.0"
strenum = {version = ">=0.4.9,<0.5.0", markers = "python_version < \"3.11\""}
[[package]]
name = "proto-plus"
version = "1.26.0"
description = "Beautiful, Pythonic protocol buffers"
version = "1.24.0"
description = "Beautiful, Pythonic protocol buffers."
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "proto_plus-1.26.0-py3-none-any.whl", hash = "sha256:bf2dfaa3da281fc3187d12d224c707cb57214fb2c22ba854eb0c105a3fb2d4d7"},
{file = "proto_plus-1.26.0.tar.gz", hash = "sha256:6e93d5f5ca267b54300880fff156b6a3386b3fa3f43b1da62e680fc0c586ef22"},
{file = "proto-plus-1.24.0.tar.gz", hash = "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445"},
{file = "proto_plus-1.24.0-py3-none-any.whl", hash = "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12"},
]
[package.dependencies]
@@ -1095,7 +1050,6 @@ version = "5.28.0"
description = ""
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "protobuf-5.28.0-cp310-abi3-win32.whl", hash = "sha256:66c3edeedb774a3508ae70d87b3a19786445fe9a068dd3585e0cefa8a77b83d0"},
{file = "protobuf-5.28.0-cp310-abi3-win_amd64.whl", hash = "sha256:6d7cc9e60f976cf3e873acb9a40fed04afb5d224608ed5c1a105db4a3f09c5b6"},
@@ -1116,7 +1070,6 @@ version = "0.6.1"
description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"},
{file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"},
@@ -1128,7 +1081,6 @@ version = "0.4.1"
description = "A collection of ASN.1-based protocols modules"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"},
{file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"},
@@ -1139,19 +1091,18 @@ pyasn1 = ">=0.4.6,<0.7.0"
[[package]]
name = "pydantic"
version = "2.10.6"
version = "2.10.3"
description = "Data validation using Python type hints"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"},
{file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"},
{file = "pydantic-2.10.3-py3-none-any.whl", hash = "sha256:be04d85bbc7b65651c5f8e6b9976ed9c6f41782a55524cef079a34a0bb82144d"},
{file = "pydantic-2.10.3.tar.gz", hash = "sha256:cb5ac360ce894ceacd69c403187900a02c4b20b693a9dd1d643e1effab9eadf9"},
]
[package.dependencies]
annotated-types = ">=0.6.0"
pydantic-core = "2.27.2"
pydantic-core = "2.27.1"
typing-extensions = ">=4.12.2"
[package.extras]
@@ -1160,112 +1111,111 @@ timezone = ["tzdata"]
[[package]]
name = "pydantic-core"
version = "2.27.2"
version = "2.27.1"
description = "Core functionality for Pydantic validation and serialization"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"},
{file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"},
{file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"},
{file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"},
{file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"},
{file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"},
{file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"},
{file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"},
{file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"},
{file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"},
{file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"},
{file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"},
{file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"},
{file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"},
{file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"},
{file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"},
{file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"},
{file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"},
{file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"},
{file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"},
{file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"},
{file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"},
{file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"},
{file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"},
{file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"},
{file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"},
{file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"},
{file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"},
{file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"},
{file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"},
{file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"},
{file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"},
{file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"},
{file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"},
{file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"},
{file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"},
{file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"},
{file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"},
{file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"},
{file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"},
{file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"},
{file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"},
{file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"},
{file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"},
{file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"},
{file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"},
{file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"},
{file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"},
{file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"},
{file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"},
{file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"},
{file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"},
{file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"},
{file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"},
{file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"},
{file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"},
{file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"},
{file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"},
{file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"},
{file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"},
{file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"},
{file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"},
{file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"},
{file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"},
{file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"},
{file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"},
{file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"},
{file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"},
{file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"},
{file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"},
{file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"},
{file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"},
{file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"},
{file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"},
{file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"},
{file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"},
{file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"},
{file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"},
{file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"},
{file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"},
{file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"},
{file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"},
{file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"},
{file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"},
{file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"},
{file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"},
{file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"},
{file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"},
{file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"},
{file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"},
{file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"},
{file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"},
{file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"},
{file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"},
{file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"},
{file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"},
{file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"},
{file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"},
{file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"},
{file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"},
{file = "pydantic_core-2.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a"},
{file = "pydantic_core-2.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b"},
{file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:121ceb0e822f79163dd4699e4c54f5ad38b157084d97b34de8b232bcaad70278"},
{file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4603137322c18eaf2e06a4495f426aa8d8388940f3c457e7548145011bb68e05"},
{file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a33cd6ad9017bbeaa9ed78a2e0752c5e250eafb9534f308e7a5f7849b0b1bfb4"},
{file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15cc53a3179ba0fcefe1e3ae50beb2784dede4003ad2dfd24f81bba4b23a454f"},
{file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45d9c5eb9273aa50999ad6adc6be5e0ecea7e09dbd0d31bd0c65a55a2592ca08"},
{file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8bf7b66ce12a2ac52d16f776b31d16d91033150266eb796967a7e4621707e4f6"},
{file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:655d7dd86f26cb15ce8a431036f66ce0318648f8853d709b4167786ec2fa4807"},
{file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:5556470f1a2157031e676f776c2bc20acd34c1990ca5f7e56f1ebf938b9ab57c"},
{file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f69ed81ab24d5a3bd93861c8c4436f54afdf8e8cc421562b0c7504cf3be58206"},
{file = "pydantic_core-2.27.1-cp310-none-win32.whl", hash = "sha256:f5a823165e6d04ccea61a9f0576f345f8ce40ed533013580e087bd4d7442b52c"},
{file = "pydantic_core-2.27.1-cp310-none-win_amd64.whl", hash = "sha256:57866a76e0b3823e0b56692d1a0bf722bffb324839bb5b7226a7dbd6c9a40b17"},
{file = "pydantic_core-2.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8"},
{file = "pydantic_core-2.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330"},
{file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52"},
{file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4"},
{file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c"},
{file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de"},
{file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025"},
{file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e"},
{file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919"},
{file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c"},
{file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc"},
{file = "pydantic_core-2.27.1-cp311-none-win32.whl", hash = "sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9"},
{file = "pydantic_core-2.27.1-cp311-none-win_amd64.whl", hash = "sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5"},
{file = "pydantic_core-2.27.1-cp311-none-win_arm64.whl", hash = "sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89"},
{file = "pydantic_core-2.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f"},
{file = "pydantic_core-2.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02"},
{file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c"},
{file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac"},
{file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb"},
{file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529"},
{file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35"},
{file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089"},
{file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381"},
{file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb"},
{file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae"},
{file = "pydantic_core-2.27.1-cp312-none-win32.whl", hash = "sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c"},
{file = "pydantic_core-2.27.1-cp312-none-win_amd64.whl", hash = "sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16"},
{file = "pydantic_core-2.27.1-cp312-none-win_arm64.whl", hash = "sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e"},
{file = "pydantic_core-2.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073"},
{file = "pydantic_core-2.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08"},
{file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf"},
{file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737"},
{file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2"},
{file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107"},
{file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51"},
{file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a"},
{file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc"},
{file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960"},
{file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23"},
{file = "pydantic_core-2.27.1-cp313-none-win32.whl", hash = "sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05"},
{file = "pydantic_core-2.27.1-cp313-none-win_amd64.whl", hash = "sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337"},
{file = "pydantic_core-2.27.1-cp313-none-win_arm64.whl", hash = "sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5"},
{file = "pydantic_core-2.27.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:5897bec80a09b4084aee23f9b73a9477a46c3304ad1d2d07acca19723fb1de62"},
{file = "pydantic_core-2.27.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0165ab2914379bd56908c02294ed8405c252250668ebcb438a55494c69f44ab"},
{file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b9af86e1d8e4cfc82c2022bfaa6f459381a50b94a29e95dcdda8442d6d83864"},
{file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f6c8a66741c5f5447e047ab0ba7a1c61d1e95580d64bce852e3df1f895c4067"},
{file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a42d6a8156ff78981f8aa56eb6394114e0dedb217cf8b729f438f643608cbcd"},
{file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64c65f40b4cd8b0e049a8edde07e38b476da7e3aaebe63287c899d2cff253fa5"},
{file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdcf339322a3fae5cbd504edcefddd5a50d9ee00d968696846f089b4432cf78"},
{file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bf99c8404f008750c846cb4ac4667b798a9f7de673ff719d705d9b2d6de49c5f"},
{file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8f1edcea27918d748c7e5e4d917297b2a0ab80cad10f86631e488b7cddf76a36"},
{file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:159cac0a3d096f79ab6a44d77a961917219707e2a130739c64d4dd46281f5c2a"},
{file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:029d9757eb621cc6e1848fa0b0310310de7301057f623985698ed7ebb014391b"},
{file = "pydantic_core-2.27.1-cp38-none-win32.whl", hash = "sha256:a28af0695a45f7060e6f9b7092558a928a28553366519f64083c63a44f70e618"},
{file = "pydantic_core-2.27.1-cp38-none-win_amd64.whl", hash = "sha256:2d4567c850905d5eaaed2f7a404e61012a51caf288292e016360aa2b96ff38d4"},
{file = "pydantic_core-2.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e9386266798d64eeb19dd3677051f5705bf873e98e15897ddb7d76f477131967"},
{file = "pydantic_core-2.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4228b5b646caa73f119b1ae756216b59cc6e2267201c27d3912b592c5e323b60"},
{file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3dfe500de26c52abe0477dde16192ac39c98f05bf2d80e76102d394bd13854"},
{file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aee66be87825cdf72ac64cb03ad4c15ffef4143dbf5c113f64a5ff4f81477bf9"},
{file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b748c44bb9f53031c8cbc99a8a061bc181c1000c60a30f55393b6e9c45cc5bd"},
{file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ca038c7f6a0afd0b2448941b6ef9d5e1949e999f9e5517692eb6da58e9d44be"},
{file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e0bd57539da59a3e4671b90a502da9a28c72322a4f17866ba3ac63a82c4498e"},
{file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac6c2c45c847bbf8f91930d88716a0fb924b51e0c6dad329b793d670ec5db792"},
{file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b94d4ba43739bbe8b0ce4262bcc3b7b9f31459ad120fb595627eaeb7f9b9ca01"},
{file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:00e6424f4b26fe82d44577b4c842d7df97c20be6439e8e685d0d715feceb9fb9"},
{file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38de0a70160dd97540335b7ad3a74571b24f1dc3ed33f815f0880682e6880131"},
{file = "pydantic_core-2.27.1-cp39-none-win32.whl", hash = "sha256:7ccebf51efc61634f6c2344da73e366c75e735960b5654b63d7e6f69a5885fa3"},
{file = "pydantic_core-2.27.1-cp39-none-win_amd64.whl", hash = "sha256:a57847b090d7892f123726202b7daa20df6694cbd583b67a592e856bff603d6c"},
{file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3fa80ac2bd5856580e242dbc202db873c60a01b20309c8319b5c5986fbe53ce6"},
{file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d950caa237bb1954f1b8c9227b5065ba6875ac9771bb8ec790d956a699b78676"},
{file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e4216e64d203e39c62df627aa882f02a2438d18a5f21d7f721621f7a5d3611d"},
{file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02a3d637bd387c41d46b002f0e49c52642281edacd2740e5a42f7017feea3f2c"},
{file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:161c27ccce13b6b0c8689418da3885d3220ed2eae2ea5e9b2f7f3d48f1d52c27"},
{file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19910754e4cc9c63bc1c7f6d73aa1cfee82f42007e407c0f413695c2f7ed777f"},
{file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e173486019cc283dc9778315fa29a363579372fe67045e971e89b6365cc035ed"},
{file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:af52d26579b308921b73b956153066481f064875140ccd1dfd4e77db89dbb12f"},
{file = "pydantic_core-2.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c"},
{file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5fde892e6c697ce3e30c61b239330fc5d569a71fefd4eb6512fc6caec9dd9e2f"},
{file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:816f5aa087094099fff7edabb5e01cc370eb21aa1a1d44fe2d2aefdfb5599b31"},
{file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c10c309e18e443ddb108f0ef64e8729363adbfd92d6d57beec680f6261556f3"},
{file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98476c98b02c8e9b2eec76ac4156fd006628b1b2d0ef27e548ffa978393fd154"},
{file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c3027001c28434e7ca5a6e1e527487051136aa81803ac812be51802150d880dd"},
{file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7699b1df36a48169cdebda7ab5a2bac265204003f153b4bd17276153d997670a"},
{file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1c39b07d90be6b48968ddc8c19e7585052088fd7ec8d568bb31ff64c70ae3c97"},
{file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:46ccfe3032b3915586e469d4972973f893c0a2bb65669194a5bdea9bacc088c2"},
{file = "pydantic_core-2.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:62ba45e21cf6571d7f716d903b5b7b6d2617e2d5d67c0923dc47b9d41369f840"},
{file = "pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235"},
]
[package.dependencies]
@@ -1273,14 +1223,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
[[package]]
name = "pydantic-settings"
version = "2.7.1"
version = "2.7.0"
description = "Settings management using Pydantic"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "pydantic_settings-2.7.1-py3-none-any.whl", hash = "sha256:590be9e6e24d06db33a4262829edef682500ef008565a969c73d39d5f8bfb3fd"},
{file = "pydantic_settings-2.7.1.tar.gz", hash = "sha256:10c9caad35e64bfb3c2fbf70a078c0e25cc92499782e5200747f942a065dec93"},
{file = "pydantic_settings-2.7.0-py3-none-any.whl", hash = "sha256:e00c05d5fa6cbbb227c84bd7487c5c1065084119b750df7c8c1a554aed236eb5"},
{file = "pydantic_settings-2.7.0.tar.gz", hash = "sha256:ac4bfd4a36831a48dbf8b2d9325425b549a0a6f18cea118436d728eb4f1c4d66"},
]
[package.dependencies]
@@ -1298,7 +1247,6 @@ version = "2.10.1"
description = "JSON Web Token implementation in Python"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"},
{file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"},
@@ -1316,7 +1264,6 @@ version = "8.3.3"
description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"},
{file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"},
@@ -1335,14 +1282,13 @@ dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments
[[package]]
name = "pytest-asyncio"
version = "0.25.3"
version = "0.25.0"
description = "Pytest support for asyncio"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "pytest_asyncio-0.25.3-py3-none-any.whl", hash = "sha256:9e89518e0f9bd08928f97a3482fdc4e244df17529460bc038291ccaf8f85c7c3"},
{file = "pytest_asyncio-0.25.3.tar.gz", hash = "sha256:fc1da2cf9f125ada7e710b4ddad05518d4cee187ae9412e9ac9271003497f07a"},
{file = "pytest_asyncio-0.25.0-py3-none-any.whl", hash = "sha256:db5432d18eac6b7e28b46dcd9b69921b55c3b1086e85febfe04e70b18d9e81b3"},
{file = "pytest_asyncio-0.25.0.tar.gz", hash = "sha256:8c0610303c9e0442a5db8604505fc0f545456ba1528824842b37b4a626cbf609"},
]
[package.dependencies]
@@ -1358,7 +1304,6 @@ version = "3.14.0"
description = "Thin-wrapper around the mock package for easier use with pytest"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"},
{file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"},
@@ -1376,7 +1321,6 @@ version = "2.9.0.post0"
description = "Extensions to the standard Python datetime module"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
groups = ["main"]
files = [
{file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
{file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
@@ -1391,7 +1335,6 @@ version = "1.0.1"
description = "Read key-value pairs from a .env file and set them as environment variables"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"},
{file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"},
@@ -1406,7 +1349,6 @@ version = "2.0.2"
description = ""
optional = false
python-versions = "<4.0,>=3.9"
groups = ["main"]
files = [
{file = "realtime-2.0.2-py3-none-any.whl", hash = "sha256:2634c915bc38807f2013f21e8bcc4d2f79870dfd81460ddb9393883d0489928a"},
{file = "realtime-2.0.2.tar.gz", hash = "sha256:519da9325b3b8102139d51785013d592f6b2403d81fa21d838a0b0234723ed7d"},
@@ -1424,7 +1366,6 @@ version = "5.2.1"
description = "Python client for Redis database and key-value store"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "redis-5.2.1-py3-none-any.whl", hash = "sha256:ee7e1056b9aea0f04c6c2ed59452947f34c4940ee025f5dd83e6a6418b6989e4"},
{file = "redis-5.2.1.tar.gz", hash = "sha256:16f2e22dff21d5125e8481515e386711a34cbec50f0e44413dd7d9c060a54e0f"},
@@ -1443,7 +1384,6 @@ version = "2.32.3"
description = "Python HTTP for Humans."
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
{file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
@@ -1465,7 +1405,6 @@ version = "4.9"
description = "Pure-Python RSA implementation"
optional = false
python-versions = ">=3.6,<4"
groups = ["main"]
files = [
{file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"},
{file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"},
@@ -1476,30 +1415,29 @@ pyasn1 = ">=0.1.3"
[[package]]
name = "ruff"
version = "0.9.3"
version = "0.8.6"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
groups = ["dev"]
files = [
{file = "ruff-0.9.3-py3-none-linux_armv6l.whl", hash = "sha256:7f39b879064c7d9670197d91124a75d118d00b0990586549949aae80cdc16624"},
{file = "ruff-0.9.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:a187171e7c09efa4b4cc30ee5d0d55a8d6c5311b3e1b74ac5cb96cc89bafc43c"},
{file = "ruff-0.9.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c59ab92f8e92d6725b7ded9d4a31be3ef42688a115c6d3da9457a5bda140e2b4"},
{file = "ruff-0.9.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dc153c25e715be41bb228bc651c1e9b1a88d5c6e5ed0194fa0dfea02b026439"},
{file = "ruff-0.9.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:646909a1e25e0dc28fbc529eab8eb7bb583079628e8cbe738192853dbbe43af5"},
{file = "ruff-0.9.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a5a46e09355695fbdbb30ed9889d6cf1c61b77b700a9fafc21b41f097bfbba4"},
{file = "ruff-0.9.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c4bb09d2bbb394e3730d0918c00276e79b2de70ec2a5231cd4ebb51a57df9ba1"},
{file = "ruff-0.9.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96a87ec31dc1044d8c2da2ebbed1c456d9b561e7d087734336518181b26b3aa5"},
{file = "ruff-0.9.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bb7554aca6f842645022fe2d301c264e6925baa708b392867b7a62645304df4"},
{file = "ruff-0.9.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cabc332b7075a914ecea912cd1f3d4370489c8018f2c945a30bcc934e3bc06a6"},
{file = "ruff-0.9.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:33866c3cc2a575cbd546f2cd02bdd466fed65118e4365ee538a3deffd6fcb730"},
{file = "ruff-0.9.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:006e5de2621304c8810bcd2ee101587712fa93b4f955ed0985907a36c427e0c2"},
{file = "ruff-0.9.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:ba6eea4459dbd6b1be4e6bfc766079fb9b8dd2e5a35aff6baee4d9b1514ea519"},
{file = "ruff-0.9.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:90230a6b8055ad47d3325e9ee8f8a9ae7e273078a66401ac66df68943ced029b"},
{file = "ruff-0.9.3-py3-none-win32.whl", hash = "sha256:eabe5eb2c19a42f4808c03b82bd313fc84d4e395133fb3fc1b1516170a31213c"},
{file = "ruff-0.9.3-py3-none-win_amd64.whl", hash = "sha256:040ceb7f20791dfa0e78b4230ee9dce23da3b64dd5848e40e3bf3ab76468dcf4"},
{file = "ruff-0.9.3-py3-none-win_arm64.whl", hash = "sha256:800d773f6d4d33b0a3c60e2c6ae8f4c202ea2de056365acfa519aa48acf28e0b"},
{file = "ruff-0.9.3.tar.gz", hash = "sha256:8293f89985a090ebc3ed1064df31f3b4b56320cdfcec8b60d3295bddb955c22a"},
{file = "ruff-0.8.6-py3-none-linux_armv6l.whl", hash = "sha256:defed167955d42c68b407e8f2e6f56ba52520e790aba4ca707a9c88619e580e3"},
{file = "ruff-0.8.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:54799ca3d67ae5e0b7a7ac234baa657a9c1784b48ec954a094da7c206e0365b1"},
{file = "ruff-0.8.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e88b8f6d901477c41559ba540beeb5a671e14cd29ebd5683903572f4b40a9807"},
{file = "ruff-0.8.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0509e8da430228236a18a677fcdb0c1f102dd26d5520f71f79b094963322ed25"},
{file = "ruff-0.8.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:91a7ddb221779871cf226100e677b5ea38c2d54e9e2c8ed847450ebbdf99b32d"},
{file = "ruff-0.8.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:248b1fb3f739d01d528cc50b35ee9c4812aa58cc5935998e776bf8ed5b251e75"},
{file = "ruff-0.8.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:bc3c083c50390cf69e7e1b5a5a7303898966be973664ec0c4a4acea82c1d4315"},
{file = "ruff-0.8.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52d587092ab8df308635762386f45f4638badb0866355b2b86760f6d3c076188"},
{file = "ruff-0.8.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:61323159cf21bc3897674e5adb27cd9e7700bab6b84de40d7be28c3d46dc67cf"},
{file = "ruff-0.8.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ae4478b1471fc0c44ed52a6fb787e641a2ac58b1c1f91763bafbc2faddc5117"},
{file = "ruff-0.8.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:0c000a471d519b3e6cfc9c6680025d923b4ca140ce3e4612d1a2ef58e11f11fe"},
{file = "ruff-0.8.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:9257aa841e9e8d9b727423086f0fa9a86b6b420fbf4bf9e1465d1250ce8e4d8d"},
{file = "ruff-0.8.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:45a56f61b24682f6f6709636949ae8cc82ae229d8d773b4c76c09ec83964a95a"},
{file = "ruff-0.8.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:496dd38a53aa173481a7d8866bcd6451bd934d06976a2505028a50583e001b76"},
{file = "ruff-0.8.6-py3-none-win32.whl", hash = "sha256:e169ea1b9eae61c99b257dc83b9ee6c76f89042752cb2d83486a7d6e48e8f764"},
{file = "ruff-0.8.6-py3-none-win_amd64.whl", hash = "sha256:f1d70bef3d16fdc897ee290d7d20da3cbe4e26349f62e8a0274e7a3f4ce7a905"},
{file = "ruff-0.8.6-py3-none-win_arm64.whl", hash = "sha256:7d7fc2377a04b6e04ffe588caad613d0c460eb2ecba4c0ccbbfe2bc973cbc162"},
{file = "ruff-0.8.6.tar.gz", hash = "sha256:dcad24b81b62650b0eb8814f576fc65cfee8674772a6e24c9b747911801eeaa5"},
]
[[package]]
@@ -1508,7 +1446,6 @@ version = "1.16.0"
description = "Python 2 and 3 compatibility utilities"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
groups = ["main"]
files = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
@@ -1520,7 +1457,6 @@ version = "1.3.1"
description = "Sniff out which async library your code is running under"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"},
{file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"},
@@ -1528,18 +1464,17 @@ files = [
[[package]]
name = "storage3"
version = "0.11.0"
version = "0.9.0"
description = "Supabase Storage client for Python."
optional = false
python-versions = "<4.0,>=3.9"
groups = ["main"]
files = [
{file = "storage3-0.11.0-py3-none-any.whl", hash = "sha256:de2d8f9c9103ca91a9a9d0d69d80b07a3ab6f647b93e023e6a1a97d3607b9728"},
{file = "storage3-0.11.0.tar.gz", hash = "sha256:243583f2180686c0f0a19e6117d8a9796fd60c0ca72ec567d62b75a5af0d57a1"},
{file = "storage3-0.9.0-py3-none-any.whl", hash = "sha256:8b2fb91f0c61583a2f4eac74a8bae67e00d41ff38095c8a6cd3f2ce5e0ab76e7"},
{file = "storage3-0.9.0.tar.gz", hash = "sha256:e16697f60894c94e1d9df0d2e4af783c1b3f7dd08c9013d61978825c624188c4"},
]
[package.dependencies]
httpx = {version = ">=0.26,<0.29", extras = ["http2"]}
httpx = {version = ">=0.26,<0.28", extras = ["http2"]}
python-dateutil = ">=2.8.2,<3.0.0"
[[package]]
@@ -1548,7 +1483,6 @@ version = "0.4.15"
description = "An Enum that inherits from str."
optional = false
python-versions = "*"
groups = ["main"]
files = [
{file = "StrEnum-0.4.15-py3-none-any.whl", hash = "sha256:a30cda4af7cc6b5bf52c8055bc4bf4b2b6b14a93b574626da33df53cf7740659"},
{file = "StrEnum-0.4.15.tar.gz", hash = "sha256:878fb5ab705442070e4dd1929bb5e2249511c0bcf2b0eeacf3bcd80875c82eff"},
@@ -1561,39 +1495,36 @@ test = ["pylint", "pytest", "pytest-black", "pytest-cov", "pytest-pylint"]
[[package]]
name = "supabase"
version = "2.13.0"
version = "2.10.0"
description = "Supabase client for Python."
optional = false
python-versions = "<4.0,>=3.9"
groups = ["main"]
files = [
{file = "supabase-2.13.0-py3-none-any.whl", hash = "sha256:6cfccc055be21dab311afc5e9d5b37f3a4966f8394703763fbc8f8e86f36eaa6"},
{file = "supabase-2.13.0.tar.gz", hash = "sha256:452574d34bd978c8d11b5f02b0182b48e8854e511c969483c83875ec01495f11"},
{file = "supabase-2.10.0-py3-none-any.whl", hash = "sha256:183fb23c04528593f8f81c24ceb8178f3a56bff40fec7ed873b6c55ebc2e420a"},
{file = "supabase-2.10.0.tar.gz", hash = "sha256:9ac095f8947bf60780e67c0edcbab53e2db3f6f3f022329397b093500bf2607c"},
]
[package.dependencies]
gotrue = ">=2.11.0,<3.0.0"
httpx = ">=0.26,<0.29"
postgrest = ">=0.19,<0.20"
gotrue = ">=2.10.0,<3.0.0"
httpx = ">=0.26,<0.28"
postgrest = ">=0.18,<0.19"
realtime = ">=2.0.0,<3.0.0"
storage3 = ">=0.10,<0.12"
supafunc = ">=0.9,<0.10"
storage3 = ">=0.9.0,<0.10.0"
supafunc = ">=0.7.0,<0.8.0"
[[package]]
name = "supafunc"
version = "0.9.2"
version = "0.7.0"
description = "Library for Supabase Functions"
optional = false
python-versions = "<4.0,>=3.9"
groups = ["main"]
files = [
{file = "supafunc-0.9.2-py3-none-any.whl", hash = "sha256:be5ee9f53842c4b0ba5f4abfb5bddf9f9e37e69e755ec0526852bb15af9d2ff5"},
{file = "supafunc-0.9.2.tar.gz", hash = "sha256:f5164114a3e65e7e552539f3f1050aa3d4970885abdd7405555c17fd216e2da1"},
{file = "supafunc-0.7.0-py3-none-any.whl", hash = "sha256:4160260dc02bdd906be1e2ffd7cb3ae8b74ae437c892bb475352b6a99d9ff8eb"},
{file = "supafunc-0.7.0.tar.gz", hash = "sha256:5b1c415fba1395740b2b4eedd1d786384bd58b98f6333a11ba7889820a48b6a7"},
]
[package.dependencies]
httpx = {version = ">=0.26,<0.29", extras = ["http2"]}
strenum = ">=0.4.15,<0.5.0"
httpx = {version = ">=0.26,<0.28", extras = ["http2"]}
[[package]]
name = "tomli"
@@ -1601,8 +1532,6 @@ version = "2.1.0"
description = "A lil' TOML parser"
optional = false
python-versions = ">=3.8"
groups = ["main"]
markers = "python_version < \"3.11\""
files = [
{file = "tomli-2.1.0-py3-none-any.whl", hash = "sha256:a5c57c3d1c56f5ccdf89f6523458f60ef716e210fc47c4cfb188c5ba473e0391"},
{file = "tomli-2.1.0.tar.gz", hash = "sha256:3f646cae2aec94e17d04973e4249548320197cfabdf130015d023de4b74d8ab8"},
@@ -1614,7 +1543,6 @@ version = "4.12.2"
description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
{file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
@@ -1626,7 +1554,6 @@ version = "2.2.2"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"},
{file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"},
@@ -1644,7 +1571,6 @@ version = "12.0"
description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "websockets-12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d554236b2a2006e0ce16315c16eaa0d628dab009c33b63ea03f41c6107958374"},
{file = "websockets-12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2d225bb6886591b1746b17c0573e29804619c8f755b5598d875bb4235ea639be"},
@@ -1726,7 +1652,6 @@ version = "1.16.0"
description = "Module for decorators, wrappers and monkey patching."
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
{file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"},
{file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"},
@@ -1806,7 +1731,6 @@ version = "1.11.1"
description = "Yet another URL library"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "yarl-1.11.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:400cd42185f92de559d29eeb529e71d80dfbd2f45c36844914a4a34297ca6f00"},
{file = "yarl-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8258c86f47e080a258993eed877d579c71da7bda26af86ce6c2d2d072c11320d"},
@@ -1912,7 +1836,6 @@ version = "3.20.1"
description = "Backport of pathlib-compatible object wrapper for zip files"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "zipp-3.20.1-py3-none-any.whl", hash = "sha256:9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064"},
{file = "zipp-3.20.1.tar.gz", hash = "sha256:c22b14cc4763c5a5b04134207736c107db42e9d3ef2d9779d465f5f1bcba572b"},
@@ -1927,6 +1850,6 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools",
type = ["pytest-mypy"]
[metadata]
lock-version = "2.1"
lock-version = "2.0"
python-versions = ">=3.10,<4.0"
content-hash = "a4d81b3b55a67036ca7a441793e13e8fbe20af973fcf1623f36cdee7bc82999f"
content-hash = "bf1b0125759dadb1369fff05ffba64fea3e82b9b7a43d0068e1c80974a4ebc1c"

View File

@@ -9,19 +9,19 @@ packages = [{ include = "autogpt_libs" }]
[tool.poetry.dependencies]
colorama = "^0.4.6"
expiringdict = "^1.2.2"
google-cloud-logging = "^3.11.4"
pydantic = "^2.10.6"
pydantic-settings = "^2.7.1"
google-cloud-logging = "^3.11.3"
pydantic = "^2.10.3"
pydantic-settings = "^2.7.0"
pyjwt = "^2.10.1"
pytest-asyncio = "^0.25.3"
pytest-asyncio = "^0.25.0"
pytest-mock = "^3.14.0"
python = ">=3.10,<4.0"
python-dotenv = "^1.0.1"
supabase = "^2.13.0"
supabase = "^2.10.0"
[tool.poetry.group.dev.dependencies]
redis = "^5.2.1"
ruff = "^0.9.3"
ruff = "^0.8.6"
[build-system]
requires = ["poetry-core"]

View File

@@ -15,9 +15,6 @@ REDIS_PORT=6379
REDIS_PASSWORD=password
ENABLE_CREDIT=false
STRIPE_API_KEY=
STRIPE_WEBHOOK_SECRET=
# What environment things should be logged under: local dev or prod
APP_ENV=local
# What environment to behave as: "local" or "cloud"
@@ -31,12 +28,6 @@ SUPABASE_URL=http://localhost:8000
SUPABASE_SERVICE_ROLE_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q
SUPABASE_JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long
# RabbitMQ credentials -- Used for communication between services
RABBITMQ_HOST=localhost
RABBITMQ_PORT=5672
RABBITMQ_DEFAULT_USER=rabbitmq_user_default
RABBITMQ_DEFAULT_PASS=k0VMxyIJF9S35f3x2uaw5IWAl6Y536O7
## For local development, you may need to set FRONTEND_BASE_URL for the OAuth flow
## for integrations to work. Defaults to the value of PLATFORM_BASE_URL if not set.
# FRONTEND_BASE_URL=http://localhost:3000
@@ -45,7 +36,7 @@ RABBITMQ_DEFAULT_PASS=k0VMxyIJF9S35f3x2uaw5IWAl6Y536O7
## to use the platform's webhook-related functionality.
## If you are developing locally, you can use something like ngrok to get a public URL
## and tunnel it to your locally running backend.
PLATFORM_BASE_URL=http://localhost:3000
PLATFORM_BASE_URL=https://your-public-url-here
## == INTEGRATION CREDENTIALS == ##
# Each set of server side credentials is required for the corresponding 3rd party
@@ -81,20 +72,6 @@ GOOGLE_CLIENT_SECRET=
TWITTER_CLIENT_ID=
TWITTER_CLIENT_SECRET=
# Linear App
# Make a new workspace for your OAuth app -- trust me
# https://linear.app/settings/api/applications/new
# Callback URL: http://localhost:3000/auth/integrations/oauth_callback
LINEAR_CLIENT_ID=
LINEAR_CLIENT_SECRET=
# To obtain Todoist API credentials:
# 1. Create a Todoist account at todoist.com
# 2. Visit the Developer Console: https://developer.todoist.com/appconsole.html
# 3. Click "Create new app"
# 4. Once created, copy your Client ID and Client Secret below
TODOIST_CLIENT_ID=
TODOIST_CLIENT_SECRET=
## ===== OPTIONAL API KEYS ===== ##
@@ -105,12 +82,10 @@ GROQ_API_KEY=
OPEN_ROUTER_API_KEY=
# Reddit
# Go to https://www.reddit.com/prefs/apps and create a new app
# Choose "script" for the type
# Fill in the redirect uri as <your_frontend_url>/auth/integrations/oauth_callback, e.g. http://localhost:3000/auth/integrations/oauth_callback
REDDIT_CLIENT_ID=
REDDIT_CLIENT_SECRET=
REDDIT_USER_AGENT="AutoGPT:1.0 (by /u/autogpt)"
REDDIT_USERNAME=
REDDIT_PASSWORD=
# Discord
DISCORD_BOT_TOKEN=
@@ -155,9 +130,6 @@ EXA_API_KEY=
# E2B
E2B_API_KEY=
# Mem0
MEM0_API_KEY=
# Nvidia
NVIDIA_API_KEY=

View File

@@ -66,17 +66,10 @@ We use Poetry to manage the dependencies. To set up the project, follow thes
### Starting the server without Docker
To run the server locally, start in the autogpt_platform folder:
```sh
cd ..
```
Run the following command to run the database in Docker while running the application locally:
```sh
docker compose --profile local up deps --build --detach
cd backend
poetry run app
```

View File

@@ -1,52 +1,13 @@
import enum
from typing import Any, List

from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema, BlockType
from backend.data.model import SchemaField
from backend.util.file import MediaFile, store_media_file
from backend.util.mock import MockObject
from backend.util.text import TextFormatter
from backend.util.type import convert

formatter = TextFormatter()


class FileStoreBlock(Block):
    class Input(BlockSchema):
        file_in: MediaFile = SchemaField(
            description="The file to store in the temporary directory, it can be a URL, data URI, or local path."
        )

    class Output(BlockSchema):
        file_out: MediaFile = SchemaField(
            description="The relative path to the stored file in the temporary directory."
        )

    def __init__(self):
        super().__init__(
            id="cbb50872-625b-42f0-8203-a2ae78242d8a",
            description="Stores the input file in the temporary directory.",
            categories={BlockCategory.BASIC, BlockCategory.MULTIMEDIA},
            input_schema=FileStoreBlock.Input,
            output_schema=FileStoreBlock.Output,
            static_output=True,
        )

    def run(
        self,
        input_data: Input,
        *,
        graph_exec_id: str,
        **kwargs,
    ) -> BlockOutput:
        file_path = store_media_file(
            graph_exec_id=graph_exec_id,
            file=input_data.file_in,
            return_content=False,
        )
        yield "file_out", file_path


class StoreValueBlock(Block):
    """
    This block allows you to provide a constant value as a block, in a stateless manner.
@@ -297,7 +258,6 @@ class AgentOutputBlock(Block):
    class Output(BlockSchema):
        output: Any = SchemaField(description="The value recorded as output.")
        name: Any = SchemaField(description="The name of the value recorded as output.")

    def __init__(self):
        super().__init__(
@@ -349,7 +309,6 @@ class AgentOutputBlock(Block):
            yield "output", f"Error: {e}, {input_data.value}"
        else:
            yield "output", input_data.value
            yield "name", input_data.name


class AddToDictionaryBlock(Block):
@@ -510,48 +469,6 @@ class AddToListBlock(Block):
        yield "updated_list", updated_list


class FindInListBlock(Block):
    class Input(BlockSchema):
        list: List[Any] = SchemaField(description="The list to search in.")
        value: Any = SchemaField(description="The value to search for.")

    class Output(BlockSchema):
        index: int = SchemaField(description="The index of the value in the list.")
        found: bool = SchemaField(
            description="Whether the value was found in the list."
        )
        not_found_value: Any = SchemaField(
            description="The value that was not found in the list."
        )

    def __init__(self):
        super().__init__(
            id="5e2c6d0a-1e37-489f-b1d0-8e1812b23333",
            description="Finds the index of the value in the list.",
            categories={BlockCategory.BASIC},
            input_schema=FindInListBlock.Input,
            output_schema=FindInListBlock.Output,
            test_input=[
                {"list": [1, 2, 3, 4, 5], "value": 3},
                {"list": [1, 2, 3, 4, 5], "value": 6},
            ],
            test_output=[
                ("index", 2),
                ("found", True),
                ("found", False),
                ("not_found_value", 6),
            ],
        )

    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        try:
            yield "index", input_data.list.index(input_data.value)
            yield "found", True
        except ValueError:
            yield "found", False
            yield "not_found_value", input_data.value


class NoteBlock(Block):
    class Input(BlockSchema):
        text: str = SchemaField(description="The text to display in the sticky note.")
@@ -673,47 +590,3 @@ class CreateListBlock(Block):
            yield "list", input_data.values
        except Exception as e:
            yield "error", f"Failed to create list: {str(e)}"


class TypeOptions(enum.Enum):
    STRING = "string"
    NUMBER = "number"
    BOOLEAN = "boolean"
    LIST = "list"
    DICTIONARY = "dictionary"


class UniversalTypeConverterBlock(Block):
    class Input(BlockSchema):
        value: Any = SchemaField(
            description="The value to convert to a universal type."
        )
        type: TypeOptions = SchemaField(description="The type to convert the value to.")

    class Output(BlockSchema):
        value: Any = SchemaField(description="The converted value.")

    def __init__(self):
        super().__init__(
            id="95d1b990-ce13-4d88-9737-ba5c2070c97b",
            description="This block is used to convert a value to a universal type.",
            categories={BlockCategory.BASIC},
            input_schema=UniversalTypeConverterBlock.Input,
            output_schema=UniversalTypeConverterBlock.Output,
        )

    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        try:
            converted_value = convert(
                input_data.value,
                {
                    TypeOptions.STRING: str,
                    TypeOptions.NUMBER: float,
                    TypeOptions.BOOLEAN: bool,
                    TypeOptions.LIST: list,
                    TypeOptions.DICTIONARY: dict,
                }[input_data.type],
            )
            yield "value", converted_value
        except Exception as e:
            yield "error", f"Failed to convert value: {str(e)}"

View File

@@ -107,83 +107,3 @@ class ConditionBlock(Block):
yield "yes_output", yes_value
else:
yield "no_output", no_value
class IfInputMatchesBlock(Block):
class Input(BlockSchema):
input: Any = SchemaField(
description="The input to match against",
placeholder="For example: 10 or 'hello' or True",
)
value: Any = SchemaField(
description="The value to output if the input matches",
placeholder="For example: 'Greater' or 20 or False",
)
yes_value: Any = SchemaField(
description="The value to output if the input matches",
placeholder="For example: 'Greater' or 20 or False",
default=None,
)
no_value: Any = SchemaField(
description="The value to output if the input does not match",
placeholder="For example: 'Greater' or 20 or False",
default=None,
)
class Output(BlockSchema):
result: bool = SchemaField(
description="The result of the condition evaluation (True or False)"
)
yes_output: Any = SchemaField(
description="The output value if the condition is true"
)
no_output: Any = SchemaField(
description="The output value if the condition is false"
)
def __init__(self):
super().__init__(
id="6dbbc4b3-ca6c-42b6-b508-da52d23e13f2",
input_schema=IfInputMatchesBlock.Input,
output_schema=IfInputMatchesBlock.Output,
description="Handles conditional logic based on comparison operators",
categories={BlockCategory.LOGIC},
test_input=[
{
"input": 10,
"value": 10,
"yes_value": "Greater",
"no_value": "Not greater",
},
{
"input": 10,
"value": 20,
"yes_value": "Greater",
"no_value": "Not greater",
},
{
"input": 10,
"value": None,
"yes_value": "Yes",
"no_value": "No",
},
],
test_output=[
("result", True),
("yes_output", "Greater"),
("result", False),
("no_output", "Not greater"),
("result", False),
("no_output", "No"),
# ("result", True),
# ("yes_output", "Yes"),
],
)
def run(self, input_data: Input, **kwargs) -> BlockOutput:
if input_data.input == input_data.value or input_data.input is input_data.value:
yield "result", True
yield "yes_output", input_data.yes_value
else:
yield "result", False
yield "no_output", input_data.no_value

View File

@@ -188,270 +188,3 @@ class CodeExecutionBlock(Block):
yield "stderr_logs", stderr_logs
except Exception as e:
yield "error", str(e)
class InstantiationBlock(Block):
class Input(BlockSchema):
credentials: CredentialsMetaInput[
Literal[ProviderName.E2B], Literal["api_key"]
] = CredentialsField(
description="Enter your api key for the E2B Sandbox. You can get it in here - https://e2b.dev/docs",
)
# Todo : Option to run commond in background
setup_commands: list[str] = SchemaField(
description=(
"Shell commands to set up the sandbox before running the code. "
"You can use `curl` or `git` to install your desired Debian based "
"package manager. `pip` and `npm` are pre-installed.\n\n"
"These commands are executed with `sh`, in the foreground."
),
placeholder="pip install cowsay",
default=[],
advanced=False,
)
setup_code: str = SchemaField(
description="Code to execute in the sandbox",
placeholder="print('Hello, World!')",
default="",
advanced=False,
)
language: ProgrammingLanguage = SchemaField(
description="Programming language to execute",
default=ProgrammingLanguage.PYTHON,
advanced=False,
)
timeout: int = SchemaField(
description="Execution timeout in seconds", default=300
)
template_id: str = SchemaField(
description=(
"You can use an E2B sandbox template by entering its ID here. "
"Check out the E2B docs for more details: "
"[E2B - Sandbox template](https://e2b.dev/docs/sandbox-template)"
),
default="",
advanced=True,
)
class Output(BlockSchema):
sandbox_id: str = SchemaField(description="ID of the sandbox instance")
response: str = SchemaField(description="Response from code execution")
stdout_logs: str = SchemaField(
description="Standard output logs from execution"
)
stderr_logs: str = SchemaField(description="Standard error logs from execution")
error: str = SchemaField(description="Error message if execution failed")
def __init__(self):
super().__init__(
id="ff0861c9-1726-4aec-9e5b-bf53f3622112",
description="Instantiate an isolated sandbox environment with internet access where to execute code in.",
categories={BlockCategory.DEVELOPER_TOOLS},
input_schema=InstantiationBlock.Input,
output_schema=InstantiationBlock.Output,
test_credentials=TEST_CREDENTIALS,
test_input={
"credentials": TEST_CREDENTIALS_INPUT,
"setup_code": "print('Hello World')",
"language": ProgrammingLanguage.PYTHON.value,
"setup_commands": [],
"timeout": 300,
"template_id": "",
},
test_output=[
("sandbox_id", str),
("response", "Hello World"),
("stdout_logs", "Hello World\n"),
],
test_mock={
"execute_code": lambda setup_code, language, setup_commands, timeout, api_key, template_id: (
"sandbox_id",
"Hello World",
"Hello World\n",
"",
),
},
)
def run(
self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs
) -> BlockOutput:
try:
sandbox_id, response, stdout_logs, stderr_logs = self.execute_code(
input_data.setup_code,
input_data.language,
input_data.setup_commands,
input_data.timeout,
credentials.api_key.get_secret_value(),
input_data.template_id,
)
if sandbox_id:
yield "sandbox_id", sandbox_id
else:
yield "error", "Sandbox ID not found"
if response:
yield "response", response
if stdout_logs:
yield "stdout_logs", stdout_logs
if stderr_logs:
yield "stderr_logs", stderr_logs
except Exception as e:
yield "error", str(e)
def execute_code(
self,
code: str,
language: ProgrammingLanguage,
setup_commands: list[str],
timeout: int,
api_key: str,
template_id: str,
):
try:
sandbox = None
if template_id:
sandbox = Sandbox(
template=template_id, api_key=api_key, timeout=timeout
)
else:
sandbox = Sandbox(api_key=api_key, timeout=timeout)
if not sandbox:
raise Exception("Sandbox not created")
# Running setup commands
for cmd in setup_commands:
sandbox.commands.run(cmd)
# Executing the code
execution = sandbox.run_code(
code,
language=language.value,
on_error=lambda e: sandbox.kill(), # Kill the sandbox if there is an error
)
if execution.error:
raise Exception(execution.error)
response = execution.text
stdout_logs = "".join(execution.logs.stdout)
stderr_logs = "".join(execution.logs.stderr)
return sandbox.sandbox_id, response, stdout_logs, stderr_logs
except Exception as e:
raise e
class StepExecutionBlock(Block):
class Input(BlockSchema):
credentials: CredentialsMetaInput[
Literal[ProviderName.E2B], Literal["api_key"]
] = CredentialsField(
description="Enter your api key for the E2B Sandbox. You can get it in here - https://e2b.dev/docs",
)
sandbox_id: str = SchemaField(
description="ID of the sandbox instance to execute the code in",
advanced=False,
)
step_code: str = SchemaField(
description="Code to execute in the sandbox",
placeholder="print('Hello, World!')",
default="",
advanced=False,
)
language: ProgrammingLanguage = SchemaField(
description="Programming language to execute",
default=ProgrammingLanguage.PYTHON,
advanced=False,
)
class Output(BlockSchema):
response: str = SchemaField(description="Response from code execution")
stdout_logs: str = SchemaField(
description="Standard output logs from execution"
)
stderr_logs: str = SchemaField(description="Standard error logs from execution")
error: str = SchemaField(description="Error message if execution failed")
def __init__(self):
super().__init__(
id="82b59b8e-ea10-4d57-9161-8b169b0adba6",
description="Execute code in a previously instantiated sandbox environment.",
categories={BlockCategory.DEVELOPER_TOOLS},
input_schema=StepExecutionBlock.Input,
output_schema=StepExecutionBlock.Output,
test_credentials=TEST_CREDENTIALS,
test_input={
"credentials": TEST_CREDENTIALS_INPUT,
"sandbox_id": "sandbox_id",
"step_code": "print('Hello World')",
"language": ProgrammingLanguage.PYTHON.value,
},
test_output=[
("response", "Hello World"),
("stdout_logs", "Hello World\n"),
],
test_mock={
"execute_step_code": lambda sandbox_id, step_code, language, api_key: (
"Hello World",
"Hello World\n",
"",
),
},
)
def execute_step_code(
self,
sandbox_id: str,
code: str,
language: ProgrammingLanguage,
api_key: str,
):
try:
sandbox = Sandbox.connect(sandbox_id=sandbox_id, api_key=api_key)
if not sandbox:
raise Exception("Sandbox not found")
# Executing the code
execution = sandbox.run_code(code, language=language.value)
if execution.error:
raise Exception(execution.error)
response = execution.text
stdout_logs = "".join(execution.logs.stdout)
stderr_logs = "".join(execution.logs.stderr)
return response, stdout_logs, stderr_logs
except Exception as e:
raise e
def run(
self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs
) -> BlockOutput:
try:
response, stdout_logs, stderr_logs = self.execute_step_code(
input_data.sandbox_id,
input_data.step_code,
input_data.language,
credentials.api_key.get_secret_value(),
)
if response:
yield "response", response
if stdout_logs:
yield "stdout_logs", stdout_logs
if stderr_logs:
yield "stderr_logs", stderr_logs
except Exception as e:
yield "error", str(e)

View File

@@ -1,53 +1,22 @@
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from typing import Literal

from pydantic import BaseModel, ConfigDict, SecretStr
from pydantic import BaseModel, ConfigDict

from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import (
    CredentialsField,
    CredentialsMetaInput,
    SchemaField,
    UserPasswordCredentials,
)
from backend.integrations.providers import ProviderName

TEST_CREDENTIALS = UserPasswordCredentials(
    id="01234567-89ab-cdef-0123-456789abcdef",
    provider="smtp",
    username=SecretStr("mock-smtp-username"),
    password=SecretStr("mock-smtp-password"),
    title="Mock SMTP credentials",
)
TEST_CREDENTIALS_INPUT = {
    "provider": TEST_CREDENTIALS.provider,
    "id": TEST_CREDENTIALS.id,
    "type": TEST_CREDENTIALS.type,
    "title": TEST_CREDENTIALS.title,
}

SMTPCredentials = UserPasswordCredentials
SMTPCredentialsInput = CredentialsMetaInput[
    Literal[ProviderName.SMTP],
    Literal["user_password"],
]
from backend.data.model import BlockSecret, SchemaField, SecretField


def SMTPCredentialsField() -> SMTPCredentialsInput:
    return CredentialsField(
        description="The SMTP integration requires a username and password.",
    )


class SMTPConfig(BaseModel):
class EmailCredentials(BaseModel):
    smtp_server: str = SchemaField(
        default="smtp.example.com", description="SMTP server address"
        default="smtp.gmail.com", description="SMTP server address"
    )
    smtp_port: int = SchemaField(default=25, description="SMTP port number")
    smtp_username: BlockSecret = SecretField(key="smtp_username")
    smtp_password: BlockSecret = SecretField(key="smtp_password")

    model_config = ConfigDict(title="SMTP Config")
    model_config = ConfigDict(title="Email Credentials")


class SendEmailBlock(Block):
@@ -61,11 +30,10 @@ class SendEmailBlock(Block):
        body: str = SchemaField(
            description="Body of the email", placeholder="Enter the email body"
        )
        config: SMTPConfig = SchemaField(
            description="SMTP Config",
            default=SMTPConfig(),
        creds: EmailCredentials = SchemaField(
            description="SMTP credentials",
            default=EmailCredentials(),
        )
        credentials: SMTPCredentialsInput = SMTPCredentialsField()

    class Output(BlockSchema):
        status: str = SchemaField(description="Status of the email sending operation")
@@ -75,6 +43,7 @@ class SendEmailBlock(Block):
    def __init__(self):
        super().__init__(
            disabled=True,
            id="4335878a-394e-4e67-adf2-919877ff49ae",
            description="This block sends an email using the provided SMTP credentials.",
            categories={BlockCategory.OUTPUT},
@@ -84,29 +53,25 @@ class SendEmailBlock(Block):
                "to_email": "recipient@example.com",
                "subject": "Test Email",
                "body": "This is a test email.",
                "config": {
                "creds": {
                    "smtp_server": "smtp.gmail.com",
                    "smtp_port": 25,
                    "smtp_username": "your-email@gmail.com",
                    "smtp_password": "your-gmail-password",
                },
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[("status", "Email sent successfully")],
            test_mock={"send_email": lambda *args, **kwargs: "Email sent successfully"},
        )

    @staticmethod
    def send_email(
        config: SMTPConfig,
        to_email: str,
        subject: str,
        body: str,
        credentials: SMTPCredentials,
        creds: EmailCredentials, to_email: str, subject: str, body: str
    ) -> str:
        smtp_server = config.smtp_server
        smtp_port = config.smtp_port
        smtp_username = credentials.username.get_secret_value()
        smtp_password = credentials.password.get_secret_value()
        smtp_server = creds.smtp_server
        smtp_port = creds.smtp_port
        smtp_username = creds.smtp_username.get_secret_value()
        smtp_password = creds.smtp_password.get_secret_value()

        msg = MIMEMultipart()
        msg["From"] = smtp_username
@@ -121,13 +86,10 @@ class SendEmailBlock(Block):
        return "Email sent successfully"

    def run(
        self, input_data: Input, *, credentials: SMTPCredentials, **kwargs
    ) -> BlockOutput:
    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        yield "status", self.send_email(
            config=input_data.config,
            to_email=input_data.to_email,
            subject=input_data.subject,
            body=input_data.body,
            credentials=credentials,
            input_data.creds,
            input_data.to_email,
            input_data.subject,
            input_data.body,
        )
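For reference, the sending path the block wraps is plain stdlib `smtplib`. A minimal sketch with illustrative names (the real block pulls these values from its credentials model, and whether it upgrades the session to TLS is not shown in this hunk):

```python
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText


def send_plain_email(
    server: str, port: int, username: str, password: str,
    to_email: str, subject: str, body: str,
) -> None:
    # Assemble a simple text message.
    msg = MIMEMultipart()
    msg["From"] = username
    msg["To"] = to_email
    msg["Subject"] = subject
    msg.attach(MIMEText(body, "plain"))

    # Open a session, upgrade to TLS, authenticate, send.
    with smtplib.SMTP(server, port) as smtp:
        smtp.starttls()
        smtp.login(username, password)
        smtp.send_message(msg)
```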

View File

@@ -1,4 +1,4 @@
from typing import List
from typing import List, Optional

from pydantic import BaseModel

@@ -13,12 +13,12 @@ from backend.util.request import requests

class ContentRetrievalSettings(BaseModel):
    text: dict = SchemaField(
    text: Optional[dict] = SchemaField(
        description="Text content settings",
        default={"maxCharacters": 1000, "includeHtmlTags": False},
        advanced=True,
    )
    highlights: dict = SchemaField(
    highlights: Optional[dict] = SchemaField(
        description="Highlight settings",
        default={
            "numSentences": 3,
@@ -27,7 +27,7 @@ class ContentRetrievalSettings(BaseModel):
        },
        advanced=True,
    )
    summary: dict = SchemaField(
    summary: Optional[dict] = SchemaField(
        description="Summary settings",
        default={"query": ""},
        advanced=True,
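The practical effect of the `Optional[dict]` change is that callers can now pass `None` explicitly instead of being forced to supply a dict. A minimal plain-Pydantic sketch of the behaviour (field trimmed down for illustration):

```python
from typing import Optional

from pydantic import BaseModel


class Settings(BaseModel):
    # Optional[dict] accepts an explicit None as well as the default dict.
    text: Optional[dict] = {"maxCharacters": 1000, "includeHtmlTags": False}


print(Settings().text)           # {'maxCharacters': 1000, 'includeHtmlTags': False}
print(Settings(text=None).text)  # None -- valid only once the field is Optional
```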

View File

@@ -1,9 +1,6 @@
from urllib.parse import urlparse
from backend.blocks.github._auth import (
GithubCredentials,
GithubFineGrainedAPICredentials,
)
from backend.blocks.github._auth import GithubCredentials
from backend.util.request import Requests
@@ -33,15 +30,12 @@ def _convert_to_api_url(url: str) -> str:
def _get_headers(credentials: GithubCredentials) -> dict[str, str]:
return {
"Authorization": credentials.auth_header(),
"Authorization": credentials.bearer(),
"Accept": "application/vnd.github.v3+json",
}
def get_api(
credentials: GithubCredentials | GithubFineGrainedAPICredentials,
convert_urls: bool = True,
) -> Requests:
def get_api(credentials: GithubCredentials, convert_urls: bool = True) -> Requests:
return Requests(
trusted_origins=["https://api.github.com", "https://github.com"],
extra_url_validator=_convert_to_api_url if convert_urls else None,
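The hunk shows only the callers of _convert_to_api_url; as a hedged sketch, a converter like this typically rewrites browser URLs onto the REST origin (the body below is an assumption, not the repo's implementation):
from urllib.parse import urlparse

def convert_to_api_url_sketch(url: str) -> str:
    # Assumed behavior: map https://github.com/owner/repo/... onto
    # https://api.github.com/repos/owner/repo/... and leave other origins alone.
    parsed = urlparse(url)
    if parsed.netloc == "github.com":
        return f"https://api.github.com/repos{parsed.path.rstrip('/')}"
    return url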

View File

@@ -22,11 +22,6 @@ GithubCredentialsInput = CredentialsMetaInput[
Literal["api_key", "oauth2"] if GITHUB_OAUTH_IS_CONFIGURED else Literal["api_key"],
]
GithubFineGrainedAPICredentials = APIKeyCredentials
GithubFineGrainedAPICredentialsInput = CredentialsMetaInput[
Literal[ProviderName.GITHUB], Literal["api_key"]
]
def GithubCredentialsField(scope: str) -> GithubCredentialsInput:
"""
@@ -42,16 +37,6 @@ def GithubCredentialsField(scope: str) -> GithubCredentialsInput:
)
def GithubFineGrainedAPICredentialsField(
scope: str,
) -> GithubFineGrainedAPICredentialsInput:
return CredentialsField(
required_scopes={scope},
description="The GitHub integration can be used with OAuth, "
"or any API key with sufficient permissions for the blocks it is used on.",
)
TEST_CREDENTIALS = APIKeyCredentials(
id="01234567-89ab-cdef-0123-456789abcdef",
provider="github",
@@ -65,18 +50,3 @@ TEST_CREDENTIALS_INPUT = {
"type": TEST_CREDENTIALS.type,
"title": TEST_CREDENTIALS.type,
}
TEST_FINE_GRAINED_CREDENTIALS = APIKeyCredentials(
id="01234567-89ab-cdef-0123-456789abcdef",
provider="github",
api_key=SecretStr("mock-github-api-key"),
title="Mock GitHub API key",
expires_at=None,
)
TEST_FINE_GRAINED_CREDENTIALS_INPUT = {
"provider": TEST_FINE_GRAINED_CREDENTIALS.provider,
"id": TEST_FINE_GRAINED_CREDENTIALS.id,
"type": TEST_FINE_GRAINED_CREDENTIALS.type,
"title": TEST_FINE_GRAINED_CREDENTIALS.type,
}

View File

@@ -1,356 +0,0 @@
from enum import Enum
from typing import Optional
from pydantic import BaseModel
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
from ._api import get_api
from ._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
GithubCredentials,
GithubCredentialsField,
GithubCredentialsInput,
)
# queued, in_progress, completed, waiting, requested, pending
class ChecksStatus(Enum):
QUEUED = "queued"
IN_PROGRESS = "in_progress"
COMPLETED = "completed"
WAITING = "waiting"
REQUESTED = "requested"
PENDING = "pending"
class ChecksConclusion(Enum):
SUCCESS = "success"
FAILURE = "failure"
NEUTRAL = "neutral"
CANCELLED = "cancelled"
TIMED_OUT = "timed_out"
ACTION_REQUIRED = "action_required"
SKIPPED = "skipped"
class GithubCreateCheckRunBlock(Block):
"""Block for creating a new check run on a GitHub repository."""
class Input(BlockSchema):
credentials: GithubCredentialsInput = GithubCredentialsField("repo:status")
repo_url: str = SchemaField(
description="URL of the GitHub repository",
placeholder="https://github.com/owner/repo",
)
name: str = SchemaField(
description="The name of the check run (e.g., 'code-coverage')",
)
head_sha: str = SchemaField(
description="The SHA of the commit to check",
)
status: ChecksStatus = SchemaField(
description="Current status of the check run",
default=ChecksStatus.QUEUED,
)
conclusion: Optional[ChecksConclusion] = SchemaField(
description="The final conclusion of the check (required if status is completed)",
default=None,
)
details_url: str = SchemaField(
description="The URL for the full details of the check",
default="",
)
output_title: str = SchemaField(
description="Title of the check run output",
default="",
)
output_summary: str = SchemaField(
description="Summary of the check run output",
default="",
)
output_text: str = SchemaField(
description="Detailed text of the check run output",
default="",
)
class Output(BlockSchema):
class CheckRunResult(BaseModel):
id: int
html_url: str
status: str
check_run: CheckRunResult = SchemaField(
description="Details of the created check run"
)
error: str = SchemaField(
description="Error message if check run creation failed"
)
def __init__(self):
super().__init__(
id="2f45e89a-3b7d-4f22-b89e-6c4f5c7e1234",
description="Creates a new check run for a specific commit in a GitHub repository",
categories={BlockCategory.DEVELOPER_TOOLS},
input_schema=GithubCreateCheckRunBlock.Input,
output_schema=GithubCreateCheckRunBlock.Output,
test_input={
"repo_url": "https://github.com/owner/repo",
"name": "test-check",
"head_sha": "ce587453ced02b1526dfb4cb910479d431683101",
"status": ChecksStatus.COMPLETED.value,
"conclusion": ChecksConclusion.SUCCESS.value,
"output_title": "Test Results",
"output_summary": "All tests passed",
"credentials": TEST_CREDENTIALS_INPUT,
},
# requires a github app not available to oauth in our current system
disabled=True,
test_credentials=TEST_CREDENTIALS,
test_output=[
(
"check_run",
{
"id": 4,
"html_url": "https://github.com/owner/repo/runs/4",
"status": "completed",
},
),
],
test_mock={
"create_check_run": lambda *args, **kwargs: {
"id": 4,
"html_url": "https://github.com/owner/repo/runs/4",
"status": "completed",
}
},
)
@staticmethod
def create_check_run(
credentials: GithubCredentials,
repo_url: str,
name: str,
head_sha: str,
status: ChecksStatus,
conclusion: Optional[ChecksConclusion] = None,
details_url: Optional[str] = None,
output_title: Optional[str] = None,
output_summary: Optional[str] = None,
output_text: Optional[str] = None,
) -> dict:
api = get_api(credentials)
class CheckRunData(BaseModel):
name: str
head_sha: str
status: str
conclusion: Optional[str] = None
details_url: Optional[str] = None
output: Optional[dict[str, str]] = None
data = CheckRunData(
name=name,
head_sha=head_sha,
status=status.value,
)
if conclusion:
data.conclusion = conclusion.value
if details_url:
data.details_url = details_url
if output_title or output_summary or output_text:
output_data = {
"title": output_title or "",
"summary": output_summary or "",
"text": output_text or "",
}
data.output = output_data
check_runs_url = f"{repo_url}/check-runs"
response = api.post(check_runs_url, json=data)
result = response.json()
return {
"id": result["id"],
"html_url": result["html_url"],
"status": result["status"],
}
def run(
self,
input_data: Input,
*,
credentials: GithubCredentials,
**kwargs,
) -> BlockOutput:
try:
result = self.create_check_run(
credentials=credentials,
repo_url=input_data.repo_url,
name=input_data.name,
head_sha=input_data.head_sha,
status=input_data.status,
conclusion=input_data.conclusion,
details_url=input_data.details_url,
output_title=input_data.output_title,
output_summary=input_data.output_summary,
output_text=input_data.output_text,
)
yield "check_run", result
except Exception as e:
yield "error", str(e)
class GithubUpdateCheckRunBlock(Block):
"""Block for updating an existing check run on a GitHub repository."""
class Input(BlockSchema):
credentials: GithubCredentialsInput = GithubCredentialsField("repo:status")
repo_url: str = SchemaField(
description="URL of the GitHub repository",
placeholder="https://github.com/owner/repo",
)
check_run_id: int = SchemaField(
description="The ID of the check run to update",
)
status: ChecksStatus = SchemaField(
description="New status of the check run",
)
conclusion: ChecksConclusion = SchemaField(
description="The final conclusion of the check (required if status is completed)",
)
output_title: Optional[str] = SchemaField(
description="New title of the check run output",
default=None,
)
output_summary: Optional[str] = SchemaField(
description="New summary of the check run output",
default=None,
)
output_text: Optional[str] = SchemaField(
description="New detailed text of the check run output",
default=None,
)
class Output(BlockSchema):
class CheckRunResult(BaseModel):
id: int
html_url: str
status: str
conclusion: Optional[str]
check_run: CheckRunResult = SchemaField(
description="Details of the updated check run"
)
error: str = SchemaField(description="Error message if check run update failed")
def __init__(self):
super().__init__(
id="8a23c567-9d01-4e56-b789-0c12d3e45678", # Generated UUID
description="Updates an existing check run in a GitHub repository",
categories={BlockCategory.DEVELOPER_TOOLS},
input_schema=GithubUpdateCheckRunBlock.Input,
output_schema=GithubUpdateCheckRunBlock.Output,
# requires a github app not available to oauth in our current system
disabled=True,
test_input={
"repo_url": "https://github.com/owner/repo",
"check_run_id": 4,
"status": ChecksStatus.COMPLETED.value,
"conclusion": ChecksConclusion.SUCCESS.value,
"output_title": "Updated Results",
"output_summary": "All tests passed after retry",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
(
"check_run",
{
"id": 4,
"html_url": "https://github.com/owner/repo/runs/4",
"status": "completed",
"conclusion": "success",
},
),
],
test_mock={
"update_check_run": lambda *args, **kwargs: {
"id": 4,
"html_url": "https://github.com/owner/repo/runs/4",
"status": "completed",
"conclusion": "success",
}
},
)
@staticmethod
def update_check_run(
credentials: GithubCredentials,
repo_url: str,
check_run_id: int,
status: ChecksStatus,
conclusion: Optional[ChecksConclusion] = None,
output_title: Optional[str] = None,
output_summary: Optional[str] = None,
output_text: Optional[str] = None,
) -> dict:
api = get_api(credentials)
class UpdateCheckRunData(BaseModel):
status: str
conclusion: Optional[str] = None
output: Optional[dict[str, str]] = None
data = UpdateCheckRunData(
status=status.value,
)
if conclusion:
data.conclusion = conclusion.value
if output_title or output_summary or output_text:
output_data = {
"title": output_title or "",
"summary": output_summary or "",
"text": output_text or "",
}
data.output = output_data
check_run_url = f"{repo_url}/check-runs/{check_run_id}"
response = api.patch(check_run_url, json=data)
result = response.json()
return {
"id": result["id"],
"html_url": result["html_url"],
"status": result["status"],
"conclusion": result.get("conclusion"),
}
def run(
self,
input_data: Input,
*,
credentials: GithubCredentials,
**kwargs,
) -> BlockOutput:
try:
result = self.update_check_run(
credentials=credentials,
repo_url=input_data.repo_url,
check_run_id=input_data.check_run_id,
status=input_data.status,
conclusion=input_data.conclusion,
output_title=input_data.output_title,
output_summary=input_data.output_summary,
output_text=input_data.output_text,
)
yield "check_run", result
except Exception as e:
yield "error", str(e)

View File

@@ -200,7 +200,6 @@ class GithubReadPullRequestBlock(Block):
include_pr_changes: bool = SchemaField(
description="Whether to include the changes made in the pull request",
default=False,
advanced=False,
)
class Output(BlockSchema):

View File

@@ -1,180 +0,0 @@
from enum import Enum
from typing import Optional
from pydantic import BaseModel
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
from ._api import get_api
from ._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
GithubFineGrainedAPICredentials,
GithubFineGrainedAPICredentialsField,
GithubFineGrainedAPICredentialsInput,
)
class StatusState(Enum):
ERROR = "error"
FAILURE = "failure"
PENDING = "pending"
SUCCESS = "success"
class GithubCreateStatusBlock(Block):
"""Block for creating a commit status on a GitHub repository."""
class Input(BlockSchema):
credentials: GithubFineGrainedAPICredentialsInput = (
GithubFineGrainedAPICredentialsField("repo:status")
)
repo_url: str = SchemaField(
description="URL of the GitHub repository",
placeholder="https://github.com/owner/repo",
)
sha: str = SchemaField(
description="The SHA of the commit to set status for",
)
state: StatusState = SchemaField(
description="The state of the status (error, failure, pending, success)",
)
target_url: Optional[str] = SchemaField(
description="URL with additional details about this status",
default=None,
)
description: Optional[str] = SchemaField(
description="Short description of the status",
default=None,
)
check_name: Optional[str] = SchemaField(
description="Label to differentiate this status from others",
default="AutoGPT Platform Checks",
advanced=False,
)
class Output(BlockSchema):
class StatusResult(BaseModel):
id: int
url: str
state: str
context: str
description: Optional[str]
target_url: Optional[str]
created_at: str
updated_at: str
status: StatusResult = SchemaField(description="Details of the created status")
error: str = SchemaField(description="Error message if status creation failed")
def __init__(self):
super().__init__(
id="3d67f123-a4b5-4c89-9d01-2e34f5c67890", # Generated UUID
description="Creates a new commit status in a GitHub repository",
categories={BlockCategory.DEVELOPER_TOOLS},
input_schema=GithubCreateStatusBlock.Input,
output_schema=GithubCreateStatusBlock.Output,
test_input={
"repo_url": "https://github.com/owner/repo",
"sha": "ce587453ced02b1526dfb4cb910479d431683101",
"state": StatusState.SUCCESS.value,
"target_url": "https://example.com/build/status",
"description": "The build succeeded!",
"check_name": "continuous-integration/jenkins",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
(
"status",
{
"id": 1234567890,
"url": "https://api.github.com/repos/owner/repo/statuses/ce587453ced02b1526dfb4cb910479d431683101",
"state": "success",
"context": "continuous-integration/jenkins",
"description": "The build succeeded!",
"target_url": "https://example.com/build/status",
"created_at": "2024-01-21T10:00:00Z",
"updated_at": "2024-01-21T10:00:00Z",
},
),
],
test_mock={
"create_status": lambda *args, **kwargs: {
"id": 1234567890,
"url": "https://api.github.com/repos/owner/repo/statuses/ce587453ced02b1526dfb4cb910479d431683101",
"state": "success",
"context": "continuous-integration/jenkins",
"description": "The build succeeded!",
"target_url": "https://example.com/build/status",
"created_at": "2024-01-21T10:00:00Z",
"updated_at": "2024-01-21T10:00:00Z",
}
},
)
@staticmethod
def create_status(
credentials: GithubFineGrainedAPICredentials,
repo_url: str,
sha: str,
state: StatusState,
target_url: Optional[str] = None,
description: Optional[str] = None,
context: str = "default",
) -> dict:
api = get_api(credentials)
class StatusData(BaseModel):
state: str
target_url: Optional[str] = None
description: Optional[str] = None
context: str
data = StatusData(
state=state.value,
context=context,
)
if target_url:
data.target_url = target_url
if description:
data.description = description
status_url = f"{repo_url}/statuses/{sha}"
response = api.post(status_url, json=data)
result = response.json()
return {
"id": result["id"],
"url": result["url"],
"state": result["state"],
"context": result["context"],
"description": result.get("description"),
"target_url": result.get("target_url"),
"created_at": result["created_at"],
"updated_at": result["updated_at"],
}
def run(
self,
input_data: Input,
*,
credentials: GithubFineGrainedAPICredentials,
**kwargs,
) -> BlockOutput:
try:
result = self.create_status(
credentials=credentials,
repo_url=input_data.repo_url,
sha=input_data.sha,
state=input_data.state,
target_url=input_data.target_url,
description=input_data.description,
context=input_data.check_name or "AutoGPT Platform Checks",
)
yield "status", result
except Exception as e:
yield "error", str(e)

View File

@@ -151,7 +151,7 @@ class IdeogramModelBlock(Block):
super().__init__(
id="6ab085e2-20b3-4055-bc3e-08036e01eca6",
description="This block runs Ideogram models with both simple and advanced settings.",
categories={BlockCategory.AI, BlockCategory.MULTIMEDIA},
categories={BlockCategory.AI},
input_schema=IdeogramModelBlock.Input,
output_schema=IdeogramModelBlock.Output,
test_input={

View File

@@ -1,272 +0,0 @@
from __future__ import annotations
import json
from typing import Any, Dict, Optional
from backend.blocks.linear._auth import LinearCredentials
from backend.blocks.linear.models import (
CreateCommentResponse,
CreateIssueResponse,
Issue,
Project,
)
from backend.util.request import Requests
class LinearAPIException(Exception):
def __init__(self, message: str, status_code: int):
super().__init__(message)
self.status_code = status_code
class LinearClient:
"""Client for the Linear API
If you're looking for the schema: https://studio.apollographql.com/public/Linear-API/variant/current/schema
"""
API_URL = "https://api.linear.app/graphql"
def __init__(
self,
credentials: LinearCredentials | None = None,
custom_requests: Optional[Requests] = None,
):
if custom_requests:
self._requests = custom_requests
else:
headers: Dict[str, str] = {
"Content-Type": "application/json",
}
if credentials:
headers["Authorization"] = credentials.auth_header()
self._requests = Requests(
extra_headers=headers,
trusted_origins=["https://api.linear.app"],
raise_for_status=False,
)
def _execute_graphql_request(
self, query: str, variables: dict | None = None
) -> Any:
"""
Executes a GraphQL request against the Linear API and returns the response data.
Args:
query: The GraphQL query string.
variables (optional): Any GraphQL query variables
Returns:
The parsed JSON response data, or raises a LinearAPIException on error.
"""
payload: Dict[str, Any] = {"query": query}
if variables:
payload["variables"] = variables
response = self._requests.post(self.API_URL, json=payload)
if not response.ok:
try:
error_data = response.json()
error_message = error_data.get("errors", [{}])[0].get("message", "")
except json.JSONDecodeError:
error_message = response.text
raise LinearAPIException(
f"Linear API request failed ({response.status_code}): {error_message}",
response.status_code,
)
response_data = response.json()
if "errors" in response_data:
error_messages = [
error.get("message", "") for error in response_data["errors"]
]
raise LinearAPIException(
f"Linear API returned errors: {', '.join(error_messages)}",
response.status_code,
)
return response_data["data"]
def query(self, query: str, variables: Optional[dict] = None) -> dict:
"""Executes a GraphQL query.
Args:
query: The GraphQL query string.
variables: Query variables, if any.
Returns:
The response data.
"""
return self._execute_graphql_request(query, variables)
def mutate(self, mutation: str, variables: Optional[dict] = None) -> dict:
"""Executes a GraphQL mutation.
Args:
mutation: The GraphQL mutation string.
variables: Query variables, if any.
Returns:
The response data.
"""
return self._execute_graphql_request(mutation, variables)
def try_create_comment(self, issue_id: str, comment: str) -> CreateCommentResponse:
try:
mutation = """
mutation CommentCreate($input: CommentCreateInput!) {
commentCreate(input: $input) {
success
comment {
id
body
}
}
}
"""
variables = {
"input": {
"body": comment,
"issueId": issue_id,
}
}
added_comment = self.mutate(mutation, variables)
# Select the commentCreate field from the mutation response
return CreateCommentResponse(**added_comment["commentCreate"])
except LinearAPIException as e:
raise e
def try_get_team_by_name(self, team_name: str) -> str:
try:
query = """
query GetTeamId($searchTerm: String!) {
teams(filter: {
or: [
{ name: { eqIgnoreCase: $searchTerm } },
{ key: { eqIgnoreCase: $searchTerm } }
]
}) {
nodes {
id
name
key
}
}
}
"""
variables: dict[str, Any] = {
"searchTerm": team_name,
}
team_id = self.query(query, variables)
return team_id["teams"]["nodes"][0]["id"]
except LinearAPIException as e:
raise e
def try_create_issue(
self,
team_id: str,
title: str,
description: str | None = None,
priority: int | None = None,
project_id: str | None = None,
) -> CreateIssueResponse:
try:
mutation = """
mutation IssueCreate($input: IssueCreateInput!) {
issueCreate(input: $input) {
issue {
title
description
id
identifier
priority
}
}
}
"""
variables: dict[str, Any] = {
"input": {
"teamId": team_id,
"title": title,
}
}
if project_id:
variables["input"]["projectId"] = project_id
if description:
variables["input"]["description"] = description
if priority:
variables["input"]["priority"] = priority
added_issue = self.mutate(mutation, variables)
return CreateIssueResponse(**added_issue["issueCreate"])
except LinearAPIException as e:
raise e
def try_search_projects(self, term: str) -> list[Project]:
try:
query = """
query SearchProjects($term: String!, $includeComments: Boolean!) {
searchProjects(term: $term, includeComments: $includeComments) {
nodes {
id
name
description
priority
progress
content
}
}
}
"""
variables: dict[str, Any] = {
"term": term,
"includeComments": True,
}
projects = self.query(query, variables)
return [
Project(**project) for project in projects["searchProjects"]["nodes"]
]
except LinearAPIException as e:
raise e
def try_search_issues(self, term: str) -> list[Issue]:
try:
query = """
query SearchIssues($term: String!, $includeComments: Boolean!) {
searchIssues(term: $term, includeComments: $includeComments) {
nodes {
id
identifier
title
description
priority
}
}
}
"""
variables: dict[str, Any] = {
"term": term,
"includeComments": True,
}
issues = self.query(query, variables)
return [Issue(**issue) for issue in issues["searchIssues"]["nodes"]]
except LinearAPIException as e:
raise e
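A short usage sketch of the client above, chaining the team lookup into issue creation; only methods defined in this file are used:
from backend.blocks.linear._api import LinearClient
from backend.blocks.linear._auth import LinearCredentials

def file_issue_sketch(creds: LinearCredentials, team: str, title: str) -> str:
    # Resolve the team by name or key, then create a bare issue on it.
    client = LinearClient(credentials=creds)
    team_id = client.try_get_team_by_name(team_name=team)
    response = client.try_create_issue(team_id=team_id, title=title)
    return response.issue.identifier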

View File

@@ -1,101 +0,0 @@
from enum import Enum
from typing import Literal
from pydantic import SecretStr
from backend.data.model import (
APIKeyCredentials,
CredentialsField,
CredentialsMetaInput,
OAuth2Credentials,
)
from backend.integrations.providers import ProviderName
from backend.util.settings import Secrets
secrets = Secrets()
LINEAR_OAUTH_IS_CONFIGURED = bool(
secrets.linear_client_id and secrets.linear_client_secret
)
LinearCredentials = OAuth2Credentials | APIKeyCredentials
# LinearCredentialsInput = CredentialsMetaInput[
# Literal[ProviderName.LINEAR],
# Literal["oauth2", "api_key"] if LINEAR_OAUTH_IS_CONFIGURED else Literal["oauth2"],
# ]
LinearCredentialsInput = CredentialsMetaInput[
Literal[ProviderName.LINEAR], Literal["oauth2"]
]
# (required) Comma separated list of scopes:
# read - (Default) Read access for the user's account. This scope will always be present.
# write - Write access for the user's account. If your application only needs to create comments, use a more targeted scope
# issues:create - Allows creating new issues and their attachments
# comments:create - Allows creating new issue comments
# timeSchedule:write - Allows creating and modifying time schedules
# admin - Full access to admin level endpoints. You should never ask for this permission unless it's absolutely needed
class LinearScope(str, Enum):
READ = "read"
WRITE = "write"
ISSUES_CREATE = "issues:create"
COMMENTS_CREATE = "comments:create"
TIME_SCHEDULE_WRITE = "timeSchedule:write"
ADMIN = "admin"
def LinearCredentialsField(scopes: list[LinearScope]) -> LinearCredentialsInput:
"""
Creates a Linear credentials input on a block.
Params:
scopes: The authorization scopes needed for the block to work; the read scope (see the Linear scope list in the comment above) is always included.
""" # noqa
return CredentialsField(
required_scopes=set([LinearScope.READ.value]).union(
set([scope.value for scope in scopes])
),
description="The Linear integration can be used with OAuth, "
"or any API key with sufficient permissions for the blocks it is used on.",
)
TEST_CREDENTIALS_OAUTH = OAuth2Credentials(
id="01234567-89ab-cdef-0123-456789abcdef",
provider="linear",
title="Mock Linear API key",
username="mock-linear-username",
access_token=SecretStr("mock-linear-access-token"),
access_token_expires_at=None,
refresh_token=SecretStr("mock-linear-refresh-token"),
refresh_token_expires_at=None,
scopes=["mock-linear-scopes"],
)
TEST_CREDENTIALS_API_KEY = APIKeyCredentials(
id="01234567-89ab-cdef-0123-456789abcdef",
provider="linear",
title="Mock Linear API key",
api_key=SecretStr("mock-linear-api-key"),
expires_at=None,
)
TEST_CREDENTIALS_INPUT_OAUTH = {
"provider": TEST_CREDENTIALS_OAUTH.provider,
"id": TEST_CREDENTIALS_OAUTH.id,
"type": TEST_CREDENTIALS_OAUTH.type,
"title": TEST_CREDENTIALS_OAUTH.type,
}
TEST_CREDENTIALS_INPUT_API_KEY = {
"provider": TEST_CREDENTIALS_API_KEY.provider,
"id": TEST_CREDENTIALS_API_KEY.id,
"type": TEST_CREDENTIALS_API_KEY.type,
"title": TEST_CREDENTIALS_API_KEY.type,
}
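Note that LinearCredentialsField always unions the read scope into required_scopes, whatever the block asks for; a one-line illustration:
from backend.blocks.linear._auth import LinearCredentialsField, LinearScope

# required_scopes ends up as {"read", "issues:create"}: read is folded in
# even though only ISSUES_CREATE was requested.
field = LinearCredentialsField(scopes=[LinearScope.ISSUES_CREATE])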

View File

@@ -1,83 +0,0 @@
from backend.blocks.linear._api import LinearAPIException, LinearClient
from backend.blocks.linear._auth import (
LINEAR_OAUTH_IS_CONFIGURED,
TEST_CREDENTIALS_INPUT_OAUTH,
TEST_CREDENTIALS_OAUTH,
LinearCredentials,
LinearCredentialsField,
LinearCredentialsInput,
LinearScope,
)
from backend.blocks.linear.models import CreateCommentResponse
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class LinearCreateCommentBlock(Block):
"""Block for creating comments on Linear issues"""
class Input(BlockSchema):
credentials: LinearCredentialsInput = LinearCredentialsField(
scopes=[LinearScope.COMMENTS_CREATE],
)
issue_id: str = SchemaField(description="ID of the issue to comment on")
comment: str = SchemaField(description="Comment text to add to the issue")
class Output(BlockSchema):
comment_id: str = SchemaField(description="ID of the created comment")
comment_body: str = SchemaField(
description="Text content of the created comment"
)
error: str = SchemaField(description="Error message if comment creation failed")
def __init__(self):
super().__init__(
id="8f7d3a2e-9b5c-4c6a-8f1d-7c8b3e4a5d6c",
description="Creates a new comment on a Linear issue",
input_schema=self.Input,
output_schema=self.Output,
categories={BlockCategory.PRODUCTIVITY, BlockCategory.ISSUE_TRACKING},
test_input={
"issue_id": "TEST-123",
"comment": "Test comment",
"credentials": TEST_CREDENTIALS_INPUT_OAUTH,
},
disabled=not LINEAR_OAUTH_IS_CONFIGURED,
test_credentials=TEST_CREDENTIALS_OAUTH,
test_output=[("comment_id", "abc123"), ("comment_body", "Test comment")],
test_mock={
"create_comment": lambda *args, **kwargs: (
"abc123",
"Test comment",
)
},
)
@staticmethod
def create_comment(
credentials: LinearCredentials, issue_id: str, comment: str
) -> tuple[str, str]:
client = LinearClient(credentials=credentials)
response: CreateCommentResponse = client.try_create_comment(
issue_id=issue_id, comment=comment
)
return response.comment.id, response.comment.body
def run(
self, input_data: Input, *, credentials: LinearCredentials, **kwargs
) -> BlockOutput:
"""Execute the comment creation"""
try:
comment_id, comment_body = self.create_comment(
credentials=credentials,
issue_id=input_data.issue_id,
comment=input_data.comment,
)
yield "comment_id", comment_id
yield "comment_body", comment_body
except LinearAPIException as e:
yield "error", str(e)
except Exception as e:
yield "error", f"Unexpected error: {str(e)}"

View File

@@ -1,189 +0,0 @@
from backend.blocks.linear._api import LinearAPIException, LinearClient
from backend.blocks.linear._auth import (
LINEAR_OAUTH_IS_CONFIGURED,
TEST_CREDENTIALS_INPUT_OAUTH,
TEST_CREDENTIALS_OAUTH,
LinearCredentials,
LinearCredentialsField,
LinearCredentialsInput,
LinearScope,
)
from backend.blocks.linear.models import CreateIssueResponse, Issue
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class LinearCreateIssueBlock(Block):
"""Block for creating issues on Linear"""
class Input(BlockSchema):
credentials: LinearCredentialsInput = LinearCredentialsField(
scopes=[LinearScope.ISSUES_CREATE],
)
title: str = SchemaField(description="Title of the issue")
description: str | None = SchemaField(description="Description of the issue")
team_name: str = SchemaField(
description="Name of the team to create the issue on"
)
priority: int | None = SchemaField(
description="Priority of the issue",
default=None,
minimum=0,
maximum=4,
)
project_name: str | None = SchemaField(
description="Name of the project to create the issue on",
default=None,
)
class Output(BlockSchema):
issue_id: str = SchemaField(description="ID of the created issue")
issue_title: str = SchemaField(description="Title of the created issue")
error: str = SchemaField(description="Error message if issue creation failed")
def __init__(self):
super().__init__(
id="f9c68f55-dcca-40a8-8771-abf9601680aa",
description="Creates a new issue on Linear",
disabled=not LINEAR_OAUTH_IS_CONFIGURED,
input_schema=self.Input,
output_schema=self.Output,
categories={BlockCategory.PRODUCTIVITY, BlockCategory.ISSUE_TRACKING},
test_input={
"title": "Test issue",
"description": "Test description",
"team_name": "Test team",
"project_name": "Test project",
"credentials": TEST_CREDENTIALS_INPUT_OAUTH,
},
test_credentials=TEST_CREDENTIALS_OAUTH,
test_output=[("issue_id", "abc123"), ("issue_title", "Test issue")],
test_mock={
"create_issue": lambda *args, **kwargs: (
"abc123",
"Test issue",
)
},
)
@staticmethod
def create_issue(
credentials: LinearCredentials,
team_name: str,
title: str,
description: str | None = None,
priority: int | None = None,
project_name: str | None = None,
) -> tuple[str, str]:
client = LinearClient(credentials=credentials)
team_id = client.try_get_team_by_name(team_name=team_name)
project_id: str | None = None
if project_name:
projects = client.try_search_projects(term=project_name)
if projects:
project_id = projects[0].id
else:
raise LinearAPIException("Project not found", status_code=404)
response: CreateIssueResponse = client.try_create_issue(
team_id=team_id,
title=title,
description=description,
priority=priority,
project_id=project_id,
)
return response.issue.identifier, response.issue.title
def run(
self, input_data: Input, *, credentials: LinearCredentials, **kwargs
) -> BlockOutput:
"""Execute the issue creation"""
try:
issue_id, issue_title = self.create_issue(
credentials=credentials,
team_name=input_data.team_name,
title=input_data.title,
description=input_data.description,
priority=input_data.priority,
project_name=input_data.project_name,
)
yield "issue_id", issue_id
yield "issue_title", issue_title
except LinearAPIException as e:
yield "error", str(e)
except Exception as e:
yield "error", f"Unexpected error: {str(e)}"
class LinearSearchIssuesBlock(Block):
"""Block for searching issues on Linear"""
class Input(BlockSchema):
term: str = SchemaField(description="Term to search for issues")
credentials: LinearCredentialsInput = LinearCredentialsField(
scopes=[LinearScope.READ],
)
class Output(BlockSchema):
issues: list[Issue] = SchemaField(description="List of issues")
error: str = SchemaField(description="Error message if issue search failed")
def __init__(self):
super().__init__(
id="b5a2a0e6-26b4-4c5b-8a42-bc79e9cb65c2",
description="Searches for issues on Linear",
input_schema=self.Input,
output_schema=self.Output,
disabled=not LINEAR_OAUTH_IS_CONFIGURED,
test_input={
"term": "Test issue",
"credentials": TEST_CREDENTIALS_INPUT_OAUTH,
},
test_credentials=TEST_CREDENTIALS_OAUTH,
test_output=[
(
"issues",
[
Issue(
id="abc123",
identifier="abc123",
title="Test issue",
description="Test description",
priority=1,
)
],
)
],
test_mock={
"search_issues": lambda *args, **kwargs: [
Issue(
id="abc123",
identifier="abc123",
title="Test issue",
description="Test description",
priority=1,
)
]
},
)
@staticmethod
def search_issues(
credentials: LinearCredentials,
term: str,
) -> list[Issue]:
client = LinearClient(credentials=credentials)
response: list[Issue] = client.try_search_issues(term=term)
return response
def run(
self, input_data: Input, *, credentials: LinearCredentials, **kwargs
) -> BlockOutput:
"""Execute the issue search"""
try:
issues = self.search_issues(credentials=credentials, term=input_data.term)
yield "issues", issues
except LinearAPIException as e:
yield "error", str(e)
except Exception as e:
yield "error", f"Unexpected error: {str(e)}"

View File

@@ -1,41 +0,0 @@
from pydantic import BaseModel
class Comment(BaseModel):
id: str
body: str
class CreateCommentInput(BaseModel):
body: str
issueId: str
class CreateCommentResponse(BaseModel):
success: bool
comment: Comment
class CreateCommentResponseWrapper(BaseModel):
commentCreate: CreateCommentResponse
class Issue(BaseModel):
id: str
identifier: str
title: str
description: str | None
priority: int
class CreateIssueResponse(BaseModel):
issue: Issue
class Project(BaseModel):
id: str
name: str
description: str
priority: int
progress: int
content: str
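These models map one-to-one onto the GraphQL payloads in _api.py; for instance, an issueCreate result parses straight into CreateIssueResponse (the payload values below are made up):
from backend.blocks.linear.models import CreateIssueResponse

# Hypothetical payload shaped like the IssueCreate mutation result in _api.py.
payload = {
    "issue": {
        "id": "abc123",
        "identifier": "ENG-42",
        "title": "Fix login bug",
        "description": None,
        "priority": 2,
    }
}
parsed = CreateIssueResponse(**payload)
assert parsed.issue.identifier == "ENG-42"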

View File

@@ -1,95 +0,0 @@
from backend.blocks.linear._api import LinearAPIException, LinearClient
from backend.blocks.linear._auth import (
LINEAR_OAUTH_IS_CONFIGURED,
TEST_CREDENTIALS_INPUT_OAUTH,
TEST_CREDENTIALS_OAUTH,
LinearCredentials,
LinearCredentialsField,
LinearCredentialsInput,
LinearScope,
)
from backend.blocks.linear.models import Project
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class LinearSearchProjectsBlock(Block):
"""Block for searching projects on Linear"""
class Input(BlockSchema):
credentials: LinearCredentialsInput = LinearCredentialsField(
scopes=[LinearScope.READ],
)
term: str = SchemaField(description="Term to search for projects")
class Output(BlockSchema):
projects: list[Project] = SchemaField(description="List of projects")
error: str = SchemaField(description="Error message if project search failed")
def __init__(self):
super().__init__(
id="446a1d35-9d8f-4ac5-83ea-7684ec50e6af",
description="Searches for projects on Linear",
input_schema=self.Input,
output_schema=self.Output,
categories={BlockCategory.PRODUCTIVITY, BlockCategory.ISSUE_TRACKING},
test_input={
"term": "Test project",
"credentials": TEST_CREDENTIALS_INPUT_OAUTH,
},
disabled=not LINEAR_OAUTH_IS_CONFIGURED,
test_credentials=TEST_CREDENTIALS_OAUTH,
test_output=[
(
"projects",
[
Project(
id="abc123",
name="Test project",
description="Test description",
priority=1,
progress=1,
content="Test content",
)
],
)
],
test_mock={
"search_projects": lambda *args, **kwargs: [
Project(
id="abc123",
name="Test project",
description="Test description",
priority=1,
progress=1,
content="Test content",
)
]
},
)
@staticmethod
def search_projects(
credentials: LinearCredentials,
term: str,
) -> list[Project]:
client = LinearClient(credentials=credentials)
response: list[Project] = client.try_search_projects(term=term)
return response
def run(
self, input_data: Input, *, credentials: LinearCredentials, **kwargs
) -> BlockOutput:
"""Execute the project search"""
try:
projects = self.search_projects(
credentials=credentials,
term=input_data.term,
)
yield "projects", projects
except LinearAPIException as e:
yield "error", str(e)
except Exception as e:
yield "error", f"Unexpected error: {str(e)}"

View File

@@ -1,6 +1,5 @@
import ast
import logging
from abc import ABC
from enum import Enum, EnumMeta
from json import JSONDecodeError
from types import MappingProxyType
@@ -69,7 +68,6 @@ def AICredentialsField() -> AICredentials:
class ModelMetadata(NamedTuple):
provider: str
context_window: int
max_output_tokens: int | None
class LlmModelMeta(EnumMeta):
@@ -93,8 +91,6 @@ class LlmModelMeta(EnumMeta):
class LlmModel(str, Enum, metaclass=LlmModelMeta):
# OpenAI models
O3_MINI = "o3-mini"
O1 = "o1"
O1_PREVIEW = "o1-preview"
O1_MINI = "o1-mini"
GPT4O_MINI = "gpt-4o-mini"
@@ -103,31 +99,30 @@ class LlmModel(str, Enum, metaclass=LlmModelMeta):
GPT3_5_TURBO = "gpt-3.5-turbo"
# Anthropic models
CLAUDE_3_5_SONNET = "claude-3-5-sonnet-latest"
CLAUDE_3_5_HAIKU = "claude-3-5-haiku-latest"
CLAUDE_3_HAIKU = "claude-3-haiku-20240307"
# Groq models
GEMMA2_9B = "gemma2-9b-it"
LLAMA3_3_70B = "llama-3.3-70b-versatile"
LLAMA3_1_8B = "llama-3.1-8b-instant"
LLAMA3_70B = "llama3-70b-8192"
LLAMA3_8B = "llama3-8b-8192"
LLAMA3_70B = "llama3-70b-8192"
MIXTRAL_8X7B = "mixtral-8x7b-32768"
# Groq preview models
DEEPSEEK_LLAMA_70B = "deepseek-r1-distill-llama-70b"
GEMMA_7B = "gemma-7b-it"
GEMMA2_9B = "gemma2-9b-it"
# New Groq models (Preview)
LLAMA3_1_405B = "llama-3.1-405b-reasoning"
LLAMA3_1_70B = "llama-3.1-70b-versatile"
LLAMA3_1_8B = "llama-3.1-8b-instant"
# Ollama models
OLLAMA_LLAMA3_3 = "llama3.3"
OLLAMA_LLAMA3_2 = "llama3.2"
OLLAMA_LLAMA3_8B = "llama3"
OLLAMA_LLAMA3_405B = "llama3.1:405b"
OLLAMA_DOLPHIN = "dolphin-mistral:latest"
# OpenRouter models
GEMINI_FLASH_1_5 = "google/gemini-flash-1.5"
GEMINI_FLASH_1_5_8B = "google/gemini-flash-1.5"
GROK_BETA = "x-ai/grok-beta"
MISTRAL_NEMO = "mistralai/mistral-nemo"
COHERE_COMMAND_R_08_2024 = "cohere/command-r-08-2024"
COHERE_COMMAND_R_PLUS_08_2024 = "cohere/command-r-plus-08-2024"
EVA_QWEN_2_5_32B = "eva-unit-01/eva-qwen-2.5-32b"
DEEPSEEK_CHAT = "deepseek/deepseek-chat" # Actually: DeepSeek V3
DEEPSEEK_CHAT = "deepseek/deepseek-chat"
PERPLEXITY_LLAMA_3_1_SONAR_LARGE_128K_ONLINE = (
"perplexity/llama-3.1-sonar-large-128k-online"
)
@@ -152,74 +147,47 @@ class LlmModel(str, Enum, metaclass=LlmModelMeta):
def context_window(self) -> int:
return self.metadata.context_window
@property
def max_output_tokens(self) -> int | None:
return self.metadata.max_output_tokens
MODEL_METADATA = {
# https://platform.openai.com/docs/models
LlmModel.O3_MINI: ModelMetadata("openai", 200000, 100000), # o3-mini-2025-01-31
LlmModel.O1: ModelMetadata("openai", 200000, 100000), # o1-2024-12-17
LlmModel.O1_PREVIEW: ModelMetadata(
"openai", 128000, 32768
), # o1-preview-2024-09-12
LlmModel.O1_MINI: ModelMetadata("openai", 128000, 65536), # o1-mini-2024-09-12
LlmModel.GPT4O_MINI: ModelMetadata(
"openai", 128000, 16384
), # gpt-4o-mini-2024-07-18
LlmModel.GPT4O: ModelMetadata("openai", 128000, 16384), # gpt-4o-2024-08-06
LlmModel.GPT4_TURBO: ModelMetadata(
"openai", 128000, 4096
), # gpt-4-turbo-2024-04-09
LlmModel.GPT3_5_TURBO: ModelMetadata("openai", 16385, 4096), # gpt-3.5-turbo-0125
# https://docs.anthropic.com/en/docs/about-claude/models
LlmModel.CLAUDE_3_5_SONNET: ModelMetadata(
"anthropic", 200000, 8192
), # claude-3-5-sonnet-20241022
LlmModel.CLAUDE_3_5_HAIKU: ModelMetadata(
"anthropic", 200000, 8192
), # claude-3-5-haiku-20241022
LlmModel.CLAUDE_3_HAIKU: ModelMetadata(
"anthropic", 200000, 4096
), # claude-3-haiku-20240307
# https://console.groq.com/docs/models
LlmModel.GEMMA2_9B: ModelMetadata("groq", 8192, None),
LlmModel.LLAMA3_3_70B: ModelMetadata("groq", 128000, 32768),
LlmModel.LLAMA3_1_8B: ModelMetadata("groq", 128000, 8192),
LlmModel.LLAMA3_70B: ModelMetadata("groq", 8192, None),
LlmModel.LLAMA3_8B: ModelMetadata("groq", 8192, None),
LlmModel.MIXTRAL_8X7B: ModelMetadata("groq", 32768, None),
LlmModel.DEEPSEEK_LLAMA_70B: ModelMetadata("groq", 128000, None),
# https://ollama.com/library
LlmModel.OLLAMA_LLAMA3_3: ModelMetadata("ollama", 8192, None),
LlmModel.OLLAMA_LLAMA3_2: ModelMetadata("ollama", 8192, None),
LlmModel.OLLAMA_LLAMA3_8B: ModelMetadata("ollama", 8192, None),
LlmModel.OLLAMA_LLAMA3_405B: ModelMetadata("ollama", 8192, None),
LlmModel.OLLAMA_DOLPHIN: ModelMetadata("ollama", 32768, None),
# https://openrouter.ai/models
LlmModel.GEMINI_FLASH_1_5: ModelMetadata("open_router", 1000000, 8192),
LlmModel.GROK_BETA: ModelMetadata("open_router", 131072, 131072),
LlmModel.MISTRAL_NEMO: ModelMetadata("open_router", 128000, 4096),
LlmModel.COHERE_COMMAND_R_08_2024: ModelMetadata("open_router", 128000, 4096),
LlmModel.COHERE_COMMAND_R_PLUS_08_2024: ModelMetadata("open_router", 128000, 4096),
LlmModel.EVA_QWEN_2_5_32B: ModelMetadata("open_router", 16384, 4096),
LlmModel.DEEPSEEK_CHAT: ModelMetadata("open_router", 64000, 2048),
LlmModel.O1_PREVIEW: ModelMetadata("openai", 32000),
LlmModel.O1_MINI: ModelMetadata("openai", 62000),
LlmModel.GPT4O_MINI: ModelMetadata("openai", 128000),
LlmModel.GPT4O: ModelMetadata("openai", 128000),
LlmModel.GPT4_TURBO: ModelMetadata("openai", 128000),
LlmModel.GPT3_5_TURBO: ModelMetadata("openai", 16385),
LlmModel.CLAUDE_3_5_SONNET: ModelMetadata("anthropic", 200000),
LlmModel.CLAUDE_3_HAIKU: ModelMetadata("anthropic", 200000),
LlmModel.LLAMA3_8B: ModelMetadata("groq", 8192),
LlmModel.LLAMA3_70B: ModelMetadata("groq", 8192),
LlmModel.MIXTRAL_8X7B: ModelMetadata("groq", 32768),
LlmModel.GEMMA_7B: ModelMetadata("groq", 8192),
LlmModel.GEMMA2_9B: ModelMetadata("groq", 8192),
LlmModel.LLAMA3_1_405B: ModelMetadata("groq", 8192),
# Limited to 16k during preview
LlmModel.LLAMA3_1_70B: ModelMetadata("groq", 131072),
LlmModel.LLAMA3_1_8B: ModelMetadata("groq", 131072),
LlmModel.OLLAMA_LLAMA3_2: ModelMetadata("ollama", 8192),
LlmModel.OLLAMA_LLAMA3_8B: ModelMetadata("ollama", 8192),
LlmModel.OLLAMA_LLAMA3_405B: ModelMetadata("ollama", 8192),
LlmModel.OLLAMA_DOLPHIN: ModelMetadata("ollama", 32768),
LlmModel.GEMINI_FLASH_1_5_8B: ModelMetadata("open_router", 8192),
LlmModel.GROK_BETA: ModelMetadata("open_router", 8192),
LlmModel.MISTRAL_NEMO: ModelMetadata("open_router", 4000),
LlmModel.COHERE_COMMAND_R_08_2024: ModelMetadata("open_router", 4000),
LlmModel.COHERE_COMMAND_R_PLUS_08_2024: ModelMetadata("open_router", 4000),
LlmModel.EVA_QWEN_2_5_32B: ModelMetadata("open_router", 4000),
LlmModel.DEEPSEEK_CHAT: ModelMetadata("open_router", 8192),
LlmModel.PERPLEXITY_LLAMA_3_1_SONAR_LARGE_128K_ONLINE: ModelMetadata(
"open_router", 127072, 127072
"open_router", 8192
),
LlmModel.QWEN_QWQ_32B_PREVIEW: ModelMetadata("open_router", 32768, 32768),
LlmModel.NOUSRESEARCH_HERMES_3_LLAMA_3_1_405B: ModelMetadata(
"open_router", 131000, 4096
),
LlmModel.NOUSRESEARCH_HERMES_3_LLAMA_3_1_70B: ModelMetadata(
"open_router", 12288, 12288
),
LlmModel.AMAZON_NOVA_LITE_V1: ModelMetadata("open_router", 300000, 5120),
LlmModel.AMAZON_NOVA_MICRO_V1: ModelMetadata("open_router", 128000, 5120),
LlmModel.AMAZON_NOVA_PRO_V1: ModelMetadata("open_router", 300000, 5120),
LlmModel.MICROSOFT_WIZARDLM_2_8X22B: ModelMetadata("open_router", 65536, 4096),
LlmModel.GRYPHE_MYTHOMAX_L2_13B: ModelMetadata("open_router", 4096, 4096),
LlmModel.QWEN_QWQ_32B_PREVIEW: ModelMetadata("open_router", 4000),
LlmModel.NOUSRESEARCH_HERMES_3_LLAMA_3_1_405B: ModelMetadata("open_router", 4000),
LlmModel.NOUSRESEARCH_HERMES_3_LLAMA_3_1_70B: ModelMetadata("open_router", 4000),
LlmModel.AMAZON_NOVA_LITE_V1: ModelMetadata("open_router", 4000),
LlmModel.AMAZON_NOVA_MICRO_V1: ModelMetadata("open_router", 4000),
LlmModel.AMAZON_NOVA_PRO_V1: ModelMetadata("open_router", 4000),
LlmModel.MICROSOFT_WIZARDLM_2_8X22B: ModelMetadata("open_router", 4000),
LlmModel.GRYPHE_MYTHOMAX_L2_13B: ModelMetadata("open_router", 4000),
}
for model in LlmModel:
@@ -238,17 +206,7 @@ class Message(BlockSchema):
content: str
class AIBlockBase(Block, ABC):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.prompt = ""
def merge_llm_stats(self, block: "AIBlockBase"):
self.merge_stats(block.execution_stats)
self.prompt = block.prompt
class AIStructuredResponseGeneratorBlock(AIBlockBase):
class AIStructuredResponseGeneratorBlock(Block):
class Input(BlockSchema):
prompt: str = SchemaField(
description="The prompt to send to the language model.",
@@ -300,7 +258,6 @@ class AIStructuredResponseGeneratorBlock(AIBlockBase):
response: dict[str, Any] = SchemaField(
description="The response object generated by the language model."
)
prompt: str = SchemaField(description="The prompt sent to the language model.")
error: str = SchemaField(description="Error message if the API call failed.")
def __init__(self):
@@ -320,10 +277,7 @@ class AIStructuredResponseGeneratorBlock(AIBlockBase):
"prompt": "User prompt",
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("response", {"key1": "key1Value", "key2": "key2Value"}),
("prompt", str),
],
test_output=("response", {"key1": "key1Value", "key2": "key2Value"}),
test_mock={
"llm_call": lambda *args, **kwargs: (
json.dumps(
@@ -337,20 +291,19 @@ class AIStructuredResponseGeneratorBlock(AIBlockBase):
)
},
)
self.prompt = ""
@staticmethod
def llm_call(
self,
credentials: APIKeyCredentials,
llm_model: LlmModel,
prompt: list[dict],
json_format: bool,
max_tokens: int | None,
max_tokens: int | None = None,
ollama_host: str = "localhost:11434",
) -> tuple[str, int, int]:
"""
Args:
credentials: The API key credentials to use.
api_key: API key for the LLM provider.
llm_model: The LLM model to use.
prompt: The prompt to send to the LLM.
json_format: Whether the response should be in JSON format.
@@ -363,7 +316,6 @@ class AIStructuredResponseGeneratorBlock(AIBlockBase):
The number of tokens used in the completion.
"""
provider = llm_model.metadata.provider
max_tokens = max_tokens or llm_model.max_output_tokens or 4096
if provider == "openai":
oai_client = openai.OpenAI(api_key=credentials.api_key.get_secret_value())
@@ -385,7 +337,6 @@ class AIStructuredResponseGeneratorBlock(AIBlockBase):
response_format=response_format, # type: ignore
max_completion_tokens=max_tokens,
)
self.prompt = json.dumps(prompt)
return (
response.choices[0].message.content or "",
@@ -413,9 +364,8 @@ class AIStructuredResponseGeneratorBlock(AIBlockBase):
model=llm_model.value,
system=sysprompt,
messages=messages,
max_tokens=max_tokens,
max_tokens=max_tokens or 8192,
)
self.prompt = json.dumps(prompt)
if not resp.content:
raise ValueError("No content returned from Anthropic.")
@@ -442,7 +392,6 @@ class AIStructuredResponseGeneratorBlock(AIBlockBase):
response_format=response_format, # type: ignore
max_tokens=max_tokens,
)
self.prompt = json.dumps(prompt)
return (
response.choices[0].message.content or "",
response.usage.prompt_tokens if response.usage else 0,
@@ -457,7 +406,6 @@ class AIStructuredResponseGeneratorBlock(AIBlockBase):
prompt=f"{sys_messages}\n\n{usr_messages}",
stream=False,
)
self.prompt = json.dumps(prompt)
return (
response.get("response") or "",
response.get("prompt_eval_count") or 0,
@@ -478,7 +426,6 @@ class AIStructuredResponseGeneratorBlock(AIBlockBase):
messages=prompt, # type: ignore
max_tokens=max_tokens,
)
self.prompt = json.dumps(prompt)
# If there's no response, raise an error
if not response.choices:
@@ -578,11 +525,9 @@ class AIStructuredResponseGeneratorBlock(AIBlockBase):
)
for k, v in parsed_dict.items()
}
yield "prompt", self.prompt
return
else:
yield "response", {"response": response_text}
yield "prompt", self.prompt
return
retry_prompt = trim_prompt(
@@ -613,7 +558,7 @@ class AIStructuredResponseGeneratorBlock(AIBlockBase):
raise RuntimeError(retry_prompt)
class AITextGeneratorBlock(AIBlockBase):
class AITextGeneratorBlock(Block):
class Input(BlockSchema):
prompt: str = SchemaField(
description="The prompt to send to the language model. You can use any of the {keys} from Prompt Values to fill in the prompt with values from the prompt values dictionary by putting them in curly braces.",
@@ -656,7 +601,6 @@ class AITextGeneratorBlock(AIBlockBase):
response: str = SchemaField(
description="The response generated by the language model."
)
prompt: str = SchemaField(description="The prompt sent to the language model.")
error: str = SchemaField(description="Error message if the API call failed.")
def __init__(self):
@@ -671,10 +615,7 @@ class AITextGeneratorBlock(AIBlockBase):
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("response", "Response text"),
("prompt", str),
],
test_output=("response", "Response text"),
test_mock={"llm_call": lambda *args, **kwargs: "Response text"},
)
@@ -685,7 +626,7 @@ class AITextGeneratorBlock(AIBlockBase):
) -> str:
block = AIStructuredResponseGeneratorBlock()
response = block.run_once(input_data, "response", credentials=credentials)
self.merge_llm_stats(block)
self.merge_stats(block.execution_stats)
return response["response"]
def run(
@@ -696,7 +637,6 @@ class AITextGeneratorBlock(AIBlockBase):
expected_format={},
)
yield "response", self.llm_call(object_input_data, credentials)
yield "prompt", self.prompt
class SummaryStyle(Enum):
@@ -706,7 +646,7 @@ class SummaryStyle(Enum):
NUMBERED_LIST = "numbered list"
class AITextSummarizerBlock(AIBlockBase):
class AITextSummarizerBlock(Block):
class Input(BlockSchema):
text: str = SchemaField(
description="The text to summarize.",
@@ -749,7 +689,6 @@ class AITextSummarizerBlock(AIBlockBase):
class Output(BlockSchema):
summary: str = SchemaField(description="The final summary of the text.")
prompt: str = SchemaField(description="The prompt sent to the language model.")
error: str = SchemaField(description="Error message if the API call failed.")
def __init__(self):
@@ -764,10 +703,7 @@ class AITextSummarizerBlock(AIBlockBase):
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("summary", "Final summary of a long text"),
("prompt", str),
],
test_output=("summary", "Final summary of a long text"),
test_mock={
"llm_call": lambda input_data, credentials: (
{"final_summary": "Final summary of a long text"}
@@ -795,7 +731,6 @@ class AITextSummarizerBlock(AIBlockBase):
final_summary = self._combine_summaries(summaries, input_data, credentials)
yield "summary", final_summary
yield "prompt", self.prompt
@staticmethod
def _split_text(text: str, max_tokens: int, overlap: int) -> list[str]:
@@ -816,7 +751,7 @@ class AITextSummarizerBlock(AIBlockBase):
) -> dict:
block = AIStructuredResponseGeneratorBlock()
response = block.run_once(input_data, "response", credentials=credentials)
self.merge_llm_stats(block)
self.merge_stats(block.execution_stats)
return response
def _summarize_chunk(
@@ -873,7 +808,7 @@ class AITextSummarizerBlock(AIBlockBase):
] # Get the first yielded value
class AIConversationBlock(AIBlockBase):
class AIConversationBlock(Block):
class Input(BlockSchema):
messages: List[Message] = SchemaField(
description="List of messages in the conversation.", min_length=1
@@ -899,7 +834,6 @@ class AIConversationBlock(AIBlockBase):
response: str = SchemaField(
description="The model's response to the conversation."
)
prompt: str = SchemaField(description="The prompt sent to the language model.")
error: str = SchemaField(description="Error message if the API call failed.")
def __init__(self):
@@ -923,13 +857,10 @@ class AIConversationBlock(AIBlockBase):
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
(
"response",
"The 2020 World Series was played at Globe Life Field in Arlington, Texas.",
),
("prompt", str),
],
test_output=(
"response",
"The 2020 World Series was played at Globe Life Field in Arlington, Texas.",
),
test_mock={
"llm_call": lambda *args, **kwargs: "The 2020 World Series was played at Globe Life Field in Arlington, Texas."
},
@@ -942,7 +873,7 @@ class AIConversationBlock(AIBlockBase):
) -> str:
block = AIStructuredResponseGeneratorBlock()
response = block.run_once(input_data, "response", credentials=credentials)
self.merge_llm_stats(block)
self.merge_stats(block.execution_stats)
return response["response"]
def run(
@@ -956,16 +887,14 @@ class AIConversationBlock(AIBlockBase):
conversation_history=input_data.messages,
max_tokens=input_data.max_tokens,
expected_format={},
ollama_host=input_data.ollama_host,
),
credentials=credentials,
)
yield "response", response
yield "prompt", self.prompt
class AIListGeneratorBlock(AIBlockBase):
class AIListGeneratorBlock(Block):
class Input(BlockSchema):
focus: str | None = SchemaField(
description="The focus of the list to generate.",
@@ -1008,7 +937,6 @@ class AIListGeneratorBlock(AIBlockBase):
list_item: str = SchemaField(
description="Each individual item in the list.",
)
prompt: str = SchemaField(description="The prompt sent to the language model.")
error: str = SchemaField(
description="Error message if the list generation failed."
)
@@ -1040,7 +968,6 @@ class AIListGeneratorBlock(AIBlockBase):
"generated_list",
["Zylora Prime", "Kharon-9", "Vortexia", "Oceara", "Draknos"],
),
("prompt", str),
("list_item", "Zylora Prime"),
("list_item", "Kharon-9"),
("list_item", "Vortexia"),
@@ -1054,14 +981,13 @@ class AIListGeneratorBlock(AIBlockBase):
},
)
@staticmethod
def llm_call(
self,
input_data: AIStructuredResponseGeneratorBlock.Input,
credentials: APIKeyCredentials,
) -> dict[str, str]:
llm_block = AIStructuredResponseGeneratorBlock()
response = llm_block.run_once(input_data, "response", credentials=credentials)
self.merge_llm_stats(llm_block)
return response
@staticmethod
@@ -1175,7 +1101,6 @@ class AIListGeneratorBlock(AIBlockBase):
# If we reach here, we have a valid Python list
logger.debug("Successfully generated a valid Python list")
yield "generated_list", parsed_list
yield "prompt", self.prompt
# Yield each item in the list
for item in parsed_list:
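One side of this hunk resolves the completion budget as max_tokens or llm_model.max_output_tokens or 4096; a worked illustration against the three-field metadata table (the import path is assumed):
from backend.blocks.llm import LlmModel  # assumed module path

# GPT4O carries ModelMetadata("openai", 128000, 16384) in the newer table:
assert (None or LlmModel.GPT4O.max_output_tokens or 4096) == 16384
# LLAMA3_8B has max_output_tokens=None, so the chain bottoms out at 4096:
assert (None or LlmModel.LLAMA3_8B.max_output_tokens or 4096) == 4096
# An explicit request always wins:
assert (1024 or LlmModel.GPT4O.max_output_tokens or 4096) == 1024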

View File

@@ -1,245 +0,0 @@
import os
import tempfile
from typing import Literal, Optional
from moviepy.audio.io.AudioFileClip import AudioFileClip
from moviepy.video.fx.Loop import Loop
from moviepy.video.io.VideoFileClip import VideoFileClip
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
from backend.util.file import MediaFile, get_exec_file_path, store_media_file
class MediaDurationBlock(Block):
class Input(BlockSchema):
media_in: MediaFile = SchemaField(
description="Media input (URL, data URI, or local path)."
)
is_video: bool = SchemaField(
description="Whether the media is a video (True) or audio (False).",
default=True,
)
class Output(BlockSchema):
duration: float = SchemaField(
description="Duration of the media file (in seconds)."
)
error: str = SchemaField(
description="Error message if something fails.", default=""
)
def __init__(self):
super().__init__(
id="d8b91fd4-da26-42d4-8ecb-8b196c6d84b6",
description="Block to get the duration of a media file.",
categories={BlockCategory.MULTIMEDIA},
input_schema=MediaDurationBlock.Input,
output_schema=MediaDurationBlock.Output,
)
def run(
self,
input_data: Input,
*,
graph_exec_id: str,
**kwargs,
) -> BlockOutput:
# 1) Store the input media locally
local_media_path = store_media_file(
graph_exec_id=graph_exec_id,
file=input_data.media_in,
return_content=False,
)
media_abspath = get_exec_file_path(graph_exec_id, local_media_path)
# 2) Load the clip
if input_data.is_video:
clip = VideoFileClip(media_abspath)
else:
clip = AudioFileClip(media_abspath)
yield "duration", clip.duration
class LoopVideoBlock(Block):
"""
Block for looping (repeating) a video clip until a given duration or number of loops.
"""
class Input(BlockSchema):
video_in: MediaFile = SchemaField(
description="The input video (can be a URL, data URI, or local path)."
)
# Provide EITHER a `duration` or `n_loops` or both. We'll demonstrate `duration`.
duration: Optional[float] = SchemaField(
description="Target duration (in seconds) to loop the video to. If omitted, defaults to no looping.",
default=None,
ge=0.0,
)
n_loops: Optional[int] = SchemaField(
description="Number of times to repeat the video. If omitted, defaults to 1 (no repeat).",
default=None,
ge=1,
)
output_return_type: Literal["file_path", "data_uri"] = SchemaField(
description="How to return the output video. Either a relative path or base64 data URI.",
default="file_path",
)
class Output(BlockSchema):
video_out: str = SchemaField(
description="Looped video returned either as a relative path or a data URI."
)
error: str = SchemaField(
description="Error message if something fails.", default=""
)
def __init__(self):
super().__init__(
id="8bf9eef6-5451-4213-b265-25306446e94b",
description="Block to loop a video to a given duration or number of repeats.",
categories={BlockCategory.MULTIMEDIA},
input_schema=LoopVideoBlock.Input,
output_schema=LoopVideoBlock.Output,
)
def run(
self,
input_data: Input,
*,
node_exec_id: str,
graph_exec_id: str,
**kwargs,
) -> BlockOutput:
# 1) Store the input video locally
local_video_path = store_media_file(
graph_exec_id=graph_exec_id,
file=input_data.video_in,
return_content=False,
)
input_abspath = get_exec_file_path(graph_exec_id, local_video_path)
# 2) Load the clip
clip = VideoFileClip(input_abspath)
# 3) Apply the loop effect
looped_clip = clip
if input_data.duration:
# Loop until we reach the specified duration
looped_clip = looped_clip.with_effects([Loop(duration=input_data.duration)])
elif input_data.n_loops:
looped_clip = looped_clip.with_effects([Loop(n=input_data.n_loops)])
else:
raise ValueError("Either 'duration' or 'n_loops' must be provided.")
assert isinstance(looped_clip, VideoFileClip)
# 4) Save the looped output
output_filename = MediaFile(
f"{node_exec_id}_looped_{os.path.basename(local_video_path)}"
)
output_abspath = get_exec_file_path(graph_exec_id, output_filename)
looped_clip = looped_clip.with_audio(clip.audio)
looped_clip.write_videofile(output_abspath, codec="libx264", audio_codec="aac")
# Return as data URI
video_out = store_media_file(
graph_exec_id=graph_exec_id,
file=output_filename,
return_content=input_data.output_return_type == "data_uri",
)
yield "video_out", video_out
class AddAudioToVideoBlock(Block):
"""
Block that adds (attaches) an audio track to an existing video.
Optionally scale the volume of the new track.
"""
class Input(BlockSchema):
video_in: MediaFile = SchemaField(
description="Video input (URL, data URI, or local path)."
)
audio_in: MediaFile = SchemaField(
description="Audio input (URL, data URI, or local path)."
)
volume: float = SchemaField(
description="Volume scale for the newly attached audio track (1.0 = original).",
default=1.0,
)
output_return_type: Literal["file_path", "data_uri"] = SchemaField(
description="Return the final output as a relative path or base64 data URI.",
default="file_path",
)
class Output(BlockSchema):
video_out: MediaFile = SchemaField(
description="Final video (with attached audio), as a path or data URI."
)
error: str = SchemaField(
description="Error message if something fails.", default=""
)
def __init__(self):
super().__init__(
id="3503748d-62b6-4425-91d6-725b064af509",
description="Block to attach an audio file to a video file using moviepy.",
categories={BlockCategory.MULTIMEDIA},
input_schema=AddAudioToVideoBlock.Input,
output_schema=AddAudioToVideoBlock.Output,
)
def run(
self,
input_data: Input,
*,
node_exec_id: str,
graph_exec_id: str,
**kwargs,
) -> BlockOutput:
# 1) Store the inputs locally
local_video_path = store_media_file(
graph_exec_id=graph_exec_id,
file=input_data.video_in,
return_content=False,
)
local_audio_path = store_media_file(
graph_exec_id=graph_exec_id,
file=input_data.audio_in,
return_content=False,
)
abs_temp_dir = os.path.join(tempfile.gettempdir(), "exec_file", graph_exec_id)
video_abspath = os.path.join(abs_temp_dir, local_video_path)
audio_abspath = os.path.join(abs_temp_dir, local_audio_path)
# 2) Load video + audio with moviepy
video_clip = VideoFileClip(video_abspath)
audio_clip = AudioFileClip(audio_abspath)
# Optionally scale volume
if input_data.volume != 1.0:
audio_clip = audio_clip.with_volume_scaled(input_data.volume)
# 3) Attach the new audio track
final_clip = video_clip.with_audio(audio_clip)
# 4) Write to output file
output_filename = MediaFile(
f"{node_exec_id}_audio_attached_{os.path.basename(local_video_path)}"
)
output_abspath = os.path.join(abs_temp_dir, output_filename)
final_clip.write_videofile(output_abspath, codec="libx264", audio_codec="aac")
# 5) Return either path or data URI
video_out = store_media_file(
graph_exec_id=graph_exec_id,
file=output_filename,
return_content=input_data.output_return_type == "data_uri",
)
yield "video_out", video_out

View File

@@ -1,338 +0,0 @@
from typing import Any, Literal, Optional, Union
from mem0 import MemoryClient
from pydantic import BaseModel, SecretStr
from backend.data.block import Block, BlockOutput, BlockSchema
from backend.data.model import (
APIKeyCredentials,
CredentialsField,
CredentialsMetaInput,
SchemaField,
)
from backend.integrations.providers import ProviderName
TEST_CREDENTIALS = APIKeyCredentials(
id="ed55ac19-356e-4243-a6cb-bc599e9b716f",
provider="mem0",
api_key=SecretStr("mock-mem0-api-key"),
title="Mock Mem0 API key",
expires_at=None,
)
TEST_CREDENTIALS_INPUT = {
"provider": TEST_CREDENTIALS.provider,
"id": TEST_CREDENTIALS.id,
"type": TEST_CREDENTIALS.type,
"title": TEST_CREDENTIALS.title,
}
class Mem0Base:
"""Base class with shared utilities for Mem0 blocks"""
@staticmethod
def _get_client(credentials: APIKeyCredentials) -> MemoryClient:
"""Get initialized Mem0 client"""
return MemoryClient(api_key=credentials.api_key.get_secret_value())
Filter = dict[str, list[dict[str, str | dict[str, list[str]]]]]
class Conversation(BaseModel):
discriminator: Literal["conversation"]
messages: list[dict[str, str]]
class Content(BaseModel):
discriminator: Literal["content"]
content: str
class AddMemoryBlock(Block, Mem0Base):
"""Block for adding memories to Mem0
Always limited by user_id and optional graph_id and graph_exec_id"""
class Input(BlockSchema):
credentials: CredentialsMetaInput[
Literal[ProviderName.MEM0], Literal["api_key"]
] = CredentialsField(description="Mem0 API key credentials")
content: Union[Content, Conversation] = SchemaField(
discriminator="discriminator",
description="Content to add - either a string or list of message objects as output from an AI block",
default=Content(discriminator="content", content="I'm a vegetarian"),
)
metadata: dict[str, Any] = SchemaField(
description="Optional metadata for the memory", default={}
)
limit_memory_to_run: bool = SchemaField(
description="Limit the memory to the run", default=False
)
limit_memory_to_agent: bool = SchemaField(
description="Limit the memory to the agent", default=False
)
class Output(BlockSchema):
action: str = SchemaField(description="Action of the operation")
memory: str = SchemaField(description="Memory created")
error: str = SchemaField(description="Error message if operation fails")
def __init__(self):
super().__init__(
id="dce97578-86be-45a4-ae50-f6de33fc935a",
description="Add new memories to Mem0 with user segmentation",
input_schema=AddMemoryBlock.Input,
output_schema=AddMemoryBlock.Output,
test_input=[
{
"content": {
"discriminator": "conversation",
"messages": [{"role": "user", "content": "I'm a vegetarian"}],
},
"metadata": {"food": "vegetarian"},
"credentials": TEST_CREDENTIALS_INPUT,
},
{
"content": {
"discriminator": "content",
"content": "I am a vegetarian",
},
"metadata": {"food": "vegetarian"},
"credentials": TEST_CREDENTIALS_INPUT,
},
],
test_output=[("action", "NO_CHANGE"), ("action", "NO_CHANGE")],
test_credentials=TEST_CREDENTIALS,
test_mock={"_get_client": lambda credentials: MockMemoryClient()},
)
def run(
self,
input_data: Input,
*,
credentials: APIKeyCredentials,
user_id: str,
graph_id: str,
graph_exec_id: str,
**kwargs
) -> BlockOutput:
try:
client = self._get_client(credentials)
if isinstance(input_data.content, Conversation):
messages = input_data.content.messages
else:
messages = [{"role": "user", "content": input_data.content.content}]
params = {
"user_id": user_id,
"output_format": "v1.1",
"metadata": input_data.metadata,
}
if input_data.limit_memory_to_run:
params["run_id"] = graph_exec_id
if input_data.limit_memory_to_agent:
params["agent_id"] = graph_id
# Use the client to add memory
result = client.add(
messages,
**params,
)
results = result.get("results", [])
if results:
for res in results:
yield "action", res["event"]
yield "memory", res["memory"]
else:
yield "action", "NO_CHANGE"
except Exception as e:
yield "error", str(object=e)
class SearchMemoryBlock(Block, Mem0Base):
"""Block for searching memories in Mem0"""
class Input(BlockSchema):
credentials: CredentialsMetaInput[
Literal[ProviderName.MEM0], Literal["api_key"]
] = CredentialsField(description="Mem0 API key credentials")
query: str = SchemaField(
description="Search query",
advanced=False,
)
trigger: bool = SchemaField(
description="An unused field that is used to (re-)trigger the block when you have no other inputs",
default=False,
advanced=False,
)
categories_filter: list[str] = SchemaField(
description="Categories to filter by",
default=[],
advanced=True,
)
limit_memory_to_run: bool = SchemaField(
description="Limit the memory to the run", default=False
)
limit_memory_to_agent: bool = SchemaField(
description="Limit the memory to the agent", default=True
)
class Output(BlockSchema):
memories: Any = SchemaField(description="List of matching memories")
error: str = SchemaField(description="Error message if operation fails")
def __init__(self):
super().__init__(
id="bd7c84e3-e073-4b75-810c-600886ec8a5b",
description="Search memories in Mem0 by user",
input_schema=SearchMemoryBlock.Input,
output_schema=SearchMemoryBlock.Output,
test_input={
"query": "vegetarian preferences",
"credentials": TEST_CREDENTIALS_INPUT,
"top_k": 10,
"rerank": True,
},
test_output=[
("memories", [{"id": "test-memory", "content": "test content"}])
],
test_credentials=TEST_CREDENTIALS,
test_mock={"_get_client": lambda credentials: MockMemoryClient()},
)
def run(
self,
input_data: Input,
*,
credentials: APIKeyCredentials,
user_id: str,
graph_id: str,
graph_exec_id: str,
**kwargs
) -> BlockOutput:
try:
client = self._get_client(credentials)
filters: Filter = {
# Start with the user_id condition; further filters are appended below
"AND": [
{"user_id": user_id},
]
}
if input_data.categories_filter:
filters["AND"].append(
{"categories": {"contains": input_data.categories_filter}}
)
if input_data.limit_memory_to_run:
filters["AND"].append({"run_id": graph_exec_id})
if input_data.limit_memory_to_agent:
filters["AND"].append({"agent_id": graph_id})
result: list[dict[str, Any]] = client.search(
input_data.query, version="v2", filters=filters
)
yield "memories", result
except Exception as e:
yield "error", str(e)
class GetAllMemoriesBlock(Block, Mem0Base):
"""Block for retrieving all memories from Mem0"""
class Input(BlockSchema):
credentials: CredentialsMetaInput[
Literal[ProviderName.MEM0], Literal["api_key"]
] = CredentialsField(description="Mem0 API key credentials")
trigger: bool = SchemaField(
description="An unused field that is used to trigger the block when you have no other inputs",
default=False,
advanced=False,
)
categories: Optional[list[str]] = SchemaField(
description="Filter by categories", default=None
)
limit_memory_to_run: bool = SchemaField(
description="Limit the memory to the run", default=False
)
limit_memory_to_agent: bool = SchemaField(
description="Limit the memory to the agent", default=False
)
class Output(BlockSchema):
memories: Any = SchemaField(description="List of memories")
error: str = SchemaField(description="Error message if operation fails")
def __init__(self):
super().__init__(
id="45aee5bf-4767-45d1-a28b-e01c5aae9fc1",
description="Retrieve all memories from Mem0 with pagination",
input_schema=GetAllMemoriesBlock.Input,
output_schema=GetAllMemoriesBlock.Output,
test_input={
"user_id": "test_user",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_output=[
("memories", [{"id": "test-memory", "content": "test content"}]),
],
test_credentials=TEST_CREDENTIALS,
test_mock={"_get_client": lambda credentials: MockMemoryClient()},
)
def run(
self,
input_data: Input,
*,
credentials: APIKeyCredentials,
user_id: str,
graph_id: str,
graph_exec_id: str,
**kwargs
) -> BlockOutput:
try:
client = self._get_client(credentials)
filters: Filter = {
"AND": [
{"user_id": user_id},
]
}
if input_data.limit_memory_to_run:
filters["AND"].append({"run_id": graph_exec_id})
if input_data.limit_memory_to_agent:
filters["AND"].append({"agent_id": graph_id})
if input_data.categories:
filters["AND"].append(
{"categories": {"contains": input_data.categories}}
)
memories: list[dict[str, Any]] = client.get_all(
filters=filters,
version="v2",
)
yield "memories", memories
except Exception as e:
yield "error", str(e)
# Mock client for testing
class MockMemoryClient:
"""Mock Mem0 client for testing"""
def add(self, *args, **kwargs):
return {"memory_id": "test-memory-id", "status": "success"}
def search(self, *args, **kwargs) -> list[dict[str, str]]:
return [{"id": "test-memory", "content": "test content"}]
def get_all(self, *args, **kwargs) -> list[dict[str, str]]:
return [{"id": "test-memory", "content": "test content"}]

View File

@@ -1,48 +1,22 @@
from datetime import datetime, timezone
from typing import Iterator, Literal
from typing import Iterator
import praw
from pydantic import BaseModel, SecretStr
from pydantic import BaseModel, ConfigDict
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import (
CredentialsField,
CredentialsMetaInput,
SchemaField,
UserPasswordCredentials,
)
from backend.integrations.providers import ProviderName
from backend.data.model import BlockSecret, SchemaField, SecretField
from backend.util.mock import MockObject
from backend.util.settings import Settings
RedditCredentials = UserPasswordCredentials
RedditCredentialsInput = CredentialsMetaInput[
Literal[ProviderName.REDDIT],
Literal["user_password"],
]
def RedditCredentialsField() -> RedditCredentialsInput:
"""Creates a Reddit credentials input on a block."""
return CredentialsField(
description="The Reddit integration requires a username and password.",
)
class RedditCredentials(BaseModel):
client_id: BlockSecret = SecretField(key="reddit_client_id")
client_secret: BlockSecret = SecretField(key="reddit_client_secret")
username: BlockSecret = SecretField(key="reddit_username")
password: BlockSecret = SecretField(key="reddit_password")
user_agent: str = "AutoGPT:1.0 (by /u/autogpt)"
TEST_CREDENTIALS = UserPasswordCredentials(
id="01234567-89ab-cdef-0123-456789abcdef",
provider="reddit",
username=SecretStr("mock-reddit-username"),
password=SecretStr("mock-reddit-password"),
title="Mock Reddit credentials",
)
TEST_CREDENTIALS_INPUT = {
"provider": TEST_CREDENTIALS.provider,
"id": TEST_CREDENTIALS.id,
"type": TEST_CREDENTIALS.type,
"title": TEST_CREDENTIALS.title,
}
model_config = ConfigDict(title="Reddit Credentials")
class RedditPost(BaseModel):
@@ -57,16 +31,13 @@ class RedditComment(BaseModel):
comment: str
settings = Settings()
def get_praw(creds: RedditCredentials) -> praw.Reddit:
client = praw.Reddit(
client_id=settings.secrets.reddit_client_id,
client_secret=settings.secrets.reddit_client_secret,
client_id=creds.client_id.get_secret_value(),
client_secret=creds.client_secret.get_secret_value(),
username=creds.username.get_secret_value(),
password=creds.password.get_secret_value(),
user_agent=settings.config.reddit_user_agent,
user_agent=creds.user_agent,
)
me = client.user.me()
if not me:
@@ -77,11 +48,11 @@ def get_praw(creds: RedditCredentials) -> praw.Reddit:
class GetRedditPostsBlock(Block):
class Input(BlockSchema):
subreddit: str = SchemaField(
description="Subreddit name, excluding the /r/ prefix",
default="writingprompts",
subreddit: str = SchemaField(description="Subreddit name")
creds: RedditCredentials = SchemaField(
description="Reddit credentials",
default=RedditCredentials(),
)
credentials: RedditCredentialsInput = RedditCredentialsField()
last_minutes: int | None = SchemaField(
description="Post time to stop minutes ago while fetching posts",
default=None,
@@ -99,18 +70,20 @@ class GetRedditPostsBlock(Block):
def __init__(self):
super().__init__(
disabled=True,
id="c6731acb-4285-4ee1-bc9b-03d0766c370f",
description="This block fetches Reddit posts from a defined subreddit name.",
categories={BlockCategory.SOCIAL},
disabled=(
not settings.secrets.reddit_client_id
or not settings.secrets.reddit_client_secret
),
input_schema=GetRedditPostsBlock.Input,
output_schema=GetRedditPostsBlock.Output,
test_credentials=TEST_CREDENTIALS,
test_input={
"credentials": TEST_CREDENTIALS_INPUT,
"creds": {
"client_id": "client_id",
"client_secret": "client_secret",
"username": "username",
"password": "password",
"user_agent": "user_agent",
},
"subreddit": "subreddit",
"last_post": "id3",
"post_limit": 2,
@@ -130,7 +103,7 @@ class GetRedditPostsBlock(Block):
),
],
test_mock={
"get_posts": lambda input_data, credentials: [
"get_posts": lambda _: [
MockObject(id="id1", title="title1", selftext="body1"),
MockObject(id="id2", title="title2", selftext="body2"),
MockObject(id="id3", title="title2", selftext="body2"),
@@ -139,18 +112,14 @@ class GetRedditPostsBlock(Block):
)
@staticmethod
def get_posts(
input_data: Input, *, credentials: RedditCredentials
) -> Iterator[praw.reddit.Submission]:
client = get_praw(credentials)
def get_posts(input_data: Input) -> Iterator[praw.reddit.Submission]:
client = get_praw(input_data.creds)
subreddit = client.subreddit(input_data.subreddit)
return subreddit.new(limit=input_data.post_limit or 10)
def run(
self, input_data: Input, *, credentials: RedditCredentials, **kwargs
) -> BlockOutput:
def run(self, input_data: Input, **kwargs) -> BlockOutput:
current_time = datetime.now(tz=timezone.utc)
for post in self.get_posts(input_data=input_data, credentials=credentials):
for post in self.get_posts(input_data):
if input_data.last_minutes:
post_datetime = datetime.fromtimestamp(
post.created_utc, tz=timezone.utc
@@ -172,7 +141,9 @@ class GetRedditPostsBlock(Block):
class PostRedditCommentBlock(Block):
class Input(BlockSchema):
credentials: RedditCredentialsInput = RedditCredentialsField()
creds: RedditCredentials = SchemaField(
description="Reddit credentials", default=RedditCredentials()
)
data: RedditComment = SchemaField(description="Reddit comment")
class Output(BlockSchema):
@@ -185,15 +156,7 @@ class PostRedditCommentBlock(Block):
categories={BlockCategory.SOCIAL},
input_schema=PostRedditCommentBlock.Input,
output_schema=PostRedditCommentBlock.Output,
disabled=(
not settings.secrets.reddit_client_id
or not settings.secrets.reddit_client_secret
),
test_credentials=TEST_CREDENTIALS,
test_input={
"credentials": TEST_CREDENTIALS_INPUT,
"data": {"post_id": "id", "comment": "comment"},
},
test_input={"data": {"post_id": "id", "comment": "comment"}},
test_output=[("comment_id", "dummy_comment_id")],
test_mock={"reply_post": lambda creds, comment: "dummy_comment_id"},
)
@@ -207,7 +170,5 @@ class PostRedditCommentBlock(Block):
raise ValueError("Failed to post comment.")
return new_comment.id
def run(
self, input_data: Input, *, credentials: RedditCredentials, **kwargs
) -> BlockOutput:
yield "comment_id", self.reply_post(credentials, input_data.data)
def run(self, input_data: Input, **kwargs) -> BlockOutput:
yield "comment_id", self.reply_post(input_data.creds, input_data.data)

View File

@@ -131,7 +131,7 @@ class ReplicateFluxAdvancedModelBlock(Block):
super().__init__(
id="90f8c45e-e983-4644-aa0b-b4ebe2f531bc",
description="This block runs Flux models on Replicate with advanced settings.",
categories={BlockCategory.AI, BlockCategory.MULTIMEDIA},
categories={BlockCategory.AI},
input_schema=ReplicateFluxAdvancedModelBlock.Input,
output_schema=ReplicateFluxAdvancedModelBlock.Output,
test_input={

View File

@@ -1,174 +0,0 @@
from base64 import b64encode
from enum import Enum
from typing import Literal
from pydantic import SecretStr
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import (
APIKeyCredentials,
CredentialsField,
CredentialsMetaInput,
SchemaField,
)
from backend.integrations.providers import ProviderName
from backend.util.file import MediaFile, store_media_file
from backend.util.request import Requests
class Format(str, Enum):
PNG = "png"
JPEG = "jpeg"
WEBP = "webp"
class ScreenshotWebPageBlock(Block):
"""Block for taking screenshots using ScreenshotOne API"""
class Input(BlockSchema):
credentials: CredentialsMetaInput[
Literal[ProviderName.SCREENSHOTONE], Literal["api_key"]
] = CredentialsField(description="The ScreenshotOne API key")
url: str = SchemaField(
description="URL of the website to screenshot",
placeholder="https://example.com",
)
viewport_width: int = SchemaField(
description="Width of the viewport in pixels", default=1920
)
viewport_height: int = SchemaField(
description="Height of the viewport in pixels", default=1080
)
full_page: bool = SchemaField(
description="Whether to capture the full page length", default=False
)
format: Format = SchemaField(
description="Output format (png, jpeg, webp)", default=Format.PNG
)
block_ads: bool = SchemaField(description="Whether to block ads", default=True)
block_cookie_banners: bool = SchemaField(
description="Whether to block cookie banners", default=True
)
block_chats: bool = SchemaField(
description="Whether to block chat widgets", default=True
)
cache: bool = SchemaField(
description="Whether to enable caching", default=False
)
class Output(BlockSchema):
image: MediaFile = SchemaField(description="The screenshot image data")
error: str = SchemaField(description="Error message if the screenshot failed")
def __init__(self):
super().__init__(
id="3a7c4b8d-6e2f-4a5d-b9c1-f8d23c5a9b0e", # Generated UUID
description="Takes a screenshot of a specified website using ScreenshotOne API",
categories={BlockCategory.DATA},
input_schema=ScreenshotWebPageBlock.Input,
output_schema=ScreenshotWebPageBlock.Output,
test_input={
"url": "https://example.com",
"viewport_width": 1920,
"viewport_height": 1080,
"full_page": False,
"format": "png",
"block_ads": True,
"block_cookie_banners": True,
"block_chats": True,
"cache": False,
"credentials": {
"provider": "screenshotone",
"type": "api_key",
"id": "test-id",
"title": "Test API Key",
},
},
test_credentials=APIKeyCredentials(
id="test-id",
provider="screenshotone",
api_key=SecretStr("test-key"),
title="Test API Key",
expires_at=None,
),
test_output=[
(
"image",
"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAAXNSR0IArs4c6QAAAB5JREFUOE9jZPjP8J+BAsA4agDDaBgwjIYBw7AIAwCV5B/xAsMbygAAAABJRU5ErkJggg==",
),
],
test_mock={
"take_screenshot": lambda *args, **kwargs: {
"image": "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAAXNSR0IArs4c6QAAAB5JREFUOE9jZPjP8J+BAsA4agDDaBgwjIYBw7AIAwCV5B/xAsMbygAAAABJRU5ErkJggg==",
}
},
)
@staticmethod
def take_screenshot(
credentials: APIKeyCredentials,
graph_exec_id: str,
url: str,
viewport_width: int,
viewport_height: int,
full_page: bool,
format: Format,
block_ads: bool,
block_cookie_banners: bool,
block_chats: bool,
cache: bool,
) -> dict:
"""
Takes a screenshot using the ScreenshotOne API
"""
api = Requests(trusted_origins=["https://api.screenshotone.com"])
# Build API URL with parameters
params = {
"access_key": credentials.api_key.get_secret_value(),
"url": url,
"viewport_width": viewport_width,
"viewport_height": viewport_height,
"full_page": str(full_page).lower(),
"format": format.value,
"block_ads": str(block_ads).lower(),
"block_cookie_banners": str(block_cookie_banners).lower(),
"block_chats": str(block_chats).lower(),
"cache": str(cache).lower(),
}
response = api.get("https://api.screenshotone.com/take", params=params)
return {
"image": store_media_file(
graph_exec_id=graph_exec_id,
file=f"data:image/{format.value};base64,{b64encode(response.content).decode('utf-8')}",
return_content=True,
)
}
def run(
self,
input_data: Input,
*,
credentials: APIKeyCredentials,
graph_exec_id: str,
**kwargs,
) -> BlockOutput:
try:
screenshot_data = self.take_screenshot(
credentials=credentials,
graph_exec_id=graph_exec_id,
url=input_data.url,
viewport_width=input_data.viewport_width,
viewport_height=input_data.viewport_height,
full_page=input_data.full_page,
format=input_data.format,
block_ads=input_data.block_ads,
block_cookie_banners=input_data.block_cookie_banners,
block_chats=input_data.block_chats,
cache=input_data.cache,
)
yield "image", screenshot_data["image"]
except Exception as e:
yield "error", str(e)

View File

@@ -78,7 +78,7 @@ class CreateTalkingAvatarVideoBlock(Block):
super().__init__(
id="98c6f503-8c47-4b1c-a96d-351fc7c87dab",
description="This block integrates with D-ID to create video clips and retrieve their URLs.",
categories={BlockCategory.AI, BlockCategory.MULTIMEDIA},
categories={BlockCategory.AI},
input_schema=CreateTalkingAvatarVideoBlock.Input,
output_schema=CreateTalkingAvatarVideoBlock.Output,
test_input={

View File

@@ -76,8 +76,6 @@ class ExtractTextInformationBlock(Block):
class Output(BlockSchema):
positive: str = SchemaField(description="Extracted text")
negative: str = SchemaField(description="Original text")
matched_results: list[str] = SchemaField(description="List of matched results")
matched_count: int = SchemaField(description="Number of matched results")
def __init__(self):
super().__init__(
@@ -105,31 +103,13 @@ class ExtractTextInformationBlock(Block):
},
],
test_output=[
# Test case 1
("positive", "World!"),
("matched_results", ["World!"]),
("matched_count", 1),
# Test case 2
("positive", "Hello, World!"),
("matched_results", ["Hello, World!"]),
("matched_count", 1),
# Test case 3
("negative", "Hello, World!"),
("matched_results", []),
("matched_count", 0),
# Test case 4
("positive", "Hello,"),
("matched_results", ["Hello,"]),
("matched_count", 1),
# Test case 5
("positive", "World!!"),
("matched_results", ["World!!"]),
("matched_count", 1),
# Test case 6
("positive", "World!!"),
("positive", "Earth!!"),
("matched_results", ["World!!", "Earth!!"]),
("matched_count", 2),
],
)
@@ -150,16 +130,13 @@ class ExtractTextInformationBlock(Block):
for match in re.finditer(input_data.pattern, txt, flags)
if input_data.group <= len(match.groups())
]
if not input_data.find_all:
matches = matches[:1]
for match in matches:
yield "positive", match
if not input_data.find_all:
return
if not matches:
yield "negative", input_data.text
yield "matched_results", matches
yield "matched_count", len(matches)
class FillTextTemplateBlock(Block):
class Input(BlockSchema):
@@ -235,71 +212,3 @@ class CombineTextsBlock(Block):
def run(self, input_data: Input, **kwargs) -> BlockOutput:
combined_text = input_data.delimiter.join(input_data.input)
yield "output", combined_text
class TextSplitBlock(Block):
class Input(BlockSchema):
text: str = SchemaField(description="The text to split.")
delimiter: str = SchemaField(description="The delimiter to split the text by.")
strip: bool = SchemaField(
description="Whether to strip the text.", default=True
)
class Output(BlockSchema):
texts: list[str] = SchemaField(
description="The text split into a list of strings."
)
def __init__(self):
super().__init__(
id="d5ea33c8-a575-477a-b42f-2fe3be5055ec",
description="This block is used to split a text into a list of strings.",
categories={BlockCategory.TEXT},
input_schema=TextSplitBlock.Input,
output_schema=TextSplitBlock.Output,
test_input=[
{"text": "Hello, World!", "delimiter": ","},
{"text": "Hello, World!", "delimiter": ",", "strip": False},
],
test_output=[
("texts", ["Hello", "World!"]),
("texts", ["Hello", " World!"]),
],
)
def run(self, input_data: Input, **kwargs) -> BlockOutput:
if len(input_data.text) == 0:
yield "texts", []
else:
texts = input_data.text.split(input_data.delimiter)
if input_data.strip:
texts = [text.strip() for text in texts]
yield "texts", texts
class TextReplaceBlock(Block):
class Input(BlockSchema):
text: str = SchemaField(description="The text to perform the replacement on.")
old: str = SchemaField(description="The substring to replace.")
new: str = SchemaField(description="The replacement substring.")
class Output(BlockSchema):
output: str = SchemaField(description="The text after replacement.")
def __init__(self):
super().__init__(
id="7e7c87ab-3469-4bcc-9abe-67705091b713",
description="This block is used to replace a text with a new text.",
categories={BlockCategory.TEXT},
input_schema=TextReplaceBlock.Input,
output_schema=TextReplaceBlock.Output,
test_input=[
{"text": "Hello, World!", "old": "Hello", "new": "Hi"},
],
test_output=[
("output", "Hi, World!"),
],
)
def run(self, input_data: Input, **kwargs) -> BlockOutput:
yield "output", input_data.text.replace(input_data.old, input_data.new)

View File

@@ -53,7 +53,7 @@ class UnrealTextToSpeechBlock(Block):
super().__init__(
id="4ff1ff6d-cc40-4caa-ae69-011daa20c378",
description="Converts text to speech using the Unreal Speech API",
categories={BlockCategory.AI, BlockCategory.TEXT, BlockCategory.MULTIMEDIA},
categories={BlockCategory.AI, BlockCategory.TEXT},
input_schema=UnrealTextToSpeechBlock.Input,
output_schema=UnrealTextToSpeechBlock.Output,
test_input={

View File

@@ -1,61 +0,0 @@
from typing import Literal
from pydantic import SecretStr
from backend.data.model import (
CredentialsField,
CredentialsMetaInput,
OAuth2Credentials,
ProviderName,
)
from backend.integrations.oauth.todoist import TodoistOAuthHandler
from backend.util.settings import Secrets
secrets = Secrets()
TODOIST_OAUTH_IS_CONFIGURED = bool(
secrets.todoist_client_id and secrets.todoist_client_secret
)
TodoistCredentials = OAuth2Credentials
TodoistCredentialsInput = CredentialsMetaInput[
Literal[ProviderName.TODOIST], Literal["oauth2"]
]
def TodoistCredentialsField(scopes: list[str]) -> TodoistCredentialsInput:
"""
Creates a Todoist credentials input on a block.
Params:
scopes: The authorization scopes needed for the block to work.
"""
return CredentialsField(
required_scopes=set(TodoistOAuthHandler.DEFAULT_SCOPES + scopes),
description="The Todoist integration requires OAuth2 authentication.",
)
TEST_CREDENTIALS = OAuth2Credentials(
id="01234567-89ab-cdef-0123-456789abcdef",
provider="todoist",
access_token=SecretStr("mock-todoist-access-token"),
refresh_token=None,
access_token_expires_at=None,
scopes=[
"task:add",
"data:read",
"data:read_write",
"data:delete",
"project:delete",
],
title="Mock Todoist OAuth2 Credentials",
username="mock-todoist-username",
refresh_token_expires_at=None,
)
TEST_CREDENTIALS_INPUT = {
"provider": TEST_CREDENTIALS.provider,
"id": TEST_CREDENTIALS.id,
"type": TEST_CREDENTIALS.type,
"title": TEST_CREDENTIALS.title,
}
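
A block opts into these credentials by declaring the field on its input schema, as the Todoist blocks below do. A minimal sketch (project imports assumed available; the extra scope is hypothetical):

from backend.blocks.todoist._auth import (
    TodoistCredentialsField,
    TodoistCredentialsInput,
)
from backend.data.block import BlockSchema


class Input(BlockSchema):
    credentials: TodoistCredentialsInput = TodoistCredentialsField(["data:read"])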

View File

@@ -1,24 +0,0 @@
from enum import Enum
class Colors(Enum):
berry_red = "berry_red"
red = "red"
orange = "orange"
yellow = "yellow"
olive_green = "olive_green"
lime_green = "lime_green"
green = "green"
mint_green = "mint_green"
teal = "teal"
sky_blue = "sky_blue"
light_blue = "light_blue"
blue = "blue"
grape = "grape"
violet = "violet"
lavender = "lavender"
magenta = "magenta"
salmon = "salmon"
charcoal = "charcoal"
grey = "grey"
taupe = "taupe"

View File

@@ -1,439 +0,0 @@
from typing import Literal, Union
from pydantic import BaseModel
from todoist_api_python.api import TodoistAPI
from typing_extensions import Optional
from backend.blocks.todoist._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
TodoistCredentials,
TodoistCredentialsField,
TodoistCredentialsInput,
)
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class TaskId(BaseModel):
discriminator: Literal["task"]
task_id: str
class ProjectId(BaseModel):
discriminator: Literal["project"]
project_id: str
class TodoistCreateCommentBlock(Block):
"""Creates a new comment on a Todoist task or project"""
class Input(BlockSchema):
credentials: TodoistCredentialsInput = TodoistCredentialsField([])
content: str = SchemaField(description="Comment content")
id_type: Union[TaskId, ProjectId] = SchemaField(
discriminator="discriminator",
description="Specify either task_id or project_id to comment on",
default=TaskId(discriminator="task", task_id=""),
advanced=False,
)
attachment: Optional[dict] = SchemaField(
description="Optional file attachment", default=None
)
class Output(BlockSchema):
id: str = SchemaField(description="ID of created comment")
content: str = SchemaField(description="Comment content")
posted_at: str = SchemaField(description="Comment timestamp")
task_id: Optional[str] = SchemaField(
description="Associated task ID", default=None
)
project_id: Optional[str] = SchemaField(
description="Associated project ID", default=None
)
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="1bba7e54-2310-4a31-8e6f-54d5f9ab7459",
description="Creates a new comment on a Todoist task or project",
categories={BlockCategory.PRODUCTIVITY},
input_schema=TodoistCreateCommentBlock.Input,
output_schema=TodoistCreateCommentBlock.Output,
test_input={
"credentials": TEST_CREDENTIALS_INPUT,
"content": "Test comment",
"id_type": {"discriminator": "task", "task_id": "2995104339"},
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("id", "2992679862"),
("content", "Test comment"),
("posted_at", "2016-09-22T07:00:00.000000Z"),
("task_id", "2995104339"),
("project_id", None),
],
test_mock={
"create_comment": lambda content, credentials, task_id=None, project_id=None, attachment=None: {
"id": "2992679862",
"content": "Test comment",
"posted_at": "2016-09-22T07:00:00.000000Z",
"task_id": "2995104339",
"project_id": None,
}
},
)
@staticmethod
def create_comment(
credentials: TodoistCredentials,
content: str,
task_id: Optional[str] = None,
project_id: Optional[str] = None,
attachment: Optional[dict] = None,
):
try:
api = TodoistAPI(credentials.access_token.get_secret_value())
comment = api.add_comment(
content=content,
task_id=task_id,
project_id=project_id,
attachment=attachment,
)
return comment.__dict__
except Exception as e:
raise e
def run(
self,
input_data: Input,
*,
credentials: TodoistCredentials,
**kwargs,
) -> BlockOutput:
try:
task_id = None
project_id = None
if isinstance(input_data.id_type, TaskId):
task_id = input_data.id_type.task_id
else:
project_id = input_data.id_type.project_id
comment_data = self.create_comment(
credentials,
input_data.content,
task_id=task_id,
project_id=project_id,
attachment=input_data.attachment,
)
if comment_data:
yield "id", comment_data["id"]
yield "content", comment_data["content"]
yield "posted_at", comment_data["posted_at"]
yield "task_id", comment_data["task_id"]
yield "project_id", comment_data["project_id"]
except Exception as e:
yield "error", str(e)
class TodoistGetCommentsBlock(Block):
"""Get all comments for a Todoist task or project"""
class Input(BlockSchema):
credentials: TodoistCredentialsInput = TodoistCredentialsField([])
id_type: Union[TaskId, ProjectId] = SchemaField(
discriminator="discriminator",
description="Specify either task_id or project_id to get comments for",
default=TaskId(discriminator="task", task_id=""),
advanced=False,
)
class Output(BlockSchema):
comments: list = SchemaField(description="List of comments")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="9972d8ae-ddf2-11ef-a9b8-32d3674e8b7e",
description="Get all comments for a Todoist task or project",
categories={BlockCategory.PRODUCTIVITY},
input_schema=TodoistGetCommentsBlock.Input,
output_schema=TodoistGetCommentsBlock.Output,
test_input={
"credentials": TEST_CREDENTIALS_INPUT,
"id_type": {"discriminator": "task", "task_id": "2995104339"},
},
test_credentials=TEST_CREDENTIALS,
test_output=[
(
"comments",
[
{
"id": "2992679862",
"content": "Test comment",
"posted_at": "2016-09-22T07:00:00.000000Z",
"task_id": "2995104339",
"project_id": None,
"attachment": None,
}
],
)
],
test_mock={
"get_comments": lambda credentials, task_id=None, project_id=None: [
{
"id": "2992679862",
"content": "Test comment",
"posted_at": "2016-09-22T07:00:00.000000Z",
"task_id": "2995104339",
"project_id": None,
"attachment": None,
}
]
},
)
@staticmethod
def get_comments(
credentials: TodoistCredentials,
task_id: Optional[str] = None,
project_id: Optional[str] = None,
):
try:
api = TodoistAPI(credentials.access_token.get_secret_value())
comments = api.get_comments(task_id=task_id, project_id=project_id)
return [comment.__dict__ for comment in comments]
except Exception as e:
raise e
def run(
self,
input_data: Input,
*,
credentials: TodoistCredentials,
**kwargs,
) -> BlockOutput:
try:
task_id = None
project_id = None
if isinstance(input_data.id_type, TaskId):
task_id = input_data.id_type.task_id
else:
project_id = input_data.id_type.project_id
comments = self.get_comments(
credentials, task_id=task_id, project_id=project_id
)
yield "comments", comments
except Exception as e:
yield "error", str(e)
class TodoistGetCommentBlock(Block):
"""Get a single comment from Todoist using comment ID"""
class Input(BlockSchema):
credentials: TodoistCredentialsInput = TodoistCredentialsField([])
comment_id: str = SchemaField(description="Comment ID to retrieve")
class Output(BlockSchema):
content: str = SchemaField(description="Comment content")
id: str = SchemaField(description="Comment ID")
posted_at: str = SchemaField(description="Comment timestamp")
project_id: Optional[str] = SchemaField(
description="Associated project ID", default=None
)
task_id: Optional[str] = SchemaField(
description="Associated task ID", default=None
)
attachment: Optional[dict] = SchemaField(
description="Optional file attachment", default=None
)
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="a809d264-ddf2-11ef-9764-32d3674e8b7e",
description="Get a single comment from Todoist",
categories={BlockCategory.PRODUCTIVITY},
input_schema=TodoistGetCommentBlock.Input,
output_schema=TodoistGetCommentBlock.Output,
test_input={
"credentials": TEST_CREDENTIALS_INPUT,
"comment_id": "2992679862",
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("content", "Test comment"),
("id", "2992679862"),
("posted_at", "2016-09-22T07:00:00.000000Z"),
("project_id", None),
("task_id", "2995104339"),
("attachment", None),
],
test_mock={
"get_comment": lambda credentials, comment_id: {
"content": "Test comment",
"id": "2992679862",
"posted_at": "2016-09-22T07:00:00.000000Z",
"project_id": None,
"task_id": "2995104339",
"attachment": None,
}
},
)
@staticmethod
def get_comment(credentials: TodoistCredentials, comment_id: str):
try:
api = TodoistAPI(credentials.access_token.get_secret_value())
comment = api.get_comment(comment_id=comment_id)
return comment.__dict__
except Exception as e:
raise e
def run(
self,
input_data: Input,
*,
credentials: TodoistCredentials,
**kwargs,
) -> BlockOutput:
try:
comment_data = self.get_comment(
credentials, comment_id=input_data.comment_id
)
if comment_data:
yield "content", comment_data["content"]
yield "id", comment_data["id"]
yield "posted_at", comment_data["posted_at"]
yield "project_id", comment_data["project_id"]
yield "task_id", comment_data["task_id"]
yield "attachment", comment_data["attachment"]
except Exception as e:
yield "error", str(e)
class TodoistUpdateCommentBlock(Block):
"""Updates a Todoist comment"""
class Input(BlockSchema):
credentials: TodoistCredentialsInput = TodoistCredentialsField([])
comment_id: str = SchemaField(description="Comment ID to update")
content: str = SchemaField(description="New content for the comment")
class Output(BlockSchema):
success: bool = SchemaField(description="Whether the update was successful")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="b773c520-ddf2-11ef-9f34-32d3674e8b7e",
description="Updates a Todoist comment",
categories={BlockCategory.PRODUCTIVITY},
input_schema=TodoistUpdateCommentBlock.Input,
output_schema=TodoistUpdateCommentBlock.Output,
test_input={
"credentials": TEST_CREDENTIALS_INPUT,
"comment_id": "2992679862",
"content": "Need one bottle of milk",
},
test_credentials=TEST_CREDENTIALS,
test_output=[("success", True)],
test_mock={"update_comment": lambda credentials, comment_id, content: True},
)
@staticmethod
def update_comment(credentials: TodoistCredentials, comment_id: str, content: str):
try:
api = TodoistAPI(credentials.access_token.get_secret_value())
api.update_comment(comment_id=comment_id, content=content)
return True
except Exception as e:
raise e
def run(
self,
input_data: Input,
*,
credentials: TodoistCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.update_comment(
credentials,
comment_id=input_data.comment_id,
content=input_data.content,
)
yield "success", success
except Exception as e:
yield "error", str(e)
class TodoistDeleteCommentBlock(Block):
"""Deletes a Todoist comment"""
class Input(BlockSchema):
credentials: TodoistCredentialsInput = TodoistCredentialsField([])
comment_id: str = SchemaField(description="Comment ID to delete")
class Output(BlockSchema):
success: bool = SchemaField(description="Whether the deletion was successful")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="bda4c020-ddf2-11ef-b114-32d3674e8b7e",
description="Deletes a Todoist comment",
categories={BlockCategory.PRODUCTIVITY},
input_schema=TodoistDeleteCommentBlock.Input,
output_schema=TodoistDeleteCommentBlock.Output,
test_input={
"credentials": TEST_CREDENTIALS_INPUT,
"comment_id": "2992679862",
},
test_credentials=TEST_CREDENTIALS,
test_output=[("success", True)],
test_mock={"delete_comment": lambda credentials, comment_id: True},
)
@staticmethod
def delete_comment(credentials: TodoistCredentials, comment_id: str):
try:
api = TodoistAPI(credentials.access_token.get_secret_value())
success = api.delete_comment(comment_id=comment_id)
return success
except Exception as e:
raise e
def run(
self,
input_data: Input,
*,
credentials: TodoistCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.delete_comment(credentials, comment_id=input_data.comment_id)
yield "success", success
except Exception as e:
yield "error", str(e)

View File

@@ -1,557 +0,0 @@
from todoist_api_python.api import TodoistAPI
from typing_extensions import Optional
from backend.blocks.todoist._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
TodoistCredentials,
TodoistCredentialsField,
TodoistCredentialsInput,
)
from backend.blocks.todoist._types import Colors
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class TodoistCreateLabelBlock(Block):
"""Creates a new label in Todoist"""
class Input(BlockSchema):
credentials: TodoistCredentialsInput = TodoistCredentialsField([])
name: str = SchemaField(description="Name of the label")
order: Optional[int] = SchemaField(description="Label order", default=None)
color: Optional[Colors] = SchemaField(
description="The color of the label icon", default=Colors.charcoal
)
is_favorite: bool = SchemaField(
description="Whether the label is a favorite", default=False
)
class Output(BlockSchema):
id: str = SchemaField(description="ID of the created label")
name: str = SchemaField(description="Name of the label")
color: str = SchemaField(description="Color of the label")
order: int = SchemaField(description="Label order")
is_favorite: bool = SchemaField(description="Favorite status")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="7288a968-de14-11ef-8997-32d3674e8b7e",
description="Creates a new label in Todoist, It will not work if same name already exists",
categories={BlockCategory.PRODUCTIVITY},
input_schema=TodoistCreateLabelBlock.Input,
output_schema=TodoistCreateLabelBlock.Output,
test_input={
"credentials": TEST_CREDENTIALS_INPUT,
"name": "Test Label",
"color": Colors.charcoal.value,
"order": 1,
"is_favorite": False,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("id", "2156154810"),
("name", "Test Label"),
("color", "charcoal"),
("order", 1),
("is_favorite", False),
],
test_mock={
"create_label": lambda *args, **kwargs: {
"id": "2156154810",
"name": "Test Label",
"color": "charcoal",
"order": 1,
"is_favorite": False,
}
},
)
@staticmethod
def create_label(credentials: TodoistCredentials, name: str, **kwargs):
try:
api = TodoistAPI(credentials.access_token.get_secret_value())
label = api.add_label(name=name, **kwargs)
return label.__dict__
except Exception as e:
raise e
def run(
self,
input_data: Input,
*,
credentials: TodoistCredentials,
**kwargs,
) -> BlockOutput:
try:
label_args = {
"order": input_data.order,
"color": (
input_data.color.value if input_data.color is not None else None
),
"is_favorite": input_data.is_favorite,
}
label_data = self.create_label(
credentials,
input_data.name,
**{k: v for k, v in label_args.items() if v is not None},
)
if label_data:
yield "id", label_data["id"]
yield "name", label_data["name"]
yield "color", label_data["color"]
yield "order", label_data["order"]
yield "is_favorite", label_data["is_favorite"]
except Exception as e:
yield "error", str(e)
class TodoistListLabelsBlock(Block):
"""Gets all personal labels from Todoist"""
class Input(BlockSchema):
credentials: TodoistCredentialsInput = TodoistCredentialsField([])
class Output(BlockSchema):
labels: list = SchemaField(description="List of complete label data")
label_ids: list = SchemaField(description="List of label IDs")
label_names: list = SchemaField(description="List of label names")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="776dd750-de14-11ef-b927-32d3674e8b7e",
description="Gets all personal labels from Todoist",
categories={BlockCategory.PRODUCTIVITY},
input_schema=TodoistListLabelsBlock.Input,
output_schema=TodoistListLabelsBlock.Output,
test_input={"credentials": TEST_CREDENTIALS_INPUT},
test_credentials=TEST_CREDENTIALS,
test_output=[
(
"labels",
[
{
"id": "2156154810",
"name": "Test Label",
"color": "charcoal",
"order": 1,
"is_favorite": False,
}
],
),
("label_ids", ["2156154810"]),
("label_names", ["Test Label"]),
],
test_mock={
"get_labels": lambda *args, **kwargs: [
{
"id": "2156154810",
"name": "Test Label",
"color": "charcoal",
"order": 1,
"is_favorite": False,
}
]
},
)
@staticmethod
def get_labels(credentials: TodoistCredentials):
try:
api = TodoistAPI(credentials.access_token.get_secret_value())
labels = api.get_labels()
return [label.__dict__ for label in labels]
except Exception as e:
raise e
def run(
self,
input_data: Input,
*,
credentials: TodoistCredentials,
**kwargs,
) -> BlockOutput:
try:
labels = self.get_labels(credentials)
yield "labels", labels
yield "label_ids", [label["id"] for label in labels]
yield "label_names", [label["name"] for label in labels]
except Exception as e:
yield "error", str(e)
class TodoistGetLabelBlock(Block):
"""Gets a personal label from Todoist by ID"""
class Input(BlockSchema):
credentials: TodoistCredentialsInput = TodoistCredentialsField([])
label_id: str = SchemaField(description="ID of the label to retrieve")
class Output(BlockSchema):
id: str = SchemaField(description="ID of the label")
name: str = SchemaField(description="Name of the label")
color: str = SchemaField(description="Color of the label")
order: int = SchemaField(description="Label order")
is_favorite: bool = SchemaField(description="Favorite status")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="7f236514-de14-11ef-bd7a-32d3674e8b7e",
description="Gets a personal label from Todoist by ID",
categories={BlockCategory.PRODUCTIVITY},
input_schema=TodoistGetLabelBlock.Input,
output_schema=TodoistGetLabelBlock.Output,
test_input={
"credentials": TEST_CREDENTIALS_INPUT,
"label_id": "2156154810",
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("id", "2156154810"),
("name", "Test Label"),
("color", "charcoal"),
("order", 1),
("is_favorite", False),
],
test_mock={
"get_label": lambda *args, **kwargs: {
"id": "2156154810",
"name": "Test Label",
"color": "charcoal",
"order": 1,
"is_favorite": False,
}
},
)
@staticmethod
def get_label(credentials: TodoistCredentials, label_id: str):
try:
api = TodoistAPI(credentials.access_token.get_secret_value())
label = api.get_label(label_id=label_id)
return label.__dict__
except Exception as e:
raise e
def run(
self,
input_data: Input,
*,
credentials: TodoistCredentials,
**kwargs,
) -> BlockOutput:
try:
label_data = self.get_label(credentials, input_data.label_id)
if label_data:
yield "id", label_data["id"]
yield "name", label_data["name"]
yield "color", label_data["color"]
yield "order", label_data["order"]
yield "is_favorite", label_data["is_favorite"]
except Exception as e:
yield "error", str(e)
class TodoistUpdateLabelBlock(Block):
"""Updates a personal label in Todoist using ID"""
class Input(BlockSchema):
credentials: TodoistCredentialsInput = TodoistCredentialsField([])
label_id: str = SchemaField(description="ID of the label to update")
name: Optional[str] = SchemaField(
description="New name of the label", default=None
)
order: Optional[int] = SchemaField(description="Label order", default=None)
color: Optional[Colors] = SchemaField(
description="The color of the label icon", default=None
)
is_favorite: bool = SchemaField(
description="Whether the label is a favorite (true/false)", default=False
)
class Output(BlockSchema):
success: bool = SchemaField(description="Whether the update was successful")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="8755614c-de14-11ef-9b56-32d3674e8b7e",
description="Updates a personal label in Todoist",
categories={BlockCategory.PRODUCTIVITY},
input_schema=TodoistUpdateLabelBlock.Input,
output_schema=TodoistUpdateLabelBlock.Output,
test_input={
"credentials": TEST_CREDENTIALS_INPUT,
"label_id": "2156154810",
"name": "Updated Label",
"color": Colors.charcoal.value,
"order": 2,
"is_favorite": True,
},
test_credentials=TEST_CREDENTIALS,
test_output=[("success", True)],
test_mock={"update_label": lambda *args, **kwargs: True},
)
@staticmethod
def update_label(credentials: TodoistCredentials, label_id: str, **kwargs):
try:
api = TodoistAPI(credentials.access_token.get_secret_value())
api.update_label(label_id=label_id, **kwargs)
return True
except Exception as e:
raise e
def run(
self,
input_data: Input,
*,
credentials: TodoistCredentials,
**kwargs,
) -> BlockOutput:
try:
label_args = {}
if input_data.name is not None:
label_args["name"] = input_data.name
if input_data.order is not None:
label_args["order"] = input_data.order
if input_data.color is not None:
label_args["color"] = input_data.color.value
if input_data.is_favorite is not None:
label_args["is_favorite"] = input_data.is_favorite
success = self.update_label(
credentials,
input_data.label_id,
**{k: v for k, v in label_args.items() if v is not None},
)
yield "success", success
except Exception as e:
yield "error", str(e)
class TodoistDeleteLabelBlock(Block):
"""Deletes a personal label in Todoist"""
class Input(BlockSchema):
credentials: TodoistCredentialsInput = TodoistCredentialsField([])
label_id: str = SchemaField(description="ID of the label to delete")
class Output(BlockSchema):
success: bool = SchemaField(description="Whether the deletion was successful")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="901b8f86-de14-11ef-98b8-32d3674e8b7e",
description="Deletes a personal label in Todoist",
categories={BlockCategory.PRODUCTIVITY},
input_schema=TodoistDeleteLabelBlock.Input,
output_schema=TodoistDeleteLabelBlock.Output,
test_input={
"credentials": TEST_CREDENTIALS_INPUT,
"label_id": "2156154810",
},
test_credentials=TEST_CREDENTIALS,
test_output=[("success", True)],
test_mock={"delete_label": lambda *args, **kwargs: True},
)
@staticmethod
def delete_label(credentials: TodoistCredentials, label_id: str):
try:
api = TodoistAPI(credentials.access_token.get_secret_value())
success = api.delete_label(label_id=label_id)
return success
except Exception as e:
raise e
def run(
self,
input_data: Input,
*,
credentials: TodoistCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.delete_label(credentials, input_data.label_id)
yield "success", success
except Exception as e:
yield "error", str(e)
class TodoistGetSharedLabelsBlock(Block):
"""Gets all shared labels from Todoist"""
class Input(BlockSchema):
credentials: TodoistCredentialsInput = TodoistCredentialsField([])
class Output(BlockSchema):
labels: list = SchemaField(description="List of shared label names")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="55fba510-de15-11ef-aed2-32d3674e8b7e",
description="Gets all shared labels from Todoist",
categories={BlockCategory.PRODUCTIVITY},
input_schema=TodoistGetSharedLabelsBlock.Input,
output_schema=TodoistGetSharedLabelsBlock.Output,
test_input={"credentials": TEST_CREDENTIALS_INPUT},
test_credentials=TEST_CREDENTIALS,
test_output=[("labels", ["Label1", "Label2", "Label3"])],
test_mock={
"get_shared_labels": lambda *args, **kwargs: [
"Label1",
"Label2",
"Label3",
]
},
)
@staticmethod
def get_shared_labels(credentials: TodoistCredentials):
try:
api = TodoistAPI(credentials.access_token.get_secret_value())
labels = api.get_shared_labels()
return labels
except Exception as e:
raise e
def run(
self,
input_data: Input,
*,
credentials: TodoistCredentials,
**kwargs,
) -> BlockOutput:
try:
labels = self.get_shared_labels(credentials)
yield "labels", labels
except Exception as e:
yield "error", str(e)
class TodoistRenameSharedLabelsBlock(Block):
"""Renames all instances of a shared label"""
class Input(BlockSchema):
credentials: TodoistCredentialsInput = TodoistCredentialsField([])
name: str = SchemaField(description="The name of the existing label to rename")
new_name: str = SchemaField(description="The new name for the label")
class Output(BlockSchema):
success: bool = SchemaField(description="Whether the rename was successful")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="9d63ad9a-de14-11ef-ab3f-32d3674e8b7e",
description="Renames all instances of a shared label",
categories={BlockCategory.PRODUCTIVITY},
input_schema=TodoistRenameSharedLabelsBlock.Input,
output_schema=TodoistRenameSharedLabelsBlock.Output,
test_input={
"credentials": TEST_CREDENTIALS_INPUT,
"name": "OldLabel",
"new_name": "NewLabel",
},
test_credentials=TEST_CREDENTIALS,
test_output=[("success", True)],
test_mock={"rename_shared_labels": lambda *args, **kwargs: True},
)
@staticmethod
def rename_shared_labels(credentials: TodoistCredentials, name: str, new_name: str):
try:
api = TodoistAPI(credentials.access_token.get_secret_value())
success = api.rename_shared_label(name=name, new_name=new_name)
return success
except Exception as e:
raise e
def run(
self,
input_data: Input,
*,
credentials: TodoistCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.rename_shared_labels(
credentials, input_data.name, input_data.new_name
)
yield "success", success
except Exception as e:
yield "error", str(e)
class TodoistRemoveSharedLabelsBlock(Block):
"""Removes all instances of a shared label"""
class Input(BlockSchema):
credentials: TodoistCredentialsInput = TodoistCredentialsField([])
name: str = SchemaField(description="The name of the label to remove")
class Output(BlockSchema):
success: bool = SchemaField(description="Whether the removal was successful")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="a6c5cbde-de14-11ef-8863-32d3674e8b7e",
description="Removes all instances of a shared label",
categories={BlockCategory.PRODUCTIVITY},
input_schema=TodoistRemoveSharedLabelsBlock.Input,
output_schema=TodoistRemoveSharedLabelsBlock.Output,
test_input={"credentials": TEST_CREDENTIALS_INPUT, "name": "LabelToRemove"},
test_credentials=TEST_CREDENTIALS,
test_output=[("success", True)],
test_mock={"remove_shared_label": lambda *args, **kwargs: True},
)
@staticmethod
def remove_shared_label(credentials: TodoistCredentials, name: str):
try:
api = TodoistAPI(credentials.access_token.get_secret_value())
success = api.remove_shared_label(name=name)
return success
except Exception as e:
raise e
def run(
self,
input_data: Input,
*,
credentials: TodoistCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.remove_shared_label(credentials, input_data.name)
yield "success", success
except Exception as e:
yield "error", str(e)

View File

@@ -1,566 +0,0 @@
from todoist_api_python.api import TodoistAPI
from typing_extensions import Optional
from backend.blocks.todoist._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
TodoistCredentials,
TodoistCredentialsField,
TodoistCredentialsInput,
)
from backend.blocks.todoist._types import Colors
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class TodoistListProjectsBlock(Block):
"""Gets all projects for a Todoist user"""
class Input(BlockSchema):
credentials: TodoistCredentialsInput = TodoistCredentialsField([])
class Output(BlockSchema):
names_list: list[str] = SchemaField(description="List of project names")
ids_list: list[str] = SchemaField(description="List of project IDs")
url_list: list[str] = SchemaField(description="List of project URLs")
complete_data: list[dict] = SchemaField(
description="Complete project data including all fields"
)
error: str = SchemaField(description="Error message if request failed")
def __init__(self):
super().__init__(
id="5f3e1d5b-6bc5-40e3-97ee-1318b3f38813",
description="Gets all projects and their details from Todoist",
categories={BlockCategory.PRODUCTIVITY},
input_schema=TodoistListProjectsBlock.Input,
output_schema=TodoistListProjectsBlock.Output,
test_input={
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("names_list", ["Inbox"]),
("ids_list", ["220474322"]),
("url_list", ["https://todoist.com/showProject?id=220474322"]),
(
"complete_data",
[
{
"id": "220474322",
"name": "Inbox",
"url": "https://todoist.com/showProject?id=220474322",
}
],
),
],
test_mock={
"get_project_lists": lambda *args, **kwargs: (
["Inbox"],
["220474322"],
["https://todoist.com/showProject?id=220474322"],
[
{
"id": "220474322",
"name": "Inbox",
"url": "https://todoist.com/showProject?id=220474322",
}
],
None,
)
},
)
@staticmethod
def get_project_lists(credentials: TodoistCredentials):
try:
api = TodoistAPI(credentials.access_token.get_secret_value())
projects = api.get_projects()
names = []
ids = []
urls = []
complete_data = []
for project in projects:
names.append(project.name)
ids.append(project.id)
urls.append(project.url)
complete_data.append(project.__dict__)
return names, ids, urls, complete_data, None
except Exception as e:
raise e
def run(
self,
input_data: Input,
*,
credentials: TodoistCredentials,
**kwargs,
) -> BlockOutput:
try:
names, ids, urls, data, error = self.get_project_lists(credentials)
if names:
yield "names_list", names
if ids:
yield "ids_list", ids
if urls:
yield "url_list", urls
if data:
yield "complete_data", data
except Exception as e:
yield "error", str(e)
class TodoistCreateProjectBlock(Block):
"""Creates a new project in Todoist"""
class Input(BlockSchema):
credentials: TodoistCredentialsInput = TodoistCredentialsField([])
name: str = SchemaField(description="Name of the project", advanced=False)
parent_id: Optional[str] = SchemaField(
description="Parent project ID", default=None, advanced=True
)
color: Optional[Colors] = SchemaField(
description="Color of the project icon",
default=Colors.charcoal,
advanced=True,
)
is_favorite: bool = SchemaField(
description="Whether the project is a favorite",
default=False,
advanced=True,
)
view_style: Optional[str] = SchemaField(
description="Display style (list or board)", default=None, advanced=True
)
class Output(BlockSchema):
success: bool = SchemaField(description="Whether the creation was successful")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="ade60136-de14-11ef-b5e5-32d3674e8b7e",
description="Creates a new project in Todoist",
categories={BlockCategory.PRODUCTIVITY},
input_schema=TodoistCreateProjectBlock.Input,
output_schema=TodoistCreateProjectBlock.Output,
test_input={"credentials": TEST_CREDENTIALS_INPUT, "name": "Test Project"},
test_credentials=TEST_CREDENTIALS,
test_output=[("success", True)],
test_mock={"create_project": lambda *args, **kwargs: (True)},
)
@staticmethod
def create_project(
credentials: TodoistCredentials,
name: str,
parent_id: Optional[str],
color: Optional[Colors],
is_favorite: bool,
view_style: Optional[str],
):
try:
api = TodoistAPI(credentials.access_token.get_secret_value())
params = {"name": name, "is_favorite": is_favorite}
if parent_id is not None:
params["parent_id"] = parent_id
if color is not None:
params["color"] = color.value
if view_style is not None:
params["view_style"] = view_style
api.add_project(**params)
return True
except Exception as e:
raise e
def run(
self,
input_data: Input,
*,
credentials: TodoistCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.create_project(
credentials=credentials,
name=input_data.name,
parent_id=input_data.parent_id,
color=input_data.color,
is_favorite=input_data.is_favorite,
view_style=input_data.view_style,
)
yield "success", success
except Exception as e:
yield "error", str(e)
class TodoistGetProjectBlock(Block):
"""Gets details for a specific Todoist project"""
class Input(BlockSchema):
credentials: TodoistCredentialsInput = TodoistCredentialsField([])
project_id: str = SchemaField(
description="ID of the project to get details for", advanced=False
)
class Output(BlockSchema):
project_id: str = SchemaField(description="ID of project")
project_name: str = SchemaField(description="Name of project")
project_url: str = SchemaField(description="URL of project")
complete_data: dict = SchemaField(
description="Complete project data including all fields"
)
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="b435b5ea-de14-11ef-8b51-32d3674e8b7e",
description="Gets details for a specific Todoist project",
categories={BlockCategory.PRODUCTIVITY},
input_schema=TodoistGetProjectBlock.Input,
output_schema=TodoistGetProjectBlock.Output,
test_input={
"credentials": TEST_CREDENTIALS_INPUT,
"project_id": "2203306141",
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("project_id", "2203306141"),
("project_name", "Shopping List"),
("project_url", "https://todoist.com/showProject?id=2203306141"),
(
"complete_data",
{
"id": "2203306141",
"name": "Shopping List",
"url": "https://todoist.com/showProject?id=2203306141",
},
),
],
test_mock={
"get_project": lambda *args, **kwargs: (
"2203306141",
"Shopping List",
"https://todoist.com/showProject?id=2203306141",
{
"id": "2203306141",
"name": "Shopping List",
"url": "https://todoist.com/showProject?id=2203306141",
},
)
},
)
@staticmethod
def get_project(credentials: TodoistCredentials, project_id: str):
try:
api = TodoistAPI(credentials.access_token.get_secret_value())
project = api.get_project(project_id=project_id)
return project.id, project.name, project.url, project.__dict__
except Exception as e:
raise e
def run(
self,
input_data: Input,
*,
credentials: TodoistCredentials,
**kwargs,
) -> BlockOutput:
try:
project_id, project_name, project_url, data = self.get_project(
credentials=credentials, project_id=input_data.project_id
)
if project_id:
yield "project_id", project_id
if project_name:
yield "project_name", project_name
if project_url:
yield "project_url", project_url
if data:
yield "complete_data", data
except Exception as e:
yield "error", str(e)
class TodoistUpdateProjectBlock(Block):
"""Updates an existing project in Todoist"""
class Input(BlockSchema):
credentials: TodoistCredentialsInput = TodoistCredentialsField([])
project_id: str = SchemaField(
description="ID of project to update", advanced=False
)
name: Optional[str] = SchemaField(
description="New name for the project", default=None, advanced=False
)
color: Optional[Colors] = SchemaField(
description="New color for the project icon", default=None, advanced=True
)
is_favorite: Optional[bool] = SchemaField(
description="Whether the project should be a favorite",
default=None,
advanced=True,
)
view_style: Optional[str] = SchemaField(
description="Display style (list or board)", default=None, advanced=True
)
class Output(BlockSchema):
success: bool = SchemaField(description="Whether the update was successful")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="ba41a20a-de14-11ef-91d7-32d3674e8b7e",
description="Updates an existing project in Todoist",
categories={BlockCategory.PRODUCTIVITY},
input_schema=TodoistUpdateProjectBlock.Input,
output_schema=TodoistUpdateProjectBlock.Output,
test_input={
"credentials": TEST_CREDENTIALS_INPUT,
"project_id": "2203306141",
"name": "Things To Buy",
},
test_credentials=TEST_CREDENTIALS,
test_output=[("success", True)],
test_mock={"update_project": lambda *args, **kwargs: (True)},
)
@staticmethod
def update_project(
credentials: TodoistCredentials,
project_id: str,
name: Optional[str],
color: Optional[Colors],
is_favorite: Optional[bool],
view_style: Optional[str],
):
try:
api = TodoistAPI(credentials.access_token.get_secret_value())
params = {}
if name is not None:
params["name"] = name
if color is not None:
params["color"] = color.value
if is_favorite is not None:
params["is_favorite"] = is_favorite
if view_style is not None:
params["view_style"] = view_style
api.update_project(project_id=project_id, **params)
return True
except Exception as e:
raise e
def run(
self,
input_data: Input,
*,
credentials: TodoistCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.update_project(
credentials=credentials,
project_id=input_data.project_id,
name=input_data.name,
color=input_data.color,
is_favorite=input_data.is_favorite,
view_style=input_data.view_style,
)
yield "success", success
except Exception as e:
yield "error", str(e)
class TodoistDeleteProjectBlock(Block):
"""Deletes a project and all of its sections and tasks"""
class Input(BlockSchema):
credentials: TodoistCredentialsInput = TodoistCredentialsField([])
project_id: str = SchemaField(
description="ID of project to delete", advanced=False
)
class Output(BlockSchema):
success: bool = SchemaField(description="Whether the deletion was successful")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="c2893acc-de14-11ef-a113-32d3674e8b7e",
description="Deletes a Todoist project and all its contents",
categories={BlockCategory.PRODUCTIVITY},
input_schema=TodoistDeleteProjectBlock.Input,
output_schema=TodoistDeleteProjectBlock.Output,
test_input={
"credentials": TEST_CREDENTIALS_INPUT,
"project_id": "2203306141",
},
test_credentials=TEST_CREDENTIALS,
test_output=[("success", True)],
test_mock={"delete_project": lambda *args, **kwargs: (True)},
)
@staticmethod
def delete_project(credentials: TodoistCredentials, project_id: str):
try:
api = TodoistAPI(credentials.access_token.get_secret_value())
success = api.delete_project(project_id=project_id)
return success
except Exception as e:
raise e
def run(
self,
input_data: Input,
*,
credentials: TodoistCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.delete_project(
credentials=credentials, project_id=input_data.project_id
)
yield "success", success
except Exception as e:
yield "error", str(e)
class TodoistListCollaboratorsBlock(Block):
"""Gets all collaborators for a Todoist project"""
class Input(BlockSchema):
credentials: TodoistCredentialsInput = TodoistCredentialsField([])
project_id: str = SchemaField(
description="ID of the project to get collaborators for", advanced=False
)
class Output(BlockSchema):
collaborator_ids: list[str] = SchemaField(
description="List of collaborator IDs"
)
collaborator_names: list[str] = SchemaField(
description="List of collaborator names"
)
collaborator_emails: list[str] = SchemaField(
description="List of collaborator email addresses"
)
complete_data: list[dict] = SchemaField(
description="Complete collaborator data including all fields"
)
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="c99c804e-de14-11ef-9f47-32d3674e8b7e",
description="Gets all collaborators for a specific Todoist project",
categories={BlockCategory.PRODUCTIVITY},
input_schema=TodoistListCollaboratorsBlock.Input,
output_schema=TodoistListCollaboratorsBlock.Output,
test_input={
"credentials": TEST_CREDENTIALS_INPUT,
"project_id": "2203306141",
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("collaborator_ids", ["2671362", "2671366"]),
("collaborator_names", ["Alice", "Bob"]),
("collaborator_emails", ["alice@example.com", "bob@example.com"]),
(
"complete_data",
[
{
"id": "2671362",
"name": "Alice",
"email": "alice@example.com",
},
{"id": "2671366", "name": "Bob", "email": "bob@example.com"},
],
),
],
test_mock={
"get_collaborators": lambda *args, **kwargs: (
["2671362", "2671366"],
["Alice", "Bob"],
["alice@example.com", "bob@example.com"],
[
{
"id": "2671362",
"name": "Alice",
"email": "alice@example.com",
},
{"id": "2671366", "name": "Bob", "email": "bob@example.com"},
],
)
},
)
@staticmethod
def get_collaborators(credentials: TodoistCredentials, project_id: str):
try:
api = TodoistAPI(credentials.access_token.get_secret_value())
collaborators = api.get_collaborators(project_id=project_id)
ids = []
names = []
emails = []
complete_data = []
for collaborator in collaborators:
ids.append(collaborator.id)
names.append(collaborator.name)
emails.append(collaborator.email)
complete_data.append(collaborator.__dict__)
return ids, names, emails, complete_data
except Exception as e:
raise e
def run(
self,
input_data: Input,
*,
credentials: TodoistCredentials,
**kwargs,
) -> BlockOutput:
try:
ids, names, emails, data = self.get_collaborators(
credentials=credentials, project_id=input_data.project_id
)
if ids:
yield "collaborator_ids", ids
if names:
yield "collaborator_names", names
if emails:
yield "collaborator_emails", emails
if data:
yield "complete_data", data
except Exception as e:
yield "error", str(e)

View File

@@ -1,306 +0,0 @@
from todoist_api_python.api import TodoistAPI
from typing_extensions import Optional
from backend.blocks.todoist._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
TodoistCredentials,
TodoistCredentialsField,
TodoistCredentialsInput,
)
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class TodoistListSectionsBlock(Block):
"""Gets all sections for a Todoist project"""
class Input(BlockSchema):
credentials: TodoistCredentialsInput = TodoistCredentialsField([])
project_id: Optional[str] = SchemaField(
description="Optional project ID to filter sections"
)
class Output(BlockSchema):
names_list: list[str] = SchemaField(description="List of section names")
ids_list: list[str] = SchemaField(description="List of section IDs")
complete_data: list[dict] = SchemaField(
description="Complete section data including all fields"
)
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="d6a116d8-de14-11ef-a94c-32d3674e8b7e",
description="Gets all sections and their details from Todoist",
categories={BlockCategory.PRODUCTIVITY},
input_schema=TodoistListSectionsBlock.Input,
output_schema=TodoistListSectionsBlock.Output,
test_input={
"credentials": TEST_CREDENTIALS_INPUT,
"project_id": "2203306141",
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("names_list", ["Groceries"]),
("ids_list", ["7025"]),
(
"complete_data",
[
{
"id": "7025",
"project_id": "2203306141",
"order": 1,
"name": "Groceries",
}
],
),
],
test_mock={
"get_section_lists": lambda *args, **kwargs: (
["Groceries"],
["7025"],
[
{
"id": "7025",
"project_id": "2203306141",
"order": 1,
"name": "Groceries",
}
],
)
},
)
@staticmethod
def get_section_lists(
credentials: TodoistCredentials, project_id: Optional[str] = None
):
try:
api = TodoistAPI(credentials.access_token.get_secret_value())
sections = api.get_sections(project_id=project_id)
names = []
ids = []
complete_data = []
for section in sections:
names.append(section.name)
ids.append(section.id)
complete_data.append(section.__dict__)
return names, ids, complete_data
except Exception as e:
raise e
def run(
self,
input_data: Input,
*,
credentials: TodoistCredentials,
**kwargs,
) -> BlockOutput:
try:
names, ids, data = self.get_section_lists(
credentials, input_data.project_id
)
if names:
yield "names_list", names
if ids:
yield "ids_list", ids
if data:
yield "complete_data", data
except Exception as e:
yield "error", str(e)
# Error in the official Todoist SDK. Will add this block using sync_api instead.
# class TodoistCreateSectionBlock(Block):
# """Creates a new section in a Todoist project"""
# class Input(BlockSchema):
# credentials: TodoistCredentialsInput = TodoistCredentialsField([])
# name: str = SchemaField(description="Section name")
# project_id: str = SchemaField(description="Project ID this section should belong to")
# order: Optional[int] = SchemaField(description="Optional order among other sections", default=None)
# class Output(BlockSchema):
# success: bool = SchemaField(description="Whether section was successfully created")
# error: str = SchemaField(description="Error message if the request failed")
# def __init__(self):
# super().__init__(
# id="e3025cfc-de14-11ef-b9f2-32d3674e8b7e",
# description="Creates a new section in a Todoist project",
# categories={BlockCategory.PRODUCTIVITY},
# input_schema=TodoistCreateSectionBlock.Input,
# output_schema=TodoistCreateSectionBlock.Output,
# test_input={
# "credentials": TEST_CREDENTIALS_INPUT,
# "name": "Groceries",
# "project_id": "2203306141"
# },
# test_credentials=TEST_CREDENTIALS,
# test_output=[
# ("success", True)
# ],
# test_mock={
# "create_section": lambda *args, **kwargs: (
# {"id": "7025", "project_id": "2203306141", "order": 1, "name": "Groceries"},
# )
# },
# )
# @staticmethod
# def create_section(credentials: TodoistCredentials, name: str, project_id: str, order: Optional[int] = None):
# try:
# api = TodoistAPI(credentials.access_token.get_secret_value())
# section = api.add_section(name=name, project_id=project_id, order=order)
# return section.__dict__
# except Exception as e:
# raise e
# def run(
# self,
# input_data: Input,
# *,
# credentials: TodoistCredentials,
# **kwargs,
# ) -> BlockOutput:
# try:
# section_data = self.create_section(
# credentials,
# input_data.name,
# input_data.project_id,
# input_data.order
# )
# if section_data:
# yield "success", True
# except Exception as e:
# yield "error", str(e)
class TodoistGetSectionBlock(Block):
"""Gets a single section from Todoist by ID"""
class Input(BlockSchema):
credentials: TodoistCredentialsInput = TodoistCredentialsField([])
section_id: str = SchemaField(description="ID of section to fetch")
class Output(BlockSchema):
id: str = SchemaField(description="ID of section")
project_id: str = SchemaField(description="Project ID the section belongs to")
order: int = SchemaField(description="Order of the section")
name: str = SchemaField(description="Name of the section")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="ea5580e2-de14-11ef-a5d3-32d3674e8b7e",
description="Gets a single section by ID from Todoist",
categories={BlockCategory.PRODUCTIVITY},
input_schema=TodoistGetSectionBlock.Input,
output_schema=TodoistGetSectionBlock.Output,
test_input={"credentials": TEST_CREDENTIALS_INPUT, "section_id": "7025"},
test_credentials=TEST_CREDENTIALS,
test_output=[
("id", "7025"),
("project_id", "2203306141"),
("order", 1),
("name", "Groceries"),
],
test_mock={
"get_section": lambda *args, **kwargs: {
"id": "7025",
"project_id": "2203306141",
"order": 1,
"name": "Groceries",
}
},
)
@staticmethod
def get_section(credentials: TodoistCredentials, section_id: str):
try:
api = TodoistAPI(credentials.access_token.get_secret_value())
section = api.get_section(section_id=section_id)
return section.__dict__
except Exception as e:
raise e
def run(
self,
input_data: Input,
*,
credentials: TodoistCredentials,
**kwargs,
) -> BlockOutput:
try:
section_data = self.get_section(credentials, input_data.section_id)
if section_data:
yield "id", section_data["id"]
yield "project_id", section_data["project_id"]
yield "order", section_data["order"]
yield "name", section_data["name"]
except Exception as e:
yield "error", str(e)
class TodoistDeleteSectionBlock(Block):
"""Deletes a section and all its tasks from Todoist"""
class Input(BlockSchema):
credentials: TodoistCredentialsInput = TodoistCredentialsField([])
section_id: str = SchemaField(description="ID of section to delete")
class Output(BlockSchema):
success: bool = SchemaField(
description="Whether section was successfully deleted"
)
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="f0e52eee-de14-11ef-9b12-32d3674e8b7e",
description="Deletes a section and all its tasks from Todoist",
categories={BlockCategory.PRODUCTIVITY},
input_schema=TodoistDeleteSectionBlock.Input,
output_schema=TodoistDeleteSectionBlock.Output,
test_input={"credentials": TEST_CREDENTIALS_INPUT, "section_id": "7025"},
test_credentials=TEST_CREDENTIALS,
test_output=[("success", True)],
test_mock={"delete_section": lambda *args, **kwargs: (True)},
)
@staticmethod
def delete_section(credentials: TodoistCredentials, section_id: str):
try:
api = TodoistAPI(credentials.access_token.get_secret_value())
success = api.delete_section(section_id=section_id)
return success
except Exception as e:
raise e
def run(
self,
input_data: Input,
*,
credentials: TodoistCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.delete_section(credentials, input_data.section_id)
yield "success", success
except Exception as e:
yield "error", str(e)

View File

@@ -1,660 +0,0 @@
from datetime import datetime
from todoist_api_python.api import TodoistAPI
from todoist_api_python.models import Task
from typing_extensions import Optional
from backend.blocks.todoist._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
TodoistCredentials,
TodoistCredentialsField,
TodoistCredentialsInput,
)
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class TodoistCreateTaskBlock(Block):
"""Creates a new task in a Todoist project"""
class Input(BlockSchema):
credentials: TodoistCredentialsInput = TodoistCredentialsField([])
content: str = SchemaField(description="Task content", advanced=False)
description: Optional[str] = SchemaField(
description="Task description", default=None, advanced=False
)
project_id: Optional[str] = SchemaField(
description="Project ID this task should belong to",
default=None,
advanced=False,
)
section_id: Optional[str] = SchemaField(
description="Section ID this task should belong to",
default=None,
advanced=False,
)
parent_id: Optional[str] = SchemaField(
description="Parent task ID", default=None, advanced=True
)
order: Optional[int] = SchemaField(
description="Optional order among other tasks,[Non-zero integer value used by clients to sort tasks under the same parent]",
default=None,
advanced=True,
)
labels: Optional[list[str]] = SchemaField(
description="Task labels", default=None, advanced=True
)
priority: Optional[int] = SchemaField(
description="Task priority from 1 (normal) to 4 (urgent)",
default=None,
advanced=True,
)
due_date: Optional[datetime] = SchemaField(
description="Due date in YYYY-MM-DD format", advanced=True, default=None
)
deadline_date: Optional[datetime] = SchemaField(
description="Specific date in YYYY-MM-DD format relative to user's timezone",
default=None,
advanced=True,
)
assignee_id: Optional[str] = SchemaField(
description="Responsible user ID", default=None, advanced=True
)
duration_unit: Optional[str] = SchemaField(
description="Task duration unit (minute/day)", default=None, advanced=True
)
duration: Optional[int] = SchemaField(
description="Task duration amount, You need to selecct the duration unit first",
depends_on=["duration_unit"],
default=None,
advanced=True,
)
class Output(BlockSchema):
id: str = SchemaField(description="Task ID")
url: str = SchemaField(description="Task URL")
complete_data: dict = SchemaField(
description="Complete task data as dictionary"
)
error: str = SchemaField(description="Error message if request failed")
def __init__(self):
super().__init__(
id="fde4f458-de14-11ef-bf0c-32d3674e8b7e",
description="Creates a new task in a Todoist project",
categories={BlockCategory.PRODUCTIVITY},
input_schema=TodoistCreateTaskBlock.Input,
output_schema=TodoistCreateTaskBlock.Output,
test_input={
"credentials": TEST_CREDENTIALS_INPUT,
"content": "Buy groceries",
"project_id": "2203306141",
"priority": 4,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("id", "2995104339"),
("url", "https://todoist.com/showTask?id=2995104339"),
(
"complete_data",
{
"id": "2995104339",
"project_id": "2203306141",
"url": "https://todoist.com/showTask?id=2995104339",
},
),
],
test_mock={
"create_task": lambda *args, **kwargs: (
"2995104339",
"https://todoist.com/showTask?id=2995104339",
{
"id": "2995104339",
"project_id": "2203306141",
"url": "https://todoist.com/showTask?id=2995104339",
},
)
},
)
@staticmethod
def create_task(credentials: TodoistCredentials, content: str, **kwargs):
try:
api = TodoistAPI(credentials.access_token.get_secret_value())
task = api.add_task(content=content, **kwargs)
task_dict = Task.to_dict(task)
return task.id, task.url, task_dict
except Exception as e:
raise e
def run(
self,
input_data: Input,
*,
credentials: TodoistCredentials,
**kwargs,
) -> BlockOutput:
try:
due_date = (
input_data.due_date.strftime("%Y-%m-%d")
if input_data.due_date
else None
)
deadline_date = (
input_data.deadline_date.strftime("%Y-%m-%d")
if input_data.deadline_date
else None
)
task_args = {
"description": input_data.description,
"project_id": input_data.project_id,
"section_id": input_data.section_id,
"parent_id": input_data.parent_id,
"order": input_data.order,
"labels": input_data.labels,
"priority": input_data.priority,
"due_date": due_date,
"deadline_date": deadline_date,
"assignee_id": input_data.assignee_id,
"duration": input_data.duration,
"duration_unit": input_data.duration_unit,
}
id, url, complete_data = self.create_task(
credentials,
input_data.content,
**{k: v for k, v in task_args.items() if v is not None},
)
yield "id", id
yield "url", url
yield "complete_data", complete_data
except Exception as e:
yield "error", str(e)
class TodoistGetTasksBlock(Block):
"""Get active tasks from Todoist"""
class Input(BlockSchema):
credentials: TodoistCredentialsInput = TodoistCredentialsField([])
project_id: Optional[str] = SchemaField(
description="Filter tasks by project ID", default=None, advanced=False
)
section_id: Optional[str] = SchemaField(
description="Filter tasks by section ID", default=None, advanced=True
)
label: Optional[str] = SchemaField(
description="Filter tasks by label name", default=None, advanced=True
)
filter: Optional[str] = SchemaField(
description="Filter by any supported filter, You can see How to use filters or create one of your one here - https://todoist.com/help/articles/introduction-to-filters-V98wIH",
default=None,
advanced=True,
)
lang: Optional[str] = SchemaField(
description="IETF language tag for filter language", default=None
)
ids: Optional[list[str]] = SchemaField(
description="List of task IDs to retrieve", default=None, advanced=False
)
class Output(BlockSchema):
ids: list[str] = SchemaField(description="Task IDs")
urls: list[str] = SchemaField(description="Task URLs")
complete_data: list[dict] = SchemaField(
description="Complete task data as dictionary"
)
error: str = SchemaField(description="Error message if request failed")
def __init__(self):
super().__init__(
id="0b706e86-de15-11ef-a113-32d3674e8b7e",
description="Get active tasks from Todoist",
categories={BlockCategory.PRODUCTIVITY},
input_schema=TodoistGetTasksBlock.Input,
output_schema=TodoistGetTasksBlock.Output,
test_input={
"credentials": TEST_CREDENTIALS_INPUT,
"project_id": "2203306141",
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("ids", ["2995104339"]),
("urls", ["https://todoist.com/showTask?id=2995104339"]),
(
"complete_data",
[
{
"id": "2995104339",
"project_id": "2203306141",
"url": "https://todoist.com/showTask?id=2995104339",
"is_completed": False,
}
],
),
],
test_mock={
"get_tasks": lambda *args, **kwargs: [
{
"id": "2995104339",
"project_id": "2203306141",
"url": "https://todoist.com/showTask?id=2995104339",
"is_completed": False,
}
]
},
)
@staticmethod
def get_tasks(credentials: TodoistCredentials, **kwargs):
try:
api = TodoistAPI(credentials.access_token.get_secret_value())
tasks = api.get_tasks(**kwargs)
return [Task.to_dict(task) for task in tasks]
except Exception as e:
raise e
def run(
self,
input_data: Input,
*,
credentials: TodoistCredentials,
**kwargs,
) -> BlockOutput:
try:
task_filters = {
"project_id": input_data.project_id,
"section_id": input_data.section_id,
"label": input_data.label,
"filter": input_data.filter,
"lang": input_data.lang,
"ids": input_data.ids,
}
tasks = self.get_tasks(
credentials, **{k: v for k, v in task_filters.items() if v is not None}
)
yield "ids", [task["id"] for task in tasks]
yield "urls", [task["url"] for task in tasks]
yield "complete_data", tasks
except Exception as e:
yield "error", str(e)
class TodoistGetTaskBlock(Block):
"""Get an active task from Todoist"""
class Input(BlockSchema):
credentials: TodoistCredentialsInput = TodoistCredentialsField([])
task_id: str = SchemaField(description="Task ID to retrieve")
class Output(BlockSchema):
project_id: str = SchemaField(description="Project ID containing the task")
url: str = SchemaField(description="Task URL")
complete_data: dict = SchemaField(
description="Complete task data as dictionary"
)
error: str = SchemaField(description="Error message if request failed")
def __init__(self):
super().__init__(
id="16d7dc8c-de15-11ef-8ace-32d3674e8b7e",
description="Get an active task from Todoist",
categories={BlockCategory.PRODUCTIVITY},
input_schema=TodoistGetTaskBlock.Input,
output_schema=TodoistGetTaskBlock.Output,
test_input={"credentials": TEST_CREDENTIALS_INPUT, "task_id": "2995104339"},
test_credentials=TEST_CREDENTIALS,
test_output=[
("project_id", "2203306141"),
("url", "https://todoist.com/showTask?id=2995104339"),
(
"complete_data",
{
"id": "2995104339",
"project_id": "2203306141",
"url": "https://todoist.com/showTask?id=2995104339",
},
),
],
test_mock={
"get_task": lambda *args, **kwargs: {
"project_id": "2203306141",
"id": "2995104339",
"url": "https://todoist.com/showTask?id=2995104339",
}
},
)
@staticmethod
def get_task(credentials: TodoistCredentials, task_id: str):
try:
api = TodoistAPI(credentials.access_token.get_secret_value())
task = api.get_task(task_id=task_id)
return Task.to_dict(task)
except Exception as e:
raise e
def run(
self,
input_data: Input,
*,
credentials: TodoistCredentials,
**kwargs,
) -> BlockOutput:
try:
task_data = self.get_task(credentials, input_data.task_id)
if task_data:
yield "project_id", task_data["project_id"]
yield "url", task_data["url"]
yield "complete_data", task_data
except Exception as e:
yield "error", str(e)
class TodoistUpdateTaskBlock(Block):
"""Updates an existing task in Todoist"""
class Input(BlockSchema):
credentials: TodoistCredentialsInput = TodoistCredentialsField([])
task_id: str = SchemaField(description="Task ID to update")
content: str = SchemaField(description="Task content", advanced=False)
description: Optional[str] = SchemaField(
description="Task description", default=None, advanced=False
)
project_id: Optional[str] = SchemaField(
description="Project ID this task should belong to",
default=None,
advanced=False,
)
section_id: Optional[str] = SchemaField(
description="Section ID this task should belong to",
default=None,
advanced=False,
)
parent_id: Optional[str] = SchemaField(
description="Parent task ID", default=None, advanced=True
)
order: Optional[int] = SchemaField(
description="Optional order among other tasks,[Non-zero integer value used by clients to sort tasks under the same parent]",
default=None,
advanced=True,
)
labels: Optional[list[str]] = SchemaField(
description="Task labels", default=None, advanced=True
)
priority: Optional[int] = SchemaField(
description="Task priority from 1 (normal) to 4 (urgent)",
default=None,
advanced=True,
)
due_date: Optional[datetime] = SchemaField(
description="Due date in YYYY-MM-DD format", advanced=True, default=None
)
deadline_date: Optional[datetime] = SchemaField(
description="Specific date in YYYY-MM-DD format relative to user's timezone",
default=None,
advanced=True,
)
assignee_id: Optional[str] = SchemaField(
description="Responsible user ID", default=None, advanced=True
)
duration_unit: Optional[str] = SchemaField(
description="Task duration unit (minute/day)", default=None, advanced=True
)
duration: Optional[int] = SchemaField(
description="Task duration amount, You need to selecct the duration unit first",
depends_on=["duration_unit"],
default=None,
advanced=True,
)
class Output(BlockSchema):
success: bool = SchemaField(description="Whether the update was successful")
error: str = SchemaField(description="Error message if request failed")
def __init__(self):
super().__init__(
id="1eee6d32-de15-11ef-a2ff-32d3674e8b7e",
description="Updates an existing task in Todoist",
categories={BlockCategory.PRODUCTIVITY},
input_schema=TodoistUpdateTaskBlock.Input,
output_schema=TodoistUpdateTaskBlock.Output,
test_input={
"credentials": TEST_CREDENTIALS_INPUT,
"task_id": "2995104339",
"content": "Buy Coffee",
},
test_credentials=TEST_CREDENTIALS,
test_output=[("success", True)],
test_mock={"update_task": lambda *args, **kwargs: True},
)
@staticmethod
def update_task(credentials: TodoistCredentials, task_id: str, **kwargs):
try:
api = TodoistAPI(credentials.access_token.get_secret_value())
is_success = api.update_task(task_id=task_id, **kwargs)
return is_success
except Exception as e:
raise e
def run(
self,
input_data: Input,
*,
credentials: TodoistCredentials,
**kwargs,
) -> BlockOutput:
try:
due_date = (
input_data.due_date.strftime("%Y-%m-%d")
if input_data.due_date
else None
)
deadline_date = (
input_data.deadline_date.strftime("%Y-%m-%d")
if input_data.deadline_date
else None
)
task_updates = {}
if input_data.content is not None:
task_updates["content"] = input_data.content
if input_data.description is not None:
task_updates["description"] = input_data.description
if input_data.project_id is not None:
task_updates["project_id"] = input_data.project_id
if input_data.section_id is not None:
task_updates["section_id"] = input_data.section_id
if input_data.parent_id is not None:
task_updates["parent_id"] = input_data.parent_id
if input_data.order is not None:
task_updates["order"] = input_data.order
if input_data.labels is not None:
task_updates["labels"] = input_data.labels
if input_data.priority is not None:
task_updates["priority"] = input_data.priority
if due_date is not None:
task_updates["due_date"] = due_date
if deadline_date is not None:
task_updates["deadline_date"] = deadline_date
if input_data.assignee_id is not None:
task_updates["assignee_id"] = input_data.assignee_id
if input_data.duration is not None:
task_updates["duration"] = input_data.duration
if input_data.duration_unit is not None:
task_updates["duration_unit"] = input_data.duration_unit
self.update_task(
credentials,
input_data.task_id,
**{k: v for k, v in task_updates.items() if v is not None},
)
yield "success", True
except Exception as e:
yield "error", str(e)
class TodoistCloseTaskBlock(Block):
"""Closes a task in Todoist"""
class Input(BlockSchema):
credentials: TodoistCredentialsInput = TodoistCredentialsField([])
task_id: str = SchemaField(description="Task ID to close")
class Output(BlockSchema):
success: bool = SchemaField(
description="Whether the task was successfully closed"
)
error: str = SchemaField(description="Error message if request failed")
def __init__(self):
super().__init__(
id="29fac798-de15-11ef-b839-32d3674e8b7e",
description="Closes a task in Todoist",
categories={BlockCategory.PRODUCTIVITY},
input_schema=TodoistCloseTaskBlock.Input,
output_schema=TodoistCloseTaskBlock.Output,
test_input={"credentials": TEST_CREDENTIALS_INPUT, "task_id": "2995104339"},
test_credentials=TEST_CREDENTIALS,
test_output=[("success", True)],
test_mock={"close_task": lambda *args, **kwargs: True},
)
@staticmethod
def close_task(credentials: TodoistCredentials, task_id: str):
try:
api = TodoistAPI(credentials.access_token.get_secret_value())
is_success = api.close_task(task_id=task_id)
return is_success
except Exception as e:
raise e
def run(
self,
input_data: Input,
*,
credentials: TodoistCredentials,
**kwargs,
) -> BlockOutput:
try:
is_success = self.close_task(credentials, input_data.task_id)
yield "success", is_success
except Exception as e:
yield "error", str(e)
class TodoistReopenTaskBlock(Block):
"""Reopens a task in Todoist"""
class Input(BlockSchema):
credentials: TodoistCredentialsInput = TodoistCredentialsField([])
task_id: str = SchemaField(description="Task ID to reopen")
class Output(BlockSchema):
success: bool = SchemaField(
description="Whether the task was successfully reopened"
)
error: str = SchemaField(description="Error message if request failed")
def __init__(self):
super().__init__(
id="2e6bf6f8-de15-11ef-ae7c-32d3674e8b7e",
description="Reopens a task in Todoist",
categories={BlockCategory.PRODUCTIVITY},
input_schema=TodoistReopenTaskBlock.Input,
output_schema=TodoistReopenTaskBlock.Output,
test_input={"credentials": TEST_CREDENTIALS_INPUT, "task_id": "2995104339"},
test_credentials=TEST_CREDENTIALS,
test_output=[
("success", True),
],
test_mock={"reopen_task": lambda *args, **kwargs: (True)},
)
@staticmethod
def reopen_task(credentials: TodoistCredentials, task_id: str):
try:
api = TodoistAPI(credentials.access_token.get_secret_value())
is_success = api.reopen_task(task_id=task_id)
return is_success
except Exception as e:
raise e
def run(
self,
input_data: Input,
*,
credentials: TodoistCredentials,
**kwargs,
) -> BlockOutput:
try:
is_success = self.reopen_task(credentials, input_data.task_id)
yield "success", is_success
except Exception as e:
yield "error", str(e)
class TodoistDeleteTaskBlock(Block):
"""Deletes a task in Todoist"""
class Input(BlockSchema):
credentials: TodoistCredentialsInput = TodoistCredentialsField([])
task_id: str = SchemaField(description="Task ID to delete")
class Output(BlockSchema):
success: bool = SchemaField(
description="Whether the task was successfully deleted"
)
error: str = SchemaField(description="Error message if request failed")
def __init__(self):
super().__init__(
id="33c29ada-de15-11ef-bcbb-32d3674e8b7e",
description="Deletes a task in Todoist",
categories={BlockCategory.PRODUCTIVITY},
input_schema=TodoistDeleteTaskBlock.Input,
output_schema=TodoistDeleteTaskBlock.Output,
test_input={"credentials": TEST_CREDENTIALS_INPUT, "task_id": "2995104339"},
test_credentials=TEST_CREDENTIALS,
test_output=[
("success", True),
],
test_mock={"delete_task": lambda *args, **kwargs: (True)},
)
@staticmethod
def delete_task(credentials: TodoistCredentials, task_id: str):
try:
api = TodoistAPI(credentials.access_token.get_secret_value())
is_success = api.delete_task(task_id=task_id)
return is_success
except Exception as e:
raise e
def run(
self,
input_data: Input,
*,
credentials: TodoistCredentials,
**kwargs,
) -> BlockOutput:
try:
is_success = self.delete_task(credentials, input_data.task_id)
yield "success", is_success
except Exception as e:
yield "error", str(e)

View File

@@ -92,8 +92,7 @@ class TwitterPostTweetBlock(Block):
attachment: Union[Media, DeepLink, Poll, Place, Quote] | None = SchemaField(
discriminator="discriminator",
description="Additional tweet data (media, deep link, poll, place or quote)",
advanced=False,
default=Media(discriminator="media"),
advanced=True,
)
exclude_reply_user_ids: Optional[List[str]] = SchemaField(

View File

@@ -23,6 +23,71 @@ from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class TwitterUnblockUserBlock(Block):
"""
Unblock a specific user on Twitter. The request succeeds with no action if the authenticating user is not blocking the target user or has already unblocked them.
"""
class Input(BlockSchema):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["block.write", "users.read", "offline.access"]
)
target_user_id: str = SchemaField(
description="The user ID of the user that you would like to unblock",
placeholder="Enter target user ID",
)
class Output(BlockSchema):
success: bool = SchemaField(description="Whether the unblock was successful")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="0f1b6570-a631-11ef-a3ea-230cbe9650dd",
description="This block unblocks a specific user on Twitter.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterUnblockUserBlock.Input,
output_schema=TwitterUnblockUserBlock.Output,
test_input={
"target_user_id": "12345",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("success", True),
],
test_mock={"unblock_user": lambda *args, **kwargs: True},
)
@staticmethod
def unblock_user(credentials: TwitterCredentials, target_user_id: str):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
client.unblock(target_user_id=target_user_id, user_auth=False)
return True
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.unblock_user(credentials, input_data.target_user_id)
yield "success", success
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterGetBlockedUsersBlock(Block):
"""
Get a list of users who are blocked by the authenticating user
@@ -173,3 +238,68 @@ class TwitterGetBlockedUsersBlock(Block):
yield "next_token", next_token
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterBlockUserBlock(Block):
"""
Block a specific user on Twitter
"""
class Input(BlockSchema):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["block.write", "users.read", "offline.access"]
)
target_user_id: str = SchemaField(
description="The user ID of the user that you would like to block",
placeholder="Enter target user ID",
)
class Output(BlockSchema):
success: bool = SchemaField(description="Whether the block was successful")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="fc258b94-a630-11ef-abc3-df050b75b816",
description="This block blocks a specific user on Twitter.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterBlockUserBlock.Input,
output_schema=TwitterBlockUserBlock.Output,
test_input={
"target_user_id": "12345",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("success", True),
],
test_mock={"block_user": lambda *args, **kwargs: True},
)
@staticmethod
def block_user(credentials: TwitterCredentials, target_user_id: str):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
client.block(target_user_id=target_user_id, user_auth=False)
return True
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.block_user(credentials, input_data.target_user_id)
yield "success", success
except Exception as e:
yield "error", handle_tweepy_exception(e)

View File

@@ -1,37 +0,0 @@
from gravitasml.parser import Parser
from gravitasml.token import tokenize
from backend.data.block import Block, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class XMLParserBlock(Block):
class Input(BlockSchema):
input_xml: str = SchemaField(description="input xml to be parsed")
class Output(BlockSchema):
parsed_xml: dict = SchemaField(description="output parsed xml to dict")
error: str = SchemaField(description="Error in parsing")
def __init__(self):
super().__init__(
id="286380af-9529-4b55-8be0-1d7c854abdb5",
description="Parses XML using gravitasml to tokenize and coverts it to dict",
input_schema=XMLParserBlock.Input,
output_schema=XMLParserBlock.Output,
test_input={"input_xml": "<tag1><tag2>content</tag2></tag1>"},
test_output=[
("parsed_xml", {"tag1": {"tag2": "content"}}),
],
)
def run(self, input_data: Input, **kwargs) -> BlockOutput:
try:
tokens = tokenize(input_data.input_xml)
parser = Parser(tokens)
parsed_result = parser.parse()
yield "parsed_xml", parsed_result
except ValueError as val_e:
raise ValueError(f"Validation error for dict:{val_e}") from val_e
except SyntaxError as syn_e:
raise SyntaxError(f"Error in input xml syntax: {syn_e}") from syn_e

View File

@@ -221,8 +221,7 @@ def event():
@test.command()
@click.argument("server_address")
@click.argument("graph_id")
@click.argument("graph_version")
def websocket(server_address: str, graph_id: str, graph_version: int):
def websocket(server_address: str, graph_id: str):
"""
Tests the websocket connection.
"""
@@ -238,9 +237,7 @@ def websocket(server_address: str, graph_id: str, graph_version: int):
try:
msg = WsMessage(
method=Methods.SUBSCRIBE,
data=ExecutionSubscription(
graph_id=graph_id, graph_version=graph_version
).model_dump(),
data=ExecutionSubscription(graph_id=graph_id).model_dump(),
).model_dump_json()
await websocket.send(msg)
print(f"Sending: {msg}")

View File

@@ -64,9 +64,6 @@ class BlockCategory(Enum):
SAFETY = (
"Block that provides AI safety mechanisms such as detecting harmful content"
)
PRODUCTIVITY = "Block that helps with productivity"
ISSUE_TRACKING = "Block that helps with issue tracking"
MULTIMEDIA = "Block that interacts with multimedia content"
def dict(self) -> dict[str, str]:
return {"category": self.name, "description": self.value}
@@ -398,7 +395,6 @@ class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]):
}
def execute(self, input_data: BlockInput, **kwargs) -> BlockOutput:
# Merge the input data with the extra execution arguments, preferring the args for security
if error := self.input_schema.validate_data(input_data):
raise ValueError(
f"Unable to execute block with invalid input data: {error}"

View File

@@ -35,8 +35,6 @@ from backend.integrations.credentials_store import (
# =============== Configure the cost for each LLM Model call =============== #
MODEL_COST: dict[LlmModel, int] = {
LlmModel.O3_MINI: 2, # $1.10 / $4.40
LlmModel.O1: 16, # $15 / $60
LlmModel.O1_PREVIEW: 16,
LlmModel.O1_MINI: 4,
LlmModel.GPT4O_MINI: 1,
@@ -44,21 +42,20 @@ MODEL_COST: dict[LlmModel, int] = {
LlmModel.GPT4_TURBO: 10,
LlmModel.GPT3_5_TURBO: 1,
LlmModel.CLAUDE_3_5_SONNET: 4,
LlmModel.CLAUDE_3_5_HAIKU: 1, # $0.80 / $4.00
LlmModel.CLAUDE_3_HAIKU: 1,
LlmModel.LLAMA3_8B: 1,
LlmModel.LLAMA3_70B: 1,
LlmModel.MIXTRAL_8X7B: 1,
LlmModel.GEMMA_7B: 1,
LlmModel.GEMMA2_9B: 1,
LlmModel.LLAMA3_3_70B: 1, # $0.59 / $0.79
LlmModel.LLAMA3_1_405B: 1,
LlmModel.LLAMA3_1_70B: 1,
LlmModel.LLAMA3_1_8B: 1,
LlmModel.OLLAMA_LLAMA3_3: 1,
LlmModel.OLLAMA_LLAMA3_2: 1,
LlmModel.OLLAMA_LLAMA3_8B: 1,
LlmModel.OLLAMA_LLAMA3_405B: 1,
LlmModel.DEEPSEEK_LLAMA_70B: 1, # ? / ?
LlmModel.OLLAMA_DOLPHIN: 1,
LlmModel.GEMINI_FLASH_1_5: 1,
LlmModel.GEMINI_FLASH_1_5_8B: 1,
LlmModel.GROK_BETA: 5,
LlmModel.MISTRAL_NEMO: 1,
LlmModel.COHERE_COMMAND_R_08_2024: 1,

View File

@@ -10,6 +10,7 @@ class BlockCostType(str, Enum):
RUN = "run" # cost X credits per run
BYTE = "byte" # cost X credits per byte
SECOND = "second" # cost X credits per second
DOLLAR = "dollar" # cost X dollars per run
class BlockCost(BaseModel):

View File

@@ -1,65 +1,40 @@
import logging
from abc import ABC, abstractmethod
from collections import defaultdict
from datetime import datetime, timezone
import stripe
from prisma import Json
from prisma.enums import CreditTransactionType
from prisma.errors import UniqueViolationError
from prisma.models import CreditTransaction, User
from prisma.types import CreditTransactionCreateInput, CreditTransactionWhereInput
from pydantic import BaseModel
from prisma.models import CreditTransaction
from backend.data import db
from backend.data.block import Block, BlockInput, get_block
from backend.data.block_cost_config import BLOCK_COSTS
from backend.data.cost import BlockCost, BlockCostType
from backend.data.execution import NodeExecutionEntry
from backend.data.model import AutoTopUpConfig, TransactionHistory, UserTransaction
from backend.data.user import get_user_by_id
from backend.util.settings import Settings
from backend.util.settings import Config
settings = Settings()
stripe.api_key = settings.secrets.stripe_api_key
logger = logging.getLogger(__name__)
config = Config()
class UserCreditBase(ABC):
@abstractmethod
async def get_credits(self, user_id: str) -> int:
"""
Get the current credits for the user.
Returns:
int: The current credits for the user.
"""
pass
def __init__(self, num_user_credits_refill: int):
self.num_user_credits_refill = num_user_credits_refill
@abstractmethod
async def get_transaction_history(
self,
user_id: str,
transaction_time: datetime,
transaction_count_limit: int,
) -> TransactionHistory:
async def get_or_refill_credit(self, user_id: str) -> int:
"""
Get the credit transactions for the user.
Args:
user_id (str): The user ID.
transaction_time (datetime): The upper bound of the transaction time.
transaction_count_limit (int): The transaction count limit.
Get the current credit for the user and refill if no transaction has been made in the current cycle.
Returns:
TransactionHistory: The credit transactions for the user.
int: The current credit for the user.
"""
pass
@abstractmethod
async def spend_credits(
self,
entry: NodeExecutionEntry,
user_id: str,
user_credit: int,
block_id: str,
input_data: BlockInput,
data_size: float,
run_time: float,
) -> int:
@@ -67,7 +42,10 @@ class UserCreditBase(ABC):
Spend the credits for the user based on the block usage.
Args:
entry (NodeExecutionEntry): The node execution identifiers & data.
user_id (str): The user ID.
user_credit (int): The current credit for the user.
block_id (str): The block ID.
input_data (BlockInput): The input data for the block.
data_size (float): The size of the data being processed.
run_time (float): The time taken to run the block.
@@ -79,7 +57,7 @@ class UserCreditBase(ABC):
@abstractmethod
async def top_up_credits(self, user_id: str, amount: int):
"""
Top up the credits for the user immediately.
Top up the credits for the user.
Args:
user_id (str): The user ID.
@@ -87,181 +65,51 @@ class UserCreditBase(ABC):
"""
pass
@abstractmethod
async def top_up_intent(self, user_id: str, amount: int) -> str:
"""
Create a payment intent to top up the credits for the user.
Args:
user_id (str): The user ID.
amount (int): The amount of credits to top up.
Returns:
str: The redirect url to the payment page.
"""
pass
@abstractmethod
async def fulfill_checkout(
self, *, session_id: str | None = None, user_id: str | None = None
):
"""
Fulfill the Stripe checkout session.
Args:
session_id (str | None): The checkout session ID. Will try to fulfill most recent if None.
user_id (str | None): The user ID must be provided if session_id is None.
"""
pass
@staticmethod
def time_now() -> datetime:
return datetime.now(timezone.utc)
# ====== Transaction Helper Methods ====== #
# Any modifications to the transaction table should only be done through these methods #
async def _get_credits(self, user_id: str) -> tuple[int, datetime]:
"""
Returns the current balance of the user & the latest balance snapshot time.
"""
top_time = self.time_now()
snapshot = await CreditTransaction.prisma().find_first(
where={
"userId": user_id,
"createdAt": {"lte": top_time},
"isActive": True,
"runningBalance": {"not": None}, # type: ignore
},
order={"createdAt": "desc"},
)
datetime_min = datetime.min.replace(tzinfo=timezone.utc)
snapshot_balance = snapshot.runningBalance or 0 if snapshot else 0
snapshot_time = snapshot.createdAt if snapshot else datetime_min
# Get transactions after the snapshot, this should not exist, but just in case.
transactions = await CreditTransaction.prisma().group_by(
by=["userId"],
sum={"amount": True},
max={"createdAt": True},
where={
"userId": user_id,
"createdAt": {
"gt": snapshot_time,
"lte": top_time,
},
"isActive": True,
},
)
transaction_balance = (
int(transactions[0].get("_sum", {}).get("amount", 0) + snapshot_balance)
if transactions
else snapshot_balance
)
transaction_time = (
datetime.fromisoformat(
str(transactions[0].get("_max", {}).get("createdAt", datetime_min))
)
if transactions
else snapshot_time
)
return transaction_balance, transaction_time
async def _enable_transaction(
self, transaction_key: str, user_id: str, metadata: Json
):
transaction = await CreditTransaction.prisma().find_first_or_raise(
where={"transactionKey": transaction_key, "userId": user_id}
)
if transaction.isActive:
return
async with db.locked_transaction(f"usr_trx_{user_id}"):
user_balance, _ = await self._get_credits(user_id)
await CreditTransaction.prisma().update(
where={
"creditTransactionIdentifier": {
"transactionKey": transaction_key,
"userId": user_id,
}
},
data={
"isActive": True,
"runningBalance": user_balance + transaction.amount,
"createdAt": self.time_now(),
"metadata": metadata,
},
)
async def _add_transaction(
self,
user_id: str,
amount: int,
transaction_type: CreditTransactionType,
is_active: bool = True,
transaction_key: str | None = None,
ceiling_balance: int | None = None,
metadata: Json = Json({}),
) -> tuple[int, str]:
"""
Add a new transaction for the user.
This is the only method that should be used to add a new transaction.
Args:
user_id (str): The user ID.
amount (int): The amount of credits to add.
transaction_type (CreditTransactionType): The type of transaction.
is_active (bool): Whether the transaction is active or needs to be manually activated through _enable_transaction.
transaction_key (str | None): The transaction key. Avoids adding transaction if the key already exists.
ceiling_balance (int | None): The ceiling balance. Avoids adding more credits if the balance is already above the ceiling.
metadata (Json): The metadata of the transaction.
Returns:
tuple[int, str]: The new balance & the transaction key.
"""
async with db.locked_transaction(f"usr_trx_{user_id}"):
# Get latest balance snapshot
user_balance, _ = await self._get_credits(user_id)
if ceiling_balance and user_balance >= ceiling_balance:
raise ValueError(
f"You already have enough balance for user {user_id}, balance: {user_balance}, ceiling: {ceiling_balance}"
)
if amount < 0 and user_balance < abs(amount):
raise ValueError(
f"Insufficient balance of ${user_balance/100} to run the block that costs ${abs(amount)/100}"
)
# Create the transaction
transaction_data: CreditTransactionCreateInput = {
"userId": user_id,
"amount": amount,
"runningBalance": user_balance + amount,
"type": transaction_type,
"metadata": metadata,
"isActive": is_active,
"createdAt": self.time_now(),
}
if transaction_key:
transaction_data["transactionKey"] = transaction_key
tx = await CreditTransaction.prisma().create(data=transaction_data)
return user_balance + amount, tx.transactionKey
class UsageTransactionMetadata(BaseModel):
graph_exec_id: str | None = None
graph_id: str | None = None
node_id: str | None = None
node_exec_id: str | None = None
block_id: str | None = None
block: str | None = None
input: BlockInput | None = None
class UserCredit(UserCreditBase):
async def get_or_refill_credit(self, user_id: str) -> int:
cur_time = self.time_now()
cur_month = cur_time.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
nxt_month = (
cur_month.replace(month=cur_month.month + 1)
if cur_month.month < 12
else cur_month.replace(year=cur_month.year + 1, month=1)
)
user_credit = await CreditTransaction.prisma().group_by(
by=["userId"],
sum={"amount": True},
where={
"userId": user_id,
"createdAt": {"gte": cur_month, "lt": nxt_month},
"isActive": True,
},
)
if user_credit:
credit_sum = user_credit[0].get("_sum") or {}
return credit_sum.get("amount", 0)
key = f"MONTHLY-CREDIT-TOP-UP-{cur_month}"
try:
await CreditTransaction.prisma().create(
data={
"amount": self.num_user_credits_refill,
"type": CreditTransactionType.TOP_UP,
"userId": user_id,
"transactionKey": key,
"createdAt": self.time_now(),
}
)
except UniqueViolationError:
pass # Already refilled this month
return self.num_user_credits_refill
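# Standalone sketch of the month-window arithmetic above (values illustrative):
#     from datetime import datetime, timezone
#     now = datetime.now(timezone.utc)
#     cur_month = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
#     nxt_month = (
#         cur_month.replace(month=cur_month.month + 1)
#         if cur_month.month < 12
#         else cur_month.replace(year=cur_month.year + 1, month=1)  # December rolls to January
#     )
#     # the per-month transactionKey makes the monthly refill idempotent:
#     key = f"MONTHLY-CREDIT-TOP-UP-{cur_month}"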
@staticmethod
def time_now():
return datetime.now(timezone.utc)
def _block_usage_cost(
self,
@@ -300,8 +148,8 @@ class UserCredit(UserCreditBase):
) -> bool:
"""
Filter rules:
- If cost_filter is an object, then check if cost_filter is the subset of input_data
- Otherwise, check if cost_filter is equal to input_data.
- If costFilter is an object, then check if costFilter is the subset of inputValues
- Otherwise, check if costFilter is equal to inputValues.
- Undefined, null, and empty string are considered as equal.
"""
if not isinstance(cost_filter, dict) or not isinstance(input_data, dict):
@@ -315,366 +163,72 @@ class UserCredit(UserCreditBase):
async def spend_credits(
self,
entry: NodeExecutionEntry,
user_id: str,
user_credit: int,
block_id: str,
input_data: BlockInput,
data_size: float,
run_time: float,
validate_balance: bool = True,
) -> int:
block = get_block(entry.block_id)
block = get_block(block_id)
if not block:
raise ValueError(f"Block not found: {entry.block_id}")
raise ValueError(f"Block not found: {block_id}")
cost, matching_filter = self._block_usage_cost(
block=block, input_data=entry.data, data_size=data_size, run_time=run_time
block=block, input_data=input_data, data_size=data_size, run_time=run_time
)
if cost == 0:
if cost <= 0:
return 0
balance, _ = await self._add_transaction(
user_id=entry.user_id,
amount=-cost,
transaction_type=CreditTransactionType.USAGE,
metadata=Json(
UsageTransactionMetadata(
graph_exec_id=entry.graph_exec_id,
graph_id=entry.graph_id,
node_id=entry.node_id,
node_exec_id=entry.node_exec_id,
block_id=entry.block_id,
block=block.name,
input=matching_filter,
).model_dump()
),
if validate_balance and user_credit < cost:
raise ValueError(f"Insufficient credit: {user_credit} < {cost}")
await CreditTransaction.prisma().create(
data={
"userId": user_id,
"amount": -cost,
"type": CreditTransactionType.USAGE,
"blockId": block.id,
"metadata": Json(
{
"block": block.name,
"input": matching_filter,
}
),
"createdAt": self.time_now(),
}
)
user_id = entry.user_id
# Auto top-up if balance is below threshold.
auto_top_up = await get_auto_top_up(user_id)
if auto_top_up.threshold and balance < auto_top_up.threshold:
try:
await self._top_up_credits(
user_id=user_id,
amount=auto_top_up.amount,
# Avoid multiple auto top-ups within the same graph execution.
key=f"AUTO-TOP-UP-{user_id}-{entry.graph_exec_id}",
ceiling_balance=auto_top_up.threshold,
)
except Exception as e:
# Failed top-up is not critical, we can move on.
logger.error(
f"Auto top-up failed for user {user_id}, balance: {balance}, amount: {auto_top_up.amount}, error: {e}"
)
return cost
async def top_up_credits(self, user_id: str, amount: int):
await self._top_up_credits(user_id, amount)
async def _top_up_credits(
self,
user_id: str,
amount: int,
key: str | None = None,
ceiling_balance: int | None = None,
):
if amount < 0:
raise ValueError(f"Top up amount must not be negative: {amount}")
if key is not None and (
await CreditTransaction.prisma().find_first(
where={"transactionKey": key, "userId": user_id}
)
):
raise ValueError(f"Transaction key {key} already exists for user {user_id}")
_, transaction_key = await self._add_transaction(
user_id=user_id,
amount=amount,
transaction_type=CreditTransactionType.TOP_UP,
is_active=False,
transaction_key=key,
ceiling_balance=ceiling_balance,
)
customer_id = await get_stripe_customer_id(user_id)
payment_methods = stripe.PaymentMethod.list(customer=customer_id, type="card")
if not payment_methods:
raise ValueError("No payment method found, please add it on the platform.")
for payment_method in payment_methods:
if amount == 0:
setup_intent = stripe.SetupIntent.create(
customer=customer_id,
usage="off_session",
confirm=True,
payment_method=payment_method.id,
automatic_payment_methods={
"enabled": True,
"allow_redirects": "never",
},
)
if setup_intent.status == "succeeded":
return
else:
payment_intent = stripe.PaymentIntent.create(
amount=amount,
currency="usd",
description="AutoGPT Platform Credits",
customer=customer_id,
off_session=True,
confirm=True,
payment_method=payment_method.id,
automatic_payment_methods={
"enabled": True,
"allow_redirects": "never",
},
)
if payment_intent.status == "succeeded":
await self._enable_transaction(
transaction_key=transaction_key,
user_id=user_id,
metadata=Json({"payment_intent": payment_intent}),
)
return
raise ValueError(
f"Out of {len(payment_methods)} payment methods tried, none is supported"
)
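A hedged usage sketch of this method; the identifiers are illustrative, and it assumes credits are enabled so the model is a UserCredit instance. The duplicate-key guard above means a retry with the same key fails before any Stripe call:
# Inside an async context, with user_credit = UserCredit() (assumed):
await user_credit._top_up_credits(
    user_id="user-123",
    amount=1000,
    # A second call with this key raises ValueError (one auto top-up per run).
    key="AUTO-TOP-UP-user-123-exec-456",
    # Assumed semantics: avoid crediting past this balance ceiling.
    ceiling_balance=500,
)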
async def top_up_intent(self, user_id: str, amount: int) -> str:
if amount < 500 or amount % 100 != 0:
raise ValueError(
f"Top up amount must be at least 500 credits and multiple of 100 but is {amount}"
)
# Create checkout session
# https://docs.stripe.com/checkout/quickstart?client=react
# unit_amount is always in the smallest currency unit (cents for USD),
# which here equals the number of credits
checkout_session = stripe.checkout.Session.create(
customer=await get_stripe_customer_id(user_id),
line_items=[
{
"price_data": {
"currency": "usd",
"product_data": {
"name": "AutoGPT Platform Credits",
},
"unit_amount": amount,
},
"quantity": 1,
}
],
mode="payment",
ui_mode="hosted",
payment_intent_data={"setup_future_usage": "off_session"},
saved_payment_method_options={"payment_method_save": "enabled"},
success_url=settings.config.frontend_base_url
+ "/marketplace/credits?topup=success",
cancel_url=settings.config.frontend_base_url
+ "/marketplace/credits?topup=cancel",
allow_promotion_codes=True,
)
await self._add_transaction(
user_id=user_id,
amount=amount,
transaction_type=CreditTransactionType.TOP_UP,
transaction_key=checkout_session.id,
is_active=False,
metadata=Json({"checkout_session": checkout_session}),
)
return checkout_session.url or ""
# https://docs.stripe.com/checkout/fulfillment
async def fulfill_checkout(
self, *, session_id: str | None = None, user_id: str | None = None
):
if (not session_id and not user_id) or (session_id and user_id):
raise ValueError("Either session_id or user_id must be provided")
# Retrieve CreditTransaction
find_filter: CreditTransactionWhereInput = {
"type": CreditTransactionType.TOP_UP,
"isActive": False,
}
if session_id:
find_filter["transactionKey"] = session_id
if user_id:
find_filter["userId"] = user_id
# Find the most recent inactive top-up transaction
credit_transaction = await CreditTransaction.prisma().find_first(
where=find_filter,
order={"createdAt": "desc"},
)
# This can be called multiple times for one id, so ignore if already fulfilled
if not credit_transaction:
return
# Retrieve the Checkout Session from the API
checkout_session = stripe.checkout.Session.retrieve(
credit_transaction.transactionKey
)
# Check the Checkout Session's payment_status property
# to determine if fulfillment should be performed
if checkout_session.payment_status in ["paid", "no_payment_required"]:
await self._enable_transaction(
transaction_key=credit_transaction.transactionKey,
user_id=credit_transaction.userId,
metadata=Json({"checkout_session": checkout_session}),
)
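Per the linked Stripe fulfillment guide, this is typically driven from a webhook; a minimal sketch, assuming standard Stripe signature verification and a hypothetical handler name:
import stripe

from backend.data.credit import get_user_credit_model

user_credit = get_user_credit_model()

async def handle_stripe_webhook(payload: bytes, sig_header: str, secret: str):
    # Raises if the signature is invalid; returns the parsed event otherwise.
    event = stripe.Webhook.construct_event(payload, sig_header, secret)
    if event["type"] == "checkout.session.completed":
        # Safe to call repeatedly: already-fulfilled sessions are ignored above.
        await user_credit.fulfill_checkout(
            session_id=event["data"]["object"]["id"]
        )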
async def get_credits(self, user_id: str) -> int:
balance, _ = await self._get_credits(user_id)
return balance
async def get_transaction_history(
self,
user_id: str,
transaction_time: datetime,
transaction_count_limit: int,
) -> TransactionHistory:
transactions = await CreditTransaction.prisma().find_many(
where={
"userId": user_id,
"createdAt": {"lt": transaction_time},
"isActive": True,
},
order={"createdAt": "desc"},
take=transaction_count_limit,
)
await CreditTransaction.prisma().create(
data={
"userId": user_id,
"amount": amount,
"type": CreditTransactionType.TOP_UP,
"createdAt": self.time_now(),
}
)
grouped_transactions: dict[str, UserTransaction] = defaultdict(
lambda: UserTransaction()
)
tx_time = None
for t in transactions:
metadata = (
UsageTransactionMetadata.model_validate(t.metadata)
if t.metadata
else UsageTransactionMetadata()
)
tx_time = t.createdAt.replace(tzinfo=None)
if t.type == CreditTransactionType.USAGE and metadata.graph_exec_id:
gt = grouped_transactions[metadata.graph_exec_id]
gid = metadata.graph_id[:8] if metadata.graph_id else "UNKNOWN"
gt.description = f"Graph #{gid} Execution"
gt.usage_node_count += 1
gt.usage_start_time = min(gt.usage_start_time, tx_time)
gt.usage_execution_id = metadata.graph_exec_id
gt.usage_graph_id = metadata.graph_id
else:
gt = grouped_transactions[t.transactionKey]
gt.description = f"{t.type} Transaction"
gt.amount += t.amount
gt.transaction_type = t.type
if tx_time > gt.transaction_time:
gt.transaction_time = tx_time
gt.balance = t.runningBalance or 0
return TransactionHistory(
transactions=list(grouped_transactions.values()),
next_transaction_time=(
tx_time if len(transactions) == transaction_count_limit else None
),
)
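An illustration of the grouping above (values assumed):
# Three USAGE rows sharing graph_exec_id "g1" collapse into a single
# "Graph #<first-8-chars-of-graph-id> Execution" entry with
# usage_node_count == 3 and usage_start_time set to the earliest row;
# a TOP_UP row remains its own entry, keyed by its transactionKey.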
class BetaUserCredit(UserCredit):
"""
This is a temporary class to handle test users utilizing the monthly credit refill.
TODO: Remove this class & its feature toggle.
"""
def __init__(self, num_user_credits_refill: int):
self.num_user_credits_refill = num_user_credits_refill
async def get_credits(self, user_id: str) -> int:
cur_time = self.time_now().date()
balance, snapshot_time = await self._get_credits(user_id)
if (snapshot_time.year, snapshot_time.month) == (cur_time.year, cur_time.month):
return balance
try:
balance, _ = await self._add_transaction(
user_id=user_id,
amount=max(self.num_user_credits_refill - balance, 0),
transaction_type=CreditTransactionType.TOP_UP,
transaction_key=f"MONTHLY-CREDIT-TOP-UP-{cur_time}",
)
return balance
except UniqueViolationError:
# Already refilled this month
return (await self._get_credits(user_id))[0]
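A worked example of the refill arithmetic (numbers assumed):
# With num_user_credits_refill = 500:
#   carried-over balance 120 -> max(500 - 120, 0) = 380 added; month starts at 500
#   carried-over balance 700 -> max(500 - 700, 0) = 0 added;   stays at 700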
class DisabledUserCredit(UserCreditBase):
async def get_credits(self, *args, **kwargs) -> int:
async def get_or_refill_credit(self, *args, **kwargs) -> int:
return 0
async def get_transaction_history(self, *args, **kwargs) -> TransactionHistory:
return TransactionHistory(transactions=[], next_transaction_time=None)
async def spend_credits(self, *args, **kwargs) -> int:
return 0
async def top_up_credits(self, *args, **kwargs):
pass
async def top_up_intent(self, *args, **kwargs) -> str:
return ""
async def fulfill_checkout(self, *args, **kwargs):
pass
def get_user_credit_model() -> UserCreditBase:
if not settings.config.enable_credit:
return DisabledUserCredit()
if settings.config.enable_beta_monthly_credit:
return BetaUserCredit(settings.config.num_user_credits_refill)
return UserCredit()
if config.enable_credit.lower() == "true":
return UserCredit(config.num_user_credits_refill)
else:
return DisabledUserCredit(0)
def get_block_costs() -> dict[str, list[BlockCost]]:
return {block().id: costs for block, costs in BLOCK_COSTS.items()}
async def get_stripe_customer_id(user_id: str) -> str:
user = await get_user_by_id(user_id)
if user.stripeCustomerId:
return user.stripeCustomerId
customer = stripe.Customer.create(name=user.name or "", email=user.email)
await User.prisma().update(
where={"id": user_id}, data={"stripeCustomerId": customer.id}
)
return customer.id
async def set_auto_top_up(user_id: str, config: AutoTopUpConfig):
await User.prisma().update(
where={"id": user_id},
data={"topUpConfig": Json(config.model_dump())},
)
async def get_auto_top_up(user_id: str) -> AutoTopUpConfig:
user = await get_user_by_id(user_id)
if not user.topUpConfig:
return AutoTopUpConfig(threshold=0, amount=0)
return AutoTopUpConfig.model_validate(user.topUpConfig)

View File

@@ -1,6 +1,5 @@
import logging
import os
import zlib
from contextlib import asynccontextmanager
from uuid import uuid4
@@ -55,14 +54,6 @@ async def transaction():
yield tx
@asynccontextmanager
async def locked_transaction(key: str):
lock_key = zlib.crc32(key.encode("utf-8"))
async with transaction() as tx:
await tx.execute_raw(f"SELECT pg_advisory_xact_lock({lock_key})")
yield tx
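A minimal sketch of how this helper serializes concurrent writers; the key string and function are illustrative:
async def adjust_balance(user_id: str, delta: int):
    # Callers hashing to the same crc32 key queue up here; PostgreSQL releases
    # pg_advisory_xact_lock automatically when the transaction ends.
    async with locked_transaction(f"usr_trx_{user_id}") as tx:
        ...  # read balance, validate, and write the CreditTransaction atomically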
class BaseDbModel(BaseModel):
id: str = Field(default_factory=lambda: str(uuid4()))

View File

@@ -1,11 +1,9 @@
from collections import defaultdict
from datetime import datetime, timezone
from multiprocessing import Manager
from typing import Any, AsyncGenerator, Generator, Generic, Optional, Type, TypeVar
from typing import Any, AsyncGenerator, Generator, Generic, TypeVar
from prisma import Json
from prisma.enums import AgentExecutionStatus
from prisma.errors import PrismaError
from prisma.models import (
AgentGraphExecution,
AgentNodeExecution,
@@ -16,8 +14,7 @@ from pydantic import BaseModel
from backend.data.block import BlockData, BlockInput, CompletedBlockOutput
from backend.data.includes import EXECUTION_RESULT_INCLUDE, GRAPH_EXECUTION_INCLUDE
from backend.data.queue import AsyncRedisEventBus, RedisEventBus
from backend.server.v2.store.exceptions import DatabaseError
from backend.util import mock, type
from backend.util import json, mock
from backend.util.settings import Config
@@ -25,7 +22,6 @@ class GraphExecutionEntry(BaseModel):
user_id: str
graph_exec_id: str
graph_id: str
graph_version: int
start_node_execs: list["NodeExecutionEntry"]
@@ -35,7 +31,6 @@ class NodeExecutionEntry(BaseModel):
graph_id: str
node_exec_id: str
node_id: str
block_id: str
data: BlockInput
@@ -103,16 +98,16 @@ class ExecutionResult(BaseModel):
def from_db(execution: AgentNodeExecution):
if execution.executionData:
# An execution that has been queued will persist its data.
input_data = type.convert(execution.executionData, dict[str, Any])
input_data = json.loads(execution.executionData, target_type=dict[str, Any])
else:
# For an incomplete execution, executionData will not yet be available.
input_data: BlockInput = defaultdict()
for data in execution.Input or []:
input_data[data.name] = type.convert(data.data, Type[Any])
input_data[data.name] = json.loads(data.data)
output_data: CompletedBlockOutput = defaultdict(list)
for data in execution.Output or []:
output_data[data.name].append(type.convert(data.data, Type[Any]))
output_data[data.name].append(json.loads(data.data))
graph_execution: AgentGraphExecution | None = execution.AgentGraphExecution
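The `backend.util.json` helpers replace the earlier `type.convert` calls; a hedged sketch of their assumed semantics:
from typing import Any

from backend.util import json

raw = '{"value": 42}'
# Parse the string, then validate/coerce it into the requested type.
data = json.loads(raw, target_type=dict[str, Any])
assert json.loads(json.dumps(data)) == {"value": 42}  # round-trips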
@@ -141,7 +136,6 @@ async def create_graph_execution(
graph_version: int,
nodes_input: list[tuple[str, BlockInput]],
user_id: str,
preset_id: str | None = None,
) -> tuple[str, list[ExecutionResult]]:
"""
Create a new AgentGraphExecution record.
@@ -160,7 +154,7 @@ async def create_graph_execution(
"executionStatus": ExecutionStatus.INCOMPLETE,
"Input": {
"create": [
{"name": name, "data": Json(data)}
{"name": name, "data": json.dumps(data)}
for name, data in node_input.items()
]
},
@@ -169,7 +163,6 @@ async def create_graph_execution(
]
},
"userId": user_id,
"agentPresetId": preset_id,
},
include=GRAPH_EXECUTION_INCLUDE,
)
@@ -213,7 +206,7 @@ async def upsert_execution_input(
order={"addedTime": "asc"},
include={"Input": True},
)
json_input_data = Json(input_data)
json_input_data = json.dumps(input_data)
if existing_execution:
await AgentNodeExecutionInputOutput.prisma().create(
@@ -225,7 +218,7 @@ async def upsert_execution_input(
)
return existing_execution.id, {
**{
input_data.name: type.convert(input_data.data, Type[Any])
input_data.name: json.loads(input_data.data)
for input_data in existing_execution.Input or []
},
input_name: input_data,
@@ -259,7 +252,7 @@ async def upsert_execution_output(
await AgentNodeExecutionInputOutput.prisma().create(
data={
"name": output_name,
"data": Json(output_data),
"data": json.dumps(output_data),
"referencedByOutputExecId": node_exec_id,
}
)
@@ -284,7 +277,7 @@ async def update_graph_execution_stats(
where={"id": graph_exec_id},
data={
"executionStatus": status,
"stats": Json(stats),
"stats": json.dumps(stats),
},
)
if not res:
@@ -296,7 +289,7 @@ async def update_graph_execution_stats(
async def update_node_execution_stats(node_exec_id: str, stats: dict[str, Any]):
await AgentNodeExecution.prisma().update(
where={"id": node_exec_id},
data={"stats": Json(stats)},
data={"stats": json.dumps(stats)},
)
@@ -316,8 +309,8 @@ async def update_execution_status(
**({"startedTime": now} if status == ExecutionStatus.RUNNING else {}),
**({"endedTime": now} if status == ExecutionStatus.FAILED else {}),
**({"endedTime": now} if status == ExecutionStatus.COMPLETED else {}),
**({"executionData": Json(execution_data)} if execution_data else {}),
**({"stats": Json(stats)} if stats else {}),
**({"executionData": json.dumps(execution_data)} if execution_data else {}),
**({"stats": json.dumps(stats)} if stats else {}),
}
res = await AgentNodeExecution.prisma().update(
@@ -331,30 +324,6 @@ async def update_execution_status(
return ExecutionResult.from_db(res)
async def get_execution(
execution_id: str, user_id: str
) -> Optional[AgentNodeExecution]:
"""
Get an execution by ID. Returns None if not found.
Args:
execution_id: The ID of the execution to retrieve
Returns:
The execution if found, None otherwise
"""
try:
execution = await AgentNodeExecution.prisma().find_unique(
where={
"id": execution_id,
"userId": user_id,
}
)
return execution
except PrismaError:
return None
async def get_execution_results(graph_exec_id: str) -> list[ExecutionResult]:
executions = await AgentNodeExecution.prisma().find_many(
where={"agentGraphExecutionId": graph_exec_id},
@@ -368,31 +337,6 @@ async def get_execution_results(graph_exec_id: str) -> list[ExecutionResult]:
return res
async def get_executions_in_timerange(
user_id: str, start_time: str, end_time: str
) -> list[ExecutionResult]:
try:
executions = await AgentGraphExecution.prisma().find_many(
where={
"AND": [
{
"startedAt": {
"gte": datetime.fromisoformat(start_time),
"lte": datetime.fromisoformat(end_time),
}
},
{"userId": user_id},
]
},
include=GRAPH_EXECUTION_INCLUDE,
)
return [ExecutionResult.from_graph(execution) for execution in executions]
except Exception as e:
raise DatabaseError(
f"Failed to get executions in timerange {start_time} to {end_time} for user {user_id}: {e}"
) from e
LIST_SPLIT = "_$_"
DICT_SPLIT = "_#_"
OBJC_SPLIT = "_@_"
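The assumed role of these delimiters, inferred from their names, is addressing nested values through flattened pin names:
# "values_$_0"   -> values[0]        (LIST_SPLIT: list index)
# "config_#_key" -> config["key"]    (DICT_SPLIT: dict key)
# "obj_@_field"  -> obj.field        (OBJC_SPLIT: object attribute)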
@@ -476,7 +420,8 @@ async def get_latest_execution(node_id: str, graph_eid: str) -> ExecutionResult
where={
"agentNodeId": node_id,
"agentGraphExecutionId": graph_eid,
"executionStatus": {"not": ExecutionStatus.INCOMPLETE}, # type: ignore
"executionStatus": {"not": ExecutionStatus.INCOMPLETE},
"executionData": {"not": None}, # type: ignore
},
order={"queuedTime": "desc"},
include=EXECUTION_RESULT_INCLUDE,

View File

@@ -6,20 +6,13 @@ from datetime import datetime, timezone
from typing import Any, Literal, Optional, Type
import prisma
from prisma import Json
from prisma.models import (
AgentGraph,
AgentGraphExecution,
AgentNode,
AgentNodeLink,
StoreListingVersion,
)
from prisma.models import AgentGraph, AgentGraphExecution, AgentNode, AgentNodeLink
from prisma.types import AgentGraphWhereInput
from pydantic.fields import computed_field
from backend.blocks.agent import AgentExecutorBlock
from backend.blocks.basic import AgentInputBlock, AgentOutputBlock
from backend.util import type
from backend.util import json
from .block import BlockInput, BlockType, get_block, get_blocks
from .db import BaseDbModel, transaction
@@ -75,8 +68,8 @@ class NodeModel(Node):
obj = NodeModel(
id=node.id,
block_id=node.AgentBlock.id,
input_default=type.convert(node.constantInput, dict[str, Any]),
metadata=type.convert(node.metadata, dict[str, Any]),
input_default=json.loads(node.constantInput, target_type=dict[str, Any]),
metadata=json.loads(node.metadata, target_type=dict[str, Any]),
graph_id=node.agentGraphId,
graph_version=node.agentGraphVersion,
webhook_id=node.webhookId,
@@ -126,7 +119,7 @@ class GraphExecution(BaseDbModel):
total_run_time = duration
try:
stats = type.convert(execution.stats or {}, dict[str, Any])
stats = json.loads(execution.stats or "{}", target_type=dict[str, Any])
except ValueError:
stats = {}
@@ -403,9 +396,11 @@ class GraphModel(Graph):
if for_export:
# Remove credentials from node input
if node.constantInput:
constant_input = type.convert(node.constantInput, dict[str, Any])
constant_input = json.loads(
node.constantInput, target_type=dict[str, Any]
)
constant_input = GraphModel._hide_node_input_credentials(constant_input)
node.constantInput = Json(constant_input)
node.constantInput = json.dumps(constant_input)
# Remove webhook info
node.webhookId = None
@@ -534,7 +529,7 @@ async def get_execution(user_id: str, execution_id: str) -> GraphExecution | Non
async def get_graph(
graph_id: str,
version: int | None = None,
template: bool = False, # note: currently not in use; TODO: remove from DB entirely
template: bool = False,
user_id: str | None = None,
for_export: bool = False,
) -> GraphModel | None:
@@ -548,35 +543,21 @@ async def get_graph(
where_clause: AgentGraphWhereInput = {
"id": graph_id,
}
if version is not None:
where_clause["version"] = version
elif not template:
where_clause["isActive"] = True
# TODO: Fix hack workaround to get adding store agents to work
if user_id is not None and not template:
where_clause["userId"] = user_id
graph = await AgentGraph.prisma().find_first(
where=where_clause,
include=AGENT_GRAPH_INCLUDE,
order={"version": "desc"},
)
# For access, the graph must be owned by the user or listed in the store
if graph is None or (
graph.userId != user_id
and not (
await StoreListingVersion.prisma().find_first(
where={
"agentId": graph_id,
"agentVersion": version or graph.version,
"isDeleted": False,
"StoreListing": {"is": {"isApproved": True}},
}
)
)
):
return None
return GraphModel.from_db(graph, for_export)
return GraphModel.from_db(graph, for_export) if graph else None
async def set_graph_active_version(graph_id: str, version: int, user_id: str) -> None:
@@ -653,8 +634,8 @@ async def __create_graph(tx, graph: Graph, user_id: str):
{
"id": node.id,
"agentBlockId": node.block_id,
"constantInput": Json(node.input_default),
"metadata": Json(node.metadata),
"constantInput": json.dumps(node.input_default),
"metadata": json.dumps(node.metadata),
}
for node in graph.nodes
]
@@ -741,7 +722,7 @@ async def fix_llm_provider_credentials():
raise RuntimeError(f"Impossible state while processing node {node}")
node_id: str = node["node_id"]
node_preset_input: dict = node["node_preset_input"]
node_preset_input: dict = json.loads(node["node_preset_input"])
credentials_meta: dict = node_preset_input["credentials"]
credentials = next(
@@ -777,5 +758,5 @@ async def fix_llm_provider_credentials():
store.update_creds(user_id, credentials)
await AgentNode.prisma().update(
where={"id": node_id},
data={"constantInput": Json(node_preset_input)},
data={"constantInput": json.dumps(node_preset_input)},
)

View File

@@ -1,8 +1,6 @@
from __future__ import annotations
import base64
import logging
from datetime import datetime
from typing import (
TYPE_CHECKING,
Annotated,
@@ -18,7 +16,6 @@ from typing import (
)
from uuid import uuid4
from prisma.enums import CreditTransactionType
from pydantic import (
BaseModel,
ConfigDict,
@@ -202,42 +199,27 @@ class OAuth2Credentials(_BaseCredentials):
scopes: list[str]
metadata: dict[str, Any] = Field(default_factory=dict)
def auth_header(self) -> str:
def bearer(self) -> str:
return f"Bearer {self.access_token.get_secret_value()}"
class APIKeyCredentials(_BaseCredentials):
type: Literal["api_key"] = "api_key"
api_key: SecretStr
expires_at: Optional[int] = Field(
default=None,
description="Unix timestamp (seconds) indicating when the API key expires (if at all)",
)
expires_at: Optional[int]
"""Unix timestamp (seconds) indicating when the API key expires (if at all)"""
def auth_header(self) -> str:
def bearer(self) -> str:
return f"Bearer {self.api_key.get_secret_value()}"
class UserPasswordCredentials(_BaseCredentials):
type: Literal["user_password"] = "user_password"
username: SecretStr
password: SecretStr
def auth_header(self) -> str:
# Converting the string to bytes using encode()
# Base64 encoding it with base64.b64encode()
# Converting the resulting bytes back to a string with decode()
return f"Basic {base64.b64encode(f'{self.username.get_secret_value()}:{self.password.get_secret_value()}'.encode()).decode()}"
Credentials = Annotated[
OAuth2Credentials | APIKeyCredentials | UserPasswordCredentials,
OAuth2Credentials | APIKeyCredentials,
Field(discriminator="type"),
]
CredentialsType = Literal["api_key", "oauth2", "user_password"]
CredentialsType = Literal["api_key", "oauth2"]
class OAuthState(BaseModel):
@@ -365,27 +347,3 @@ def CredentialsField(
class ContributorDetails(BaseModel):
name: str = Field(title="Name", description="The name of the contributor.")
class AutoTopUpConfig(BaseModel):
amount: int
"""Amount of credits to top up."""
threshold: int
"""Threshold to trigger auto top up."""
class UserTransaction(BaseModel):
transaction_time: datetime = datetime.min
transaction_type: CreditTransactionType = CreditTransactionType.USAGE
amount: int = 0
balance: int = 0
description: str | None = None
usage_graph_id: str | None = None
usage_execution_id: str | None = None
usage_node_count: int = 0
usage_start_time: datetime = datetime.max
class TransactionHistory(BaseModel):
transactions: list[UserTransaction]
next_transaction_time: datetime | None

View File

@@ -1,360 +0,0 @@
import logging
from datetime import datetime, timedelta
from enum import Enum
from typing import Annotated, Generic, Optional, TypeVar, Union
from prisma import Json
from prisma.enums import NotificationType
from prisma.models import NotificationEvent, UserNotificationBatch
from prisma.types import UserNotificationBatchWhereInput
# from backend.notifications.models import NotificationEvent
from pydantic import BaseModel, EmailStr, Field, field_validator
from backend.server.v2.store.exceptions import DatabaseError
from .db import transaction
logger = logging.getLogger(__name__)
T_co = TypeVar("T_co", bound="BaseNotificationData", covariant=True)
class BatchingStrategy(Enum):
IMMEDIATE = "immediate" # Send right away (errors, critical notifications)
HOURLY = "hourly" # Batch for up to an hour (usage reports)
DAILY = "daily" # Daily digest (summary notifications)
BACKOFF = "backoff" # Backoff strategy (exponential backoff)
class BaseNotificationData(BaseModel):
pass
class AgentRunData(BaseNotificationData):
agent_name: str
credits_used: float
# remaining_balance: float
execution_time: float
graph_id: str
node_count: int = Field(..., description="Number of nodes executed")
class ZeroBalanceData(BaseNotificationData):
last_transaction: float
last_transaction_time: datetime
top_up_link: str
class LowBalanceData(BaseNotificationData):
current_balance: float
threshold_amount: float
top_up_link: str
recent_usage: float = Field(..., description="Usage in the last 24 hours")
class BlockExecutionFailedData(BaseNotificationData):
block_name: str
block_id: str
error_message: str
graph_id: str
node_id: str
execution_id: str
class ContinuousAgentErrorData(BaseNotificationData):
agent_name: str
error_message: str
graph_id: str
execution_id: str
start_time: datetime
error_time: datetime
attempts: int = Field(..., description="Number of retry attempts made")
class BaseSummaryData(BaseNotificationData):
total_credits_used: float
total_executions: int
most_used_agent: str
total_execution_time: float
successful_runs: int
failed_runs: int
average_execution_time: float
cost_breakdown: dict[str, float]
class DailySummaryData(BaseSummaryData):
date: datetime
class WeeklySummaryData(BaseSummaryData):
start_date: datetime
end_date: datetime
week_number: int
year: int
class MonthlySummaryData(BaseSummaryData):
month: int
year: int
NotificationData = Annotated[
Union[
AgentRunData,
ZeroBalanceData,
LowBalanceData,
BlockExecutionFailedData,
ContinuousAgentErrorData,
MonthlySummaryData,
],
Field(discriminator="type"),
]
class NotificationEventDTO(BaseModel):
user_id: str
type: NotificationType
data: dict
created_at: datetime = Field(default_factory=datetime.now)
class NotificationEventModel(BaseModel, Generic[T_co]):
user_id: str
type: NotificationType
data: T_co
created_at: datetime = Field(default_factory=datetime.now)
@property
def strategy(self) -> BatchingStrategy:
return NotificationTypeOverride(self.type).strategy
@field_validator("type", mode="before")
def uppercase_type(cls, v):
if isinstance(v, str):
return v.upper()
return v
@property
def template(self) -> str:
return NotificationTypeOverride(self.type).template
def get_data_type(
notification_type: NotificationType,
) -> type[BaseNotificationData]:
return {
NotificationType.AGENT_RUN: AgentRunData,
NotificationType.ZERO_BALANCE: ZeroBalanceData,
NotificationType.LOW_BALANCE: LowBalanceData,
NotificationType.BLOCK_EXECUTION_FAILED: BlockExecutionFailedData,
NotificationType.CONTINUOUS_AGENT_ERROR: ContinuousAgentErrorData,
NotificationType.DAILY_SUMMARY: DailySummaryData,
NotificationType.WEEKLY_SUMMARY: WeeklySummaryData,
NotificationType.MONTHLY_SUMMARY: MonthlySummaryData,
}[notification_type]
class NotificationBatch(BaseModel):
user_id: str
events: list[NotificationEvent]
strategy: BatchingStrategy
last_update: datetime = Field(default_factory=datetime.now)
class NotificationResult(BaseModel):
success: bool
message: Optional[str] = None
class NotificationTypeOverride:
def __init__(self, notification_type: NotificationType):
self.notification_type = notification_type
@property
def strategy(self) -> BatchingStrategy:
BATCHING_RULES = {
# These are sent right away by the notification service
NotificationType.AGENT_RUN: BatchingStrategy.IMMEDIATE,
# These are batched by the notification service, but with a backoff strategy
NotificationType.ZERO_BALANCE: BatchingStrategy.BACKOFF,
NotificationType.LOW_BALANCE: BatchingStrategy.BACKOFF,
NotificationType.BLOCK_EXECUTION_FAILED: BatchingStrategy.BACKOFF,
NotificationType.CONTINUOUS_AGENT_ERROR: BatchingStrategy.BACKOFF,
# These aren't batched by the notification service, so we send them right away
NotificationType.DAILY_SUMMARY: BatchingStrategy.IMMEDIATE,
NotificationType.WEEKLY_SUMMARY: BatchingStrategy.IMMEDIATE,
NotificationType.MONTHLY_SUMMARY: BatchingStrategy.IMMEDIATE,
}
return BATCHING_RULES.get(self.notification_type, BatchingStrategy.HOURLY)
@property
def template(self) -> str:
"""Returns template name for this notification type"""
return {
NotificationType.AGENT_RUN: "agent_run.html",
NotificationType.ZERO_BALANCE: "zero_balance.html",
NotificationType.LOW_BALANCE: "low_balance.html",
NotificationType.BLOCK_EXECUTION_FAILED: "block_failed.html",
NotificationType.CONTINUOUS_AGENT_ERROR: "agent_error.html",
NotificationType.DAILY_SUMMARY: "daily_summary.html",
NotificationType.WEEKLY_SUMMARY: "weekly_summary.html",
NotificationType.MONTHLY_SUMMARY: "monthly_summary.html",
}[self.notification_type]
class NotificationPreference(BaseModel):
user_id: str
email: EmailStr
preferences: dict[NotificationType, bool] = Field(
default_factory=dict, description="Which notifications the user wants"
)
daily_limit: int = 10 # Max emails per day
emails_sent_today: int = 0
last_reset_date: datetime = Field(default_factory=datetime.now)
def get_batch_delay(notification_type: NotificationType) -> timedelta:
return {
NotificationType.AGENT_RUN: timedelta(seconds=1),
NotificationType.ZERO_BALANCE: timedelta(minutes=60),
NotificationType.LOW_BALANCE: timedelta(minutes=60),
NotificationType.BLOCK_EXECUTION_FAILED: timedelta(minutes=60),
NotificationType.CONTINUOUS_AGENT_ERROR: timedelta(minutes=60),
}[notification_type]
async def create_or_add_to_user_notification_batch(
user_id: str,
notification_type: NotificationType,
data: str,  # JSON-serialized NotificationEventModel
) -> dict:
try:
logger.info(
f"Creating or adding to notification batch for {user_id} with type {notification_type} and data {data}"
)
notification_data = NotificationEventModel[
get_data_type(notification_type)
].model_validate_json(data)
# Serialize the data
json_data: Json = Json(notification_data.data.model_dump_json())
# First try to find existing batch
existing_batch = await UserNotificationBatch.prisma().find_unique(
where={
"userId_type": {
"userId": user_id,
"type": notification_type,
}
},
include={"notifications": True},
)
if not existing_batch:
async with transaction() as tx:
notification_event = await tx.notificationevent.create(
data={
"type": notification_type,
"data": json_data,
}
)
# Create new batch
resp = await tx.usernotificationbatch.create(
data={
"userId": user_id,
"type": notification_type,
"notifications": {"connect": [{"id": notification_event.id}]},
},
include={"notifications": True},
)
return resp.model_dump()
else:
async with transaction() as tx:
notification_event = await tx.notificationevent.create(
data={
"type": notification_type,
"data": json_data,
"UserNotificationBatch": {"connect": {"id": existing_batch.id}},
}
)
# Add to existing batch
resp = await tx.usernotificationbatch.update(
where={"id": existing_batch.id},
data={
"notifications": {"connect": [{"id": notification_event.id}]}
},
include={"notifications": True},
)
if not resp:
raise DatabaseError(
f"Failed to add notification event {notification_event.id} to existing batch {existing_batch.id}"
)
return resp.model_dump()
except Exception as e:
raise DatabaseError(
f"Failed to create or add to notification batch for user {user_id} and type {notification_type}: {e}"
) from e
async def get_user_notification_last_message_in_batch(
user_id: str,
notification_type: NotificationType,
) -> NotificationEvent | None:
try:
batch = await UserNotificationBatch.prisma().find_first(
where={"userId": user_id, "type": notification_type},
order={"createdAt": "desc"},
)
if not batch:
return None
if not batch.notifications:
return None
return batch.notifications[-1]
except Exception as e:
raise DatabaseError(
f"Failed to get user notification last message in batch for user {user_id} and type {notification_type}: {e}"
) from e
async def empty_user_notification_batch(
user_id: str, notification_type: NotificationType
) -> None:
try:
async with transaction() as tx:
await tx.notificationevent.delete_many(
where={
"UserNotificationBatch": {
"is": {"userId": user_id, "type": notification_type}
}
}
)
await tx.usernotificationbatch.delete_many(
where=UserNotificationBatchWhereInput(
userId=user_id,
type=notification_type,
)
)
except Exception as e:
raise DatabaseError(
f"Failed to empty user notification batch for user {user_id} and type {notification_type}: {e}"
) from e
async def get_user_notification_batch(
user_id: str,
notification_type: NotificationType,
) -> UserNotificationBatch | None:
try:
return await UserNotificationBatch.prisma().find_first(
where={"userId": user_id, "type": notification_type},
include={"notifications": True},
)
except Exception as e:
raise DatabaseError(
f"Failed to get user notification batch for user {user_id} and type {notification_type}: {e}"
) from e

View File

@@ -1,296 +0,0 @@
import logging
from abc import ABC, abstractmethod
from enum import Enum
from typing import Awaitable, Optional
import aio_pika
import pika
import pika.adapters.blocking_connection
from pika.spec import BasicProperties
from pydantic import BaseModel
from backend.util.retry import conn_retry
from backend.util.settings import Settings
logger = logging.getLogger(__name__)
class ExchangeType(str, Enum):
DIRECT = "direct"
FANOUT = "fanout"
TOPIC = "topic"
HEADERS = "headers"
class Exchange(BaseModel):
name: str
type: ExchangeType
durable: bool = True
auto_delete: bool = False
class Queue(BaseModel):
name: str
durable: bool = True
auto_delete: bool = False
# Optional exchange binding configuration
exchange: Optional[Exchange] = None
routing_key: Optional[str] = None
arguments: Optional[dict] = None
class RabbitMQConfig(BaseModel):
"""Configuration for a RabbitMQ service instance"""
vhost: str = "/"
exchanges: list[Exchange]
queues: list[Queue]
class RabbitMQBase(ABC):
"""Base class for RabbitMQ connections with shared configuration"""
def __init__(self, config: RabbitMQConfig):
settings = Settings()
self.host = settings.config.rabbitmq_host
self.port = settings.config.rabbitmq_port
self.username = settings.secrets.rabbitmq_default_user
self.password = settings.secrets.rabbitmq_default_pass
self.config = config
self._connection = None
self._channel = None
@property
def is_connected(self) -> bool:
"""Check if we have a valid connection"""
return bool(self._connection)
@property
def is_ready(self) -> bool:
"""Check if we have a valid channel"""
return bool(self.is_connected and self._channel)
@abstractmethod
def connect(self) -> None | Awaitable[None]:
"""Establish connection to RabbitMQ"""
pass
@abstractmethod
def disconnect(self) -> None | Awaitable[None]:
"""Close connection to RabbitMQ"""
pass
@abstractmethod
def declare_infrastructure(self) -> None | Awaitable[None]:
"""Declare exchanges and queues for this service"""
pass
class SyncRabbitMQ(RabbitMQBase):
"""Synchronous RabbitMQ client"""
@property
def is_connected(self) -> bool:
return bool(self._connection and self._connection.is_open)
@property
def is_ready(self) -> bool:
return bool(self.is_connected and self._channel and self._channel.is_open)
@conn_retry("RabbitMQ", "Acquiring connection")
def connect(self) -> None:
if self.is_connected:
return
credentials = pika.PlainCredentials(self.username, self.password)
parameters = pika.ConnectionParameters(
host=self.host,
port=self.port,
virtual_host=self.config.vhost,
credentials=credentials,
heartbeat=600,
blocked_connection_timeout=300,
)
self._connection = pika.BlockingConnection(parameters)
self._channel = self._connection.channel()
self._channel.basic_qos(prefetch_count=1)
self.declare_infrastructure()
def disconnect(self) -> None:
if self._channel:
if self._channel.is_open:
self._channel.close()
self._channel = None
if self._connection:
if self._connection.is_open:
self._connection.close()
self._connection = None
def declare_infrastructure(self) -> None:
"""Declare exchanges and queues for this service"""
if not self.is_ready:
self.connect()
if self._channel is None:
raise RuntimeError("Channel should be established after connect")
# Declare exchanges
for exchange in self.config.exchanges:
self._channel.exchange_declare(
exchange=exchange.name,
exchange_type=exchange.type.value,
durable=exchange.durable,
auto_delete=exchange.auto_delete,
)
# Declare queues and bind them to exchanges
for queue in self.config.queues:
self._channel.queue_declare(
queue=queue.name,
durable=queue.durable,
auto_delete=queue.auto_delete,
arguments=queue.arguments,
)
if queue.exchange:
self._channel.queue_bind(
queue=queue.name,
exchange=queue.exchange.name,
routing_key=queue.routing_key or queue.name,
)
def publish_message(
self,
routing_key: str,
message: str,
exchange: Optional[Exchange] = None,
properties: Optional[BasicProperties] = None,
mandatory: bool = True,
) -> None:
if not self.is_ready:
self.connect()
if self._channel is None:
raise RuntimeError("Channel should be established after connect")
self._channel.basic_publish(
exchange=exchange.name if exchange else "",
routing_key=routing_key,
body=message.encode(),
properties=properties or BasicProperties(delivery_mode=2),
mandatory=mandatory,
)
def get_channel(self) -> pika.adapters.blocking_connection.BlockingChannel:
if not self.is_ready:
self.connect()
if self._channel is None:
raise RuntimeError("Channel should be established after connect")
return self._channel
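A minimal sketch wiring the models above to publish one message; the exchange, queue, and payload are illustrative, and broker host/credentials are assumed to be present in Settings:
notif_exchange = Exchange(name="notifications", type=ExchangeType.TOPIC)
notif_queue = Queue(
    name="notifications.immediate",
    exchange=notif_exchange,
    routing_key="notif.immediate",
)
client = SyncRabbitMQ(
    RabbitMQConfig(exchanges=[notif_exchange], queues=[notif_queue])
)
client.connect()  # also declares the exchange and the bound queue
client.publish_message(
    "notif.immediate", '{"type": "AGENT_RUN"}', exchange=notif_exchange
)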
class AsyncRabbitMQ(RabbitMQBase):
"""Asynchronous RabbitMQ client"""
@property
def is_connected(self) -> bool:
return bool(self._connection and not self._connection.is_closed)
@property
def is_ready(self) -> bool:
return bool(self.is_connected and self._channel and not self._channel.is_closed)
@conn_retry("AsyncRabbitMQ", "Acquiring async connection")
async def connect(self):
if self.is_connected:
return
self._connection = await aio_pika.connect_robust(
host=self.host,
port=self.port,
login=self.username,
password=self.password,
virtualhost=self.config.vhost.lstrip("/"),
)
self._channel = await self._connection.channel()
await self._channel.set_qos(prefetch_count=1)
await self.declare_infrastructure()
async def disconnect(self):
if self._channel:
await self._channel.close()
self._channel = None
if self._connection:
await self._connection.close()
self._connection = None
async def declare_infrastructure(self):
"""Declare exchanges and queues for this service"""
if not self.is_ready:
await self.connect()
if self._channel is None:
raise RuntimeError("Channel should be established after connect")
# Declare exchanges
for exchange in self.config.exchanges:
await self._channel.declare_exchange(
name=exchange.name,
type=exchange.type.value,
durable=exchange.durable,
auto_delete=exchange.auto_delete,
)
# Declare queues and bind them to exchanges
for queue in self.config.queues:
queue_obj = await self._channel.declare_queue(
name=queue.name,
durable=queue.durable,
auto_delete=queue.auto_delete,
arguments=queue.arguments,
)
if queue.exchange:
exchange = await self._channel.get_exchange(queue.exchange.name)
await queue_obj.bind(
exchange, routing_key=queue.routing_key or queue.name
)
async def publish_message(
self,
routing_key: str,
message: str,
exchange: Optional[Exchange] = None,
persistent: bool = True,
) -> None:
if not self.is_ready:
await self.connect()
if self._channel is None:
raise RuntimeError("Channel should be established after connect")
if exchange:
exchange_obj = await self._channel.get_exchange(exchange.name)
else:
exchange_obj = self._channel.default_exchange
await exchange_obj.publish(
aio_pika.Message(
body=message.encode(),
delivery_mode=(
aio_pika.DeliveryMode.PERSISTENT
if persistent
else aio_pika.DeliveryMode.NOT_PERSISTENT
),
),
routing_key=routing_key,
)
async def get_channel(self) -> aio_pika.abc.AbstractChannel:
if not self.is_ready:
await self.connect()
if self._channel is None:
raise RuntimeError("Channel should be established after connect")
return self._channel

View File

@@ -1,54 +1,44 @@
import logging
from datetime import datetime, timedelta
from typing import Optional, cast
from autogpt_libs.auth.models import DEFAULT_USER_ID
from fastapi import HTTPException
from prisma import Json
from prisma.enums import NotificationType
from prisma.models import User
from backend.data.db import prisma
from backend.data.model import UserIntegrations, UserMetadata, UserMetadataRaw
from backend.data.notifications import NotificationPreference
from backend.server.v2.store.exceptions import DatabaseError
from backend.util.encryption import JSONCryptor
logger = logging.getLogger(__name__)
async def get_or_create_user(user_data: dict) -> User:
try:
user_id = user_data.get("sub")
if not user_id:
raise HTTPException(status_code=401, detail="User ID not found in token")
user_id = user_data.get("sub")
if not user_id:
raise HTTPException(status_code=401, detail="User ID not found in token")
user_email = user_data.get("email")
if not user_email:
raise HTTPException(status_code=401, detail="Email not found in token")
user_email = user_data.get("email")
if not user_email:
raise HTTPException(status_code=401, detail="Email not found in token")
user = await prisma.user.find_unique(where={"id": user_id})
if not user:
user = await prisma.user.create(
data={
"id": user_id,
"email": user_email,
"name": user_data.get("user_metadata", {}).get("name"),
}
)
return User.model_validate(user)
except Exception as e:
raise DatabaseError(f"Failed to get or create user {user_data}: {e}") from e
async def get_user_by_id(user_id: str) -> User:
user = await prisma.user.find_unique(where={"id": user_id})
if not user:
raise ValueError(f"User not found with ID: {user_id}")
user = await prisma.user.create(
data={
"id": user_id,
"email": user_email,
"name": user_data.get("user_metadata", {}).get("name"),
}
)
return User.model_validate(user)
async def get_user_by_id(user_id: str) -> Optional[User]:
user = await prisma.user.find_unique(where={"id": user_id})
return User.model_validate(user) if user else None
async def create_default_user() -> Optional[User]:
user = await prisma.user.find_unique(where={"id": DEFAULT_USER_ID})
if not user:
@@ -138,70 +128,3 @@ async def migrate_and_encrypt_user_integrations():
where={"id": user.id},
data={"metadata": Json(raw_metadata)},
)
async def get_active_user_ids_in_timerange(start_time: str, end_time: str) -> list[str]:
try:
users = await User.prisma().find_many(
where={
"AgentGraphExecutions": {
"some": {
"createdAt": {
"gte": datetime.fromisoformat(start_time),
"lte": datetime.fromisoformat(end_time),
}
}
}
},
)
return [user.id for user in users]
except Exception as e:
raise DatabaseError(
f"Failed to get active user ids in timerange {start_time} to {end_time}: {e}"
) from e
async def get_active_users_ids() -> list[str]:
user_ids = await get_active_user_ids_in_timerange(
(datetime.now() - timedelta(days=30)).isoformat(),
datetime.now().isoformat(),
)
return user_ids
async def get_user_notification_preference(user_id: str) -> NotificationPreference:
try:
user = await User.prisma().find_unique_or_raise(
where={"id": user_id},
)
# Enable notifications by default if the user has no notification preference (which should never happen).
preferences: dict[NotificationType, bool] = {
NotificationType.AGENT_RUN: user.notifyOnAgentRun or True,
NotificationType.ZERO_BALANCE: user.notifyOnZeroBalance or True,
NotificationType.LOW_BALANCE: user.notifyOnLowBalance or True,
NotificationType.BLOCK_EXECUTION_FAILED: user.notifyOnBlockExecutionFailed
or True,
NotificationType.CONTINUOUS_AGENT_ERROR: user.notifyOnContinuousAgentError
or True,
NotificationType.DAILY_SUMMARY: user.notifyOnDailySummary or True,
NotificationType.WEEKLY_SUMMARY: user.notifyOnWeeklySummary or True,
NotificationType.MONTHLY_SUMMARY: user.notifyOnMonthlySummary or True,
}
daily_limit = user.maxEmailsPerDay or 3
notification_preference = NotificationPreference(
user_id=user.id,
email=user.email,
preferences=preferences,
daily_limit=daily_limit,
# TODO: address with later changes; for now we will just email them
emails_sent_today=0,
last_reset_date=datetime.now(),
)
return NotificationPreference.model_validate(notification_preference)
except Exception as e:
raise DatabaseError(
f"Failed to upsert user notification preference for user {user_id}: {e}"
) from e

View File

@@ -4,11 +4,9 @@ from typing import Any, Callable, Concatenate, Coroutine, ParamSpec, TypeVar, ca
from backend.data.credit import get_user_credit_model
from backend.data.execution import (
ExecutionResult,
NodeExecutionEntry,
RedisExecutionEventBus,
create_graph_execution,
get_execution_results,
get_executions_in_timerange,
get_incomplete_executions,
get_latest_execution,
update_execution_status,
@@ -18,19 +16,9 @@ from backend.data.execution import (
upsert_execution_output,
)
from backend.data.graph import get_graph, get_node
from backend.data.notifications import (
create_or_add_to_user_notification_batch,
empty_user_notification_batch,
get_user_notification_batch,
get_user_notification_last_message_in_batch,
)
from backend.data.user import (
get_active_user_ids_in_timerange,
get_active_users_ids,
get_user_by_id,
get_user_integrations,
get_user_metadata,
get_user_notification_preference,
update_user_integrations,
update_user_metadata,
)
@@ -83,7 +71,6 @@ class DatabaseManager(AppService):
update_node_execution_stats = exposed_run_and_wait(update_node_execution_stats)
upsert_execution_input = exposed_run_and_wait(upsert_execution_input)
upsert_execution_output = exposed_run_and_wait(upsert_execution_output)
get_executions_in_timerange = exposed_run_and_wait(get_executions_in_timerange)
# Graphs
get_node = exposed_run_and_wait(get_node)
@@ -91,31 +78,17 @@ class DatabaseManager(AppService):
# Credits
user_credit_model = get_user_credit_model()
get_or_refill_credit = cast(
Callable[[Any, str], int],
exposed_run_and_wait(user_credit_model.get_or_refill_credit),
)
spend_credits = cast(
Callable[[Any, NodeExecutionEntry, float, float], int],
Callable[[Any, str, int, str, dict[str, str], float, float], int],
exposed_run_and_wait(user_credit_model.spend_credits),
)
# User + User Metadata + User Integrations + User Notification Preferences
# User + User Metadata + User Integrations
get_user_metadata = exposed_run_and_wait(get_user_metadata)
update_user_metadata = exposed_run_and_wait(update_user_metadata)
get_user_integrations = exposed_run_and_wait(get_user_integrations)
update_user_integrations = exposed_run_and_wait(update_user_integrations)
get_active_user_ids_in_timerange = exposed_run_and_wait(
get_active_user_ids_in_timerange
)
get_user_by_id = exposed_run_and_wait(get_user_by_id)
get_user_notification_preference = exposed_run_and_wait(
get_user_notification_preference
)
get_active_users_ids = exposed_run_and_wait(get_active_users_ids)
# Notifications
create_or_add_to_user_notification_batch = exposed_run_and_wait(
create_or_add_to_user_notification_batch
)
get_user_notification_last_message_in_batch = exposed_run_and_wait(
get_user_notification_last_message_in_batch
)
empty_user_notification_batch = exposed_run_and_wait(empty_user_notification_batch)
get_user_notification_batch = exposed_run_and_wait(get_user_notification_batch)

View File

@@ -8,7 +8,7 @@ import threading
from concurrent.futures import Future, ProcessPoolExecutor
from contextlib import contextmanager
from multiprocessing.pool import AsyncResult, Pool
from typing import TYPE_CHECKING, Any, Generator, Optional, TypeVar, cast
from typing import TYPE_CHECKING, Any, Generator, TypeVar, cast
from redis.lock import Lock as RedisLock
@@ -40,7 +40,6 @@ from backend.data.graph import GraphModel, Link, Node
from backend.integrations.creds_manager import IntegrationCredentialsManager
from backend.util import json
from backend.util.decorator import error_logged, time_measured
from backend.util.file import clean_exec_files
from backend.util.logging import configure_logging
from backend.util.process import set_service_name
from backend.util.service import (
@@ -163,7 +162,6 @@ def execute_node(
# AgentExecutorBlock specially separate the node input_data & its input_default.
if isinstance(node_block, AgentExecutorBlock):
input_data = {**node.input_default, "data": input_data}
data.data = input_data
# Execute the node
input_data_str = json.dumps(input_data)
@@ -171,15 +169,7 @@ def execute_node(
log_metadata.info("Executed node with input", input=input_data_str)
update_execution(ExecutionStatus.RUNNING)
# Inject extra execution arguments for the blocks via kwargs
extra_exec_kwargs: dict = {
"graph_id": graph_id,
"node_id": node_id,
"graph_exec_id": graph_exec_id,
"node_exec_id": node_exec_id,
"user_id": user_id,
}
extra_exec_kwargs = {}
# Last-minute fetch credentials + acquire a system-wide read-write lock to prevent
# changes during execution. ⚠️ This means a set of credentials can only be used by
# one (running) block at a time; simultaneous execution of blocks using same
@@ -192,12 +182,12 @@ def execute_node(
extra_exec_kwargs[field_name] = credentials
output_size = 0
try:
# Charge the user for the execution before running the block.
# TODO: We assume the block is executed within 0 seconds.
# This is fine because, for now, no block is charged by execution time.
db_client.spend_credits(data, input_size + output_size, 0)
end_status = ExecutionStatus.COMPLETED
credit = db_client.get_or_refill_credit(user_id)
if credit < 0:
raise ValueError(f"Insufficient credit: {credit}")
try:
for output_name, output_data in node_block.execute(
input_data, **extra_exec_kwargs
):
@@ -216,12 +206,11 @@ def execute_node(
):
yield execution
update_execution(ExecutionStatus.COMPLETED)
except Exception as e:
end_status = ExecutionStatus.FAILED
error_msg = str(e)
log_metadata.exception(f"Node execution failed with error {error_msg}")
db_client.upsert_execution_output(node_exec_id, "error", error_msg)
update_execution(ExecutionStatus.FAILED)
for execution in _enqueue_next_nodes(
db_client=db_client,
@@ -243,6 +232,17 @@ def execute_node(
except Exception as e:
log_metadata.error(f"Failed to release credentials lock: {e}")
# Update execution status and spend credits
res = update_execution(end_status)
if end_status == ExecutionStatus.COMPLETED:
s = input_size + output_size
t = (
(res.end_time - res.start_time).total_seconds()
if res.end_time and res.start_time
else 0
)
db_client.spend_credits(user_id, credit, node_block.id, input_data, s, t)
# Update execution stats
if execution_stats is not None:
execution_stats.update(node_block.execution_stats)
@@ -260,7 +260,7 @@ def _enqueue_next_nodes(
log_metadata: LogMetadata,
) -> list[NodeExecutionEntry]:
def add_enqueued_execution(
node_exec_id: str, node_id: str, block_id: str, data: BlockInput
node_exec_id: str, node_id: str, data: BlockInput
) -> NodeExecutionEntry:
exec_update = db_client.update_execution_status(
node_exec_id, ExecutionStatus.QUEUED, data
@@ -272,7 +272,6 @@ def _enqueue_next_nodes(
graph_id=graph_id,
node_exec_id=node_exec_id,
node_id=node_id,
block_id=block_id,
data=data,
)
@@ -326,12 +325,7 @@ def _enqueue_next_nodes(
# Input is complete, enqueue the execution.
log_metadata.info(f"Enqueued {suffix}")
enqueued_executions.append(
add_enqueued_execution(
node_exec_id=next_node_exec_id,
node_id=next_node_id,
block_id=next_node.block_id,
data=next_node_input,
)
add_enqueued_execution(next_node_exec_id, next_node_id, next_node_input)
)
# Next execution stops here if the link is not static.
@@ -361,12 +355,7 @@ def _enqueue_next_nodes(
continue
log_metadata.info(f"Enqueueing static-link execution {suffix}")
enqueued_executions.append(
add_enqueued_execution(
node_exec_id=iexec.node_exec_id,
node_id=next_node_id,
block_id=next_node.block_id,
data=idata,
)
add_enqueued_execution(iexec.node_exec_id, next_node_id, idata)
)
return enqueued_executions
@@ -559,15 +548,9 @@ class Executor:
q.add(execution)
log_metadata.info(f"Finished node execution {node_exec.node_exec_id}")
except Exception as e:
# Avoid marking a user error as an actual system error.
if isinstance(e, ValueError):
log_metadata.info(
f"Failed node execution {node_exec.node_exec_id}: {e}"
)
else:
log_metadata.exception(
f"Failed node execution {node_exec.node_exec_id}: {e}"
)
log_metadata.exception(
f"Failed node execution {node_exec.node_exec_id}: {e}"
)
@classmethod
def on_graph_executor_start(cls):
@@ -665,10 +648,6 @@ class Executor:
try:
queue = ExecutionQueue[NodeExecutionEntry]()
for node_exec in graph_exec.start_node_execs:
exec_update = cls.db_client.update_execution_status(
node_exec.node_exec_id, ExecutionStatus.QUEUED, node_exec.data
)
cls.db_client.send_execution_update(exec_update)
queue.add(node_exec)
running_executions: dict[str, AsyncResult] = {}
@@ -737,7 +716,6 @@ class Executor:
finished = True
cancel.set()
cancel_thread.join()
clean_exec_files(graph_exec.graph_exec_id)
return (
exec_stats,
@@ -802,8 +780,7 @@ class ExecutionManager(AppService):
graph_id: str,
data: BlockInput,
user_id: str,
graph_version: Optional[int] = None,
preset_id: str | None = None,
graph_version: int | None = None,
) -> GraphExecutionEntry:
graph: GraphModel | None = self.db_client.get_graph(
graph_id=graph_id, user_id=user_id, version=graph_version
@@ -825,9 +802,9 @@ class ExecutionManager(AppService):
# Extract request input data, and assign it to the input pin.
if block.block_type == BlockType.INPUT:
input_name = node.input_default.get("name")
if input_name and input_name in data:
input_data = {"value": data[input_name]}
name = node.input_default.get("name")
if name and name in data:
input_data = {"value": data[name]}
# Extract webhook payload, and assign it to the input pin
webhook_payload_key = f"webhook_{node.webhook_id}_payload"
@@ -852,7 +829,6 @@ class ExecutionManager(AppService):
graph_version=graph.version,
nodes_input=nodes_input,
user_id=user_id,
preset_id=preset_id,
)
starting_node_execs = []
@@ -864,15 +840,17 @@ class ExecutionManager(AppService):
graph_id=node_exec.graph_id,
node_exec_id=node_exec.node_exec_id,
node_id=node_exec.node_id,
block_id=node_exec.block_id,
data=node_exec.input_data,
)
)
exec_update = self.db_client.update_execution_status(
node_exec.node_exec_id, ExecutionStatus.QUEUED, node_exec.input_data
)
self.db_client.send_execution_update(exec_update)
graph_exec = GraphExecutionEntry(
user_id=user_id,
graph_id=graph_id,
graph_version=graph_version or 0,
graph_exec_id=graph_exec_id,
start_node_execs=starting_node_execs,
)

View File

@@ -63,10 +63,7 @@ def execute_graph(**kwargs):
try:
log(f"Executing recurring job for graph #{args.graph_id}")
get_execution_client().add_execution(
graph_id=args.graph_id,
data=args.input_data,
user_id=args.user_id,
graph_version=args.graph_version,
args.graph_id, args.input_data, args.user_id
)
except Exception as e:
logger.exception(f"Error executing graph {args.graph_id}: {e}")

View File

@@ -23,15 +23,6 @@ from backend.util.settings import Settings
settings = Settings()
# This is an override since Ollama doesn't actually require an API key, but the credential system enforces that one be attached
ollama_credentials = APIKeyCredentials(
id="744fdc56-071a-4761-b5a5-0af0ce10a2b5",
provider="ollama",
api_key=SecretStr("FAKE_API_KEY"),
title="Use Credits for Ollama",
expires_at=None,
)
revid_credentials = APIKeyCredentials(
id="fdb7f412-f519-48d1-9b5f-d2f73d0e01fe",
provider="revid",
@@ -130,24 +121,9 @@ nvidia_credentials = APIKeyCredentials(
title="Use Credits for Nvidia",
expires_at=None,
)
screenshotone_credentials = APIKeyCredentials(
id="3b1bdd16-8818-4bc2-8cbb-b23f9a3439ed",
provider="screenshotone",
api_key=SecretStr(settings.secrets.screenshotone_api_key),
title="Use Credits for ScreenshotOne",
expires_at=None,
)
mem0_credentials = APIKeyCredentials(
id="ed55ac19-356e-4243-a6cb-bc599e9b716f",
provider="mem0",
api_key=SecretStr(settings.secrets.mem0_api_key),
title="Use Credits for Mem0",
expires_at=None,
)
DEFAULT_CREDENTIALS = [
ollama_credentials,
revid_credentials,
ideogram_credentials,
replicate_credentials,
@@ -161,9 +137,7 @@ DEFAULT_CREDENTIALS = [
fal_credentials,
exa_credentials,
e2b_credentials,
mem0_credentials,
nvidia_credentials,
screenshotone_credentials,
]
@@ -195,10 +169,6 @@ class IntegrationCredentialsStore:
def get_all_creds(self, user_id: str) -> list[Credentials]:
users_credentials = self._get_user_integrations(user_id).credentials
all_credentials = users_credentials
# These will always be added
all_credentials.append(ollama_credentials)
# These will only be added if the API key is set
if settings.secrets.revid_api_key:
all_credentials.append(revid_credentials)
if settings.secrets.ideogram_api_key:
@@ -227,10 +197,6 @@ class IntegrationCredentialsStore:
all_credentials.append(e2b_credentials)
if settings.secrets.nvidia_api_key:
all_credentials.append(nvidia_credentials)
if settings.secrets.screenshotone_api_key:
all_credentials.append(screenshotone_credentials)
if settings.secrets.mem0_api_key:
all_credentials.append(mem0_credentials)
return all_credentials
def get_creds_by_id(self, user_id: str, credentials_id: str) -> Credentials | None:

View File

@@ -1,10 +1,7 @@
from typing import TYPE_CHECKING
from backend.integrations.oauth.todoist import TodoistOAuthHandler
from .github import GitHubOAuthHandler
from .google import GoogleOAuthHandler
from .linear import LinearOAuthHandler
from .notion import NotionOAuthHandler
from .twitter import TwitterOAuthHandler
@@ -20,8 +17,6 @@ HANDLERS_BY_NAME: dict["ProviderName", type["BaseOAuthHandler"]] = {
GoogleOAuthHandler,
NotionOAuthHandler,
TwitterOAuthHandler,
LinearOAuthHandler,
TodoistOAuthHandler,
]
}
# --8<-- [end:HANDLERS_BY_NAMEExample]

View File

@@ -1,165 +0,0 @@
import json
from typing import Optional
from urllib.parse import urlencode
from pydantic import SecretStr
from backend.blocks.linear._api import LinearAPIException
from backend.data.model import APIKeyCredentials, OAuth2Credentials
from backend.integrations.providers import ProviderName
from backend.util.request import requests
from .base import BaseOAuthHandler
class LinearOAuthHandler(BaseOAuthHandler):
"""
OAuth2 handler for Linear.
"""
PROVIDER_NAME = ProviderName.LINEAR
def __init__(self, client_id: str, client_secret: str, redirect_uri: str):
self.client_id = client_id
self.client_secret = client_secret
self.redirect_uri = redirect_uri
self.auth_base_url = "https://linear.app/oauth/authorize"
self.token_url = "https://api.linear.app/oauth/token" # Correct token URL
self.revoke_url = "https://api.linear.app/oauth/revoke"
def get_login_url(
self, scopes: list[str], state: str, code_challenge: Optional[str]
) -> str:
params = {
"client_id": self.client_id,
"redirect_uri": self.redirect_uri,
"response_type": "code", # Important: include "response_type"
"scope": ",".join(scopes), # Comma-separated, not space-separated
"state": state,
}
return f"{self.auth_base_url}?{urlencode(params)}"
def exchange_code_for_tokens(
self, code: str, scopes: list[str], code_verifier: Optional[str]
) -> OAuth2Credentials:
return self._request_tokens({"code": code, "redirect_uri": self.redirect_uri})
def revoke_tokens(self, credentials: OAuth2Credentials) -> bool:
if not credentials.access_token:
raise ValueError("No access token to revoke")
headers = {
"Authorization": f"Bearer {credentials.access_token.get_secret_value()}"
}
response = requests.post(self.revoke_url, headers=headers)
if not response.ok:
try:
error_data = response.json()
error_message = error_data.get("error", "Unknown error")
except json.JSONDecodeError:
error_message = response.text
raise LinearAPIException(
f"Failed to revoke Linear tokens ({response.status_code}): {error_message}",
response.status_code,
)
return True # Linear doesn't return JSON on successful revoke
def _refresh_tokens(self, credentials: OAuth2Credentials) -> OAuth2Credentials:
if not credentials.refresh_token:
raise ValueError(
"No refresh token available."
) # Linear uses non-expiring tokens
return self._request_tokens(
{
"refresh_token": credentials.refresh_token.get_secret_value(),
"grant_type": "refresh_token",
}
)
def _request_tokens(
self,
params: dict[str, str],
current_credentials: Optional[OAuth2Credentials] = None,
) -> OAuth2Credentials:
request_body = {
"client_id": self.client_id,
"client_secret": self.client_secret,
"grant_type": "authorization_code", # Ensure grant_type is correct
**params,
}
headers = {
"Content-Type": "application/x-www-form-urlencoded"
} # Correct header for token request
response = requests.post(self.token_url, data=request_body, headers=headers)
if not response.ok:
try:
error_data = response.json()
error_message = error_data.get("error", "Unknown error")
except json.JSONDecodeError:
error_message = response.text
raise LinearAPIException(
f"Failed to fetch Linear tokens ({response.status_code}): {error_message}",
response.status_code,
)
token_data = response.json()
# Note: Linear access tokens do not expire, so we set expires_at to None
new_credentials = OAuth2Credentials(
provider=self.PROVIDER_NAME,
title=current_credentials.title if current_credentials else None,
username=token_data.get("user", {}).get(
"name", "Unknown User"
), # extract the user's name, falling back to "Unknown User"
access_token=token_data["access_token"],
scopes=token_data["scope"].split(
","
), # Linear returns comma-separated scopes
refresh_token=token_data.get(
"refresh_token"
), # Linear uses non-expiring tokens so this might be null
access_token_expires_at=None,
refresh_token_expires_at=None,
)
if current_credentials:
new_credentials.id = current_credentials.id
return new_credentials
def _request_username(self, access_token: str) -> Optional[str]:
# Use the LinearClient to fetch user details using GraphQL
from backend.blocks.linear._api import LinearClient
try:
linear_client = LinearClient(
APIKeyCredentials(
api_key=SecretStr(access_token),
title="temp",
provider=self.PROVIDER_NAME,
expires_at=None,
)
) # Temporary credentials for this request
query = """
query Viewer {
viewer {
name
}
}
"""
response = linear_client.query(query)
return response["viewer"]["name"]
except Exception as e: # Handle any errors
print(f"Error fetching username: {e}")
return None
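The deleted Linear handler remains a useful reference for Linear's OAuth quirks: scopes are comma-separated rather than space-separated, response_type must be sent explicitly, and access tokens never expire. A hedged, standalone sketch of just the authorize-URL step (client values are placeholders):

from urllib.parse import urlencode


def linear_login_url(client_id: str, redirect_uri: str, scopes: list[str], state: str) -> str:
    params = {
        "client_id": client_id,
        "redirect_uri": redirect_uri,
        "response_type": "code",    # Linear requires an explicit response_type
        "scope": ",".join(scopes),  # comma-separated, not space-separated
        "state": state,
    }
    return f"https://linear.app/oauth/authorize?{urlencode(params)}"


print(linear_login_url("my-client-id", "https://example.com/callback", ["read", "write"], "xyz"))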

View File

@@ -1,81 +0,0 @@
import urllib.parse
from typing import ClassVar, Optional
import requests
from backend.data.model import OAuth2Credentials, ProviderName
from backend.integrations.oauth.base import BaseOAuthHandler
class TodoistOAuthHandler(BaseOAuthHandler):
PROVIDER_NAME = ProviderName.TODOIST
DEFAULT_SCOPES: ClassVar[list[str]] = [
"task:add",
"data:read",
"data:read_write",
"data:delete",
"project:delete",
]
AUTHORIZE_URL = "https://todoist.com/oauth/authorize"
TOKEN_URL = "https://todoist.com/oauth/access_token"
def __init__(self, client_id: str, client_secret: str, redirect_uri: str):
self.client_id = client_id
self.client_secret = client_secret
self.redirect_uri = redirect_uri
def get_login_url(
self, scopes: list[str], state: str, code_challenge: Optional[str]
) -> str:
params = {
"client_id": self.client_id,
"scope": ",".join(self.DEFAULT_SCOPES),
"state": state,
}
return f"{self.AUTHORIZE_URL}?{urllib.parse.urlencode(params)}"
def exchange_code_for_tokens(
self, code: str, scopes: list[str], code_verifier: Optional[str]
) -> OAuth2Credentials:
"""Exchange authorization code for access tokens"""
data = {
"client_id": self.client_id,
"client_secret": self.client_secret,
"code": code,
"redirect_uri": self.redirect_uri,
}
response = requests.post(self.TOKEN_URL, data=data)
response.raise_for_status()
tokens = response.json()
response = requests.post(
"https://api.todoist.com/sync/v9/sync",
headers={"Authorization": f"Bearer {tokens['access_token']}"},
data={"sync_token": "*", "resource_types": '["user"]'},
)
response.raise_for_status()
user_info = response.json()
user_email = user_info["user"].get("email")
return OAuth2Credentials(
provider=self.PROVIDER_NAME,
title=None,
username=user_email,
access_token=tokens["access_token"],
refresh_token=None,
access_token_expires_at=None,
refresh_token_expires_at=None,
scopes=scopes,
)
def _refresh_tokens(self, credentials: OAuth2Credentials) -> OAuth2Credentials:
# Todoist does not support token refresh
return credentials
def revoke_tokens(self, credentials: OAuth2Credentials) -> bool:
return False
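Since Todoist tokens neither expire nor refresh, the handler's refresh path is an identity function and revocation simply reports unsupported. A tiny standalone sketch of that contract (classes are simplified stand-ins, not the backend's real models):

from dataclasses import dataclass


@dataclass
class SimpleOAuth2Credentials:
    access_token: str
    refresh_token: str | None = None


class NoRefreshHandler:
    def _refresh_tokens(self, credentials: SimpleOAuth2Credentials) -> SimpleOAuth2Credentials:
        return credentials  # nothing to refresh: the token never expires

    def revoke_tokens(self, credentials: SimpleOAuth2Credentials) -> bool:
        return False  # provider offers no revocation endpoint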

View File

@@ -17,9 +17,7 @@ class ProviderName(str, Enum):
HUBSPOT = "hubspot"
IDEOGRAM = "ideogram"
JINA = "jina"
LINEAR = "linear"
MEDIUM = "medium"
MEM0 = "mem0"
NOTION = "notion"
NVIDIA = "nvidia"
OLLAMA = "ollama"
@@ -27,13 +25,9 @@ class ProviderName(str, Enum):
OPENWEATHERMAP = "openweathermap"
OPEN_ROUTER = "open_router"
PINECONE = "pinecone"
REDDIT = "reddit"
REPLICATE = "replicate"
REVID = "revid"
SCREENSHOTONE = "screenshotone"
SLANT3D = "slant3d"
SMTP = "smtp"
TWITTER = "twitter"
TODOIST = "todoist"
UNREAL_SPEECH = "unreal_speech"
# --8<-- [end:ProviderName]

View File

@@ -168,7 +168,7 @@ class BaseWebhooksManager(ABC, Generic[WT]):
id = str(uuid4())
secret = secrets.token_hex(32)
provider_name: ProviderName = self.PROVIDER_NAME
provider_name = self.PROVIDER_NAME
ingress_url = webhook_ingress_url(provider_name=provider_name, webhook_id=id)
if register:
if not credentials:

View File

@@ -1,7 +1,7 @@
import logging
from backend.data import integrations
from backend.data.model import Credentials
from backend.data.model import APIKeyCredentials, Credentials, OAuth2Credentials
from ._base import WT, BaseWebhooksManager
@@ -25,6 +25,6 @@ class ManualWebhookManagerBase(BaseWebhooksManager[WT]):
async def _deregister_webhook(
self,
webhook: integrations.Webhook,
credentials: Credentials,
credentials: OAuth2Credentials | APIKeyCredentials,
) -> None:
pass

View File

@@ -67,7 +67,7 @@ class GithubWebhooksManager(BaseWebhooksManager):
headers = {
**self.GITHUB_API_DEFAULT_HEADERS,
"Authorization": credentials.auth_header(),
"Authorization": credentials.bearer(),
}
repo, github_hook_id = webhook.resource, webhook.provider_webhook_id
@@ -96,7 +96,7 @@ class GithubWebhooksManager(BaseWebhooksManager):
headers = {
**self.GITHUB_API_DEFAULT_HEADERS,
"Authorization": credentials.auth_header(),
"Authorization": credentials.bearer(),
}
webhook_data = {
"name": "web",
@@ -142,7 +142,7 @@ class GithubWebhooksManager(BaseWebhooksManager):
headers = {
**self.GITHUB_API_DEFAULT_HEADERS,
"Authorization": credentials.auth_header(),
"Authorization": credentials.bearer(),
}
if webhook_type == self.WebhookType.REPO:
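The three hunks above swap credentials.auth_header() for credentials.bearer(); neither method's definition appears in this compare, so the following is only an assumption about the shape of such a helper, shown as a stand-in class:

from pydantic import SecretStr


class OAuth2CredsSketch:
    def __init__(self, access_token: SecretStr):
        self.access_token = access_token

    def bearer(self) -> str:
        # Hypothetical: always formats a Bearer authorization header
        return f"Bearer {self.access_token.get_secret_value()}"


creds = OAuth2CredsSketch(SecretStr("gho_example"))
headers = {"Accept": "application/vnd.github+json", "Authorization": creds.bearer()}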

View File

@@ -20,28 +20,24 @@ class ConnectionManager:
for subscribers in self.subscriptions.values():
subscribers.discard(websocket)
async def subscribe(self, graph_id: str, graph_version: int, websocket: WebSocket):
key = f"{graph_id}_{graph_version}"
if key not in self.subscriptions:
self.subscriptions[key] = set()
self.subscriptions[key].add(websocket)
async def subscribe(self, graph_id: str, websocket: WebSocket):
if graph_id not in self.subscriptions:
self.subscriptions[graph_id] = set()
self.subscriptions[graph_id].add(websocket)
async def unsubscribe(
self, graph_id: str, graph_version: int, websocket: WebSocket
):
key = f"{graph_id}_{graph_version}"
if key in self.subscriptions:
self.subscriptions[key].discard(websocket)
if not self.subscriptions[key]:
del self.subscriptions[key]
async def unsubscribe(self, graph_id: str, websocket: WebSocket):
if graph_id in self.subscriptions:
self.subscriptions[graph_id].discard(websocket)
if not self.subscriptions[graph_id]:
del self.subscriptions[graph_id]
async def send_execution_result(self, result: execution.ExecutionResult):
key = f"{result.graph_id}_{result.graph_version}"
if key in self.subscriptions:
graph_id = result.graph_id
if graph_id in self.subscriptions:
message = WsMessage(
method=Methods.EXECUTION_EVENT,
channel=key,
channel=graph_id,
data=result.model_dump(),
).model_dump_json()
for connection in self.subscriptions[key]:
for connection in self.subscriptions[graph_id]:
await connection.send_text(message)
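The ConnectionManager change drops graph_version from the subscription key, so events for any version of a graph now reach everyone subscribed to that graph_id. A framework-free sketch of the same fan-out (queues stand in for the real WebSocket connections):

import asyncio


class FanOut:
    def __init__(self) -> None:
        self.subscriptions: dict[str, set] = {}

    def subscribe(self, channel: str, q: asyncio.Queue) -> None:
        self.subscriptions.setdefault(channel, set()).add(q)

    async def publish(self, channel: str, message: str) -> None:
        for q in self.subscriptions.get(channel, set()):
            await q.put(message)


async def main() -> None:
    hub, inbox = FanOut(), asyncio.Queue()
    hub.subscribe("graph-123", inbox)           # keyed by graph_id alone now
    await hub.publish("graph-123", "executed")  # reaches subscribers of every version
    print(await inbox.get())


asyncio.run(main())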

View File

@@ -1,11 +0,0 @@
from fastapi import FastAPI
from .routes.v1 import v1_router
external_app = FastAPI(
title="AutoGPT External API",
description="External API for AutoGPT integrations",
docs_url="/docs",
version="1.0",
)
external_app.include_router(v1_router, prefix="/v1")

View File

@@ -1,37 +0,0 @@
from fastapi import Depends, HTTPException, Request
from fastapi.security import APIKeyHeader
from prisma.enums import APIKeyPermission
from backend.data.api_key import has_permission, validate_api_key
api_key_header = APIKeyHeader(name="X-API-Key")
async def require_api_key(request: Request):
"""Base middleware for API key authentication"""
api_key = await api_key_header(request)
if api_key is None:
raise HTTPException(status_code=401, detail="Missing API key")
api_key_obj = await validate_api_key(api_key)
if not api_key_obj:
raise HTTPException(status_code=401, detail="Invalid API key")
request.state.api_key = api_key_obj
return api_key_obj
def require_permission(permission: APIKeyPermission):
"""Dependency function for checking specific permissions"""
async def check_permission(api_key=Depends(require_api_key)):
if not has_permission(api_key, permission):
raise HTTPException(
status_code=403,
detail=f"API key missing required permission: {permission}",
)
return api_key
return check_permission
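require_permission is a dependency factory: it captures the permission and returns an inner dependency that FastAPI resolves per request. A hedged usage sketch with the auth logic stubbed out so it runs standalone (the route and enum value below are illustrative, not from the diff):

from enum import Enum

from fastapi import Depends, FastAPI


class APIKeyPermission(str, Enum):
    EXECUTE_GRAPH = "EXECUTE_GRAPH"


def require_permission(permission: APIKeyPermission):
    async def check_permission() -> str:
        # Stub: the real dependency validates X-API-Key and checks stored permissions
        return "api-key-id"
    return check_permission


app = FastAPI()


@app.post("/graphs/{graph_id}/execute")
async def execute(graph_id: str, api_key=Depends(require_permission(APIKeyPermission.EXECUTE_GRAPH))):
    return {"graph_id": graph_id, "authorized_key": api_key}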

View File

@@ -1,152 +0,0 @@
import logging
from collections import defaultdict
from typing import Annotated, Any, Dict, List, Optional, Sequence
from autogpt_libs.utils.cache import thread_cached
from fastapi import APIRouter, Body, Depends, HTTPException
from prisma.enums import AgentExecutionStatus, APIKeyPermission
from typing_extensions import TypedDict
import backend.data.block
from backend.data import execution as execution_db
from backend.data import graph as graph_db
from backend.data.api_key import APIKey
from backend.data.block import BlockInput, CompletedBlockOutput
from backend.data.execution import ExecutionResult
from backend.executor import ExecutionManager
from backend.server.external.middleware import require_permission
from backend.util.service import get_service_client
from backend.util.settings import Settings
@thread_cached
def execution_manager_client() -> ExecutionManager:
return get_service_client(ExecutionManager)
settings = Settings()
logger = logging.getLogger(__name__)
v1_router = APIRouter()
class NodeOutput(TypedDict):
key: str
value: Any
class ExecutionNode(TypedDict):
node_id: str
input: Any
output: Dict[str, Any]
class ExecutionNodeOutput(TypedDict):
node_id: str
outputs: List[NodeOutput]
class GraphExecutionResult(TypedDict):
execution_id: str
status: str
nodes: List[ExecutionNode]
output: Optional[List[Dict[str, str]]]
def get_outputs_with_names(results: List[ExecutionResult]) -> List[Dict[str, str]]:
outputs = []
for result in results:
if "output" in result.output_data:
output_value = result.output_data["output"][0]
name = result.output_data.get("name", [None])[0]
if output_value and name:
outputs.append({name: output_value})
return outputs
@v1_router.get(
path="/blocks",
tags=["blocks"],
dependencies=[Depends(require_permission(APIKeyPermission.READ_BLOCK))],
)
def get_graph_blocks() -> Sequence[dict[Any, Any]]:
blocks = [block() for block in backend.data.block.get_blocks().values()]
return [b.to_dict() for b in blocks]
@v1_router.post(
path="/blocks/{block_id}/execute",
tags=["blocks"],
dependencies=[Depends(require_permission(APIKeyPermission.EXECUTE_BLOCK))],
)
def execute_graph_block(
block_id: str,
data: BlockInput,
api_key: APIKey = Depends(require_permission(APIKeyPermission.EXECUTE_BLOCK)),
) -> CompletedBlockOutput:
obj = backend.data.block.get_block(block_id)
if not obj:
raise HTTPException(status_code=404, detail=f"Block #{block_id} not found.")
output = defaultdict(list)
for name, data in obj.execute(data):
output[name].append(data)
return output
@v1_router.post(
path="/graphs/{graph_id}/execute/{graph_version}",
tags=["graphs"],
)
def execute_graph(
graph_id: str,
graph_version: int,
node_input: Annotated[dict[str, Any], Body(..., embed=True, default_factory=dict)],
api_key: APIKey = Depends(require_permission(APIKeyPermission.EXECUTE_GRAPH)),
) -> dict[str, Any]:
try:
graph_exec = execution_manager_client().add_execution(
graph_id,
graph_version=graph_version,
data=node_input,
user_id=api_key.user_id,
)
return {"id": graph_exec.graph_exec_id}
except Exception as e:
msg = str(e).encode().decode("unicode_escape")
raise HTTPException(status_code=400, detail=msg)
@v1_router.get(
path="/graphs/{graph_id}/executions/{graph_exec_id}/results",
tags=["graphs"],
)
async def get_graph_execution_results(
graph_id: str,
graph_exec_id: str,
api_key: APIKey = Depends(require_permission(APIKeyPermission.READ_GRAPH)),
) -> GraphExecutionResult:
graph = await graph_db.get_graph(graph_id, user_id=api_key.user_id)
if not graph:
raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.")
results = await execution_db.get_execution_results(graph_exec_id)
last_result = results[-1] if results else None
execution_status = (
last_result.status if last_result else AgentExecutionStatus.INCOMPLETE
)
outputs = get_outputs_with_names(results)
return GraphExecutionResult(
execution_id=graph_exec_id,
status=execution_status,
nodes=[
ExecutionNode(
node_id=result.node_id,
input=result.input_data.get("value", result.input_data),
output={k: v for k, v in result.output_data.items()},
)
for result in results
],
output=outputs if execution_status == AgentExecutionStatus.COMPLETED else None,
)
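For reference, a hypothetical client call against this external router; the base URL, port, and API key are placeholders, while the paths follow the route decorators above (the app is mounted at /external-api later in this compare):

import requests

BASE = "http://localhost:8000/external-api/v1"  # assumed host and mount point
HEADERS = {"X-API-Key": "agpt_example_key"}      # placeholder key

resp = requests.post(
    f"{BASE}/graphs/graph-123/execute/1",
    json={"node_input": {"topic": "hello"}},  # embedded per Body(..., embed=True)
    headers=HEADERS,
    timeout=30,
)
resp.raise_for_status()
execution_id = resp.json()["id"]

results = requests.get(
    f"{BASE}/graphs/graph-123/executions/{execution_id}/results",
    headers=HEADERS,
    timeout=30,
).json()
print(results["status"], results["output"])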

View File

@@ -2,7 +2,7 @@ import logging
from typing import TYPE_CHECKING, Annotated, Literal
from fastapi import APIRouter, Body, Depends, HTTPException, Path, Query, Request
from pydantic import BaseModel, Field
from pydantic import BaseModel, Field, SecretStr
from backend.data.graph import set_node_webhook
from backend.data.integrations import (
@@ -12,7 +12,12 @@ from backend.data.integrations import (
publish_webhook_event,
wait_for_webhook_event,
)
from backend.data.model import Credentials, CredentialsType, OAuth2Credentials
from backend.data.model import (
APIKeyCredentials,
Credentials,
CredentialsType,
OAuth2Credentials,
)
from backend.executor.manager import ExecutionManager
from backend.integrations.creds_manager import IntegrationCredentialsManager
from backend.integrations.oauth import HANDLERS_BY_NAME
@@ -105,11 +110,6 @@ def callback(
logger.debug(f"Received credentials with final scopes: {credentials.scopes}")
# Linear returns scopes as a single string with spaces, so we need to split them
# TODO: make a bypass of this part of the OAuth handler
if len(credentials.scopes) == 1 and " " in credentials.scopes[0]:
credentials.scopes = credentials.scopes[0].split(" ")
# Check if the granted scopes are sufficient for the requested scopes
if not set(scopes).issubset(set(credentials.scopes)):
# For now, we'll just log the warning and continue
@@ -199,21 +199,31 @@ def get_credential(
@router.post("/{provider}/credentials", status_code=201)
def create_credentials(
def create_api_key_credentials(
user_id: Annotated[str, Depends(get_user_id)],
provider: Annotated[
ProviderName, Path(title="The provider to create credentials for")
],
credentials: Credentials,
) -> Credentials:
credentials.provider = provider
api_key: Annotated[str, Body(title="The API key to store")],
title: Annotated[str, Body(title="Optional title for the credentials")],
expires_at: Annotated[
int | None, Body(title="Unix timestamp when the key expires")
] = None,
) -> APIKeyCredentials:
new_credentials = APIKeyCredentials(
provider=provider,
api_key=SecretStr(api_key),
title=title,
expires_at=expires_at,
)
try:
creds_manager.create(user_id, credentials)
creds_manager.create(user_id, new_credentials)
except Exception as e:
raise HTTPException(
status_code=500, detail=f"Failed to store credentials: {str(e)}"
)
return credentials
return new_credentials
class CredentialsDeletionResponse(BaseModel):
@@ -310,8 +320,7 @@ async def webhook_ingress_generic(
continue
logger.debug(f"Executing graph #{node.graph_id} node #{node.id}")
executor.add_execution(
graph_id=node.graph_id,
graph_version=node.graph_version,
node.graph_id,
data={f"webhook_{webhook_id}_payload": payload},
user_id=webhook.user_id,
)
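With the reworked create_api_key_credentials endpoint, clients post the raw key material instead of a full Credentials object. A hedged request sketch (host, route prefix, and auth header are assumptions; the field names come from the Body parameters above):

import requests

resp = requests.post(
    "http://localhost:8000/api/integrations/openai/credentials",  # assumed prefix
    json={
        "api_key": "sk-example",  # the raw key to store
        "title": "My OpenAI key",
        "expires_at": None,       # or a Unix timestamp
    },
    headers={"Authorization": "Bearer <user-jwt>"},  # assumed auth scheme
    timeout=30,
)
resp.raise_for_status()
print(resp.json()["id"])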

View File

@@ -25,15 +25,18 @@ class WsMessage(pydantic.BaseModel):
class ExecutionSubscription(pydantic.BaseModel):
graph_id: str
graph_version: int
class ExecuteGraphResponse(pydantic.BaseModel):
graph_exec_id: str
class SubscriptionDetails(pydantic.BaseModel):
event_type: str
channel: str
graph_id: str
class CreateGraph(pydantic.BaseModel):
graph: backend.data.graph.Graph
template_id: str | None = None
template_version: int | None = None
graph: backend.data.graph.Graph | None = None
class CreateAPIKeyRequest(pydantic.BaseModel):
@@ -53,22 +56,3 @@ class SetGraphActiveVersion(pydantic.BaseModel):
class UpdatePermissionsRequest(pydantic.BaseModel):
permissions: List[APIKeyPermission]
class Pagination(pydantic.BaseModel):
total_items: int = pydantic.Field(
description="Total number of items.", examples=[42]
)
total_pages: int = pydantic.Field(
description="Total number of pages.", examples=[2]
)
current_page: int = pydantic.Field(
description="Current_page page number.", examples=[1]
)
page_size: int = pydantic.Field(
description="Number of items per page.", examples=[25]
)
class RequestTopUp(pydantic.BaseModel):
credit_amount: int
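The Pagination model leaves total_pages to the caller; elsewhere in this compare (see get_presets) it is computed with ceiling division, which a short standalone check makes concrete:

def total_pages(total_items: int, page_size: int) -> int:
    return (total_items + page_size - 1) // page_size  # integer ceiling division


assert total_pages(42, 25) == 2  # matches the model's examples
assert total_pages(25, 25) == 1
assert total_pages(0, 25) == 0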

View File

@@ -1,8 +1,7 @@
import contextlib
import logging
from typing import Any, Optional
import typing
import autogpt_libs.auth.models
import fastapi
import fastapi.responses
import starlette.middleware.cors
@@ -17,14 +16,10 @@ import backend.data.db
import backend.data.graph
import backend.data.user
import backend.server.routers.v1
import backend.server.v2.library.db
import backend.server.v2.library.model
import backend.server.v2.library.routes
import backend.server.v2.store.model
import backend.server.v2.store.routes
import backend.util.service
import backend.util.settings
from backend.server.external.api import external_app
settings = backend.util.settings.Settings()
logger = logging.getLogger(__name__)
@@ -99,8 +94,6 @@ app.include_router(
backend.server.v2.library.routes.router, tags=["v2"], prefix="/api/library"
)
app.mount("/external-api", external_app)
@app.get(path="/health", tags=["health"], dependencies=[])
async def health():
@@ -124,27 +117,9 @@ class AgentServer(backend.util.service.AppProcess):
@staticmethod
async def test_execute_graph(
graph_id: str,
user_id: str,
graph_version: Optional[int] = None,
node_input: Optional[dict[str, Any]] = None,
graph_id: str, node_input: dict[typing.Any, typing.Any], user_id: str
):
return backend.server.routers.v1.execute_graph(
user_id=user_id,
graph_id=graph_id,
graph_version=graph_version,
node_input=node_input or {},
)
@staticmethod
async def test_get_graph(
graph_id: str,
graph_version: int,
user_id: str,
):
return await backend.server.routers.v1.get_graph(
graph_id, user_id, graph_version
)
return backend.server.routers.v1.execute_graph(graph_id, node_input, user_id)
@staticmethod
async def test_create_graph(
@@ -172,76 +147,7 @@ class AgentServer(backend.util.service.AppProcess):
@staticmethod
async def test_delete_graph(graph_id: str, user_id: str):
await backend.server.v2.library.db.delete_library_agent_by_graph_id(
graph_id=graph_id, user_id=user_id
)
return await backend.server.routers.v1.delete_graph(graph_id, user_id)
@staticmethod
async def test_get_presets(user_id: str, page: int = 1, page_size: int = 10):
return await backend.server.v2.library.routes.presets.get_presets(
user_id=user_id, page=page, page_size=page_size
)
@staticmethod
async def test_get_preset(preset_id: str, user_id: str):
return await backend.server.v2.library.routes.presets.get_preset(
preset_id=preset_id, user_id=user_id
)
@staticmethod
async def test_create_preset(
preset: backend.server.v2.library.model.CreateLibraryAgentPresetRequest,
user_id: str,
):
return await backend.server.v2.library.routes.presets.create_preset(
preset=preset, user_id=user_id
)
@staticmethod
async def test_update_preset(
preset_id: str,
preset: backend.server.v2.library.model.CreateLibraryAgentPresetRequest,
user_id: str,
):
return await backend.server.v2.library.routes.presets.update_preset(
preset_id=preset_id, preset=preset, user_id=user_id
)
@staticmethod
async def test_delete_preset(preset_id: str, user_id: str):
return await backend.server.v2.library.routes.presets.delete_preset(
preset_id=preset_id, user_id=user_id
)
@staticmethod
async def test_execute_preset(
graph_id: str,
graph_version: int,
preset_id: str,
user_id: str,
node_input: Optional[dict[str, Any]] = None,
):
return await backend.server.v2.library.routes.presets.execute_preset(
graph_id=graph_id,
graph_version=graph_version,
preset_id=preset_id,
node_input=node_input or {},
user_id=user_id,
)
@staticmethod
async def test_create_store_listing(
request: backend.server.v2.store.model.StoreSubmissionRequest, user_id: str
):
return await backend.server.v2.store.routes.create_submission(request, user_id)
@staticmethod
async def test_review_store_listing(
request: backend.server.v2.store.model.ReviewSubmissionRequest,
user: autogpt_libs.auth.models.User,
):
return await backend.server.v2.store.routes.review_submission(request, user)
def set_test_dependency_overrides(self, overrides: dict):
app.dependency_overrides.update(overrides)

View File

@@ -1,21 +1,18 @@
import asyncio
import logging
from collections import defaultdict
from datetime import datetime
from typing import TYPE_CHECKING, Annotated, Any, Sequence
import pydantic
import stripe
from autogpt_libs.auth.middleware import auth_middleware
from autogpt_libs.feature_flag.client import feature_flag
from autogpt_libs.utils.cache import thread_cached
from fastapi import APIRouter, Body, Depends, HTTPException, Request, Response
from fastapi import APIRouter, Depends, HTTPException
from typing_extensions import Optional, TypedDict
import backend.data.block
import backend.server.integrations.router
import backend.server.routers.analytics
import backend.server.v2.library.db as library_db
from backend.data import execution as execution_db
from backend.data import graph as graph_db
from backend.data.api_key import (
@@ -31,15 +28,7 @@ from backend.data.api_key import (
update_api_key_permissions,
)
from backend.data.block import BlockInput, CompletedBlockOutput
from backend.data.credit import (
AutoTopUpConfig,
TransactionHistory,
get_auto_top_up,
get_block_costs,
get_stripe_customer_id,
get_user_credit_model,
set_auto_top_up,
)
from backend.data.credit import get_block_costs, get_user_credit_model
from backend.data.user import get_or_create_user
from backend.executor import ExecutionManager, ExecutionScheduler, scheduler
from backend.integrations.creds_manager import IntegrationCredentialsManager
@@ -51,8 +40,6 @@ from backend.server.model import (
CreateAPIKeyRequest,
CreateAPIKeyResponse,
CreateGraph,
ExecuteGraphResponse,
RequestTopUp,
SetGraphActiveVersion,
UpdatePermissionsRequest,
)
@@ -78,6 +65,7 @@ settings = Settings()
logger = logging.getLogger(__name__)
integration_creds_manager = IntegrationCredentialsManager()
_user_credit_model = get_user_credit_model()
# Define the API routes
@@ -146,126 +134,7 @@ async def get_user_credits(
user_id: Annotated[str, Depends(get_user_id)],
) -> dict[str, int]:
# Credits can go negative, so ensure it's at least 0 for user to see.
return {"credits": max(await _user_credit_model.get_credits(user_id), 0)}
@v1_router.post(
path="/credits", tags=["credits"], dependencies=[Depends(auth_middleware)]
)
async def request_top_up(
request: RequestTopUp, user_id: Annotated[str, Depends(get_user_id)]
):
checkout_url = await _user_credit_model.top_up_intent(
user_id, request.credit_amount
)
return {"checkout_url": checkout_url}
@v1_router.patch(
path="/credits", tags=["credits"], dependencies=[Depends(auth_middleware)]
)
async def fulfill_checkout(user_id: Annotated[str, Depends(get_user_id)]):
await _user_credit_model.fulfill_checkout(user_id=user_id)
return Response(status_code=200)
@v1_router.post(
path="/credits/auto-top-up",
tags=["credits"],
dependencies=[Depends(auth_middleware)],
)
async def configure_user_auto_top_up(
request: AutoTopUpConfig, user_id: Annotated[str, Depends(get_user_id)]
) -> str:
if request.threshold < 0:
raise ValueError("Threshold must be greater than 0")
if request.amount < 500 and request.amount != 0:
raise ValueError("Amount must be greater than or equal to 500")
if request.amount < request.threshold:
raise ValueError("Amount must be greater than or equal to threshold")
current_balance = await _user_credit_model.get_credits(user_id)
if current_balance < request.threshold:
await _user_credit_model.top_up_credits(user_id, request.amount)
else:
await _user_credit_model.top_up_credits(user_id, 0)
await set_auto_top_up(
user_id, AutoTopUpConfig(threshold=request.threshold, amount=request.amount)
)
return "Auto top-up settings updated"
@v1_router.get(
path="/credits/auto-top-up",
tags=["credits"],
dependencies=[Depends(auth_middleware)],
)
async def get_user_auto_top_up(
user_id: Annotated[str, Depends(get_user_id)]
) -> AutoTopUpConfig:
return await get_auto_top_up(user_id)
@v1_router.post(path="/credits/stripe_webhook", tags=["credits"])
async def stripe_webhook(request: Request):
# Get the raw request body
payload = await request.body()
# Get the signature header
sig_header = request.headers.get("stripe-signature")
try:
event = stripe.Webhook.construct_event(
payload, sig_header, settings.secrets.stripe_webhook_secret
)
except ValueError:
# Invalid payload
raise HTTPException(status_code=400)
except stripe.SignatureVerificationError:
# Invalid signature
raise HTTPException(status_code=400)
if (
event["type"] == "checkout.session.completed"
or event["type"] == "checkout.session.async_payment_succeeded"
):
await _user_credit_model.fulfill_checkout(
session_id=event["data"]["object"]["id"]
)
return Response(status_code=200)
@v1_router.get(path="/credits/manage", dependencies=[Depends(auth_middleware)])
async def manage_payment_method(
user_id: Annotated[str, Depends(get_user_id)],
) -> dict[str, str]:
session = stripe.billing_portal.Session.create(
customer=await get_stripe_customer_id(user_id),
return_url=settings.config.frontend_base_url + "/marketplace/credits",
)
if not session:
raise HTTPException(
status_code=400, detail="Failed to create billing portal session"
)
return {"url": session.url}
@v1_router.get(path="/credits/transactions", dependencies=[Depends(auth_middleware)])
async def get_credit_history(
user_id: Annotated[str, Depends(get_user_id)],
transaction_time: datetime | None = None,
transaction_count_limit: int = 100,
) -> TransactionHistory:
if transaction_count_limit < 1 or transaction_count_limit > 1000:
raise ValueError("Transaction count limit must be between 1 and 1000")
return await _user_credit_model.get_transaction_history(
user_id=user_id,
transaction_time=transaction_time or datetime.max,
transaction_count_limit=transaction_count_limit,
)
return {"credits": max(await _user_credit_model.get_or_refill_credit(user_id), 0)}
########################################################
@@ -311,6 +180,11 @@ async def get_graph(
tags=["graphs"],
dependencies=[Depends(auth_middleware)],
)
@v1_router.get(
path="/templates/{graph_id}/versions",
tags=["templates", "graphs"],
dependencies=[Depends(auth_middleware)],
)
async def get_graph_all_versions(
graph_id: str, user_id: Annotated[str, Depends(get_user_id)]
) -> Sequence[graph_db.GraphModel]:
@@ -326,18 +200,41 @@ async def get_graph_all_versions(
async def create_new_graph(
create_graph: CreateGraph, user_id: Annotated[str, Depends(get_user_id)]
) -> graph_db.GraphModel:
graph = graph_db.make_graph_model(create_graph.graph, user_id)
return await do_create_graph(create_graph, is_template=False, user_id=user_id)
async def do_create_graph(
create_graph: CreateGraph,
is_template: bool,
# user_id doesn't have to be annotated like on other endpoints,
# because do_create_graph isn't used directly as an endpoint
user_id: str,
) -> graph_db.GraphModel:
if create_graph.graph:
graph = graph_db.make_graph_model(create_graph.graph, user_id)
elif create_graph.template_id:
# Create a new graph from a template
graph = await graph_db.get_graph(
create_graph.template_id,
create_graph.template_version,
template=True,
user_id=user_id,
)
if not graph:
raise HTTPException(
400, detail=f"Template #{create_graph.template_id} not found"
)
graph.version = 1
else:
raise HTTPException(
status_code=400, detail="Either graph or template_id must be provided."
)
graph.is_template = is_template
graph.is_active = not is_template
graph.reassign_ids(user_id=user_id, reassign_graph_id=True)
graph = await graph_db.create_graph(graph, user_id=user_id)
# Create a library agent for the new graph
await library_db.create_library_agent(
graph.id,
graph.version,
user_id,
)
graph = await on_graph_activate(
graph,
get_credentials=lambda id: integration_creds_manager.get(user_id, id),
@@ -364,6 +261,11 @@ async def delete_graph(
@v1_router.put(
path="/graphs/{graph_id}", tags=["graphs"], dependencies=[Depends(auth_middleware)]
)
@v1_router.put(
path="/templates/{graph_id}",
tags=["templates", "graphs"],
dependencies=[Depends(auth_middleware)],
)
async def update_graph(
graph_id: str,
graph: graph_db.Graph,
@@ -395,10 +297,6 @@ async def update_graph(
new_graph_version = await graph_db.create_graph(graph, user_id=user_id)
if new_graph_version.is_active:
# Keep the library agent up to date with the new active version
await library_db.update_agent_version_in_library(
user_id, graph.id, graph.version
)
def get_credentials(credentials_id: str) -> "Credentials | None":
return integration_creds_manager.get(user_id, credentials_id)
@@ -455,12 +353,6 @@ async def set_graph_active_version(
version=new_active_version,
user_id=user_id,
)
# Keep the library agent up to date with the new active version
await library_db.update_agent_version_in_library(
user_id, new_active_graph.id, new_active_graph.version
)
if current_active_graph and current_active_graph.version != new_active_version:
# Handle deactivation of the previously active version
await on_graph_deactivate(
@@ -470,23 +362,22 @@ async def set_graph_active_version(
@v1_router.post(
path="/graphs/{graph_id}/execute/{graph_version}",
path="/graphs/{graph_id}/execute",
tags=["graphs"],
dependencies=[Depends(auth_middleware)],
)
def execute_graph(
graph_id: str,
node_input: Annotated[dict[str, Any], Body(..., embed=True, default_factory=dict)],
node_input: dict[Any, Any],
user_id: Annotated[str, Depends(get_user_id)],
graph_version: Optional[int] = None,
) -> ExecuteGraphResponse:
) -> dict[str, Any]: # FIXME: add proper return type
try:
graph_exec = execution_manager_client().add_execution(
graph_id, node_input, user_id=user_id, graph_version=graph_version
graph_id, node_input, user_id=user_id
)
return ExecuteGraphResponse(graph_exec_id=graph_exec.graph_exec_id)
return {"id": graph_exec.graph_exec_id}
except Exception as e:
msg = str(e).encode().decode("unicode_escape")
msg = e.__str__().encode().decode("unicode_escape")
raise HTTPException(status_code=400, detail=msg)
@@ -537,6 +428,47 @@ async def get_graph_run_node_execution_results(
return await execution_db.get_execution_results(graph_exec_id)
########################################################
##################### Templates ########################
########################################################
@v1_router.get(
path="/templates",
tags=["graphs", "templates"],
dependencies=[Depends(auth_middleware)],
)
async def get_templates(
user_id: Annotated[str, Depends(get_user_id)]
) -> Sequence[graph_db.GraphModel]:
return await graph_db.get_graphs(filter_by="template", user_id=user_id)
@v1_router.get(
path="/templates/{graph_id}",
tags=["templates", "graphs"],
dependencies=[Depends(auth_middleware)],
)
async def get_template(
graph_id: str, version: int | None = None
) -> graph_db.GraphModel:
graph = await graph_db.get_graph(graph_id, version, template=True)
if not graph:
raise HTTPException(status_code=404, detail=f"Template #{graph_id} not found.")
return graph
@v1_router.post(
path="/templates",
tags=["templates", "graphs"],
dependencies=[Depends(auth_middleware)],
)
async def create_new_template(
create_graph: CreateGraph, user_id: Annotated[str, Depends(get_user_id)]
) -> graph_db.GraphModel:
return await do_create_graph(create_graph, is_template=True, user_id=user_id)
########################################################
##################### Schedules ########################
########################################################
@@ -546,7 +478,6 @@ class ScheduleCreationRequest(pydantic.BaseModel):
cron: str
input_data: dict[Any, Any]
graph_id: str
graph_version: int
@v1_router.post(
@@ -558,13 +489,10 @@ async def create_schedule(
user_id: Annotated[str, Depends(get_user_id)],
schedule: ScheduleCreationRequest,
) -> scheduler.JobInfo:
graph = await graph_db.get_graph(
schedule.graph_id, schedule.graph_version, user_id=user_id
)
graph = await graph_db.get_graph(schedule.graph_id, user_id=user_id)
if not graph:
raise HTTPException(
status_code=404,
detail=f"Graph #{schedule.graph_id} v.{schedule.graph_version} not found.",
status_code=404, detail=f"Graph #{schedule.graph_id} not found."
)
return await asyncio.to_thread(
@@ -617,6 +545,7 @@ def get_execution_schedules(
tags=["api-keys"],
dependencies=[Depends(auth_middleware)],
)
@feature_flag("api-keys-enabled")
async def create_api_key(
request: CreateAPIKeyRequest, user_id: Annotated[str, Depends(get_user_id)]
) -> CreateAPIKeyResponse:
@@ -640,6 +569,7 @@ async def create_api_key(
tags=["api-keys"],
dependencies=[Depends(auth_middleware)],
)
@feature_flag("api-keys-enabled")
async def get_api_keys(
user_id: Annotated[str, Depends(get_user_id)]
) -> list[APIKeyWithoutHash]:
@@ -657,6 +587,7 @@ async def get_api_keys(
tags=["api-keys"],
dependencies=[Depends(auth_middleware)],
)
@feature_flag("api-keys-enabled")
async def get_api_key(
key_id: str, user_id: Annotated[str, Depends(get_user_id)]
) -> APIKeyWithoutHash:
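Earlier in this file's diff, do_create_graph enforces an either/or contract: a CreateGraph request must carry either a full graph or a template_id, and the route raises a 400 otherwise. The same rule can be stated as a pydantic validator; a standalone sketch under that assumption (the model is a stand-in, the real CreateGraph performs this check in the route instead):

from pydantic import BaseModel, model_validator


class CreateGraphSketch(BaseModel):
    graph: dict | None = None  # stand-in for backend.data.graph.Graph
    template_id: str | None = None
    template_version: int | None = None

    @model_validator(mode="after")
    def _graph_or_template(self) -> "CreateGraphSketch":
        if self.graph is None and self.template_id is None:
            raise ValueError("Either graph or template_id must be provided.")
        return self


CreateGraphSketch(template_id="tmpl-1", template_version=2)  # validates fine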

View File

@@ -1,166 +1,103 @@
import logging
from typing import List
import prisma.errors
import prisma.fields
import prisma.models
import prisma.types
import backend.server.model
import backend.server.v2.library.model as library_model
import backend.server.v2.store.exceptions as store_exceptions
import backend.data.graph
import backend.data.includes
import backend.server.v2.library.model
import backend.server.v2.store.exceptions
logger = logging.getLogger(__name__)
async def get_library_agents(
user_id: str, search_query: str | None = None
) -> list[library_model.LibraryAgent]:
logger.debug(
f"Fetching library agents for user_id={user_id} search_query={search_query}"
)
if search_query and len(search_query.strip()) > 100:
logger.warning(f"Search query too long: {search_query}")
raise store_exceptions.DatabaseError("Search query is too long.")
where_clause: prisma.types.LibraryAgentWhereInput = {
"userId": user_id,
"isDeleted": False,
"isArchived": False,
}
if search_query:
where_clause["OR"] = [
{
"Agent": {
"is": {"name": {"contains": search_query, "mode": "insensitive"}}
}
},
{
"Agent": {
"is": {
"description": {"contains": search_query, "mode": "insensitive"}
}
}
},
]
user_id: str,
) -> List[backend.server.v2.library.model.LibraryAgent]:
"""
Returns all agents (AgentGraph) that belong to the user and all agents in their library (UserAgent table)
"""
logger.debug(f"Getting library agents for user {user_id}")
try:
library_agents = await prisma.models.LibraryAgent.prisma().find_many(
where=where_clause,
# Get agents created by user with nodes and links
user_created = await prisma.models.AgentGraph.prisma().find_many(
where=prisma.types.AgentGraphWhereInput(userId=user_id, isActive=True),
include=backend.data.includes.AGENT_GRAPH_INCLUDE,
)
# Get agents in user's library with nodes and links
library_agents = await prisma.models.UserAgent.prisma().find_many(
where=prisma.types.UserAgentWhereInput(
userId=user_id, isDeleted=False, isArchived=False
),
include={
"Agent": {
"include": {
"AgentNodes": {"include": {"Input": True, "Output": True}}
"AgentNodes": {
"include": {
"Input": True,
"Output": True,
"Webhook": True,
"AgentBlock": True,
}
}
}
}
},
order=[{"updatedAt": "desc"}],
)
logger.debug(f"Retrieved {len(library_agents)} agents for user_id={user_id}.")
return [library_model.LibraryAgent.from_db(agent) for agent in library_agents]
# Convert to Graph models first
graphs = []
# Add user created agents
for agent in user_created:
try:
graphs.append(backend.data.graph.GraphModel.from_db(agent))
except Exception as e:
logger.error(f"Error processing user created agent {agent.id}: {e}")
continue
# Add library agents
for agent in library_agents:
if agent.Agent:
try:
graphs.append(backend.data.graph.GraphModel.from_db(agent.Agent))
except Exception as e:
logger.error(f"Error processing library agent {agent.agentId}: {e}")
continue
# Convert Graph models to LibraryAgent models
result = []
for graph in graphs:
result.append(
backend.server.v2.library.model.LibraryAgent(
id=graph.id,
version=graph.version,
is_active=graph.is_active,
name=graph.name,
description=graph.description,
isCreatedByUser=any(a.id == graph.id for a in user_created),
input_schema=graph.input_schema,
output_schema=graph.output_schema,
)
)
logger.debug(f"Found {len(result)} library agents")
return result
except prisma.errors.PrismaError as e:
logger.error(f"Database error fetching library agents: {e}")
raise store_exceptions.DatabaseError("Unable to fetch library agents.")
async def create_library_agent(
agent_id: str, agent_version: int, user_id: str
) -> prisma.models.LibraryAgent:
"""
Adds an agent to the user's library (LibraryAgent table)
"""
try:
return await prisma.models.LibraryAgent.prisma().create(
data={
"userId": user_id,
"agentId": agent_id,
"agentVersion": agent_version,
"isCreatedByUser": False,
"useGraphIsActiveVersion": True,
}
)
except prisma.errors.PrismaError as e:
logger.error(f"Database error creating agent to library: {str(e)}")
raise store_exceptions.DatabaseError("Failed to create agent to library") from e
async def update_agent_version_in_library(
user_id: str, agent_id: str, agent_version: int
) -> None:
"""
Updates the agent version in the library
"""
try:
library_agent = await prisma.models.LibraryAgent.prisma().find_first_or_raise(
where={
"userId": user_id,
"agentId": agent_id,
"useGraphIsActiveVersion": True,
},
)
await prisma.models.LibraryAgent.prisma().update(
where={"id": library_agent.id},
data={
"Agent": {
"connect": {
"graphVersionId": {"id": agent_id, "version": agent_version}
},
},
},
)
except prisma.errors.PrismaError as e:
logger.error(f"Database error updating agent version in library: {str(e)}")
raise store_exceptions.DatabaseError(
"Failed to update agent version in library"
logger.error(f"Database error getting library agents: {str(e)}")
raise backend.server.v2.store.exceptions.DatabaseError(
"Failed to fetch library agents"
) from e
async def update_library_agent(
library_agent_id: str,
user_id: str,
auto_update_version: bool = False,
is_favorite: bool = False,
is_archived: bool = False,
is_deleted: bool = False,
) -> None:
async def add_agent_to_library(store_listing_version_id: str, user_id: str) -> None:
"""
Updates the library agent with the given fields
"""
try:
await prisma.models.LibraryAgent.prisma().update_many(
where={"id": library_agent_id, "userId": user_id},
data={
"useGraphIsActiveVersion": auto_update_version,
"isFavorite": is_favorite,
"isArchived": is_archived,
"isDeleted": is_deleted,
},
)
except prisma.errors.PrismaError as e:
logger.error(f"Database error updating library agent: {str(e)}")
raise store_exceptions.DatabaseError("Failed to update library agent") from e
async def delete_library_agent_by_graph_id(graph_id: str, user_id: str) -> None:
"""
Deletes a library agent for the given user
"""
try:
await prisma.models.LibraryAgent.prisma().delete_many(
where={"agentId": graph_id, "userId": user_id}
)
except prisma.errors.PrismaError as e:
logger.error(f"Database error deleting library agent: {str(e)}")
raise store_exceptions.DatabaseError("Failed to delete library agent") from e
async def add_store_agent_to_library(
store_listing_version_id: str, user_id: str
) -> None:
"""
Finds the agent from the store listing version and adds it to the user's library (LibraryAgent table)
Finds the agent from the store listing version and adds it to the user's library (UserAgent table)
if they don't already have it
"""
logger.debug(
@@ -179,7 +116,7 @@ async def add_store_agent_to_library(
logger.warning(
f"Store listing version not found: {store_listing_version_id}"
)
raise store_exceptions.AgentNotFoundError(
raise backend.server.v2.store.exceptions.AgentNotFoundError(
f"Store listing version {store_listing_version_id} not found"
)
@@ -189,10 +126,12 @@ async def add_store_agent_to_library(
logger.warning(
f"User {user_id} cannot add their own agent to their library"
)
raise store_exceptions.DatabaseError("Cannot add own agent to library")
raise backend.server.v2.store.exceptions.DatabaseError(
"Cannot add own agent to library"
)
# Check if user already has this agent
existing_user_agent = await prisma.models.LibraryAgent.prisma().find_first(
existing_user_agent = await prisma.models.UserAgent.prisma().find_first(
where={
"userId": user_id,
"agentId": agent.id,
@@ -206,134 +145,21 @@ async def add_store_agent_to_library(
)
return
# Create LibraryAgent entry
await prisma.models.LibraryAgent.prisma().create(
data={
"userId": user_id,
"agentId": agent.id,
"agentVersion": agent.version,
"isCreatedByUser": False,
}
# Create UserAgent entry
await prisma.models.UserAgent.prisma().create(
data=prisma.types.UserAgentCreateInput(
userId=user_id,
agentId=agent.id,
agentVersion=agent.version,
isCreatedByUser=False,
)
)
logger.debug(f"Added agent {agent.id} to library for user {user_id}")
except store_exceptions.AgentNotFoundError:
except backend.server.v2.store.exceptions.AgentNotFoundError:
raise
except prisma.errors.PrismaError as e:
logger.error(f"Database error adding agent to library: {str(e)}")
raise store_exceptions.DatabaseError("Failed to add agent to library") from e
##############################################
########### Presets DB Functions #############
##############################################
async def get_presets(
user_id: str, page: int, page_size: int
) -> library_model.LibraryAgentPresetResponse:
try:
presets = await prisma.models.AgentPreset.prisma().find_many(
where={"userId": user_id},
skip=page * page_size,
take=page_size,
)
total_items = await prisma.models.AgentPreset.prisma().count(
where={"userId": user_id},
)
total_pages = (total_items + page_size - 1) // page_size
presets = [
library_model.LibraryAgentPreset.from_db(preset) for preset in presets
]
return library_model.LibraryAgentPresetResponse(
presets=presets,
pagination=backend.server.model.Pagination(
total_items=total_items,
total_pages=total_pages,
current_page=page,
page_size=page_size,
),
)
except prisma.errors.PrismaError as e:
logger.error(f"Database error getting presets: {str(e)}")
raise store_exceptions.DatabaseError("Failed to fetch presets") from e
async def get_preset(
user_id: str, preset_id: str
) -> library_model.LibraryAgentPreset | None:
try:
preset = await prisma.models.AgentPreset.prisma().find_unique(
where={"id": preset_id}, include={"InputPresets": True}
)
if not preset or preset.userId != user_id:
return None
return library_model.LibraryAgentPreset.from_db(preset)
except prisma.errors.PrismaError as e:
logger.error(f"Database error getting preset: {str(e)}")
raise store_exceptions.DatabaseError("Failed to fetch preset") from e
async def upsert_preset(
user_id: str,
preset: library_model.CreateLibraryAgentPresetRequest,
preset_id: str | None = None,
) -> library_model.LibraryAgentPreset:
try:
if preset_id:
# Update existing preset
new_preset = await prisma.models.AgentPreset.prisma().update(
where={"id": preset_id},
data={
"name": preset.name,
"description": preset.description,
"isActive": preset.is_active,
"InputPresets": {
"create": [
{"name": name, "data": prisma.fields.Json(data)}
for name, data in preset.inputs.items()
]
},
},
include={"InputPresets": True},
)
if not new_preset:
raise ValueError(f"AgentPreset #{preset_id} not found")
else:
# Create new preset
new_preset = await prisma.models.AgentPreset.prisma().create(
data={
"userId": user_id,
"name": preset.name,
"description": preset.description,
"agentId": preset.agent_id,
"agentVersion": preset.agent_version,
"isActive": preset.is_active,
"InputPresets": {
"create": [
{"name": name, "data": prisma.fields.Json(data)}
for name, data in preset.inputs.items()
]
},
},
include={"InputPresets": True},
)
return library_model.LibraryAgentPreset.from_db(new_preset)
except prisma.errors.PrismaError as e:
logger.error(f"Database error creating preset: {str(e)}")
raise store_exceptions.DatabaseError("Failed to create preset") from e
async def delete_preset(user_id: str, preset_id: str) -> None:
try:
await prisma.models.AgentPreset.prisma().update_many(
where={"id": preset_id, "userId": user_id},
data={"isDeleted": True},
)
except prisma.errors.PrismaError as e:
logger.error(f"Database error deleting preset: {str(e)}")
raise store_exceptions.DatabaseError("Failed to delete preset") from e
raise backend.server.v2.store.exceptions.DatabaseError(
"Failed to add agent to library"
) from e
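The search-enabled variant of get_library_agents above builds a Prisma where-clause that matches the agent's name or description case-insensitively. Constructing that clause is plain dictionary work, shown standalone below (output printed for inspection):

def library_where_clause(user_id: str, search_query: str | None) -> dict:
    where: dict = {"userId": user_id, "isDeleted": False, "isArchived": False}
    if search_query:
        match = {"contains": search_query, "mode": "insensitive"}
        where["OR"] = [
            {"Agent": {"is": {"name": match}}},
            {"Agent": {"is": {"description": match}}},
        ]
    return where


print(library_where_clause("user-1", "newsletter"))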

View File

@@ -37,7 +37,7 @@ async def test_get_library_agents(mocker):
]
mock_library_agents = [
prisma.models.LibraryAgent(
prisma.models.UserAgent(
id="ua1",
userId="test-user",
agentId="agent2",
@@ -48,7 +48,6 @@ async def test_get_library_agents(mocker):
createdAt=datetime.now(),
updatedAt=datetime.now(),
isFavorite=False,
useGraphIsActiveVersion=True,
Agent=prisma.models.AgentGraph(
id="agent2",
version=1,
@@ -68,8 +67,8 @@ async def test_get_library_agents(mocker):
return_value=mock_user_created
)
mock_library_agent = mocker.patch("prisma.models.LibraryAgent.prisma")
mock_library_agent.return_value.find_many = mocker.AsyncMock(
mock_user_agent = mocker.patch("prisma.models.UserAgent.prisma")
mock_user_agent.return_value.find_many = mocker.AsyncMock(
return_value=mock_library_agents
)
@@ -77,16 +76,40 @@ async def test_get_library_agents(mocker):
result = await db.get_library_agents("test-user")
# Verify results
assert len(result) == 1
assert result[0].id == "ua1"
assert result[0].name == "Test Agent 2"
assert result[0].description == "Test Description 2"
assert result[0].is_created_by_user is False
assert result[0].is_latest_version is True
assert result[0].is_favorite is False
assert result[0].agent_id == "agent2"
assert result[0].agent_version == 1
assert result[0].preset_id is None
assert len(result) == 2
assert result[0].id == "agent1"
assert result[0].name == "Test Agent 1"
assert result[0].description == "Test Description 1"
assert result[0].isCreatedByUser is True
assert result[1].id == "agent2"
assert result[1].name == "Test Agent 2"
assert result[1].description == "Test Description 2"
assert result[1].isCreatedByUser is False
# Verify mocks called correctly
mock_agent_graph.return_value.find_many.assert_called_once_with(
where=prisma.types.AgentGraphWhereInput(userId="test-user", isActive=True),
include=backend.data.includes.AGENT_GRAPH_INCLUDE,
)
mock_user_agent.return_value.find_many.assert_called_once_with(
where=prisma.types.UserAgentWhereInput(
userId="test-user", isDeleted=False, isArchived=False
),
include={
"Agent": {
"include": {
"AgentNodes": {
"include": {
"Input": True,
"Output": True,
"Webhook": True,
"AgentBlock": True,
}
}
}
}
},
)
@pytest.mark.asyncio
@@ -129,26 +152,26 @@ async def test_add_agent_to_library(mocker):
return_value=mock_store_listing
)
mock_library_agent = mocker.patch("prisma.models.LibraryAgent.prisma")
mock_library_agent.return_value.find_first = mocker.AsyncMock(return_value=None)
mock_library_agent.return_value.create = mocker.AsyncMock()
mock_user_agent = mocker.patch("prisma.models.UserAgent.prisma")
mock_user_agent.return_value.find_first = mocker.AsyncMock(return_value=None)
mock_user_agent.return_value.create = mocker.AsyncMock()
# Call function
await db.add_store_agent_to_library("version123", "test-user")
await db.add_agent_to_library("version123", "test-user")
# Verify mocks called correctly
mock_store_listing_version.return_value.find_unique.assert_called_once_with(
where={"id": "version123"}, include={"Agent": True}
)
mock_library_agent.return_value.find_first.assert_called_once_with(
mock_user_agent.return_value.find_first.assert_called_once_with(
where={
"userId": "test-user",
"agentId": "agent1",
"agentVersion": 1,
}
)
mock_library_agent.return_value.create.assert_called_once_with(
data=prisma.types.LibraryAgentCreateInput(
mock_user_agent.return_value.create.assert_called_once_with(
data=prisma.types.UserAgentCreateInput(
userId="test-user", agentId="agent1", agentVersion=1, isCreatedByUser=False
)
)
@@ -166,7 +189,7 @@ async def test_add_agent_to_library_not_found(mocker):
# Call function and verify exception
with pytest.raises(backend.server.v2.store.exceptions.AgentNotFoundError):
await db.add_store_agent_to_library("version123", "test-user")
await db.add_agent_to_library("version123", "test-user")
# Verify mock called correctly
mock_store_listing_version.return_value.find_unique.assert_called_once_with(

View File

@@ -1,111 +1,16 @@
import datetime
from typing import Any
import typing
import prisma.models
import pydantic
import backend.data.block as block_model
import backend.data.graph as graph_model
import backend.server.model as server_model
class LibraryAgent(pydantic.BaseModel):
id: str # Changed from agent_id to match GraphMeta
agent_id: str
agent_version: int
preset_id: str | None
updated_at: datetime.datetime
version: int # Changed from agent_version to match GraphMeta
is_active: bool # Added to match GraphMeta
name: str
description: str
isCreatedByUser: bool
# Made input_schema and output_schema match GraphMeta's type
input_schema: dict[str, Any] # Should be BlockIOObjectSubSchema in frontend
output_schema: dict[str, Any] # Should be BlockIOObjectSubSchema in frontend
is_favorite: bool
is_created_by_user: bool
is_latest_version: bool
@staticmethod
def from_db(agent: prisma.models.LibraryAgent):
if not agent.Agent:
raise ValueError("AgentGraph is required")
graph = graph_model.GraphModel.from_db(agent.Agent)
agent_updated_at = agent.Agent.updatedAt
lib_agent_updated_at = agent.updatedAt
# Use the later of the graph's and the library agent's updated_at timestamps
updated_at = (
max(agent_updated_at, lib_agent_updated_at)
if agent_updated_at
else lib_agent_updated_at
)
return LibraryAgent(
id=agent.id,
agent_id=agent.agentId,
agent_version=agent.agentVersion,
updated_at=updated_at,
name=graph.name,
description=graph.description,
input_schema=graph.input_schema,
output_schema=graph.output_schema,
is_favorite=agent.isFavorite,
is_created_by_user=agent.isCreatedByUser,
is_latest_version=graph.is_active,
preset_id=agent.AgentPreset.id if agent.AgentPreset else None,
)
class LibraryAgentPreset(pydantic.BaseModel):
id: str
updated_at: datetime.datetime
agent_id: str
agent_version: int
name: str
description: str
is_active: bool
inputs: block_model.BlockInput
@staticmethod
def from_db(preset: prisma.models.AgentPreset):
input_data: block_model.BlockInput = {}
for preset_input in preset.InputPresets or []:
input_data[preset_input.name] = preset_input.data
return LibraryAgentPreset(
id=preset.id,
updated_at=preset.updatedAt,
agent_id=preset.agentId,
agent_version=preset.agentVersion,
name=preset.name,
description=preset.description,
is_active=preset.isActive,
inputs=input_data,
)
class LibraryAgentPresetResponse(pydantic.BaseModel):
presets: list[LibraryAgentPreset]
pagination: server_model.Pagination
class CreateLibraryAgentPresetRequest(pydantic.BaseModel):
name: str
description: str
inputs: block_model.BlockInput
agent_id: str
agent_version: int
is_active: bool
input_schema: dict[str, typing.Any] # Should be BlockIOObjectSubSchema in frontend
output_schema: dict[str, typing.Any] # Should be BlockIOObjectSubSchema in frontend

View File

@@ -1,36 +1,23 @@
import datetime
import prisma.fields
import prisma.models
import backend.data.block
import backend.server.model
import backend.server.v2.library.model
def test_library_agent():
agent = backend.server.v2.library.model.LibraryAgent(
id="test-agent-123",
agent_id="agent-123",
agent_version=1,
preset_id=None,
updated_at=datetime.datetime.now(),
version=1,
is_active=True,
name="Test Agent",
description="Test description",
isCreatedByUser=False,
input_schema={"type": "object", "properties": {}},
output_schema={"type": "object", "properties": {}},
is_favorite=False,
is_created_by_user=False,
is_latest_version=True,
)
assert agent.id == "test-agent-123"
assert agent.agent_id == "agent-123"
assert agent.agent_version == 1
assert agent.version == 1
assert agent.is_active is True
assert agent.name == "Test Agent"
assert agent.description == "Test description"
assert agent.is_favorite is False
assert agent.is_created_by_user is False
assert agent.is_latest_version is True
assert agent.isCreatedByUser is False
assert agent.input_schema == {"type": "object", "properties": {}}
assert agent.output_schema == {"type": "object", "properties": {}}
@@ -38,140 +25,19 @@ def test_library_agent():
def test_library_agent_with_user_created():
agent = backend.server.v2.library.model.LibraryAgent(
id="user-agent-456",
agent_id="agent-456",
agent_version=2,
preset_id=None,
updated_at=datetime.datetime.now(),
version=2,
is_active=True,
name="User Created Agent",
description="An agent created by the user",
isCreatedByUser=True,
input_schema={"type": "object", "properties": {}},
output_schema={"type": "object", "properties": {}},
is_favorite=False,
is_created_by_user=True,
is_latest_version=True,
)
assert agent.id == "user-agent-456"
assert agent.agent_id == "agent-456"
assert agent.agent_version == 2
assert agent.version == 2
assert agent.is_active is True
assert agent.name == "User Created Agent"
assert agent.description == "An agent created by the user"
assert agent.is_favorite is False
assert agent.is_created_by_user is True
assert agent.is_latest_version is True
assert agent.isCreatedByUser is True
assert agent.input_schema == {"type": "object", "properties": {}}
assert agent.output_schema == {"type": "object", "properties": {}}
def test_library_agent_preset():
preset = backend.server.v2.library.model.LibraryAgentPreset(
id="preset-123",
name="Test Preset",
description="Test preset description",
agent_id="test-agent-123",
agent_version=1,
is_active=True,
inputs={
"dictionary": {"key1": "Hello", "key2": "World"},
"selected_value": "key2",
},
updated_at=datetime.datetime.now(),
)
assert preset.id == "preset-123"
assert preset.name == "Test Preset"
assert preset.description == "Test preset description"
assert preset.agent_id == "test-agent-123"
assert preset.agent_version == 1
assert preset.is_active is True
assert preset.inputs == {
"dictionary": {"key1": "Hello", "key2": "World"},
"selected_value": "key2",
}
def test_library_agent_preset_response():
preset = backend.server.v2.library.model.LibraryAgentPreset(
id="preset-123",
name="Test Preset",
description="Test preset description",
agent_id="test-agent-123",
agent_version=1,
is_active=True,
inputs={
"dictionary": {"key1": "Hello", "key2": "World"},
"selected_value": "key2",
},
updated_at=datetime.datetime.now(),
)
pagination = backend.server.model.Pagination(
total_items=1, total_pages=1, current_page=1, page_size=10
)
response = backend.server.v2.library.model.LibraryAgentPresetResponse(
presets=[preset], pagination=pagination
)
assert len(response.presets) == 1
assert response.presets[0].id == "preset-123"
assert response.pagination.total_items == 1
assert response.pagination.total_pages == 1
assert response.pagination.current_page == 1
assert response.pagination.page_size == 10
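# The pagination fields round-trip independently here; for a consistent
# response one would expect total_pages == ceil(total_items / page_size),
# which holds above since ceil(1 / 10) == 1. (Whether the server enforces
# that relationship is an assumption, not something this test checks.)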
def test_create_library_agent_preset_request():
request = backend.server.v2.library.model.CreateLibraryAgentPresetRequest(
name="New Preset",
description="New preset description",
agent_id="agent-123",
agent_version=1,
is_active=True,
inputs={
"dictionary": {"key1": "Hello", "key2": "World"},
"selected_value": "key2",
},
)
assert request.name == "New Preset"
assert request.description == "New preset description"
assert request.agent_id == "agent-123"
assert request.agent_version == 1
assert request.is_active is True
assert request.inputs == {
"dictionary": {"key1": "Hello", "key2": "World"},
"selected_value": "key2",
}
def test_library_agent_from_db():
# Create mock DB agent
db_agent = prisma.models.AgentPreset(
id="test-agent-123",
createdAt=datetime.datetime.now(),
updatedAt=datetime.datetime.now(),
agentId="agent-123",
agentVersion=1,
name="Test Agent",
description="Test agent description",
isActive=True,
userId="test-user-123",
isDeleted=False,
InputPresets=[
prisma.models.AgentNodeExecutionInputOutput(
id="input-123",
time=datetime.datetime.now(),
name="input1",
data=prisma.fields.Json({"type": "string", "value": "test value"}),
)
],
)
# Convert to LibraryAgentPreset
agent = backend.server.v2.library.model.LibraryAgentPreset.from_db(db_agent)
assert agent.id == "test-agent-123"
assert agent.agent_version == 1
assert agent.is_active is True
assert agent.name == "Test Agent"
assert agent.description == "Test agent description"
assert agent.inputs == {"input1": {"type": "string", "value": "test value"}}
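# The assertions above rely on LibraryAgentPreset.from_db flattening each
# InputPresets row's (name, data) pair into the `inputs` dict. A minimal
# sketch of that mapping, assuming the Prisma field names used in the mock
# above; the helper name `preset_inputs_from_db` is hypothetical:
def preset_inputs_from_db(db_agent: prisma.models.AgentPreset) -> dict:
    # One entry per AgentNodeExecutionInputOutput row, keyed by its `name`,
    # with the JSON payload as the value.
    return {row.name: row.data for row in db_agent.InputPresets or []}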

View File

@@ -0,0 +1,123 @@
import logging
import typing
import autogpt_libs.auth.depends
import autogpt_libs.auth.middleware
import fastapi
import prisma
import backend.data.graph
import backend.integrations.creds_manager
import backend.integrations.webhooks.graph_lifecycle_hooks
import backend.server.v2.library.db
import backend.server.v2.library.model
logger = logging.getLogger(__name__)
router = fastapi.APIRouter()
integration_creds_manager = (
backend.integrations.creds_manager.IntegrationCredentialsManager()
)
@router.get(
"/agents",
tags=["library", "private"],
dependencies=[fastapi.Depends(autogpt_libs.auth.middleware.auth_middleware)],
)
async def get_library_agents(
user_id: typing.Annotated[
str, fastapi.Depends(autogpt_libs.auth.depends.get_user_id)
]
) -> typing.Sequence[backend.server.v2.library.model.LibraryAgent]:
"""
Get all agents in the user's library, including both created and saved agents.
"""
try:
agents = await backend.server.v2.library.db.get_library_agents(user_id)
return agents
except Exception:
logger.exception("Exception occurred whilst getting library agents")
raise fastapi.HTTPException(
status_code=500, detail="Failed to get library agents"
)
@router.post(
"/agents/{store_listing_version_id}",
tags=["library", "private"],
dependencies=[fastapi.Depends(autogpt_libs.auth.middleware.auth_middleware)],
status_code=201,
)
async def add_agent_to_library(
store_listing_version_id: str,
user_id: typing.Annotated[
str, fastapi.Depends(autogpt_libs.auth.depends.get_user_id)
],
) -> fastapi.Response:
"""
Add an agent from the store to the user's library.
Args:
store_listing_version_id (str): ID of the store listing version to add
user_id (str): ID of the authenticated user
Returns:
fastapi.Response: 201 status code on success
Raises:
HTTPException: If there is an error adding the agent to the library
"""
try:
# Get the graph from the store listing
store_listing_version = (
await prisma.models.StoreListingVersion.prisma().find_unique(
where={"id": store_listing_version_id}, include={"Agent": True}
)
)
if not store_listing_version or not store_listing_version.Agent:
raise fastapi.HTTPException(
status_code=404,
detail=f"Store listing version {store_listing_version_id} not found",
)
agent = store_listing_version.Agent
if agent.userId == user_id:
raise fastapi.HTTPException(
status_code=400, detail="Cannot add own agent to library"
)
# Create a new graph from the template
graph = await backend.data.graph.get_graph(
agent.id, agent.version, template=True, user_id=user_id
)
if not graph:
raise fastapi.HTTPException(
status_code=404, detail=f"Agent {agent.id} not found"
)
# Create a deep copy with new IDs
graph.version = 1
graph.is_template = False
graph.is_active = True
graph.reassign_ids(user_id=user_id, reassign_graph_id=True)
# Save the new graph
graph = await backend.data.graph.create_graph(graph, user_id=user_id)
graph = (
await backend.integrations.webhooks.graph_lifecycle_hooks.on_graph_activate(
graph,
get_credentials=lambda id: integration_creds_manager.get(user_id, id),
)
)
return fastapi.Response(status_code=201)
except Exception:
logger.exception("Exception occurred whilst adding agent to library")
raise fastapi.HTTPException(
status_code=500, detail="Failed to add agent to library"
)
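# A hedged usage sketch for the endpoint above: POSTing a store listing
# version ID copies that agent into the caller's library. The base URL,
# route prefix, and token below are placeholders; only the path shape
# comes from the route definition above.
import httpx

def add_listing_to_library(listing_version_id: str, token: str) -> None:
    # A 201 response means the graph was copied and activated for this user.
    response = httpx.post(
        f"https://example.invalid/library/agents/{listing_version_id}",
        headers={"Authorization": f"Bearer {token}"},
    )
    response.raise_for_status()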

View File

@@ -1,9 +0,0 @@
import fastapi
from .agents import router as agents_router
from .presets import router as presets_router
router = fastapi.APIRouter()
router.include_router(presets_router)
router.include_router(agents_router)

View File

@@ -1,138 +0,0 @@
import logging
from typing import Annotated, Sequence
import autogpt_libs.auth as autogpt_auth_lib
import fastapi
import backend.server.v2.library.db as library_db
import backend.server.v2.library.model as library_model
import backend.server.v2.store.exceptions as store_exceptions
logger = logging.getLogger(__name__)
router = fastapi.APIRouter()
@router.get(
"/agents",
tags=["library", "private"],
dependencies=[fastapi.Depends(autogpt_auth_lib.auth_middleware)],
)
async def get_library_agents(
user_id: Annotated[str, fastapi.Depends(autogpt_auth_lib.depends.get_user_id)]
) -> Sequence[library_model.LibraryAgent]:
"""
Get all agents in the user's library, including both created and saved agents.
"""
try:
agents = await library_db.get_library_agents(user_id)
return agents
except Exception as e:
logger.exception(f"Exception occurred whilst getting library agents: {e}")
raise fastapi.HTTPException(
status_code=500, detail="Failed to get library agents"
)
@router.post(
"/agents/{store_listing_version_id}",
tags=["library", "private"],
dependencies=[fastapi.Depends(autogpt_auth_lib.auth_middleware)],
status_code=201,
)
async def add_agent_to_library(
store_listing_version_id: str,
user_id: Annotated[str, fastapi.Depends(autogpt_auth_lib.depends.get_user_id)],
) -> fastapi.Response:
"""
Add an agent from the store to the user's library.
Args:
store_listing_version_id (str): ID of the store listing version to add
user_id (str): ID of the authenticated user
Returns:
fastapi.Response: 201 status code on success
Raises:
HTTPException: If there is an error adding the agent to the library
"""
try:
# Use the database function to add the agent to the library
await library_db.add_store_agent_to_library(store_listing_version_id, user_id)
return fastapi.Response(status_code=201)
except store_exceptions.AgentNotFoundError:
raise fastapi.HTTPException(
status_code=404,
detail=f"Store listing version {store_listing_version_id} not found",
)
except store_exceptions.DatabaseError as e:
logger.exception(f"Database error occurred whilst adding agent to library: {e}")
raise fastapi.HTTPException(
status_code=500, detail="Failed to add agent to library"
)
except Exception as e:
logger.exception(
f"Unexpected exception occurred whilst adding agent to library: {e}"
)
raise fastapi.HTTPException(
status_code=500, detail="Failed to add agent to library"
)
@router.put(
"/agents/{library_agent_id}",
tags=["library", "private"],
dependencies=[fastapi.Depends(autogpt_auth_lib.auth_middleware)],
status_code=204,
)
async def update_library_agent(
library_agent_id: str,
user_id: Annotated[str, fastapi.Depends(autogpt_auth_lib.depends.get_user_id)],
auto_update_version: bool = False,
is_favorite: bool = False,
is_archived: bool = False,
is_deleted: bool = False,
) -> fastapi.Response:
"""
Update the library agent with the given fields.
Args:
library_agent_id (str): ID of the library agent to update
user_id (str): ID of the authenticated user
auto_update_version (bool): Whether to auto-update the agent version
is_favorite (bool): Whether the agent is marked as favorite
is_archived (bool): Whether the agent is archived
is_deleted (bool): Whether the agent is deleted
Returns:
fastapi.Response: 204 status code on success
Raises:
HTTPException: If there is an error updating the library agent
"""
try:
# Use the database function to update the library agent
await library_db.update_library_agent(
library_agent_id,
user_id,
auto_update_version,
is_favorite,
is_archived,
is_deleted,
)
return fastapi.Response(status_code=204)
except store_exceptions.DatabaseError as e:
logger.exception(f"Database error occurred whilst updating library agent: {e}")
raise fastapi.HTTPException(
status_code=500, detail="Failed to update library agent"
)
except Exception as e:
logger.exception(
f"Unexpected exception occurred whilst updating library agent: {e}"
)
raise fastapi.HTTPException(
status_code=500, detail="Failed to update library agent"
)

View File

@@ -1,130 +0,0 @@
import logging
from typing import Annotated, Any
import autogpt_libs.auth as autogpt_auth_lib
import autogpt_libs.utils.cache
import fastapi
import backend.executor
import backend.server.v2.library.db as library_db
import backend.server.v2.library.model as library_model
import backend.util.service
logger = logging.getLogger(__name__)
router = fastapi.APIRouter()
@autogpt_libs.utils.cache.thread_cached
def execution_manager_client() -> backend.executor.ExecutionManager:
return backend.util.service.get_service_client(backend.executor.ExecutionManager)
@router.get("/presets")
async def get_presets(
user_id: Annotated[str, fastapi.Depends(autogpt_auth_lib.depends.get_user_id)],
page: int = 1,
page_size: int = 10,
) -> library_model.LibraryAgentPresetResponse:
try:
presets = await library_db.get_presets(user_id, page, page_size)
return presets
except Exception as e:
logger.exception(f"Exception occurred whilst getting presets: {e}")
raise fastapi.HTTPException(status_code=500, detail="Failed to get presets")
@router.get("/presets/{preset_id}")
async def get_preset(
preset_id: str,
user_id: Annotated[str, fastapi.Depends(autogpt_auth_lib.depends.get_user_id)],
) -> library_model.LibraryAgentPreset:
try:
preset = await library_db.get_preset(user_id, preset_id)
if not preset:
raise fastapi.HTTPException(
status_code=404,
detail=f"Preset {preset_id} not found",
)
return preset
except Exception as e:
logger.exception(f"Exception occurred whilst getting preset: {e}")
raise fastapi.HTTPException(status_code=500, detail="Failed to get preset")
@router.post("/presets")
async def create_preset(
preset: library_model.CreateLibraryAgentPresetRequest,
user_id: Annotated[str, fastapi.Depends(autogpt_auth_lib.depends.get_user_id)],
) -> library_model.LibraryAgentPreset:
try:
return await library_db.upsert_preset(user_id, preset)
except Exception as e:
logger.exception(f"Exception occurred whilst creating preset: {e}")
raise fastapi.HTTPException(status_code=500, detail="Failed to create preset")
@router.put("/presets/{preset_id}")
async def update_preset(
preset_id: str,
preset: library_model.CreateLibraryAgentPresetRequest,
user_id: Annotated[str, fastapi.Depends(autogpt_auth_lib.depends.get_user_id)],
) -> library_model.LibraryAgentPreset:
try:
return await library_db.upsert_preset(user_id, preset, preset_id)
except Exception as e:
logger.exception(f"Exception occurred whilst updating preset: {e}")
raise fastapi.HTTPException(status_code=500, detail="Failed to update preset")
@router.delete("/presets/{preset_id}")
async def delete_preset(
preset_id: str,
user_id: Annotated[str, fastapi.Depends(autogpt_auth_lib.depends.get_user_id)],
):
try:
await library_db.delete_preset(user_id, preset_id)
return fastapi.Response(status_code=204)
except Exception as e:
logger.exception(f"Exception occurred whilst deleting preset: {e}")
raise fastapi.HTTPException(status_code=500, detail="Failed to delete preset")
@router.post(
path="/presets/{preset_id}/execute",
tags=["presets"],
dependencies=[fastapi.Depends(autogpt_auth_lib.auth_middleware)],
)
async def execute_preset(
graph_id: str,
graph_version: int,
preset_id: str,
node_input: Annotated[
dict[str, Any], fastapi.Body(embed=True, default_factory=dict)
],
user_id: Annotated[str, fastapi.Depends(autogpt_auth_lib.depends.get_user_id)],
) -> dict[str, Any]: # FIXME: add proper return type
try:
preset = await library_db.get_preset(user_id, preset_id)
if not preset:
raise fastapi.HTTPException(status_code=404, detail="Preset not found")
logger.debug(f"Preset inputs: {preset.inputs}")
# Merge input overrides with preset inputs
merged_node_input = preset.inputs | node_input
execution = execution_manager_client().add_execution(
graph_id=graph_id,
graph_version=graph_version,
data=merged_node_input,
user_id=user_id,
preset_id=preset_id,
)
logger.debug(f"Execution added: {execution} with input: {merged_node_input}")
return {"id": execution.graph_exec_id}
except Exception as e:
msg = str(e).encode().decode("unicode_escape")
raise fastapi.HTTPException(status_code=400, detail=msg)
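# The merge above uses Python's dict-union operator: keys present in
# `node_input` override the stored preset values, and everything else
# falls through unchanged. A tiny illustration with made-up keys:
preset_inputs = {"dictionary": {"key1": "Hello"}, "selected_value": "key1"}
overrides = {"selected_value": "key2"}
assert (preset_inputs | overrides) == {
    "dictionary": {"key1": "Hello"},
    "selected_value": "key2",
}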

View File

@@ -1,16 +1,16 @@
import datetime
import autogpt_libs.auth as autogpt_auth_lib
import autogpt_libs.auth.depends
import autogpt_libs.auth.middleware
import fastapi
import fastapi.testclient
import pytest
import pytest_mock
import backend.server.v2.library.model as library_model
from backend.server.v2.library.routes import router as library_router
import backend.server.v2.library.db
import backend.server.v2.library.model
import backend.server.v2.library.routes
app = fastapi.FastAPI()
app.include_router(library_router)
app.include_router(backend.server.v2.library.routes.router)
client = fastapi.testclient.TestClient(app)
@@ -25,37 +25,31 @@ def override_get_user_id():
return "test-user-id"
app.dependency_overrides[autogpt_auth_lib.auth_middleware] = override_auth_middleware
app.dependency_overrides[autogpt_auth_lib.depends.get_user_id] = override_get_user_id
app.dependency_overrides[autogpt_libs.auth.middleware.auth_middleware] = (
override_auth_middleware
)
app.dependency_overrides[autogpt_libs.auth.depends.get_user_id] = override_get_user_id
def test_get_library_agents_success(mocker: pytest_mock.MockFixture):
mocked_value = [
library_model.LibraryAgent(
backend.server.v2.library.model.LibraryAgent(
id="test-agent-1",
agent_id="test-agent-1",
agent_version=1,
preset_id="preset-1",
updated_at=datetime.datetime(2023, 1, 1, 0, 0, 0),
is_favorite=False,
is_created_by_user=True,
is_latest_version=True,
version=1,
is_active=True,
name="Test Agent 1",
description="Test Description 1",
isCreatedByUser=True,
input_schema={"type": "object", "properties": {}},
output_schema={"type": "object", "properties": {}},
),
library_model.LibraryAgent(
backend.server.v2.library.model.LibraryAgent(
id="test-agent-2",
agent_id="test-agent-2",
agent_version=1,
preset_id="preset-2",
updated_at=datetime.datetime(2023, 1, 1, 0, 0, 0),
is_favorite=False,
is_created_by_user=False,
is_latest_version=True,
version=1,
is_active=True,
name="Test Agent 2",
description="Test Description 2",
isCreatedByUser=False,
input_schema={"type": "object", "properties": {}},
output_schema={"type": "object", "properties": {}},
),
@@ -67,13 +61,14 @@ def test_get_library_agents_success(mocker: pytest_mock.MockFixture):
assert response.status_code == 200
data = [
library_model.LibraryAgent.model_validate(agent) for agent in response.json()
backend.server.v2.library.model.LibraryAgent.model_validate(agent)
for agent in response.json()
]
assert len(data) == 2
assert data[0].agent_id == "test-agent-1"
assert data[0].is_created_by_user is True
assert data[1].agent_id == "test-agent-2"
assert data[1].is_created_by_user is False
assert data[0].id == "test-agent-1"
assert data[0].isCreatedByUser is True
assert data[1].id == "test-agent-2"
assert data[1].isCreatedByUser is False
mock_db_call.assert_called_once_with("test-user-id")

View File

@@ -1,4 +1,5 @@
import logging
import random
from datetime import datetime
from typing import Optional
@@ -16,25 +17,6 @@ from backend.data.graph import GraphModel
logger = logging.getLogger(__name__)
def sanitize_query(query: str | None) -> str | None:
if query is None:
return query
query = query.strip()[:100]
return (
query.replace("\\", "\\\\")
.replace("%", "\\%")
.replace("_", "\\_")
.replace("[", "\\[")
.replace("]", "\\]")
.replace("'", "\\'")
.replace('"', '\\"')
.replace(";", "\\;")
.replace("--", "\\--")
.replace("/*", "\\/*")
.replace("*/", "\\*/")
)
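# A quick illustration of the escaping above, using a made-up query: each
# SQL LIKE wildcard and comment marker gains a backslash so it matches
# literally rather than being interpreted.
assert sanitize_query("50% off; -- drop") == "50\\% off\\; \\-- drop"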
async def get_store_agents(
featured: bool = False,
creator: str | None = None,
@@ -47,7 +29,29 @@ async def get_store_agents(
logger.debug(
f"Getting store agents. featured={featured}, creator={creator}, sorted_by={sorted_by}, search={search_query}, category={category}, page={page}"
)
sanitized_query = sanitize_query(search_query)
sanitized_query = None
# Sanitize and validate search query by escaping special characters
if search_query is not None:
sanitized_query = search_query.strip()
if not sanitized_query or len(sanitized_query) > 100: # Reasonable length limit
raise backend.server.v2.store.exceptions.DatabaseError(
f"Invalid search query: len({len(sanitized_query)}) query: {search_query}"
)
# Escape special SQL characters
sanitized_query = (
sanitized_query.replace("\\", "\\\\")
.replace("%", "\\%")
.replace("_", "\\_")
.replace("[", "\\[")
.replace("]", "\\]")
.replace("'", "\\'")
.replace('"', '\\"')
.replace(";", "\\;")
.replace("--", "\\--")
.replace("/*", "\\/*")
.replace("*/", "\\*/")
)
where_clause = {}
if featured:
@@ -89,8 +93,8 @@ async def get_store_agents(
slug=agent.slug,
agent_name=agent.agent_name,
agent_image=agent.agent_image[0] if agent.agent_image else "",
creator=agent.creator_username or "Needs Profile",
creator_avatar=agent.creator_avatar or "",
creator=agent.creator_username,
creator_avatar=agent.creator_avatar,
sub_heading=agent.sub_heading,
description=agent.description,
runs=agent.runs,
@@ -110,7 +114,7 @@ async def get_store_agents(
),
)
except Exception as e:
logger.error(f"Error getting store agents: {e}")
logger.error(f"Error getting store agents: {str(e)}")
raise backend.server.v2.store.exceptions.DatabaseError(
"Failed to fetch store agents"
) from e
@@ -152,7 +156,7 @@ async def get_store_agent_details(
except backend.server.v2.store.exceptions.AgentNotFoundError:
raise
except Exception as e:
logger.error(f"Error getting store agent details: {e}")
logger.error(f"Error getting store agent details: {str(e)}")
raise backend.server.v2.store.exceptions.DatabaseError(
"Failed to fetch agent details"
) from e
@@ -266,7 +270,7 @@ async def get_store_creators(
),
)
except Exception as e:
logger.error(f"Error getting store creators: {e}")
logger.error(f"Error getting store creators: {str(e)}")
raise backend.server.v2.store.exceptions.DatabaseError(
"Failed to fetch store creators"
) from e
@@ -303,7 +307,7 @@ async def get_store_creator_details(
except backend.server.v2.store.exceptions.CreatorNotFoundError:
raise
except Exception as e:
logger.error(f"Error getting store creator details: {e}")
logger.error(f"Error getting store creator details: {str(e)}")
raise backend.server.v2.store.exceptions.DatabaseError(
"Failed to fetch creator details"
) from e
@@ -321,10 +325,7 @@ async def get_store_submissions(
where = prisma.types.StoreSubmissionWhereInput(user_id=user_id)
# Query submissions from database
submissions = await prisma.models.StoreSubmission.prisma().find_many(
where=where,
skip=skip,
take=page_size,
order=[{"date_submitted": "desc"}],
where=where, skip=skip, take=page_size, order=[{"date_submitted": "desc"}]
)
# Get total count for pagination
@@ -362,7 +363,7 @@ async def get_store_submissions(
)
except Exception as e:
logger.error(f"Error fetching store submissions: {e}")
logger.error(f"Error fetching store submissions: {str(e)}")
# Return empty response rather than exposing internal errors
return backend.server.v2.store.model.StoreSubmissionsResponse(
submissions=[],
@@ -404,7 +405,9 @@ async def delete_store_submission(
)
# Delete the submission
await prisma.models.StoreListing.prisma().delete(where={"id": submission.id})
await prisma.models.StoreListing.prisma().delete(
where=prisma.types.StoreListingWhereUniqueInput(id=submission.id)
)
logger.debug(
f"Successfully deleted submission {submission_id} for user {user_id}"
@@ -412,7 +415,7 @@ async def delete_store_submission(
return True
except Exception as e:
logger.error(f"Error deleting store submission: {e}")
logger.error(f"Error deleting store submission: {str(e)}")
return False
@@ -501,15 +504,7 @@ async def create_store_submission(
"subHeading": sub_heading,
}
},
},
include={"StoreListingVersions": True},
)
store_listing_version_id = (
listing.StoreListingVersions[0].id
if listing.StoreListingVersions is not None
and len(listing.StoreListingVersions) > 0
else None
}
)
logger.debug(f"Created store listing for agent {agent_id}")
@@ -526,7 +521,6 @@ async def create_store_submission(
status=prisma.enums.SubmissionStatus.PENDING,
runs=0,
rating=0.0,
store_listing_version_id=store_listing_version_id,
)
except (
@@ -535,7 +529,7 @@ async def create_store_submission(
):
raise
except prisma.errors.PrismaError as e:
logger.error(f"Database error creating store submission: {e}")
logger.error(f"Database error creating store submission: {str(e)}")
raise backend.server.v2.store.exceptions.DatabaseError(
"Failed to create store submission"
) from e
@@ -575,7 +569,7 @@ async def create_store_review(
)
except prisma.errors.PrismaError as e:
logger.error(f"Database error creating store review: {e}")
logger.error(f"Database error creating store review: {str(e)}")
raise backend.server.v2.store.exceptions.DatabaseError(
"Failed to create store review"
) from e
@@ -583,7 +577,7 @@ async def create_store_review(
async def get_user_profile(
user_id: str,
) -> backend.server.v2.store.model.ProfileDetails | None:
) -> backend.server.v2.store.model.ProfileDetails:
logger.debug(f"Getting user profile for {user_id}")
try:
@@ -592,7 +586,25 @@ async def get_user_profile(
)
if not profile:
return None
logger.warning(f"Profile not found for user {user_id}")
new_profile = await prisma.models.Profile.prisma().create(
data=prisma.types.ProfileCreateInput(
userId=user_id,
name="No Profile Data",
username=f"{random.choice(['happy', 'clever', 'swift', 'bright', 'wise'])}-{random.choice(['fox', 'wolf', 'bear', 'eagle', 'owl'])}_{random.randint(1000,9999)}".lower(),
description="No Profile Data",
links=[],
avatarUrl="",
)
)
return backend.server.v2.store.model.ProfileDetails(
name=new_profile.name,
username=new_profile.username,
description=new_profile.description,
links=new_profile.links,
avatar_url=new_profile.avatarUrl,
)
return backend.server.v2.store.model.ProfileDetails(
name=profile.name,
username=profile.username,
@@ -601,90 +613,115 @@ async def get_user_profile(
avatar_url=profile.avatarUrl,
)
except Exception as e:
logger.error("Error getting user profile: %s", e)
raise backend.server.v2.store.exceptions.DatabaseError(
"Failed to get user profile"
) from e
logger.error(f"Error getting user profile: {str(e)}")
return backend.server.v2.store.model.ProfileDetails(
name="No Profile Data",
username="No Profile Data",
description="No Profile Data",
links=[],
avatar_url="",
)
async def update_profile(
async def update_or_create_profile(
user_id: str, profile: backend.server.v2.store.model.Profile
) -> backend.server.v2.store.model.CreatorDetails:
"""
Update the store profile for a user or create a new one if it doesn't exist.
Update the store profile for a user. Creates a new profile if one doesn't exist.
Only allows updating if the user_id matches the owning user.
If a field is None, it will not overwrite the existing value in the case of an update.
Args:
user_id: ID of the authenticated user
profile: Updated profile details
Returns:
CreatorDetails: The updated or created profile details
CreatorDetails: The updated profile
Raises:
DatabaseError: If there's an issue updating or creating the profile
HTTPException: If user is not authorized to update this profile
DatabaseError: If profile cannot be updated due to database issues
"""
logger.info("Updating profile for user %s with data: %s", user_id, profile)
logger.info(f"Updating profile for user {user_id} data: {profile}")
try:
# Sanitize username to allow only letters, numbers, and hyphens
# Sanitize username to only allow letters and hyphens
username = "".join(
c if c.isalpha() or c == "-" or c.isnumeric() else ""
for c in profile.username
).lower()
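# For illustration: a hypothetical input "My_User!42" becomes "myuser42";
# every character that is not a letter, digit, or hyphen is dropped, and
# the result is lower-cased.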
# Check if profile exists for the given user_id
existing_profile = await prisma.models.Profile.prisma().find_first(
where={"userId": user_id}
)
# If no profile exists, create a new one
if not existing_profile:
raise backend.server.v2.store.exceptions.ProfileNotFoundError(
f"Profile not found for user {user_id}. This should not be possible."
logger.debug(
f"No existing profile found. Creating new profile for user {user_id}"
)
# Create new profile since one doesn't exist
new_profile = await prisma.models.Profile.prisma().create(
data={
"userId": user_id,
"name": profile.name,
"username": username,
"description": profile.description,
"links": profile.links or [],
"avatarUrl": profile.avatar_url,
"isFeatured": False,
}
)
# Verify that the user is authorized to update this profile
if existing_profile.userId != user_id:
logger.error(
"Unauthorized update attempt for profile %s by user %s",
existing_profile.userId,
user_id,
return backend.server.v2.store.model.CreatorDetails(
name=new_profile.name,
username=new_profile.username,
description=new_profile.description,
links=new_profile.links,
avatar_url=new_profile.avatarUrl or "",
agent_rating=0.0,
agent_runs=0,
top_categories=[],
)
raise backend.server.v2.store.exceptions.DatabaseError(
f"Unauthorized update attempt for profile {existing_profile.id} by user {user_id}"
else:
logger.debug(f"Updating existing profile for user {user_id}")
# Update only provided fields for the existing profile
update_data = {}
if profile.name is not None:
update_data["name"] = profile.name
if profile.username is not None:
update_data["username"] = username
if profile.description is not None:
update_data["description"] = profile.description
if profile.links is not None:
update_data["links"] = profile.links
if profile.avatar_url is not None:
update_data["avatarUrl"] = profile.avatar_url
# Update the existing profile
updated_profile = await prisma.models.Profile.prisma().update(
where={"id": existing_profile.id},
data=prisma.types.ProfileUpdateInput(**update_data),
)
if updated_profile is None:
logger.error(f"Failed to update profile for user {user_id}")
raise backend.server.v2.store.exceptions.DatabaseError(
"Failed to update profile"
)
logger.debug("Updating existing profile for user %s", user_id)
# Prepare update data, only including non-None values
update_data = {}
if profile.name is not None:
update_data["name"] = profile.name
if profile.username is not None:
update_data["username"] = username
if profile.description is not None:
update_data["description"] = profile.description
if profile.links is not None:
update_data["links"] = profile.links
if profile.avatar_url is not None:
update_data["avatarUrl"] = profile.avatar_url
# Update the existing profile
updated_profile = await prisma.models.Profile.prisma().update(
where={"id": existing_profile.id},
data=prisma.types.ProfileUpdateInput(**update_data),
)
if updated_profile is None:
logger.error("Failed to update profile for user %s", user_id)
raise backend.server.v2.store.exceptions.DatabaseError(
"Failed to update profile"
return backend.server.v2.store.model.CreatorDetails(
name=updated_profile.name,
username=updated_profile.username,
description=updated_profile.description,
links=updated_profile.links,
avatar_url=updated_profile.avatarUrl or "",
agent_rating=0.0,
agent_runs=0,
top_categories=[],
)
return backend.server.v2.store.model.CreatorDetails(
name=updated_profile.name,
username=updated_profile.username,
description=updated_profile.description,
links=updated_profile.links,
avatar_url=updated_profile.avatarUrl or "",
agent_rating=0.0,
agent_runs=0,
top_categories=[],
)
except prisma.errors.PrismaError as e:
logger.error("Database error updating profile: %s", e)
logger.error(f"Database error updating profile: {str(e)}")
raise backend.server.v2.store.exceptions.DatabaseError(
"Failed to update profile"
) from e
@@ -749,7 +786,7 @@ async def get_my_agents(
),
)
except Exception as e:
logger.error(f"Error getting my agents: {e}")
logger.error(f"Error getting my agents: {str(e)}")
raise backend.server.v2.store.exceptions.DatabaseError(
"Failed to fetch my agents"
) from e
@@ -772,17 +809,15 @@ async def get_agent(
detail=f"Store listing version {store_listing_version_id} not found",
)
graph_id = store_listing_version.agentId
graph_version = store_listing_version.agentVersion
graph = await backend.data.graph.get_graph(graph_id, graph_version)
agent = store_listing_version.Agent
graph = await backend.data.graph.get_graph(
agent.id, agent.version, template=True
)
if not graph:
raise fastapi.HTTPException(
status_code=404,
detail=(
f"Agent #{graph_id} not found "
f"for store listing version #{store_listing_version_id}"
),
status_code=404, detail=f"Agent {agent.id} not found"
)
graph.version = 1
@@ -793,72 +828,7 @@ async def get_agent(
return graph
except Exception as e:
logger.error(f"Error getting agent: {e}")
logger.error(f"Error getting agent: {str(e)}")
raise backend.server.v2.store.exceptions.DatabaseError(
"Failed to fetch agent"
) from e
async def review_store_submission(
store_listing_version_id: str, is_approved: bool, comments: str, reviewer_id: str
) -> prisma.models.StoreListingSubmission:
"""Review a store listing submission."""
try:
store_listing_version = (
await prisma.models.StoreListingVersion.prisma().find_unique(
where={"id": store_listing_version_id},
include={"StoreListing": True},
)
)
if not store_listing_version or not store_listing_version.StoreListing:
raise fastapi.HTTPException(
status_code=404,
detail=f"Store listing version {store_listing_version_id} not found",
)
if is_approved:
await prisma.models.StoreListing.prisma().update(
where={"id": store_listing_version.StoreListing.id},
data={"isApproved": True},
)
submission_status = (
prisma.enums.SubmissionStatus.APPROVED
if is_approved
else prisma.enums.SubmissionStatus.REJECTED
)
update_data: prisma.types.StoreListingSubmissionUpdateInput = {
"Status": submission_status,
"reviewComments": comments,
"Reviewer": {"connect": {"id": reviewer_id}},
"StoreListing": {"connect": {"id": store_listing_version.StoreListing.id}},
}
create_data: prisma.types.StoreListingSubmissionCreateInput = {
**update_data,
"StoreListingVersion": {"connect": {"id": store_listing_version_id}},
}
submission = await prisma.models.StoreListingSubmission.prisma().upsert(
where={"storeListingVersionId": store_listing_version_id},
data={
"create": create_data,
"update": update_data,
},
)
if not submission:
raise fastapi.HTTPException( # FIXME: don't return HTTP exceptions here
status_code=404,
detail=f"Store listing submission {store_listing_version_id} not found",
)
return submission
except Exception as e:
logger.error(f"Could not create store submission review: {e}")
raise backend.server.v2.store.exceptions.DatabaseError(
"Failed to create store submission review"
) from e

View File

@@ -158,26 +158,6 @@ async def test_create_store_submission(mocker):
agentId="agent-id",
agentVersion=1,
owningUserId="user-id",
StoreListingVersions=[
prisma.models.StoreListingVersion(
id="version-id",
agentId="agent-id",
agentVersion=1,
slug="test-agent",
name="Test Agent",
description="Test description",
createdAt=datetime.now(),
updatedAt=datetime.now(),
subHeading="Test heading",
imageUrls=["image.jpg"],
categories=["test"],
isFeatured=False,
isDeleted=False,
version=1,
isAvailable=True,
isApproved=False,
)
],
)
# Mock prisma calls
@@ -201,7 +181,6 @@ async def test_create_store_submission(mocker):
# Verify results
assert result.name == "Test Agent"
assert result.description == "Test description"
assert result.store_listing_version_id == "version-id"
# Verify mocks called correctly
mock_agent_graph.return_value.find_first.assert_called_once()
@@ -216,7 +195,6 @@ async def test_update_profile(mocker):
id="profile-id",
name="Test Creator",
username="creator",
userId="user-id",
description="Test description",
links=["link1"],
avatarUrl="avatar.jpg",
@@ -243,7 +221,7 @@ async def test_update_profile(mocker):
)
# Call function
result = await db.update_profile("user-id", profile)
result = await db.update_or_create_profile("user-id", profile)
# Verify results
assert result.username == "creator"
@@ -259,7 +237,7 @@ async def test_get_user_profile(mocker):
# Mock data
mock_profile = prisma.models.Profile(
id="profile-id",
name="Test User",
name="No Profile Data",
username="testuser",
description="Test description",
links=["link1", "link2"],
@@ -267,22 +245,20 @@ async def test_get_user_profile(mocker):
isFeatured=False,
createdAt=datetime.now(),
updatedAt=datetime.now(),
userId="user-id",
)
# Mock prisma calls
mock_profile_db = mocker.patch("prisma.models.Profile.prisma")
mock_profile_db.return_value.find_first = mocker.AsyncMock(
mock_profile_db.return_value.find_unique = mocker.AsyncMock(
return_value=mock_profile
)
# Call function
result = await db.get_user_profile("user-id")
assert result is not None
# Verify results
assert result.name == "Test User"
assert result.username == "testuser"
assert result.description == "Test description"
assert result.links == ["link1", "link2"]
assert result.avatar_url == "avatar.jpg"
assert result.name == "No Profile Data"
assert result.username == "No Profile Data"
assert result.description == "No Profile Data"
assert result.links == []
assert result.avatar_url == ""

View File

@@ -115,7 +115,6 @@ class StoreSubmission(pydantic.BaseModel):
status: prisma.enums.SubmissionStatus
runs: int
rating: float
store_listing_version_id: str | None = None
class StoreSubmissionsResponse(pydantic.BaseModel):
@@ -152,9 +151,3 @@ class StoreReviewCreate(pydantic.BaseModel):
store_listing_version_id: str
score: int
comments: str | None = None
class ReviewSubmissionRequest(pydantic.BaseModel):
store_listing_version_id: str
is_approved: bool
comments: str

View File

@@ -42,11 +42,6 @@ async def get_profile(
"""
try:
profile = await backend.server.v2.store.db.get_user_profile(user_id)
if profile is None:
return fastapi.responses.JSONResponse(
status_code=404,
content={"detail": "Profile not found"},
)
return profile
except Exception:
logger.exception("Exception occurred whilst getting user profile")
@@ -82,7 +77,7 @@ async def update_or_create_profile(
HTTPException: If there is an error updating the profile
"""
try:
updated_profile = await backend.server.v2.store.db.update_profile(
updated_profile = await backend.server.v2.store.db.update_or_create_profile(
user_id=user_id, profile=profile
)
return updated_profile
@@ -647,30 +642,3 @@ async def download_agent_file(
return fastapi.responses.FileResponse(
tmp_file.name, filename=file_name, media_type="application/json"
)
@router.post(
"/submissions/review/{store_listing_version_id}",
tags=["store", "private"],
)
async def review_submission(
request: backend.server.v2.store.model.ReviewSubmissionRequest,
user: typing.Annotated[
autogpt_libs.auth.models.User,
fastapi.Depends(autogpt_libs.auth.depends.requires_admin_user),
],
):
# Proceed with the review submission logic
try:
submission = await backend.server.v2.store.db.review_store_submission(
store_listing_version_id=request.store_listing_version_id,
is_approved=request.is_approved,
comments=request.comments,
reviewer_id=user.user_id,
)
return submission
except Exception:
raise fastapi.HTTPException(
status_code=500,
detail="An error occurred while creating the store submission review",
)

View File

@@ -86,13 +86,13 @@ async def handle_subscribe(
)
else:
ex_sub = ExecutionSubscription.model_validate(message.data)
await manager.subscribe(ex_sub.graph_id, ex_sub.graph_version, websocket)
await manager.subscribe(ex_sub.graph_id, websocket)
logger.debug(f"New execution subscription for graph {ex_sub.graph_id}")
await websocket.send_text(
WsMessage(
method=Methods.SUBSCRIBE,
success=True,
channel=f"{ex_sub.graph_id}_{ex_sub.graph_version}",
channel=ex_sub.graph_id,
).model_dump_json()
)
@@ -110,13 +110,13 @@ async def handle_unsubscribe(
)
else:
ex_sub = ExecutionSubscription.model_validate(message.data)
await manager.unsubscribe(ex_sub.graph_id, ex_sub.graph_version, websocket)
await manager.unsubscribe(ex_sub.graph_id, websocket)
logger.debug(f"Removed execution subscription for graph {ex_sub.graph_id}")
await websocket.send_text(
WsMessage(
method=Methods.UNSUBSCRIBE,
success=True,
channel=f"{ex_sub.graph_id}_{ex_sub.graph_version}",
channel=ex_sub.graph_id,
).model_dump_json()
)

View File

@@ -253,14 +253,12 @@ async def block_autogen_agent():
test_graph = await create_graph(create_test_graph(), user_id=test_user.id)
input_data = {"input": "Write me a block that writes a string into a file."}
response = await server.agent_server.test_execute_graph(
graph_id=test_graph.id,
user_id=test_user.id,
node_input=input_data,
test_graph.id, input_data, test_user.id
)
print(response)
result = await wait_execution(
graph_id=test_graph.id,
graph_exec_id=response.graph_exec_id,
graph_exec_id=response["id"],
timeout=1200,
user_id=test_user.id,
)

View File

@@ -157,14 +157,10 @@ async def reddit_marketing_agent():
test_graph = await create_graph(create_test_graph(), user_id=test_user.id)
input_data = {"subreddit": "AutoGPT"}
response = await server.agent_server.test_execute_graph(
graph_id=test_graph.id,
user_id=test_user.id,
node_input=input_data,
test_graph.id, input_data, test_user.id
)
print(response)
result = await wait_execution(
test_user.id, test_graph.id, response.graph_exec_id, 120
)
result = await wait_execution(test_user.id, test_graph.id, response["id"], 120)
print(result)

View File

@@ -8,19 +8,12 @@ from backend.data.user import get_or_create_user
from backend.util.test import SpinTestServer, wait_execution
async def create_test_user(alt_user: bool = False) -> User:
if alt_user:
test_user_data = {
"sub": "3e53486c-cf57-477e-ba2a-cb02dc828e1b",
"email": "testuser2@example.com",
"name": "Test User 2",
}
else:
test_user_data = {
"sub": "ef3b97d7-1161-4eb4-92b2-10c24fb154c1",
"email": "testuser@example.com",
"name": "Test User",
}
async def create_test_user() -> User:
test_user_data = {
"sub": "ef3b97d7-1161-4eb4-92b2-10c24fb154c1",
"email": "testuser#example.com",
"name": "Test User",
}
user = await get_or_create_user(test_user_data)
return user
@@ -86,14 +79,10 @@ async def sample_agent():
test_graph = await create_graph(create_test_graph(), test_user.id)
input_data = {"input_1": "Hello", "input_2": "World"}
response = await server.agent_server.test_execute_graph(
graph_id=test_graph.id,
user_id=test_user.id,
node_input=input_data,
test_graph.id, input_data, test_user.id
)
print(response)
result = await wait_execution(
test_user.id, test_graph.id, response.graph_exec_id, 10
)
result = await wait_execution(test_user.id, test_graph.id, response["id"], 10)
print(result)

View File

@@ -1,143 +0,0 @@
import base64
import mimetypes
import re
import shutil
import tempfile
import uuid
from pathlib import Path
from urllib.parse import urlparse
# This "requests" presumably has additional checks against internal networks for SSRF.
from backend.util.request import requests
TEMP_DIR = Path(tempfile.gettempdir()).resolve()
def get_exec_file_path(graph_exec_id: str, path: str) -> str:
"""
Utility to build an absolute path in the {temp}/exec_file/{exec_id}/... folder.
"""
return str(TEMP_DIR / "exec_file" / graph_exec_id / path)
def clean_exec_files(graph_exec_id: str, file: str = "") -> None:
"""
Utility to remove the {temp}/exec_file/{exec_id} folder and its contents.
"""
exec_path = Path(get_exec_file_path(graph_exec_id, file))
if exec_path.exists() and exec_path.is_dir():
shutil.rmtree(exec_path)
"""
MediaFile is a string that represents a file. It can be one of the following:
- Data URI: base64 encoded media file. See https://developer.mozilla.org/en-US/docs/Web/URI/Schemes/data/
- URL: Media file hosted on the internet, it starts with http:// or https://.
- Local path (anything else): A temporary file path living within graph execution time.
Note: Replace this type alias with a proper class when more information is needed.
"""
MediaFile = str
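# Three illustrative MediaFile values, matching the cases described above
# (the concrete paths and URL are made up):
#   "data:image/png;base64,iVBORw0KGgo..."  (inline, base64-encoded content)
#   "https://example.com/picture.png"       (remote file to be downloaded)
#   "outputs/result.png"                    (path relative to the exec folder)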
def store_media_file(
graph_exec_id: str, file: MediaFile, return_content: bool = False
) -> MediaFile:
"""
Safely handle 'file' (a data URI, a URL, or a local path relative to {temp}/exec_file/{exec_id}),
placing or verifying it under:
{tempdir}/exec_file/{exec_id}/...
If 'return_content=True', return a data URI (data:<mime>;base64,<content>).
Otherwise, return the file's media path relative to the exec_id folder.
For each MediaFile type:
- Data URI:
-> decode and store in a new random file in that folder
- URL:
-> download and store in that folder
- Local path:
-> interpret as relative to that folder; verify it exists
(no copying, as it's presumably already there).
We realpath-check so no symlink or '..' can escape the folder.
:param graph_exec_id: The unique ID of the graph execution.
:param file: Data URI, URL, or local (relative) path.
:param return_content: If True, return a data URI of the file content.
If False, return the *relative* path inside the exec_id folder.
:return: The requested result: data URI or relative path of the media.
"""
# Build base path
base_path = Path(get_exec_file_path(graph_exec_id, ""))
base_path.mkdir(parents=True, exist_ok=True)
# Helper functions
def _extension_from_mime(mime: str) -> str:
ext = mimetypes.guess_extension(mime, strict=False)
return ext if ext else ".bin"
def _file_to_data_uri(path: Path) -> str:
mime_type, _ = mimetypes.guess_type(path)
mime_type = mime_type or "application/octet-stream"
b64 = base64.b64encode(path.read_bytes()).decode("utf-8")
return f"data:{mime_type};base64,{b64}"
def _ensure_inside_base(path_candidate: Path, base: Path) -> Path:
"""
Resolve symlinks via resolve() and ensure the result is still under base.
"""
real_candidate = path_candidate.resolve()
real_base = base.resolve()
if not real_candidate.is_relative_to(real_base):
raise ValueError(
"Local file path is outside the temp_base directory. Access denied."
)
return real_candidate
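# For illustration (file names made up): a traversal attempt such as
# _ensure_inside_base(base_path / "../../etc/passwd", base_path) resolves
# outside base_path and raises ValueError, while a plain child path like
# _ensure_inside_base(base_path / "clip.mp4", base_path) comes back resolved.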
def _strip_base_prefix(absolute_path: Path, base: Path) -> str:
"""
Strip base prefix and normalize path.
"""
return str(absolute_path.relative_to(base))
# Process file
if file.startswith("data:"):
# Data URI
match = re.match(r"^data:([^;]+);base64,(.*)$", file, re.DOTALL)
if not match:
raise ValueError(
"Invalid data URI format. Expected data:<mime>;base64,<data>"
)
mime_type = match.group(1).strip().lower()
b64_content = match.group(2).strip()
# Generate filename and decode
extension = _extension_from_mime(mime_type)
filename = f"{uuid.uuid4()}{extension}"
target_path = _ensure_inside_base(base_path / filename, base_path)
target_path.write_bytes(base64.b64decode(b64_content))
elif file.startswith(("http://", "https://")):
# URL
parsed_url = urlparse(file)
filename = Path(parsed_url.path).name or f"{uuid.uuid4()}"
target_path = _ensure_inside_base(base_path / filename, base_path)
# Download and save
resp = requests.get(file)
resp.raise_for_status()
target_path.write_bytes(resp.content)
else:
# Local path
target_path = _ensure_inside_base(base_path / file, base_path)
if not target_path.is_file():
raise ValueError(f"Local file does not exist: {target_path}")
# Return result
if return_content:
return MediaFile(_file_to_data_uri(target_path))
else:
return MediaFile(_strip_base_prefix(target_path, base_path))
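# A hedged usage sketch for store_media_file, with a made-up execution ID
# and data URI; the call decodes the base64 payload, stores it under the
# per-execution temp folder, and returns the path relative to that folder:
relative_path = store_media_file(
    graph_exec_id="exec-123",
    file="data:text/plain;base64,aGVsbG8=",  # base64 for "hello"
)
print(relative_path)  # e.g. "<uuid>.txt"; the extension comes from the MIME type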

View File

@@ -18,7 +18,6 @@ from typing import (
FrozenSet,
Iterator,
List,
Optional,
Set,
Tuple,
Type,
@@ -34,7 +33,7 @@ from pydantic import BaseModel
from Pyro5 import api as pyro
from Pyro5 import config as pyro_config
from backend.data import db, rabbitmq, redis
from backend.data import db, redis
from backend.util.process import AppProcess
from backend.util.retry import conn_retry
from backend.util.settings import Config, Secrets
@@ -62,11 +61,8 @@ def expose(func: C) -> C:
try:
return func(*args, **kwargs)
except Exception as e:
msg = f"Error in {func.__name__}: {e}"
if isinstance(e, ValueError):
logger.warning(msg)
else:
logger.exception(msg)
msg = f"Error in {func.__name__}: {e.__str__()}"
logger.exception(msg)
raise
register_pydantic_serializers(func)
@@ -80,7 +76,7 @@ def register_pydantic_serializers(func: Callable):
try:
pydantic_types = _pydantic_models_from_type_annotation(annotation)
except Exception as e:
raise TypeError(f"Error while exposing {func.__name__}: {e}")
raise TypeError(f"Error while exposing {func.__name__}: {e.__str__()}")
for model in pydantic_types:
logger.debug(
@@ -117,9 +113,6 @@ class AppService(AppProcess, ABC):
shared_event_loop: asyncio.AbstractEventLoop
use_db: bool = False
use_redis: bool = False
use_async: bool = False
use_rabbitmq: Optional[rabbitmq.RabbitMQConfig] = None
rabbitmq_service: Optional[rabbitmq.SyncRabbitMQ | rabbitmq.AsyncRabbitMQ] = None
use_supabase: bool = False
def __init__(self):
@@ -134,20 +127,6 @@ class AppService(AppProcess, ABC):
def get_host(cls) -> str:
return os.environ.get(f"{cls.service_name.upper()}_HOST", config.pyro_host)
@property
def rabbit(self) -> rabbitmq.SyncRabbitMQ | rabbitmq.AsyncRabbitMQ:
"""Access the RabbitMQ service. Will raise if not configured."""
if not self.rabbitmq_service:
raise RuntimeError("RabbitMQ not configured for this service")
return self.rabbitmq_service
@property
def rabbit_config(self) -> rabbitmq.RabbitMQConfig:
"""Access the RabbitMQ config. Will raise if not configured."""
if not self.use_rabbitmq:
raise RuntimeError("RabbitMQ not configured for this service")
return self.use_rabbitmq
def run_service(self) -> None:
while True:
time.sleep(10)
@@ -165,16 +144,6 @@ class AppService(AppProcess, ABC):
self.shared_event_loop.run_until_complete(db.connect())
if self.use_redis:
redis.connect()
if self.use_rabbitmq:
logger.info(f"[{self.__class__.__name__}] ⏳ Configuring RabbitMQ...")
if self.use_async:
self.rabbitmq_service = rabbitmq.AsyncRabbitMQ(self.use_rabbitmq)
self.shared_event_loop.run_until_complete(
self.rabbitmq_service.connect()
)
else:
self.rabbitmq_service = rabbitmq.SyncRabbitMQ(self.use_rabbitmq)
self.rabbitmq_service.connect()
if self.use_supabase:
from supabase import create_client
@@ -203,8 +172,6 @@ class AppService(AppProcess, ABC):
if self.use_redis:
logger.info(f"[{self.__class__.__name__}] ⏳ Disconnecting Redis...")
redis.disconnect()
if self.use_rabbitmq:
logger.info(f"[{self.__class__.__name__}] ⏳ Disconnecting RabbitMQ...")
@conn_retry("Pyro", "Starting Pyro Service")
def __start_pyro(self):

View File

@@ -81,14 +81,10 @@ class Config(UpdateTrackingModel["Config"], BaseSettings):
default=True,
description="If authentication is enabled or not",
)
enable_credit: bool = Field(
default=False,
enable_credit: str = Field(
default="false",
description="If user credit system is enabled or not",
)
enable_beta_monthly_credit: bool = Field(
default=True,
description="If beta monthly credits accounting is enabled or not",
)
num_user_credits_refill: int = Field(
default=1500,
description="Number of credits to refill for each user",
@@ -157,30 +153,11 @@ class Config(UpdateTrackingModel["Config"], BaseSettings):
description="The name of the Google Cloud Storage bucket for media files",
)
reddit_user_agent: str = Field(
default="AutoGPT:1.0 (by /u/autogpt)",
description="The user agent for the Reddit API",
)
scheduler_db_pool_size: int = Field(
default=3,
description="The pool size for the scheduler database connection pool",
)
rabbitmq_host: str = Field(
default="localhost",
description="The host for the RabbitMQ server",
)
rabbitmq_port: int = Field(
default=5672,
description="The port for the RabbitMQ server",
)
rabbitmq_vhost: str = Field(
default="/",
description="The vhost for the RabbitMQ server",
)
@field_validator("platform_base_url", "frontend_base_url")
@classmethod
def validate_platform_base_url(cls, v: str, info: ValidationInfo) -> str:
@@ -272,11 +249,6 @@ class Secrets(UpdateTrackingModel["Secrets"], BaseSettings):
encryption_key: str = Field(default="", description="Encryption key")
rabbitmq_default_user: str = Field(default="", description="RabbitMQ default user")
rabbitmq_default_pass: str = Field(
default="", description="RabbitMQ default password"
)
# OAuth server credentials for integrations
# --8<-- [start:OAuthServerCredentialsExample]
github_client_id: str = Field(default="", description="GitHub OAuth client ID")
@@ -304,6 +276,8 @@ class Secrets(UpdateTrackingModel["Secrets"], BaseSettings):
reddit_client_id: str = Field(default="", description="Reddit client ID")
reddit_client_secret: str = Field(default="", description="Reddit client secret")
reddit_username: str = Field(default="", description="Reddit username")
reddit_password: str = Field(default="", description="Reddit password")
openweathermap_api_key: str = Field(
default="", description="OpenWeatherMap API key"
@@ -334,18 +308,6 @@ class Secrets(UpdateTrackingModel["Secrets"], BaseSettings):
exa_api_key: str = Field(default="", description="Exa API key")
e2b_api_key: str = Field(default="", description="E2B API key")
nvidia_api_key: str = Field(default="", description="Nvidia API key")
mem0_api_key: str = Field(default="", description="Mem0 API key")
linear_client_id: str = Field(default="", description="Linear client ID")
linear_client_secret: str = Field(default="", description="Linear client secret")
todoist_client_id: str = Field(default="", description="Todoist client ID")
todoist_client_secret: str = Field(default="", description="Todoist client secret")
stripe_api_key: str = Field(default="", description="Stripe API Key")
stripe_webhook_secret: str = Field(default="", description="Stripe Webhook Secret")
screenshotone_api_key: str = Field(default="", description="ScreenshotOne API Key")
# Add more secret fields as needed

View File

@@ -1,6 +1,5 @@
import logging
import time
import uuid
from typing import Sequence, cast
from backend.data import db
@@ -58,7 +57,7 @@ async def wait_execution(
user_id: str,
graph_id: str,
graph_exec_id: str,
timeout: int = 30,
timeout: int = 20,
) -> Sequence[ExecutionResult]:
async def is_execution_completed():
status = await AgentServer().test_get_graph_run_status(graph_exec_id, user_id)
@@ -105,13 +104,7 @@ def execute_block_test(block: Block):
log.info(f"{prefix} mock {mock_name} not found in block")
# Populate credentials argument(s)
extra_exec_kwargs: dict = {
"graph_id": str(uuid.uuid4()),
"node_id": str(uuid.uuid4()),
"graph_exec_id": str(uuid.uuid4()),
"node_exec_id": str(uuid.uuid4()),
"user_id": str(uuid.uuid4()),
}
extra_exec_kwargs = {}
input_model = cast(type[BlockSchema], block.input_schema)
credentials_input_fields = input_model.get_credentials_fields()
if len(credentials_input_fields) == 1 and isinstance(
@@ -132,9 +125,7 @@ def execute_block_test(block: Block):
for output_name, output_data in block.execute(input_data, **extra_exec_kwargs):
if output_index >= len(block.test_output):
raise ValueError(
f"{prefix} produced output more than expected {output_index} >= {len(block.test_output)}:\nOutput Expected:\t\t{block.test_output}\nFailed Output Produced:\t('{output_name}', {output_data})\nNote that this may not be the one that was unexpected, but it is the first that triggered the extra output warning"
)
raise ValueError(f"{prefix} produced output more than expected")
ex_output_name, ex_output_data = block.test_output[output_index]
def compare(data, expected_data):
@@ -151,9 +142,7 @@ def execute_block_test(block: Block):
log.info(f"{prefix} {mark} comparing `{data}` vs `{expected_data}`")
if not is_matching:
raise ValueError(
f"{prefix}: wrong output {data} vs {expected_data}\n"
f"Output Expected:\t\t{block.test_output}\n"
f"Failed Output Produced:\t('{output_name}', {output_data})"
f"{prefix}: wrong output {data} vs {expected_data}"
)
compare(output_data, ex_output_data)

View File

@@ -1,8 +1,6 @@
import json
from typing import Any, Type, TypeVar, cast, get_args, get_origin
from prisma import Json as PrismaJson
class ConversionError(ValueError):
pass
@@ -190,8 +188,6 @@ def type_match(value: Any, target_type: Type[T]) -> T:
def convert(value: Any, target_type: Type[T]) -> T:
try:
if isinstance(value, PrismaJson):
value = value.data
return cast(T, _try_convert(value, target_type, raise_on_mismatch=False))
except Exception as e:
raise ConversionError(f"Failed to convert {value} to {target_type}") from e

View File

@@ -1,2 +0,0 @@
-- AlterTable
ALTER TABLE "AgentPreset" ADD COLUMN "isDeleted" BOOLEAN NOT NULL DEFAULT false;

Some files were not shown because too many files have changed in this diff.