AutoGPT/autogpt_platform/backend/pyproject.toml
Zamil Majdy c4056cbae9 feat(block): Introduce context-window aware prompt compaction for LLM & SmartDecision blocks (#10252)
Calling the LLM from the current block can sometimes fail when the
conversation exceeds the model's context window.
A prompt compaction algorithm is now applied (enabled by default) to
keep the sent prompt within the context-window limit.


### Changes 🏗️

#### Heuristics

* Prefer shrinking message content over truncating the conversation.
* If compacting the content is still not enough, reduce the conversation list itself.
* The rest of the implementation is adjusted to minimize the chance of the LLM call breaking.

#### Strategy

1. **Token-aware truncation** – progressively halve a per-message cap
   (`start_cap`, `start_cap/2`, … `floor_cap`) and apply it to the
   *content* of every message except the first and last. Tool shells
   are included: we keep the envelope but shorten huge payloads.
2. **Middle-out deletion** – if still over the limit, delete whole
   messages working outward from the centre, **skipping** any message
   that contains `tool_calls` or has `role == "tool"`.
3. **Last-chance trim** – if still too big, truncate the *first* and
   *last* message bodies down to `floor_cap` tokens.
4. If the prompt is *still* too large:
   * raise `ValueError` when `lossy_ok == False` (default)
   * return the partially-trimmed prompt when `lossy_ok == True`
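
Below is a minimal, self-contained Python sketch of this flow. It assumes tiktoken-based token counting and plain `dict` chat messages; `start_cap`, `floor_cap`, and `lossy_ok` follow the names in this description, while `compact_prompt` and its helpers are illustrative and not the block's actual API.

```python
# Illustrative sketch of the compaction strategy above; not the real
# block implementation. Message shape and token counting are assumptions.
import tiktoken

_enc = tiktoken.get_encoding("cl100k_base")


def _tokens(text: str) -> int:
    return len(_enc.encode(text))


def _prompt_tokens(messages: list[dict]) -> int:
    return sum(_tokens(str(m.get("content") or "")) for m in messages)


def _truncate(text: str, cap: int) -> str:
    """Keep roughly the first `cap` tokens of `text`."""
    ids = _enc.encode(text)
    return text if len(ids) <= cap else _enc.decode(ids[:cap])


def compact_prompt(
    messages: list[dict],
    limit: int,
    start_cap: int = 2048,
    floor_cap: int = 128,
    lossy_ok: bool = False,
) -> list[dict]:
    msgs = [dict(m) for m in messages]

    # 1. Token-aware truncation: halve a per-message cap until the prompt
    #    fits, shortening the content of every message except first/last.
    cap = start_cap
    while _prompt_tokens(msgs) > limit and cap >= floor_cap:
        for m in msgs[1:-1]:
            if isinstance(m.get("content"), str):
                m["content"] = _truncate(m["content"], cap)
        cap //= 2

    # 2. Middle-out deletion: drop whole messages working outward from the
    #    centre, skipping tool calls / tool results to keep the envelope valid.
    def _deletable(m: dict) -> bool:
        return not m.get("tool_calls") and m.get("role") != "tool"

    while _prompt_tokens(msgs) > limit and len(msgs) > 2:
        centre_out = sorted(range(1, len(msgs) - 1),
                            key=lambda i: abs(i - len(msgs) // 2))
        for i in centre_out:
            if _deletable(msgs[i]):
                del msgs[i]
                break
        else:
            break  # nothing left that is safe to delete

    # 3. Last-chance trim: shorten the first and last message bodies.
    if _prompt_tokens(msgs) > limit:
        for m in (msgs[0], msgs[-1]):
            if isinstance(m.get("content"), str):
                m["content"] = _truncate(m["content"], floor_cap)

    # 4. Still too large: fail loudly unless a lossy prompt is acceptable.
    if _prompt_tokens(msgs) > limit and not lossy_ok:
        raise ValueError("Prompt still exceeds the context window after compaction")
    return msgs
```

Skipping messages that carry `tool_calls` or have `role == "tool"` in step 2 is what keeps tool-call/result pairs intact, matching the "keep the envelope" rule in step 1.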

### Checklist 📋

#### For code changes:
- [x] I have clearly listed my changes in the PR description
- [x] I have made a test plan
- [x] I have tested my changes according to the test plan:
- [x] Ran an SDM block in a loop until it hit 200,000 tokens using the
OpenAI o3 model.
2025-06-27 15:07:50 +00:00


[tool.poetry]
name = "autogpt-platform-backend"
version = "0.4.9"
description = "A platform for building AI-powered agentic workflows"
authors = ["AutoGPT <info@agpt.co>"]
readme = "README.md"
packages = [{ include = "backend", format = "sdist" }]

[tool.poetry.dependencies]
python = ">=3.10,<3.13"
aio-pika = "^9.5.5"
aiodns = "^3.1.1"
anthropic = "^0.51.0"
apscheduler = "^3.11.0"
autogpt-libs = { path = "../autogpt_libs", develop = true }
bleach = { extras = ["css"], version = "^6.2.0" }
click = "^8.2.0"
cryptography = "^43.0"
discord-py = "^2.5.2"
e2b-code-interpreter = "^1.5.0"
fastapi = "^0.115.12"
feedparser = "^6.0.11"
flake8 = "^7.2.0"
google-api-python-client = "^2.169.0"
google-auth-oauthlib = "^1.2.2"
google-cloud-storage = "^3.1.0"
googlemaps = "^4.10.0"
gravitasml = "^0.1.3"
groq = "^0.24.0"
jinja2 = "^3.1.6"
jsonref = "^1.1.0"
jsonschema = "^4.22.0"
launchdarkly-server-sdk = "^9.11.0"
mem0ai = "^0.1.98"
moviepy = "^2.1.2"
ollama = "^0.4.8"
openai = "^1.78.1"
pika = "^1.3.2"
pinecone = "^5.3.1"
poetry = "2.1.1" # CHECK DEPENDABOT SUPPORT BEFORE UPGRADING
postmarker = "^1.0"
praw = "~7.8.1"
prisma = "^0.15.0"
prometheus-client = "^0.21.1"
psutil = "^7.0.0"
psycopg2-binary = "^2.9.10"
pydantic = { extras = ["email"], version = "^2.11.4" }
pydantic-settings = "^2.9.1"
pytest = "^8.3.5"
pytest-asyncio = "^0.26.0"
python-dotenv = "^1.1.0"
python-multipart = "^0.0.20"
redis = "^5.2.0"
replicate = "^1.0.6"
sentry-sdk = { extras = ["anthropic", "fastapi", "launchdarkly", "openai", "sqlalchemy"], version = "^2.28.0" }
sqlalchemy = "^2.0.40"
strenum = "^0.4.9"
stripe = "^11.5.0"
supabase = "2.15.1"
tenacity = "^9.1.2"
todoist-api-python = "^2.1.7"
tweepy = "^4.14.0"
uvicorn = { extras = ["standard"], version = "^0.34.2" }
websockets = "^14.2"
youtube-transcript-api = "^0.6.2"
zerobouncesdk = "^1.1.1"
# NOTE: please insert new dependencies in their alphabetical location
pytest-snapshot = "^0.9.0"
aiofiles = "^24.1.0"
pyclamd = "^0.4.0"
tiktoken = "^0.9.0"

[tool.poetry.group.dev.dependencies]
aiohappyeyeballs = "^2.6.1"
black = "^24.10.0"
faker = "^33.3.1"
httpx = "^0.28.1"
isort = "^5.13.2"
poethepoet = "^0.34.0"
pyright = "^1.1.400"
pytest-mock = "^3.14.0"
pytest-watcher = "^0.4.2"
requests = "^2.32.3"
ruff = "^0.11.10"
# NOTE: please insert new dependencies in their alphabetical location

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

[tool.poetry.scripts]
app = "backend.app:main"
rest = "backend.rest:main"
ws = "backend.ws:main"
scheduler = "backend.scheduler:main"
executor = "backend.exec:main"
cli = "backend.cli:main"
format = "linter:format"
lint = "linter:lint"
test = "run_tests:test"

[tool.isort]
profile = "black"

[tool.pytest-watcher]
now = false
clear = true
delay = 0.2
runner = "pytest"
runner_args = []
patterns = ["*.py"]
ignore_patterns = []

[tool.pytest.ini_options]
asyncio_mode = "auto"
asyncio_default_fixture_loop_scope = "session"
filterwarnings = [
    "ignore:'audioop' is deprecated:DeprecationWarning:discord.player",
    "ignore:invalid escape sequence:DeprecationWarning:tweepy.api",
]

[tool.ruff]
target-version = "py310"