Compare commits

...

2 Commits

Author SHA1 Message Date
Bentlybro
6b1f0df58c fix(backend): Clean up orphaned schedules without schedule_id
Old scheduled jobs created before schedule_id was added to
GraphExecutionJobArgs have schedule_id=None. When these fail
validation, _handle_graph_validation_error cannot unschedule them,
so they fire repeatedly and have generated ~60K+ Sentry errors
(AUTOGPT-SERVER-6W2 and AUTOGPT-SERVER-6W3).

Fix: add _cleanup_old_schedules_without_id(), which finds the
schedules for the graph but removes only those with schedule_id=None
(legacy jobs). This preserves any valid newer schedules the user may
have created, unlike the broader _cleanup_orphaned_schedules_for_graph(),
which removes all schedules for the graph.
2026-02-02 14:15:25 +00:00
Guofang.Tang
1081590384 feat(backend): cover webhook ingress URL route (#11747)
### Changes 🏗️

- Add a unit test to verify webhook ingress URL generation matches the FastAPI route.

### Checklist 📋

#### For code changes:

- [x] I have clearly listed my changes in the PR description
- [x] I have made a test plan
- [x] I have tested my changes according to the test plan:
  - [x] `poetry run pytest backend/integrations/webhooks/utils_test.py --confcutdir=backend/integrations/webhooks`

#### For configuration changes:

- [x] .env.default is updated or already compatible with my changes
- [x] docker-compose.yml is updated or already compatible with my changes
- [x] I have included a list of my configuration changes in the PR description (under Changes)



## Summary by CodeRabbit

* **Tests**
  * Added a unit test that validates webhook ingress URL generation matches the application's resolved route (scheme, host, and path) for provider-specific webhook endpoints, improving confidence in routing behavior and helping prevent regressions.


---------

Co-authored-by: Reinier van der Leer <pwuts@agpt.co>
2026-02-01 20:29:15 +00:00
2 changed files with 72 additions and 2 deletions
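
Before the diffs: the first commit's failure mode hinges on a field that legacy jobs never carried. A minimal sketch of that situation, using a simplified stand-in rather than the real GraphExecutionJobArgs model:

```python
# Simplified stand-in, not the actual backend model: shows why jobs created
# before schedule_id existed could not be unscheduled by the error handler.
from dataclasses import dataclass
from typing import Optional


@dataclass
class JobArgsSketch:
    graph_id: str
    user_id: str
    schedule_id: Optional[str] = None  # None for legacy jobs


def can_unschedule_directly(args: JobArgsSketch) -> bool:
    # Newer jobs carry the scheduler's job id and can be deleted on failure;
    # legacy jobs do not, which is why they kept re-firing.
    return args.schedule_id is not None


assert can_unschedule_directly(JobArgsSketch("graph-1", "user-1", "sched-1"))
assert not can_unschedule_directly(JobArgsSketch("graph-1", "user-1"))  # legacy job
```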

View File

@@ -193,9 +193,11 @@ async def _handle_graph_validation_error(args: "GraphExecutionJobArgs") -> None:
             user_id=args.user_id,
         )
     else:
-        logger.error(
-            f"Unable to unschedule graph: {args.graph_id} as this is an old job with no associated schedule_id please remove manually"
+        logger.warning(
+            f"Old scheduled job for graph {args.graph_id} (user {args.user_id}) "
+            f"has no schedule_id, attempting targeted cleanup"
         )
+        await _cleanup_old_schedules_without_id(args.graph_id, args.user_id)


 async def _handle_graph_not_available(
@@ -238,6 +240,35 @@ async def _cleanup_orphaned_schedules_for_graph(graph_id: str, user_id: str) ->
     )


+async def _cleanup_old_schedules_without_id(graph_id: str, user_id: str) -> None:
+    """Remove only schedules that have no schedule_id in their job args.
+
+    Unlike _cleanup_orphaned_schedules_for_graph (which removes ALL schedules
+    for a graph), this only targets legacy jobs created before schedule_id was
+    added to GraphExecutionJobArgs, preserving any valid newer schedules.
+    """
+    scheduler_client = get_scheduler_client()
+    schedules = await scheduler_client.get_execution_schedules(
+        graph_id=graph_id, user_id=user_id
+    )
+
+    for schedule in schedules:
+        if schedule.schedule_id is not None:
+            continue
+        try:
+            await scheduler_client.delete_schedule(
+                schedule_id=schedule.id, user_id=user_id
+            )
+            logger.info(
+                f"Cleaned up old schedule {schedule.id} (no schedule_id) "
+                f"for graph {graph_id}"
+            )
+        except Exception:
+            logger.exception(
+                f"Failed to delete old schedule {schedule.id} for graph {graph_id}"
+            )
+
+
 def cleanup_expired_files():
     """Clean up expired files from cloud storage."""
     # Wait for completion
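
To make the selective behavior of the new helper concrete: only schedules whose job args lack a schedule_id are deleted, so newer user-created schedules survive. An illustrative check with throwaway stand-ins (not the real scheduler client or its return type):

```python
# Illustrative only: the filter _cleanup_old_schedules_without_id applies,
# demonstrated with simple stand-ins instead of real schedule records.
from types import SimpleNamespace

schedules = [
    SimpleNamespace(id="job-legacy", schedule_id=None),     # pre-schedule_id job: removed
    SimpleNamespace(id="job-new", schedule_id="sched-42"),  # valid newer schedule: kept
]

to_delete = [s for s in schedules if s.schedule_id is None]
assert [s.id for s in to_delete] == ["job-legacy"]
```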

View File

@@ -0,0 +1,39 @@
+from urllib.parse import urlparse
+
+import fastapi
+from fastapi.routing import APIRoute
+
+from backend.api.features.integrations.router import router as integrations_router
+from backend.integrations.providers import ProviderName
+from backend.integrations.webhooks import utils as webhooks_utils
+
+
+def test_webhook_ingress_url_matches_route(monkeypatch) -> None:
+    app = fastapi.FastAPI()
+    app.include_router(integrations_router, prefix="/api/integrations")
+
+    provider = ProviderName.GITHUB
+    webhook_id = "webhook_123"
+    base_url = "https://example.com"
+    monkeypatch.setattr(webhooks_utils.app_config, "platform_base_url", base_url)
+
+    route = next(
+        route
+        for route in integrations_router.routes
+        if isinstance(route, APIRoute)
+        and route.path == "/{provider}/webhooks/{webhook_id}/ingress"
+        and "POST" in route.methods
+    )
+    expected_path = f"/api/integrations{route.path}".format(
+        provider=provider.value,
+        webhook_id=webhook_id,
+    )
+
+    actual_url = urlparse(webhooks_utils.webhook_ingress_url(provider, webhook_id))
+    expected_base = urlparse(base_url)
+
+    assert (actual_url.scheme, actual_url.netloc) == (
+        expected_base.scheme,
+        expected_base.netloc,
+    )
+    assert actual_url.path == expected_path
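
Judging from the assertions above (the /api/integrations router prefix plus the /{provider}/webhooks/{webhook_id}/ingress route), the ingress URL is expected to combine the configured platform base URL with the integrations path. A rough sketch of that shape, purely illustrative; the real implementation is backend.integrations.webhooks.utils.webhook_ingress_url:

```python
# Illustrative sketch of the URL shape the test above pins down; not the
# actual helper, which lives in backend.integrations.webhooks.utils.
def ingress_url_sketch(platform_base_url: str, provider: str, webhook_id: str) -> str:
    return f"{platform_base_url}/api/integrations/{provider}/webhooks/{webhook_id}/ingress"


# e.g. "https://example.com/api/integrations/github/webhooks/webhook_123/ingress"
print(ingress_url_sketch("https://example.com", "github", "webhook_123"))
```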