Compare commits

...

37 Commits

Author SHA1 Message Date
Nicholas Tindle
021ddaad59 Merge branch 'dev' into feat/org-workspace-pr1 2026-04-07 13:39:26 -05:00
Nicholas Tindle
a4d84add67 test(backend): strengthen weak assertions in credit tests + real review-finding tests
Credit test improvements (stub auditor findings):
- spend_credits: verify transaction data (orgId, userId, amount, runningBalance)
- top_up_credits: verify transaction data (orgId, amount, initiatedByUserId)
- get_seat_info: verify where clause includes organizationId
- unassign_seat: verify where clause targets correct user+org

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-07 13:37:17 -05:00
Nicholas Tindle
a7fa45d1be fix(backend): address critical review findings from 7-agent audit
Security fixes:
- decline_invitation: added state checks (already accepted/revoked/expired)
- join_team/leave_team: added ctx.org_id == org_id path verification
- reject_transfer: added org membership check (source or target only)

Logic fixes:
- approve_transfer: no longer sets COMPLETED — only execute_transfer does
- update_org: syncs OrganizationProfile displayName/username on rename

Schema:
- Renamed WORKSPACE → TEAM in ResourceVisibility, CredentialScope,
  CredentialOwnerType enums (consistency with Team model naming)
- Migration regenerated

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-07 13:33:17 -05:00
Nicholas Tindle
863ddd4364 test(backend): replace all 48 xfail stubs with real test implementations
Zero 'assert False' stubs remaining. Every test now calls actual
functions with proper mocks and real assertions.

Scoreboard: 66 passed, 30 xfailed (was 47/49)

Tests that flipped from xfail → passing (19):
- PR10 (3): copilot agent run, preset execution, pending human review
- PR11 (1): notification batch schema columns verified
- PR14 (6): create_api_key org context, validate_api_key org fields,
  external API org resolution, team restriction, defaults, none-org
- PR15 (2): create_submission sets owningOrgId, fork_graph to diff org
- PR16 (8): ALL transfer tests — create, approve (source+target),
  execute, reject, requires both approvals, credential handling

Remaining 30 xfails (real test bodies, will pass when implemented):
- PR11 (1): subscribe_to_session org validation
- PR14 (2): list_api_keys org-scoped, route RequestContext
- PR15 (8): view tests, copy_graph, slug per-org, membership, edit
- PR18 (20): data layer cutover (read-path org scoping, NOT NULL)

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-07 13:18:21 -05:00
Nicholas Tindle
a6ee510cdd feat(backend): PR18 — add RequestContext to 26 v1.py routes for org/team scoping
Switch 26 routes from user-only to user+org context by adding
ctx: Annotated[RequestContext, Security(get_request_context)]
alongside the existing user_id parameter.

Graph CRUD (6): list, get, versions, delete, set_active, settings
Executions (6): list_all, list_graph, get, delete, share, unshare
Scheduling (3): list_graph, list_all, delete
Builder (1): execute_block
Files (1): upload
Credits (4): get, top_up, history, refunds
API Keys (5): list, get, delete, suspend, update_permissions

Routes left user-only (no ctx needed):
- Auth: email, timezone
- Preferences: get/update
- Onboarding: all 7 routes
- Billing: checkout, auto_top_up, payment_method, refund

5 routes already had ctx from earlier PRs.
Scoreboard: 47 passed, 49 xfailed.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-07 08:03:29 -05:00
Nicholas Tindle
84b96fc360 feat(backend): PR14+PR15+PR16 — API key tenancy, marketplace org ownership, transfer workflows
PR14 — API Key/OAuth Tenancy:
- create_api_key() accepts organization_id, owner_type, team_id_restriction
- APIKeyInfo model extended with org context fields
- External API middleware reads org context from validated key
- v1.py API key create route passes org context from RequestContext

PR15 — Marketplace Org Ownership:
- create_store_submission() accepts organization_id, sets owningOrgId on StoreListing
- Store submission route passes ctx.org_id to submission creation
- Schema already has owningOrgId from PR1 migration

PR16 — Transfer Workflows (new package):
- backend/api/features/transfers/ with model.py, db.py, routes.py
- POST /api/transfers — initiate transfer (requires TRANSFER_RESOURCES)
- GET /api/transfers — list transfers for active org
- POST /api/transfers/{id}/approve — approve (source or target admin)
- POST /api/transfers/{id}/reject — reject
- POST /api/transfers/{id}/execute — execute (moves organizationId)
- AuditLog entries on transfer completion
- Router registered in rest_api.py

Scoreboard: 47 passed, 49 xfailed (regression tests green).

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-07 04:08:49 -05:00
Nicholas Tindle
9732cef3b5 feat(backend): PR10+PR11 — thread org/team through all execution paths + notification schema
PR10 — All add_graph_execution call sites now pass org/team context:
- blocks/agent.py: nested agent execution gets org/team from ExecutionContext
- copilot/tools/run_agent.py: CoPilot agent run resolves user's default team
- library/routes/presets.py: preset execution uses RequestContext
- external/v1/routes.py: external API resolves user's default team
- data/human_review.py: get_or_create_human_review accepts + stores org/team

PR11 — Schema:
- UserNotificationBatch: added organizationId + teamId columns
- Migration regenerated to include new columns

Re-keying NOT needed: ChatSession IDs are globally unique UUIDs.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-05 16:22:50 +01:00
Nicholas Tindle
aaa16a2dd2 test(backend): add 96 regression + xfail scoreboard tests for org/team migration
Phase 0 of the org/team tenancy completion. All tests in one file as a
scoreboard — regression tests PASS now, xfail tests flip to PASS as
each PR is implemented.

Regression tests (47 passing):
- Graph CRUD userId isolation (5)
- Execution userId isolation (7)
- Library agent userId isolation (6)
- Store/marketplace userId isolation (5)
- Chat session userId isolation (6)
- API key userId isolation (3)
- Credits userId isolation (3)
- Schedules userId isolation (4)
- Shared executions public access (1)
- User settings isolation (2)
- Additional execution + chat edge cases (5)

xfail tests (49 expected failures):
- PR10: Webhook/background tenancy (3)
- PR11: Realtime tenancy (2 + 1 passing doc test)
- PR14: API key/OAuth tenancy (6 + 2 passing)
- PR15: Marketplace org ownership (10 + 2 passing)
- PR16: Transfer workflows (8)
- PR18: Final cutover (20)

As each PR is implemented, its xfail markers are removed and tests
must pass. Target: 96/96 passing when all PRs are complete.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-05 07:36:56 +02:00
Nicholas Tindle
fbb6b5cfac refactor(platform): rename Workspace → Team across entire org feature
Rename the 'workspace' concept to 'team' per architecture + product
alignment. Teams are groups of people sharing resources (like GitHub
Teams within Organizations). This resolves the naming collision with
the existing UserWorkspace file-storage model.

Schema:
- OrgWorkspace → Team, OrgWorkspaceMember → TeamMember,
  WorkspaceInvite → TeamInvite, WorkspaceJoinPolicy → TeamJoinPolicy
- orgWorkspaceId → teamId on all tenant-bound models
- Single clean migration (old workspace migrations deleted)

Auth:
- WorkspaceAction → TeamAction
- requires_workspace_permission → requires_team_permission
- X-Workspace-Id → X-Team-Id header
- RequestContext: workspace_id → team_id, is_workspace_* → is_team_*

Backend:
- workspace_routes.py → team_routes.py (+ db, model files)
- All function/class/variable renames throughout
- API routes: /api/orgs/{id}/workspaces → /api/orgs/{id}/teams

Frontend:
- OrgWorkspaceSwitcher → OrgTeamSwitcher
- OrgWorkspaceProvider → OrgTeamProvider
- useOrgWorkspaceStore → useOrgTeamStore
- ACTIVE_WORKSPACE → ACTIVE_TEAM localStorage key
- active-workspace-id → active-team-id

NOT renamed (file-storage workspace — different concept):
- UserWorkspace, UserWorkspaceFile, workspace:// URIs
- ExecutionContext.workspace_id (file workspace)
- backend/data/workspace.py, backend/util/workspace*.py

40 files changed, 199 tests passing.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-04 17:58:53 +02:00
Nicholas Tindle
87c3ab6464 fix(backend): address PR review round 2 — TDD for 3 bugs + docstring fix
Bugs found by sentry, coderabbit, and Pwuts. Each had a failing xfail
test written first, then fixed, then xfail removed.

Fixes:
1. update_org_member: bare next() replaced with next(..., None) + NotFoundError
   — prevents StopIteration crash if member disappears concurrently (sentry)
2. add_workspace_member: now verifies workspace belongs to the claimed org
   — prevents adding members to workspaces in other orgs (coderabbit)
3. test_assign_seat: fixed to assert requested seat type not mock default (coderabbit)
4. Docstring format: RST 'Usage::' blocks replaced with plain 'Example::'
   to match codebase style (Pwuts)

3 new tests + 3 existing tests updated.
Total: 127 route tests + 15 credit tests passing.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-04 08:54:25 +02:00
Nicholas Tindle
fb1f68f67e fix(backend): address 7 PR review bugs with TDD approach
Bugs found by coderabbit, cursor, and sentry bots. Each bug had a
failing xfail test written first, then fixed, then xfail removed.

Fixes:
1. Invitation routes (create/list/revoke) now verify ctx.org_id == path
   org_id — prevents cross-org authorization bypass
2. decline_invitation now verifies email matches — prevents any user from
   declining another user's invitation
3. create_org now creates OrgBalance(balance=0) — prevents credit
   operations failing on new team orgs
4. transfer_ownership rejects self-transfer — prevents clearing the only
   owner when new_owner_id == current_owner_id
5. join_workspace verifies org membership — prevents non-org-members from
   joining org workspaces
6. Workspace create route verifies ctx.org_id == path org_id

7 new tests + 4 existing tests updated for new behavior.
Total: 124 route tests passing.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-03 20:52:04 +02:00
Nicholas Tindle
15ef6206f6 test(backend): add 28 gap-fix tests + fix test infrastructure for org tests
Add comprehensive tests for all design gap fixes:

Conversion (6 tests):
- test_convert_creates_new_personal_org
- test_convert_new_org_gets_all_records (7 records verified)
- test_convert_new_org_gets_zero_balance
- test_convert_rolls_back_on_failure
- test_convert_already_team_org_fails
- test_convert_user_without_name_uses_org_name

Soft-delete (4 tests):
- test_delete_org_sets_deleted_at (not hard-delete)
- test_delete_personal_org_blocked
- test_delete_already_deleted_org_blocked
- test_get_org_filters_deleted / test_list_orgs_filters_deleted

Self-removal prevention (5 tests):
- test_remove_self_blocked
- test_remove_member_who_would_become_orgless_blocked
- test_remove_member_with_other_orgs_allowed
- test_remove_owner_blocked
- test_remove_nonexistent_member_raises

Default workspace protection (3 tests):
- test_update_default_workspace_join_policy_blocked
- test_update_default_workspace_name_allowed
- test_update_non_default_workspace_join_policy_allowed

Last-admin guard (3 tests):
- test_remove_last_workspace_admin_blocked
- test_remove_workspace_admin_when_others_exist_allowed
- test_remove_non_admin_always_allowed

Invitation idempotency (3 tests):
- test_accept_already_accepted_raises_400
- test_accept_expired_raises_400
- test_accept_wrong_email_raises_403

UpdateOrgData typed model (2 tests):
- test_update_org_with_typed_model
- test_update_org_empty_data_is_noop

Auth error message (1 test):
- test_auth_fallback_no_org_returns_contact_support

Also:
- Added orgs/conftest.py to override server and graph_cleanup fixtures
  (org tests mock at Prisma boundary, don't need full backend server)
- Fixed _make_org helper to include deletedAt=None
- Fixed existing tests for new function signatures

Total: 117 route tests + 29 migration tests + 15 credit tests + 72
permission tests = 233 org/workspace tests.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-03 19:22:16 +02:00
Nicholas Tindle
0161e262a2 fix(backend): fix 10 design gaps in org/workspace — conversion, deletion, auth safety
CRITICAL fixes:
- Conversion spawns new personal org (slug: {old}-personal-{n}) with
  rollback on failure — user always has a personal org
- Org deletion is now soft-delete (sets deletedAt) — preserves financial
  records, prevents FK constraint crashes
- Invitation accept catches UniqueViolationError — idempotent on race
- Self-removal blocked — prevents admin from removing themselves and
  becoming org-less
- Member removal checks other org memberships — blocks if user would be
  locked out (no other orgs)

HIGH fixes:
- Default workspace joinPolicy change blocked — preserves auto-join invariant
- Last workspace admin removal blocked — workspace stays manageable
- Auth fallback error now says 'contact support' instead of generic 400

MEDIUM fixes:
- update_org takes typed UpdateOrgData model instead of raw dict — prevents
  accidental isPersonal mutation
- Soft-deleted orgs filtered from list_user_orgs, get_org, auth fallback

Tests updated for new function signatures (convert takes user_id,
remove_org_member takes requesting_user_id, update_org takes UpdateOrgData).

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-03 18:44:25 +02:00
Nicholas Tindle
67004b5113 Merge branch 'feat/org-workspace-pr1' of https://github.com/Significant-Gravitas/AutoGPT into feat/org-workspace-pr1 2026-04-03 17:01:41 +02:00
Nicholas Tindle
166d1d2a75 refactor(backend): align org/ folder with codebase Pydantic patterns
Convert all org/workspace models and DB functions to match the
established backend patterns:

Models (model.py, workspace_model.py):
- All fields converted to snake_case (isPersonal → is_personal, etc.)
- Added from_db() staticmethods to convert Prisma camelCase → snake_case
- Moved InvitationResponse/CreateInvitationRequest from inline to model.py

DB functions (db.py, workspace_db.py):
- Return typed Pydantic models instead of raw dicts
- Use ModelName.from_db(prisma_obj) for all conversions
- Explicit typed return annotations on all functions

Routes (routes.py, workspace_routes.py, invitation_routes.py):
- Return Pydantic models directly from DB (no dict unpacking)
- Request field references use snake_case (request.user_id not userId)
- Import models from model.py (not inline definitions)

Tests (routes_test.py):
- Updated assertions from dict bracket access to Pydantic attribute access
- Mock objects use Prisma camelCase (from_db maps to snake_case)

All 205 tests pass. Zero type errors.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-03 16:48:29 +02:00
Nicholas Tindle
2c573f1add feat(backend): thread tenancy through webhooks, scheduler, SSE, and seeder
Fills the three must-do-before-merge gaps:

PR10 — Webhook/background execution tenancy:
- Add organization_id + org_workspace_id to ExecutionContext
- Add org/workspace fields to GraphExecutionJobArgs (scheduler)
- Pass tenant context through _execute_graph scheduled job
- Pass tenant context through add_graph_execution_schedule
- Update v1.py schedule creation route with RequestContext
- Webhook node/preset triggers resolve user's org/workspace via
  get_user_default_org_workspace() before calling add_graph_execution

PR11 — SSE/realtime tenancy:
- Add organization_id + org_workspace_id to CoPilotExecutionEntry
- Update enqueue_copilot_turn() to accept and pass org/workspace
- Chat stream_chat_post() resolves RequestContext and passes to enqueue
- Tenant context flows through RabbitMQ to executor service

Seeder:
- Add _get_user_org_ws() cache helper to TestDataCreator
- create_test_graphs() now passes org/workspace to create_graph()
- New get_user_default_org_workspace() helper in orgs/db.py

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-03 16:32:00 +02:00
Nicholas Tindle
93e3521fde Merge branch 'dev' into feat/org-workspace-pr1 2026-04-03 15:58:10 +02:00
Nicholas Tindle
019ad3cade fix(backend): address audit findings and add 205 comprehensive tests
Fixes all critical and high-priority issues from self-audit:

CRITICAL fixes:
- C1: spend_org_credits now uses atomic UPDATE WHERE balance >= amount
  (prevents race condition that could make balance negative)
- C2: All org write routes verify ctx.org_id == path org_id
  (prevents authorization bypass via mismatched X-Org-Id header)
- C3: WorkspaceInvite now has FK to OrgWorkspace with cascade delete
- C4: IntegrationCredential uses dedicated workspaceId column instead
  of polymorphic ownerId for the OrgWorkspace FK

HIGH fixes:
- H1: Workspace get/update/delete verify orgId matches path parameter
- H2: list_workspace_members checks org membership
- H3: create_org validates slug uniqueness against orgs + aliases
- H4: update_org validates new slug uniqueness and creates RENAME alias
- H5: Removed broken migration alias that wrote wrong slug
- H7: accept_invitation verifies accepting user's email matches invite
- H8: transfer_ownership uses single atomic SQL UPDATE for both members

Tests (205 total):
- 72 permission matrix tests (every action x role combination)
- 29 migration tests (slug resolution, idempotency, edge cases, field mapping)
- 15 credit tests (atomic spend, insufficient balance, top-up, seats)
- 89 route tests (org CRUD, workspace CRUD, invitations, auth bypass attempts)

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-03 15:18:31 +02:00
Nicholas Tindle
0a086970f8 fix(backend): fix Prisma create calls in org migration
- Use relation connects instead of raw FK fields for OrgMember,
  OrgWorkspace, OrgWorkspaceMember, OrganizationProfile
- Only include optional JSON fields (topUpConfig, socialLinks) when
  non-None to avoid Prisma MissingRequiredValueError

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-03 14:34:41 +02:00
Nicholas Tindle
736bde56c4 chore(platform): placeholder for tenancy cutover and legacy cleanup (PR18)
TODO (final cutover PR):
1. Make organizationId NOT NULL on all tenant-bound models
2. Switch all read paths to tenancy-first logic
3. Stop reading from User.integrations blob
4. Remove dual-write to blob
5. Remove dead compatibility code
6. Rebuild analytics/search/materialized views on org/workspace ownership
7. Add @@unique([owningOrgId, slug]) on StoreListing
8. Remove ENABLE_LEGACY_TENANCY_FALLBACK flag
9. Full regression suite must pass under tenancy-first mode

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-02 13:38:24 +02:00
Nicholas Tindle
fdc710b21a feat(frontend): add org/workspace state management, switcher, and provider (PR17)
Frontend foundation for org/workspace support:

State management (Zustand store):
- activeOrgID / activeWorkspaceID persisted to localStorage
- orgs / workspaces lists fetched from API
- switchOrg / switchWorkspace actions that clear React Query cache

OrgWorkspaceProvider:
- Fetches org list on login, sets personal org as default
- Clears context on logout
- Triggers cache invalidation on org switch

OrgWorkspaceSwitcher (Navbar component):
- Compact pill showing active org name + avatar
- Popover with org list (checkmark on active, Personal badge)
- Workspace list within active org (OPEN/PRIVATE indicators)
- Links to create org and manage workspaces

Integration:
- Added to providers.tsx (between CredentialsProvider and LaunchDarkly)
- Added to Navbar right section (before FeedbackButton)
- Header injection from PR1 ensures all API calls carry org/workspace context

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-02 13:38:10 +02:00
Nicholas Tindle
0870465bec chore(backend): placeholder for transfer workflows and admin alias mgmt (PR16)
TODO: Implement TransferRequest CRUD endpoints (initiate, list, approve,
reject, execute). Add admin alias inspection and removal.
Direct transfer for dual-admins, requested transfers require approval.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-01 17:53:23 +02:00
Nicholas Tindle
1cca5b0095 chore(backend): placeholder for marketplace org ownership (PR15)
TODO: Switch StoreListing ownership from owningUserId to owningOrgId.
Update create_store_submission to use org identity from RequestContext.
Add promotion-before-publish requirement for workspace agents.
Install permanent aliases for old user slugs.
Add copy/fork/transfer endpoints for agent portability.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-01 17:53:23 +02:00
Nicholas Tindle
b23618905f chore(backend): placeholder for API key and OAuth tenancy (PR14)
TODO: Add ownerType (USER/ORG), organizationId, workspaceIdRestriction
to API key creation/validation. Update token introspection to check org
context. Migrate existing keys as user-owned within personal org.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-01 17:53:23 +02:00
Nicholas Tindle
df32fe2634 feat(backend): add org-level credit operations and seat management (PR13)
Org credit system mirroring UserCreditBase for org-scoped billing:
- get_org_credits / spend_org_credits / top_up_org_credits
- get_org_transaction_history with workspace attribution
- get_seat_info / assign_seat / unassign_seat

Operates on OrgBalance and OrgCreditTransaction tables. Seat assignments
track FREE/PAID types with ACTIVE/INACTIVE status.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-01 17:53:09 +02:00
Nicholas Tindle
05a6e7a8f5 chore(backend): placeholder for file-space rename and shared files (PR12)
TODO: Rename UserWorkspace file-storage model internally. Add org/workspace
tenancy to file records. Support workspace-shared files.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-01 17:49:50 +02:00
Nicholas Tindle
d81e5b44d5 chore(backend): placeholder for realtime/notification tenancy (PR11)
TODO: Re-key websocket/SSE channels by org/workspace. Update notification
batching to carry org/workspace context.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-01 17:49:50 +02:00
Nicholas Tindle
eaa0bff281 chore(backend): placeholder for webhook/background tenancy context (PR10)
TODO: Thread org_id and workspace_id through webhook execution,
PendingHumanReview, background job payloads, and scoped credential
resolution in webhook-triggered execution.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-01 17:49:43 +02:00
Nicholas Tindle
d0160d4005 feat(backend): add workspace_id scoping to execution queries (PR9)
Update get_graph_executions() to accept workspace_id parameter.
When provided, filters by orgWorkspaceId instead of userId for
workspace-scoped execution visibility.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-01 11:02:22 +02:00
Nicholas Tindle
d86cb75e6e feat(backend): add workspace_id scoping to graph read paths (PR8)
Update get_graph() and get_graph_all_versions() to accept an optional
workspace_id parameter. When provided, queries filter by orgWorkspaceId
instead of userId, enabling workspace-scoped agent visibility.

Both functions maintain backward compatibility — workspace_id defaults
to None, which preserves the existing userId-based behavior.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-01 11:00:55 +02:00
Nicholas Tindle
181eb92b47 feat(backend): add scoped credential store for IntegrationCredential table (PR7)
New ScopedCredentialStore that reads/writes credentials from the
IntegrationCredential table with USER → WORKSPACE → ORG resolution.

Functions:
- get_scoped_credentials — list credentials visible in current context
- get_credential_by_id — get single credential with access check
- create_credential — create credential with scope (USER/WORKSPACE/ORG)
- delete_credential — soft-delete by setting status to revoked

This runs alongside the legacy IntegrationCredentialsStore (User.integrations
blob) during the dual-read transition. The legacy store is not modified.
Cutover happens in PR18.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-01 08:53:33 +02:00
Nicholas Tindle
e07762c062 feat(backend): add invitation API for org membership (PR6)
Email-based invitation flow for adding users to organizations:

Org-scoped (under /api/orgs/{org_id}/invitations):
- POST — create invitation with role flags + optional workspace IDs
- GET — list pending invitations (admin+)
- DELETE /{id} — revoke invitation (admin+)

Token-based (under /api/invitations):
- POST /{token}/accept — accept invitation (creates OrgMember, adds to
  default workspace + specified workspaces)
- POST /{token}/decline — decline/revoke invitation
- GET /pending — list pending invitations for current user's email

Invitations expire after 7 days. Acceptance is atomic — creates org
membership, workspace memberships, and marks invitation as accepted.

TODO: Wire up Postmark email sending for invitation notifications.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-01 08:51:35 +02:00
Nicholas Tindle
5e3f536b1c feat(backend): add workspace CRUD API endpoints (PR5)
Workspace management API at /api/orgs/{org_id}/workspaces:

- POST — create workspace (creator becomes admin, supports OPEN/PRIVATE)
- GET — list workspaces (OPEN + user's PRIVATE)
- GET /{ws_id} — workspace details
- PATCH /{ws_id} — update name/description/joinPolicy
- DELETE /{ws_id} — delete (not default workspace)
- POST /{ws_id}/join — self-join OPEN workspaces
- POST /{ws_id}/leave — leave (not default workspace)

Member management:
- GET /{ws_id}/members — list members
- POST /{ws_id}/members — add org member to workspace
- PATCH /{ws_id}/members/{uid} — update role flags
- DELETE /{ws_id}/members/{uid} — remove from workspace

All endpoints use RequestContext permission model. Workspace actions
require workspace-level permissions; creation requires org-level
CREATE_WORKSPACES permission.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-01 08:49:47 +02:00
Nicholas Tindle
71cb6ccd7e feat(backend): add org CRUD API endpoints (PR4)
Full organization management API at /api/orgs with:

Org lifecycle:
- POST /api/orgs — create org (auto-creates default workspace, owner
  membership, org profile, seat assignment)
- GET /api/orgs — list user's orgs
- GET /api/orgs/{org_id} — get org details
- PATCH /api/orgs/{org_id} — update org (admin+)
- DELETE /api/orgs/{org_id} — delete org (owner, not personal)
- POST /api/orgs/{org_id}/convert — convert personal→team

Member management:
- GET /api/orgs/{org_id}/members — list members
- POST /api/orgs/{org_id}/members — add member (auto-joins default workspace)
- PATCH /api/orgs/{org_id}/members/{uid} — update role flags
- DELETE /api/orgs/{org_id}/members/{uid} — remove (cascades workspace memberships)
- POST /api/orgs/{org_id}/transfer-ownership — transfer

Alias management:
- GET /api/orgs/{org_id}/aliases — list aliases
- POST /api/orgs/{org_id}/aliases — create alias

All endpoints use the RequestContext permission model from PR1.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-01 08:46:30 +02:00
Nicholas Tindle
c47ae1eea1 feat(backend): dual-write tenancy fields on create/update paths (PR3)
Update core create/update codepaths to write organizationId and
orgWorkspaceId alongside legacy userId fields. Reads remain
userId-based for backward compatibility.

Updated functions:
- graph.create_graph / __create_graph / fork_graph — accept and write
  organization_id + org_workspace_id to AgentGraphCreateInput
- execution.create_graph_execution — accept and write tenancy fields
- copilot/db.create_chat_session — accept and write tenancy fields
- executor/utils.add_graph_execution — thread tenancy params through

Updated routes (v1.py):
- create_new_graph — resolves RequestContext, passes org/workspace IDs
- update_graph — resolves RequestContext, passes org/workspace IDs
- execute_graph — resolves RequestContext, passes to execution chain

Test helpers in rest_api.py updated with synthetic RequestContext for
backward compatibility.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-01 08:40:26 +02:00
Nicholas Tindle
277362b5cc feat(backend): add personal org bootstrap migration (PR2)
Idempotent data migration that creates a personal Organization for every
existing user, migrates billing state, and assigns all tenant-bound
resources to the user's default workspace.

Migration steps (all idempotent):
- Create Organization per user (slug from Profile.username → User.name → email)
- Slug collision resolution with deterministic numeric suffixes
- Create OrgMember (owner), OrgWorkspace (default), OrgWorkspaceMember
- Create OrganizationProfile from user's Profile data
- Create OrganizationSeatAssignment (FREE seat)
- Copy UserBalance → OrgBalance
- Copy CreditTransaction → OrgCreditTransaction
- Set organizationId + orgWorkspaceId on: AgentGraph, AgentGraphExecution,
  ChatSession, AgentPreset, LibraryAgent, LibraryFolder, IntegrationWebhook,
  APIKey, BuilderSearchHistory, PendingHumanReview, StoreListingVersion
- Set owningOrgId on StoreListing
- Create OrganizationAlias for published agents with divergent slugs

Wired into rest_api.py lifespan to run on server startup.

23 tests covering: slug sanitization, collision resolution, org creation
with/without profiles, balance migration, resource assignment, orchestration.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-01 08:27:59 +02:00
Nicholas Tindle
51ab80425d feat(platform): add org/workspace schema, tenant context, and permissions (PR1)
Add the foundational schema and plumbing for first-class organization and
workspace support across the platform. This is PR1 of 18 in the org/workspace
feature sequence — no business behavior changes, only additive schema and
new auth infrastructure.

Schema:
- 11 new enums (WorkspaceJoinPolicy, ResourceVisibility, CredentialScope, etc.)
- 15 new models (Organization, OrgMember, OrgWorkspace, OrgWorkspaceMember,
  OrgInvitation, WorkspaceInvite, OrganizationAlias, OrganizationProfile,
  OrganizationSubscription, OrganizationSeatAssignment, OrgBalance,
  OrgCreditTransaction, TransferRequest, AuditLog, IntegrationCredential)
- Nullable tenancy columns (organizationId, orgWorkspaceId, visibility)
  added to: AgentGraph, AgentGraphExecution, ChatSession, AgentPreset,
  LibraryAgent, LibraryFolder, IntegrationWebhook, APIKey, StoreListing,
  StoreListingVersion, BuilderSearchHistory, PendingHumanReview, OAuthApplication

Auth:
- RequestContext dataclass (user_id, org_id, workspace_id, role flags, seat_status)
- get_request_context FastAPI dependency with X-Org-Id/X-Workspace-Id headers
- Permission model with OrgAction/WorkspaceAction enums and check functions
- requires_org_permission/requires_workspace_permission dependency factories
- 72 exhaustive permission matrix tests (every action × role combination)

Frontend:
- X-Org-Id and X-Workspace-Id header injection in custom-mutator.ts
- ACTIVE_ORG and ACTIVE_WORKSPACE localStorage keys

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-01 08:09:00 +02:00
59 changed files with 13806 additions and 758 deletions

View File

@@ -1,12 +1,21 @@
from .config import verify_settings
from .dependencies import (
get_optional_user_id,
get_request_context,
get_user_id,
requires_admin_user,
requires_org_permission,
requires_team_permission,
requires_user,
)
from .helpers import add_auth_responses_to_openapi
from .models import User
from .models import RequestContext, User
from .permissions import (
OrgAction,
TeamAction,
check_org_permission,
check_team_permission,
)
__all__ = [
"verify_settings",
@@ -14,6 +23,14 @@ __all__ = [
"requires_admin_user",
"requires_user",
"get_optional_user_id",
"get_request_context",
"requires_org_permission",
"requires_team_permission",
"add_auth_responses_to_openapi",
"User",
"RequestContext",
"OrgAction",
"TeamAction",
"check_org_permission",
"check_team_permission",
]

View File

@@ -10,7 +10,13 @@ import fastapi
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
from .jwt_utils import get_jwt_payload, verify_user
from .models import User
from .models import RequestContext, User
from .permissions import (
OrgAction,
TeamAction,
check_org_permission,
check_team_permission,
)
optional_bearer = HTTPBearer(auto_error=False)
@@ -115,3 +121,210 @@ async def get_user_id(
return impersonate_header
return user_id
# ---------------------------------------------------------------------------
# Org / Workspace context resolution
# ---------------------------------------------------------------------------
ORG_HEADER_NAME = "X-Org-Id"
TEAM_HEADER_NAME = "X-Team-Id"
async def get_request_context(
request: fastapi.Request,
jwt_payload: dict = fastapi.Security(get_jwt_payload),
) -> RequestContext:
"""
FastAPI dependency that resolves the full org/workspace context for a request.
Resolution order:
1. Extract user_id from JWT (supports admin impersonation via X-Act-As-User-Id).
2. Read X-Org-Id header; fall back to the user's personal org; fail if none.
3. Validate that the user has an ACTIVE OrgMember row for that org.
4. Read X-Team-Id header (optional). If set, validate that the
workspace belongs to the org AND the user has an TeamMember
row. On failure, silently fall back to None (org-home).
5. Populate all role flags and return a RequestContext.
"""
from backend.data.db import prisma # deferred -- only needed at runtime
# --- 1. user_id (reuse existing impersonation logic) ----------------------
user_id = jwt_payload.get("sub")
if not user_id:
raise fastapi.HTTPException(
status_code=401, detail="User ID not found in token"
)
impersonate_header = request.headers.get(IMPERSONATION_HEADER_NAME, "").strip()
if impersonate_header:
authenticated_user = verify_user(jwt_payload, admin_only=False)
if authenticated_user.role != "admin":
raise fastapi.HTTPException(
status_code=403,
detail="Only admin users can impersonate other users",
)
logger.info(
f"Admin impersonation: {authenticated_user.user_id} ({authenticated_user.email}) "
f"acting as user {impersonate_header} for requesting {request.method} {request.url}"
)
user_id = impersonate_header
# --- 2. org_id ------------------------------------------------------------
org_id = request.headers.get(ORG_HEADER_NAME, "").strip() or None
if org_id is None:
# Fall back to the user's personal org (an org where the user is the
# sole owner, typically created at sign-up).
personal_org = await prisma.orgmember.find_first(
where={
"userId": user_id,
"isOwner": True,
"Org": {"isPersonal": True},
},
order={"createdAt": "asc"},
)
if personal_org is None:
logger.warning(
f"User {user_id} has no personal org — account in inconsistent state"
)
raise fastapi.HTTPException(
status_code=400,
detail=(
"No organization context available. Your account may be in "
"an inconsistent state — please contact support."
),
)
org_id = personal_org.orgId
# --- 3. validate OrgMember ------------------------------------------------
org_member = await prisma.orgmember.find_unique(
where={
"orgId_userId": {"orgId": org_id, "userId": user_id},
},
)
if org_member is None or org_member.status != "ACTIVE":
raise fastapi.HTTPException(
status_code=403,
detail="User is not an active member of this organization",
)
is_org_owner = org_member.isOwner
is_org_admin = org_member.isAdmin
is_org_billing_manager = org_member.isBillingManager
seat_status = "ACTIVE" # validated above; seat assignment checked separately
# --- 4. team_id (optional) -------------------------------------------
team_id: str | None = (
request.headers.get(TEAM_HEADER_NAME, "").strip() or None
)
is_team_admin = False
is_team_billing_manager = False
if team_id is not None:
# Validate workspace belongs to org AND user has a membership row
ws_member = await prisma.teammember.find_unique(
where={
"teamId_userId": {
"teamId": team_id,
"userId": user_id,
},
},
include={"Team": True},
)
if (
ws_member is None
or ws_member.Team is None
or ws_member.Team.orgId != org_id
):
logger.debug(
"Workspace %s not valid for user %s in org %s; falling back to org-home",
team_id,
user_id,
org_id,
)
team_id = None
else:
is_team_admin = ws_member.isAdmin
is_team_billing_manager = ws_member.isBillingManager
# --- 5. build context -----------------------------------------------------
return RequestContext(
user_id=user_id,
org_id=org_id,
team_id=team_id,
is_org_owner=is_org_owner,
is_org_admin=is_org_admin,
is_org_billing_manager=is_org_billing_manager,
is_team_admin=is_team_admin,
is_team_billing_manager=is_team_billing_manager,
seat_status=seat_status,
)
def requires_org_permission(
*actions: OrgAction,
):
"""Factory returning a FastAPI dependency that enforces org-level permissions.
The request is allowed only if the user holds **all** listed actions.
Example::
@router.delete("/org/{org_id}")
async def delete_org(
ctx: RequestContext = Security(requires_org_permission(OrgAction.DELETE_ORG)),
):
...
"""
async def _dependency(
ctx: RequestContext = fastapi.Security(get_request_context),
) -> RequestContext:
for action in actions:
if not check_org_permission(ctx, action):
raise fastapi.HTTPException(
status_code=403,
detail=f"Missing org permission: {action.value}",
)
return ctx
return _dependency
def requires_team_permission(
*actions: TeamAction,
):
"""Factory returning a FastAPI dependency that enforces workspace-level permissions.
The user must be in a workspace context (team_id is set) and
hold **all** listed actions.
Example::
@router.post("/workspace/{ws_id}/agents")
async def create_agent(
ctx: RequestContext = Security(
requires_team_permission(TeamAction.CREATE_AGENTS)
),
):
...
"""
async def _dependency(
ctx: RequestContext = fastapi.Security(get_request_context),
) -> RequestContext:
if ctx.team_id is None:
raise fastapi.HTTPException(
status_code=400,
detail="Workspace context required for this action",
)
for action in actions:
if not check_team_permission(ctx, action):
raise fastapi.HTTPException(
status_code=403,
detail=f"Missing workspace permission: {action.value}",
)
return ctx
return _dependency

View File

@@ -20,3 +20,16 @@ class User:
phone_number=payload.get("phone", ""),
role=payload["role"],
)
@dataclass(frozen=True)
class RequestContext:
user_id: str
org_id: str
team_id: str | None # None = org-home context
is_org_owner: bool
is_org_admin: bool
is_org_billing_manager: bool
is_team_admin: bool
is_team_billing_manager: bool
seat_status: str # ACTIVE, INACTIVE, PENDING, NONE

View File

@@ -0,0 +1,115 @@
"""
Role-based permission checks for org-level and workspace-level actions.
Permission maps are pure data; check functions are pure functions operating
on a RequestContext -- no I/O, no database access.
"""
from enum import Enum
from .models import RequestContext
class OrgAction(str, Enum):
DELETE_ORG = "DELETE_ORG"
RENAME_ORG = "RENAME_ORG"
MANAGE_MEMBERS = "MANAGE_MEMBERS"
MANAGE_WORKSPACES = "MANAGE_WORKSPACES"
CREATE_WORKSPACES = "CREATE_WORKSPACES"
MANAGE_BILLING = "MANAGE_BILLING"
PUBLISH_TO_STORE = "PUBLISH_TO_STORE"
TRANSFER_RESOURCES = "TRANSFER_RESOURCES"
VIEW_ORG = "VIEW_ORG"
CREATE_RESOURCES = "CREATE_RESOURCES"
SHARE_RESOURCES = "SHARE_RESOURCES"
class TeamAction(str, Enum):
MANAGE_MEMBERS = "MANAGE_MEMBERS"
MANAGE_SETTINGS = "MANAGE_SETTINGS"
MANAGE_CREDENTIALS = "MANAGE_CREDENTIALS"
VIEW_SPEND = "VIEW_SPEND"
CREATE_AGENTS = "CREATE_AGENTS"
USE_CREDENTIALS = "USE_CREDENTIALS"
VIEW_EXECUTIONS = "VIEW_EXECUTIONS"
DELETE_AGENTS = "DELETE_AGENTS"
# Org permission map: action -> set of roles that are allowed.
# Roles checked via RequestContext boolean flags.
# "member" means any active org member (no special role flag required).
_ORG_PERMISSIONS: dict[OrgAction, set[str]] = {
OrgAction.DELETE_ORG: {"owner"},
OrgAction.RENAME_ORG: {"owner", "admin"},
OrgAction.MANAGE_MEMBERS: {"owner", "admin"},
OrgAction.MANAGE_WORKSPACES: {"owner", "admin"},
OrgAction.CREATE_WORKSPACES: {"owner", "admin", "billing_manager"},
OrgAction.MANAGE_BILLING: {"owner", "billing_manager"},
OrgAction.PUBLISH_TO_STORE: {"owner", "admin", "member"},
OrgAction.TRANSFER_RESOURCES: {"owner", "admin"},
OrgAction.VIEW_ORG: {"owner", "admin", "billing_manager", "member"},
OrgAction.CREATE_RESOURCES: {"owner", "admin", "member"},
OrgAction.SHARE_RESOURCES: {"owner", "admin", "member"},
}
# Workspace permission map: action -> set of roles that are allowed.
# "team_member" means any workspace member with no special workspace role.
_TEAM_PERMISSIONS: dict[TeamAction, set[str]] = {
TeamAction.MANAGE_MEMBERS: {"team_admin"},
TeamAction.MANAGE_SETTINGS: {"team_admin"},
TeamAction.MANAGE_CREDENTIALS: {"team_admin"},
TeamAction.VIEW_SPEND: {"team_admin", "team_billing_manager"},
TeamAction.CREATE_AGENTS: {"team_admin", "team_member"},
TeamAction.USE_CREDENTIALS: {"team_admin", "team_member"},
TeamAction.VIEW_EXECUTIONS: {"team_admin", "team_member"},
TeamAction.DELETE_AGENTS: {"team_admin"},
}
def _get_org_roles(ctx: RequestContext) -> set[str]:
"""Derive the set of org-level role tags from a RequestContext."""
roles: set[str] = set()
if ctx.is_org_owner:
roles.add("owner")
if ctx.is_org_admin:
roles.add("admin")
if ctx.is_org_billing_manager:
roles.add("billing_manager")
# A plain member (no owner/admin/billing_manager flags) gets "member".
# Owner and admin also get "member" since they can do everything a member can.
# Billing managers do NOT get "member" — they only get finance-related actions.
if ctx.is_org_owner or ctx.is_org_admin:
roles.add("member")
elif not ctx.is_org_billing_manager:
# Plain member with no special role flags
roles.add("member")
return roles
def _get_team_roles(ctx: RequestContext) -> set[str]:
"""Derive the set of workspace-level role tags from a RequestContext."""
roles: set[str] = set()
if ctx.is_team_admin:
roles.add("team_admin")
if ctx.is_team_billing_manager:
roles.add("team_billing_manager")
# Regular workspace members (not admin, not billing_manager) get team_member.
# WS admins also get team_member (they can do everything a member can).
if ctx.team_id is not None:
if ctx.is_team_admin or (not ctx.is_team_billing_manager):
roles.add("team_member")
return roles
def check_org_permission(ctx: RequestContext, action: OrgAction) -> bool:
"""Return True if the RequestContext grants the given org-level action."""
allowed_roles = _ORG_PERMISSIONS.get(action, set())
return bool(_get_org_roles(ctx) & allowed_roles)
def check_team_permission(ctx: RequestContext, action: TeamAction) -> bool:
"""Return True if the RequestContext grants the given workspace-level action."""
if ctx.team_id is None:
return False
allowed_roles = _TEAM_PERMISSIONS.get(action, set())
return bool(_get_team_roles(ctx) & allowed_roles)

View File

@@ -0,0 +1,262 @@
"""
Exhaustive tests for org-level and workspace-level permission checks.
Every OrgAction x role combination and every TeamAction x role
combination is covered. These are pure-function tests -- no mocking,
no database, no I/O.
"""
import pytest
from autogpt_libs.auth.models import RequestContext
from autogpt_libs.auth.permissions import (
OrgAction,
TeamAction,
check_org_permission,
check_team_permission,
)
# ---------------------------------------------------------------------------
# Helpers to build RequestContext fixtures for each role
# ---------------------------------------------------------------------------
def _make_ctx(
*,
is_org_owner: bool = False,
is_org_admin: bool = False,
is_org_billing_manager: bool = False,
is_team_admin: bool = False,
is_team_billing_manager: bool = False,
seat_status: str = "ACTIVE",
team_id: str | None = None,
) -> RequestContext:
return RequestContext(
user_id="user-1",
org_id="org-1",
team_id=team_id,
is_org_owner=is_org_owner,
is_org_admin=is_org_admin,
is_org_billing_manager=is_org_billing_manager,
is_team_admin=is_team_admin,
is_team_billing_manager=is_team_billing_manager,
seat_status=seat_status,
)
# Convenience contexts for org-level roles
ORG_OWNER = _make_ctx(is_org_owner=True)
ORG_ADMIN = _make_ctx(is_org_admin=True)
ORG_BILLING_MANAGER = _make_ctx(is_org_billing_manager=True)
ORG_MEMBER = _make_ctx() # ACTIVE seat, no special role flags
# Convenience contexts for workspace-level roles (team_id is set)
TEAM_ADMIN = _make_ctx(team_id="ws-1", is_team_admin=True)
TEAM_BILLING_MGR = _make_ctx(team_id="ws-1", is_team_billing_manager=True)
TEAM_MEMBER = _make_ctx(team_id="ws-1") # regular workspace member
# ---------------------------------------------------------------------------
# Org permission matrix
# ---------------------------------------------------------------------------
# Expected outcomes per (action, role). True = allowed.
_ORG_EXPECTED: dict[OrgAction, dict[str, bool]] = {
OrgAction.DELETE_ORG: {
"owner": True,
"admin": False,
"billing_manager": False,
"member": False,
},
OrgAction.RENAME_ORG: {
"owner": True,
"admin": True,
"billing_manager": False,
"member": False,
},
OrgAction.MANAGE_MEMBERS: {
"owner": True,
"admin": True,
"billing_manager": False,
"member": False,
},
OrgAction.MANAGE_WORKSPACES: {
"owner": True,
"admin": True,
"billing_manager": False,
"member": False,
},
OrgAction.CREATE_WORKSPACES: {
"owner": True,
"admin": True,
"billing_manager": True,
"member": False,
},
OrgAction.MANAGE_BILLING: {
"owner": True,
"admin": False,
"billing_manager": True,
"member": False,
},
OrgAction.PUBLISH_TO_STORE: {
"owner": True,
"admin": True,
"billing_manager": False,
"member": True,
},
OrgAction.TRANSFER_RESOURCES: {
"owner": True,
"admin": True,
"billing_manager": False,
"member": False,
},
OrgAction.VIEW_ORG: {
"owner": True,
"admin": True,
"billing_manager": True,
"member": True,
},
OrgAction.CREATE_RESOURCES: {
"owner": True,
"admin": True,
"billing_manager": False,
"member": True,
},
OrgAction.SHARE_RESOURCES: {
"owner": True,
"admin": True,
"billing_manager": False,
"member": True,
},
}
_ORG_ROLE_CTX = {
"owner": ORG_OWNER,
"admin": ORG_ADMIN,
"billing_manager": ORG_BILLING_MANAGER,
"member": ORG_MEMBER,
}
class TestOrgPermissions:
"""Exhaustive org action x role matrix."""
@pytest.mark.parametrize(
"action",
list(OrgAction),
ids=[a.value for a in OrgAction],
)
@pytest.mark.parametrize(
"role",
["owner", "admin", "billing_manager", "member"],
)
def test_org_permission_matrix(self, action: OrgAction, role: str):
ctx = _ORG_ROLE_CTX[role]
expected = _ORG_EXPECTED[action][role]
result = check_org_permission(ctx, action)
assert result is expected, (
f"OrgAction.{action.value} for role={role}: "
f"expected {expected}, got {result}"
)
def test_member_role_always_present_regardless_of_seat(self):
"""The 'member' role is implicit for all org members.
Seat-gating is enforced at the endpoint level, not in permission checks."""
ctx = _make_ctx(seat_status="INACTIVE")
# VIEW_ORG is allowed for members regardless of seat status
assert check_org_permission(ctx, OrgAction.VIEW_ORG) is True
def test_owner_with_inactive_seat_retains_all_owner_permissions(self):
"""Owner flag is independent of seat_status."""
ctx = _make_ctx(is_org_owner=True, seat_status="INACTIVE")
assert check_org_permission(ctx, OrgAction.DELETE_ORG) is True
assert check_org_permission(ctx, OrgAction.PUBLISH_TO_STORE) is True
# ---------------------------------------------------------------------------
# Workspace permission matrix
# ---------------------------------------------------------------------------
_TEAM_EXPECTED: dict[TeamAction, dict[str, bool]] = {
TeamAction.MANAGE_MEMBERS: {
"team_admin": True,
"team_billing_manager": False,
"team_member": False,
},
TeamAction.MANAGE_SETTINGS: {
"team_admin": True,
"team_billing_manager": False,
"team_member": False,
},
TeamAction.MANAGE_CREDENTIALS: {
"team_admin": True,
"team_billing_manager": False,
"team_member": False,
},
TeamAction.VIEW_SPEND: {
"team_admin": True,
"team_billing_manager": True,
"team_member": False,
},
TeamAction.CREATE_AGENTS: {
"team_admin": True,
"team_billing_manager": False,
"team_member": True,
},
TeamAction.USE_CREDENTIALS: {
"team_admin": True,
"team_billing_manager": False,
"team_member": True,
},
TeamAction.VIEW_EXECUTIONS: {
"team_admin": True,
"team_billing_manager": False,
"team_member": True,
},
TeamAction.DELETE_AGENTS: {
"team_admin": True,
"team_billing_manager": False,
"team_member": False,
},
}
_TEAM_ROLE_CTX = {
"team_admin": TEAM_ADMIN,
"team_billing_manager": TEAM_BILLING_MGR,
"team_member": TEAM_MEMBER,
}
class TestTeamPermissions:
"""Exhaustive workspace action x role matrix."""
@pytest.mark.parametrize(
"action",
list(TeamAction),
ids=[a.value for a in TeamAction],
)
@pytest.mark.parametrize(
"role",
["team_admin", "team_billing_manager", "team_member"],
)
def test_team_permission_matrix(self, action: TeamAction, role: str):
ctx = _TEAM_ROLE_CTX[role]
expected = _TEAM_EXPECTED[action][role]
result = check_team_permission(ctx, action)
assert result is expected, (
f"TeamAction.{action.value} for role={role}: "
f"expected {expected}, got {result}"
)
def test_no_team_context_denies_all(self):
"""Without a team_id, all workspace actions are denied."""
ctx = _make_ctx(is_team_admin=True) # no team_id
for action in TeamAction:
assert check_team_permission(ctx, action) is False
def test_team_billing_manager_is_not_team_member(self):
"""A workspace billing manager should NOT get team_member permissions."""
ctx = TEAM_BILLING_MGR
assert check_team_permission(ctx, TeamAction.CREATE_AGENTS) is False
assert check_team_permission(ctx, TeamAction.VIEW_SPEND) is True

View File

@@ -147,11 +147,18 @@ async def execute_graph(
),
) -> dict[str, Any]:
try:
# Resolve org/team from user's default (PR14 will add key-level org context)
from backend.api.features.orgs.db import get_user_default_team
org_id, team_id = await get_user_default_team(auth.user_id)
graph_exec = await add_graph_execution(
graph_id=graph_id,
user_id=auth.user_id,
inputs=node_input,
graph_version=graph_version,
organization_id=org_id,
team_id=team_id,
)
return {"id": graph_exec.id}
except Exception as e:

View File

@@ -723,6 +723,7 @@ async def stream_chat_post(
session_id: str,
request: StreamChatRequest,
user_id: str = Security(auth.get_user_id),
ctx: auth.RequestContext = Security(auth.get_request_context),
):
"""
Stream chat responses for a session (POST with context support).
@@ -866,6 +867,8 @@ async def stream_chat_post(
is_user_message=request.is_user_message,
context=request.context,
file_ids=sanitized_file_ids,
organization_id=ctx.org_id,
team_id=ctx.team_id,
mode=request.mode,
)

View File

@@ -1,375 +0,0 @@
import asyncio
import logging
from typing import Any, List
import autogpt_libs.auth as autogpt_auth_lib
from fastapi import APIRouter, HTTPException, Query, Security, status
from prisma.enums import ReviewStatus
from backend.copilot.constants import (
is_copilot_synthetic_id,
parse_node_id_from_exec_id,
)
from backend.data.execution import (
ExecutionContext,
ExecutionStatus,
get_graph_execution_meta,
get_node_executions,
)
from backend.data.graph import get_graph_settings
from backend.data.human_review import (
create_auto_approval_record,
get_pending_reviews_for_execution,
get_pending_reviews_for_user,
get_reviews_by_node_exec_ids,
has_pending_reviews_for_graph_exec,
process_all_reviews_for_execution,
)
from backend.data.model import USER_TIMEZONE_NOT_SET
from backend.data.user import get_user_by_id
from backend.data.workspace import get_or_create_workspace
from backend.executor.utils import add_graph_execution
from .model import PendingHumanReviewModel, ReviewRequest, ReviewResponse
logger = logging.getLogger(__name__)
router = APIRouter(
tags=["v2", "executions", "review"],
dependencies=[Security(autogpt_auth_lib.requires_user)],
)
async def _resolve_node_ids(
node_exec_ids: list[str],
graph_exec_id: str,
is_copilot: bool,
) -> dict[str, str]:
"""Resolve node_exec_id -> node_id for auto-approval records.
CoPilot synthetic IDs encode node_id in the format "{node_id}:{random}".
Graph executions look up node_id from NodeExecution records.
"""
if not node_exec_ids:
return {}
if is_copilot:
return {neid: parse_node_id_from_exec_id(neid) for neid in node_exec_ids}
node_execs = await get_node_executions(
graph_exec_id=graph_exec_id, include_exec_data=False
)
node_exec_map = {ne.node_exec_id: ne.node_id for ne in node_execs}
result = {}
for neid in node_exec_ids:
if neid in node_exec_map:
result[neid] = node_exec_map[neid]
else:
logger.error(
f"Failed to resolve node_id for {neid}: Node execution not found."
)
return result
@router.get(
"/pending",
summary="Get Pending Reviews",
response_model=List[PendingHumanReviewModel],
responses={
200: {"description": "List of pending reviews"},
500: {"description": "Server error", "content": {"application/json": {}}},
},
)
async def list_pending_reviews(
user_id: str = Security(autogpt_auth_lib.get_user_id),
page: int = Query(1, ge=1, description="Page number (1-indexed)"),
page_size: int = Query(25, ge=1, le=100, description="Number of reviews per page"),
) -> List[PendingHumanReviewModel]:
"""Get all pending reviews for the current user.
Retrieves all reviews with status "WAITING" that belong to the authenticated user.
Results are ordered by creation time (newest first).
Args:
user_id: Authenticated user ID from security dependency
Returns:
List of pending review objects with status converted to typed literals
Raises:
HTTPException: If authentication fails or database error occurs
Note:
Reviews with invalid status values are logged as warnings but excluded
from results rather than failing the entire request.
"""
return await get_pending_reviews_for_user(user_id, page, page_size)
@router.get(
"/execution/{graph_exec_id}",
summary="Get Pending Reviews for Execution",
response_model=List[PendingHumanReviewModel],
responses={
200: {"description": "List of pending reviews for the execution"},
404: {"description": "Graph execution not found"},
500: {"description": "Server error", "content": {"application/json": {}}},
},
)
async def list_pending_reviews_for_execution(
graph_exec_id: str,
user_id: str = Security(autogpt_auth_lib.get_user_id),
) -> List[PendingHumanReviewModel]:
"""Get all pending reviews for a specific graph execution.
Retrieves all reviews with status "WAITING" for the specified graph execution
that belong to the authenticated user. Results are ordered by creation time
(oldest first) to preserve review order within the execution.
Args:
graph_exec_id: ID of the graph execution to get reviews for
user_id: Authenticated user ID from security dependency
Returns:
List of pending review objects for the specified execution
Raises:
HTTPException:
- 404: If the graph execution doesn't exist or isn't owned by this user
- 500: If authentication fails or database error occurs
Note:
Only returns reviews owned by the authenticated user for security.
Reviews with invalid status are excluded with warning logs.
"""
# Verify user owns the graph execution before returning reviews
# (CoPilot synthetic IDs don't have graph execution records)
if not is_copilot_synthetic_id(graph_exec_id):
graph_exec = await get_graph_execution_meta(
user_id=user_id, execution_id=graph_exec_id
)
if not graph_exec:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Graph execution #{graph_exec_id} not found",
)
return await get_pending_reviews_for_execution(graph_exec_id, user_id)
@router.post("/action", response_model=ReviewResponse)
async def process_review_action(
request: ReviewRequest,
user_id: str = Security(autogpt_auth_lib.get_user_id),
) -> ReviewResponse:
"""Process reviews with approve or reject actions."""
# Collect all node exec IDs from the request
all_request_node_ids = {review.node_exec_id for review in request.reviews}
if not all_request_node_ids:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="At least one review must be provided",
)
# Batch fetch all requested reviews (regardless of status for idempotent handling)
reviews_map = await get_reviews_by_node_exec_ids(
list(all_request_node_ids), user_id
)
# Validate all reviews were found (must exist, any status is OK for now)
missing_ids = all_request_node_ids - set(reviews_map.keys())
if missing_ids:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Review(s) not found: {', '.join(missing_ids)}",
)
# Validate all reviews belong to the same execution
graph_exec_ids = {review.graph_exec_id for review in reviews_map.values()}
if len(graph_exec_ids) > 1:
raise HTTPException(
status_code=status.HTTP_409_CONFLICT,
detail="All reviews in a single request must belong to the same execution.",
)
graph_exec_id = next(iter(graph_exec_ids))
is_copilot = is_copilot_synthetic_id(graph_exec_id)
# Validate execution status for graph executions (skip for CoPilot synthetic IDs)
if not is_copilot:
graph_exec_meta = await get_graph_execution_meta(
user_id=user_id, execution_id=graph_exec_id
)
if not graph_exec_meta:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Graph execution #{graph_exec_id} not found",
)
if graph_exec_meta.status not in (
ExecutionStatus.REVIEW,
ExecutionStatus.INCOMPLETE,
):
raise HTTPException(
status_code=status.HTTP_409_CONFLICT,
detail=f"Cannot process reviews while execution status is {graph_exec_meta.status}",
)
# Build review decisions map and track which reviews requested auto-approval
# Auto-approved reviews use original data (no modifications allowed)
review_decisions = {}
auto_approve_requests = {} # Map node_exec_id -> auto_approve_future flag
for review in request.reviews:
review_status = (
ReviewStatus.APPROVED if review.approved else ReviewStatus.REJECTED
)
# If this review requested auto-approval, don't allow data modifications
reviewed_data = None if review.auto_approve_future else review.reviewed_data
review_decisions[review.node_exec_id] = (
review_status,
reviewed_data,
review.message,
)
auto_approve_requests[review.node_exec_id] = review.auto_approve_future
# Process all reviews
updated_reviews = await process_all_reviews_for_execution(
user_id=user_id,
review_decisions=review_decisions,
)
# Create auto-approval records for approved reviews that requested it
# Deduplicate by node_id to avoid race conditions when multiple reviews
# for the same node are processed in parallel
async def create_auto_approval_for_node(
node_id: str, review_result
) -> tuple[str, bool]:
"""
Create auto-approval record for a node.
Returns (node_id, success) tuple for tracking failures.
"""
try:
await create_auto_approval_record(
user_id=user_id,
graph_exec_id=review_result.graph_exec_id,
graph_id=review_result.graph_id,
graph_version=review_result.graph_version,
node_id=node_id,
payload=review_result.payload,
)
return (node_id, True)
except Exception as e:
logger.error(
f"Failed to create auto-approval record for node {node_id}",
exc_info=e,
)
return (node_id, False)
# Collect node_exec_ids that need auto-approval and resolve their node_ids
node_exec_ids_needing_auto_approval = [
node_exec_id
for node_exec_id, review_result in updated_reviews.items()
if review_result.status == ReviewStatus.APPROVED
and auto_approve_requests.get(node_exec_id, False)
]
node_id_map = await _resolve_node_ids(
node_exec_ids_needing_auto_approval, graph_exec_id, is_copilot
)
# Deduplicate by node_id — one auto-approval per node
nodes_needing_auto_approval: dict[str, Any] = {}
for node_exec_id in node_exec_ids_needing_auto_approval:
node_id = node_id_map.get(node_exec_id)
if node_id and node_id not in nodes_needing_auto_approval:
nodes_needing_auto_approval[node_id] = updated_reviews[node_exec_id]
# Execute all auto-approval creations in parallel (deduplicated by node_id)
auto_approval_results = await asyncio.gather(
*[
create_auto_approval_for_node(node_id, review_result)
for node_id, review_result in nodes_needing_auto_approval.items()
],
return_exceptions=True,
)
# Count auto-approval failures
auto_approval_failed_count = 0
for result in auto_approval_results:
if isinstance(result, Exception):
auto_approval_failed_count += 1
logger.error(
f"Unexpected exception during auto-approval creation: {result}"
)
elif isinstance(result, tuple) and len(result) == 2 and not result[1]:
auto_approval_failed_count += 1
# Count results
approved_count = sum(
1
for review in updated_reviews.values()
if review.status == ReviewStatus.APPROVED
)
rejected_count = sum(
1
for review in updated_reviews.values()
if review.status == ReviewStatus.REJECTED
)
# Resume graph execution only for real graph executions (not CoPilot)
# CoPilot sessions are resumed by the LLM retrying run_block with review_id
if not is_copilot and updated_reviews:
still_has_pending = await has_pending_reviews_for_graph_exec(graph_exec_id)
if not still_has_pending:
first_review = next(iter(updated_reviews.values()))
try:
user = await get_user_by_id(user_id)
settings = await get_graph_settings(
user_id=user_id, graph_id=first_review.graph_id
)
user_timezone = (
user.timezone if user.timezone != USER_TIMEZONE_NOT_SET else "UTC"
)
workspace = await get_or_create_workspace(user_id)
execution_context = ExecutionContext(
human_in_the_loop_safe_mode=settings.human_in_the_loop_safe_mode,
sensitive_action_safe_mode=settings.sensitive_action_safe_mode,
user_timezone=user_timezone,
workspace_id=workspace.id,
)
await add_graph_execution(
graph_id=first_review.graph_id,
user_id=user_id,
graph_exec_id=graph_exec_id,
execution_context=execution_context,
)
logger.info(f"Resumed execution {graph_exec_id}")
except Exception as e:
logger.error(f"Failed to resume execution {graph_exec_id}: {str(e)}")
# Build error message if auto-approvals failed
error_message = None
if auto_approval_failed_count > 0:
error_message = (
f"{auto_approval_failed_count} auto-approval setting(s) could not be saved. "
f"You may need to manually approve these reviews in future executions."
)
return ReviewResponse(
approved_count=approved_count,
rejected_count=rejected_count,
failed_count=auto_approval_failed_count,
error=error_message,
)

View File

@@ -489,11 +489,16 @@ async def _execute_webhook_node_trigger(
return
logger.debug(f"Executing graph #{node.graph_id} node #{node.id}")
try:
from backend.api.features.orgs.db import get_user_default_team
org_id, ws_id = await get_user_default_team(webhook.user_id)
await add_graph_execution(
user_id=webhook.user_id,
graph_id=node.graph_id,
graph_version=node.graph_version,
nodes_input_masks={node.id: {"payload": payload}},
organization_id=org_id,
team_id=ws_id,
)
except GraphNotInLibraryError as e:
logger.warning(
@@ -550,6 +555,9 @@ async def _execute_webhook_preset_trigger(
logger.debug(f"Executing preset #{preset.id} for webhook #{webhook.id}")
try:
from backend.api.features.orgs.db import get_user_default_team
org_id, ws_id = await get_user_default_team(webhook.user_id)
await add_graph_execution(
user_id=webhook.user_id,
graph_id=preset.graph_id,
@@ -557,6 +565,8 @@ async def _execute_webhook_preset_trigger(
graph_version=preset.graph_version,
graph_credentials_inputs=preset.credentials,
nodes_input_masks={trigger_node.id: {**preset.inputs, "payload": payload}},
organization_id=org_id,
team_id=ws_id,
)
except GraphNotInLibraryError as e:
logger.warning(

View File

@@ -24,6 +24,7 @@ async def test_get_library_agents(mocker):
userId="test-user",
isActive=True,
createdAt=datetime.now(),
visibility=prisma.enums.ResourceVisibility.PRIVATE,
)
]
@@ -49,6 +50,7 @@ async def test_get_library_agents(mocker):
userId="other-user",
isActive=True,
createdAt=datetime.now(),
visibility=prisma.enums.ResourceVisibility.PRIVATE,
),
)
]
@@ -113,6 +115,7 @@ async def test_add_agent_to_library(mocker):
userId="creator",
isActive=True,
createdAt=datetime.now(),
visibility=prisma.enums.ResourceVisibility.PRIVATE,
),
)

View File

@@ -1,5 +1,6 @@
import datetime
import prisma.enums
import prisma.models
import pytest
@@ -20,6 +21,7 @@ async def test_agent_preset_from_db(test_user_id: str):
isActive=True,
userId=test_user_id,
isDeleted=False,
visibility=prisma.enums.ResourceVisibility.PRIVATE,
InputPresets=[
prisma.models.AgentNodeExecutionInputOutput.model_validate(
{

View File

@@ -371,6 +371,9 @@ async def delete_preset(
async def execute_preset(
preset_id: str,
user_id: str = Security(autogpt_auth_lib.get_user_id),
ctx: autogpt_auth_lib.RequestContext = Security(
autogpt_auth_lib.get_request_context
),
inputs: dict[str, Any] = Body(..., embed=True, default_factory=dict),
credential_inputs: dict[str, CredentialsMetaInput] = Body(
..., embed=True, default_factory=dict
@@ -409,4 +412,6 @@ async def execute_preset(
preset_id=preset_id,
inputs=merged_node_input,
graph_credentials_inputs=merged_credential_inputs,
organization_id=ctx.org_id,
team_id=ctx.team_id,
)

View File

@@ -0,0 +1,19 @@
"""Override session-scoped fixtures for org tests.
Org tests mock at the Prisma boundary and don't need the full test server
or its graph cleanup hook.
"""
import pytest
@pytest.fixture(scope="session")
def server():
"""No-op — org tests don't need the full backend server."""
yield None
@pytest.fixture(scope="session", autouse=True)
def graph_cleanup():
"""No-op — org tests don't create real graphs."""
yield

View File

@@ -0,0 +1,573 @@
"""Database operations for organization management."""
import logging
from datetime import datetime, timezone
import prisma.errors
from backend.data.db import prisma
from backend.data.org_migration import _resolve_unique_slug, _sanitize_slug
from backend.util.exceptions import NotFoundError
from .model import OrgAliasResponse, OrgMemberResponse, OrgResponse, UpdateOrgData
logger = logging.getLogger(__name__)
# ---------------------------------------------------------------------------
# Utilities
# ---------------------------------------------------------------------------
async def get_user_default_team(
user_id: str,
) -> tuple[str | None, str | None]:
"""Get the user's personal org ID and its default workspace ID.
Returns (organization_id, team_id). Either may be None if
the user has no org (e.g., migration hasn't run yet).
"""
member = await prisma.orgmember.find_first(
where={
"userId": user_id,
"isOwner": True,
"Org": {"isPersonal": True, "deletedAt": None},
},
)
if member is None:
logger.warning(
f"User {user_id} has no personal org — account may be in inconsistent state"
)
return None, None
org_id = member.orgId
workspace = await prisma.team.find_first(
where={"orgId": org_id, "isDefault": True}
)
ws_id = workspace.id if workspace else None
return org_id, ws_id
async def _create_personal_org_for_user(
user_id: str,
slug_base: str,
display_name: str,
) -> OrgResponse:
"""Create a new personal org with all required records.
Used by both initial org creation (migration) and conversion (spawning
a new personal org when the old one becomes a team org).
"""
slug = await _resolve_unique_slug(slug_base)
org = await prisma.organization.create(
data={
"name": display_name,
"slug": slug,
"isPersonal": True,
"bootstrapUserId": user_id,
"settings": "{}",
}
)
await prisma.orgmember.create(
data={
"orgId": org.id,
"userId": user_id,
"isOwner": True,
"isAdmin": True,
"status": "ACTIVE",
}
)
workspace = await prisma.team.create(
data={
"name": "Default",
"orgId": org.id,
"isDefault": True,
"joinPolicy": "OPEN",
"createdByUserId": user_id,
}
)
await prisma.teammember.create(
data={
"teamId": workspace.id,
"userId": user_id,
"isAdmin": True,
"status": "ACTIVE",
}
)
await prisma.organizationprofile.create(
data={
"organizationId": org.id,
"username": slug,
"displayName": display_name,
}
)
await prisma.organizationseatassignment.create(
data={
"organizationId": org.id,
"userId": user_id,
"seatType": "FREE",
"status": "ACTIVE",
"assignedByUserId": user_id,
}
)
# Create zero-balance row so credit operations don't need upsert
await prisma.orgbalance.create(data={"orgId": org.id, "balance": 0})
return OrgResponse.from_db(org, member_count=1)
# ---------------------------------------------------------------------------
# Org CRUD
# ---------------------------------------------------------------------------
async def create_org(
name: str,
slug: str,
user_id: str,
description: str | None = None,
) -> OrgResponse:
"""Create a team organization and make the user the owner.
Raises:
ValueError: If the slug is already taken by another org or alias.
"""
existing_org = await prisma.organization.find_unique(where={"slug": slug})
if existing_org:
raise ValueError(f"Slug '{slug}' is already in use")
existing_alias = await prisma.organizationalias.find_unique(
where={"aliasSlug": slug}
)
if existing_alias:
raise ValueError(f"Slug '{slug}' is already in use as an alias")
org = await prisma.organization.create(
data={
"name": name,
"slug": slug,
"description": description,
"isPersonal": False,
"bootstrapUserId": user_id,
"settings": "{}",
}
)
await prisma.orgmember.create(
data={
"orgId": org.id,
"userId": user_id,
"isOwner": True,
"isAdmin": True,
"status": "ACTIVE",
}
)
workspace = await prisma.team.create(
data={
"name": "Default",
"orgId": org.id,
"isDefault": True,
"joinPolicy": "OPEN",
"createdByUserId": user_id,
}
)
await prisma.teammember.create(
data={
"teamId": workspace.id,
"userId": user_id,
"isAdmin": True,
"status": "ACTIVE",
}
)
await prisma.organizationprofile.create(
data={
"organizationId": org.id,
"username": slug,
"displayName": name,
}
)
await prisma.organizationseatassignment.create(
data={
"organizationId": org.id,
"userId": user_id,
"seatType": "FREE",
"status": "ACTIVE",
"assignedByUserId": user_id,
}
)
# Create zero-balance row so credit operations don't need upsert
await prisma.orgbalance.create(data={"orgId": org.id, "balance": 0})
return OrgResponse.from_db(org, member_count=1)
async def list_user_orgs(user_id: str) -> list[OrgResponse]:
"""List all non-deleted organizations the user belongs to."""
memberships = await prisma.orgmember.find_many(
where={
"userId": user_id,
"status": "ACTIVE",
"Org": {"deletedAt": None},
},
include={"Org": True},
)
results = []
for m in memberships:
org = m.Org
if org is None:
continue
results.append(OrgResponse.from_db(org))
return results
async def get_org(org_id: str) -> OrgResponse:
"""Get organization details."""
org = await prisma.organization.find_unique(where={"id": org_id})
if org is None or org.deletedAt is not None:
raise NotFoundError(f"Organization {org_id} not found")
return OrgResponse.from_db(org)
async def update_org(org_id: str, data: UpdateOrgData) -> OrgResponse:
"""Update organization fields. Creates a RENAME alias if slug changes.
Only accepts the structured UpdateOrgData model — no arbitrary dict keys.
"""
update_dict: dict = {}
if data.name is not None:
update_dict["name"] = data.name
if data.description is not None:
update_dict["description"] = data.description
if data.avatar_url is not None:
update_dict["avatarUrl"] = data.avatar_url
if data.slug is not None:
existing = await prisma.organization.find_unique(where={"slug": data.slug})
if existing and existing.id != org_id:
raise ValueError(f"Slug '{data.slug}' is already in use")
existing_alias = await prisma.organizationalias.find_unique(
where={"aliasSlug": data.slug}
)
if existing_alias:
raise ValueError(f"Slug '{data.slug}' is already in use as an alias")
old_org = await prisma.organization.find_unique(where={"id": org_id})
if old_org and old_org.slug != data.slug:
await prisma.organizationalias.create(
data={
"organizationId": org_id,
"aliasSlug": old_org.slug,
"aliasType": "RENAME",
}
)
update_dict["slug"] = data.slug
if not update_dict:
return await get_org(org_id)
await prisma.organization.update(where={"id": org_id}, data=update_dict)
# Sync OrganizationProfile when name or slug changes
profile_update: dict = {}
if data.name is not None:
profile_update["displayName"] = data.name
if data.slug is not None:
profile_update["username"] = data.slug
if profile_update:
await prisma.organizationprofile.update(
where={"organizationId": org_id},
data=profile_update,
)
return await get_org(org_id)
async def delete_org(org_id: str) -> None:
"""Soft-delete an organization. Cannot delete personal orgs.
Sets deletedAt instead of hard-deleting to preserve financial records.
"""
org = await prisma.organization.find_unique(where={"id": org_id})
if org is None:
raise NotFoundError(f"Organization {org_id} not found")
if org.isPersonal:
raise ValueError("Cannot delete a personal organization. Convert it first.")
if org.deletedAt is not None:
raise ValueError("Organization is already deleted")
await prisma.organization.update(
where={"id": org_id},
data={"deletedAt": datetime.now(timezone.utc)},
)
async def convert_personal_org(org_id: str, user_id: str) -> OrgResponse:
"""Convert a personal org to a team org.
Creates a new personal org for the user so they always have one.
Existing resources (agents, credits, store listings) stay in the
team org — that's the point of converting.
If new personal org creation fails, the conversion is rolled back.
"""
org = await prisma.organization.find_unique(where={"id": org_id})
if org is None:
raise NotFoundError(f"Organization {org_id} not found")
if not org.isPersonal:
raise ValueError("Organization is already a team org")
# Step 1: Flip isPersonal on the old org
await prisma.organization.update(
where={"id": org_id},
data={"isPersonal": False},
)
# Step 2: Create a new personal org for the user
try:
slug_base = f"{_sanitize_slug(org.slug)}-personal-1"
# Fetch user name for display
user = await prisma.user.find_unique(where={"id": user_id})
display_name = user.name if user and user.name else org.name
await _create_personal_org_for_user(
user_id=user_id,
slug_base=slug_base,
display_name=display_name,
)
except Exception:
# Roll back: restore isPersonal on the old org
logger.exception(
f"Failed to create new personal org for user {user_id} during "
f"conversion of org {org_id} — rolling back"
)
await prisma.organization.update(
where={"id": org_id},
data={"isPersonal": True},
)
raise
return await get_org(org_id)
# ---------------------------------------------------------------------------
# Members
# ---------------------------------------------------------------------------
async def list_org_members(org_id: str) -> list[OrgMemberResponse]:
"""List all active members of an organization."""
members = await prisma.orgmember.find_many(
where={"orgId": org_id, "status": "ACTIVE"},
include={"User": True},
)
return [OrgMemberResponse.from_db(m) for m in members]
async def add_org_member(
org_id: str,
user_id: str,
is_admin: bool = False,
is_billing_manager: bool = False,
invited_by: str | None = None,
) -> OrgMemberResponse:
"""Add a member to an organization and its default workspace."""
member = await prisma.orgmember.create(
data={
"orgId": org_id,
"userId": user_id,
"isAdmin": is_admin,
"isBillingManager": is_billing_manager,
"status": "ACTIVE",
"invitedByUserId": invited_by,
},
include={"User": True},
)
default_ws = await prisma.team.find_first(
where={"orgId": org_id, "isDefault": True}
)
if default_ws:
await prisma.teammember.create(
data={
"teamId": default_ws.id,
"userId": user_id,
"status": "ACTIVE",
}
)
return OrgMemberResponse.from_db(member)
async def update_org_member(
org_id: str, user_id: str, is_admin: bool | None, is_billing_manager: bool | None
) -> OrgMemberResponse:
"""Update a member's role flags."""
member = await prisma.orgmember.find_unique(
where={"orgId_userId": {"orgId": org_id, "userId": user_id}}
)
if member is None:
raise NotFoundError(f"Member {user_id} not found in org {org_id}")
if member.isOwner:
raise ValueError(
"Cannot change the owner's role flags directly. Use transfer-ownership."
)
update_data: dict = {}
if is_admin is not None:
update_data["isAdmin"] = is_admin
if is_billing_manager is not None:
update_data["isBillingManager"] = is_billing_manager
if update_data:
await prisma.orgmember.update(
where={"orgId_userId": {"orgId": org_id, "userId": user_id}},
data=update_data,
)
members = await list_org_members(org_id)
match = next((m for m in members if m.user_id == user_id), None)
if match is None:
raise NotFoundError(f"Member {user_id} not found in org {org_id} after update")
return match
async def remove_org_member(org_id: str, user_id: str, requesting_user_id: str) -> None:
"""Remove a member from an organization and all its workspaces.
Guards:
- Cannot remove the org owner (transfer ownership first)
- Cannot remove yourself (use leave flow instead)
- Cannot remove a user who has active schedules (transfer/cancel first)
- Cannot remove a user who would become org-less (no other org memberships)
"""
member = await prisma.orgmember.find_unique(
where={"orgId_userId": {"orgId": org_id, "userId": user_id}}
)
if member is None:
raise NotFoundError(f"Member {user_id} not found in org {org_id}")
if member.isOwner:
raise ValueError("Cannot remove the org owner. Transfer ownership first.")
if user_id == requesting_user_id:
raise ValueError(
"Cannot remove yourself from an organization. "
"Ask another admin to remove you, or transfer ownership first."
)
# Check if user would become org-less
other_memberships = await prisma.orgmember.count(
where={
"userId": user_id,
"status": "ACTIVE",
"orgId": {"not": org_id},
"Org": {"deletedAt": None},
}
)
if other_memberships == 0:
raise ValueError(
"Cannot remove this member — they have no other organization memberships "
"and would be locked out. They must join or create another org first."
)
# Check for active schedules
# TODO: Check APScheduler for active schedules owned by this user in this org
# For now, this is a placeholder for the schedule transfer requirement
# Remove from all workspaces in this org
workspaces = await prisma.team.find_many(where={"orgId": org_id})
for ws in workspaces:
await prisma.teammember.delete_many(
where={"teamId": ws.id, "userId": user_id}
)
# Remove org membership
await prisma.orgmember.delete(
where={"orgId_userId": {"orgId": org_id, "userId": user_id}}
)
async def transfer_ownership(
org_id: str, current_owner_id: str, new_owner_id: str
) -> None:
"""Transfer org ownership atomically. Both updates happen in one statement."""
if current_owner_id == new_owner_id:
raise ValueError("Cannot transfer ownership to the same user")
current = await prisma.orgmember.find_unique(
where={"orgId_userId": {"orgId": org_id, "userId": current_owner_id}}
)
if current is None or not current.isOwner:
raise ValueError("Current user is not the org owner")
new = await prisma.orgmember.find_unique(
where={"orgId_userId": {"orgId": org_id, "userId": new_owner_id}}
)
if new is None:
raise NotFoundError(f"User {new_owner_id} is not a member of org {org_id}")
await prisma.execute_raw(
"""
UPDATE "OrgMember"
SET "isOwner" = CASE
WHEN "userId" = $1 THEN false
WHEN "userId" = $2 THEN true
ELSE "isOwner"
END,
"isAdmin" = CASE
WHEN "userId" = $2 THEN true
ELSE "isAdmin"
END,
"updatedAt" = NOW()
WHERE "orgId" = $3 AND "userId" IN ($1, $2)
""",
current_owner_id,
new_owner_id,
org_id,
)
# ---------------------------------------------------------------------------
# Aliases
# ---------------------------------------------------------------------------
async def list_org_aliases(org_id: str) -> list[OrgAliasResponse]:
"""List all aliases for an organization."""
aliases = await prisma.organizationalias.find_many(
where={"organizationId": org_id, "removedAt": None}
)
return [OrgAliasResponse.from_db(a) for a in aliases]
async def create_org_alias(
org_id: str, alias_slug: str, user_id: str
) -> OrgAliasResponse:
"""Create a new alias for an organization."""
existing_org = await prisma.organization.find_unique(where={"slug": alias_slug})
if existing_org:
raise ValueError(f"Slug '{alias_slug}' is already used by an organization")
existing_alias = await prisma.organizationalias.find_unique(
where={"aliasSlug": alias_slug}
)
if existing_alias:
raise ValueError(f"Slug '{alias_slug}' is already used as an alias")
alias = await prisma.organizationalias.create(
data={
"organizationId": org_id,
"aliasSlug": alias_slug,
"aliasType": "MANUAL",
"createdByUserId": user_id,
}
)
return OrgAliasResponse.from_db(alias)

View File

@@ -0,0 +1,247 @@
"""Invitation API routes for organization membership."""
from datetime import datetime, timedelta, timezone
from typing import Annotated
from autogpt_libs.auth import get_user_id, requires_org_permission, requires_user
from autogpt_libs.auth.models import RequestContext
from autogpt_libs.auth.permissions import OrgAction
from fastapi import APIRouter, HTTPException, Security
from prisma.errors import UniqueViolationError
from backend.data.db import prisma
from backend.util.exceptions import NotFoundError
from . import db as org_db
from .model import CreateInvitationRequest, InvitationResponse
router = APIRouter()
INVITATION_TTL_DAYS = 7
def _verify_org_path(ctx: RequestContext, org_id: str) -> None:
"""Ensure the authenticated user's active org matches the path parameter."""
if ctx.org_id != org_id:
raise HTTPException(403, detail="Not a member of this organization")
# --- Org-scoped invitation endpoints (under /api/orgs/{org_id}/invitations) ---
org_router = APIRouter()
@org_router.post(
"",
summary="Create invitation",
tags=["orgs", "invitations"],
)
async def create_invitation(
org_id: str,
request: CreateInvitationRequest,
ctx: Annotated[
RequestContext,
Security(requires_org_permission(OrgAction.MANAGE_MEMBERS)),
],
) -> InvitationResponse:
_verify_org_path(ctx, org_id)
expires_at = datetime.now(timezone.utc) + timedelta(days=INVITATION_TTL_DAYS)
invitation = await prisma.orginvitation.create(
data={
"orgId": org_id,
"email": request.email,
"isAdmin": request.is_admin,
"isBillingManager": request.is_billing_manager,
"expiresAt": expires_at,
"invitedByUserId": ctx.user_id,
"teamIds": request.team_ids,
}
)
# TODO: Send email via Postmark with invitation link
# link = f"{frontend_base_url}/org/invite/{invitation.token}"
return InvitationResponse.from_db(invitation)
@org_router.get(
"",
summary="List pending invitations",
tags=["orgs", "invitations"],
)
async def list_invitations(
org_id: str,
ctx: Annotated[
RequestContext,
Security(requires_org_permission(OrgAction.MANAGE_MEMBERS)),
],
) -> list[InvitationResponse]:
_verify_org_path(ctx, org_id)
invitations = await prisma.orginvitation.find_many(
where={
"orgId": org_id,
"acceptedAt": None,
"revokedAt": None,
"expiresAt": {"gt": datetime.now(timezone.utc)},
},
order={"createdAt": "desc"},
)
return [InvitationResponse.from_db(inv) for inv in invitations]
@org_router.delete(
"/{invitation_id}",
summary="Revoke invitation",
tags=["orgs", "invitations"],
status_code=204,
)
async def revoke_invitation(
org_id: str,
invitation_id: str,
ctx: Annotated[
RequestContext,
Security(requires_org_permission(OrgAction.MANAGE_MEMBERS)),
],
) -> None:
_verify_org_path(ctx, org_id)
invitation = await prisma.orginvitation.find_unique(where={"id": invitation_id})
if invitation is None or invitation.orgId != org_id:
raise NotFoundError(f"Invitation {invitation_id} not found")
await prisma.orginvitation.update(
where={"id": invitation_id},
data={"revokedAt": datetime.now(timezone.utc)},
)
# --- Token-based endpoints (under /api/invitations) ---
@router.post(
"/{token}/accept",
summary="Accept invitation",
tags=["invitations"],
dependencies=[Security(requires_user)],
)
async def accept_invitation(
token: str,
user_id: Annotated[str, Security(get_user_id)],
) -> dict:
invitation = await prisma.orginvitation.find_unique(where={"token": token})
if invitation is None:
raise NotFoundError("Invitation not found")
if invitation.acceptedAt is not None:
raise HTTPException(400, detail="Invitation already accepted")
if invitation.revokedAt is not None:
raise HTTPException(400, detail="Invitation has been revoked")
if invitation.expiresAt < datetime.now(timezone.utc):
raise HTTPException(400, detail="Invitation has expired")
# Verify the accepting user's email matches the invitation
accepting_user = await prisma.user.find_unique(where={"id": user_id})
if accepting_user is None:
raise HTTPException(401, detail="User not found")
if accepting_user.email.lower() != invitation.email.lower():
raise HTTPException(
403,
detail="This invitation was sent to a different email address",
)
# Add user to org (idempotent — handles race condition from concurrent accepts)
try:
await org_db.add_org_member(
org_id=invitation.orgId,
user_id=user_id,
is_admin=invitation.isAdmin,
is_billing_manager=invitation.isBillingManager,
invited_by=invitation.invitedByUserId,
)
except UniqueViolationError:
# User is already a member — treat as success (idempotent)
pass
# Add to specified workspaces
for ws_id in invitation.teamIds:
try:
from . import team_db as team_db
await team_db.add_team_member(
ws_id=ws_id,
user_id=user_id,
org_id=invitation.orgId,
invited_by=invitation.invitedByUserId,
)
except Exception:
# Non-fatal -- workspace may have been deleted
pass
# Mark invitation as accepted
await prisma.orginvitation.update(
where={"id": invitation.id},
data={"acceptedAt": datetime.now(timezone.utc), "targetUserId": user_id},
)
return {"orgId": invitation.orgId, "message": "Invitation accepted"}
@router.post(
"/{token}/decline",
summary="Decline invitation",
tags=["invitations"],
dependencies=[Security(requires_user)],
status_code=204,
)
async def decline_invitation(
token: str,
user_id: Annotated[str, Security(get_user_id)],
) -> None:
invitation = await prisma.orginvitation.find_unique(where={"token": token})
if invitation is None:
raise NotFoundError("Invitation not found")
# State checks — same as accept_invitation
if invitation.acceptedAt is not None:
raise HTTPException(400, detail="Invitation already accepted")
if invitation.revokedAt is not None:
raise HTTPException(400, detail="Invitation already revoked")
if invitation.expiresAt < datetime.now(timezone.utc):
raise HTTPException(400, detail="Invitation has expired")
# Verify the declining user's email matches the invitation
declining_user = await prisma.user.find_unique(where={"id": user_id})
if declining_user is None:
raise HTTPException(401, detail="User not found")
if declining_user.email.lower() != invitation.email.lower():
raise HTTPException(403, detail="This invitation was sent to a different email address")
await prisma.orginvitation.update(
where={"id": invitation.id},
data={"revokedAt": datetime.now(timezone.utc)},
)
@router.get(
"/pending",
summary="List pending invitations for current user",
tags=["invitations"],
dependencies=[Security(requires_user)],
)
async def list_pending_for_user(
user_id: Annotated[str, Security(get_user_id)],
) -> list[InvitationResponse]:
# Get user's email
user = await prisma.user.find_unique(where={"id": user_id})
if user is None:
return []
invitations = await prisma.orginvitation.find_many(
where={
"email": user.email,
"acceptedAt": None,
"revokedAt": None,
"expiresAt": {"gt": datetime.now(timezone.utc)},
},
order={"createdAt": "desc"},
)
return [InvitationResponse.from_db(inv) for inv in invitations]

View File

@@ -0,0 +1,148 @@
"""Pydantic request/response models for organization management."""
from datetime import datetime
from pydantic import BaseModel, Field
class CreateOrgRequest(BaseModel):
name: str = Field(..., min_length=1, max_length=100)
slug: str = Field(
..., min_length=1, max_length=100, pattern=r"^[a-z0-9][a-z0-9-]*$"
)
description: str | None = None
class UpdateOrgRequest(BaseModel):
name: str | None = None
slug: str | None = Field(None, pattern=r"^[a-z0-9][a-z0-9-]*$")
description: str | None = None
avatar_url: str | None = None
class UpdateOrgData(BaseModel):
"""Structured data object for update_org DB function.
Only these fields can be updated — no arbitrary dict keys.
"""
name: str | None = None
slug: str | None = None
description: str | None = None
avatar_url: str | None = None
class OrgResponse(BaseModel):
id: str
name: str
slug: str
avatar_url: str | None
description: str | None
is_personal: bool
member_count: int
created_at: datetime
@staticmethod
def from_db(org, member_count: int = 0) -> "OrgResponse":
return OrgResponse(
id=org.id,
name=org.name,
slug=org.slug,
avatar_url=org.avatarUrl,
description=org.description,
is_personal=org.isPersonal,
member_count=member_count,
created_at=org.createdAt,
)
class OrgMemberResponse(BaseModel):
id: str
user_id: str
email: str
name: str | None
is_owner: bool
is_admin: bool
is_billing_manager: bool
joined_at: datetime
@staticmethod
def from_db(member) -> "OrgMemberResponse":
return OrgMemberResponse(
id=member.id,
user_id=member.userId,
email=member.User.email if member.User else "",
name=member.User.name if member.User else None,
is_owner=member.isOwner,
is_admin=member.isAdmin,
is_billing_manager=member.isBillingManager,
joined_at=member.joinedAt,
)
class AddMemberRequest(BaseModel):
user_id: str
is_admin: bool = False
is_billing_manager: bool = False
class UpdateMemberRequest(BaseModel):
is_admin: bool | None = None
is_billing_manager: bool | None = None
class TransferOwnershipRequest(BaseModel):
new_owner_id: str
class OrgAliasResponse(BaseModel):
id: str
alias_slug: str
alias_type: str
created_at: datetime
@staticmethod
def from_db(alias) -> "OrgAliasResponse":
return OrgAliasResponse(
id=alias.id,
alias_slug=alias.aliasSlug,
alias_type=alias.aliasType,
created_at=alias.createdAt,
)
class CreateAliasRequest(BaseModel):
alias_slug: str = Field(
..., min_length=1, max_length=100, pattern=r"^[a-z0-9][a-z0-9-]*$"
)
class CreateInvitationRequest(BaseModel):
email: str
is_admin: bool = False
is_billing_manager: bool = False
team_ids: list[str] = Field(default_factory=list)
class InvitationResponse(BaseModel):
id: str
email: str
is_admin: bool
is_billing_manager: bool
token: str
expires_at: datetime
created_at: datetime
team_ids: list[str]
@staticmethod
def from_db(inv) -> "InvitationResponse":
return InvitationResponse(
id=inv.id,
email=inv.email,
is_admin=inv.isAdmin,
is_billing_manager=inv.isBillingManager,
token=inv.token,
expires_at=inv.expiresAt,
created_at=inv.createdAt,
team_ids=inv.teamIds,
)

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,273 @@
"""Organization management API routes."""
from typing import Annotated
from autogpt_libs.auth import (
get_request_context,
get_user_id,
requires_org_permission,
requires_user,
)
from autogpt_libs.auth.models import RequestContext
from autogpt_libs.auth.permissions import OrgAction
from fastapi import APIRouter, HTTPException, Security
from . import db as org_db
from .model import (
AddMemberRequest,
CreateAliasRequest,
CreateOrgRequest,
OrgAliasResponse,
OrgMemberResponse,
OrgResponse,
TransferOwnershipRequest,
UpdateMemberRequest,
UpdateOrgData,
UpdateOrgRequest,
)
router = APIRouter()
def _verify_org_path(ctx: RequestContext, org_id: str) -> None:
"""Ensure the authenticated user's active org matches the path parameter.
Prevents authorization bypass where a user sends X-Org-Id for org A
but targets org B in the URL path.
"""
if ctx.org_id != org_id:
raise HTTPException(403, detail="Not a member of this organization")
@router.post(
"",
summary="Create organization",
tags=["orgs"],
dependencies=[Security(requires_user)],
)
async def create_org(
request: CreateOrgRequest,
user_id: Annotated[str, Security(get_user_id)],
) -> OrgResponse:
return await org_db.create_org(
name=request.name,
slug=request.slug,
user_id=user_id,
description=request.description,
)
@router.get(
"",
summary="List user organizations",
tags=["orgs"],
dependencies=[Security(requires_user)],
)
async def list_orgs(
user_id: Annotated[str, Security(get_user_id)],
) -> list[OrgResponse]:
return await org_db.list_user_orgs(user_id)
@router.get(
"/{org_id}",
summary="Get organization details",
tags=["orgs"],
)
async def get_org(
org_id: str,
ctx: Annotated[RequestContext, Security(get_request_context)],
) -> OrgResponse:
if ctx.org_id != org_id:
raise HTTPException(403, detail="Not a member of this organization")
return await org_db.get_org(org_id)
@router.patch(
"/{org_id}",
summary="Update organization",
tags=["orgs"],
)
async def update_org(
org_id: str,
request: UpdateOrgRequest,
ctx: Annotated[
RequestContext,
Security(requires_org_permission(OrgAction.RENAME_ORG)),
],
) -> OrgResponse:
_verify_org_path(ctx, org_id)
return await org_db.update_org(
org_id,
UpdateOrgData(
name=request.name,
slug=request.slug,
description=request.description,
avatar_url=request.avatar_url,
),
)
@router.delete(
"/{org_id}",
summary="Delete organization",
tags=["orgs"],
status_code=204,
)
async def delete_org(
org_id: str,
ctx: Annotated[
RequestContext,
Security(requires_org_permission(OrgAction.DELETE_ORG)),
],
) -> None:
_verify_org_path(ctx, org_id)
await org_db.delete_org(org_id)
@router.post(
"/{org_id}/convert",
summary="Convert personal org to team org",
tags=["orgs"],
)
async def convert_org(
org_id: str,
ctx: Annotated[
RequestContext,
Security(requires_org_permission(OrgAction.DELETE_ORG)),
],
) -> OrgResponse:
_verify_org_path(ctx, org_id)
return await org_db.convert_personal_org(org_id, ctx.user_id)
# --- Members ---
@router.get(
"/{org_id}/members",
summary="List organization members",
tags=["orgs"],
)
async def list_members(
org_id: str,
ctx: Annotated[RequestContext, Security(get_request_context)],
) -> list[OrgMemberResponse]:
if ctx.org_id != org_id:
raise HTTPException(403, detail="Not a member of this organization")
return await org_db.list_org_members(org_id)
@router.post(
"/{org_id}/members",
summary="Add member to organization",
tags=["orgs"],
)
async def add_member(
org_id: str,
request: AddMemberRequest,
ctx: Annotated[
RequestContext,
Security(requires_org_permission(OrgAction.MANAGE_MEMBERS)),
],
) -> OrgMemberResponse:
_verify_org_path(ctx, org_id)
return await org_db.add_org_member(
org_id=org_id,
user_id=request.user_id,
is_admin=request.is_admin,
is_billing_manager=request.is_billing_manager,
invited_by=ctx.user_id,
)
@router.patch(
"/{org_id}/members/{uid}",
summary="Update member role",
tags=["orgs"],
)
async def update_member(
org_id: str,
uid: str,
request: UpdateMemberRequest,
ctx: Annotated[
RequestContext,
Security(requires_org_permission(OrgAction.MANAGE_MEMBERS)),
],
) -> OrgMemberResponse:
_verify_org_path(ctx, org_id)
return await org_db.update_org_member(
org_id=org_id,
user_id=uid,
is_admin=request.is_admin,
is_billing_manager=request.is_billing_manager,
)
@router.delete(
"/{org_id}/members/{uid}",
summary="Remove member from organization",
tags=["orgs"],
status_code=204,
)
async def remove_member(
org_id: str,
uid: str,
ctx: Annotated[
RequestContext,
Security(requires_org_permission(OrgAction.MANAGE_MEMBERS)),
],
) -> None:
_verify_org_path(ctx, org_id)
await org_db.remove_org_member(org_id, uid, requesting_user_id=ctx.user_id)
@router.post(
"/{org_id}/transfer-ownership",
summary="Transfer organization ownership",
tags=["orgs"],
)
async def transfer_ownership(
org_id: str,
request: TransferOwnershipRequest,
ctx: Annotated[
RequestContext,
Security(requires_org_permission(OrgAction.DELETE_ORG)),
],
) -> None:
_verify_org_path(ctx, org_id)
await org_db.transfer_ownership(org_id, ctx.user_id, request.new_owner_id)
# --- Aliases ---
@router.get(
"/{org_id}/aliases",
summary="List organization aliases",
tags=["orgs"],
)
async def list_aliases(
org_id: str,
ctx: Annotated[RequestContext, Security(get_request_context)],
) -> list[OrgAliasResponse]:
if ctx.org_id != org_id:
raise HTTPException(403, detail="Not a member of this organization")
return await org_db.list_org_aliases(org_id)
@router.post(
"/{org_id}/aliases",
summary="Create organization alias",
tags=["orgs"],
)
async def create_alias(
org_id: str,
request: CreateAliasRequest,
ctx: Annotated[
RequestContext,
Security(requires_org_permission(OrgAction.RENAME_ORG)),
],
) -> OrgAliasResponse:
_verify_org_path(ctx, org_id)
return await org_db.create_org_alias(org_id, request.alias_slug, ctx.user_id)

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,234 @@
"""Database operations for workspace management."""
import logging
from backend.data.db import prisma
from backend.util.exceptions import NotFoundError
from .team_model import TeamMemberResponse, TeamResponse
logger = logging.getLogger(__name__)
async def create_team(
org_id: str,
name: str,
user_id: str,
description: str | None = None,
join_policy: str = "OPEN",
) -> TeamResponse:
"""Create a workspace and make the creator an admin."""
ws = await prisma.team.create(
data={
"name": name,
"orgId": org_id,
"description": description,
"joinPolicy": join_policy,
"createdByUserId": user_id,
}
)
# Creator becomes admin
await prisma.teammember.create(
data={
"teamId": ws.id,
"userId": user_id,
"isAdmin": True,
"status": "ACTIVE",
}
)
return TeamResponse.from_db(ws, member_count=1)
async def list_teams(org_id: str, user_id: str) -> list[TeamResponse]:
"""List workspaces: all OPEN workspaces + PRIVATE ones the user belongs to."""
workspaces = await prisma.team.find_many(
where={
"orgId": org_id,
"archivedAt": None,
"OR": [
{"joinPolicy": "OPEN"},
{"Members": {"some": {"userId": user_id, "status": "ACTIVE"}}},
],
},
order={"createdAt": "asc"},
)
return [TeamResponse.from_db(ws) for ws in workspaces]
async def get_team(
ws_id: str, expected_org_id: str | None = None
) -> TeamResponse:
"""Get workspace details. Validates org ownership if expected_org_id is given."""
ws = await prisma.team.find_unique(where={"id": ws_id})
if ws is None:
raise NotFoundError(f"Workspace {ws_id} not found")
if expected_org_id and ws.orgId != expected_org_id:
raise NotFoundError(f"Workspace {ws_id} not found in org {expected_org_id}")
return TeamResponse.from_db(ws)
async def update_team(ws_id: str, data: dict) -> TeamResponse:
"""Update workspace fields. Guards the default workspace join policy."""
update_data = {k: v for k, v in data.items() if v is not None}
if not update_data:
return await get_team(ws_id)
# Guard: default workspace joinPolicy cannot be changed
if "joinPolicy" in update_data:
ws = await prisma.team.find_unique(where={"id": ws_id})
if ws and ws.isDefault:
raise ValueError("Cannot change the default workspace's join policy")
await prisma.team.update(where={"id": ws_id}, data=update_data)
return await get_team(ws_id)
async def delete_team(ws_id: str) -> None:
"""Delete a workspace. Cannot delete the default workspace."""
ws = await prisma.team.find_unique(where={"id": ws_id})
if ws is None:
raise NotFoundError(f"Workspace {ws_id} not found")
if ws.isDefault:
raise ValueError("Cannot delete the default workspace")
await prisma.team.delete(where={"id": ws_id})
async def join_team(ws_id: str, user_id: str, org_id: str) -> TeamResponse:
"""Self-join an OPEN workspace. User must be an org member."""
ws = await prisma.team.find_unique(where={"id": ws_id})
if ws is None:
raise NotFoundError(f"Workspace {ws_id} not found")
if ws.orgId != org_id:
raise ValueError("Workspace does not belong to this organization")
if ws.joinPolicy != "OPEN":
raise ValueError("Cannot self-join a PRIVATE workspace. Request an invite.")
# Verify user is actually an org member
org_member = await prisma.orgmember.find_unique(
where={"orgId_userId": {"orgId": org_id, "userId": user_id}}
)
if org_member is None:
raise ValueError(f"User {user_id} is not a member of the organization")
# Check not already a member
existing = await prisma.teammember.find_unique(
where={"teamId_userId": {"teamId": ws_id, "userId": user_id}}
)
if existing:
return TeamResponse.from_db(ws)
await prisma.teammember.create(
data={
"teamId": ws_id,
"userId": user_id,
"status": "ACTIVE",
}
)
return TeamResponse.from_db(ws)
async def leave_team(ws_id: str, user_id: str) -> None:
"""Leave a workspace. Cannot leave the default workspace."""
ws = await prisma.team.find_unique(where={"id": ws_id})
if ws is None:
raise NotFoundError(f"Workspace {ws_id} not found")
if ws.isDefault:
raise ValueError("Cannot leave the default workspace")
await prisma.teammember.delete_many(
where={"teamId": ws_id, "userId": user_id}
)
async def list_team_members(ws_id: str) -> list[TeamMemberResponse]:
"""List all active members of a workspace."""
members = await prisma.teammember.find_many(
where={"teamId": ws_id, "status": "ACTIVE"},
include={"User": True},
)
return [TeamMemberResponse.from_db(m) for m in members]
async def add_team_member(
ws_id: str,
user_id: str,
org_id: str,
is_admin: bool = False,
is_billing_manager: bool = False,
invited_by: str | None = None,
) -> TeamMemberResponse:
"""Add a member to a workspace. Must be an org member, workspace must belong to org."""
# Verify workspace belongs to the org
ws = await prisma.team.find_unique(where={"id": ws_id})
if ws is None or ws.orgId != org_id:
raise ValueError(f"Workspace {ws_id} does not belong to org {org_id}")
# Verify user is in the org
org_member = await prisma.orgmember.find_unique(
where={"orgId_userId": {"orgId": org_id, "userId": user_id}}
)
if org_member is None:
raise ValueError(f"User {user_id} is not a member of the organization")
member = await prisma.teammember.create(
data={
"teamId": ws_id,
"userId": user_id,
"isAdmin": is_admin,
"isBillingManager": is_billing_manager,
"status": "ACTIVE",
"invitedByUserId": invited_by,
},
include={"User": True},
)
return TeamMemberResponse.from_db(member)
async def update_team_member(
ws_id: str,
user_id: str,
is_admin: bool | None,
is_billing_manager: bool | None,
) -> TeamMemberResponse:
"""Update a workspace member's role flags."""
update_data: dict = {}
if is_admin is not None:
update_data["isAdmin"] = is_admin
if is_billing_manager is not None:
update_data["isBillingManager"] = is_billing_manager
if update_data:
await prisma.teammember.update(
where={"teamId_userId": {"teamId": ws_id, "userId": user_id}},
data=update_data,
)
members = await list_team_members(ws_id)
return next(m for m in members if m.user_id == user_id)
async def remove_team_member(ws_id: str, user_id: str) -> None:
"""Remove a member from a workspace.
Guards against removing the last admin — workspace would become unmanageable.
"""
# Check if this would remove the last admin
member = await prisma.teammember.find_unique(
where={"teamId_userId": {"teamId": ws_id, "userId": user_id}}
)
if member and member.isAdmin:
admin_count = await prisma.teammember.count(
where={"teamId": ws_id, "isAdmin": True, "status": "ACTIVE"}
)
if admin_count <= 1:
raise ValueError(
"Cannot remove the last workspace admin. "
"Promote another member to admin first."
)
await prisma.teammember.delete(
where={"teamId_userId": {"teamId": ws_id, "userId": user_id}}
)

View File

@@ -0,0 +1,76 @@
"""Pydantic request/response models for workspace management."""
from datetime import datetime
from pydantic import BaseModel, Field
class CreateTeamRequest(BaseModel):
name: str = Field(..., min_length=1, max_length=100)
description: str | None = None
join_policy: str = "OPEN" # OPEN or PRIVATE
class UpdateTeamRequest(BaseModel):
name: str | None = None
description: str | None = None
join_policy: str | None = None # OPEN or PRIVATE
class TeamResponse(BaseModel):
id: str
name: str
slug: str | None
description: str | None
is_default: bool
join_policy: str
org_id: str
member_count: int
created_at: datetime
@staticmethod
def from_db(ws, member_count: int = 0) -> "TeamResponse":
return TeamResponse(
id=ws.id,
name=ws.name,
slug=ws.slug,
description=ws.description,
is_default=ws.isDefault,
join_policy=ws.joinPolicy,
org_id=ws.orgId,
member_count=member_count,
created_at=ws.createdAt,
)
class TeamMemberResponse(BaseModel):
id: str
user_id: str
email: str
name: str | None
is_admin: bool
is_billing_manager: bool
joined_at: datetime
@staticmethod
def from_db(member) -> "TeamMemberResponse":
return TeamMemberResponse(
id=member.id,
user_id=member.userId,
email=member.User.email if member.User else "",
name=member.User.name if member.User else None,
is_admin=member.isAdmin,
is_billing_manager=member.isBillingManager,
joined_at=member.joinedAt,
)
class AddTeamMemberRequest(BaseModel):
user_id: str
is_admin: bool = False
is_billing_manager: bool = False
class UpdateTeamMemberRequest(BaseModel):
is_admin: bool | None = None
is_billing_manager: bool | None = None

View File

@@ -0,0 +1,236 @@
"""Workspace management API routes (nested under /api/orgs/{org_id}/workspaces)."""
from typing import Annotated
from autogpt_libs.auth import (
get_request_context,
requires_org_permission,
requires_team_permission,
)
from autogpt_libs.auth.models import RequestContext
from autogpt_libs.auth.permissions import OrgAction, TeamAction
from fastapi import APIRouter, HTTPException, Security
from . import team_db as team_db
from .team_model import (
AddTeamMemberRequest,
CreateTeamRequest,
UpdateTeamMemberRequest,
UpdateTeamRequest,
TeamMemberResponse,
TeamResponse,
)
router = APIRouter()
@router.post(
"",
summary="Create workspace",
tags=["orgs", "workspaces"],
)
async def create_team(
org_id: str,
request: CreateTeamRequest,
ctx: Annotated[
RequestContext,
Security(requires_org_permission(OrgAction.CREATE_WORKSPACES)),
],
) -> TeamResponse:
if ctx.org_id != org_id:
raise HTTPException(403, detail="Not a member of this organization")
return await team_db.create_team(
org_id=org_id,
name=request.name,
user_id=ctx.user_id,
description=request.description,
join_policy=request.join_policy,
)
@router.get(
"",
summary="List workspaces",
tags=["orgs", "workspaces"],
)
async def list_teams(
org_id: str,
ctx: Annotated[RequestContext, Security(get_request_context)],
) -> list[TeamResponse]:
if ctx.org_id != org_id:
raise HTTPException(403, detail="Not a member of this organization")
return await team_db.list_teams(org_id, ctx.user_id)
@router.get(
"/{ws_id}",
summary="Get workspace details",
tags=["orgs", "workspaces"],
)
async def get_team(
org_id: str,
ws_id: str,
ctx: Annotated[RequestContext, Security(get_request_context)],
) -> TeamResponse:
if ctx.org_id != org_id:
raise HTTPException(403, detail="Not a member of this organization")
return await team_db.get_team(ws_id, expected_org_id=org_id)
@router.patch(
"/{ws_id}",
summary="Update workspace",
tags=["orgs", "workspaces"],
)
async def update_team(
org_id: str,
ws_id: str,
request: UpdateTeamRequest,
ctx: Annotated[
RequestContext,
Security(requires_team_permission(TeamAction.MANAGE_SETTINGS)),
],
) -> TeamResponse:
# Verify workspace belongs to org (ctx validates workspace membership)
await team_db.get_team(ws_id, expected_org_id=org_id)
return await team_db.update_team(
ws_id,
{
"name": request.name,
"description": request.description,
"joinPolicy": request.join_policy,
},
)
@router.delete(
"/{ws_id}",
summary="Delete workspace",
tags=["orgs", "workspaces"],
status_code=204,
)
async def delete_team(
org_id: str,
ws_id: str,
ctx: Annotated[
RequestContext,
Security(requires_org_permission(OrgAction.MANAGE_WORKSPACES)),
],
) -> None:
await team_db.get_team(ws_id, expected_org_id=org_id)
await team_db.delete_team(ws_id)
@router.post(
"/{ws_id}/join",
summary="Self-join open workspace",
tags=["orgs", "workspaces"],
)
async def join_team(
org_id: str,
ws_id: str,
ctx: Annotated[RequestContext, Security(get_request_context)],
) -> TeamResponse:
if ctx.org_id != org_id:
raise HTTPException(403, detail="Not a member of this organization")
return await team_db.join_team(ws_id, ctx.user_id, org_id)
@router.post(
"/{ws_id}/leave",
summary="Leave workspace",
tags=["orgs", "workspaces"],
status_code=204,
)
async def leave_team(
org_id: str,
ws_id: str,
ctx: Annotated[RequestContext, Security(get_request_context)],
) -> None:
if ctx.org_id != org_id:
raise HTTPException(403, detail="Not a member of this organization")
await team_db.leave_team(ws_id, ctx.user_id)
# --- Members ---
@router.get(
"/{ws_id}/members",
summary="List workspace members",
tags=["orgs", "workspaces"],
)
async def list_members(
org_id: str,
ws_id: str,
ctx: Annotated[RequestContext, Security(get_request_context)],
) -> list[TeamMemberResponse]:
if ctx.org_id != org_id:
raise HTTPException(403, detail="Not a member of this organization")
await team_db.get_team(ws_id, expected_org_id=org_id)
return await team_db.list_team_members(ws_id)
@router.post(
"/{ws_id}/members",
summary="Add member to workspace",
tags=["orgs", "workspaces"],
)
async def add_member(
org_id: str,
ws_id: str,
request: AddTeamMemberRequest,
ctx: Annotated[
RequestContext,
Security(requires_team_permission(TeamAction.MANAGE_MEMBERS)),
],
) -> TeamMemberResponse:
return await team_db.add_team_member(
ws_id=ws_id,
user_id=request.user_id,
org_id=org_id,
is_admin=request.is_admin,
is_billing_manager=request.is_billing_manager,
invited_by=ctx.user_id,
)
@router.patch(
"/{ws_id}/members/{uid}",
summary="Update workspace member role",
tags=["orgs", "workspaces"],
)
async def update_member(
org_id: str,
ws_id: str,
uid: str,
request: UpdateTeamMemberRequest,
ctx: Annotated[
RequestContext,
Security(requires_team_permission(TeamAction.MANAGE_MEMBERS)),
],
) -> TeamMemberResponse:
return await team_db.update_team_member(
ws_id=ws_id,
user_id=uid,
is_admin=request.is_admin,
is_billing_manager=request.is_billing_manager,
)
@router.delete(
"/{ws_id}/members/{uid}",
summary="Remove member from workspace",
tags=["orgs", "workspaces"],
status_code=204,
)
async def remove_member(
org_id: str,
ws_id: str,
uid: str,
ctx: Annotated[
RequestContext,
Security(requires_team_permission(TeamAction.MANAGE_MEMBERS)),
],
) -> None:
await team_db.remove_team_member(ws_id, uid)

View File

@@ -730,6 +730,7 @@ async def create_store_submission(
categories: list[str] = [],
changes_summary: str | None = "Initial Submission",
recommended_schedule_cron: str | None = None,
organization_id: str | None = None,
) -> store_model.StoreSubmission:
"""
Create the first (and only) store listing and thus submission as a normal user
@@ -858,6 +859,7 @@ async def create_store_submission(
"agentGraphId": graph_id,
"OwningUser": {"connect": {"id": user_id}},
"CreatorProfile": {"connect": {"userId": user_id}},
**({"owningOrgId": organization_id} if organization_id else {}),
},
}
},

View File

@@ -197,6 +197,7 @@ async def test_create_store_submission(mocker):
userId="user-id",
createdAt=now,
isActive=True,
visibility=prisma.enums.ResourceVisibility.PRIVATE,
StoreListingVersions=[],
User=mock_user,
)

View File

@@ -384,6 +384,9 @@ async def get_submissions(
async def create_submission(
submission_request: store_model.StoreSubmissionRequest,
user_id: str = Security(autogpt_libs.auth.get_user_id),
ctx: autogpt_libs.auth.RequestContext = Security(
autogpt_libs.auth.get_request_context
),
) -> store_model.StoreSubmission:
"""Submit a new marketplace listing for review"""
result = await store_db.create_store_submission(
@@ -401,6 +404,7 @@ async def create_submission(
categories=submission_request.categories,
changes_summary=submission_request.changes_summary or "Initial Submission",
recommended_schedule_cron=submission_request.recommended_schedule_cron,
organization_id=ctx.org_id,
)
return result

View File

@@ -0,0 +1,279 @@
"""Database operations for resource transfer management."""
import logging
from datetime import datetime, timezone
from backend.data.db import prisma
from backend.util.exceptions import NotFoundError
from .model import TransferResponse
logger = logging.getLogger(__name__)
_VALID_RESOURCE_TYPES = {"AgentGraph", "StoreListing"}
async def create_transfer(
source_org_id: str,
target_org_id: str,
resource_type: str,
resource_id: str,
user_id: str,
reason: str | None = None,
) -> TransferResponse:
"""Create a new transfer request from source org to target org.
Validates:
- resource_type is one of the allowed types
- source and target orgs are different
- target org exists
- the resource exists and belongs to the source org
"""
if resource_type not in _VALID_RESOURCE_TYPES:
raise ValueError(
f"Invalid resource_type '{resource_type}'. "
f"Must be one of: {', '.join(sorted(_VALID_RESOURCE_TYPES))}"
)
if source_org_id == target_org_id:
raise ValueError("Source and target organizations must be different")
target_org = await prisma.organization.find_unique(
where={"id": target_org_id}
)
if target_org is None or target_org.deletedAt is not None:
raise NotFoundError(f"Target organization {target_org_id} not found")
await _validate_resource_ownership(resource_type, resource_id, source_org_id)
tr = await prisma.transferrequest.create(
data={
"resourceType": resource_type,
"resourceId": resource_id,
"sourceOrganizationId": source_org_id,
"targetOrganizationId": target_org_id,
"initiatedByUserId": user_id,
"status": "PENDING",
"reason": reason,
}
)
return TransferResponse.from_db(tr)
async def list_transfers(org_id: str) -> list[TransferResponse]:
"""List all transfer requests where org is source OR target."""
transfers = await prisma.transferrequest.find_many(
where={
"OR": [
{"sourceOrganizationId": org_id},
{"targetOrganizationId": org_id},
]
},
order={"createdAt": "desc"},
)
return [TransferResponse.from_db(t) for t in transfers]
async def approve_transfer(
transfer_id: str,
user_id: str,
org_id: str,
) -> TransferResponse:
"""Approve a transfer from the source or target side.
- If user's active org is the source org, sets sourceApprovedByUserId.
- If user's active org is the target org, sets targetApprovedByUserId.
- Advances the status accordingly.
"""
tr = await prisma.transferrequest.find_unique(where={"id": transfer_id})
if tr is None:
raise NotFoundError(f"Transfer request {transfer_id} not found")
if tr.status in ("COMPLETED", "REJECTED"):
raise ValueError(f"Cannot approve a transfer with status '{tr.status}'")
update_data: dict = {}
if org_id == tr.sourceOrganizationId:
if tr.sourceApprovedByUserId is not None:
raise ValueError("Source organization has already approved this transfer")
update_data["sourceApprovedByUserId"] = user_id
if tr.targetApprovedByUserId is not None:
# Both sides approved — ready for execution (NOT completed yet)
update_data["status"] = "TARGET_APPROVED"
else:
update_data["status"] = "SOURCE_APPROVED"
elif org_id == tr.targetOrganizationId:
if tr.targetApprovedByUserId is not None:
raise ValueError("Target organization has already approved this transfer")
update_data["targetApprovedByUserId"] = user_id
if tr.sourceApprovedByUserId is not None:
# Both sides approved — ready for execution (NOT completed yet)
update_data["status"] = "SOURCE_APPROVED"
else:
update_data["status"] = "TARGET_APPROVED"
else:
raise ValueError(
"Your active organization is not a party to this transfer"
)
updated = await prisma.transferrequest.update(
where={"id": transfer_id},
data=update_data,
)
return TransferResponse.from_db(updated)
async def reject_transfer(
transfer_id: str,
user_id: str,
org_id: str,
) -> TransferResponse:
"""Reject a pending transfer request. Caller must be in source or target org."""
tr = await prisma.transferrequest.find_unique(where={"id": transfer_id})
if tr is None:
raise NotFoundError(f"Transfer request {transfer_id} not found")
if tr.status in ("COMPLETED", "REJECTED"):
raise ValueError(f"Cannot reject a transfer with status '{tr.status}'")
if org_id not in (tr.sourceOrganizationId, tr.targetOrganizationId):
raise ValueError(
"Your active organization is not a party to this transfer"
)
updated = await prisma.transferrequest.update(
where={"id": transfer_id},
data={"status": "REJECTED"},
)
return TransferResponse.from_db(updated)
async def execute_transfer(
transfer_id: str,
user_id: str,
) -> TransferResponse:
"""Execute an approved transfer -- move the resource to the target org.
Requires both source and target approvals. Updates the resource's
organization ownership and creates AuditLog entries for both orgs.
"""
tr = await prisma.transferrequest.find_unique(where={"id": transfer_id})
if tr is None:
raise NotFoundError(f"Transfer request {transfer_id} not found")
if tr.sourceApprovedByUserId is None or tr.targetApprovedByUserId is None:
raise ValueError(
"Transfer requires approval from both source and target organizations"
)
if tr.status == "COMPLETED":
raise ValueError("Transfer has already been executed")
if tr.status == "REJECTED":
raise ValueError("Cannot execute a rejected transfer")
await _move_resource(
resource_type=tr.resourceType,
resource_id=tr.resourceId,
target_org_id=tr.targetOrganizationId,
)
now = datetime.now(timezone.utc)
updated = await prisma.transferrequest.update(
where={"id": transfer_id},
data={"status": "COMPLETED", "completedAt": now},
)
await _create_audit_logs(
transfer=updated,
actor_user_id=user_id,
)
return TransferResponse.from_db(updated)
# ---------------------------------------------------------------------------
# Internal helpers
# ---------------------------------------------------------------------------
async def _validate_resource_ownership(
resource_type: str, resource_id: str, org_id: str
) -> None:
"""Verify the resource exists and belongs to the given org."""
if resource_type == "AgentGraph":
graph = await prisma.agentgraph.find_first(
where={"id": resource_id, "isActive": True}
)
if graph is None:
raise NotFoundError(f"AgentGraph '{resource_id}' not found")
if graph.organizationId != org_id:
raise ValueError(
"AgentGraph does not belong to the source organization"
)
elif resource_type == "StoreListing":
listing = await prisma.storelisting.find_unique(
where={"id": resource_id}
)
if listing is None or listing.isDeleted:
raise NotFoundError(f"StoreListing '{resource_id}' not found")
if listing.owningOrgId != org_id:
raise ValueError(
"StoreListing does not belong to the source organization"
)
async def _move_resource(
resource_type: str,
resource_id: str,
target_org_id: str,
) -> None:
"""Move the resource to the target organization."""
if resource_type == "AgentGraph":
await prisma.agentgraph.update_many(
where={"id": resource_id, "isActive": True},
data={"organizationId": target_org_id},
)
elif resource_type == "StoreListing":
await prisma.storelisting.update(
where={"id": resource_id},
data={"owningOrgId": target_org_id},
)
async def _create_audit_logs(transfer, actor_user_id: str) -> None:
"""Create audit log entries for both source and target organizations."""
common = {
"actorUserId": actor_user_id,
"entityType": "TransferRequest",
"entityId": transfer.id,
"action": "TRANSFER_EXECUTED",
"afterJson": {
"resourceType": transfer.resourceType,
"resourceId": transfer.resourceId,
"sourceOrganizationId": transfer.sourceOrganizationId,
"targetOrganizationId": transfer.targetOrganizationId,
},
"correlationId": transfer.id,
}
await prisma.auditlog.create(
data={
**common,
"organizationId": transfer.sourceOrganizationId,
"beforeJson": {"organizationId": transfer.sourceOrganizationId},
}
)
await prisma.auditlog.create(
data={
**common,
"organizationId": transfer.targetOrganizationId,
"beforeJson": {"organizationId": transfer.sourceOrganizationId},
}
)

View File

@@ -0,0 +1,44 @@
"""Pydantic request/response models for resource transfer management."""
from datetime import datetime
from pydantic import BaseModel
class CreateTransferRequest(BaseModel):
resource_type: str # "AgentGraph", "StoreListing"
resource_id: str
target_organization_id: str
reason: str | None = None
class TransferResponse(BaseModel):
id: str
resource_type: str
resource_id: str
source_organization_id: str
target_organization_id: str
initiated_by_user_id: str
status: str
source_approved_by_user_id: str | None
target_approved_by_user_id: str | None
completed_at: datetime | None
reason: str | None
created_at: datetime
@staticmethod
def from_db(tr) -> "TransferResponse":
return TransferResponse(
id=tr.id,
resource_type=tr.resourceType,
resource_id=tr.resourceId,
source_organization_id=tr.sourceOrganizationId,
target_organization_id=tr.targetOrganizationId,
initiated_by_user_id=tr.initiatedByUserId,
status=tr.status,
source_approved_by_user_id=tr.sourceApprovedByUserId,
target_approved_by_user_id=tr.targetApprovedByUserId,
completed_at=tr.completedAt,
reason=tr.reason,
created_at=tr.createdAt,
)

View File

@@ -0,0 +1,105 @@
"""Resource transfer API routes."""
from typing import Annotated
from autogpt_libs.auth import requires_org_permission
from autogpt_libs.auth.models import RequestContext
from autogpt_libs.auth.permissions import OrgAction
from fastapi import APIRouter, Security
from . import db as transfer_db
from .model import CreateTransferRequest, TransferResponse
router = APIRouter()
@router.post(
"",
summary="Create transfer request",
tags=["transfers"],
)
async def create_transfer(
request: CreateTransferRequest,
ctx: Annotated[
RequestContext,
Security(requires_org_permission(OrgAction.TRANSFER_RESOURCES)),
],
) -> TransferResponse:
return await transfer_db.create_transfer(
source_org_id=ctx.org_id,
target_org_id=request.target_organization_id,
resource_type=request.resource_type,
resource_id=request.resource_id,
user_id=ctx.user_id,
reason=request.reason,
)
@router.get(
"",
summary="List transfers for active organization",
tags=["transfers"],
)
async def list_transfers(
ctx: Annotated[
RequestContext,
Security(requires_org_permission(OrgAction.TRANSFER_RESOURCES)),
],
) -> list[TransferResponse]:
return await transfer_db.list_transfers(ctx.org_id)
@router.post(
"/{transfer_id}/approve",
summary="Approve transfer request",
tags=["transfers"],
)
async def approve_transfer(
transfer_id: str,
ctx: Annotated[
RequestContext,
Security(requires_org_permission(OrgAction.TRANSFER_RESOURCES)),
],
) -> TransferResponse:
return await transfer_db.approve_transfer(
transfer_id=transfer_id,
user_id=ctx.user_id,
org_id=ctx.org_id,
)
@router.post(
"/{transfer_id}/reject",
summary="Reject transfer request",
tags=["transfers"],
)
async def reject_transfer(
transfer_id: str,
ctx: Annotated[
RequestContext,
Security(requires_org_permission(OrgAction.TRANSFER_RESOURCES)),
],
) -> TransferResponse:
return await transfer_db.reject_transfer(
transfer_id=transfer_id,
user_id=ctx.user_id,
org_id=ctx.org_id,
)
@router.post(
"/{transfer_id}/execute",
summary="Execute approved transfer",
tags=["transfers"],
)
async def execute_transfer(
transfer_id: str,
ctx: Annotated[
RequestContext,
Security(requires_org_permission(OrgAction.TRANSFER_RESOURCES)),
],
) -> TransferResponse:
return await transfer_db.execute_transfer(
transfer_id=transfer_id,
user_id=ctx.user_id,
)

View File

@@ -9,8 +9,9 @@ from typing import Annotated, Any, Sequence, get_args
import pydantic
import stripe
from autogpt_libs.auth import get_user_id, requires_user
from autogpt_libs.auth import get_request_context, get_user_id, requires_user
from autogpt_libs.auth.jwt_utils import get_jwt_payload
from autogpt_libs.auth.models import RequestContext
from fastapi import (
APIRouter,
Body,
@@ -437,7 +438,10 @@ async def get_graph_blocks() -> Response:
dependencies=[Security(requires_user)],
)
async def execute_graph_block(
block_id: str, data: BlockInput, user_id: Annotated[str, Security(get_user_id)]
block_id: str,
data: BlockInput,
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
) -> CompletedBlockOutput:
obj = get_block(block_id)
if not obj:
@@ -483,6 +487,7 @@ async def execute_graph_block(
)
async def upload_file(
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
file: UploadFile = File(...),
expiration_hours: int = 24,
) -> UploadFileResponse:
@@ -572,6 +577,7 @@ async def upload_file(
)
async def get_user_credits(
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
) -> dict[str, int]:
user_credit_model = await get_user_credit_model(user_id)
return {"credits": await user_credit_model.get_credits(user_id)}
@@ -584,7 +590,9 @@ async def get_user_credits(
dependencies=[Security(requires_user)],
)
async def request_top_up(
request: RequestTopUp, user_id: Annotated[str, Security(get_user_id)]
request: RequestTopUp,
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
):
user_credit_model = await get_user_credit_model(user_id)
checkout_url = await user_credit_model.top_up_intent(user_id, request.credit_amount)
@@ -739,6 +747,7 @@ async def manage_payment_method(
)
async def get_credit_history(
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
transaction_time: datetime | None = None,
transaction_type: str | None = None,
transaction_count_limit: int = 100,
@@ -763,6 +772,7 @@ async def get_credit_history(
)
async def get_refund_requests(
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
) -> list[RefundRequest]:
user_credit_model = await get_user_credit_model(user_id)
return await user_credit_model.get_refund_requests(user_id)
@@ -785,6 +795,7 @@ class DeleteGraphResponse(TypedDict):
)
async def list_graphs(
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
) -> Sequence[graph_db.GraphMeta]:
paginated_result = await graph_db.list_graphs_paginated(
user_id=user_id,
@@ -810,6 +821,7 @@ async def list_graphs(
async def get_graph(
graph_id: str,
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
version: int | None = None,
for_export: bool = False,
) -> graph_db.GraphModel:
@@ -832,7 +844,9 @@ async def get_graph(
dependencies=[Security(requires_user)],
)
async def get_graph_all_versions(
graph_id: str, user_id: Annotated[str, Security(get_user_id)]
graph_id: str,
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
) -> Sequence[graph_db.GraphModel]:
graphs = await graph_db.get_graph_all_versions(graph_id, user_id=user_id)
if not graphs:
@@ -849,12 +863,18 @@ async def get_graph_all_versions(
async def create_new_graph(
create_graph: CreateGraph,
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
) -> graph_db.GraphModel:
graph = graph_db.make_graph_model(create_graph.graph, user_id)
graph.reassign_ids(user_id=user_id, reassign_graph_id=True)
graph.validate_graph(for_run=False)
await graph_db.create_graph(graph, user_id=user_id)
await graph_db.create_graph(
graph,
user_id=user_id,
organization_id=ctx.org_id,
team_id=ctx.team_id,
)
await library_db.create_library_agent(graph, user_id)
activated_graph = await on_graph_activate(graph, user_id=user_id)
@@ -871,7 +891,9 @@ async def create_new_graph(
dependencies=[Security(requires_user)],
)
async def delete_graph(
graph_id: str, user_id: Annotated[str, Security(get_user_id)]
graph_id: str,
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
) -> DeleteGraphResponse:
if active_version := await graph_db.get_graph(
graph_id=graph_id, version=None, user_id=user_id
@@ -891,6 +913,7 @@ async def update_graph(
graph_id: str,
graph: graph_db.Graph,
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
) -> graph_db.GraphModel:
if graph.id and graph.id != graph_id:
raise HTTPException(400, detail="Graph ID does not match ID in URI")
@@ -906,7 +929,12 @@ async def update_graph(
graph.reassign_ids(user_id=user_id, reassign_graph_id=False)
graph.validate_graph(for_run=False)
new_graph_version = await graph_db.create_graph(graph, user_id=user_id)
new_graph_version = await graph_db.create_graph(
graph,
user_id=user_id,
organization_id=ctx.org_id,
team_id=ctx.team_id,
)
if new_graph_version.is_active:
await library_db.update_library_agent_version_and_settings(
@@ -939,6 +967,7 @@ async def set_graph_active_version(
graph_id: str,
request_body: SetGraphActiveVersion,
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
):
new_active_version = request_body.active_graph_version
new_active_graph = await graph_db.get_graph(
@@ -982,6 +1011,7 @@ async def update_graph_settings(
graph_id: str,
settings: GraphSettings,
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
) -> GraphSettings:
"""Update graph settings for the user's library agent."""
library_agent = await library_db.get_library_agent_by_graph_id(
@@ -1008,6 +1038,7 @@ async def update_graph_settings(
async def execute_graph(
graph_id: str,
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
inputs: Annotated[dict[str, Any], Body(..., embed=True, default_factory=dict)],
credentials_inputs: Annotated[
dict[str, CredentialsMetaInput], Body(..., embed=True, default_factory=dict)
@@ -1035,6 +1066,8 @@ async def execute_graph(
graph_version=graph_version,
graph_credentials_inputs=credentials_inputs,
dry_run=dry_run,
organization_id=ctx.org_id,
team_id=ctx.team_id,
)
# Record successful graph execution
record_graph_execution(graph_id=graph_id, status="success", user_id=user_id)
@@ -1120,6 +1153,7 @@ async def _stop_graph_run(
)
async def list_graphs_executions(
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
) -> list[execution_db.GraphExecutionMeta]:
paginated_result = await execution_db.get_graph_executions_paginated(
user_id=user_id,
@@ -1143,6 +1177,7 @@ async def list_graphs_executions(
async def list_graph_executions(
graph_id: str,
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
page: int = Query(1, ge=1, description="Page number (1-indexed)"),
page_size: int = Query(
25, ge=1, le=100, description="Number of executions per page"
@@ -1200,6 +1235,7 @@ async def get_graph_execution(
graph_id: str,
graph_exec_id: str,
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
) -> execution_db.GraphExecution | execution_db.GraphExecutionWithNodes:
result = await execution_db.get_graph_execution(
user_id=user_id,
@@ -1257,6 +1293,7 @@ async def hide_activity_summary_if_disabled(
async def delete_graph_execution(
graph_exec_id: str,
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
) -> None:
await execution_db.delete_graph_execution(
graph_exec_id=graph_exec_id, user_id=user_id
@@ -1284,6 +1321,7 @@ async def enable_execution_sharing(
graph_id: Annotated[str, Path],
graph_exec_id: Annotated[str, Path],
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
_body: ShareRequest = Body(default=ShareRequest()),
) -> ShareResponse:
"""Enable sharing for a graph execution."""
@@ -1322,6 +1360,7 @@ async def disable_execution_sharing(
graph_id: Annotated[str, Path],
graph_exec_id: Annotated[str, Path],
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
) -> None:
"""Disable sharing for a graph execution."""
# Verify the execution belongs to the user
@@ -1381,6 +1420,7 @@ class ScheduleCreationRequest(pydantic.BaseModel):
)
async def create_graph_execution_schedule(
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
graph_id: str = Path(..., description="ID of the graph to schedule"),
schedule_params: ScheduleCreationRequest = Body(),
) -> scheduler.GraphExecutionJobInfo:
@@ -1411,6 +1451,8 @@ async def create_graph_execution_schedule(
input_data=schedule_params.inputs,
input_credentials=schedule_params.credentials,
user_timezone=user_timezone,
organization_id=ctx.org_id,
team_id=ctx.team_id,
)
# Convert the next_run_time back to user timezone for display
@@ -1432,6 +1474,7 @@ async def create_graph_execution_schedule(
)
async def list_graph_execution_schedules(
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
graph_id: str = Path(),
) -> list[scheduler.GraphExecutionJobInfo]:
return await get_scheduler_client().get_execution_schedules(
@@ -1448,6 +1491,7 @@ async def list_graph_execution_schedules(
)
async def list_all_graphs_execution_schedules(
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
) -> list[scheduler.GraphExecutionJobInfo]:
return await get_scheduler_client().get_execution_schedules(user_id=user_id)
@@ -1460,6 +1504,7 @@ async def list_all_graphs_execution_schedules(
)
async def delete_graph_execution_schedule(
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
schedule_id: str = Path(..., description="ID of the schedule to delete"),
) -> dict[str, Any]:
try:
@@ -1484,7 +1529,9 @@ async def delete_graph_execution_schedule(
dependencies=[Security(requires_user)],
)
async def create_api_key(
request: CreateAPIKeyRequest, user_id: Annotated[str, Security(get_user_id)]
request: CreateAPIKeyRequest,
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
) -> CreateAPIKeyResponse:
"""Create a new API key"""
api_key_info, plain_text_key = await api_key_db.create_api_key(
@@ -1492,6 +1539,7 @@ async def create_api_key(
user_id=user_id,
permissions=request.permissions,
description=request.description,
organization_id=ctx.org_id if ctx.org_id else None,
)
return CreateAPIKeyResponse(api_key=api_key_info, plain_text_key=plain_text_key)
@@ -1504,6 +1552,7 @@ async def create_api_key(
)
async def get_api_keys(
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
) -> list[api_key_db.APIKeyInfo]:
"""List all API keys for the user"""
return await api_key_db.list_user_api_keys(user_id)
@@ -1516,7 +1565,9 @@ async def get_api_keys(
dependencies=[Security(requires_user)],
)
async def get_api_key(
key_id: str, user_id: Annotated[str, Security(get_user_id)]
key_id: str,
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
) -> api_key_db.APIKeyInfo:
"""Get a specific API key"""
api_key = await api_key_db.get_api_key_by_id(key_id, user_id)
@@ -1532,7 +1583,9 @@ async def get_api_key(
dependencies=[Security(requires_user)],
)
async def delete_api_key(
key_id: str, user_id: Annotated[str, Security(get_user_id)]
key_id: str,
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
) -> api_key_db.APIKeyInfo:
"""Revoke an API key"""
return await api_key_db.revoke_api_key(key_id, user_id)
@@ -1545,7 +1598,9 @@ async def delete_api_key(
dependencies=[Security(requires_user)],
)
async def suspend_key(
key_id: str, user_id: Annotated[str, Security(get_user_id)]
key_id: str,
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
) -> api_key_db.APIKeyInfo:
"""Suspend an API key"""
return await api_key_db.suspend_api_key(key_id, user_id)
@@ -1561,6 +1616,7 @@ async def update_permissions(
key_id: str,
request: UpdatePermissionsRequest,
user_id: Annotated[str, Security(get_user_id)],
ctx: Annotated[RequestContext, Security(get_request_context)],
) -> api_key_db.APIKeyInfo:
"""Update API key permissions"""
return await api_key_db.update_api_key_permissions(

View File

@@ -29,15 +29,20 @@ import backend.api.features.library.model
import backend.api.features.library.routes
import backend.api.features.mcp.routes as mcp_routes
import backend.api.features.oauth
import backend.api.features.orgs.invitation_routes
import backend.api.features.orgs.routes as org_routes
import backend.api.features.orgs.team_routes
import backend.api.features.otto.routes
import backend.api.features.postmark.postmark
import backend.api.features.store.model
import backend.api.features.store.routes
import backend.api.features.transfers.routes as transfer_routes
import backend.api.features.v1
import backend.api.features.workspace.routes as workspace_routes
import backend.api.features.workspace.routes as team_routes
import backend.data.block
import backend.data.db
import backend.data.graph
import backend.data.org_migration
import backend.data.user
import backend.integrations.webhooks.utils
import backend.util.service
@@ -129,6 +134,7 @@ async def lifespan_context(app: fastapi.FastAPI):
await backend.data.graph.fix_llm_provider_credentials()
await backend.data.graph.migrate_llm_models(DEFAULT_LLM_MODEL)
await backend.integrations.webhooks.utils.migrate_legacy_triggered_graphs()
await backend.data.org_migration.run_migration()
with launch_darkly_context():
yield
@@ -352,7 +358,7 @@ app.include_router(
prefix="/api/chat",
)
app.include_router(
workspace_routes.router,
team_routes.router,
tags=["workspace"],
prefix="/api/workspace",
)
@@ -366,6 +372,31 @@ app.include_router(
tags=["oauth"],
prefix="/api/oauth",
)
app.include_router(
org_routes.router,
tags=["v2", "orgs"],
prefix="/api/orgs",
)
app.include_router(
backend.api.features.orgs.team_routes.router,
tags=["v2", "orgs", "workspaces"],
prefix="/api/orgs/{org_id}/workspaces",
)
app.include_router(
backend.api.features.orgs.invitation_routes.org_router,
tags=["v2", "orgs", "invitations"],
prefix="/api/orgs/{org_id}/invitations",
)
app.include_router(
backend.api.features.orgs.invitation_routes.router,
tags=["v2", "invitations"],
prefix="/api/invitations",
)
app.include_router(
transfer_routes.router,
tags=["v2", "transfers"],
prefix="/api/transfers",
)
app.mount("/external-api", external_api)
@@ -420,8 +451,22 @@ class AgentServer(backend.util.service.AppProcess):
graph_version: Optional[int] = None,
node_input: Optional[dict[str, Any]] = None,
):
from autogpt_libs.auth.models import RequestContext
ctx = RequestContext(
user_id=user_id,
org_id="test-org",
team_id="test-workspace",
is_org_owner=True,
is_org_admin=True,
is_org_billing_manager=False,
is_team_admin=True,
is_team_billing_manager=False,
seat_status="ACTIVE",
)
return await backend.api.features.v1.execute_graph(
user_id=user_id,
ctx=ctx,
graph_id=graph_id,
graph_version=graph_version,
inputs=node_input or {},
@@ -444,7 +489,22 @@ class AgentServer(backend.util.service.AppProcess):
create_graph: backend.api.features.v1.CreateGraph,
user_id: str,
):
return await backend.api.features.v1.create_new_graph(create_graph, user_id)
from autogpt_libs.auth.models import RequestContext
ctx = RequestContext(
user_id=user_id,
org_id="test-org",
team_id="test-workspace",
is_org_owner=True,
is_org_admin=True,
is_org_billing_manager=False,
is_team_admin=True,
is_team_billing_manager=False,
seat_status="ACTIVE",
)
return await backend.api.features.v1.create_new_graph(
create_graph, user_id, ctx
)
@staticmethod
async def test_get_graph_run_status(graph_exec_id: str, user_id: str):

View File

@@ -95,6 +95,8 @@ class AgentExecutorBlock(Block):
update={"parent_execution_id": graph_exec_id},
),
dry_run=execution_context.dry_run,
organization_id=execution_context.organization_id,
team_id=execution_context.team_id,
)
logger = execution_utils.LogMetadata(

View File

@@ -162,6 +162,9 @@ async def get_chat_messages_paginated(
async def create_chat_session(
session_id: str,
user_id: str,
*,
organization_id: str | None = None,
team_id: str | None = None,
metadata: ChatSessionMetadata | None = None,
) -> ChatSessionInfo:
"""Create a new chat session in the database."""
@@ -171,6 +174,9 @@ async def create_chat_session(
credentials=SafeJson({}),
successfulAgentRuns=SafeJson({}),
successfulAgentSchedules=SafeJson({}),
# Tenancy dual-write fields
**({"organizationId": organization_id} if organization_id else {}),
**({"teamId": team_id} if team_id else {}),
metadata=SafeJson((metadata or ChatSessionMetadata()).model_dump()),
)
prisma_session = await PrismaChatSession.prisma().create(data=data)

View File

@@ -157,6 +157,12 @@ class CoPilotExecutionEntry(BaseModel):
file_ids: list[str] | None = None
"""Workspace file IDs attached to the user's message"""
organization_id: str | None = None
"""Active organization for tenant-scoped execution"""
team_id: str | None = None
"""Active workspace for tenant-scoped execution"""
mode: CopilotMode | None = None
"""Autopilot mode override: 'fast' or 'extended_thinking'. None = server default."""
@@ -179,6 +185,8 @@ async def enqueue_copilot_turn(
is_user_message: bool = True,
context: dict[str, str] | None = None,
file_ids: list[str] | None = None,
organization_id: str | None = None,
team_id: str | None = None,
mode: CopilotMode | None = None,
) -> None:
"""Enqueue a CoPilot task for processing by the executor service.
@@ -203,6 +211,8 @@ async def enqueue_copilot_turn(
is_user_message=is_user_message,
context=context,
file_ids=file_ids,
organization_id=organization_id,
team_id=team_id,
mode=mode,
)

View File

@@ -498,12 +498,19 @@ class RunAgentTool(BaseTool):
library_agent = await get_or_create_library_agent(graph, user_id)
# Execute
# Resolve org/team context for tenancy
from backend.api.features.orgs.db import get_user_default_team
org_id, team_id = await get_user_default_team(user_id)
execution = await execution_utils.add_graph_execution(
graph_id=library_agent.graph_id,
user_id=user_id,
inputs=inputs,
graph_credentials_inputs=graph_credentials,
dry_run=dry_run,
organization_id=org_id,
team_id=team_id,
)
# Track successful run (dry runs don't count against the session limit)

View File

@@ -29,6 +29,9 @@ class APIKeyInfo(APIAuthorizationInfo):
)
status: APIKeyStatus
description: Optional[str] = None
organization_id: Optional[str] = None
owner_type: Optional[str] = None
team_id_restriction: Optional[str] = None
type: Literal["api_key"] = "api_key" # type: ignore
@@ -46,6 +49,9 @@ class APIKeyInfo(APIAuthorizationInfo):
revoked_at=api_key.revokedAt,
description=api_key.description,
user_id=api_key.userId,
organization_id=api_key.organizationId,
owner_type=api_key.ownerType,
team_id_restriction=api_key.teamIdRestriction,
)
@@ -74,6 +80,9 @@ async def create_api_key(
user_id: str,
permissions: list[APIKeyPermission],
description: Optional[str] = None,
organization_id: Optional[str] = None,
owner_type: Optional[str] = None,
team_id_restriction: Optional[str] = None,
) -> tuple[APIKeyInfo, str]:
"""
Generate a new API key and store it in the database.
@@ -81,19 +90,25 @@ async def create_api_key(
"""
generated_key = keysmith.generate_key()
saved_key_obj = await PrismaAPIKey.prisma().create(
data={
"id": str(uuid.uuid4()),
"name": name,
"head": generated_key.head,
"tail": generated_key.tail,
"hash": generated_key.hash,
"salt": generated_key.salt,
"permissions": [p for p in permissions],
"description": description,
"userId": user_id,
}
)
create_data: dict[str, object] = {
"id": str(uuid.uuid4()),
"name": name,
"head": generated_key.head,
"tail": generated_key.tail,
"hash": generated_key.hash,
"salt": generated_key.salt,
"permissions": [p for p in permissions],
"description": description,
"userId": user_id,
}
if organization_id is not None:
create_data["organizationId"] = organization_id
if owner_type is not None:
create_data["ownerType"] = owner_type
if team_id_restriction is not None:
create_data["teamIdRestriction"] = team_id_restriction
saved_key_obj = await PrismaAPIKey.prisma().create(data=create_data) # type: ignore
return APIKeyInfo.from_db(saved_key_obj), generated_key.key

View File

@@ -98,10 +98,14 @@ class ExecutionContext(BaseModel):
root_execution_id: Optional[str] = None
parent_execution_id: Optional[str] = None
# Workspace
# File workspace (UserWorkspace — NOT the Team concept)
workspace_id: Optional[str] = None
session_id: Optional[str] = None
# Org/team tenancy context
organization_id: Optional[str] = None
team_id: Optional[str] = None
# -------------------------- Models -------------------------- #
@@ -517,6 +521,7 @@ async def get_graph_executions(
created_time_gte: Optional[datetime] = None,
created_time_lte: Optional[datetime] = None,
limit: Optional[int] = None,
team_id: Optional[str] = None,
) -> list[GraphExecutionMeta]:
"""⚠️ **Optional `user_id` check**: MUST USE check in user-facing endpoints."""
where_filter: AgentGraphExecutionWhereInput = {
@@ -524,7 +529,10 @@ async def get_graph_executions(
}
if graph_exec_id:
where_filter["id"] = graph_exec_id
if user_id:
# Prefer team_id scoping over user_id when available
if team_id:
where_filter["teamId"] = team_id
elif user_id:
where_filter["userId"] = user_id
if graph_id:
where_filter["agentGraphId"] = graph_id
@@ -730,6 +738,8 @@ async def create_graph_execution(
nodes_input_masks: Optional[NodesInputMasks] = None,
parent_graph_exec_id: Optional[str] = None,
is_dry_run: bool = False,
organization_id: Optional[str] = None,
team_id: Optional[str] = None,
) -> GraphExecutionWithNodes:
"""
Create a new AgentGraphExecution record.
@@ -768,6 +778,9 @@ async def create_graph_execution(
"agentPresetId": preset_id,
"parentGraphExecutionId": parent_graph_exec_id,
**({"stats": Json({"is_dry_run": True})} if is_dry_run else {}),
# Tenancy dual-write fields
**({"organizationId": organization_id} if organization_id else {}),
**({"teamId": team_id} if team_id else {}),
},
include=GRAPH_EXECUTION_INCLUDE_WITH_NODES,
)

View File

@@ -1090,6 +1090,7 @@ async def get_graph(
for_export: bool = False,
include_subgraphs: bool = False,
skip_access_check: bool = False,
team_id: str | None = None,
) -> GraphModel | None:
"""
Retrieves a graph from the DB.
@@ -1103,14 +1104,18 @@ async def get_graph(
graph = None
# Only search graph directly on owned graph (or access check is skipped)
if skip_access_check or user_id is not None:
if skip_access_check or user_id is not None or team_id is not None:
graph_where_clause: AgentGraphWhereInput = {
"id": graph_id,
}
if version is not None:
graph_where_clause["version"] = version
if not skip_access_check and user_id is not None:
graph_where_clause["userId"] = user_id
# Prefer team_id scoping over user_id when both are available
if not skip_access_check:
if team_id is not None:
graph_where_clause["teamId"] = team_id
elif user_id is not None:
graph_where_clause["userId"] = user_id
graph = await AgentGraph.prisma().find_first(
where=graph_where_clause,
@@ -1333,10 +1338,19 @@ async def set_graph_active_version(graph_id: str, version: int, user_id: str) ->
async def get_graph_all_versions(
graph_id: str, user_id: str, limit: int = MAX_GRAPH_VERSIONS_FETCH
graph_id: str,
user_id: str,
limit: int = MAX_GRAPH_VERSIONS_FETCH,
team_id: str | None = None,
) -> list[GraphModel]:
where_clause: AgentGraphWhereInput = {"id": graph_id}
if team_id is not None:
where_clause["teamId"] = team_id
else:
where_clause["userId"] = user_id
graph_versions = await AgentGraph.prisma().find_many(
where={"id": graph_id, "userId": user_id},
where=where_clause,
order={"version": "desc"},
include=AGENT_GRAPH_INCLUDE,
take=limit,
@@ -1494,9 +1508,21 @@ async def is_graph_published_in_marketplace(graph_id: str, graph_version: int) -
return marketplace_listing is not None
async def create_graph(graph: Graph, user_id: str) -> GraphModel:
async def create_graph(
graph: Graph,
user_id: str,
*,
organization_id: str | None = None,
team_id: str | None = None,
) -> GraphModel:
async with transaction() as tx:
await __create_graph(tx, graph, user_id)
await __create_graph(
tx,
graph,
user_id,
organization_id=organization_id,
team_id=team_id,
)
if created_graph := await get_graph(graph.id, graph.version, user_id=user_id):
return created_graph
@@ -1504,7 +1530,14 @@ async def create_graph(graph: Graph, user_id: str) -> GraphModel:
raise ValueError(f"Created graph {graph.id} v{graph.version} is not in DB")
async def fork_graph(graph_id: str, graph_version: int, user_id: str) -> GraphModel:
async def fork_graph(
graph_id: str,
graph_version: int,
user_id: str,
*,
organization_id: str | None = None,
team_id: str | None = None,
) -> GraphModel:
"""
Forks a graph by copying it and all its nodes and links to a new graph.
"""
@@ -1520,12 +1553,25 @@ async def fork_graph(graph_id: str, graph_version: int, user_id: str) -> GraphMo
graph.validate_graph(for_run=False)
async with transaction() as tx:
await __create_graph(tx, graph, user_id)
await __create_graph(
tx,
graph,
user_id,
organization_id=organization_id,
team_id=team_id,
)
return graph
async def __create_graph(tx, graph: Graph, user_id: str):
async def __create_graph(
tx,
graph: Graph,
user_id: str,
*,
organization_id: str | None = None,
team_id: str | None = None,
):
graphs = [graph] + graph.sub_graphs
# Auto-increment version for any graph entry (parent or sub-graph) whose
@@ -1562,6 +1608,9 @@ async def __create_graph(tx, graph: Graph, user_id: str):
userId=user_id,
forkedFromId=graph.forked_from_id,
forkedFromVersion=graph.forked_from_version,
# Tenancy dual-write fields
organizationId=organization_id,
teamId=team_id,
)
for graph in graphs
]

View File

@@ -170,6 +170,8 @@ async def get_or_create_human_review(
input_data: SafeJsonData,
message: str,
editable: bool,
organization_id: str | None = None,
team_id: str | None = None,
) -> Optional[ReviewResult]:
"""
Get existing review or create a new pending review entry.
@@ -206,6 +208,8 @@ async def get_or_create_human_review(
"instructions": message,
"editable": editable,
"status": ReviewStatus.WAITING,
**({"organizationId": organization_id} if organization_id else {}),
**({"teamId": team_id} if team_id else {}),
},
"update": {}, # Do nothing on update - keep existing review as is
},

View File

@@ -0,0 +1,235 @@
"""Org-level credit operations.
Mirrors the UserCreditBase pattern but operates on OrgBalance and
OrgCreditTransaction tables instead of UserBalance and CreditTransaction.
All balance mutations use atomic SQL to prevent race conditions.
"""
import logging
from prisma.enums import CreditTransactionType
from pydantic import BaseModel
from backend.data.db import prisma
from backend.util.exceptions import InsufficientBalanceError
from backend.util.json import SafeJson
logger = logging.getLogger(__name__)
class _BalanceResult(BaseModel):
balance: int
async def get_org_credits(org_id: str) -> int:
"""Get the current credit balance for an organization."""
balance = await prisma.orgbalance.find_unique(where={"orgId": org_id})
return balance.balance if balance else 0
async def spend_org_credits(
org_id: str,
user_id: str,
amount: int,
team_id: str | None = None,
metadata: dict | None = None,
) -> int:
"""Atomically spend credits from the org balance.
Uses a single UPDATE ... WHERE balance >= $amount to prevent race
conditions. If the UPDATE affects 0 rows, the balance is insufficient.
Returns:
The remaining balance.
Raises:
InsufficientBalanceError: If the org doesn't have enough credits.
"""
if amount <= 0:
raise ValueError("Spend amount must be positive")
# Atomic deduct — only succeeds if balance >= amount
rows_affected = await prisma.execute_raw(
"""
UPDATE "OrgBalance"
SET "balance" = "balance" - $1, "updatedAt" = NOW()
WHERE "orgId" = $2 AND "balance" >= $1
""",
amount,
org_id,
)
if rows_affected == 0:
current = await get_org_credits(org_id)
raise InsufficientBalanceError(
f"Organization has {current} credits but needs {amount}",
user_id=user_id,
balance=current,
amount=amount,
)
# Read the new balance for the transaction record
new_balance = await get_org_credits(org_id)
# Record the transaction
tx_data: dict = {
"orgId": org_id,
"initiatedByUserId": user_id,
"amount": -amount,
"type": CreditTransactionType.USAGE,
"runningBalance": new_balance,
}
if team_id:
tx_data["teamId"] = team_id
if metadata:
tx_data["metadata"] = SafeJson(metadata)
await prisma.orgcredittransaction.create(data=tx_data)
return new_balance
async def top_up_org_credits(
org_id: str,
amount: int,
user_id: str | None = None,
metadata: dict | None = None,
) -> int:
"""Atomically add credits to the org balance.
Creates the OrgBalance row if it doesn't exist (upsert pattern via raw SQL).
Returns the new balance.
"""
if amount <= 0:
raise ValueError("Top-up amount must be positive")
# Atomic upsert — INSERT or UPDATE in one statement
await prisma.execute_raw(
"""
INSERT INTO "OrgBalance" ("orgId", "balance", "updatedAt")
VALUES ($1, $2, NOW())
ON CONFLICT ("orgId")
DO UPDATE SET "balance" = "OrgBalance"."balance" + $2, "updatedAt" = NOW()
""",
org_id,
amount,
)
new_balance = await get_org_credits(org_id)
# Record the transaction
tx_data: dict = {
"orgId": org_id,
"amount": amount,
"type": CreditTransactionType.TOP_UP,
"runningBalance": new_balance,
}
if user_id:
tx_data["initiatedByUserId"] = user_id
if metadata:
tx_data["metadata"] = SafeJson(metadata)
await prisma.orgcredittransaction.create(data=tx_data)
return new_balance
async def get_org_transaction_history(
org_id: str,
limit: int = 50,
offset: int = 0,
) -> list[dict]:
"""Get credit transaction history for an organization."""
transactions = await prisma.orgcredittransaction.find_many(
where={"orgId": org_id, "isActive": True},
order={"createdAt": "desc"},
take=limit,
skip=offset,
)
return [
{
"transactionKey": t.transactionKey,
"createdAt": t.createdAt,
"amount": t.amount,
"type": t.type,
"runningBalance": t.runningBalance,
"initiatedByUserId": t.initiatedByUserId,
"teamId": t.teamId,
"metadata": t.metadata,
}
for t in transactions
]
async def get_seat_info(org_id: str) -> dict:
"""Get seat utilization for an organization."""
seats = await prisma.organizationseatassignment.find_many(
where={"organizationId": org_id}
)
total = len(seats)
active = sum(1 for s in seats if s.status == "ACTIVE")
return {
"total": total,
"active": active,
"inactive": total - active,
"seats": [
{
"userId": s.userId,
"seatType": s.seatType,
"status": s.status,
"createdAt": s.createdAt,
}
for s in seats
],
}
async def assign_seat(
org_id: str,
user_id: str,
seat_type: str = "FREE",
assigned_by: str | None = None,
) -> dict:
"""Assign a seat to a user in the organization."""
seat = await prisma.organizationseatassignment.upsert(
where={
"organizationId_userId": {
"organizationId": org_id,
"userId": user_id,
}
},
data={
"create": {
"organizationId": org_id,
"userId": user_id,
"seatType": seat_type,
"status": "ACTIVE",
"assignedByUserId": assigned_by,
},
"update": {
"seatType": seat_type,
"status": "ACTIVE",
"assignedByUserId": assigned_by,
},
},
)
return {
"userId": seat.userId,
"seatType": seat.seatType,
"status": seat.status,
}
async def unassign_seat(org_id: str, user_id: str) -> None:
"""Deactivate a user's seat assignment."""
await prisma.organizationseatassignment.update(
where={
"organizationId_userId": {
"organizationId": org_id,
"userId": user_id,
}
},
data={"status": "INACTIVE"},
)

View File

@@ -0,0 +1,215 @@
"""Tests for org-level credit operations.
Tests the atomic spend/top-up logic, edge cases, and error paths.
"""
from unittest.mock import AsyncMock, MagicMock
import pytest
from backend.data.org_credit import (
assign_seat,
get_org_credits,
get_org_transaction_history,
get_seat_info,
spend_org_credits,
top_up_org_credits,
unassign_seat,
)
from backend.util.exceptions import InsufficientBalanceError
@pytest.fixture(autouse=True)
def mock_prisma(mocker):
mock = MagicMock()
mock.orgbalance.find_unique = AsyncMock(return_value=MagicMock(balance=1000))
mock.execute_raw = AsyncMock(return_value=1) # 1 row affected = success
mock.orgcredittransaction.create = AsyncMock()
mock.orgcredittransaction.find_many = AsyncMock(return_value=[])
mock.organizationseatassignment.find_many = AsyncMock(return_value=[])
mock.organizationseatassignment.upsert = AsyncMock(
return_value=MagicMock(userId="u1", seatType="FREE", status="ACTIVE")
)
mock.organizationseatassignment.update = AsyncMock()
mocker.patch("backend.data.org_credit.prisma", mock)
return mock
class TestGetOrgCredits:
@pytest.mark.asyncio
async def test_returns_balance(self, mock_prisma):
result = await get_org_credits("org-1")
assert result == 1000
@pytest.mark.asyncio
async def test_returns_zero_when_no_balance_row(self, mock_prisma):
mock_prisma.orgbalance.find_unique = AsyncMock(return_value=None)
result = await get_org_credits("org-missing")
assert result == 0
class TestSpendOrgCredits:
@pytest.mark.asyncio
async def test_spend_success_returns_remaining(self, mock_prisma):
# execute_raw returns 1 (row affected) = success
mock_prisma.execute_raw = AsyncMock(return_value=1)
# After spend, balance reads 900
mock_prisma.orgbalance.find_unique = AsyncMock(
return_value=MagicMock(balance=900)
)
result = await spend_org_credits("org-1", "user-1", 100)
assert result == 900
mock_prisma.orgcredittransaction.create.assert_called_once()
# Verify transaction data is correct
tx_data = mock_prisma.orgcredittransaction.create.call_args[1]["data"]
assert tx_data["orgId"] == "org-1"
assert tx_data["initiatedByUserId"] == "user-1"
assert tx_data["amount"] == -100
assert tx_data["runningBalance"] == 900
@pytest.mark.asyncio
async def test_spend_insufficient_balance_raises(self, mock_prisma):
# execute_raw returns 0 = no row matched (insufficient balance)
mock_prisma.execute_raw = AsyncMock(return_value=0)
mock_prisma.orgbalance.find_unique = AsyncMock(
return_value=MagicMock(balance=50)
)
with pytest.raises(InsufficientBalanceError) as exc_info:
await spend_org_credits("org-1", "user-1", 100)
assert exc_info.value.balance == 50
assert exc_info.value.amount == 100
# Transaction should NOT be created on failure
mock_prisma.orgcredittransaction.create.assert_not_called()
@pytest.mark.asyncio
async def test_spend_zero_amount_raises(self):
with pytest.raises(ValueError, match="positive"):
await spend_org_credits("org-1", "user-1", 0)
@pytest.mark.asyncio
async def test_spend_negative_amount_raises(self):
with pytest.raises(ValueError, match="positive"):
await spend_org_credits("org-1", "user-1", -5)
@pytest.mark.asyncio
async def test_spend_records_workspace_attribution(self, mock_prisma):
mock_prisma.execute_raw = AsyncMock(return_value=1)
mock_prisma.orgbalance.find_unique = AsyncMock(
return_value=MagicMock(balance=800)
)
await spend_org_credits(
"org-1", "user-1", 200, team_id="ws-1", metadata={"block": "llm"}
)
tx_data = mock_prisma.orgcredittransaction.create.call_args[1]["data"]
assert tx_data["teamId"] == "ws-1"
assert tx_data["amount"] == -200
class TestTopUpOrgCredits:
@pytest.mark.asyncio
async def test_top_up_success(self, mock_prisma):
mock_prisma.orgbalance.find_unique = AsyncMock(
return_value=MagicMock(balance=1500)
)
result = await top_up_org_credits("org-1", 500, user_id="user-1")
assert result == 1500
mock_prisma.execute_raw.assert_called_once() # Atomic upsert
# Verify transaction data
mock_prisma.orgcredittransaction.create.assert_called_once()
tx_data = mock_prisma.orgcredittransaction.create.call_args[1]["data"]
assert tx_data["orgId"] == "org-1"
assert tx_data["amount"] == 500
assert tx_data["initiatedByUserId"] == "user-1"
mock_prisma.orgcredittransaction.create.assert_called_once()
@pytest.mark.asyncio
async def test_top_up_zero_raises(self):
with pytest.raises(ValueError, match="positive"):
await top_up_org_credits("org-1", 0)
@pytest.mark.asyncio
async def test_top_up_negative_raises(self):
with pytest.raises(ValueError, match="positive"):
await top_up_org_credits("org-1", -10)
@pytest.mark.asyncio
async def test_top_up_no_user_id_omits_from_transaction(self, mock_prisma):
mock_prisma.orgbalance.find_unique = AsyncMock(
return_value=MagicMock(balance=500)
)
await top_up_org_credits("org-1", 500)
tx_data = mock_prisma.orgcredittransaction.create.call_args[1]["data"]
assert "initiatedByUserId" not in tx_data
class TestGetOrgTransactionHistory:
@pytest.mark.asyncio
async def test_returns_transactions(self, mock_prisma):
mock_tx = MagicMock(
transactionKey="tx-1",
createdAt="2026-01-01",
amount=-100,
type="USAGE",
runningBalance=900,
initiatedByUserId="user-1",
teamId="ws-1",
metadata=None,
)
mock_prisma.orgcredittransaction.find_many = AsyncMock(return_value=[mock_tx])
result = await get_org_transaction_history("org-1", limit=10)
assert len(result) == 1
assert result[0]["amount"] == -100
assert result[0]["teamId"] == "ws-1"
class TestSeatManagement:
@pytest.mark.asyncio
async def test_get_seat_info(self, mock_prisma):
mock_prisma.organizationseatassignment.find_many = AsyncMock(
return_value=[
MagicMock(
userId="u1", seatType="PAID", status="ACTIVE", createdAt="now"
),
MagicMock(
userId="u2", seatType="FREE", status="INACTIVE", createdAt="now"
),
]
)
result = await get_seat_info("org-1")
assert result["total"] == 2
assert result["active"] == 1
assert result["inactive"] == 1
# Verify the query filtered by org
call_kwargs = mock_prisma.organizationseatassignment.find_many.call_args[1]
assert call_kwargs["where"]["organizationId"] == "org-1"
@pytest.mark.asyncio
async def test_assign_seat(self, mock_prisma):
mock_prisma.organizationseatassignment.upsert = AsyncMock(
return_value=MagicMock(userId="user-1", seatType="PAID", status="ACTIVE")
)
result = await assign_seat("org-1", "user-1", seat_type="PAID")
assert result["seatType"] == "PAID"
mock_prisma.organizationseatassignment.upsert.assert_called_once()
@pytest.mark.asyncio
async def test_unassign_seat(self, mock_prisma):
await unassign_seat("org-1", "user-1")
mock_prisma.organizationseatassignment.update.assert_called_once()
call_kwargs = mock_prisma.organizationseatassignment.update.call_args[1]
# Verify correct record targeted
where = call_kwargs["where"]["organizationId_userId"]
assert where["organizationId"] == "org-1"
assert where["userId"] == "user-1"
# Verify status set to INACTIVE
assert call_kwargs["data"]["status"] == "INACTIVE"

View File

@@ -0,0 +1,463 @@
"""
Data migration: Bootstrap personal organizations for existing users.
Creates one Organization per user, with owner membership, default workspace,
org profile, seat assignment, and org balance. Assigns all tenant-bound
resources to the user's default workspace. Idempotent — safe to run repeatedly.
Run automatically during server startup via rest_api.py lifespan.
"""
import logging
import re
import time
from typing import LiteralString
from backend.data.db import prisma
logger = logging.getLogger(__name__)
def _sanitize_slug(raw: str) -> str:
"""Convert a string to a URL-safe slug: lowercase, alphanumeric + hyphens."""
slug = re.sub(r"[^a-z0-9-]", "-", raw.lower().strip())
slug = re.sub(r"-+", "-", slug).strip("-")
return slug or "user"
async def _resolve_unique_slug(desired: str) -> str:
"""Return *desired* if no Organization uses it yet, else append a numeric suffix."""
existing = await prisma.organization.find_unique(where={"slug": desired})
if existing is None:
# Also check aliases
alias = await prisma.organizationalias.find_unique(where={"aliasSlug": desired})
if alias is None:
return desired
# Collision — find the next available numeric suffix
for i in range(1, 10_000):
candidate = f"{desired}-{i}"
org = await prisma.organization.find_unique(where={"slug": candidate})
alias = await prisma.organizationalias.find_unique(
where={"aliasSlug": candidate}
)
if org is None and alias is None:
return candidate
raise RuntimeError(
f"Could not resolve a unique slug for '{desired}' after 10000 attempts"
)
async def create_orgs_for_existing_users() -> int:
"""Create a personal Organization for every user that lacks one.
Returns the number of orgs created.
"""
# Find users who are NOT yet an owner of any personal org
users_without_org = await prisma.query_raw(
"""
SELECT u."id", u."email", u."name", u."stripeCustomerId", u."topUpConfig",
p."username" AS profile_username, p."name" AS profile_name,
p."description" AS profile_description,
p."avatarUrl" AS profile_avatar_url,
p."links" AS profile_links
FROM "User" u
LEFT JOIN "Profile" p ON p."userId" = u."id"
WHERE NOT EXISTS (
SELECT 1 FROM "OrgMember" om
JOIN "Organization" o ON o."id" = om."orgId"
WHERE om."userId" = u."id" AND om."isOwner" = true AND o."isPersonal" = true
)
""",
)
if not users_without_org:
logger.info("Org migration: all users already have personal orgs")
return 0
logger.info(
f"Org migration: creating personal orgs for {len(users_without_org)} users"
)
created = 0
for row in users_without_org:
user_id: str = row["id"]
email: str = row["email"]
profile_username: str | None = row.get("profile_username")
profile_name: str | None = row.get("profile_name")
user_name: str | None = row.get("name")
# Determine slug: Profile.username → sanitized User.name → email local part → user-{id[:8]}
if profile_username:
desired_slug = _sanitize_slug(profile_username)
elif user_name:
desired_slug = _sanitize_slug(user_name)
else:
local_part = email.split("@")[0] if email else ""
desired_slug = (
_sanitize_slug(local_part) if local_part else f"user-{user_id[:8]}"
)
slug = await _resolve_unique_slug(desired_slug)
display_name = profile_name or user_name or email.split("@")[0]
# Create Organization — only include optional JSON fields when non-None
org_data: dict = {
"name": display_name,
"slug": slug,
"isPersonal": True,
"bootstrapUserId": user_id,
"settings": "{}",
}
if row.get("stripeCustomerId"):
org_data["stripeCustomerId"] = row["stripeCustomerId"]
if row.get("topUpConfig"):
org_data["topUpConfig"] = row["topUpConfig"]
org = await prisma.organization.create(data=org_data)
# Create OrgMember (owner)
await prisma.orgmember.create(
data={
"Org": {"connect": {"id": org.id}},
"User": {"connect": {"id": user_id}},
"isOwner": True,
"isAdmin": True,
"status": "ACTIVE",
}
)
# Create default Team
workspace = await prisma.team.create(
data={
"name": "Default",
"Org": {"connect": {"id": org.id}},
"isDefault": True,
"joinPolicy": "OPEN",
"createdByUserId": user_id,
}
)
# Create TeamMember
await prisma.teammember.create(
data={
"Workspace": {"connect": {"id": workspace.id}},
"User": {"connect": {"id": user_id}},
"isAdmin": True,
"status": "ACTIVE",
}
)
# Create OrganizationProfile (from user's Profile if exists)
profile_data: dict = {
"Organization": {"connect": {"id": org.id}},
"username": slug,
"displayName": display_name,
}
if row.get("profile_avatar_url"):
profile_data["avatarUrl"] = row["profile_avatar_url"]
if row.get("profile_description"):
profile_data["bio"] = row["profile_description"]
if row.get("profile_links"):
profile_data["socialLinks"] = row["profile_links"]
await prisma.organizationprofile.create(data=profile_data)
# Create seat assignment (FREE seat for personal org)
await prisma.organizationseatassignment.create(
data={
"organizationId": org.id,
"userId": user_id,
"seatType": "FREE",
"status": "ACTIVE",
"assignedByUserId": user_id,
}
)
# Log if slug diverged from desired (collision resolution)
if slug != desired_slug:
logger.info(
f"Org migration: slug collision for user {user_id}"
f"desired '{desired_slug}', assigned '{slug}'"
)
created += 1
logger.info(f"Org migration: created {created} personal orgs")
return created
async def migrate_org_balances() -> int:
"""Copy UserBalance rows into OrgBalance for personal orgs that lack one.
Returns the number of balances migrated.
"""
result = await prisma.execute_raw(
"""
INSERT INTO "OrgBalance" ("orgId", "balance", "updatedAt")
SELECT o."id", ub."balance", ub."updatedAt"
FROM "UserBalance" ub
JOIN "OrgMember" om ON om."userId" = ub."userId" AND om."isOwner" = true
JOIN "Organization" o ON o."id" = om."orgId" AND o."isPersonal" = true
WHERE NOT EXISTS (
SELECT 1 FROM "OrgBalance" ob WHERE ob."orgId" = o."id"
)
"""
)
logger.info(f"Org migration: migrated {result} org balances")
return result
async def migrate_credit_transactions() -> int:
"""Copy CreditTransaction rows into OrgCreditTransaction for personal orgs.
Only copies transactions that haven't been migrated yet (by checking for
matching transactionKey + orgId).
Returns the number of transactions migrated.
"""
result = await prisma.execute_raw(
"""
INSERT INTO "OrgCreditTransaction"
("transactionKey", "createdAt", "orgId", "initiatedByUserId",
"amount", "type", "runningBalance", "isActive", "metadata")
SELECT
ct."transactionKey", ct."createdAt", o."id", ct."userId",
ct."amount", ct."type", ct."runningBalance", ct."isActive", ct."metadata"
FROM "CreditTransaction" ct
JOIN "OrgMember" om ON om."userId" = ct."userId" AND om."isOwner" = true
JOIN "Organization" o ON o."id" = om."orgId" AND o."isPersonal" = true
WHERE NOT EXISTS (
SELECT 1 FROM "OrgCreditTransaction" oct
WHERE oct."transactionKey" = ct."transactionKey" AND oct."orgId" = o."id"
)
"""
)
logger.info(f"Org migration: migrated {result} credit transactions")
return result
async def _assign_team_tenancy(table_sql: "LiteralString") -> int:
"""Assign organizationId + teamId on a single table's unassigned rows."""
return await prisma.execute_raw(table_sql)
async def assign_resources_to_teams() -> dict[str, int]:
"""Set organizationId and teamId on all tenant-bound rows that lack them.
Uses the user's personal org and its default workspace.
Returns a dict of table_name -> rows_updated.
"""
results: dict[str, int] = {}
# --- Tables needing both organizationId + teamId ---
results["AgentGraph"] = await _assign_team_tenancy(
"""
UPDATE "AgentGraph" t
SET "organizationId" = o."id", "teamId" = w."id"
FROM "OrgMember" om
JOIN "Organization" o ON o."id" = om."orgId" AND o."isPersonal" = true
JOIN "Team" w ON w."orgId" = o."id" AND w."isDefault" = true
WHERE t."userId" = om."userId" AND om."isOwner" = true AND t."organizationId" IS NULL
"""
)
results["AgentGraphExecution"] = await _assign_team_tenancy(
"""
UPDATE "AgentGraphExecution" t
SET "organizationId" = o."id", "teamId" = w."id"
FROM "OrgMember" om
JOIN "Organization" o ON o."id" = om."orgId" AND o."isPersonal" = true
JOIN "Team" w ON w."orgId" = o."id" AND w."isDefault" = true
WHERE t."userId" = om."userId" AND om."isOwner" = true AND t."organizationId" IS NULL
"""
)
results["ChatSession"] = await _assign_team_tenancy(
"""
UPDATE "ChatSession" t
SET "organizationId" = o."id", "teamId" = w."id"
FROM "OrgMember" om
JOIN "Organization" o ON o."id" = om."orgId" AND o."isPersonal" = true
JOIN "Team" w ON w."orgId" = o."id" AND w."isDefault" = true
WHERE t."userId" = om."userId" AND om."isOwner" = true AND t."organizationId" IS NULL
"""
)
results["AgentPreset"] = await _assign_team_tenancy(
"""
UPDATE "AgentPreset" t
SET "organizationId" = o."id", "teamId" = w."id"
FROM "OrgMember" om
JOIN "Organization" o ON o."id" = om."orgId" AND o."isPersonal" = true
JOIN "Team" w ON w."orgId" = o."id" AND w."isDefault" = true
WHERE t."userId" = om."userId" AND om."isOwner" = true AND t."organizationId" IS NULL
"""
)
results["LibraryAgent"] = await _assign_team_tenancy(
"""
UPDATE "LibraryAgent" t
SET "organizationId" = o."id", "teamId" = w."id"
FROM "OrgMember" om
JOIN "Organization" o ON o."id" = om."orgId" AND o."isPersonal" = true
JOIN "Team" w ON w."orgId" = o."id" AND w."isDefault" = true
WHERE t."userId" = om."userId" AND om."isOwner" = true AND t."organizationId" IS NULL
"""
)
results["LibraryFolder"] = await _assign_team_tenancy(
"""
UPDATE "LibraryFolder" t
SET "organizationId" = o."id", "teamId" = w."id"
FROM "OrgMember" om
JOIN "Organization" o ON o."id" = om."orgId" AND o."isPersonal" = true
JOIN "Team" w ON w."orgId" = o."id" AND w."isDefault" = true
WHERE t."userId" = om."userId" AND om."isOwner" = true AND t."organizationId" IS NULL
"""
)
results["IntegrationWebhook"] = await _assign_team_tenancy(
"""
UPDATE "IntegrationWebhook" t
SET "organizationId" = o."id", "teamId" = w."id"
FROM "OrgMember" om
JOIN "Organization" o ON o."id" = om."orgId" AND o."isPersonal" = true
JOIN "Team" w ON w."orgId" = o."id" AND w."isDefault" = true
WHERE t."userId" = om."userId" AND om."isOwner" = true AND t."organizationId" IS NULL
"""
)
results["APIKey"] = await _assign_team_tenancy(
"""
UPDATE "APIKey" t
SET "organizationId" = o."id", "teamId" = w."id"
FROM "OrgMember" om
JOIN "Organization" o ON o."id" = om."orgId" AND o."isPersonal" = true
JOIN "Team" w ON w."orgId" = o."id" AND w."isDefault" = true
WHERE t."userId" = om."userId" AND om."isOwner" = true AND t."organizationId" IS NULL
"""
)
# --- Tables needing only organizationId ---
results["BuilderSearchHistory"] = await prisma.execute_raw(
"""
UPDATE "BuilderSearchHistory" t
SET "organizationId" = o."id"
FROM "OrgMember" om
JOIN "Organization" o ON o."id" = om."orgId" AND o."isPersonal" = true
WHERE t."userId" = om."userId" AND om."isOwner" = true AND t."organizationId" IS NULL
"""
)
results["PendingHumanReview"] = await prisma.execute_raw(
"""
UPDATE "PendingHumanReview" t
SET "organizationId" = o."id"
FROM "OrgMember" om
JOIN "Organization" o ON o."id" = om."orgId" AND o."isPersonal" = true
WHERE t."userId" = om."userId" AND om."isOwner" = true AND t."organizationId" IS NULL
"""
)
results["StoreListingVersion"] = await prisma.execute_raw(
"""
UPDATE "StoreListingVersion" slv
SET "organizationId" = o."id"
FROM "StoreListingVersion" v
JOIN "StoreListing" sl ON sl."id" = v."storeListingId"
JOIN "OrgMember" om ON om."userId" = sl."owningUserId" AND om."isOwner" = true
JOIN "Organization" o ON o."id" = om."orgId" AND o."isPersonal" = true
WHERE slv."id" = v."id" AND slv."organizationId" IS NULL
"""
)
for table_name, count in results.items():
if count > 0:
logger.info(f"Org migration: assigned {count} {table_name} rows")
return results
async def migrate_store_listings() -> int:
"""Set owningOrgId on StoreListings that lack it.
Returns the number of listings migrated.
"""
result = await prisma.execute_raw(
"""
UPDATE "StoreListing" sl
SET "owningOrgId" = o."id"
FROM "OrgMember" om
JOIN "Organization" o ON o."id" = om."orgId" AND o."isPersonal" = true
WHERE sl."owningUserId" = om."userId"
AND om."isOwner" = true
AND sl."owningOrgId" IS NULL
"""
)
if result > 0:
logger.info(f"Org migration: assigned {result} store listings to orgs")
return result
async def create_store_listing_aliases() -> int:
"""Create OrganizationAlias entries for published store listings.
This ensures that marketplace URLs using the org slug continue to work.
Only creates aliases for listings whose org slug matches the user's Profile
username (which it should for personal orgs created from Profile.username).
Returns the number of aliases created.
"""
result = await prisma.execute_raw(
"""
INSERT INTO "OrganizationAlias"
("id", "organizationId", "aliasSlug", "aliasType", "createdByUserId", "isRemovable")
SELECT
gen_random_uuid(),
o."id",
p."username",
'MIGRATION',
o."bootstrapUserId",
false
FROM "StoreListing" sl
JOIN "Organization" o ON o."id" = sl."owningOrgId"
JOIN "Profile" p ON p."userId" = sl."owningUserId"
WHERE sl."owningOrgId" IS NOT NULL
AND sl."hasApprovedVersion" = true
AND o."slug" != p."username"
AND NOT EXISTS (
SELECT 1 FROM "OrganizationAlias" oa
WHERE oa."aliasSlug" = p."username"
)
"""
)
if result > 0:
logger.info(f"Org migration: created {result} store listing aliases")
return result
async def run_migration() -> None:
"""Orchestrate the full org bootstrap migration. Idempotent."""
start = time.monotonic()
logger.info("Org migration: starting personal org bootstrap")
orgs_created = await create_orgs_for_existing_users()
await migrate_org_balances()
await migrate_credit_transactions()
resource_counts = await assign_resources_to_teams()
await migrate_store_listings()
await create_store_listing_aliases()
total_resources = sum(resource_counts.values())
elapsed = time.monotonic() - start
logger.info(
f"Org migration: complete in {elapsed:.2f}s — "
f"{orgs_created} orgs created, {total_resources} resources assigned"
)

View File

@@ -0,0 +1,511 @@
"""Tests for the personal org bootstrap migration.
Tests the migration logic including slug resolution, idempotency,
and correct data mapping. Uses mocks for Prisma DB calls since the
test infrastructure does not provide a live database connection.
"""
from unittest.mock import AsyncMock, MagicMock
import pytest
from backend.data.org_migration import (
_resolve_unique_slug,
_sanitize_slug,
assign_resources_to_teams,
create_orgs_for_existing_users,
migrate_credit_transactions,
migrate_org_balances,
migrate_store_listings,
run_migration,
)
@pytest.fixture(autouse=True)
def mock_prisma(mocker):
"""Replace the prisma client in org_migration with a full mock."""
mock = MagicMock()
# Default: all find_unique calls return None (no collisions)
mock.organization.find_unique = AsyncMock(return_value=None)
mock.organizationalias.find_unique = AsyncMock(return_value=None)
mock.organization.create = AsyncMock(return_value=MagicMock(id="org-1"))
mock.orgmember.create = AsyncMock()
mock.team.create = AsyncMock(return_value=MagicMock(id="ws-1"))
mock.teammember.create = AsyncMock()
mock.organizationprofile.create = AsyncMock()
mock.organizationseatassignment.create = AsyncMock()
mock.query_raw = AsyncMock(return_value=[])
mock.execute_raw = AsyncMock(return_value=0)
mocker.patch("backend.data.org_migration.prisma", mock)
return mock
# ---------------------------------------------------------------------------
# _sanitize_slug
# ---------------------------------------------------------------------------
class TestSanitizeSlug:
def test_lowercase_and_hyphens(self):
assert _sanitize_slug("Hello World") == "hello-world"
def test_strips_special_chars(self):
assert _sanitize_slug("user@name!#$%") == "user-name"
def test_collapses_multiple_hyphens(self):
assert _sanitize_slug("a---b") == "a-b"
def test_strips_leading_trailing_hyphens(self):
assert _sanitize_slug("-hello-") == "hello"
def test_empty_string_returns_user(self):
assert _sanitize_slug("") == "user"
def test_only_special_chars_returns_user(self):
assert _sanitize_slug("@#$%") == "user"
def test_numeric_slug(self):
assert _sanitize_slug("12345") == "12345"
def test_preserves_hyphens(self):
assert _sanitize_slug("my-cool-agent") == "my-cool-agent"
def test_unicode_stripped(self):
assert _sanitize_slug("caf\u00e9-latt\u00e9") == "caf-latt"
def test_whitespace_only(self):
assert _sanitize_slug(" ") == "user"
# ---------------------------------------------------------------------------
# _resolve_unique_slug
# ---------------------------------------------------------------------------
class TestResolveUniqueSlug:
@pytest.mark.asyncio
async def test_slug_available_returns_as_is(self, mock_prisma):
result = await _resolve_unique_slug("my-org")
assert result == "my-org"
@pytest.mark.asyncio
async def test_slug_taken_by_org_gets_suffix(self, mock_prisma):
async def org_find(where):
slug = where.get("slug", "")
if slug == "taken":
return MagicMock(id="existing-org")
return None
mock_prisma.organization.find_unique = AsyncMock(side_effect=org_find)
result = await _resolve_unique_slug("taken")
assert result == "taken-1"
@pytest.mark.asyncio
async def test_slug_taken_by_alias_gets_suffix(self, mock_prisma):
async def alias_find(where):
slug = where.get("aliasSlug", "")
if slug == "aliased":
return MagicMock()
return None
mock_prisma.organizationalias.find_unique = AsyncMock(side_effect=alias_find)
result = await _resolve_unique_slug("aliased")
assert result == "aliased-1"
@pytest.mark.asyncio
async def test_multiple_collisions_increments(self, mock_prisma):
async def org_find(where):
slug = where.get("slug", "")
if slug in ("x", "x-1", "x-2"):
return MagicMock(id="existing")
return None
mock_prisma.organization.find_unique = AsyncMock(side_effect=org_find)
result = await _resolve_unique_slug("x")
assert result == "x-3"
# ---------------------------------------------------------------------------
# create_orgs_for_existing_users
# ---------------------------------------------------------------------------
class TestCreateOrgsForExistingUsers:
@pytest.mark.asyncio
async def test_no_users_without_org_is_noop(self, mock_prisma):
result = await create_orgs_for_existing_users()
assert result == 0
@pytest.mark.asyncio
async def test_user_with_profile_gets_profile_username_slug(self, mock_prisma):
mock_prisma.query_raw = AsyncMock(
return_value=[
{
"id": "user-1",
"email": "alice@example.com",
"name": "Alice",
"stripeCustomerId": "cus_123",
"topUpConfig": None,
"profile_username": "alice",
"profile_name": "Alice Smith",
"profile_description": "A developer",
"profile_avatar_url": "https://example.com/avatar.png",
"profile_links": ["https://github.com/alice"],
},
]
)
result = await create_orgs_for_existing_users()
assert result == 1
# Verify org was created with profile-derived slug
mock_prisma.organization.create.assert_called_once()
create_data = mock_prisma.organization.create.call_args[1]["data"]
assert create_data["slug"] == "alice"
assert create_data["name"] == "Alice Smith"
assert create_data["isPersonal"] is True
assert create_data["stripeCustomerId"] == "cus_123"
assert create_data["bootstrapUserId"] == "user-1"
# Verify workspace created
mock_prisma.team.create.assert_called_once()
ws_data = mock_prisma.team.create.call_args[1]["data"]
assert ws_data["name"] == "Default"
assert ws_data["isDefault"] is True
assert ws_data["joinPolicy"] == "OPEN"
@pytest.mark.asyncio
async def test_user_without_profile_uses_email_slug(self, mock_prisma):
mock_prisma.query_raw = AsyncMock(
return_value=[
{
"id": "user-2",
"email": "bob@company.org",
"name": None,
"stripeCustomerId": None,
"topUpConfig": None,
"profile_username": None,
"profile_name": None,
"profile_description": None,
"profile_avatar_url": None,
"profile_links": None,
},
]
)
result = await create_orgs_for_existing_users()
assert result == 1
create_data = mock_prisma.organization.create.call_args[1]["data"]
assert create_data["slug"] == "bob"
assert create_data["name"] == "bob"
@pytest.mark.asyncio
async def test_user_with_name_no_profile_uses_name_slug(self, mock_prisma):
mock_prisma.query_raw = AsyncMock(
return_value=[
{
"id": "user-3",
"email": "charlie@example.com",
"name": "Charlie Brown",
"stripeCustomerId": None,
"topUpConfig": None,
"profile_username": None,
"profile_name": None,
"profile_description": None,
"profile_avatar_url": None,
"profile_links": None,
},
]
)
result = await create_orgs_for_existing_users()
assert result == 1
create_data = mock_prisma.organization.create.call_args[1]["data"]
assert create_data["slug"] == "charlie-brown"
assert create_data["name"] == "Charlie Brown"
@pytest.mark.asyncio
async def test_user_with_empty_email_uses_id_slug(self, mock_prisma):
mock_prisma.query_raw = AsyncMock(
return_value=[
{
"id": "abcdef12-3456-7890-abcd-ef1234567890",
"email": "",
"name": None,
"stripeCustomerId": None,
"topUpConfig": None,
"profile_username": None,
"profile_name": None,
"profile_description": None,
"profile_avatar_url": None,
"profile_links": None,
},
]
)
result = await create_orgs_for_existing_users()
assert result == 1
create_data = mock_prisma.organization.create.call_args[1]["data"]
assert create_data["slug"] == "user-abcdef12"
@pytest.mark.asyncio
async def test_stripe_customer_id_included_when_present(self, mock_prisma):
mock_prisma.query_raw = AsyncMock(
return_value=[
{
"id": "user-stripe",
"email": "stripe@test.com",
"name": "Stripe User",
"stripeCustomerId": "cus_abc123",
"topUpConfig": '{"amount": 1000}',
"profile_username": "stripeuser",
"profile_name": "Stripe User",
"profile_description": None,
"profile_avatar_url": None,
"profile_links": None,
},
]
)
result = await create_orgs_for_existing_users()
assert result == 1
create_data = mock_prisma.organization.create.call_args[1]["data"]
assert create_data["stripeCustomerId"] == "cus_abc123"
assert create_data["topUpConfig"] == '{"amount": 1000}'
@pytest.mark.asyncio
async def test_stripe_fields_omitted_when_none(self, mock_prisma):
mock_prisma.query_raw = AsyncMock(
return_value=[
{
"id": "user-no-stripe",
"email": "nostripe@test.com",
"name": None,
"stripeCustomerId": None,
"topUpConfig": None,
"profile_username": "nostripe",
"profile_name": None,
"profile_description": None,
"profile_avatar_url": None,
"profile_links": None,
},
]
)
result = await create_orgs_for_existing_users()
assert result == 1
create_data = mock_prisma.organization.create.call_args[1]["data"]
assert "stripeCustomerId" not in create_data
assert "topUpConfig" not in create_data
@pytest.mark.asyncio
async def test_org_profile_omits_none_optional_fields(self, mock_prisma):
"""Profile creation should not pass None for optional JSON fields."""
mock_prisma.query_raw = AsyncMock(
return_value=[
{
"id": "user-minimal",
"email": "minimal@test.com",
"name": "Min",
"stripeCustomerId": None,
"topUpConfig": None,
"profile_username": "minimal",
"profile_name": "Min",
"profile_description": None,
"profile_avatar_url": None,
"profile_links": None,
},
]
)
await create_orgs_for_existing_users()
profile_data = mock_prisma.organizationprofile.create.call_args[1]["data"]
assert "avatarUrl" not in profile_data
assert "bio" not in profile_data
assert "socialLinks" not in profile_data
assert profile_data["username"] == "minimal"
assert profile_data["displayName"] == "Min"
@pytest.mark.asyncio
async def test_creates_all_required_records(self, mock_prisma):
"""Verify the full set of records created per user."""
mock_prisma.query_raw = AsyncMock(
return_value=[
{
"id": "user-full",
"email": "full@test.com",
"name": "Full User",
"stripeCustomerId": None,
"topUpConfig": None,
"profile_username": "fulluser",
"profile_name": "Full User",
"profile_description": "Bio here",
"profile_avatar_url": "https://example.com/avatar.png",
"profile_links": ["https://github.com/fulluser"],
},
]
)
await create_orgs_for_existing_users()
# Verify all 6 records created
mock_prisma.organization.create.assert_called_once()
mock_prisma.orgmember.create.assert_called_once()
mock_prisma.team.create.assert_called_once()
mock_prisma.teammember.create.assert_called_once()
mock_prisma.organizationprofile.create.assert_called_once()
mock_prisma.organizationseatassignment.create.assert_called_once()
# Verify OrgMember is owner+admin
member_data = mock_prisma.orgmember.create.call_args[1]["data"]
assert member_data["isOwner"] is True
assert member_data["isAdmin"] is True
# Verify workspace is default+open
ws_data = mock_prisma.team.create.call_args[1]["data"]
assert ws_data["isDefault"] is True
assert ws_data["joinPolicy"] == "OPEN"
# Verify seat is FREE+ACTIVE
seat_data = mock_prisma.organizationseatassignment.create.call_args[1]["data"]
assert seat_data["seatType"] == "FREE"
assert seat_data["status"] == "ACTIVE"
# ---------------------------------------------------------------------------
# migrate_org_balances
# ---------------------------------------------------------------------------
class TestMigrateOrgBalances:
@pytest.mark.asyncio
async def test_returns_count(self, mock_prisma):
mock_prisma.execute_raw = AsyncMock(return_value=5)
result = await migrate_org_balances()
assert result == 5
# ---------------------------------------------------------------------------
# migrate_credit_transactions
# ---------------------------------------------------------------------------
class TestMigrateCreditTransactions:
@pytest.mark.asyncio
async def test_returns_count(self, mock_prisma):
mock_prisma.execute_raw = AsyncMock(return_value=42)
result = await migrate_credit_transactions()
assert result == 42
# ---------------------------------------------------------------------------
# assign_resources_to_teams
# ---------------------------------------------------------------------------
class TestAssignResources:
@pytest.mark.asyncio
async def test_updates_all_tables(self, mock_prisma, mocker):
mocker.patch(
"backend.data.org_migration._assign_team_tenancy",
new_callable=AsyncMock,
return_value=10,
)
mock_prisma.execute_raw = AsyncMock(return_value=10)
result = await assign_resources_to_teams()
# 8 tables with workspace + 3 tables org-only = 11 entries
assert len(result) == 11
assert result["AgentGraph"] == 10
assert result["ChatSession"] == 10
assert result["BuilderSearchHistory"] == 10
assert result["PendingHumanReview"] == 10
assert result["StoreListingVersion"] == 10
@pytest.mark.asyncio
async def test_zero_updates_still_returns(self, mock_prisma, mocker):
mocker.patch(
"backend.data.org_migration._assign_team_tenancy",
new_callable=AsyncMock,
return_value=0,
)
mock_prisma.execute_raw = AsyncMock(return_value=0)
result = await assign_resources_to_teams()
assert all(v == 0 for v in result.values())
# ---------------------------------------------------------------------------
# migrate_store_listings
# ---------------------------------------------------------------------------
class TestMigrateStoreListings:
@pytest.mark.asyncio
async def test_returns_count(self, mock_prisma):
mock_prisma.execute_raw = AsyncMock(return_value=3)
result = await migrate_store_listings()
assert result == 3
# ---------------------------------------------------------------------------
# run_migration (orchestrator)
# ---------------------------------------------------------------------------
class TestRunMigration:
@pytest.mark.asyncio
async def test_calls_all_steps_in_order(self, mocker):
calls: list[str] = []
mocker.patch(
"backend.data.org_migration.create_orgs_for_existing_users",
new_callable=lambda: lambda: _track(calls, "create_orgs", 1),
)
mocker.patch(
"backend.data.org_migration.migrate_org_balances",
new_callable=lambda: lambda: _track(calls, "balances", 0),
)
mocker.patch(
"backend.data.org_migration.migrate_credit_transactions",
new_callable=lambda: lambda: _track(calls, "credits", 0),
)
mocker.patch(
"backend.data.org_migration.assign_resources_to_teams",
new_callable=lambda: lambda: _track(
calls, "assign_resources", {"AgentGraph": 5}
),
)
mocker.patch(
"backend.data.org_migration.migrate_store_listings",
new_callable=lambda: lambda: _track(calls, "store_listings", 0),
)
mocker.patch(
"backend.data.org_migration.create_store_listing_aliases",
new_callable=lambda: lambda: _track(calls, "aliases", 0),
)
await run_migration()
assert calls == [
"create_orgs",
"balances",
"credits",
"assign_resources",
"store_listings",
"aliases",
]
async def _track(calls: list[str], name: str, result):
calls.append(name)
return result

View File

@@ -159,6 +159,8 @@ async def _execute_graph(**kwargs):
graph_version=args.graph_version,
inputs=args.input_data,
graph_credentials_inputs=args.input_credentials,
organization_id=args.organization_id,
team_id=args.team_id,
)
await db.increment_onboarding_runs(args.user_id)
elapsed = asyncio.get_event_loop().time() - start_time
@@ -390,6 +392,8 @@ class GraphExecutionJobArgs(BaseModel):
cron: str
input_data: GraphInput
input_credentials: dict[str, CredentialsMetaInput] = Field(default_factory=dict)
organization_id: str | None = None
team_id: str | None = None
class GraphExecutionJobInfo(GraphExecutionJobArgs):
@@ -667,6 +671,8 @@ class Scheduler(AppService):
input_credentials: dict[str, CredentialsMetaInput],
name: Optional[str] = None,
user_timezone: str | None = None,
organization_id: Optional[str] = None,
team_id: Optional[str] = None,
) -> GraphExecutionJobInfo:
# Validate the graph before scheduling to prevent runtime failures
# We don't need the return value, just want the validation to run
@@ -703,6 +709,8 @@ class Scheduler(AppService):
cron=cron,
input_data=input_data,
input_credentials=input_credentials,
organization_id=organization_id,
team_id=team_id,
)
job = self.scheduler.add_job(
execute_graph,

View File

@@ -15,7 +15,7 @@ from backend.data import graph as graph_db
from backend.data import human_review as human_review_db
from backend.data import onboarding as onboarding_db
from backend.data import user as user_db
from backend.data import workspace as workspace_db
from backend.data import workspace as team_db
# Import dynamic field utilities from centralized location
from backend.data.block import BlockInput, BlockOutputEntry
@@ -869,6 +869,8 @@ async def add_graph_execution(
execution_context: Optional[ExecutionContext] = None,
graph_exec_id: Optional[str] = None,
dry_run: bool = False,
organization_id: Optional[str] = None,
team_id: Optional[str] = None,
) -> GraphExecutionWithNodes:
"""
Adds a graph execution to the queue and returns the execution entry.
@@ -895,7 +897,7 @@ async def add_graph_execution(
udb = user_db
gdb = graph_db
odb = onboarding_db
wdb = workspace_db
wdb = team_db
else:
edb = udb = gdb = odb = wdb = get_database_manager_async_client()
@@ -953,6 +955,8 @@ async def add_graph_execution(
preset_id=preset_id,
parent_graph_exec_id=parent_exec_id,
is_dry_run=dry_run,
organization_id=organization_id,
team_id=team_id,
)
logger.info(
@@ -983,7 +987,7 @@ async def add_graph_execution(
# Execution hierarchy
root_execution_id=graph_exec.id,
# Workspace (enables workspace:// file resolution in blocks)
workspace_id=workspace.id,
team_id=workspace.id,
)
try:

View File

@@ -0,0 +1,175 @@
"""Scoped credential store using the IntegrationCredential table.
Provides the new credential resolution path (USER → WORKSPACE → ORG)
using the IntegrationCredential table introduced in PR1. During the
dual-read transition period, callers should try this store first and
fall back to the legacy IntegrationCredentialsStore.
This store is used alongside the existing credentials_store.py which
reads from the User.integrations encrypted blob.
"""
import logging
from typing import Optional
from backend.data.db import prisma
from backend.util.encryption import JSONCryptor
logger = logging.getLogger(__name__)
_cryptor = JSONCryptor()
async def get_scoped_credentials(
user_id: str,
organization_id: str,
team_id: str | None = None,
provider: str | None = None,
) -> list[dict]:
"""Get credentials visible to the user in the current org/workspace context.
Resolution order (per plan 3D):
1. USER credentials created by this user in this org
2. WORKSPACE credentials for the active workspace (if workspace is set)
3. ORG credentials for the active org
Returns a list of credential metadata dicts (not decrypted payloads).
"""
results: list[dict] = []
# 1. User-scoped credentials
user_where: dict = {
"organizationId": organization_id,
"ownerType": "USER",
"ownerId": user_id,
"status": "active",
}
if provider:
user_where["provider"] = provider
user_creds = await prisma.integrationcredential.find_many(where=user_where)
for c in user_creds:
results.append(_cred_to_metadata(c, scope="USER"))
# 2. Workspace-scoped credentials (only if workspace is active)
if team_id:
ws_where: dict = {
"organizationId": organization_id,
"ownerType": "TEAM",
"ownerId": team_id,
"status": "active",
}
if provider:
ws_where["provider"] = provider
ws_creds = await prisma.integrationcredential.find_many(where=ws_where)
for c in ws_creds:
results.append(_cred_to_metadata(c, scope="TEAM"))
# 3. Org-scoped credentials
org_where: dict = {
"organizationId": organization_id,
"ownerType": "ORG",
"ownerId": organization_id,
"status": "active",
}
if provider:
org_where["provider"] = provider
org_creds = await prisma.integrationcredential.find_many(where=org_where)
for c in org_creds:
results.append(_cred_to_metadata(c, scope="ORG"))
return results
async def get_credential_by_id(
credential_id: str,
user_id: str,
organization_id: str,
team_id: str | None = None,
decrypt: bool = False,
) -> Optional[dict]:
"""Get a specific credential by ID if the user has access.
Access rules:
- USER creds: only the creating user can access
- WORKSPACE creds: any workspace member can access (verified by caller)
- ORG creds: any org member can access (verified by caller)
"""
cred = await prisma.integrationcredential.find_unique(where={"id": credential_id})
if cred is None or cred.organizationId != organization_id:
return None
# Access check
if cred.ownerType == "USER" and cred.createdByUserId != user_id:
return None
result = _cred_to_metadata(cred, scope=cred.ownerType)
if decrypt:
result["payload"] = _cryptor.decrypt(cred.encryptedPayload)
return result
async def create_credential(
organization_id: str,
owner_type: str, # USER, WORKSPACE, ORG
owner_id: str, # userId, workspaceId, or orgId
provider: str,
credential_type: str,
display_name: str,
payload: dict,
user_id: str,
expires_at=None,
metadata: dict | None = None,
) -> dict:
"""Create a new scoped credential."""
encrypted = _cryptor.encrypt(payload)
cred = await prisma.integrationcredential.create(
data={
"organizationId": organization_id,
"ownerType": owner_type,
"ownerId": owner_id,
"provider": provider,
"credentialType": credential_type,
"displayName": display_name,
"encryptedPayload": encrypted,
"createdByUserId": user_id,
"expiresAt": expires_at,
"metadata": metadata,
}
)
return _cred_to_metadata(cred, scope=owner_type)
async def delete_credential(
credential_id: str, user_id: str, organization_id: str
) -> None:
"""Soft-delete a credential by setting status to 'revoked'."""
cred = await prisma.integrationcredential.find_unique(where={"id": credential_id})
if cred is None or cred.organizationId != organization_id:
raise ValueError(f"Credential {credential_id} not found")
# Only the creator or an admin can delete (admin check done at route level)
await prisma.integrationcredential.update(
where={"id": credential_id},
data={"status": "revoked"},
)
def _cred_to_metadata(cred, scope: str) -> dict:
"""Convert a Prisma IntegrationCredential to a metadata dict."""
return {
"id": cred.id,
"provider": cred.provider,
"credentialType": cred.credentialType,
"displayName": cred.displayName,
"scope": scope,
"createdByUserId": cred.createdByUserId,
"lastUsedAt": cred.lastUsedAt,
"expiresAt": cred.expiresAt,
"createdAt": cred.createdAt,
}

View File

@@ -0,0 +1,553 @@
-- CreateEnum
CREATE TYPE "TeamJoinPolicy" AS ENUM ('OPEN', 'PRIVATE');
-- CreateEnum
CREATE TYPE "ResourceVisibility" AS ENUM ('PRIVATE', 'TEAM', 'ORG');
-- CreateEnum
CREATE TYPE "CredentialScope" AS ENUM ('USER', 'TEAM', 'ORG');
-- CreateEnum
CREATE TYPE "OrgAliasType" AS ENUM ('MIGRATION', 'RENAME', 'MANUAL');
-- CreateEnum
CREATE TYPE "OrgMemberStatus" AS ENUM ('INVITED', 'ACTIVE', 'SUSPENDED', 'REMOVED');
-- CreateEnum
CREATE TYPE "SeatType" AS ENUM ('FREE', 'PAID');
-- CreateEnum
CREATE TYPE "SeatStatus" AS ENUM ('ACTIVE', 'INACTIVE', 'PENDING');
-- CreateEnum
CREATE TYPE "TransferStatus" AS ENUM ('PENDING', 'SOURCE_APPROVED', 'TARGET_APPROVED', 'COMPLETED', 'REJECTED', 'CANCELLED');
-- CreateEnum
CREATE TYPE "CredentialOwnerType" AS ENUM ('USER', 'TEAM', 'ORG');
-- AlterTable
ALTER TABLE "BuilderSearchHistory" ADD COLUMN "organizationId" TEXT;
-- AlterTable
ALTER TABLE "ChatSession" ADD COLUMN "organizationId" TEXT,
ADD COLUMN "teamId" TEXT,
ADD COLUMN "visibility" "ResourceVisibility" NOT NULL DEFAULT 'PRIVATE';
-- AlterTable
ALTER TABLE "AgentGraph" ADD COLUMN "organizationId" TEXT,
ADD COLUMN "teamId" TEXT,
ADD COLUMN "visibility" "ResourceVisibility" NOT NULL DEFAULT 'PRIVATE';
-- AlterTable
ALTER TABLE "AgentPreset" ADD COLUMN "organizationId" TEXT,
ADD COLUMN "teamId" TEXT,
ADD COLUMN "visibility" "ResourceVisibility" NOT NULL DEFAULT 'PRIVATE';
-- AlterTable
ALTER TABLE "UserNotificationBatch" ADD COLUMN "organizationId" TEXT,
ADD COLUMN "teamId" TEXT;
-- AlterTable
ALTER TABLE "LibraryAgent" ADD COLUMN "organizationId" TEXT,
ADD COLUMN "teamId" TEXT,
ADD COLUMN "visibility" "ResourceVisibility" NOT NULL DEFAULT 'PRIVATE';
-- AlterTable
ALTER TABLE "LibraryFolder" ADD COLUMN "organizationId" TEXT,
ADD COLUMN "teamId" TEXT,
ADD COLUMN "visibility" "ResourceVisibility" NOT NULL DEFAULT 'PRIVATE';
-- AlterTable
ALTER TABLE "AgentGraphExecution" ADD COLUMN "organizationId" TEXT,
ADD COLUMN "teamId" TEXT,
ADD COLUMN "visibility" "ResourceVisibility" NOT NULL DEFAULT 'PRIVATE';
-- AlterTable
ALTER TABLE "PendingHumanReview" ADD COLUMN "organizationId" TEXT,
ADD COLUMN "teamId" TEXT;
-- AlterTable
ALTER TABLE "IntegrationWebhook" ADD COLUMN "organizationId" TEXT,
ADD COLUMN "teamId" TEXT,
ADD COLUMN "visibility" "ResourceVisibility" NOT NULL DEFAULT 'PRIVATE';
-- AlterTable
ALTER TABLE "StoreListing" ADD COLUMN "owningOrgId" TEXT;
-- AlterTable
ALTER TABLE "StoreListingVersion" ADD COLUMN "organizationId" TEXT;
-- AlterTable
ALTER TABLE "APIKey" ADD COLUMN "organizationId" TEXT,
ADD COLUMN "ownerType" "CredentialOwnerType",
ADD COLUMN "teamId" TEXT,
ADD COLUMN "teamIdRestriction" TEXT;
-- AlterTable
ALTER TABLE "OAuthApplication" ADD COLUMN "organizationId" TEXT,
ADD COLUMN "ownerType" "CredentialOwnerType",
ADD COLUMN "teamIdRestriction" TEXT;
-- CreateTable
CREATE TABLE "Organization" (
"id" TEXT NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
"name" TEXT NOT NULL,
"slug" TEXT NOT NULL,
"avatarUrl" TEXT,
"description" TEXT,
"isPersonal" BOOLEAN NOT NULL DEFAULT false,
"settings" JSONB NOT NULL DEFAULT '{}',
"stripeCustomerId" TEXT,
"stripeSubscriptionId" TEXT,
"topUpConfig" JSONB,
"archivedAt" TIMESTAMP(3),
"deletedAt" TIMESTAMP(3),
"bootstrapUserId" TEXT,
CONSTRAINT "Organization_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "OrganizationAlias" (
"id" TEXT NOT NULL,
"organizationId" TEXT NOT NULL,
"aliasSlug" TEXT NOT NULL,
"aliasType" "OrgAliasType" NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"createdByUserId" TEXT,
"removedAt" TIMESTAMP(3),
"removedByUserId" TEXT,
"isRemovable" BOOLEAN NOT NULL DEFAULT true,
CONSTRAINT "OrganizationAlias_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "OrganizationProfile" (
"organizationId" TEXT NOT NULL,
"username" TEXT NOT NULL,
"displayName" TEXT,
"avatarUrl" TEXT,
"bio" TEXT,
"socialLinks" JSONB,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
CONSTRAINT "OrganizationProfile_pkey" PRIMARY KEY ("organizationId")
);
-- CreateTable
CREATE TABLE "OrgMember" (
"id" TEXT NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
"orgId" TEXT NOT NULL,
"userId" TEXT NOT NULL,
"isOwner" BOOLEAN NOT NULL DEFAULT false,
"isAdmin" BOOLEAN NOT NULL DEFAULT false,
"isBillingManager" BOOLEAN NOT NULL DEFAULT false,
"status" "OrgMemberStatus" NOT NULL DEFAULT 'ACTIVE',
"joinedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"invitedByUserId" TEXT,
CONSTRAINT "OrgMember_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "OrgInvitation" (
"id" TEXT NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"orgId" TEXT NOT NULL,
"email" TEXT NOT NULL,
"targetUserId" TEXT,
"isAdmin" BOOLEAN NOT NULL DEFAULT false,
"isBillingManager" BOOLEAN NOT NULL DEFAULT false,
"token" TEXT NOT NULL,
"tokenHash" TEXT,
"expiresAt" TIMESTAMP(3) NOT NULL,
"acceptedAt" TIMESTAMP(3),
"revokedAt" TIMESTAMP(3),
"invitedByUserId" TEXT NOT NULL,
"teamIds" TEXT[],
CONSTRAINT "OrgInvitation_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "Team" (
"id" TEXT NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
"name" TEXT NOT NULL,
"slug" TEXT,
"description" TEXT,
"isDefault" BOOLEAN NOT NULL DEFAULT false,
"joinPolicy" "TeamJoinPolicy" NOT NULL DEFAULT 'OPEN',
"orgId" TEXT NOT NULL,
"archivedAt" TIMESTAMP(3),
"createdByUserId" TEXT,
CONSTRAINT "Team_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "TeamMember" (
"id" TEXT NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
"teamId" TEXT NOT NULL,
"userId" TEXT NOT NULL,
"isAdmin" BOOLEAN NOT NULL DEFAULT false,
"isBillingManager" BOOLEAN NOT NULL DEFAULT false,
"status" "OrgMemberStatus" NOT NULL DEFAULT 'ACTIVE',
"joinedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"invitedByUserId" TEXT,
CONSTRAINT "TeamMember_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "TeamInvite" (
"id" TEXT NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"teamId" TEXT NOT NULL,
"email" TEXT NOT NULL,
"targetUserId" TEXT,
"isAdmin" BOOLEAN NOT NULL DEFAULT false,
"isBillingManager" BOOLEAN NOT NULL DEFAULT false,
"token" TEXT NOT NULL,
"tokenHash" TEXT,
"expiresAt" TIMESTAMP(3) NOT NULL,
"acceptedAt" TIMESTAMP(3),
"revokedAt" TIMESTAMP(3),
"invitedByUserId" TEXT NOT NULL,
CONSTRAINT "TeamInvite_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "OrganizationSubscription" (
"organizationId" TEXT NOT NULL,
"planCode" TEXT,
"planTier" TEXT,
"stripeCustomerId" TEXT,
"stripeSubscriptionId" TEXT,
"status" TEXT NOT NULL DEFAULT 'active',
"renewalAt" TIMESTAMP(3),
"cancelAt" TIMESTAMP(3),
"entitlements" JSONB NOT NULL DEFAULT '{}',
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
CONSTRAINT "OrganizationSubscription_pkey" PRIMARY KEY ("organizationId")
);
-- CreateTable
CREATE TABLE "OrganizationSeatAssignment" (
"id" TEXT NOT NULL,
"organizationId" TEXT NOT NULL,
"userId" TEXT NOT NULL,
"seatType" "SeatType" NOT NULL DEFAULT 'FREE',
"status" "SeatStatus" NOT NULL DEFAULT 'ACTIVE',
"assignedByUserId" TEXT,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
CONSTRAINT "OrganizationSeatAssignment_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "OrgBalance" (
"orgId" TEXT NOT NULL,
"balance" INTEGER NOT NULL DEFAULT 0,
"updatedAt" TIMESTAMP(3) NOT NULL,
CONSTRAINT "OrgBalance_pkey" PRIMARY KEY ("orgId")
);
-- CreateTable
CREATE TABLE "OrgCreditTransaction" (
"transactionKey" TEXT NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"orgId" TEXT NOT NULL,
"initiatedByUserId" TEXT,
"teamId" TEXT,
"amount" INTEGER NOT NULL,
"type" "CreditTransactionType" NOT NULL,
"runningBalance" INTEGER,
"isActive" BOOLEAN NOT NULL DEFAULT true,
"metadata" JSONB,
CONSTRAINT "OrgCreditTransaction_pkey" PRIMARY KEY ("transactionKey","orgId")
);
-- CreateTable
CREATE TABLE "TransferRequest" (
"id" TEXT NOT NULL,
"resourceType" TEXT NOT NULL,
"resourceId" TEXT NOT NULL,
"sourceOrganizationId" TEXT NOT NULL,
"targetOrganizationId" TEXT NOT NULL,
"initiatedByUserId" TEXT NOT NULL,
"status" "TransferStatus" NOT NULL DEFAULT 'PENDING',
"sourceApprovedByUserId" TEXT,
"targetApprovedByUserId" TEXT,
"completedAt" TIMESTAMP(3),
"reason" TEXT,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
CONSTRAINT "TransferRequest_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "AuditLog" (
"id" TEXT NOT NULL,
"organizationId" TEXT,
"teamId" TEXT,
"actorUserId" TEXT NOT NULL,
"entityType" TEXT NOT NULL,
"entityId" TEXT,
"action" TEXT NOT NULL,
"beforeJson" JSONB,
"afterJson" JSONB,
"correlationId" TEXT,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "AuditLog_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "IntegrationCredential" (
"id" TEXT NOT NULL,
"organizationId" TEXT NOT NULL,
"ownerType" "CredentialOwnerType" NOT NULL,
"ownerId" TEXT NOT NULL,
"teamId" TEXT,
"provider" TEXT NOT NULL,
"credentialType" TEXT NOT NULL,
"displayName" TEXT NOT NULL,
"encryptedPayload" TEXT NOT NULL,
"createdByUserId" TEXT NOT NULL,
"lastUsedAt" TIMESTAMP(3),
"status" TEXT NOT NULL DEFAULT 'active',
"metadata" JSONB,
"expiresAt" TIMESTAMP(3),
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
CONSTRAINT "IntegrationCredential_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE UNIQUE INDEX "Organization_slug_key" ON "Organization"("slug");
-- CreateIndex
CREATE UNIQUE INDEX "OrganizationAlias_aliasSlug_key" ON "OrganizationAlias"("aliasSlug");
-- CreateIndex
CREATE INDEX "OrganizationAlias_aliasSlug_idx" ON "OrganizationAlias"("aliasSlug");
-- CreateIndex
CREATE INDEX "OrganizationAlias_organizationId_idx" ON "OrganizationAlias"("organizationId");
-- CreateIndex
CREATE UNIQUE INDEX "OrganizationProfile_username_key" ON "OrganizationProfile"("username");
-- CreateIndex
CREATE INDEX "OrgMember_userId_idx" ON "OrgMember"("userId");
-- CreateIndex
CREATE INDEX "OrgMember_orgId_status_idx" ON "OrgMember"("orgId", "status");
-- CreateIndex
CREATE UNIQUE INDEX "OrgMember_orgId_userId_key" ON "OrgMember"("orgId", "userId");
-- CreateIndex
CREATE UNIQUE INDEX "OrgInvitation_token_key" ON "OrgInvitation"("token");
-- CreateIndex
CREATE INDEX "OrgInvitation_email_idx" ON "OrgInvitation"("email");
-- CreateIndex
CREATE INDEX "OrgInvitation_token_idx" ON "OrgInvitation"("token");
-- CreateIndex
CREATE INDEX "OrgInvitation_orgId_idx" ON "OrgInvitation"("orgId");
-- CreateIndex
CREATE INDEX "Team_orgId_isDefault_idx" ON "Team"("orgId", "isDefault");
-- CreateIndex
CREATE INDEX "Team_orgId_joinPolicy_idx" ON "Team"("orgId", "joinPolicy");
-- CreateIndex
CREATE UNIQUE INDEX "Team_orgId_name_key" ON "Team"("orgId", "name");
-- CreateIndex
CREATE INDEX "TeamMember_userId_idx" ON "TeamMember"("userId");
-- CreateIndex
CREATE INDEX "TeamMember_teamId_status_idx" ON "TeamMember"("teamId", "status");
-- CreateIndex
CREATE UNIQUE INDEX "TeamMember_teamId_userId_key" ON "TeamMember"("teamId", "userId");
-- CreateIndex
CREATE UNIQUE INDEX "TeamInvite_token_key" ON "TeamInvite"("token");
-- CreateIndex
CREATE INDEX "TeamInvite_email_idx" ON "TeamInvite"("email");
-- CreateIndex
CREATE INDEX "TeamInvite_token_idx" ON "TeamInvite"("token");
-- CreateIndex
CREATE INDEX "TeamInvite_teamId_idx" ON "TeamInvite"("teamId");
-- CreateIndex
CREATE INDEX "OrganizationSeatAssignment_organizationId_status_idx" ON "OrganizationSeatAssignment"("organizationId", "status");
-- CreateIndex
CREATE UNIQUE INDEX "OrganizationSeatAssignment_organizationId_userId_key" ON "OrganizationSeatAssignment"("organizationId", "userId");
-- CreateIndex
CREATE INDEX "OrgCreditTransaction_orgId_createdAt_idx" ON "OrgCreditTransaction"("orgId", "createdAt");
-- CreateIndex
CREATE INDEX "OrgCreditTransaction_initiatedByUserId_idx" ON "OrgCreditTransaction"("initiatedByUserId");
-- CreateIndex
CREATE INDEX "TransferRequest_sourceOrganizationId_idx" ON "TransferRequest"("sourceOrganizationId");
-- CreateIndex
CREATE INDEX "TransferRequest_targetOrganizationId_idx" ON "TransferRequest"("targetOrganizationId");
-- CreateIndex
CREATE INDEX "TransferRequest_status_idx" ON "TransferRequest"("status");
-- CreateIndex
CREATE INDEX "AuditLog_organizationId_createdAt_idx" ON "AuditLog"("organizationId", "createdAt");
-- CreateIndex
CREATE INDEX "AuditLog_actorUserId_createdAt_idx" ON "AuditLog"("actorUserId", "createdAt");
-- CreateIndex
CREATE INDEX "AuditLog_entityType_entityId_idx" ON "AuditLog"("entityType", "entityId");
-- CreateIndex
CREATE INDEX "IntegrationCredential_organizationId_ownerType_provider_idx" ON "IntegrationCredential"("organizationId", "ownerType", "provider");
-- CreateIndex
CREATE INDEX "IntegrationCredential_ownerId_ownerType_idx" ON "IntegrationCredential"("ownerId", "ownerType");
-- CreateIndex
CREATE INDEX "IntegrationCredential_createdByUserId_idx" ON "IntegrationCredential"("createdByUserId");
-- CreateIndex
CREATE INDEX "ChatSession_teamId_updatedAt_idx" ON "ChatSession"("teamId", "updatedAt");
-- CreateIndex
CREATE INDEX "AgentGraph_teamId_isActive_id_version_idx" ON "AgentGraph"("teamId", "isActive", "id", "version");
-- CreateIndex
CREATE INDEX "AgentPreset_teamId_idx" ON "AgentPreset"("teamId");
-- CreateIndex
CREATE INDEX "LibraryAgent_teamId_idx" ON "LibraryAgent"("teamId");
-- CreateIndex
CREATE INDEX "AgentGraphExecution_teamId_isDeleted_createdAt_idx" ON "AgentGraphExecution"("teamId", "isDeleted", "createdAt");
-- CreateIndex
CREATE INDEX "StoreListing_owningOrgId_idx" ON "StoreListing"("owningOrgId");
-- CreateIndex
CREATE INDEX "APIKey_teamId_idx" ON "APIKey"("teamId");
-- AddForeignKey
ALTER TABLE "ChatSession" ADD CONSTRAINT "ChatSession_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team"("id") ON DELETE SET NULL ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "AgentGraph" ADD CONSTRAINT "AgentGraph_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team"("id") ON DELETE SET NULL ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "AgentPreset" ADD CONSTRAINT "AgentPreset_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team"("id") ON DELETE SET NULL ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "LibraryAgent" ADD CONSTRAINT "LibraryAgent_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team"("id") ON DELETE SET NULL ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "LibraryFolder" ADD CONSTRAINT "LibraryFolder_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team"("id") ON DELETE SET NULL ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "AgentGraphExecution" ADD CONSTRAINT "AgentGraphExecution_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team"("id") ON DELETE SET NULL ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "IntegrationWebhook" ADD CONSTRAINT "IntegrationWebhook_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team"("id") ON DELETE SET NULL ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "StoreListing" ADD CONSTRAINT "StoreListing_owningOrgId_fkey" FOREIGN KEY ("owningOrgId") REFERENCES "Organization"("id") ON DELETE SET NULL ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "APIKey" ADD CONSTRAINT "APIKey_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "OrganizationAlias" ADD CONSTRAINT "OrganizationAlias_organizationId_fkey" FOREIGN KEY ("organizationId") REFERENCES "Organization"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "OrganizationProfile" ADD CONSTRAINT "OrganizationProfile_organizationId_fkey" FOREIGN KEY ("organizationId") REFERENCES "Organization"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "OrgMember" ADD CONSTRAINT "OrgMember_orgId_fkey" FOREIGN KEY ("orgId") REFERENCES "Organization"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "OrgMember" ADD CONSTRAINT "OrgMember_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "OrgInvitation" ADD CONSTRAINT "OrgInvitation_orgId_fkey" FOREIGN KEY ("orgId") REFERENCES "Organization"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "Team" ADD CONSTRAINT "Team_orgId_fkey" FOREIGN KEY ("orgId") REFERENCES "Organization"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "TeamMember" ADD CONSTRAINT "TeamMember_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "TeamMember" ADD CONSTRAINT "TeamMember_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "TeamInvite" ADD CONSTRAINT "TeamInvite_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "OrganizationSubscription" ADD CONSTRAINT "OrganizationSubscription_organizationId_fkey" FOREIGN KEY ("organizationId") REFERENCES "Organization"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "OrganizationSeatAssignment" ADD CONSTRAINT "OrganizationSeatAssignment_organizationId_userId_fkey" FOREIGN KEY ("organizationId", "userId") REFERENCES "OrgMember"("orgId", "userId") ON DELETE RESTRICT ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "OrganizationSeatAssignment" ADD CONSTRAINT "OrganizationSeatAssignment_organizationId_fkey" FOREIGN KEY ("organizationId") REFERENCES "Organization"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "OrgBalance" ADD CONSTRAINT "OrgBalance_orgId_fkey" FOREIGN KEY ("orgId") REFERENCES "Organization"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "OrgCreditTransaction" ADD CONSTRAINT "OrgCreditTransaction_orgId_fkey" FOREIGN KEY ("orgId") REFERENCES "Organization"("id") ON DELETE NO ACTION ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "TransferRequest" ADD CONSTRAINT "TransferRequest_sourceOrganizationId_fkey" FOREIGN KEY ("sourceOrganizationId") REFERENCES "Organization"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "TransferRequest" ADD CONSTRAINT "TransferRequest_targetOrganizationId_fkey" FOREIGN KEY ("targetOrganizationId") REFERENCES "Organization"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "AuditLog" ADD CONSTRAINT "AuditLog_organizationId_fkey" FOREIGN KEY ("organizationId") REFERENCES "Organization"("id") ON DELETE SET NULL ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "IntegrationCredential" ADD CONSTRAINT "IntegrationCredential_organizationId_fkey" FOREIGN KEY ("organizationId") REFERENCES "Organization"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "IntegrationCredential" ADD CONSTRAINT "IntegrationCredential_workspace_fkey" FOREIGN KEY ("teamId") REFERENCES "Team"("id") ON DELETE CASCADE ON UPDATE CASCADE;

View File

@@ -80,6 +80,10 @@ model User {
OAuthAuthorizationCodes OAuthAuthorizationCode[]
OAuthAccessTokens OAuthAccessToken[]
OAuthRefreshTokens OAuthRefreshToken[]
// Organization & Workspace relations
OrgMemberships OrgMember[]
TeamMemberships TeamMember[]
}
enum SubscriptionTier {
@@ -213,6 +217,9 @@ model BuilderSearchHistory {
userId String
User User @relation(fields: [userId], references: [id], onDelete: Cascade)
// Tenancy
organizationId String?
}
////////////////////////////////////////////////////////////
@@ -246,7 +253,14 @@ model ChatSession {
Messages ChatMessage[]
// Tenancy
organizationId String?
teamId String?
visibility ResourceVisibility @default(PRIVATE)
Team Team? @relation(fields: [teamId], references: [id], onDelete: SetNull)
@@index([userId, updatedAt])
@@index([teamId, updatedAt])
}
model ChatMessage {
@@ -304,9 +318,16 @@ model AgentGraph {
LibraryAgents LibraryAgent[]
StoreListingVersions StoreListingVersion[]
// Tenancy columns (nullable during migration, NOT NULL after cutover)
organizationId String?
teamId String?
visibility ResourceVisibility @default(PRIVATE)
Team Team? @relation(fields: [teamId], references: [id], onDelete: SetNull)
@@id(name: "graphVersionId", [id, version])
@@index([userId, isActive, id, version])
@@index([forkedFromId, forkedFromVersion])
@@index([teamId, isActive, id, version])
}
////////////////////////////////////////////////////////////
@@ -347,9 +368,16 @@ model AgentPreset {
isDeleted Boolean @default(false)
// Tenancy
organizationId String?
teamId String?
visibility ResourceVisibility @default(PRIVATE)
Team Team? @relation(fields: [teamId], references: [id], onDelete: SetNull)
@@index([userId])
@@index([agentGraphId, agentGraphVersion])
@@index([webhookId])
@@index([teamId])
}
enum NotificationType {
@@ -393,6 +421,10 @@ model UserNotificationBatch {
Notifications NotificationEvent[]
// Tenancy
organizationId String?
teamId String?
// Each user can only have one batch of a notification type at a time
@@unique([userId, type])
}
@@ -428,10 +460,17 @@ model LibraryAgent {
settings Json @default("{}")
// Tenancy
organizationId String?
teamId String?
visibility ResourceVisibility @default(PRIVATE)
Team Team? @relation(fields: [teamId], references: [id], onDelete: SetNull)
@@unique([userId, agentGraphId, agentGraphVersion])
@@index([agentGraphId, agentGraphVersion])
@@index([creatorId])
@@index([folderId])
@@index([teamId])
}
model LibraryFolder {
@@ -454,6 +493,12 @@ model LibraryFolder {
LibraryAgents LibraryAgent[]
// Tenancy
organizationId String?
teamId String?
visibility ResourceVisibility @default(PRIVATE)
Team Team? @relation(fields: [teamId], references: [id], onDelete: SetNull)
@@unique([userId, parentId, name]) // Name unique per parent per user
}
@@ -587,12 +632,19 @@ model AgentGraphExecution {
shareToken String? @unique
sharedAt DateTime?
// Tenancy
organizationId String?
teamId String?
visibility ResourceVisibility @default(PRIVATE)
Team Team? @relation(fields: [teamId], references: [id], onDelete: SetNull)
@@index([agentGraphId, agentGraphVersion])
@@index([userId, isDeleted, createdAt])
@@index([createdAt])
@@index([agentPresetId])
@@index([shareToken])
@@index([parentGraphExecutionId])
@@index([teamId, isDeleted, createdAt])
}
// This model describes the execution of an AgentNode.
@@ -685,6 +737,10 @@ model PendingHumanReview {
User User @relation(fields: [userId], references: [id], onDelete: Cascade)
// Tenancy
organizationId String?
teamId String?
@@unique([nodeExecId]) // One pending review per node execution
@@index([userId, status])
@@index([graphExecId, status])
@@ -711,6 +767,12 @@ model IntegrationWebhook {
AgentNodes AgentNode[]
AgentPresets AgentPreset[]
// Tenancy
organizationId String?
teamId String?
visibility ResourceVisibility @default(PRIVATE)
Team Team? @relation(fields: [teamId], references: [id], onDelete: SetNull)
}
model AnalyticsDetails {
@@ -774,6 +836,68 @@ enum CreditTransactionType {
CARD_CHECK
}
////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////
////////// ORGANIZATION ORGANIZATION & WORKSPACE ENUMS TEAM ENUMS /////////////
////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////
enum TeamJoinPolicy {
OPEN
PRIVATE
}
enum ResourceVisibility {
PRIVATE
TEAM
ORG
}
enum CredentialScope {
USER
TEAM
ORG
}
enum OrgAliasType {
MIGRATION
RENAME
MANUAL
}
enum OrgMemberStatus {
INVITED
ACTIVE
SUSPENDED
REMOVED
}
enum SeatType {
FREE
PAID
}
enum SeatStatus {
ACTIVE
INACTIVE
PENDING
}
enum TransferStatus {
PENDING
SOURCE_APPROVED
TARGET_APPROVED
COMPLETED
REJECTED
CANCELLED
}
enum CredentialOwnerType {
USER
TEAM
ORG
}
model CreditTransaction {
transactionKey String @default(uuid())
createdAt DateTime @default(now())
@@ -1013,7 +1137,7 @@ model StoreListing {
ActiveVersion StoreListingVersion? @relation("ActiveVersion", fields: [activeVersionId], references: [id])
// The agent link here is only so we can do lookup on agentId
agentGraphId String @unique
agentGraphId String @unique
owningUserId String
OwningUser User @relation(fields: [owningUserId], references: [id])
@@ -1022,9 +1146,14 @@ model StoreListing {
// Relations
Versions StoreListingVersion[] @relation("ListingVersions")
// Tenancy — owningOrgId will replace owningUserId as canonical owner after cutover
owningOrgId String?
OwningOrg Organization? @relation("OrgStoreListings", fields: [owningOrgId], references: [id])
@@unique([owningUserId, slug])
// Used in the view query
@@index([isDeleted, hasApprovedVersion])
@@index([owningOrgId])
}
model StoreListingVersion {
@@ -1085,6 +1214,9 @@ model StoreListingVersion {
// Note: Embeddings now stored in UnifiedContentEmbedding table
// Use contentType=STORE_AGENT and contentId=storeListingVersionId
// Tenancy
organizationId String?
@@unique([storeListingId, version])
@@index([storeListingId, submissionStatus, isAvailable])
@@index([submissionStatus])
@@ -1193,8 +1325,16 @@ model APIKey {
userId String
User User @relation(fields: [userId], references: [id], onDelete: Cascade)
// Tenancy
organizationId String?
teamId String?
ownerType CredentialOwnerType?
teamIdRestriction String?
Team Team? @relation(fields: [teamId], references: [id], onDelete: Cascade)
@@index([head, name])
@@index([userId, status])
@@index([teamId])
}
model UserBalance {
@@ -1248,6 +1388,11 @@ model OAuthApplication {
AccessTokens OAuthAccessToken[]
RefreshTokens OAuthRefreshToken[]
// Tenancy
organizationId String?
ownerType CredentialOwnerType?
teamIdRestriction String?
@@index([clientId])
@@index([ownerId])
}
@@ -1322,3 +1467,321 @@ model OAuthRefreshToken {
@@index([userId, applicationId])
@@index([expiresAt]) // For cleanup
}
////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////
//////// ORGANIZATION ORGANIZATION & WORKSPACE MODELS TEAM MODELS //////////////
////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////
model Organization {
id String @id @default(uuid())
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
name String
slug String @unique
avatarUrl String?
description String?
isPersonal Boolean @default(false)
settings Json @default("{}")
stripeCustomerId String?
stripeSubscriptionId String?
topUpConfig Json?
archivedAt DateTime?
deletedAt DateTime?
bootstrapUserId String?
Members OrgMember[]
Teams Team[]
Aliases OrganizationAlias[]
Balance OrgBalance?
CreditTransactions OrgCreditTransaction[]
Invitations OrgInvitation[]
StoreListings StoreListing[] @relation("OrgStoreListings")
Credentials IntegrationCredential[]
Profile OrganizationProfile?
Subscription OrganizationSubscription?
SeatAssignments OrganizationSeatAssignment[]
TransfersFrom TransferRequest[] @relation("TransferSource")
TransfersTo TransferRequest[] @relation("TransferTarget")
AuditLogs AuditLog[]
// slug already has @unique which creates an index — no separate @@index needed
}
model OrganizationAlias {
id String @id @default(uuid())
organizationId String
aliasSlug String @unique
aliasType OrgAliasType
createdAt DateTime @default(now())
createdByUserId String?
removedAt DateTime?
removedByUserId String?
isRemovable Boolean @default(true)
Organization Organization @relation(fields: [organizationId], references: [id], onDelete: Cascade)
@@index([aliasSlug])
@@index([organizationId])
}
model OrganizationProfile {
organizationId String @id
username String @unique
displayName String?
avatarUrl String?
bio String?
socialLinks Json?
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
Organization Organization @relation(fields: [organizationId], references: [id], onDelete: Cascade)
}
model OrgMember {
id String @id @default(uuid())
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
orgId String
userId String
isOwner Boolean @default(false)
isAdmin Boolean @default(false)
isBillingManager Boolean @default(false)
status OrgMemberStatus @default(ACTIVE)
joinedAt DateTime @default(now())
invitedByUserId String?
Org Organization @relation(fields: [orgId], references: [id], onDelete: Cascade)
User User @relation(fields: [userId], references: [id], onDelete: Cascade)
SeatAssignment OrganizationSeatAssignment? @relation
@@unique([orgId, userId])
@@index([userId])
@@index([orgId, status])
}
model OrgInvitation {
id String @id @default(uuid())
createdAt DateTime @default(now())
orgId String
email String
targetUserId String?
isAdmin Boolean @default(false)
isBillingManager Boolean @default(false)
token String @unique @default(uuid())
tokenHash String?
expiresAt DateTime
acceptedAt DateTime?
revokedAt DateTime?
invitedByUserId String
teamIds String[]
Org Organization @relation(fields: [orgId], references: [id], onDelete: Cascade)
@@index([email])
@@index([token])
@@index([orgId])
}
model Team {
id String @id @default(uuid())
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
name String
slug String?
description String?
isDefault Boolean @default(false)
joinPolicy TeamJoinPolicy @default(OPEN)
orgId String
archivedAt DateTime?
createdByUserId String?
Org Organization @relation(fields: [orgId], references: [id], onDelete: Cascade)
Members TeamMember[]
AgentGraphs AgentGraph[]
Executions AgentGraphExecution[]
ChatSessions ChatSession[]
Presets AgentPreset[]
LibraryAgents LibraryAgent[]
LibraryFolders LibraryFolder[]
Webhooks IntegrationWebhook[]
APIKeys APIKey[]
Credentials IntegrationCredential[] @relation("TeamCredentials")
TeamInvites TeamInvite[]
@@unique([orgId, name])
@@index([orgId, isDefault])
@@index([orgId, joinPolicy])
}
model TeamMember {
id String @id @default(uuid())
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
teamId String
userId String
isAdmin Boolean @default(false)
isBillingManager Boolean @default(false)
status OrgMemberStatus @default(ACTIVE)
joinedAt DateTime @default(now())
invitedByUserId String?
Team Team @relation(fields: [teamId], references: [id], onDelete: Cascade)
User User @relation(fields: [userId], references: [id], onDelete: Cascade)
@@unique([teamId, userId])
@@index([userId])
@@index([teamId, status])
}
model TeamInvite {
id String @id @default(uuid())
createdAt DateTime @default(now())
teamId String
email String
targetUserId String?
isAdmin Boolean @default(false)
isBillingManager Boolean @default(false)
token String @unique @default(uuid())
tokenHash String?
expiresAt DateTime
acceptedAt DateTime?
revokedAt DateTime?
invitedByUserId String
Team Team @relation(fields: [teamId], references: [id], onDelete: Cascade)
@@index([email])
@@index([token])
@@index([teamId])
}
model OrganizationSubscription {
organizationId String @id
planCode String?
planTier String?
stripeCustomerId String?
stripeSubscriptionId String?
status String @default("active")
renewalAt DateTime?
cancelAt DateTime?
entitlements Json @default("{}")
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
Organization Organization @relation(fields: [organizationId], references: [id], onDelete: Cascade)
}
model OrganizationSeatAssignment {
id String @id @default(uuid())
organizationId String
userId String
seatType SeatType @default(FREE)
status SeatStatus @default(ACTIVE)
assignedByUserId String?
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
OrgMember OrgMember? @relation(fields: [organizationId, userId], references: [orgId, userId])
Organization Organization @relation(fields: [organizationId], references: [id])
@@unique([organizationId, userId])
@@index([organizationId, status])
}
model OrgBalance {
orgId String @id
balance Int @default(0)
updatedAt DateTime @updatedAt
Org Organization @relation(fields: [orgId], references: [id], onDelete: Cascade)
}
model OrgCreditTransaction {
transactionKey String @default(uuid())
createdAt DateTime @default(now())
orgId String
initiatedByUserId String?
teamId String?
amount Int
type CreditTransactionType
runningBalance Int?
isActive Boolean @default(true)
metadata Json?
Org Organization @relation(fields: [orgId], references: [id], onDelete: NoAction)
@@id(name: "orgCreditTransactionIdentifier", [transactionKey, orgId])
@@index([orgId, createdAt])
@@index([initiatedByUserId])
}
model TransferRequest {
id String @id @default(uuid())
resourceType String
resourceId String
sourceOrganizationId String
targetOrganizationId String
initiatedByUserId String
status TransferStatus @default(PENDING)
sourceApprovedByUserId String?
targetApprovedByUserId String?
completedAt DateTime?
reason String?
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
SourceOrg Organization @relation("TransferSource", fields: [sourceOrganizationId], references: [id])
TargetOrg Organization @relation("TransferTarget", fields: [targetOrganizationId], references: [id])
@@index([sourceOrganizationId])
@@index([targetOrganizationId])
@@index([status])
}
model AuditLog {
id String @id @default(uuid())
organizationId String?
teamId String?
actorUserId String
entityType String
entityId String?
action String
beforeJson Json?
afterJson Json?
correlationId String?
createdAt DateTime @default(now())
Organization Organization? @relation(fields: [organizationId], references: [id])
@@index([organizationId, createdAt])
@@index([actorUserId, createdAt])
@@index([entityType, entityId])
}
model IntegrationCredential {
id String @id @default(uuid())
organizationId String
ownerType CredentialOwnerType
ownerId String // userId, teamId, or orgId depending on ownerType
teamId String? // Dedicated FK for workspace-scoped creds (set when ownerType=TEAM)
provider String
credentialType String
displayName String
encryptedPayload String
createdByUserId String
lastUsedAt DateTime?
status String @default("active")
metadata Json?
expiresAt DateTime?
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
Organization Organization @relation(fields: [organizationId], references: [id], onDelete: Cascade)
Workspace Team? @relation("TeamCredentials", fields: [teamId], references: [id], onDelete: Cascade, map: "IntegrationCredential_workspace_fkey")
@@index([organizationId, ownerType, provider])
@@index([ownerId, ownerType])
@@index([createdByUserId])
}

View File

@@ -112,6 +112,17 @@ class TestDataCreator:
self.api_keys: List[Dict[str, Any]] = []
self.presets: List[Dict[str, Any]] = []
self.profiles: List[Dict[str, Any]] = []
# Org/workspace context per user (populated after migration runs)
self._user_org_cache: Dict[str, tuple[str | None, str | None]] = {}
async def _get_user_org_ws(self, user_id: str) -> tuple[str | None, str | None]:
"""Get (organization_id, team_id) for a user, with caching."""
if user_id not in self._user_org_cache:
from backend.api.features.orgs.db import get_user_default_team
org_id, ws_id = await get_user_default_team(user_id)
self._user_org_cache[user_id] = (org_id, ws_id)
return self._user_org_cache[user_id]
async def create_test_users(self) -> List[Dict[str, Any]]:
"""Create test users using Supabase client."""
@@ -366,8 +377,14 @@ class TestDataCreator:
)
try:
# Use the API function to create graph
created_graph = await create_graph(graph, user["id"])
# Use the API function to create graph with org context
org_id, ws_id = await self._get_user_org_ws(user["id"])
created_graph = await create_graph(
graph,
user["id"],
organization_id=org_id,
team_id=ws_id,
)
graph_dict = created_graph.model_dump()
# Ensure userId is included for store submissions
graph_dict["userId"] = user["id"]

View File

@@ -69,6 +69,20 @@ export const customMutator = async <
error,
);
}
// Inject org/team context headers
try {
const activeOrgID = localStorage.getItem("active-org-id");
const activeTeamID = localStorage.getItem("active-team-id");
if (activeOrgID) {
headers["X-Org-Id"] = activeOrgID;
}
if (activeTeamID) {
headers["X-Team-Id"] = activeTeamID;
}
} catch (error) {
console.error("Org context: Failed to access localStorage:", error);
}
}
const isFormData = data instanceof FormData;

File diff suppressed because it is too large Load Diff

View File

@@ -6,6 +6,7 @@ import { BackendAPIProvider } from "@/lib/autogpt-server-api/context";
import { getQueryClient } from "@/lib/react-query/queryClient";
import CredentialsProvider from "@/providers/agent-credentials/credentials-provider";
import OnboardingProvider from "@/providers/onboarding/onboarding-provider";
import OrgTeamProvider from "@/providers/org-team/OrgTeamProvider";
import {
PostHogPageViewTracker,
PostHogProvider,
@@ -30,13 +31,15 @@ export function Providers({ children, ...props }: ThemeProviderProps) {
<PostHogPageViewTracker />
</Suspense>
<CredentialsProvider>
<LaunchDarklyProvider>
<OnboardingProvider>
<ThemeProvider forcedTheme="light" {...props}>
<TooltipProvider>{children}</TooltipProvider>
</ThemeProvider>
</OnboardingProvider>
</LaunchDarklyProvider>
<OrgTeamProvider>
<LaunchDarklyProvider>
<OnboardingProvider>
<ThemeProvider forcedTheme="light" {...props}>
<TooltipProvider>{children}</TooltipProvider>
</ThemeProvider>
</OnboardingProvider>
</LaunchDarklyProvider>
</OrgTeamProvider>
</CredentialsProvider>
</BackendAPIProvider>
</PostHogProvider>

View File

@@ -13,6 +13,7 @@ import { environment } from "@/services/environment";
import { AccountMenu } from "./components/AccountMenu/AccountMenu";
import { FeedbackButton } from "./components/FeedbackButton";
import { AgentActivityDropdown } from "./components/AgentActivityDropdown/AgentActivityDropdown";
import { OrgTeamSwitcher } from "./components/OrgTeamSwitcher/OrgTeamSwitcher";
import { LoginButton } from "./components/LoginButton";
import { MobileNavBar } from "./components/MobileNavbar/MobileNavBar";
import { NavbarLink } from "./components/NavbarLink";
@@ -93,6 +94,7 @@ export function Navbar() {
{isLoggedIn && !isSmallScreen ? (
<div className="flex flex-1 items-center justify-end gap-4">
<div className="flex items-center gap-4">
<OrgTeamSwitcher />
<FeedbackButton />
<AgentActivityDropdown />
{profile && <Wallet key={profile.username} />}

View File

@@ -0,0 +1,131 @@
import {
Popover,
PopoverContent,
PopoverTrigger,
} from "@/components/__legacy__/ui/popover";
import Avatar, {
AvatarFallback,
AvatarImage,
} from "@/components/atoms/Avatar/Avatar";
import { useOrgTeamSwitcher } from "./useOrgTeamSwitcher";
import { CaretDown, Check, Plus, GearSix } from "@phosphor-icons/react";
import Link from "next/link";
export function OrgTeamSwitcher() {
const {
orgs,
teams,
activeOrg,
activeTeam,
switchOrg,
switchTeam,
isLoaded,
} = useOrgTeamSwitcher();
if (!isLoaded || orgs.length === 0) {
return null;
}
return (
<Popover>
<PopoverTrigger asChild>
<button
type="button"
className="flex cursor-pointer items-center gap-1.5 rounded-lg bg-white/60 px-2.5 py-1.5 text-sm font-medium text-neutral-700 hover:bg-white/80"
aria-label="Switch organization"
data-testid="org-switcher-trigger"
>
<Avatar className="h-5 w-5">
<AvatarImage
src={activeOrg?.avatarUrl ?? ""}
alt=""
aria-hidden="true"
/>
<AvatarFallback className="text-xs" aria-hidden="true">
{activeOrg?.name?.charAt(0) || "O"}
</AvatarFallback>
</Avatar>
<span className="max-w-[8rem] truncate">{activeOrg?.name}</span>
<CaretDown size={12} />
</button>
</PopoverTrigger>
<PopoverContent
className="flex w-64 flex-col gap-2 rounded-xl bg-white p-2 shadow-lg"
align="end"
data-testid="org-switcher-popover"
>
{/* Org list */}
<div className="flex flex-col gap-0.5">
<span className="px-2 py-1 text-xs font-medium uppercase text-neutral-400">
Organizations
</span>
{orgs.map((org) => (
<button
key={org.id}
type="button"
className="flex w-full items-center gap-2 rounded-lg px-2 py-1.5 text-sm hover:bg-neutral-100"
onClick={() => switchOrg(org.id)}
>
<Avatar className="h-5 w-5">
<AvatarImage src={org.avatarUrl ?? ""} alt="" />
<AvatarFallback className="text-xs">
{org.name.charAt(0)}
</AvatarFallback>
</Avatar>
<span className="flex-1 truncate text-left">{org.name}</span>
{org.isPersonal && (
<span className="text-xs text-neutral-400">Personal</span>
)}
{org.id === activeOrg?.id && (
<Check size={14} className="text-green-600" />
)}
</button>
))}
<Link
href="/org/settings"
className="flex items-center gap-2 rounded-lg px-2 py-1.5 text-sm text-neutral-500 hover:bg-neutral-100"
>
<Plus size={14} />
<span>Create organization</span>
</Link>
</div>
{/* Team list (only if orgs exist) */}
{teams.length > 0 && (
<>
<div className="border-t border-neutral-100" />
<div className="flex flex-col gap-0.5">
<span className="px-2 py-1 text-xs font-medium uppercase text-neutral-400">
Teams
</span>
{teams.map((ws) => (
<button
key={ws.id}
type="button"
className="flex w-full items-center gap-2 rounded-lg px-2 py-1.5 text-sm hover:bg-neutral-100"
onClick={() => switchTeam(ws.id)}
>
<span className="flex-1 truncate text-left">{ws.name}</span>
{ws.joinPolicy === "PRIVATE" && (
<span className="text-xs text-neutral-400">Private</span>
)}
{ws.id === activeTeam?.id && (
<Check size={14} className="text-green-600" />
)}
</button>
))}
<Link
href="/org/teams"
className="flex items-center gap-2 rounded-lg px-2 py-1.5 text-sm text-neutral-500 hover:bg-neutral-100"
>
<GearSix size={14} />
<span>Manage teams</span>
</Link>
</div>
</>
)}
</PopoverContent>
</Popover>
);
}

View File

@@ -0,0 +1,43 @@
import { useOrgTeamStore } from "@/services/org-team/store";
import { getQueryClient } from "@/lib/react-query/queryClient";
export function useOrgTeamSwitcher() {
const {
orgs,
teams,
activeOrgID,
activeTeamID,
setActiveOrg,
setActiveTeam,
isLoaded,
} = useOrgTeamStore();
const activeOrg = orgs.find((o) => o.id === activeOrgID) || null;
const activeTeam = teams.find((w) => w.id === activeTeamID) || null;
function switchOrg(orgID: string) {
if (orgID === activeOrgID) return;
setActiveOrg(orgID);
// Clear cache to force refetch with new org context
const queryClient = getQueryClient();
queryClient.clear();
}
function switchTeam(teamID: string) {
if (teamID === activeTeamID) return;
setActiveTeam(teamID);
// Clear cache for team-scoped data
const queryClient = getQueryClient();
queryClient.clear();
}
return {
orgs,
teams,
activeOrg,
activeTeam,
switchOrg,
switchTeam,
isLoaded,
};
}

View File

@@ -0,0 +1,80 @@
"use client";
import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
import { useOrgTeamStore } from "@/services/org-team/store";
import { getQueryClient } from "@/lib/react-query/queryClient";
import { useEffect, useRef } from "react";
interface Props {
children: React.ReactNode;
}
/**
* Initializes org/team context on login and clears it on logout.
*
* On mount (when logged in):
* 1. Fetches the user's org list from GET /api/orgs
* 2. If no activeOrgID is stored, sets the personal org as default
* 3. Fetches teams for the active org
*
* On org/team switch: clears React Query cache to force refetch.
*/
export default function OrgTeamProvider({ children }: Props) {
const { isLoggedIn, user } = useSupabase();
const { activeOrgID, setActiveOrg, setOrgs, setLoaded, clearContext } =
useOrgTeamStore();
const prevOrgID = useRef(activeOrgID);
// Fetch orgs when logged in
useEffect(() => {
if (!isLoggedIn || !user) {
clearContext();
return;
}
async function loadOrgs() {
try {
const res = await fetch("/api/proxy/api/orgs", {
headers: { "Content-Type": "application/json" },
});
if (!res.ok) {
setLoaded(true);
return;
}
const data = await res.json();
const orgs = data.data || data;
setOrgs(orgs);
// If no active org, set the personal org as default
if (!activeOrgID && orgs.length > 0) {
const personal = orgs.find(
(o: { isPersonal: boolean }) => o.isPersonal,
);
if (personal) {
setActiveOrg(personal.id);
} else {
setActiveOrg(orgs[0].id);
}
}
setLoaded(true);
} catch {
setLoaded(true);
}
}
loadOrgs();
}, [isLoggedIn, user]);
// Clear React Query cache when org switches
useEffect(() => {
if (prevOrgID.current !== activeOrgID && prevOrgID.current !== null) {
const queryClient = getQueryClient();
queryClient.clear();
}
prevOrgID.current = activeOrgID;
}, [activeOrgID]);
return <>{children}</>;
}

View File

@@ -0,0 +1,83 @@
import { Key, storage } from "@/services/storage/local-storage";
import { create } from "zustand";
interface Org {
id: string;
name: string;
slug: string;
avatarUrl: string | null;
isPersonal: boolean;
memberCount: number;
}
interface Team {
id: string;
name: string;
slug: string | null;
isDefault: boolean;
joinPolicy: string;
orgId: string;
}
interface OrgTeamState {
activeOrgID: string | null;
activeTeamID: string | null;
orgs: Org[];
teams: Team[];
isLoaded: boolean;
setActiveOrg(orgID: string): void;
setActiveTeam(teamID: string | null): void;
setOrgs(orgs: Org[]): void;
setTeams(teams: Team[]): void;
setLoaded(loaded: boolean): void;
clearContext(): void;
}
export const useOrgTeamStore = create<OrgTeamState>((set) => ({
activeOrgID: storage.get(Key.ACTIVE_ORG) || null,
activeTeamID: storage.get(Key.ACTIVE_TEAM) || null,
orgs: [],
teams: [],
isLoaded: false,
setActiveOrg(orgID: string) {
storage.set(Key.ACTIVE_ORG, orgID);
set({ activeOrgID: orgID, activeTeamID: null });
// Clear team when switching org — provider will resolve default
storage.clean(Key.ACTIVE_TEAM);
},
setActiveTeam(teamID: string | null) {
if (teamID) {
storage.set(Key.ACTIVE_TEAM, teamID);
} else {
storage.clean(Key.ACTIVE_TEAM);
}
set({ activeTeamID: teamID });
},
setOrgs(orgs: Org[]) {
set({ orgs });
},
setTeams(teams: Team[]) {
set({ teams });
},
setLoaded(loaded: boolean) {
set({ isLoaded: loaded });
},
clearContext() {
storage.clean(Key.ACTIVE_ORG);
storage.clean(Key.ACTIVE_TEAM);
set({
activeOrgID: null,
activeTeamID: null,
orgs: [],
teams: [],
isLoaded: false,
});
},
}));

View File

@@ -15,6 +15,8 @@ export enum Key {
COPILOT_NOTIFICATIONS_ENABLED = "copilot-notifications-enabled",
COPILOT_NOTIFICATION_BANNER_DISMISSED = "copilot-notification-banner-dismissed",
COPILOT_NOTIFICATION_DIALOG_DISMISSED = "copilot-notification-dialog-dismissed",
ACTIVE_ORG = "active-org-id",
ACTIVE_TEAM = "active-team-id",
COPILOT_ARTIFACT_PANEL_WIDTH = "copilot-artifact-panel-width",
COPILOT_MODE = "copilot-mode",
COPILOT_COMPLETED_SESSIONS = "copilot-completed-sessions",