mirror of
https://github.com/Significant-Gravitas/AutoGPT.git
synced 2026-02-09 14:25:25 -05:00
Compare commits
4 Commits
swiftyos/m
...
fix/execut
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
9b20f4cd13 | ||
|
|
a3d0f9cbd2 | ||
|
|
02ddb51446 | ||
|
|
750e096f15 |
@@ -11,7 +11,7 @@ jobs:
|
|||||||
stale:
|
stale:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/stale@v10
|
- uses: actions/stale@v9
|
||||||
with:
|
with:
|
||||||
# operations-per-run: 5000
|
# operations-per-run: 5000
|
||||||
stale-issue-message: >
|
stale-issue-message: >
|
||||||
|
|||||||
2
.github/workflows/repo-pr-label.yml
vendored
2
.github/workflows/repo-pr-label.yml
vendored
@@ -61,6 +61,6 @@ jobs:
|
|||||||
pull-requests: write
|
pull-requests: write
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/labeler@v6
|
- uses: actions/labeler@v5
|
||||||
with:
|
with:
|
||||||
sync-labels: true
|
sync-labels: true
|
||||||
|
|||||||
@@ -57,9 +57,6 @@ class APIKeySmith:
|
|||||||
|
|
||||||
def hash_key(self, raw_key: str) -> tuple[str, str]:
|
def hash_key(self, raw_key: str) -> tuple[str, str]:
|
||||||
"""Migrate a legacy hash to secure hash format."""
|
"""Migrate a legacy hash to secure hash format."""
|
||||||
if not raw_key.startswith(self.PREFIX):
|
|
||||||
raise ValueError("Key without 'agpt_' prefix would fail validation")
|
|
||||||
|
|
||||||
salt = self._generate_salt()
|
salt = self._generate_salt()
|
||||||
hash = self._hash_key_with_salt(raw_key, salt)
|
hash = self._hash_key_with_salt(raw_key, salt)
|
||||||
return hash, salt.hex()
|
return hash, salt.hex()
|
||||||
|
|||||||
@@ -20,7 +20,6 @@ from backend.data.model import (
|
|||||||
SchemaField,
|
SchemaField,
|
||||||
)
|
)
|
||||||
from backend.integrations.providers import ProviderName
|
from backend.integrations.providers import ProviderName
|
||||||
from backend.util.exceptions import BlockExecutionError
|
|
||||||
from backend.util.request import Requests
|
from backend.util.request import Requests
|
||||||
|
|
||||||
TEST_CREDENTIALS = APIKeyCredentials(
|
TEST_CREDENTIALS = APIKeyCredentials(
|
||||||
@@ -247,11 +246,7 @@ class AIShortformVideoCreatorBlock(Block):
|
|||||||
await asyncio.sleep(10)
|
await asyncio.sleep(10)
|
||||||
|
|
||||||
logger.error("Video creation timed out")
|
logger.error("Video creation timed out")
|
||||||
raise BlockExecutionError(
|
raise TimeoutError("Video creation timed out")
|
||||||
message="Video creation timed out",
|
|
||||||
block_name=self.name,
|
|
||||||
block_id=self.id,
|
|
||||||
)
|
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
super().__init__(
|
super().__init__(
|
||||||
@@ -427,11 +422,7 @@ class AIAdMakerVideoCreatorBlock(Block):
|
|||||||
await asyncio.sleep(10)
|
await asyncio.sleep(10)
|
||||||
|
|
||||||
logger.error("Video creation timed out")
|
logger.error("Video creation timed out")
|
||||||
raise BlockExecutionError(
|
raise TimeoutError("Video creation timed out")
|
||||||
message="Video creation timed out",
|
|
||||||
block_name=self.name,
|
|
||||||
block_id=self.id,
|
|
||||||
)
|
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
super().__init__(
|
super().__init__(
|
||||||
@@ -608,11 +599,7 @@ class AIScreenshotToVideoAdBlock(Block):
|
|||||||
await asyncio.sleep(10)
|
await asyncio.sleep(10)
|
||||||
|
|
||||||
logger.error("Video creation timed out")
|
logger.error("Video creation timed out")
|
||||||
raise BlockExecutionError(
|
raise TimeoutError("Video creation timed out")
|
||||||
message="Video creation timed out",
|
|
||||||
block_name=self.name,
|
|
||||||
block_id=self.id,
|
|
||||||
)
|
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
super().__init__(
|
super().__init__(
|
||||||
|
|||||||
@@ -106,10 +106,7 @@ class ConditionBlock(Block):
|
|||||||
ComparisonOperator.LESS_THAN_OR_EQUAL: lambda a, b: a <= b,
|
ComparisonOperator.LESS_THAN_OR_EQUAL: lambda a, b: a <= b,
|
||||||
}
|
}
|
||||||
|
|
||||||
try:
|
result = comparison_funcs[operator](value1, value2)
|
||||||
result = comparison_funcs[operator](value1, value2)
|
|
||||||
except Exception as e:
|
|
||||||
raise ValueError(f"Comparison failed: {e}") from e
|
|
||||||
|
|
||||||
yield "result", result
|
yield "result", result
|
||||||
|
|
||||||
|
|||||||
@@ -15,7 +15,6 @@ from backend.sdk import (
|
|||||||
SchemaField,
|
SchemaField,
|
||||||
cost,
|
cost,
|
||||||
)
|
)
|
||||||
from backend.util.exceptions import BlockExecutionError
|
|
||||||
|
|
||||||
from ._config import firecrawl
|
from ._config import firecrawl
|
||||||
|
|
||||||
@@ -60,18 +59,11 @@ class FirecrawlExtractBlock(Block):
|
|||||||
) -> BlockOutput:
|
) -> BlockOutput:
|
||||||
app = FirecrawlApp(api_key=credentials.api_key.get_secret_value())
|
app = FirecrawlApp(api_key=credentials.api_key.get_secret_value())
|
||||||
|
|
||||||
try:
|
extract_result = app.extract(
|
||||||
extract_result = app.extract(
|
urls=input_data.urls,
|
||||||
urls=input_data.urls,
|
prompt=input_data.prompt,
|
||||||
prompt=input_data.prompt,
|
schema=input_data.output_schema,
|
||||||
schema=input_data.output_schema,
|
enable_web_search=input_data.enable_web_search,
|
||||||
enable_web_search=input_data.enable_web_search,
|
)
|
||||||
)
|
|
||||||
except Exception as e:
|
|
||||||
raise BlockExecutionError(
|
|
||||||
message=f"Extract failed: {e}",
|
|
||||||
block_name=self.name,
|
|
||||||
block_id=self.id,
|
|
||||||
) from e
|
|
||||||
|
|
||||||
yield "data", extract_result.data
|
yield "data", extract_result.data
|
||||||
|
|||||||
@@ -19,7 +19,6 @@ from backend.data.model import (
|
|||||||
SchemaField,
|
SchemaField,
|
||||||
)
|
)
|
||||||
from backend.integrations.providers import ProviderName
|
from backend.integrations.providers import ProviderName
|
||||||
from backend.util.exceptions import ModerationError
|
|
||||||
from backend.util.file import MediaFileType, store_media_file
|
from backend.util.file import MediaFileType, store_media_file
|
||||||
|
|
||||||
TEST_CREDENTIALS = APIKeyCredentials(
|
TEST_CREDENTIALS = APIKeyCredentials(
|
||||||
@@ -154,8 +153,6 @@ class AIImageEditorBlock(Block):
|
|||||||
),
|
),
|
||||||
aspect_ratio=input_data.aspect_ratio.value,
|
aspect_ratio=input_data.aspect_ratio.value,
|
||||||
seed=input_data.seed,
|
seed=input_data.seed,
|
||||||
user_id=user_id,
|
|
||||||
graph_exec_id=graph_exec_id,
|
|
||||||
)
|
)
|
||||||
yield "output_image", result
|
yield "output_image", result
|
||||||
|
|
||||||
@@ -167,8 +164,6 @@ class AIImageEditorBlock(Block):
|
|||||||
input_image_b64: Optional[str],
|
input_image_b64: Optional[str],
|
||||||
aspect_ratio: str,
|
aspect_ratio: str,
|
||||||
seed: Optional[int],
|
seed: Optional[int],
|
||||||
user_id: str,
|
|
||||||
graph_exec_id: str,
|
|
||||||
) -> MediaFileType:
|
) -> MediaFileType:
|
||||||
client = ReplicateClient(api_token=api_key.get_secret_value())
|
client = ReplicateClient(api_token=api_key.get_secret_value())
|
||||||
input_params = {
|
input_params = {
|
||||||
@@ -178,21 +173,11 @@ class AIImageEditorBlock(Block):
|
|||||||
**({"seed": seed} if seed is not None else {}),
|
**({"seed": seed} if seed is not None else {}),
|
||||||
}
|
}
|
||||||
|
|
||||||
try:
|
output: FileOutput | list[FileOutput] = await client.async_run( # type: ignore
|
||||||
output: FileOutput | list[FileOutput] = await client.async_run( # type: ignore
|
model_name,
|
||||||
model_name,
|
input=input_params,
|
||||||
input=input_params,
|
wait=False,
|
||||||
wait=False,
|
)
|
||||||
)
|
|
||||||
except Exception as e:
|
|
||||||
if "flagged as sensitive" in str(e).lower():
|
|
||||||
raise ModerationError(
|
|
||||||
message="Content was flagged as sensitive by the model provider",
|
|
||||||
user_id=user_id,
|
|
||||||
graph_exec_id=graph_exec_id,
|
|
||||||
moderation_type="model_provider",
|
|
||||||
)
|
|
||||||
raise ValueError(f"Model execution failed: {e}") from e
|
|
||||||
|
|
||||||
if isinstance(output, list) and output:
|
if isinstance(output, list) and output:
|
||||||
output = output[0]
|
output = output[0]
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
import logging
|
import logging
|
||||||
from typing import Any
|
from typing import Any, Literal
|
||||||
|
|
||||||
from prisma.enums import ReviewStatus
|
from prisma.enums import ReviewStatus
|
||||||
|
|
||||||
@@ -45,11 +45,11 @@ class HumanInTheLoopBlock(Block):
|
|||||||
)
|
)
|
||||||
|
|
||||||
class Output(BlockSchemaOutput):
|
class Output(BlockSchemaOutput):
|
||||||
approved_data: Any = SchemaField(
|
reviewed_data: Any = SchemaField(
|
||||||
description="The data when approved (may be modified by reviewer)"
|
description="The data after human review (may be modified)"
|
||||||
)
|
)
|
||||||
rejected_data: Any = SchemaField(
|
status: Literal["approved", "rejected"] = SchemaField(
|
||||||
description="The data when rejected (may be modified by reviewer)"
|
description="Status of the review: 'approved' or 'rejected'"
|
||||||
)
|
)
|
||||||
review_message: str = SchemaField(
|
review_message: str = SchemaField(
|
||||||
description="Any message provided by the reviewer", default=""
|
description="Any message provided by the reviewer", default=""
|
||||||
@@ -69,7 +69,8 @@ class HumanInTheLoopBlock(Block):
|
|||||||
"editable": True,
|
"editable": True,
|
||||||
},
|
},
|
||||||
test_output=[
|
test_output=[
|
||||||
("approved_data", {"name": "John Doe", "age": 30}),
|
("status", "approved"),
|
||||||
|
("reviewed_data", {"name": "John Doe", "age": 30}),
|
||||||
],
|
],
|
||||||
test_mock={
|
test_mock={
|
||||||
"get_or_create_human_review": lambda *_args, **_kwargs: ReviewResult(
|
"get_or_create_human_review": lambda *_args, **_kwargs: ReviewResult(
|
||||||
@@ -115,7 +116,8 @@ class HumanInTheLoopBlock(Block):
|
|||||||
logger.info(
|
logger.info(
|
||||||
f"HITL block skipping review for node {node_exec_id} - safe mode disabled"
|
f"HITL block skipping review for node {node_exec_id} - safe mode disabled"
|
||||||
)
|
)
|
||||||
yield "approved_data", input_data.data
|
yield "status", "approved"
|
||||||
|
yield "reviewed_data", input_data.data
|
||||||
yield "review_message", "Auto-approved (safe mode disabled)"
|
yield "review_message", "Auto-approved (safe mode disabled)"
|
||||||
return
|
return
|
||||||
|
|
||||||
@@ -156,11 +158,12 @@ class HumanInTheLoopBlock(Block):
|
|||||||
)
|
)
|
||||||
|
|
||||||
if result.status == ReviewStatus.APPROVED:
|
if result.status == ReviewStatus.APPROVED:
|
||||||
yield "approved_data", result.data
|
yield "status", "approved"
|
||||||
|
yield "reviewed_data", result.data
|
||||||
if result.message:
|
if result.message:
|
||||||
yield "review_message", result.message
|
yield "review_message", result.message
|
||||||
|
|
||||||
elif result.status == ReviewStatus.REJECTED:
|
elif result.status == ReviewStatus.REJECTED:
|
||||||
yield "rejected_data", result.data
|
yield "status", "rejected"
|
||||||
if result.message:
|
if result.message:
|
||||||
yield "review_message", result.message
|
yield "review_message", result.message
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ from enum import Enum
|
|||||||
from typing import Any, Dict, Literal, Optional
|
from typing import Any, Dict, Literal, Optional
|
||||||
|
|
||||||
from pydantic import SecretStr
|
from pydantic import SecretStr
|
||||||
|
from requests.exceptions import RequestException
|
||||||
|
|
||||||
from backend.data.block import (
|
from backend.data.block import (
|
||||||
Block,
|
Block,
|
||||||
@@ -331,8 +332,8 @@ class IdeogramModelBlock(Block):
|
|||||||
try:
|
try:
|
||||||
response = await Requests().post(url, headers=headers, json=data)
|
response = await Requests().post(url, headers=headers, json=data)
|
||||||
return response.json()["data"][0]["url"]
|
return response.json()["data"][0]["url"]
|
||||||
except Exception as e:
|
except RequestException as e:
|
||||||
raise ValueError(f"Failed to fetch image with V3 endpoint: {e}") from e
|
raise Exception(f"Failed to fetch image with V3 endpoint: {str(e)}")
|
||||||
|
|
||||||
async def _run_model_legacy(
|
async def _run_model_legacy(
|
||||||
self,
|
self,
|
||||||
@@ -384,8 +385,8 @@ class IdeogramModelBlock(Block):
|
|||||||
try:
|
try:
|
||||||
response = await Requests().post(url, headers=headers, json=data)
|
response = await Requests().post(url, headers=headers, json=data)
|
||||||
return response.json()["data"][0]["url"]
|
return response.json()["data"][0]["url"]
|
||||||
except Exception as e:
|
except RequestException as e:
|
||||||
raise ValueError(f"Failed to fetch image with legacy endpoint: {e}") from e
|
raise Exception(f"Failed to fetch image with legacy endpoint: {str(e)}")
|
||||||
|
|
||||||
async def upscale_image(self, api_key: SecretStr, image_url: str):
|
async def upscale_image(self, api_key: SecretStr, image_url: str):
|
||||||
url = "https://api.ideogram.ai/upscale"
|
url = "https://api.ideogram.ai/upscale"
|
||||||
@@ -412,5 +413,5 @@ class IdeogramModelBlock(Block):
|
|||||||
|
|
||||||
return (response.json())["data"][0]["url"]
|
return (response.json())["data"][0]["url"]
|
||||||
|
|
||||||
except Exception as e:
|
except RequestException as e:
|
||||||
raise ValueError(f"Failed to upscale image: {e}") from e
|
raise Exception(f"Failed to upscale image: {str(e)}")
|
||||||
|
|||||||
@@ -16,7 +16,6 @@ from backend.data.block import (
|
|||||||
BlockSchemaOutput,
|
BlockSchemaOutput,
|
||||||
)
|
)
|
||||||
from backend.data.model import SchemaField
|
from backend.data.model import SchemaField
|
||||||
from backend.util.exceptions import BlockExecutionError
|
|
||||||
|
|
||||||
|
|
||||||
class SearchTheWebBlock(Block, GetRequest):
|
class SearchTheWebBlock(Block, GetRequest):
|
||||||
@@ -57,17 +56,7 @@ class SearchTheWebBlock(Block, GetRequest):
|
|||||||
|
|
||||||
# Prepend the Jina Search URL to the encoded query
|
# Prepend the Jina Search URL to the encoded query
|
||||||
jina_search_url = f"https://s.jina.ai/{encoded_query}"
|
jina_search_url = f"https://s.jina.ai/{encoded_query}"
|
||||||
|
results = await self.get_request(jina_search_url, headers=headers, json=False)
|
||||||
try:
|
|
||||||
results = await self.get_request(
|
|
||||||
jina_search_url, headers=headers, json=False
|
|
||||||
)
|
|
||||||
except Exception as e:
|
|
||||||
raise BlockExecutionError(
|
|
||||||
message=f"Search failed: {e}",
|
|
||||||
block_name=self.name,
|
|
||||||
block_id=self.id,
|
|
||||||
) from e
|
|
||||||
|
|
||||||
# Output the search results
|
# Output the search results
|
||||||
yield "results", results
|
yield "results", results
|
||||||
|
|||||||
@@ -18,7 +18,6 @@ from backend.data.block import (
|
|||||||
BlockSchemaOutput,
|
BlockSchemaOutput,
|
||||||
)
|
)
|
||||||
from backend.data.model import APIKeyCredentials, CredentialsField, SchemaField
|
from backend.data.model import APIKeyCredentials, CredentialsField, SchemaField
|
||||||
from backend.util.exceptions import BlockExecutionError, BlockInputError
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -112,27 +111,9 @@ class ReplicateModelBlock(Block):
|
|||||||
yield "status", "succeeded"
|
yield "status", "succeeded"
|
||||||
yield "model_name", input_data.model_name
|
yield "model_name", input_data.model_name
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
error_msg = str(e)
|
error_msg = f"Unexpected error running Replicate model: {str(e)}"
|
||||||
logger.error(f"Error running Replicate model: {error_msg}")
|
logger.error(error_msg)
|
||||||
|
raise RuntimeError(error_msg)
|
||||||
# Input validation errors (422, 400) → BlockInputError
|
|
||||||
if (
|
|
||||||
"422" in error_msg
|
|
||||||
or "Input validation failed" in error_msg
|
|
||||||
or "400" in error_msg
|
|
||||||
):
|
|
||||||
raise BlockInputError(
|
|
||||||
message=f"Invalid model inputs: {error_msg}",
|
|
||||||
block_name=self.name,
|
|
||||||
block_id=self.id,
|
|
||||||
) from e
|
|
||||||
# Everything else → BlockExecutionError
|
|
||||||
else:
|
|
||||||
raise BlockExecutionError(
|
|
||||||
message=f"Replicate model error: {error_msg}",
|
|
||||||
block_name=self.name,
|
|
||||||
block_id=self.id,
|
|
||||||
) from e
|
|
||||||
|
|
||||||
async def run_model(self, model_ref: str, model_inputs: dict, api_key: SecretStr):
|
async def run_model(self, model_ref: str, model_inputs: dict, api_key: SecretStr):
|
||||||
"""
|
"""
|
||||||
|
|||||||
@@ -45,16 +45,10 @@ class GetWikipediaSummaryBlock(Block, GetRequest):
|
|||||||
async def run(self, input_data: Input, **kwargs) -> BlockOutput:
|
async def run(self, input_data: Input, **kwargs) -> BlockOutput:
|
||||||
topic = input_data.topic
|
topic = input_data.topic
|
||||||
url = f"https://en.wikipedia.org/api/rest_v1/page/summary/{topic}"
|
url = f"https://en.wikipedia.org/api/rest_v1/page/summary/{topic}"
|
||||||
|
response = await self.get_request(url, json=True)
|
||||||
# Note: User-Agent is now automatically set by the request library
|
if "extract" not in response:
|
||||||
# to comply with Wikimedia's robot policy (https://w.wiki/4wJS)
|
raise RuntimeError(f"Unable to parse Wikipedia response: {response}")
|
||||||
try:
|
yield "summary", response["extract"]
|
||||||
response = await self.get_request(url, json=True)
|
|
||||||
if "extract" not in response:
|
|
||||||
raise ValueError(f"Unable to parse Wikipedia response: {response}")
|
|
||||||
yield "summary", response["extract"]
|
|
||||||
except Exception as e:
|
|
||||||
raise ValueError(f"Failed to fetch Wikipedia summary: {e}") from e
|
|
||||||
|
|
||||||
|
|
||||||
TEST_CREDENTIALS = APIKeyCredentials(
|
TEST_CREDENTIALS = APIKeyCredentials(
|
||||||
|
|||||||
@@ -1 +0,0 @@
|
|||||||
"""CLI utilities for backend development & administration"""
|
|
||||||
@@ -1,57 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
Script to generate OpenAPI JSON specification for the FastAPI app.
|
|
||||||
|
|
||||||
This script imports the FastAPI app from backend.server.rest_api and outputs
|
|
||||||
the OpenAPI specification as JSON to stdout or a specified file.
|
|
||||||
|
|
||||||
Usage:
|
|
||||||
`poetry run python generate_openapi_json.py`
|
|
||||||
`poetry run python generate_openapi_json.py --output openapi.json`
|
|
||||||
`poetry run python generate_openapi_json.py --indent 4 --output openapi.json`
|
|
||||||
"""
|
|
||||||
|
|
||||||
import json
|
|
||||||
import os
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
import click
|
|
||||||
|
|
||||||
|
|
||||||
@click.command()
|
|
||||||
@click.option(
|
|
||||||
"--output",
|
|
||||||
type=click.Path(dir_okay=False, path_type=Path),
|
|
||||||
help="Output file path (default: stdout)",
|
|
||||||
)
|
|
||||||
@click.option(
|
|
||||||
"--pretty",
|
|
||||||
type=click.BOOL,
|
|
||||||
default=False,
|
|
||||||
help="Pretty-print JSON output (indented 2 spaces)",
|
|
||||||
)
|
|
||||||
def main(output: Path, pretty: bool):
|
|
||||||
"""Generate and output the OpenAPI JSON specification."""
|
|
||||||
openapi_schema = get_openapi_schema()
|
|
||||||
|
|
||||||
json_output = json.dumps(openapi_schema, indent=2 if pretty else None)
|
|
||||||
|
|
||||||
if output:
|
|
||||||
output.write_text(json_output)
|
|
||||||
click.echo(f"✅ OpenAPI specification written to {output}\n\nPreview:")
|
|
||||||
click.echo(f"\n{json_output[:500]} ...")
|
|
||||||
else:
|
|
||||||
print(json_output)
|
|
||||||
|
|
||||||
|
|
||||||
def get_openapi_schema():
|
|
||||||
"""Get the OpenAPI schema from the FastAPI app"""
|
|
||||||
from backend.server.rest_api import app
|
|
||||||
|
|
||||||
return app.openapi()
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
os.environ["LOG_LEVEL"] = "ERROR" # disable stdout log output
|
|
||||||
|
|
||||||
main()
|
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -1,24 +1,22 @@
|
|||||||
import logging
|
import logging
|
||||||
import uuid
|
import uuid
|
||||||
from datetime import datetime, timezone
|
from datetime import datetime, timezone
|
||||||
from typing import Literal, Optional
|
from typing import Optional
|
||||||
|
|
||||||
from autogpt_libs.api_key.keysmith import APIKeySmith
|
from autogpt_libs.api_key.keysmith import APIKeySmith
|
||||||
from prisma.enums import APIKeyPermission, APIKeyStatus
|
from prisma.enums import APIKeyPermission, APIKeyStatus
|
||||||
from prisma.models import APIKey as PrismaAPIKey
|
from prisma.models import APIKey as PrismaAPIKey
|
||||||
from prisma.types import APIKeyWhereUniqueInput
|
from prisma.types import APIKeyWhereUniqueInput
|
||||||
from pydantic import Field
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
from backend.data.includes import MAX_USER_API_KEYS_FETCH
|
from backend.data.includes import MAX_USER_API_KEYS_FETCH
|
||||||
from backend.util.exceptions import NotAuthorizedError, NotFoundError
|
from backend.util.exceptions import NotAuthorizedError, NotFoundError
|
||||||
|
|
||||||
from .base import APIAuthorizationInfo
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
keysmith = APIKeySmith()
|
keysmith = APIKeySmith()
|
||||||
|
|
||||||
|
|
||||||
class APIKeyInfo(APIAuthorizationInfo):
|
class APIKeyInfo(BaseModel):
|
||||||
id: str
|
id: str
|
||||||
name: str
|
name: str
|
||||||
head: str = Field(
|
head: str = Field(
|
||||||
@@ -28,9 +26,12 @@ class APIKeyInfo(APIAuthorizationInfo):
|
|||||||
description=f"The last {APIKeySmith.TAIL_LENGTH} characters of the key"
|
description=f"The last {APIKeySmith.TAIL_LENGTH} characters of the key"
|
||||||
)
|
)
|
||||||
status: APIKeyStatus
|
status: APIKeyStatus
|
||||||
|
permissions: list[APIKeyPermission]
|
||||||
|
created_at: datetime
|
||||||
|
last_used_at: Optional[datetime] = None
|
||||||
|
revoked_at: Optional[datetime] = None
|
||||||
description: Optional[str] = None
|
description: Optional[str] = None
|
||||||
|
user_id: str
|
||||||
type: Literal["api_key"] = "api_key" # type: ignore
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def from_db(api_key: PrismaAPIKey):
|
def from_db(api_key: PrismaAPIKey):
|
||||||
@@ -40,7 +41,7 @@ class APIKeyInfo(APIAuthorizationInfo):
|
|||||||
head=api_key.head,
|
head=api_key.head,
|
||||||
tail=api_key.tail,
|
tail=api_key.tail,
|
||||||
status=APIKeyStatus(api_key.status),
|
status=APIKeyStatus(api_key.status),
|
||||||
scopes=[APIKeyPermission(p) for p in api_key.permissions],
|
permissions=[APIKeyPermission(p) for p in api_key.permissions],
|
||||||
created_at=api_key.createdAt,
|
created_at=api_key.createdAt,
|
||||||
last_used_at=api_key.lastUsedAt,
|
last_used_at=api_key.lastUsedAt,
|
||||||
revoked_at=api_key.revokedAt,
|
revoked_at=api_key.revokedAt,
|
||||||
@@ -210,7 +211,7 @@ async def suspend_api_key(key_id: str, user_id: str) -> APIKeyInfo:
|
|||||||
|
|
||||||
|
|
||||||
def has_permission(api_key: APIKeyInfo, required_permission: APIKeyPermission) -> bool:
|
def has_permission(api_key: APIKeyInfo, required_permission: APIKeyPermission) -> bool:
|
||||||
return required_permission in api_key.scopes
|
return required_permission in api_key.permissions
|
||||||
|
|
||||||
|
|
||||||
async def get_api_key_by_id(key_id: str, user_id: str) -> Optional[APIKeyInfo]:
|
async def get_api_key_by_id(key_id: str, user_id: str) -> Optional[APIKeyInfo]:
|
||||||
@@ -1,58 +0,0 @@
|
|||||||
"""
|
|
||||||
Native authentication module for AutoGPT Platform.
|
|
||||||
|
|
||||||
This module provides authentication functionality that replaces Supabase Auth,
|
|
||||||
including:
|
|
||||||
- Password hashing with Argon2id
|
|
||||||
- JWT token generation and validation
|
|
||||||
- Magic links for email verification and password reset
|
|
||||||
- Email service for auth-related emails
|
|
||||||
- User migration from Supabase
|
|
||||||
|
|
||||||
Usage:
|
|
||||||
from backend.data.auth.password import hash_password, verify_password
|
|
||||||
from backend.data.auth.tokens import create_access_token, create_token_pair
|
|
||||||
from backend.data.auth.magic_links import create_password_reset_link
|
|
||||||
from backend.data.auth.email_service import get_auth_email_service
|
|
||||||
"""
|
|
||||||
|
|
||||||
from backend.data.auth.email_service import AuthEmailService, get_auth_email_service
|
|
||||||
from backend.data.auth.magic_links import (
|
|
||||||
MagicLinkPurpose,
|
|
||||||
create_email_verification_link,
|
|
||||||
create_password_reset_link,
|
|
||||||
verify_email_token,
|
|
||||||
verify_password_reset_token,
|
|
||||||
)
|
|
||||||
from backend.data.auth.password import hash_password, needs_rehash, verify_password
|
|
||||||
from backend.data.auth.tokens import (
|
|
||||||
TokenPair,
|
|
||||||
create_access_token,
|
|
||||||
create_token_pair,
|
|
||||||
decode_access_token,
|
|
||||||
revoke_all_user_refresh_tokens,
|
|
||||||
validate_refresh_token,
|
|
||||||
)
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
# Password
|
|
||||||
"hash_password",
|
|
||||||
"verify_password",
|
|
||||||
"needs_rehash",
|
|
||||||
# Tokens
|
|
||||||
"TokenPair",
|
|
||||||
"create_access_token",
|
|
||||||
"create_token_pair",
|
|
||||||
"decode_access_token",
|
|
||||||
"validate_refresh_token",
|
|
||||||
"revoke_all_user_refresh_tokens",
|
|
||||||
# Magic Links
|
|
||||||
"MagicLinkPurpose",
|
|
||||||
"create_email_verification_link",
|
|
||||||
"create_password_reset_link",
|
|
||||||
"verify_email_token",
|
|
||||||
"verify_password_reset_token",
|
|
||||||
# Email Service
|
|
||||||
"AuthEmailService",
|
|
||||||
"get_auth_email_service",
|
|
||||||
]
|
|
||||||
@@ -1,15 +0,0 @@
|
|||||||
from datetime import datetime
|
|
||||||
from typing import Literal, Optional
|
|
||||||
|
|
||||||
from prisma.enums import APIKeyPermission
|
|
||||||
from pydantic import BaseModel
|
|
||||||
|
|
||||||
|
|
||||||
class APIAuthorizationInfo(BaseModel):
|
|
||||||
user_id: str
|
|
||||||
scopes: list[APIKeyPermission]
|
|
||||||
type: Literal["oauth", "api_key"]
|
|
||||||
created_at: datetime
|
|
||||||
expires_at: Optional[datetime] = None
|
|
||||||
last_used_at: Optional[datetime] = None
|
|
||||||
revoked_at: Optional[datetime] = None
|
|
||||||
@@ -1,271 +0,0 @@
|
|||||||
"""
|
|
||||||
Email service for authentication flows.
|
|
||||||
|
|
||||||
Uses Postmark to send transactional emails for:
|
|
||||||
- Email verification
|
|
||||||
- Password reset
|
|
||||||
- Account security notifications
|
|
||||||
"""
|
|
||||||
|
|
||||||
import logging
|
|
||||||
import pathlib
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
from jinja2 import Template
|
|
||||||
from postmarker.core import PostmarkClient
|
|
||||||
|
|
||||||
from backend.util.settings import Settings
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
settings = Settings()
|
|
||||||
|
|
||||||
# Template directory
|
|
||||||
TEMPLATE_DIR = pathlib.Path(__file__).parent / "templates"
|
|
||||||
|
|
||||||
|
|
||||||
class AuthEmailService:
|
|
||||||
"""Email service for authentication-related emails."""
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
if settings.secrets.postmark_server_api_token:
|
|
||||||
self.postmark = PostmarkClient(
|
|
||||||
server_token=settings.secrets.postmark_server_api_token
|
|
||||||
)
|
|
||||||
self.enabled = True
|
|
||||||
else:
|
|
||||||
logger.warning(
|
|
||||||
"Postmark server API token not found, auth emails disabled"
|
|
||||||
)
|
|
||||||
self.postmark = None
|
|
||||||
self.enabled = False
|
|
||||||
|
|
||||||
self.sender_email = settings.config.postmark_sender_email
|
|
||||||
self.frontend_url = (
|
|
||||||
settings.config.frontend_base_url or settings.config.platform_base_url
|
|
||||||
)
|
|
||||||
|
|
||||||
def _send_email(
|
|
||||||
self,
|
|
||||||
to_email: str,
|
|
||||||
subject: str,
|
|
||||||
html_body: str,
|
|
||||||
) -> bool:
|
|
||||||
"""
|
|
||||||
Send an email via Postmark.
|
|
||||||
|
|
||||||
Returns True if sent successfully, False otherwise.
|
|
||||||
"""
|
|
||||||
if not self.enabled or not self.postmark:
|
|
||||||
logger.warning(f"Email not sent (disabled): {subject} to {to_email}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
try:
|
|
||||||
self.postmark.emails.send(
|
|
||||||
From=self.sender_email,
|
|
||||||
To=to_email,
|
|
||||||
Subject=subject,
|
|
||||||
HtmlBody=html_body,
|
|
||||||
)
|
|
||||||
logger.info(f"Auth email sent: {subject} to {to_email}")
|
|
||||||
return True
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Failed to send auth email: {e}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
def send_verification_email(self, email: str, token: str) -> bool:
|
|
||||||
"""
|
|
||||||
Send email verification link.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
email: Recipient email address
|
|
||||||
token: Verification token
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
True if sent successfully
|
|
||||||
"""
|
|
||||||
verify_url = f"{self.frontend_url}/auth/verify-email?token={token}"
|
|
||||||
|
|
||||||
subject = "Verify your email address"
|
|
||||||
html_body = f"""
|
|
||||||
<!DOCTYPE html>
|
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<meta charset="utf-8">
|
|
||||||
<style>
|
|
||||||
body {{ font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif; line-height: 1.6; color: #333; }}
|
|
||||||
.container {{ max-width: 600px; margin: 0 auto; padding: 20px; }}
|
|
||||||
.button {{ display: inline-block; padding: 12px 24px; background-color: #5046e5; color: white; text-decoration: none; border-radius: 6px; font-weight: 500; }}
|
|
||||||
.footer {{ margin-top: 30px; font-size: 12px; color: #666; }}
|
|
||||||
</style>
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<div class="container">
|
|
||||||
<h2>Verify your email address</h2>
|
|
||||||
<p>Thanks for signing up! Please verify your email address by clicking the button below:</p>
|
|
||||||
<p style="margin: 30px 0;">
|
|
||||||
<a href="{verify_url}" class="button">Verify Email</a>
|
|
||||||
</p>
|
|
||||||
<p>Or copy and paste this link into your browser:</p>
|
|
||||||
<p style="word-break: break-all; color: #666;">{verify_url}</p>
|
|
||||||
<p>This link will expire in 24 hours.</p>
|
|
||||||
<div class="footer">
|
|
||||||
<p>If you didn't create an account, you can safely ignore this email.</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
"""
|
|
||||||
|
|
||||||
return self._send_email(email, subject, html_body)
|
|
||||||
|
|
||||||
def send_password_reset_email(self, email: str, token: str) -> bool:
|
|
||||||
"""
|
|
||||||
Send password reset link.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
email: Recipient email address
|
|
||||||
token: Password reset token
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
True if sent successfully
|
|
||||||
"""
|
|
||||||
reset_url = f"{self.frontend_url}/reset-password?token={token}"
|
|
||||||
|
|
||||||
subject = "Reset your password"
|
|
||||||
html_body = f"""
|
|
||||||
<!DOCTYPE html>
|
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<meta charset="utf-8">
|
|
||||||
<style>
|
|
||||||
body {{ font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif; line-height: 1.6; color: #333; }}
|
|
||||||
.container {{ max-width: 600px; margin: 0 auto; padding: 20px; }}
|
|
||||||
.button {{ display: inline-block; padding: 12px 24px; background-color: #5046e5; color: white; text-decoration: none; border-radius: 6px; font-weight: 500; }}
|
|
||||||
.warning {{ background-color: #fef3c7; border: 1px solid #f59e0b; padding: 12px; border-radius: 6px; margin: 20px 0; }}
|
|
||||||
.footer {{ margin-top: 30px; font-size: 12px; color: #666; }}
|
|
||||||
</style>
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<div class="container">
|
|
||||||
<h2>Reset your password</h2>
|
|
||||||
<p>We received a request to reset your password. Click the button below to choose a new password:</p>
|
|
||||||
<p style="margin: 30px 0;">
|
|
||||||
<a href="{reset_url}" class="button">Reset Password</a>
|
|
||||||
</p>
|
|
||||||
<p>Or copy and paste this link into your browser:</p>
|
|
||||||
<p style="word-break: break-all; color: #666;">{reset_url}</p>
|
|
||||||
<div class="warning">
|
|
||||||
<strong>This link will expire in 15 minutes.</strong>
|
|
||||||
</div>
|
|
||||||
<div class="footer">
|
|
||||||
<p>If you didn't request a password reset, you can safely ignore this email. Your password will remain unchanged.</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
"""
|
|
||||||
|
|
||||||
return self._send_email(email, subject, html_body)
|
|
||||||
|
|
||||||
def send_password_changed_notification(self, email: str) -> bool:
|
|
||||||
"""
|
|
||||||
Send notification that password was changed.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
email: Recipient email address
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
True if sent successfully
|
|
||||||
"""
|
|
||||||
subject = "Your password was changed"
|
|
||||||
html_body = f"""
|
|
||||||
<!DOCTYPE html>
|
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<meta charset="utf-8">
|
|
||||||
<style>
|
|
||||||
body {{ font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif; line-height: 1.6; color: #333; }}
|
|
||||||
.container {{ max-width: 600px; margin: 0 auto; padding: 20px; }}
|
|
||||||
.warning {{ background-color: #fee2e2; border: 1px solid #ef4444; padding: 12px; border-radius: 6px; margin: 20px 0; }}
|
|
||||||
.footer {{ margin-top: 30px; font-size: 12px; color: #666; }}
|
|
||||||
</style>
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<div class="container">
|
|
||||||
<h2>Password Changed</h2>
|
|
||||||
<p>Your password was successfully changed.</p>
|
|
||||||
<div class="warning">
|
|
||||||
<strong>If you didn't make this change</strong>, please contact support immediately and reset your password.
|
|
||||||
</div>
|
|
||||||
<div class="footer">
|
|
||||||
<p>This is an automated security notification.</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
"""
|
|
||||||
|
|
||||||
return self._send_email(email, subject, html_body)
|
|
||||||
|
|
||||||
def send_migrated_user_password_reset(self, email: str, token: str) -> bool:
|
|
||||||
"""
|
|
||||||
Send password reset email for users migrated from Supabase.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
email: Recipient email address
|
|
||||||
token: Password reset token
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
True if sent successfully
|
|
||||||
"""
|
|
||||||
reset_url = f"{self.frontend_url}/reset-password?token={token}"
|
|
||||||
|
|
||||||
subject = "Action Required: Set your password"
|
|
||||||
html_body = f"""
|
|
||||||
<!DOCTYPE html>
|
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<meta charset="utf-8">
|
|
||||||
<style>
|
|
||||||
body {{ font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif; line-height: 1.6; color: #333; }}
|
|
||||||
.container {{ max-width: 600px; margin: 0 auto; padding: 20px; }}
|
|
||||||
.button {{ display: inline-block; padding: 12px 24px; background-color: #5046e5; color: white; text-decoration: none; border-radius: 6px; font-weight: 500; }}
|
|
||||||
.info {{ background-color: #dbeafe; border: 1px solid #3b82f6; padding: 12px; border-radius: 6px; margin: 20px 0; }}
|
|
||||||
.footer {{ margin-top: 30px; font-size: 12px; color: #666; }}
|
|
||||||
</style>
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<div class="container">
|
|
||||||
<h2>Set Your Password</h2>
|
|
||||||
<div class="info">
|
|
||||||
<strong>We've upgraded our authentication system!</strong>
|
|
||||||
<p style="margin: 8px 0 0 0;">For enhanced security, please set a new password to continue using your account.</p>
|
|
||||||
</div>
|
|
||||||
<p>Click the button below to set your password:</p>
|
|
||||||
<p style="margin: 30px 0;">
|
|
||||||
<a href="{reset_url}" class="button">Set Password</a>
|
|
||||||
</p>
|
|
||||||
<p>Or copy and paste this link into your browser:</p>
|
|
||||||
<p style="word-break: break-all; color: #666;">{reset_url}</p>
|
|
||||||
<p>This link will expire in 24 hours.</p>
|
|
||||||
<div class="footer">
|
|
||||||
<p>If you signed up with Google, no action is needed - simply continue signing in with Google.</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
"""
|
|
||||||
|
|
||||||
return self._send_email(email, subject, html_body)
|
|
||||||
|
|
||||||
|
|
||||||
# Singleton instance
|
|
||||||
_email_service: Optional[AuthEmailService] = None
|
|
||||||
|
|
||||||
|
|
||||||
def get_auth_email_service() -> AuthEmailService:
|
|
||||||
"""Get the singleton auth email service instance."""
|
|
||||||
global _email_service
|
|
||||||
if _email_service is None:
|
|
||||||
_email_service = AuthEmailService()
|
|
||||||
return _email_service
|
|
||||||
@@ -1,253 +0,0 @@
|
|||||||
"""
|
|
||||||
Magic link service for email verification and password reset.
|
|
||||||
|
|
||||||
Magic links are single-use, time-limited tokens sent via email that allow
|
|
||||||
users to verify their email address or reset their password without entering
|
|
||||||
the old password.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import hashlib
|
|
||||||
import logging
|
|
||||||
import secrets
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime, timedelta, timezone
|
|
||||||
from enum import Enum
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
from prisma.models import UserAuthMagicLink
|
|
||||||
from pydantic import BaseModel
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
# Magic link TTLs
|
|
||||||
EMAIL_VERIFICATION_TTL = timedelta(hours=24)
|
|
||||||
PASSWORD_RESET_TTL = timedelta(minutes=15)
|
|
||||||
|
|
||||||
# Token prefix for identification
|
|
||||||
MAGIC_LINK_PREFIX = "agpt_ml_"
|
|
||||||
|
|
||||||
|
|
||||||
class MagicLinkPurpose(str, Enum):
|
|
||||||
"""Purpose of the magic link."""
|
|
||||||
|
|
||||||
EMAIL_VERIFICATION = "email_verification"
|
|
||||||
PASSWORD_RESET = "password_reset"
|
|
||||||
|
|
||||||
|
|
||||||
class MagicLinkInfo(BaseModel):
|
|
||||||
"""Information about a valid magic link."""
|
|
||||||
|
|
||||||
email: str
|
|
||||||
purpose: MagicLinkPurpose
|
|
||||||
user_id: Optional[str] = None # Set for password reset, not for signup verification
|
|
||||||
|
|
||||||
|
|
||||||
def generate_magic_link_token() -> str:
|
|
||||||
"""
|
|
||||||
Generate a cryptographically secure magic link token.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
A prefixed random token string.
|
|
||||||
"""
|
|
||||||
random_bytes = secrets.token_urlsafe(32)
|
|
||||||
return f"{MAGIC_LINK_PREFIX}{random_bytes}"
|
|
||||||
|
|
||||||
|
|
||||||
def hash_magic_link_token(token: str) -> str:
|
|
||||||
"""
|
|
||||||
Hash a magic link token for storage.
|
|
||||||
|
|
||||||
Uses SHA256 for deterministic lookup.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
token: The plaintext magic link token.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
The SHA256 hex digest.
|
|
||||||
"""
|
|
||||||
return hashlib.sha256(token.encode()).hexdigest()
|
|
||||||
|
|
||||||
|
|
||||||
async def create_magic_link(
|
|
||||||
email: str,
|
|
||||||
purpose: MagicLinkPurpose,
|
|
||||||
user_id: Optional[str] = None,
|
|
||||||
) -> str:
|
|
||||||
"""
|
|
||||||
Create a magic link token and store it in the database.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
email: The email address associated with the link.
|
|
||||||
purpose: The purpose of the magic link.
|
|
||||||
user_id: Optional user ID (for password reset).
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
The plaintext magic link token.
|
|
||||||
"""
|
|
||||||
token = generate_magic_link_token()
|
|
||||||
token_hash = hash_magic_link_token(token)
|
|
||||||
|
|
||||||
# Determine TTL based on purpose
|
|
||||||
if purpose == MagicLinkPurpose.PASSWORD_RESET:
|
|
||||||
ttl = PASSWORD_RESET_TTL
|
|
||||||
else:
|
|
||||||
ttl = EMAIL_VERIFICATION_TTL
|
|
||||||
|
|
||||||
expires_at = datetime.now(timezone.utc) + ttl
|
|
||||||
|
|
||||||
# Invalidate any existing magic links for this email and purpose
|
|
||||||
await UserAuthMagicLink.prisma().update_many(
|
|
||||||
where={
|
|
||||||
"email": email,
|
|
||||||
"purpose": purpose.value,
|
|
||||||
"usedAt": None,
|
|
||||||
},
|
|
||||||
data={"usedAt": datetime.now(timezone.utc)}, # Mark as used to invalidate
|
|
||||||
)
|
|
||||||
|
|
||||||
# Create new magic link
|
|
||||||
await UserAuthMagicLink.prisma().create(
|
|
||||||
data={
|
|
||||||
"id": str(uuid.uuid4()),
|
|
||||||
"tokenHash": token_hash,
|
|
||||||
"email": email,
|
|
||||||
"purpose": purpose.value,
|
|
||||||
"userId": user_id,
|
|
||||||
"expiresAt": expires_at,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
return token
|
|
||||||
|
|
||||||
|
|
||||||
async def validate_magic_link(
|
|
||||||
token: str,
|
|
||||||
expected_purpose: Optional[MagicLinkPurpose] = None,
|
|
||||||
) -> Optional[MagicLinkInfo]:
|
|
||||||
"""
|
|
||||||
Validate a magic link token without consuming it.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
token: The plaintext magic link token.
|
|
||||||
expected_purpose: Optional expected purpose to validate against.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
MagicLinkInfo if valid, None otherwise.
|
|
||||||
"""
|
|
||||||
token_hash = hash_magic_link_token(token)
|
|
||||||
|
|
||||||
where_clause: dict = {
|
|
||||||
"tokenHash": token_hash,
|
|
||||||
"usedAt": None,
|
|
||||||
"expiresAt": {"gt": datetime.now(timezone.utc)},
|
|
||||||
}
|
|
||||||
|
|
||||||
if expected_purpose:
|
|
||||||
where_clause["purpose"] = expected_purpose.value
|
|
||||||
|
|
||||||
db_link = await UserAuthMagicLink.prisma().find_first(where=where_clause)
|
|
||||||
|
|
||||||
if not db_link:
|
|
||||||
return None
|
|
||||||
|
|
||||||
return MagicLinkInfo(
|
|
||||||
email=db_link.email,
|
|
||||||
purpose=MagicLinkPurpose(db_link.purpose),
|
|
||||||
user_id=db_link.userId,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
async def consume_magic_link(
|
|
||||||
token: str,
|
|
||||||
expected_purpose: Optional[MagicLinkPurpose] = None,
|
|
||||||
) -> Optional[MagicLinkInfo]:
|
|
||||||
"""
|
|
||||||
Validate and consume a magic link token (single-use).
|
|
||||||
|
|
||||||
Args:
|
|
||||||
token: The plaintext magic link token.
|
|
||||||
expected_purpose: Optional expected purpose to validate against.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
MagicLinkInfo if valid and successfully consumed, None otherwise.
|
|
||||||
"""
|
|
||||||
# First validate
|
|
||||||
link_info = await validate_magic_link(token, expected_purpose)
|
|
||||||
if not link_info:
|
|
||||||
return None
|
|
||||||
|
|
||||||
# Then consume (mark as used)
|
|
||||||
token_hash = hash_magic_link_token(token)
|
|
||||||
result = await UserAuthMagicLink.prisma().update_many(
|
|
||||||
where={
|
|
||||||
"tokenHash": token_hash,
|
|
||||||
"usedAt": None,
|
|
||||||
},
|
|
||||||
data={"usedAt": datetime.now(timezone.utc)},
|
|
||||||
)
|
|
||||||
|
|
||||||
if result == 0:
|
|
||||||
# Race condition - link was consumed by another request
|
|
||||||
logger.warning("Magic link was already consumed (race condition)")
|
|
||||||
return None
|
|
||||||
|
|
||||||
return link_info
|
|
||||||
|
|
||||||
|
|
||||||
async def create_email_verification_link(email: str) -> str:
|
|
||||||
"""
|
|
||||||
Create an email verification magic link.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
email: The email address to verify.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
The plaintext magic link token.
|
|
||||||
"""
|
|
||||||
return await create_magic_link(email, MagicLinkPurpose.EMAIL_VERIFICATION)
|
|
||||||
|
|
||||||
|
|
||||||
async def create_password_reset_link(email: str, user_id: str) -> str:
|
|
||||||
"""
|
|
||||||
Create a password reset magic link.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
email: The user's email address.
|
|
||||||
user_id: The user's ID.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
The plaintext magic link token.
|
|
||||||
"""
|
|
||||||
return await create_magic_link(
|
|
||||||
email, MagicLinkPurpose.PASSWORD_RESET, user_id=user_id
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
async def verify_email_token(token: str) -> Optional[str]:
|
|
||||||
"""
|
|
||||||
Verify an email verification token and consume it.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
token: The magic link token.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
The email address if valid, None otherwise.
|
|
||||||
"""
|
|
||||||
link_info = await consume_magic_link(token, MagicLinkPurpose.EMAIL_VERIFICATION)
|
|
||||||
return link_info.email if link_info else None
|
|
||||||
|
|
||||||
|
|
||||||
async def verify_password_reset_token(token: str) -> Optional[tuple[str, str]]:
|
|
||||||
"""
|
|
||||||
Verify a password reset token and consume it.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
token: The magic link token.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Tuple of (user_id, email) if valid, None otherwise.
|
|
||||||
"""
|
|
||||||
link_info = await consume_magic_link(token, MagicLinkPurpose.PASSWORD_RESET)
|
|
||||||
if not link_info or not link_info.user_id:
|
|
||||||
return None
|
|
||||||
return link_info.user_id, link_info.email
|
|
||||||
@@ -1,441 +0,0 @@
|
|||||||
"""
|
|
||||||
Migration script for moving users from Supabase Auth to native FastAPI auth.
|
|
||||||
|
|
||||||
This script handles:
|
|
||||||
1. Marking existing users as migrated from Supabase
|
|
||||||
2. Sending password reset emails to migrated users
|
|
||||||
3. Tracking migration progress
|
|
||||||
4. Generating reports
|
|
||||||
|
|
||||||
Usage:
|
|
||||||
# Dry run - see what would happen
|
|
||||||
python -m backend.data.auth.migration --dry-run
|
|
||||||
|
|
||||||
# Mark users as migrated (no emails)
|
|
||||||
python -m backend.data.auth.migration --mark-migrated
|
|
||||||
|
|
||||||
# Send password reset emails to migrated users
|
|
||||||
python -m backend.data.auth.migration --send-emails --batch-size 100
|
|
||||||
|
|
||||||
# Full migration (mark + send emails)
|
|
||||||
python -m backend.data.auth.migration --full-migration --batch-size 100
|
|
||||||
"""
|
|
||||||
|
|
||||||
import argparse
|
|
||||||
import asyncio
|
|
||||||
import csv
|
|
||||||
import logging
|
|
||||||
from datetime import datetime, timezone
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
from prisma.models import User
|
|
||||||
|
|
||||||
from backend.data.auth.email_service import get_auth_email_service
|
|
||||||
from backend.data.auth.magic_links import create_password_reset_link
|
|
||||||
from backend.data.db import connect, disconnect
|
|
||||||
|
|
||||||
logging.basicConfig(
|
|
||||||
level=logging.INFO,
|
|
||||||
format="%(asctime)s - %(levelname)s - %(message)s",
|
|
||||||
)
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class MigrationStats:
|
|
||||||
"""Track migration statistics."""
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
self.total_users = 0
|
|
||||||
self.already_migrated = 0
|
|
||||||
self.marked_migrated = 0
|
|
||||||
self.emails_sent = 0
|
|
||||||
self.emails_failed = 0
|
|
||||||
self.oauth_users_skipped = 0
|
|
||||||
self.errors = []
|
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
return f"""
|
|
||||||
Migration Statistics:
|
|
||||||
---------------------
|
|
||||||
Total users processed: {self.total_users}
|
|
||||||
Already migrated: {self.already_migrated}
|
|
||||||
Newly marked as migrated: {self.marked_migrated}
|
|
||||||
Password reset emails sent: {self.emails_sent}
|
|
||||||
Email failures: {self.emails_failed}
|
|
||||||
OAuth users skipped: {self.oauth_users_skipped}
|
|
||||||
Errors: {len(self.errors)}
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
async def get_users_to_migrate(
|
|
||||||
batch_size: int = 100,
|
|
||||||
offset: int = 0,
|
|
||||||
) -> list[User]:
|
|
||||||
"""
|
|
||||||
Get users that need to be migrated.
|
|
||||||
|
|
||||||
Returns users where:
|
|
||||||
- authProvider is "supabase" or NULL
|
|
||||||
- migratedFromSupabase is False or NULL
|
|
||||||
- passwordHash is NULL (they haven't set a native password)
|
|
||||||
"""
|
|
||||||
users = await User.prisma().find_many(
|
|
||||||
where={
|
|
||||||
"OR": [
|
|
||||||
{"authProvider": "supabase"},
|
|
||||||
{"authProvider": None},
|
|
||||||
],
|
|
||||||
"migratedFromSupabase": False,
|
|
||||||
"passwordHash": None,
|
|
||||||
},
|
|
||||||
take=batch_size,
|
|
||||||
skip=offset,
|
|
||||||
order={"createdAt": "asc"},
|
|
||||||
)
|
|
||||||
return users
|
|
||||||
|
|
||||||
|
|
||||||
async def get_migrated_users_needing_email(
|
|
||||||
batch_size: int = 100,
|
|
||||||
offset: int = 0,
|
|
||||||
) -> list[User]:
|
|
||||||
"""
|
|
||||||
Get migrated users who haven't set their password yet.
|
|
||||||
|
|
||||||
These users need a password reset email.
|
|
||||||
"""
|
|
||||||
users = await User.prisma().find_many(
|
|
||||||
where={
|
|
||||||
"migratedFromSupabase": True,
|
|
||||||
"passwordHash": None,
|
|
||||||
"authProvider": {"not": "google"}, # Skip OAuth users
|
|
||||||
},
|
|
||||||
take=batch_size,
|
|
||||||
skip=offset,
|
|
||||||
order={"createdAt": "asc"},
|
|
||||||
)
|
|
||||||
return users
|
|
||||||
|
|
||||||
|
|
||||||
async def mark_user_as_migrated(user: User, dry_run: bool = False) -> bool:
|
|
||||||
"""
|
|
||||||
Mark a user as migrated from Supabase.
|
|
||||||
|
|
||||||
Sets migratedFromSupabase=True and authProvider="supabase".
|
|
||||||
"""
|
|
||||||
if dry_run:
|
|
||||||
logger.info(f"[DRY RUN] Would mark user {user.id} ({user.email}) as migrated")
|
|
||||||
return True
|
|
||||||
|
|
||||||
try:
|
|
||||||
await User.prisma().update(
|
|
||||||
where={"id": user.id},
|
|
||||||
data={
|
|
||||||
"migratedFromSupabase": True,
|
|
||||||
"authProvider": "supabase",
|
|
||||||
},
|
|
||||||
)
|
|
||||||
logger.info(f"Marked user {user.id} ({user.email}) as migrated")
|
|
||||||
return True
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Failed to mark user {user.id} as migrated: {e}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
async def send_migration_email(
|
|
||||||
user: User,
|
|
||||||
email_service,
|
|
||||||
dry_run: bool = False,
|
|
||||||
) -> bool:
|
|
||||||
"""
|
|
||||||
Send a password reset email to a migrated user.
|
|
||||||
"""
|
|
||||||
if dry_run:
|
|
||||||
logger.info(f"[DRY RUN] Would send migration email to {user.email}")
|
|
||||||
return True
|
|
||||||
|
|
||||||
try:
|
|
||||||
token = await create_password_reset_link(user.email, user.id)
|
|
||||||
success = email_service.send_migrated_user_password_reset(user.email, token)
|
|
||||||
|
|
||||||
if success:
|
|
||||||
logger.info(f"Sent migration email to {user.email}")
|
|
||||||
else:
|
|
||||||
logger.warning(f"Failed to send migration email to {user.email}")
|
|
||||||
|
|
||||||
return success
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Error sending migration email to {user.email}: {e}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
async def run_migration(
|
|
||||||
mark_migrated: bool = False,
|
|
||||||
send_emails: bool = False,
|
|
||||||
batch_size: int = 100,
|
|
||||||
dry_run: bool = False,
|
|
||||||
email_delay: float = 0.5, # Delay between emails to avoid rate limiting
|
|
||||||
) -> MigrationStats:
|
|
||||||
"""
|
|
||||||
Run the migration process.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
mark_migrated: Mark users as migrated from Supabase
|
|
||||||
send_emails: Send password reset emails to migrated users
|
|
||||||
batch_size: Number of users to process at a time
|
|
||||||
dry_run: If True, don't make any changes
|
|
||||||
email_delay: Seconds to wait between sending emails
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
MigrationStats with results
|
|
||||||
"""
|
|
||||||
stats = MigrationStats()
|
|
||||||
email_service = get_auth_email_service() if send_emails else None
|
|
||||||
|
|
||||||
# Phase 1: Mark users as migrated
|
|
||||||
if mark_migrated:
|
|
||||||
logger.info("Phase 1: Marking users as migrated...")
|
|
||||||
offset = 0
|
|
||||||
|
|
||||||
while True:
|
|
||||||
users = await get_users_to_migrate(batch_size, offset)
|
|
||||||
if not users:
|
|
||||||
break
|
|
||||||
|
|
||||||
for user in users:
|
|
||||||
stats.total_users += 1
|
|
||||||
|
|
||||||
# Skip OAuth users
|
|
||||||
if user.authProvider == "google":
|
|
||||||
stats.oauth_users_skipped += 1
|
|
||||||
continue
|
|
||||||
|
|
||||||
success = await mark_user_as_migrated(user, dry_run)
|
|
||||||
if success:
|
|
||||||
stats.marked_migrated += 1
|
|
||||||
else:
|
|
||||||
stats.errors.append(f"Failed to mark {user.email}")
|
|
||||||
|
|
||||||
offset += batch_size
|
|
||||||
logger.info(f"Processed {offset} users...")
|
|
||||||
|
|
||||||
# Phase 2: Send password reset emails
|
|
||||||
if send_emails:
|
|
||||||
logger.info("Phase 2: Sending password reset emails...")
|
|
||||||
offset = 0
|
|
||||||
|
|
||||||
while True:
|
|
||||||
users = await get_migrated_users_needing_email(batch_size, offset)
|
|
||||||
if not users:
|
|
            break

        for user in users:
            stats.total_users += 1

            success = await send_migration_email(user, email_service, dry_run)

            if success:
                stats.emails_sent += 1
            else:
                stats.emails_failed += 1
                stats.errors.append(f"Failed to email {user.email}")

            # Rate limiting
            if not dry_run and email_delay > 0:
                await asyncio.sleep(email_delay)

        offset += batch_size
        logger.info(f"Processed {offset} users for email...")

    return stats


async def generate_migration_report(output_path: Optional[str] = None) -> str:
    """
    Generate a CSV report of all users and their migration status.
    """
    if output_path is None:
        output_path = f"migration_report_{datetime.now().strftime('%Y%m%d_%H%M%S')}.csv"

    users = await User.prisma().find_many(
        order={"createdAt": "asc"},
    )

    with open(output_path, "w", newline="") as f:
        writer = csv.writer(f)
        writer.writerow([
            "user_id",
            "email",
            "auth_provider",
            "migrated_from_supabase",
            "has_password",
            "email_verified",
            "created_at",
            "needs_action",
        ])

        for user in users:
            needs_action = (
                user.migratedFromSupabase
                and user.passwordHash is None
                and user.authProvider != "google"
            )

            writer.writerow([
                user.id,
                user.email,
                user.authProvider or "unknown",
                user.migratedFromSupabase,
                user.passwordHash is not None,
                user.emailVerified,
                user.createdAt.isoformat() if user.createdAt else "",
                "YES" if needs_action else "NO",
            ])

    logger.info(f"Report saved to {output_path}")
    return output_path


async def count_migration_status():
    """
    Get counts of users in different migration states.
    """
    total = await User.prisma().count()

    already_native = await User.prisma().count(
        where={"authProvider": "password", "passwordHash": {"not": None}}
    )

    oauth_users = await User.prisma().count(
        where={"authProvider": "google"}
    )

    migrated_pending = await User.prisma().count(
        where={
            "migratedFromSupabase": True,
            "passwordHash": None,
            "authProvider": {"not": "google"},
        }
    )

    not_migrated = await User.prisma().count(
        where={
            "migratedFromSupabase": False,
            "authProvider": {"in": ["supabase", None]},
        }
    )

    return {
        "total": total,
        "already_native": already_native,
        "oauth_users": oauth_users,
        "migrated_pending_password": migrated_pending,
        "not_yet_migrated": not_migrated,
    }


async def main():
    parser = argparse.ArgumentParser(
        description="Migrate users from Supabase Auth to native FastAPI auth"
    )
    parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Don't make any changes, just show what would happen",
    )
    parser.add_argument(
        "--mark-migrated",
        action="store_true",
        help="Mark existing Supabase users as migrated",
    )
    parser.add_argument(
        "--send-emails",
        action="store_true",
        help="Send password reset emails to migrated users",
    )
    parser.add_argument(
        "--full-migration",
        action="store_true",
        help="Run full migration (mark + send emails)",
    )
    parser.add_argument(
        "--batch-size",
        type=int,
        default=100,
        help="Number of users to process at a time (default: 100)",
    )
    parser.add_argument(
        "--email-delay",
        type=float,
        default=0.5,
        help="Seconds to wait between emails (default: 0.5)",
    )
    parser.add_argument(
        "--report",
        action="store_true",
        help="Generate a CSV report of migration status",
    )
    parser.add_argument(
        "--status",
        action="store_true",
        help="Show current migration status counts",
    )

    args = parser.parse_args()

    # Connect to database
    await connect()

    try:
        if args.status:
            counts = await count_migration_status()
            print("\nMigration Status:")
            print("-" * 40)
            print(f"Total users: {counts['total']}")
            print(f"Already using native auth: {counts['already_native']}")
            print(f"OAuth users (Google): {counts['oauth_users']}")
            print(f"Migrated, pending password: {counts['migrated_pending_password']}")
            print(f"Not yet migrated: {counts['not_yet_migrated']}")
            return

        if args.report:
            await generate_migration_report()
            return

        if args.full_migration:
            args.mark_migrated = True
            args.send_emails = True

        if not args.mark_migrated and not args.send_emails:
            parser.print_help()
            print("\nError: Must specify --mark-migrated, --send-emails, --full-migration, --report, or --status")
            return

        if args.dry_run:
            logger.info("=" * 50)
            logger.info("DRY RUN MODE - No changes will be made")
            logger.info("=" * 50)

        stats = await run_migration(
            mark_migrated=args.mark_migrated,
            send_emails=args.send_emails,
            batch_size=args.batch_size,
            dry_run=args.dry_run,
            email_delay=args.email_delay,
        )

        print(stats)

        if stats.errors:
            print("\nErrors encountered:")
            for error in stats.errors[:10]:  # Show first 10 errors
                print(f" - {error}")
            if len(stats.errors) > 10:
                print(f" ... and {len(stats.errors) - 10} more")

    finally:
        await disconnect()


if __name__ == "__main__":
    asyncio.run(main())

@@ -1,872 +0,0 @@
"""
OAuth 2.0 Provider Data Layer

Handles management of OAuth applications, authorization codes,
access tokens, and refresh tokens.

Hashing strategy:
- Access tokens & Refresh tokens: SHA256 (deterministic, allows direct lookup by hash)
- Client secrets: Scrypt with salt (lookup by client_id, then verify with salt)
"""

import hashlib
import logging
import secrets
import uuid
from datetime import datetime, timedelta, timezone
from typing import Literal, Optional

from autogpt_libs.api_key.keysmith import APIKeySmith
from prisma.enums import APIKeyPermission as APIPermission
from prisma.models import OAuthAccessToken as PrismaOAuthAccessToken
from prisma.models import OAuthApplication as PrismaOAuthApplication
from prisma.models import OAuthAuthorizationCode as PrismaOAuthAuthorizationCode
from prisma.models import OAuthRefreshToken as PrismaOAuthRefreshToken
from prisma.types import OAuthApplicationUpdateInput
from pydantic import BaseModel, Field, SecretStr

from .base import APIAuthorizationInfo

logger = logging.getLogger(__name__)
keysmith = APIKeySmith()  # Only used for client secret hashing (Scrypt)


def _generate_token() -> str:
    """Generate a cryptographically secure random token."""
    return secrets.token_urlsafe(32)


def _hash_token(token: str) -> str:
    """Hash a token using SHA256 (deterministic, for direct lookup)."""
    return hashlib.sha256(token.encode()).hexdigest()


# Token TTLs
AUTHORIZATION_CODE_TTL = timedelta(minutes=10)
ACCESS_TOKEN_TTL = timedelta(hours=1)
REFRESH_TOKEN_TTL = timedelta(days=30)

ACCESS_TOKEN_PREFIX = "agpt_xt_"
REFRESH_TOKEN_PREFIX = "agpt_rt_"

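# Illustrative sketch (not part of the original module): the two hashing modes the
# docstring above describes. Tokens are hashed with SHA256 so the hash itself can be
# used as a deterministic database lookup key; client secrets are looked up by
# client_id first, so they can use a per-secret salt via APIKeySmith (the same
# hash_key/verify_key pair that verify_secret() relies on further down).
def _example_hashing_strategy() -> None:
    token = _generate_token()
    lookup_key = _hash_token(token)
    assert _hash_token(token) == lookup_key  # deterministic: same token, same key

    # Salted hashing for client secrets; the secret value here is a placeholder.
    secret_hash, secret_salt = keysmith.hash_key("example-client-secret")
    assert keysmith.verify_key("example-client-secret", secret_hash, secret_salt)
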
# ============================================================================
# Exception Classes
# ============================================================================


class OAuthError(Exception):
    """Base OAuth error"""

    pass


class InvalidClientError(OAuthError):
    """Invalid client_id or client_secret"""

    pass


class InvalidGrantError(OAuthError):
    """Invalid or expired authorization code/refresh token"""

    def __init__(self, reason: str):
        self.reason = reason
        super().__init__(f"Invalid grant: {reason}")


class InvalidTokenError(OAuthError):
    """Invalid, expired, or revoked token"""

    def __init__(self, reason: str):
        self.reason = reason
        super().__init__(f"Invalid token: {reason}")


# ============================================================================
# Data Models
# ============================================================================


class OAuthApplicationInfo(BaseModel):
    """OAuth application information (without client secret hash)"""

    id: str
    name: str
    description: Optional[str] = None
    logo_url: Optional[str] = None
    client_id: str
    redirect_uris: list[str]
    grant_types: list[str]
    scopes: list[APIPermission]
    owner_id: str
    is_active: bool
    created_at: datetime
    updated_at: datetime

    @staticmethod
    def from_db(app: PrismaOAuthApplication):
        return OAuthApplicationInfo(
            id=app.id,
            name=app.name,
            description=app.description,
            logo_url=app.logoUrl,
            client_id=app.clientId,
            redirect_uris=app.redirectUris,
            grant_types=app.grantTypes,
            scopes=[APIPermission(s) for s in app.scopes],
            owner_id=app.ownerId,
            is_active=app.isActive,
            created_at=app.createdAt,
            updated_at=app.updatedAt,
        )


class OAuthApplicationInfoWithSecret(OAuthApplicationInfo):
    """OAuth application with client secret hash (for validation)"""

    client_secret_hash: str
    client_secret_salt: str

    @staticmethod
    def from_db(app: PrismaOAuthApplication):
        return OAuthApplicationInfoWithSecret(
            **OAuthApplicationInfo.from_db(app).model_dump(),
            client_secret_hash=app.clientSecret,
            client_secret_salt=app.clientSecretSalt,
        )

    def verify_secret(self, plaintext_secret: str) -> bool:
        """Verify a plaintext client secret against the stored hash"""
        # Use keysmith.verify_key() with stored salt
        return keysmith.verify_key(
            plaintext_secret, self.client_secret_hash, self.client_secret_salt
        )


class OAuthAuthorizationCodeInfo(BaseModel):
    """Authorization code information"""

    id: str
    code: str
    created_at: datetime
    expires_at: datetime
    application_id: str
    user_id: str
    scopes: list[APIPermission]
    redirect_uri: str
    code_challenge: Optional[str] = None
    code_challenge_method: Optional[str] = None
    used_at: Optional[datetime] = None

    @property
    def is_used(self) -> bool:
        return self.used_at is not None

    @staticmethod
    def from_db(code: PrismaOAuthAuthorizationCode):
        return OAuthAuthorizationCodeInfo(
            id=code.id,
            code=code.code,
            created_at=code.createdAt,
            expires_at=code.expiresAt,
            application_id=code.applicationId,
            user_id=code.userId,
            scopes=[APIPermission(s) for s in code.scopes],
            redirect_uri=code.redirectUri,
            code_challenge=code.codeChallenge,
            code_challenge_method=code.codeChallengeMethod,
            used_at=code.usedAt,
        )


class OAuthAccessTokenInfo(APIAuthorizationInfo):
    """Access token information"""

    id: str
    expires_at: datetime  # type: ignore
    application_id: str

    type: Literal["oauth"] = "oauth"  # type: ignore

    @staticmethod
    def from_db(token: PrismaOAuthAccessToken):
        return OAuthAccessTokenInfo(
            id=token.id,
            user_id=token.userId,
            scopes=[APIPermission(s) for s in token.scopes],
            created_at=token.createdAt,
            expires_at=token.expiresAt,
            last_used_at=None,
            revoked_at=token.revokedAt,
            application_id=token.applicationId,
        )


class OAuthAccessToken(OAuthAccessTokenInfo):
    """Access token with plaintext token included (sensitive)"""

    token: SecretStr = Field(description="Plaintext token (sensitive)")

    @staticmethod
    def from_db(token: PrismaOAuthAccessToken, plaintext_token: str):  # type: ignore
        return OAuthAccessToken(
            **OAuthAccessTokenInfo.from_db(token).model_dump(),
            token=SecretStr(plaintext_token),
        )


class OAuthRefreshTokenInfo(BaseModel):
    """Refresh token information"""

    id: str
    user_id: str
    scopes: list[APIPermission]
    created_at: datetime
    expires_at: datetime
    application_id: str
    revoked_at: Optional[datetime] = None

    @property
    def is_revoked(self) -> bool:
        return self.revoked_at is not None

    @staticmethod
    def from_db(token: PrismaOAuthRefreshToken):
        return OAuthRefreshTokenInfo(
            id=token.id,
            user_id=token.userId,
            scopes=[APIPermission(s) for s in token.scopes],
            created_at=token.createdAt,
            expires_at=token.expiresAt,
            application_id=token.applicationId,
            revoked_at=token.revokedAt,
        )


class OAuthRefreshToken(OAuthRefreshTokenInfo):
    """Refresh token with plaintext token included (sensitive)"""

    token: SecretStr = Field(description="Plaintext token (sensitive)")

    @staticmethod
    def from_db(token: PrismaOAuthRefreshToken, plaintext_token: str):  # type: ignore
        return OAuthRefreshToken(
            **OAuthRefreshTokenInfo.from_db(token).model_dump(),
            token=SecretStr(plaintext_token),
        )


class TokenIntrospectionResult(BaseModel):
    """Result of token introspection (RFC 7662)"""

    active: bool
    scopes: Optional[list[str]] = None
    client_id: Optional[str] = None
    user_id: Optional[str] = None
    exp: Optional[int] = None  # Unix timestamp
    token_type: Optional[Literal["access_token", "refresh_token"]] = None


# ============================================================================
# OAuth Application Management
# ============================================================================


async def get_oauth_application(client_id: str) -> Optional[OAuthApplicationInfo]:
    """Get OAuth application by client ID (without secret)"""
    app = await PrismaOAuthApplication.prisma().find_unique(
        where={"clientId": client_id}
    )
    if not app:
        return None
    return OAuthApplicationInfo.from_db(app)


async def get_oauth_application_with_secret(
    client_id: str,
) -> Optional[OAuthApplicationInfoWithSecret]:
    """Get OAuth application by client ID (with secret hash for validation)"""
    app = await PrismaOAuthApplication.prisma().find_unique(
        where={"clientId": client_id}
    )
    if not app:
        return None
    return OAuthApplicationInfoWithSecret.from_db(app)


async def validate_client_credentials(
    client_id: str, client_secret: str
) -> OAuthApplicationInfo:
    """
    Validate client credentials and return application info.

    Raises:
        InvalidClientError: If client_id or client_secret is invalid, or app is inactive
    """
    app = await get_oauth_application_with_secret(client_id)
    if not app:
        raise InvalidClientError("Invalid client_id")

    if not app.is_active:
        raise InvalidClientError("Application is not active")

    # Verify client secret
    if not app.verify_secret(client_secret):
        raise InvalidClientError("Invalid client_secret")

    # Return without secret hash
    return OAuthApplicationInfo(**app.model_dump(exclude={"client_secret_hash"}))


def validate_redirect_uri(app: OAuthApplicationInfo, redirect_uri: str) -> bool:
    """Validate that redirect URI is registered for the application"""
    return redirect_uri in app.redirect_uris


def validate_scopes(
    app: OAuthApplicationInfo, requested_scopes: list[APIPermission]
) -> bool:
    """Validate that all requested scopes are allowed for the application"""
    return all(scope in app.scopes for scope in requested_scopes)


# ============================================================================
# Authorization Code Flow
# ============================================================================


def _generate_authorization_code() -> str:
    """Generate a cryptographically secure authorization code"""
    # 32 bytes = 256 bits of entropy
    return secrets.token_urlsafe(32)


async def create_authorization_code(
    application_id: str,
    user_id: str,
    scopes: list[APIPermission],
    redirect_uri: str,
    code_challenge: Optional[str] = None,
    code_challenge_method: Optional[Literal["S256", "plain"]] = None,
) -> OAuthAuthorizationCodeInfo:
    """
    Create a new authorization code.
    Expires in 10 minutes and can only be used once.
    """
    code = _generate_authorization_code()
    now = datetime.now(timezone.utc)
    expires_at = now + AUTHORIZATION_CODE_TTL

    saved_code = await PrismaOAuthAuthorizationCode.prisma().create(
        data={
            "id": str(uuid.uuid4()),
            "code": code,
            "expiresAt": expires_at,
            "applicationId": application_id,
            "userId": user_id,
            "scopes": [s for s in scopes],
            "redirectUri": redirect_uri,
            "codeChallenge": code_challenge,
            "codeChallengeMethod": code_challenge_method,
        }
    )

    return OAuthAuthorizationCodeInfo.from_db(saved_code)


async def consume_authorization_code(
    code: str,
    application_id: str,
    redirect_uri: str,
    code_verifier: Optional[str] = None,
) -> tuple[str, list[APIPermission]]:
    """
    Consume an authorization code and return (user_id, scopes).

    This marks the code as used and validates:
    - Code exists and matches application
    - Code is not expired
    - Code has not been used
    - Redirect URI matches
    - PKCE code verifier matches (if code challenge was provided)

    Raises:
        InvalidGrantError: If code is invalid, expired, used, or PKCE fails
    """
    auth_code = await PrismaOAuthAuthorizationCode.prisma().find_unique(
        where={"code": code}
    )

    if not auth_code:
        raise InvalidGrantError("authorization code not found")

    # Validate application
    if auth_code.applicationId != application_id:
        raise InvalidGrantError(
            "authorization code does not belong to this application"
        )

    # Check if already used
    if auth_code.usedAt is not None:
        raise InvalidGrantError(
            f"authorization code already used at {auth_code.usedAt}"
        )

    # Check expiration
    now = datetime.now(timezone.utc)
    if auth_code.expiresAt < now:
        raise InvalidGrantError("authorization code expired")

    # Validate redirect URI
    if auth_code.redirectUri != redirect_uri:
        raise InvalidGrantError("redirect_uri mismatch")

    # Validate PKCE if code challenge was provided
    if auth_code.codeChallenge:
        if not code_verifier:
            raise InvalidGrantError("code_verifier required but not provided")

        if not _verify_pkce(
            code_verifier, auth_code.codeChallenge, auth_code.codeChallengeMethod
        ):
            raise InvalidGrantError("PKCE verification failed")

    # Mark code as used
    await PrismaOAuthAuthorizationCode.prisma().update(
        where={"code": code},
        data={"usedAt": now},
    )

    return auth_code.userId, [APIPermission(s) for s in auth_code.scopes]


def _verify_pkce(
    code_verifier: str, code_challenge: str, code_challenge_method: Optional[str]
) -> bool:
    """
    Verify PKCE code verifier against code challenge.

    Supports:
    - S256: SHA256(code_verifier) == code_challenge
    - plain: code_verifier == code_challenge
    """
    if code_challenge_method == "S256":
        # Hash the verifier with SHA256, then base64url encode without padding
        import base64

        hashed = hashlib.sha256(code_verifier.encode("ascii")).digest()
        computed_challenge = (
            base64.urlsafe_b64encode(hashed).decode("ascii").rstrip("=")
        )
        return secrets.compare_digest(computed_challenge, code_challenge)
    elif code_challenge_method == "plain" or code_challenge_method is None:
        # Plain comparison
        return secrets.compare_digest(code_verifier, code_challenge)
    else:
        logger.warning(f"Unsupported code challenge method: {code_challenge_method}")
        return False


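# Illustrative sketch (not part of the original module): how a client would build a
# PKCE pair that _verify_pkce() above accepts with method "S256". The helper name is
# hypothetical; only secrets, hashlib, and base64 from the standard library are used.
def _example_make_pkce_pair() -> tuple[str, str]:
    import base64

    code_verifier = secrets.token_urlsafe(32)
    code_challenge = (
        base64.urlsafe_b64encode(hashlib.sha256(code_verifier.encode("ascii")).digest())
        .decode("ascii")
        .rstrip("=")
    )
    assert _verify_pkce(code_verifier, code_challenge, "S256")
    return code_verifier, code_challenge
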
# ============================================================================
# Access Token Management
# ============================================================================


async def create_access_token(
    application_id: str, user_id: str, scopes: list[APIPermission]
) -> OAuthAccessToken:
    """
    Create a new access token.
    Returns OAuthAccessToken (with plaintext token).
    """
    plaintext_token = ACCESS_TOKEN_PREFIX + _generate_token()
    token_hash = _hash_token(plaintext_token)
    now = datetime.now(timezone.utc)
    expires_at = now + ACCESS_TOKEN_TTL

    saved_token = await PrismaOAuthAccessToken.prisma().create(
        data={
            "id": str(uuid.uuid4()),
            "token": token_hash,  # SHA256 hash for direct lookup
            "expiresAt": expires_at,
            "applicationId": application_id,
            "userId": user_id,
            "scopes": [s for s in scopes],
        }
    )

    return OAuthAccessToken.from_db(saved_token, plaintext_token=plaintext_token)


async def validate_access_token(
    token: str,
) -> tuple[OAuthAccessTokenInfo, OAuthApplicationInfo]:
    """
    Validate an access token and return token info.

    Raises:
        InvalidTokenError: If token is invalid, expired, or revoked
        InvalidClientError: If the client application is not marked as active
    """
    token_hash = _hash_token(token)

    # Direct lookup by hash
    access_token = await PrismaOAuthAccessToken.prisma().find_unique(
        where={"token": token_hash}, include={"Application": True}
    )

    if not access_token:
        raise InvalidTokenError("access token not found")

    if not access_token.Application:  # should be impossible
        raise InvalidClientError("Client application not found")

    if not access_token.Application.isActive:
        raise InvalidClientError("Client application is disabled")

    if access_token.revokedAt is not None:
        raise InvalidTokenError("access token has been revoked")

    # Check expiration
    now = datetime.now(timezone.utc)
    if access_token.expiresAt < now:
        raise InvalidTokenError("access token expired")

    return (
        OAuthAccessTokenInfo.from_db(access_token),
        OAuthApplicationInfo.from_db(access_token.Application),
    )


async def revoke_access_token(
    token: str, application_id: str
) -> OAuthAccessTokenInfo | None:
    """
    Revoke an access token.

    Args:
        token: The plaintext access token to revoke
        application_id: The application ID making the revocation request.
            Only tokens belonging to this application will be revoked.

    Returns:
        OAuthAccessTokenInfo if token was found and revoked, None otherwise.

    Note:
        Always performs exactly 2 DB queries regardless of outcome to prevent
        timing side-channel attacks that could reveal token existence.
    """
    try:
        token_hash = _hash_token(token)

        # Use update_many to filter by both token and applicationId
        updated_count = await PrismaOAuthAccessToken.prisma().update_many(
            where={
                "token": token_hash,
                "applicationId": application_id,
                "revokedAt": None,
            },
            data={"revokedAt": datetime.now(timezone.utc)},
        )

        # Always perform second query to ensure constant time
        result = await PrismaOAuthAccessToken.prisma().find_unique(
            where={"token": token_hash}
        )

        # Only return result if we actually revoked something
        if updated_count == 0:
            return None

        return OAuthAccessTokenInfo.from_db(result) if result else None
    except Exception as e:
        logger.exception(f"Error revoking access token: {e}")
        return None


# ============================================================================
# Refresh Token Management
# ============================================================================


async def create_refresh_token(
    application_id: str, user_id: str, scopes: list[APIPermission]
) -> OAuthRefreshToken:
    """
    Create a new refresh token.
    Returns OAuthRefreshToken (with plaintext token).
    """
    plaintext_token = REFRESH_TOKEN_PREFIX + _generate_token()
    token_hash = _hash_token(plaintext_token)
    now = datetime.now(timezone.utc)
    expires_at = now + REFRESH_TOKEN_TTL

    saved_token = await PrismaOAuthRefreshToken.prisma().create(
        data={
            "id": str(uuid.uuid4()),
            "token": token_hash,  # SHA256 hash for direct lookup
            "expiresAt": expires_at,
            "applicationId": application_id,
            "userId": user_id,
            "scopes": [s for s in scopes],
        }
    )

    return OAuthRefreshToken.from_db(saved_token, plaintext_token=plaintext_token)


async def refresh_tokens(
    refresh_token: str, application_id: str
) -> tuple[OAuthAccessToken, OAuthRefreshToken]:
    """
    Use a refresh token to create new access and refresh tokens.
    Returns (new_access_token, new_refresh_token) both with plaintext tokens included.

    Raises:
        InvalidGrantError: If refresh token is invalid, expired, or revoked
    """
    token_hash = _hash_token(refresh_token)

    # Direct lookup by hash
    rt = await PrismaOAuthRefreshToken.prisma().find_unique(where={"token": token_hash})

    if not rt:
        raise InvalidGrantError("refresh token not found")

    # NOTE: no need to check Application.isActive, this is checked by the token endpoint

    if rt.revokedAt is not None:
        raise InvalidGrantError("refresh token has been revoked")

    # Validate application
    if rt.applicationId != application_id:
        raise InvalidGrantError("refresh token does not belong to this application")

    # Check expiration
    now = datetime.now(timezone.utc)
    if rt.expiresAt < now:
        raise InvalidGrantError("refresh token expired")

    # Revoke old refresh token
    await PrismaOAuthRefreshToken.prisma().update(
        where={"token": token_hash},
        data={"revokedAt": now},
    )

    # Create new access and refresh tokens with same scopes
    scopes = [APIPermission(s) for s in rt.scopes]
    new_access_token = await create_access_token(
        rt.applicationId,
        rt.userId,
        scopes,
    )
    new_refresh_token = await create_refresh_token(
        rt.applicationId,
        rt.userId,
        scopes,
    )

    return new_access_token, new_refresh_token


async def revoke_refresh_token(
    token: str, application_id: str
) -> OAuthRefreshTokenInfo | None:
    """
    Revoke a refresh token.

    Args:
        token: The plaintext refresh token to revoke
        application_id: The application ID making the revocation request.
            Only tokens belonging to this application will be revoked.

    Returns:
        OAuthRefreshTokenInfo if token was found and revoked, None otherwise.

    Note:
        Always performs exactly 2 DB queries regardless of outcome to prevent
        timing side-channel attacks that could reveal token existence.
    """
    try:
        token_hash = _hash_token(token)

        # Use update_many to filter by both token and applicationId
        updated_count = await PrismaOAuthRefreshToken.prisma().update_many(
            where={
                "token": token_hash,
                "applicationId": application_id,
                "revokedAt": None,
            },
            data={"revokedAt": datetime.now(timezone.utc)},
        )

        # Always perform second query to ensure constant time
        result = await PrismaOAuthRefreshToken.prisma().find_unique(
            where={"token": token_hash}
        )

        # Only return result if we actually revoked something
        if updated_count == 0:
            return None

        return OAuthRefreshTokenInfo.from_db(result) if result else None
    except Exception as e:
        logger.exception(f"Error revoking refresh token: {e}")
        return None


# ============================================================================
# Token Introspection
# ============================================================================


async def introspect_token(
    token: str,
    token_type_hint: Optional[Literal["access_token", "refresh_token"]] = None,
) -> TokenIntrospectionResult:
    """
    Introspect a token and return its metadata (RFC 7662).

    Returns TokenIntrospectionResult with active=True and metadata if valid,
    or active=False if the token is invalid/expired/revoked.
    """
    # Try as access token first (or if hint says "access_token")
    if token_type_hint != "refresh_token":
        try:
            token_info, app = await validate_access_token(token)
            return TokenIntrospectionResult(
                active=True,
                scopes=list(s.value for s in token_info.scopes),
                client_id=app.client_id if app else None,
                user_id=token_info.user_id,
                exp=int(token_info.expires_at.timestamp()),
                token_type="access_token",
            )
        except InvalidTokenError:
            pass  # Try as refresh token

    # Try as refresh token
    token_hash = _hash_token(token)
    refresh_token = await PrismaOAuthRefreshToken.prisma().find_unique(
        where={"token": token_hash}
    )

    if refresh_token and refresh_token.revokedAt is None:
        # Check if valid (not expired)
        now = datetime.now(timezone.utc)
        if refresh_token.expiresAt > now:
            app = await get_oauth_application_by_id(refresh_token.applicationId)
            return TokenIntrospectionResult(
                active=True,
                scopes=list(s for s in refresh_token.scopes),
                client_id=app.client_id if app else None,
                user_id=refresh_token.userId,
                exp=int(refresh_token.expiresAt.timestamp()),
                token_type="refresh_token",
            )

    # Token not found or inactive
    return TokenIntrospectionResult(active=False)


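# Illustrative usage sketch (not part of the original module), assuming an open Prisma
# connection: issue an access token and introspect it per RFC 7662. app_id/user_id are
# placeholders; an empty scope list is used to avoid assuming specific enum members.
async def _example_introspect(app_id: str, user_id: str) -> None:
    access = await create_access_token(app_id, user_id, [])
    result = await introspect_token(access.token.get_secret_value())
    assert result.active and result.token_type == "access_token"
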
async def get_oauth_application_by_id(app_id: str) -> Optional[OAuthApplicationInfo]:
    """Get OAuth application by ID"""
    app = await PrismaOAuthApplication.prisma().find_unique(where={"id": app_id})
    if not app:
        return None
    return OAuthApplicationInfo.from_db(app)


async def list_user_oauth_applications(user_id: str) -> list[OAuthApplicationInfo]:
    """Get all OAuth applications owned by a user"""
    apps = await PrismaOAuthApplication.prisma().find_many(
        where={"ownerId": user_id},
        order={"createdAt": "desc"},
    )
    return [OAuthApplicationInfo.from_db(app) for app in apps]


async def update_oauth_application(
    app_id: str,
    *,
    owner_id: str,
    is_active: Optional[bool] = None,
    logo_url: Optional[str] = None,
) -> Optional[OAuthApplicationInfo]:
    """
    Update an OAuth application's active status and/or logo URL.
    Only the owner can update their app.

    Returns the updated app info, or None if app not found or not owned by user.
    """
    # First verify ownership
    app = await PrismaOAuthApplication.prisma().find_first(
        where={"id": app_id, "ownerId": owner_id}
    )
    if not app:
        return None

    patch: OAuthApplicationUpdateInput = {}
    if is_active is not None:
        patch["isActive"] = is_active
    if logo_url:
        patch["logoUrl"] = logo_url
    if not patch:
        return OAuthApplicationInfo.from_db(app)  # return unchanged

    updated_app = await PrismaOAuthApplication.prisma().update(
        where={"id": app_id},
        data=patch,
    )
    return OAuthApplicationInfo.from_db(updated_app) if updated_app else None


# ============================================================================
# Token Cleanup
# ============================================================================


async def cleanup_expired_oauth_tokens() -> dict[str, int]:
    """
    Delete expired OAuth tokens from the database.

    This removes:
    - Expired authorization codes (10 min TTL)
    - Expired access tokens (1 hour TTL)
    - Expired refresh tokens (30 day TTL)

    Returns a dict with counts of deleted tokens by type.
    """
    now = datetime.now(timezone.utc)

    # Delete expired authorization codes
    codes_result = await PrismaOAuthAuthorizationCode.prisma().delete_many(
        where={"expiresAt": {"lt": now}}
    )

    # Delete expired access tokens
    access_result = await PrismaOAuthAccessToken.prisma().delete_many(
        where={"expiresAt": {"lt": now}}
    )

    # Delete expired refresh tokens
    refresh_result = await PrismaOAuthRefreshToken.prisma().delete_many(
        where={"expiresAt": {"lt": now}}
    )

    deleted = {
        "authorization_codes": codes_result,
        "access_tokens": access_result,
        "refresh_tokens": refresh_result,
    }

    total = sum(deleted.values())
    if total > 0:
        logger.info(f"Cleaned up {total} expired OAuth tokens: {deleted}")

    return deleted

@@ -1,70 +0,0 @@
"""
Password hashing service using Argon2id.

OWASP 2024 recommended configuration:
- time_cost: 2 iterations
- memory_cost: 19456 KiB (19 MiB)
- parallelism: 1
"""

import logging

from argon2 import PasswordHasher
from argon2.exceptions import InvalidHashError, VerifyMismatchError
from argon2.profiles import RFC_9106_LOW_MEMORY

logger = logging.getLogger(__name__)

# Use RFC 9106 low-memory profile (OWASP recommended)
# time_cost=2, memory_cost=19456, parallelism=1
_hasher = PasswordHasher.from_parameters(RFC_9106_LOW_MEMORY)


def hash_password(password: str) -> str:
    """
    Hash a password using Argon2id.

    Args:
        password: The plaintext password to hash.

    Returns:
        The hashed password string (includes algorithm params and salt).
    """
    return _hasher.hash(password)


def verify_password(password_hash: str, password: str) -> bool:
    """
    Verify a password against a hash.

    Args:
        password_hash: The stored password hash.
        password: The plaintext password to verify.

    Returns:
        True if the password matches, False otherwise.
    """
    try:
        _hasher.verify(password_hash, password)
        return True
    except VerifyMismatchError:
        return False
    except InvalidHashError:
        logger.warning("Invalid password hash format encountered")
        return False


def needs_rehash(password_hash: str) -> bool:
    """
    Check if a password hash needs to be rehashed.

    This returns True if the hash was created with different parameters
    than the current configuration, allowing for transparent upgrades.

    Args:
        password_hash: The stored password hash.

    Returns:
        True if the hash should be rehashed, False otherwise.
    """
    return _hasher.check_needs_rehash(password_hash)

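# Illustrative usage sketch (not part of the original module): the typical
# verify-then-rehash flow on login, using only the helpers defined above. If the stored
# hash was created with older parameters, it is transparently upgraded on success.
def _example_login_check(stored_hash: str, submitted_password: str) -> tuple[bool, str]:
    ok = verify_password(stored_hash, submitted_password)
    new_hash = stored_hash
    if ok and needs_rehash(stored_hash):
        # Parameters changed since the hash was created; rehash with current settings.
        new_hash = hash_password(submitted_password)
    return ok, new_hash
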
@@ -1,270 +0,0 @@
"""
JWT token generation and validation for user authentication.

This module generates tokens compatible with Supabase JWT format to ensure
a smooth migration without requiring frontend changes.
"""

import hashlib
import logging
import secrets
import uuid
from datetime import datetime, timedelta, timezone
from typing import Optional

import jwt
from prisma.models import UserAuthRefreshToken
from pydantic import BaseModel

from autogpt_libs.auth.config import get_settings

logger = logging.getLogger(__name__)

# Token TTLs
ACCESS_TOKEN_TTL = timedelta(hours=1)
REFRESH_TOKEN_TTL = timedelta(days=30)

# Refresh token prefix for identification
REFRESH_TOKEN_PREFIX = "agpt_rt_"


class TokenPair(BaseModel):
    """Access and refresh token pair."""

    access_token: str
    refresh_token: str
    expires_in: int  # seconds until access token expires
    token_type: str = "bearer"


class JWTPayload(BaseModel):
    """JWT payload structure matching Supabase format."""

    sub: str  # user ID
    email: str
    phone: str = ""
    role: str = "authenticated"
    aud: str = "authenticated"
    iat: int  # issued at (unix timestamp)
    exp: int  # expiration (unix timestamp)


def create_access_token(
    user_id: str,
    email: str,
    role: str = "authenticated",
    phone: str = "",
) -> str:
    """
    Create a JWT access token.

    The token format matches Supabase JWT structure so existing backend
    validation code continues to work without modification.

    Args:
        user_id: The user's UUID.
        email: The user's email address.
        role: The user's role (default: "authenticated").
        phone: The user's phone number (optional).

    Returns:
        The encoded JWT token string.
    """
    settings = get_settings()
    now = datetime.now(timezone.utc)

    payload = {
        "sub": user_id,
        "email": email,
        "phone": phone,
        "role": role,
        "aud": "authenticated",
        "iat": int(now.timestamp()),
        "exp": int((now + ACCESS_TOKEN_TTL).timestamp()),
    }

    return jwt.encode(payload, settings.JWT_VERIFY_KEY, algorithm=settings.JWT_ALGORITHM)


def decode_access_token(token: str) -> Optional[JWTPayload]:
    """
    Decode and validate a JWT access token.

    Args:
        token: The JWT token string.

    Returns:
        The decoded payload if valid, None otherwise.
    """
    settings = get_settings()

    try:
        payload = jwt.decode(
            token,
            settings.JWT_VERIFY_KEY,
            algorithms=[settings.JWT_ALGORITHM],
            audience="authenticated",
        )
        return JWTPayload(**payload)
    except jwt.ExpiredSignatureError:
        logger.debug("Token has expired")
        return None
    except jwt.InvalidTokenError as e:
        logger.debug(f"Invalid token: {e}")
        return None


def generate_refresh_token() -> str:
    """
    Generate a cryptographically secure refresh token.

    Returns:
        A prefixed random token string.
    """
    random_bytes = secrets.token_urlsafe(32)
    return f"{REFRESH_TOKEN_PREFIX}{random_bytes}"


def hash_refresh_token(token: str) -> str:
    """
    Hash a refresh token for storage.

    Uses SHA256 for deterministic lookup (unlike passwords which use Argon2).

    Args:
        token: The plaintext refresh token.

    Returns:
        The SHA256 hex digest.
    """
    return hashlib.sha256(token.encode()).hexdigest()


async def create_refresh_token_db(
    user_id: str,
    token: Optional[str] = None,
) -> tuple[str, datetime]:
    """
    Create a refresh token and store it in the database.

    Args:
        user_id: The user's UUID.
        token: Optional pre-generated token (used in OAuth flow).

    Returns:
        Tuple of (plaintext token, expiration datetime).
    """
    if token is None:
        token = generate_refresh_token()

    token_hash = hash_refresh_token(token)
    expires_at = datetime.now(timezone.utc) + REFRESH_TOKEN_TTL

    await UserAuthRefreshToken.prisma().create(
        data={
            "id": str(uuid.uuid4()),
            "tokenHash": token_hash,
            "userId": user_id,
            "expiresAt": expires_at,
        }
    )

    return token, expires_at


async def validate_refresh_token(token: str) -> Optional[str]:
    """
    Validate a refresh token and return the associated user ID.

    Args:
        token: The plaintext refresh token.

    Returns:
        The user ID if valid, None otherwise.
    """
    token_hash = hash_refresh_token(token)

    db_token = await UserAuthRefreshToken.prisma().find_first(
        where={
            "tokenHash": token_hash,
            "revokedAt": None,
            "expiresAt": {"gt": datetime.now(timezone.utc)},
        }
    )

    if not db_token:
        return None

    return db_token.userId


async def revoke_refresh_token(token: str) -> bool:
    """
    Revoke a refresh token.

    Args:
        token: The plaintext refresh token.

    Returns:
        True if a token was revoked, False otherwise.
    """
    token_hash = hash_refresh_token(token)

    result = await UserAuthRefreshToken.prisma().update_many(
        where={
            "tokenHash": token_hash,
            "revokedAt": None,
        },
        data={"revokedAt": datetime.now(timezone.utc)},
    )

    return result > 0


async def revoke_all_user_refresh_tokens(user_id: str) -> int:
    """
    Revoke all refresh tokens for a user.

    Used for global logout or security events.

    Args:
        user_id: The user's UUID.

    Returns:
        Number of tokens revoked.
    """
    result = await UserAuthRefreshToken.prisma().update_many(
        where={
            "userId": user_id,
            "revokedAt": None,
        },
        data={"revokedAt": datetime.now(timezone.utc)},
    )

    return result


async def create_token_pair(
    user_id: str,
    email: str,
    role: str = "authenticated",
) -> TokenPair:
    """
    Create a complete token pair (access + refresh).

    Args:
        user_id: The user's UUID.
        email: The user's email.
        role: The user's role.

    Returns:
        TokenPair with access_token, refresh_token, and metadata.
    """
    access_token = create_access_token(user_id, email, role)
    refresh_token, _ = await create_refresh_token_db(user_id)

    return TokenPair(
        access_token=access_token,
        refresh_token=refresh_token,
        expires_in=int(ACCESS_TOKEN_TTL.total_seconds()),
    )

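# Illustrative usage sketch (not part of the original module): mint a token pair and
# round-trip both halves. Assumes JWT settings are configured and a database connection
# is open (create_token_pair persists the refresh token via create_refresh_token_db).
async def _example_token_round_trip(user_id: str, email: str) -> None:
    pair = await create_token_pair(user_id, email)
    payload = decode_access_token(pair.access_token)
    assert payload is not None and payload.sub == user_id
    assert await validate_refresh_token(pair.refresh_token) == user_id
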
@@ -1,9 +1,8 @@
 import logging
+import queue
 from collections import defaultdict
 from datetime import datetime, timedelta, timezone
 from enum import Enum
-from multiprocessing import Manager
-from queue import Empty
 from typing import (
     TYPE_CHECKING,
     Annotated,
@@ -1164,12 +1163,16 @@ class NodeExecutionEntry(BaseModel):
 
 class ExecutionQueue(Generic[T]):
     """
-    Queue for managing the execution of agents.
-    This will be shared between different processes
+    Thread-safe queue for managing node execution within a single graph execution.
+
+    Note: Uses queue.Queue (not multiprocessing.Queue) since all access is from
+    threads within the same process. If migrating back to ProcessPoolExecutor,
+    replace with multiprocessing.Manager().Queue() for cross-process safety.
     """
 
     def __init__(self):
-        self.queue = Manager().Queue()
+        # Thread-safe queue (not multiprocessing) — see class docstring
+        self.queue: queue.Queue[T] = queue.Queue()
 
     def add(self, execution: T) -> T:
         self.queue.put(execution)
@@ -1184,7 +1187,7 @@ class ExecutionQueue(Generic[T]):
     def get_or_none(self) -> T | None:
         try:
             return self.queue.get_nowait()
-        except Empty:
+        except queue.Empty:
             return None
 
 
@@ -0,0 +1,60 @@
"""Tests for ExecutionQueue thread-safety."""

import queue
import threading

import pytest

from backend.data.execution import ExecutionQueue


def test_execution_queue_uses_stdlib_queue():
    """Verify ExecutionQueue uses queue.Queue (not multiprocessing)."""
    q = ExecutionQueue()
    assert isinstance(q.queue, queue.Queue)


def test_basic_operations():
    """Test add, get, empty, and get_or_none."""
    q = ExecutionQueue()

    assert q.empty() is True
    assert q.get_or_none() is None

    result = q.add("item1")
    assert result == "item1"
    assert q.empty() is False

    item = q.get()
    assert item == "item1"
    assert q.empty() is True


def test_thread_safety():
    """Test concurrent access from multiple threads."""
    q = ExecutionQueue()
    results = []
    num_items = 100

    def producer():
        for i in range(num_items):
            q.add(f"item_{i}")

    def consumer():
        count = 0
        while count < num_items:
            item = q.get_or_none()
            if item is not None:
                results.append(item)
                count += 1

    producer_thread = threading.Thread(target=producer)
    consumer_thread = threading.Thread(target=consumer)

    producer_thread.start()
    consumer_thread.start()

    producer_thread.join(timeout=5)
    consumer_thread.join(timeout=5)

    assert len(results) == num_items
@@ -100,7 +100,7 @@ async def get_or_create_human_review(
         return None
     else:
         return ReviewResult(
-            data=review.payload,
+            data=review.payload if review.status == ReviewStatus.APPROVED else None,
             status=review.status,
             message=review.reviewMessage or "",
             processed=review.processed,
@@ -23,7 +23,6 @@ from dotenv import load_dotenv
 from pydantic import BaseModel, Field, ValidationError
 from sqlalchemy import MetaData, create_engine
 
-from backend.data.auth.oauth import cleanup_expired_oauth_tokens
 from backend.data.block import BlockInput
 from backend.data.execution import GraphExecutionWithNodes
 from backend.data.model import CredentialsMetaInput
@@ -243,12 +242,6 @@ def cleanup_expired_files():
     run_async(cleanup_expired_files_async())
 
 
-def cleanup_oauth_tokens():
-    """Clean up expired OAuth tokens from the database."""
-    # Wait for completion
-    run_async(cleanup_expired_oauth_tokens())
-
-
 def execution_accuracy_alerts():
     """Check execution accuracy and send alerts if drops are detected."""
     return report_execution_accuracy_alerts()
@@ -453,17 +446,6 @@ class Scheduler(AppService):
             jobstore=Jobstores.EXECUTION.value,
         )
 
-        # OAuth Token Cleanup - configurable interval
-        self.scheduler.add_job(
-            cleanup_oauth_tokens,
-            id="cleanup_oauth_tokens",
-            trigger="interval",
-            replace_existing=True,
-            seconds=config.oauth_token_cleanup_interval_hours
-            * 3600,  # Convert hours to seconds
-            jobstore=Jobstores.EXECUTION.value,
-        )
-
         # Execution Accuracy Monitoring - configurable interval
         self.scheduler.add_job(
             execution_accuracy_alerts,
@@ -622,11 +604,6 @@ class Scheduler(AppService):
         """Manually trigger cleanup of expired cloud storage files."""
         return cleanup_expired_files()
 
-    @expose
-    def execute_cleanup_oauth_tokens(self):
-        """Manually trigger cleanup of expired OAuth tokens."""
-        return cleanup_oauth_tokens()
-
     @expose
     def execute_report_execution_accuracy_alerts(self):
         """Manually trigger execution accuracy alert checking."""
@@ -1,107 +1,36 @@
from fastapi import HTTPException, Security, status
from fastapi import HTTPException, Security
from fastapi.security import APIKeyHeader, HTTPAuthorizationCredentials, HTTPBearer
from fastapi.security import APIKeyHeader
from prisma.enums import APIKeyPermission

from backend.data.auth.api_key import APIKeyInfo, validate_api_key
from backend.data.api_key import APIKeyInfo, has_permission, validate_api_key
from backend.data.auth.base import APIAuthorizationInfo
from backend.data.auth.oauth import (
    InvalidClientError,
    InvalidTokenError,
    OAuthAccessTokenInfo,
    validate_access_token,
)

api_key_header = APIKeyHeader(name="X-API-Key", auto_error=False)
bearer_auth = HTTPBearer(auto_error=False)


async def require_api_key(api_key: str | None = Security(api_key_header)) -> APIKeyInfo:
    """Middleware for API key authentication only"""
    """Base middleware for API key authentication"""
    if api_key is None:
        raise HTTPException(
        raise HTTPException(status_code=401, detail="Missing API key")
            status_code=status.HTTP_401_UNAUTHORIZED, detail="Missing API key"
        )

    api_key_obj = await validate_api_key(api_key)

    if not api_key_obj:
        raise HTTPException(
        raise HTTPException(status_code=401, detail="Invalid API key")
            status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid API key"
        )

    return api_key_obj


async def require_access_token(
    bearer: HTTPAuthorizationCredentials | None = Security(bearer_auth),
) -> OAuthAccessTokenInfo:
    """Middleware for OAuth access token authentication only"""
    if bearer is None:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Missing Authorization header",
        )

    try:
        token_info, _ = await validate_access_token(bearer.credentials)
    except (InvalidClientError, InvalidTokenError) as e:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=str(e))

    return token_info


async def require_auth(
    api_key: str | None = Security(api_key_header),
    bearer: HTTPAuthorizationCredentials | None = Security(bearer_auth),
) -> APIAuthorizationInfo:
    """
    Unified authentication middleware supporting both API keys and OAuth tokens.

    Supports two authentication methods, which are checked in order:
    1. X-API-Key header (existing API key authentication)
    2. Authorization: Bearer <token> header (OAuth access token)

    Returns:
        APIAuthorizationInfo: base class of both APIKeyInfo and OAuthAccessTokenInfo.
    """
    # Try API key first
    if api_key is not None:
        api_key_info = await validate_api_key(api_key)
        if api_key_info:
            return api_key_info
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid API key"
        )

    # Try OAuth bearer token
    if bearer is not None:
        try:
            token_info, _ = await validate_access_token(bearer.credentials)
            return token_info
        except (InvalidClientError, InvalidTokenError) as e:
            raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=str(e))

    # No credentials provided
    raise HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Missing authentication. Provide API key or access token.",
    )


def require_permission(permission: APIKeyPermission):
    """
    """Dependency function for checking specific permissions"""
    Dependency function for checking specific permissions
    (works with API keys and OAuth tokens)
    """

    async def check_permission(
        auth: APIAuthorizationInfo = Security(require_auth),
        api_key: APIKeyInfo = Security(require_api_key),
    ) -> APIAuthorizationInfo:
    ) -> APIKeyInfo:
        if permission not in auth.scopes:
        if not has_permission(api_key, permission):
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                status_code=403,
                detail=f"Missing required permission: {permission.value}",
                detail=f"API key lacks the required permission '{permission}'",
            )
        return auth
        return api_key

    return check_permission
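For orientation, here is a minimal sketch of how a route can consume the `require_permission` dependency from the middleware above. The route path and return payload are hypothetical illustrations; only the imports and the dependency pattern come from the diff itself.

```python
from fastapi import APIRouter, Security
from prisma.enums import APIKeyPermission

from backend.data.auth.base import APIAuthorizationInfo
from backend.server.external.middleware import require_permission

router = APIRouter()


@router.get("/whoami")  # hypothetical route, for illustration only
async def whoami(
    auth: APIAuthorizationInfo = Security(
        require_permission(APIKeyPermission.READ_INTEGRATIONS)
    ),
) -> dict[str, str]:
    # `auth` may be an APIKeyInfo or an OAuthAccessTokenInfo; both carry user_id
    return {"user_id": auth.user_id}
```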
@@ -16,7 +16,7 @@ from fastapi import APIRouter, Body, HTTPException, Path, Security, status
from prisma.enums import APIKeyPermission
from pydantic import BaseModel, Field, SecretStr

from backend.data.auth.base import APIAuthorizationInfo
from backend.data.api_key import APIKeyInfo
from backend.data.model import (
    APIKeyCredentials,
    Credentials,
@@ -255,7 +255,7 @@ def _get_oauth_handler_for_external(

@integrations_router.get("/providers", response_model=list[ProviderInfo])
async def list_providers(
    auth: APIAuthorizationInfo = Security(
    api_key: APIKeyInfo = Security(
        require_permission(APIKeyPermission.READ_INTEGRATIONS)
    ),
) -> list[ProviderInfo]:
@@ -319,7 +319,7 @@ async def list_providers(
async def initiate_oauth(
    provider: Annotated[str, Path(title="The OAuth provider")],
    request: OAuthInitiateRequest,
    auth: APIAuthorizationInfo = Security(
    api_key: APIKeyInfo = Security(
        require_permission(APIKeyPermission.MANAGE_INTEGRATIONS)
    ),
) -> OAuthInitiateResponse:
@@ -337,10 +337,7 @@ async def initiate_oauth(
    if not validate_callback_url(request.callback_url):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=(
            detail=f"Callback URL origin is not allowed. Allowed origins: {settings.config.external_oauth_callback_origins}",
                f"Callback URL origin is not allowed. "
                f"Allowed origins: {settings.config.external_oauth_callback_origins}",
            ),
        )

    # Validate provider
@@ -362,15 +359,13 @@ async def initiate_oauth(
    )

    # Store state token with external flow metadata
    # Note: initiated_by_api_key_id is only available for API key auth, not OAuth
    api_key_id = getattr(auth, "id", None) if auth.type == "api_key" else None
    state_token, code_challenge = await creds_manager.store.store_state_token(
        user_id=auth.user_id,
        user_id=api_key.user_id,
        provider=provider if isinstance(provider_name, str) else provider_name.value,
        scopes=request.scopes,
        callback_url=request.callback_url,
        state_metadata=request.state_metadata,
        initiated_by_api_key_id=api_key_id,
        initiated_by_api_key_id=api_key.id,
    )

    # Build login URL
@@ -398,7 +393,7 @@ async def initiate_oauth(
async def complete_oauth(
    provider: Annotated[str, Path(title="The OAuth provider")],
    request: OAuthCompleteRequest,
    auth: APIAuthorizationInfo = Security(
    api_key: APIKeyInfo = Security(
        require_permission(APIKeyPermission.MANAGE_INTEGRATIONS)
    ),
) -> OAuthCompleteResponse:
@@ -411,7 +406,7 @@ async def complete_oauth(
    """
    # Verify state token
    valid_state = await creds_manager.store.verify_state_token(
        auth.user_id, request.state_token, provider
        api_key.user_id, request.state_token, provider
    )

    if not valid_state:
@@ -458,7 +453,7 @@ async def complete_oauth(
    )

    # Store credentials
    await creds_manager.create(auth.user_id, credentials)
    await creds_manager.create(api_key.user_id, credentials)

    logger.info(f"Successfully completed external OAuth for provider {provider}")

@@ -475,7 +470,7 @@ async def complete_oauth(

@integrations_router.get("/credentials", response_model=list[CredentialSummary])
async def list_credentials(
    auth: APIAuthorizationInfo = Security(
    api_key: APIKeyInfo = Security(
        require_permission(APIKeyPermission.READ_INTEGRATIONS)
    ),
) -> list[CredentialSummary]:
@@ -484,7 +479,7 @@ async def list_credentials(

    Returns metadata about each credential without exposing sensitive tokens.
    """
    credentials = await creds_manager.store.get_all_creds(auth.user_id)
    credentials = await creds_manager.store.get_all_creds(api_key.user_id)
    return [
        CredentialSummary(
            id=cred.id,
@@ -504,7 +499,7 @@ async def list_credentials(
)
async def list_credentials_by_provider(
    provider: Annotated[str, Path(title="The provider to list credentials for")],
    auth: APIAuthorizationInfo = Security(
    api_key: APIKeyInfo = Security(
        require_permission(APIKeyPermission.READ_INTEGRATIONS)
    ),
) -> list[CredentialSummary]:
@@ -512,7 +507,7 @@ async def list_credentials_by_provider(
    List credentials for a specific provider.
    """
    credentials = await creds_manager.store.get_creds_by_provider(
        auth.user_id, provider
        api_key.user_id, provider
    )
    return [
        CredentialSummary(
@@ -541,7 +536,7 @@ async def create_credential(
        CreateUserPasswordCredentialRequest,
        CreateHostScopedCredentialRequest,
    ] = Body(..., discriminator="type"),
    auth: APIAuthorizationInfo = Security(
    api_key: APIKeyInfo = Security(
        require_permission(APIKeyPermission.MANAGE_INTEGRATIONS)
    ),
) -> CreateCredentialResponse:
@@ -596,7 +591,7 @@ async def create_credential(

    # Store credentials
    try:
        await creds_manager.create(auth.user_id, credentials)
        await creds_manager.create(api_key.user_id, credentials)
    except Exception as e:
        logger.error(f"Failed to store credentials: {e}")
        raise HTTPException(
@@ -628,7 +623,7 @@ class DeleteCredentialResponse(BaseModel):
async def delete_credential(
    provider: Annotated[str, Path(title="The provider")],
    cred_id: Annotated[str, Path(title="The credential ID to delete")],
    auth: APIAuthorizationInfo = Security(
    api_key: APIKeyInfo = Security(
        require_permission(APIKeyPermission.DELETE_INTEGRATIONS)
    ),
) -> DeleteCredentialResponse:
@@ -639,7 +634,7 @@ async def delete_credential(
    use the main API's delete endpoint which handles webhook cleanup and
    token revocation.
    """
    creds = await creds_manager.store.get_creds_by_id(auth.user_id, cred_id)
    creds = await creds_manager.store.get_creds_by_id(api_key.user_id, cred_id)
    if not creds:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND, detail="Credentials not found"
@@ -650,6 +645,6 @@ async def delete_credential(
            detail="Credentials do not match the specified provider",
        )

    await creds_manager.delete(auth.user_id, cred_id)
    await creds_manager.delete(api_key.user_id, cred_id)

    return DeleteCredentialResponse(deleted=True, credentials_id=cred_id)
@@ -14,7 +14,7 @@ from fastapi import APIRouter, Security
from prisma.enums import APIKeyPermission
from pydantic import BaseModel, Field

from backend.data.auth.base import APIAuthorizationInfo
from backend.data.api_key import APIKeyInfo
from backend.server.external.middleware import require_permission
from backend.server.v2.chat.model import ChatSession
from backend.server.v2.chat.tools import find_agent_tool, run_agent_tool
@@ -24,9 +24,9 @@ logger = logging.getLogger(__name__)

tools_router = APIRouter(prefix="/tools", tags=["tools"])

# Note: We use Security() as a function parameter dependency (auth: APIAuthorizationInfo = Security(...))
# Note: We use Security() as a function parameter dependency (api_key: APIKeyInfo = Security(...))
# rather than in the decorator's dependencies= list. This avoids duplicate permission checks
# while still enforcing auth AND giving us access to auth for extracting user_id.
# while still enforcing auth AND giving us access to the api_key for extracting user_id.


# Request models
@@ -80,9 +80,7 @@ def _create_ephemeral_session(user_id: str | None) -> ChatSession:
)
async def find_agent(
    request: FindAgentRequest,
    auth: APIAuthorizationInfo = Security(
    api_key: APIKeyInfo = Security(require_permission(APIKeyPermission.USE_TOOLS)),
        require_permission(APIKeyPermission.USE_TOOLS)
    ),
) -> dict[str, Any]:
    """
    Search for agents in the marketplace based on capabilities and user needs.
@@ -93,9 +91,9 @@ async def find_agent(
    Returns:
        List of matching agents or no results response
    """
    session = _create_ephemeral_session(auth.user_id)
    session = _create_ephemeral_session(api_key.user_id)
    result = await find_agent_tool._execute(
        user_id=auth.user_id,
        user_id=api_key.user_id,
        session=session,
        query=request.query,
    )
@@ -107,9 +105,7 @@ async def find_agent(
)
async def run_agent(
    request: RunAgentRequest,
    auth: APIAuthorizationInfo = Security(
    api_key: APIKeyInfo = Security(require_permission(APIKeyPermission.USE_TOOLS)),
        require_permission(APIKeyPermission.USE_TOOLS)
    ),
) -> dict[str, Any]:
    """
    Run or schedule an agent from the marketplace.
@@ -133,9 +129,9 @@ async def run_agent(
    - execution_started: If agent was run or scheduled successfully
    - error: If something went wrong
    """
    session = _create_ephemeral_session(auth.user_id)
    session = _create_ephemeral_session(api_key.user_id)
    result = await run_agent_tool._execute(
        user_id=auth.user_id,
        user_id=api_key.user_id,
        session=session,
        username_agent_slug=request.username_agent_slug,
        inputs=request.inputs,
@@ -5,7 +5,6 @@ from typing import Annotated, Any, Literal, Optional, Sequence

from fastapi import APIRouter, Body, HTTPException, Security
from prisma.enums import AgentExecutionStatus, APIKeyPermission
from pydantic import BaseModel, Field
from typing_extensions import TypedDict

import backend.data.block
@@ -13,8 +12,7 @@ import backend.server.v2.store.cache as store_cache
import backend.server.v2.store.model as store_model
from backend.data import execution as execution_db
from backend.data import graph as graph_db
from backend.data import user as user_db
from backend.data.api_key import APIKeyInfo
from backend.data.auth.base import APIAuthorizationInfo
from backend.data.block import BlockInput, CompletedBlockOutput
from backend.executor.utils import add_graph_execution
from backend.server.external.middleware import require_permission
@@ -26,33 +24,27 @@ logger = logging.getLogger(__name__)
v1_router = APIRouter()


class UserInfoResponse(BaseModel):
class NodeOutput(TypedDict):
    id: str
    key: str
    name: Optional[str]
    value: Any
    email: str
    timezone: str = Field(
        description="The user's last known timezone (e.g. 'Europe/Amsterdam'), "
        "or 'not-set' if not set"
    )


@v1_router.get(
class ExecutionNode(TypedDict):
    path="/me",
    node_id: str
    tags=["user", "meta"],
    input: Any
)
    output: dict[str, Any]
async def get_user_info(
    auth: APIAuthorizationInfo = Security(
        require_permission(APIKeyPermission.IDENTITY)
    ),
) -> UserInfoResponse:
    user = await user_db.get_user_by_id(auth.user_id)

    return UserInfoResponse(
        id=user.id,
class ExecutionNodeOutput(TypedDict):
        name=user.name,
    node_id: str
        email=user.email,
    outputs: list[NodeOutput]
        timezone=user.timezone,
    )
class GraphExecutionResult(TypedDict):
    execution_id: str
    status: str
    nodes: list[ExecutionNode]
    output: Optional[list[dict[str, str]]]


@v1_router.get(
@@ -73,9 +65,7 @@ async def get_graph_blocks() -> Sequence[dict[Any, Any]]:
async def execute_graph_block(
    block_id: str,
    data: BlockInput,
    auth: APIAuthorizationInfo = Security(
    api_key: APIKeyInfo = Security(require_permission(APIKeyPermission.EXECUTE_BLOCK)),
        require_permission(APIKeyPermission.EXECUTE_BLOCK)
    ),
) -> CompletedBlockOutput:
    obj = backend.data.block.get_block(block_id)
    if not obj:
@@ -95,14 +85,12 @@ async def execute_graph(
    graph_id: str,
    graph_version: int,
    node_input: Annotated[dict[str, Any], Body(..., embed=True, default_factory=dict)],
    auth: APIAuthorizationInfo = Security(
    api_key: APIKeyInfo = Security(require_permission(APIKeyPermission.EXECUTE_GRAPH)),
        require_permission(APIKeyPermission.EXECUTE_GRAPH)
    ),
) -> dict[str, Any]:
    try:
        graph_exec = await add_graph_execution(
            graph_id=graph_id,
            user_id=auth.user_id,
            user_id=api_key.user_id,
            inputs=node_input,
            graph_version=graph_version,
        )
@@ -112,19 +100,6 @@ async def execute_graph(
        raise HTTPException(status_code=400, detail=msg)


class ExecutionNode(TypedDict):
    node_id: str
    input: Any
    output: dict[str, Any]


class GraphExecutionResult(TypedDict):
    execution_id: str
    status: str
    nodes: list[ExecutionNode]
    output: Optional[list[dict[str, str]]]


@v1_router.get(
    path="/graphs/{graph_id}/executions/{graph_exec_id}/results",
    tags=["graphs"],
@@ -132,12 +107,10 @@ class GraphExecutionResult(TypedDict):
async def get_graph_execution_results(
    graph_id: str,
    graph_exec_id: str,
    auth: APIAuthorizationInfo = Security(
    api_key: APIKeyInfo = Security(require_permission(APIKeyPermission.READ_GRAPH)),
        require_permission(APIKeyPermission.READ_GRAPH)
    ),
) -> GraphExecutionResult:
    graph_exec = await execution_db.get_graph_execution(
        user_id=auth.user_id,
        user_id=api_key.user_id,
        execution_id=graph_exec_id,
        include_node_executions=True,
    )
@@ -149,7 +122,7 @@ async def get_graph_execution_results(
    if not await graph_db.get_graph(
        graph_id=graph_exec.graph_id,
        version=graph_exec.graph_version,
        user_id=auth.user_id,
        user_id=api_key.user_id,
    ):
        raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.")

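For reference, a rough client sketch of calling the execution-results endpoint shown above with either credential type. The host, the `/external-api/v1` prefix, and the placeholder IDs are assumptions; only the path template and the two header forms come from the diff.

```python
import httpx

# Assumed base URL; the real deployment host and router prefix may differ.
BASE = "https://platform.example.com/external-api/v1"
PATH = "/graphs/GRAPH_ID/executions/EXEC_ID/results"

# With an API key:
r1 = httpx.get(f"{BASE}{PATH}", headers={"X-API-Key": "agpt_..."})

# Or with an OAuth access token issued by the platform's /oauth/token endpoint:
r2 = httpx.get(f"{BASE}{PATH}", headers={"Authorization": "Bearer ACCESS_TOKEN"})
```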
@@ -4,7 +4,7 @@ from typing import Any, Literal, Optional
import pydantic
from prisma.enums import OnboardingStep

from backend.data.auth.api_key import APIKeyInfo, APIKeyPermission
from backend.data.api_key import APIKeyInfo, APIKeyPermission
from backend.data.graph import Graph
from backend.util.timezone_name import TimeZoneName

@@ -21,9 +21,7 @@ import backend.data.db
import backend.data.graph
import backend.data.user
import backend.integrations.webhooks.utils
import backend.server.routers.oauth
import backend.server.routers.postmark.postmark
import backend.server.routers.user_auth
import backend.server.routers.v1
import backend.server.v2.admin.credit_admin_routes
import backend.server.v2.admin.execution_analytics_routes
@@ -299,16 +297,6 @@ app.include_router(
    tags=["v2", "chat"],
    prefix="/api/chat",
)
app.include_router(
    backend.server.routers.oauth.router,
    tags=["oauth"],
    prefix="/api/oauth",
)
app.include_router(
    backend.server.routers.user_auth.router,
    tags=["user-auth"],
    prefix="/api",
)

app.mount("/external-api", external_app)

@@ -1,833 +0,0 @@
"""
OAuth 2.0 Provider Endpoints

Implements OAuth 2.0 Authorization Code flow with PKCE support.

Flow:
1. User clicks "Login with AutoGPT" in 3rd party app
2. App redirects user to /oauth/authorize with client_id, redirect_uri, scope, state
3. User sees consent screen (if not already logged in, redirects to login first)
4. User approves → backend creates authorization code
5. User redirected back to app with code
6. App exchanges code for access/refresh tokens at /oauth/token
7. App uses access token to call external API endpoints
"""
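For reference, a minimal client-side sketch of how the PKCE values used throughout this module are typically produced (standard S256 derivation; this snippet is illustrative and not part of the codebase):

```python
import base64
import hashlib
import secrets

# The client keeps code_verifier private and sends only the derived challenge
# in the authorization request; the verifier is sent later to /oauth/token.
code_verifier = base64.urlsafe_b64encode(secrets.token_bytes(32)).rstrip(b"=").decode()
code_challenge = (
    base64.urlsafe_b64encode(hashlib.sha256(code_verifier.encode()).digest())
    .rstrip(b"=")
    .decode()
)
```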
import io
import logging
import os
import uuid
from datetime import datetime
from typing import Literal, Optional
from urllib.parse import urlencode

from autogpt_libs.auth import get_user_id
from fastapi import APIRouter, Body, HTTPException, Security, UploadFile, status
from gcloud.aio import storage as async_storage
from PIL import Image
from prisma.enums import APIKeyPermission
from pydantic import BaseModel, Field

from backend.data.auth.oauth import (
    InvalidClientError,
    InvalidGrantError,
    OAuthApplicationInfo,
    TokenIntrospectionResult,
    consume_authorization_code,
    create_access_token,
    create_authorization_code,
    create_refresh_token,
    get_oauth_application,
    get_oauth_application_by_id,
    introspect_token,
    list_user_oauth_applications,
    refresh_tokens,
    revoke_access_token,
    revoke_refresh_token,
    update_oauth_application,
    validate_client_credentials,
    validate_redirect_uri,
    validate_scopes,
)
from backend.util.settings import Settings
from backend.util.virus_scanner import scan_content_safe

settings = Settings()
logger = logging.getLogger(__name__)

router = APIRouter()


# ============================================================================
# Request/Response Models
# ============================================================================


class TokenResponse(BaseModel):
    """OAuth 2.0 token response"""

    token_type: Literal["Bearer"] = "Bearer"
    access_token: str
    access_token_expires_at: datetime
    refresh_token: str
    refresh_token_expires_at: datetime
    scopes: list[str]


class ErrorResponse(BaseModel):
    """OAuth 2.0 error response"""

    error: str
    error_description: Optional[str] = None


class OAuthApplicationPublicInfo(BaseModel):
    """Public information about an OAuth application (for consent screen)"""

    name: str
    description: Optional[str] = None
    logo_url: Optional[str] = None
    scopes: list[str]


# ============================================================================
# Application Info Endpoint
# ============================================================================


@router.get(
    "/app/{client_id}",
    responses={
        404: {"description": "Application not found or disabled"},
    },
)
async def get_oauth_app_info(
    client_id: str, user_id: str = Security(get_user_id)
) -> OAuthApplicationPublicInfo:
    """
    Get public information about an OAuth application.

    This endpoint is used by the consent screen to display application details
    to the user before they authorize access.

    Returns:
    - name: Application name
    - description: Application description (if provided)
    - scopes: List of scopes the application is allowed to request
    """
    app = await get_oauth_application(client_id)
    if not app or not app.is_active:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Application not found",
        )

    return OAuthApplicationPublicInfo(
        name=app.name,
        description=app.description,
        logo_url=app.logo_url,
        scopes=[s.value for s in app.scopes],
    )

# ============================================================================
# Authorization Endpoint
# ============================================================================


class AuthorizeRequest(BaseModel):
    """OAuth 2.0 authorization request"""

    client_id: str = Field(description="Client identifier")
    redirect_uri: str = Field(description="Redirect URI")
    scopes: list[str] = Field(description="List of scopes")
    state: str = Field(description="Anti-CSRF token from client")
    response_type: str = Field(
        default="code", description="Must be 'code' for authorization code flow"
    )
    code_challenge: str = Field(description="PKCE code challenge (required)")
    code_challenge_method: Literal["S256", "plain"] = Field(
        default="S256", description="PKCE code challenge method (S256 recommended)"
    )


class AuthorizeResponse(BaseModel):
    """OAuth 2.0 authorization response with redirect URL"""

    redirect_url: str = Field(description="URL to redirect the user to")


@router.post("/authorize")
async def authorize(
    request: AuthorizeRequest = Body(),
    user_id: str = Security(get_user_id),
) -> AuthorizeResponse:
    """
    OAuth 2.0 Authorization Endpoint

    User must be logged in (authenticated with Supabase JWT).
    This endpoint creates an authorization code and returns a redirect URL.

    PKCE (Proof Key for Code Exchange) is REQUIRED for all authorization requests.

    The frontend consent screen should call this endpoint after the user approves,
    then redirect the user to the returned `redirect_url`.

    Request Body:
    - client_id: The OAuth application's client ID
    - redirect_uri: Where to redirect after authorization (must match registered URI)
    - scopes: List of permissions (e.g., "EXECUTE_GRAPH READ_GRAPH")
    - state: Anti-CSRF token provided by client (will be returned in redirect)
    - response_type: Must be "code" (for authorization code flow)
    - code_challenge: PKCE code challenge (required)
    - code_challenge_method: "S256" (recommended) or "plain"

    Returns:
    - redirect_url: The URL to redirect the user to (includes authorization code)

    Error cases return a redirect_url with error parameters, or raise HTTPException
    for critical errors (like invalid redirect_uri).
    """
    try:
        # Validate response_type
        if request.response_type != "code":
            return _error_redirect_url(
                request.redirect_uri,
                request.state,
                "unsupported_response_type",
                "Only 'code' response type is supported",
            )

        # Get application
        app = await get_oauth_application(request.client_id)
        if not app:
            return _error_redirect_url(
                request.redirect_uri,
                request.state,
                "invalid_client",
                "Unknown client_id",
            )

        if not app.is_active:
            return _error_redirect_url(
                request.redirect_uri,
                request.state,
                "invalid_client",
                "Application is not active",
            )

        # Validate redirect URI
        if not validate_redirect_uri(app, request.redirect_uri):
            # For invalid redirect_uri, we can't redirect safely
            # Must return error instead
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=(
                    "Invalid redirect_uri. "
                    f"Must be one of: {', '.join(app.redirect_uris)}"
                ),
            )

        # Parse and validate scopes
        try:
            requested_scopes = [APIKeyPermission(s.strip()) for s in request.scopes]
        except ValueError as e:
            return _error_redirect_url(
                request.redirect_uri,
                request.state,
                "invalid_scope",
                f"Invalid scope: {e}",
            )

        if not requested_scopes:
            return _error_redirect_url(
                request.redirect_uri,
                request.state,
                "invalid_scope",
                "At least one scope is required",
            )

        if not validate_scopes(app, requested_scopes):
            return _error_redirect_url(
                request.redirect_uri,
                request.state,
                "invalid_scope",
                "Application is not authorized for all requested scopes. "
                f"Allowed: {', '.join(s.value for s in app.scopes)}",
            )

        # Create authorization code
        auth_code = await create_authorization_code(
            application_id=app.id,
            user_id=user_id,
            scopes=requested_scopes,
            redirect_uri=request.redirect_uri,
            code_challenge=request.code_challenge,
            code_challenge_method=request.code_challenge_method,
        )

        # Build redirect URL with authorization code
        params = {
            "code": auth_code.code,
            "state": request.state,
        }
        redirect_url = f"{request.redirect_uri}?{urlencode(params)}"

        logger.info(
            f"Authorization code issued for user #{user_id} "
            f"and app {app.name} (#{app.id})"
        )

        return AuthorizeResponse(redirect_url=redirect_url)

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error in authorization endpoint: {e}", exc_info=True)
        return _error_redirect_url(
            request.redirect_uri,
            request.state,
            "server_error",
            "An unexpected error occurred",
        )


def _error_redirect_url(
    redirect_uri: str,
    state: str,
    error: str,
    error_description: Optional[str] = None,
) -> AuthorizeResponse:
    """Helper to build redirect URL with OAuth error parameters"""
    params = {
        "error": error,
        "state": state,
    }
    if error_description:
        params["error_description"] = error_description

    redirect_url = f"{redirect_uri}?{urlencode(params)}"
    return AuthorizeResponse(redirect_url=redirect_url)

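As a rough sketch of how a consent screen could drive the endpoint above: the host and the way the user's session JWT is attached are placeholders, while the `/api/oauth` prefix matches the router registration removed from `rest.py` earlier in this diff.

```python
import httpx

resp = httpx.post(
    "https://platform.example.com/api/oauth/authorize",  # host is a placeholder
    json={
        "client_id": "YOUR_CLIENT_ID",
        "redirect_uri": "https://app.example.com/callback",
        "scopes": ["EXECUTE_GRAPH", "READ_GRAPH"],
        "state": "opaque-anti-csrf-value",
        "response_type": "code",
        "code_challenge": code_challenge,  # from the PKCE sketch above
        "code_challenge_method": "S256",
    },
    headers={"Authorization": "Bearer USER_SESSION_JWT"},  # placeholder session
)
redirect_url = resp.json()["redirect_url"]  # then send the browser here
```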
# ============================================================================
# Token Endpoint
# ============================================================================


class TokenRequestByCode(BaseModel):
    grant_type: Literal["authorization_code"]
    code: str = Field(description="Authorization code")
    redirect_uri: str = Field(
        description="Redirect URI (must match authorization request)"
    )
    client_id: str
    client_secret: str
    code_verifier: str = Field(description="PKCE code verifier")


class TokenRequestByRefreshToken(BaseModel):
    grant_type: Literal["refresh_token"]
    refresh_token: str
    client_id: str
    client_secret: str


@router.post("/token")
async def token(
    request: TokenRequestByCode | TokenRequestByRefreshToken = Body(),
) -> TokenResponse:
    """
    OAuth 2.0 Token Endpoint

    Exchanges authorization code or refresh token for access token.

    Grant Types:
    1. authorization_code: Exchange authorization code for tokens
       - Required: grant_type, code, redirect_uri, client_id, client_secret
       - Optional: code_verifier (required if PKCE was used)

    2. refresh_token: Exchange refresh token for new access token
       - Required: grant_type, refresh_token, client_id, client_secret

    Returns:
    - access_token: Bearer token for API access (1 hour TTL)
    - token_type: "Bearer"
    - expires_in: Seconds until access token expires
    - refresh_token: Token for refreshing access (30 days TTL)
    - scopes: List of scopes
    """
    # Validate client credentials
    try:
        app = await validate_client_credentials(
            request.client_id, request.client_secret
        )
    except InvalidClientError as e:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail=str(e),
        )

    # Handle authorization_code grant
    if request.grant_type == "authorization_code":
        # Consume authorization code
        try:
            user_id, scopes = await consume_authorization_code(
                code=request.code,
                application_id=app.id,
                redirect_uri=request.redirect_uri,
                code_verifier=request.code_verifier,
            )
        except InvalidGrantError as e:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=str(e),
            )

        # Create access and refresh tokens
        access_token = await create_access_token(app.id, user_id, scopes)
        refresh_token = await create_refresh_token(app.id, user_id, scopes)

        logger.info(
            f"Access token issued for user #{user_id} and app {app.name} (#{app.id})"
            "via authorization code"
        )

        if not access_token.token or not refresh_token.token:
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail="Failed to generate tokens",
            )

        return TokenResponse(
            token_type="Bearer",
            access_token=access_token.token.get_secret_value(),
            access_token_expires_at=access_token.expires_at,
            refresh_token=refresh_token.token.get_secret_value(),
            refresh_token_expires_at=refresh_token.expires_at,
            scopes=list(s.value for s in scopes),
        )

    # Handle refresh_token grant
    elif request.grant_type == "refresh_token":
        # Refresh access token
        try:
            new_access_token, new_refresh_token = await refresh_tokens(
                request.refresh_token, app.id
            )
        except InvalidGrantError as e:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=str(e),
            )

        logger.info(
            f"Tokens refreshed for user #{new_access_token.user_id} "
            f"by app {app.name} (#{app.id})"
        )

        if not new_access_token.token or not new_refresh_token.token:
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail="Failed to generate tokens",
            )

        return TokenResponse(
            token_type="Bearer",
            access_token=new_access_token.token.get_secret_value(),
            access_token_expires_at=new_access_token.expires_at,
            refresh_token=new_refresh_token.token.get_secret_value(),
            refresh_token_expires_at=new_refresh_token.expires_at,
            scopes=list(s.value for s in new_access_token.scopes),
        )

    else:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Unsupported grant_type: {request.grant_type}. "
            "Must be 'authorization_code' or 'refresh_token'",
        )

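A matching client-side sketch of the two grant types handled above, using httpx; the host and credential strings are placeholders, and `code_verifier` refers to the PKCE sketch near the top of this file.

```python
import httpx

TOKEN_URL = "https://platform.example.com/api/oauth/token"  # host is a placeholder

# authorization_code grant: redeem the code received on the redirect
tokens = httpx.post(
    TOKEN_URL,
    json={
        "grant_type": "authorization_code",
        "code": "AUTH_CODE_FROM_REDIRECT",
        "redirect_uri": "https://app.example.com/callback",
        "client_id": "YOUR_CLIENT_ID",
        "client_secret": "YOUR_CLIENT_SECRET",
        "code_verifier": code_verifier,
    },
).json()

# refresh_token grant: rotate the pair before the access token expires
refreshed = httpx.post(
    TOKEN_URL,
    json={
        "grant_type": "refresh_token",
        "refresh_token": tokens["refresh_token"],
        "client_id": "YOUR_CLIENT_ID",
        "client_secret": "YOUR_CLIENT_SECRET",
    },
).json()
```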
# ============================================================================
# Token Introspection Endpoint
# ============================================================================


@router.post("/introspect")
async def introspect(
    token: str = Body(description="Token to introspect"),
    token_type_hint: Optional[Literal["access_token", "refresh_token"]] = Body(
        None, description="Hint about token type ('access_token' or 'refresh_token')"
    ),
    client_id: str = Body(description="Client identifier"),
    client_secret: str = Body(description="Client secret"),
) -> TokenIntrospectionResult:
    """
    OAuth 2.0 Token Introspection Endpoint (RFC 7662)

    Allows clients to check if a token is valid and get its metadata.

    Returns:
    - active: Whether the token is currently active
    - scopes: List of authorized scopes (if active)
    - client_id: The client the token was issued to (if active)
    - user_id: The user the token represents (if active)
    - exp: Expiration timestamp (if active)
    - token_type: "access_token" or "refresh_token" (if active)
    """
    # Validate client credentials
    try:
        await validate_client_credentials(client_id, client_secret)
    except InvalidClientError as e:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail=str(e),
        )

    # Introspect the token
    return await introspect_token(token, token_type_hint)


# ============================================================================
# Token Revocation Endpoint
# ============================================================================


@router.post("/revoke")
async def revoke(
    token: str = Body(description="Token to revoke"),
    token_type_hint: Optional[Literal["access_token", "refresh_token"]] = Body(
        None, description="Hint about token type ('access_token' or 'refresh_token')"
    ),
    client_id: str = Body(description="Client identifier"),
    client_secret: str = Body(description="Client secret"),
):
    """
    OAuth 2.0 Token Revocation Endpoint (RFC 7009)

    Allows clients to revoke an access or refresh token.

    Note: Revoking a refresh token does NOT revoke associated access tokens.
    Revoking an access token does NOT revoke the associated refresh token.
    """
    # Validate client credentials
    try:
        app = await validate_client_credentials(client_id, client_secret)
    except InvalidClientError as e:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail=str(e),
        )

    # Try to revoke as access token first
    # Note: We pass app.id to ensure the token belongs to the authenticated app
    if token_type_hint != "refresh_token":
        revoked = await revoke_access_token(token, app.id)
        if revoked:
            logger.info(
                f"Access token revoked for app {app.name} (#{app.id}); "
                f"user #{revoked.user_id}"
            )
            return {"status": "ok"}

    # Try to revoke as refresh token
    revoked = await revoke_refresh_token(token, app.id)
    if revoked:
        logger.info(
            f"Refresh token revoked for app {app.name} (#{app.id}); "
            f"user #{revoked.user_id}"
        )
        return {"status": "ok"}

    # Per RFC 7009, revocation endpoint returns 200 even if token not found
    # or if token belongs to a different application.
    # This prevents token scanning attacks.
    logger.warning(f"Unsuccessful token revocation attempt by app {app.name} #{app.id}")
    return {"status": "ok"}

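And a short sketch of the two maintenance calls above; the endpoint paths follow the decorators in this file, while the host and token values are placeholders.

```python
import httpx

OAUTH_BASE = "https://platform.example.com/api/oauth"  # host is a placeholder

# RFC 7662-style introspection: is this access token still active?
info = httpx.post(
    f"{OAUTH_BASE}/introspect",
    json={
        "token": "ACCESS_TOKEN",
        "token_type_hint": "access_token",
        "client_id": "YOUR_CLIENT_ID",
        "client_secret": "YOUR_CLIENT_SECRET",
    },
).json()

# RFC 7009-style revocation: always returns 200, even for unknown tokens
httpx.post(
    f"{OAUTH_BASE}/revoke",
    json={
        "token": "REFRESH_TOKEN",
        "token_type_hint": "refresh_token",
        "client_id": "YOUR_CLIENT_ID",
        "client_secret": "YOUR_CLIENT_SECRET",
    },
)
```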
# ============================================================================
# Application Management Endpoints (for app owners)
# ============================================================================


@router.get("/apps/mine")
async def list_my_oauth_apps(
    user_id: str = Security(get_user_id),
) -> list[OAuthApplicationInfo]:
    """
    List all OAuth applications owned by the current user.

    Returns a list of OAuth applications with their details including:
    - id, name, description, logo_url
    - client_id (public identifier)
    - redirect_uris, grant_types, scopes
    - is_active status
    - created_at, updated_at timestamps

    Note: client_secret is never returned for security reasons.
    """
    return await list_user_oauth_applications(user_id)


@router.patch("/apps/{app_id}/status")
async def update_app_status(
    app_id: str,
    user_id: str = Security(get_user_id),
    is_active: bool = Body(description="Whether the app should be active", embed=True),
) -> OAuthApplicationInfo:
    """
    Enable or disable an OAuth application.

    Only the application owner can update the status.
    When disabled, the application cannot be used for new authorizations
    and existing access tokens will fail validation.

    Returns the updated application info.
    """
    updated_app = await update_oauth_application(
        app_id=app_id,
        owner_id=user_id,
        is_active=is_active,
    )

    if not updated_app:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Application not found or you don't have permission to update it",
        )

    action = "enabled" if is_active else "disabled"
    logger.info(f"OAuth app {updated_app.name} (#{app_id}) {action} by user #{user_id}")

    return updated_app


class UpdateAppLogoRequest(BaseModel):
    logo_url: str = Field(description="URL of the uploaded logo image")


@router.patch("/apps/{app_id}/logo")
async def update_app_logo(
    app_id: str,
    request: UpdateAppLogoRequest = Body(),
    user_id: str = Security(get_user_id),
) -> OAuthApplicationInfo:
    """
    Update the logo URL for an OAuth application.

    Only the application owner can update the logo.
    The logo should be uploaded first using the media upload endpoint,
    then this endpoint is called with the resulting URL.

    Logo requirements:
    - Must be square (1:1 aspect ratio)
    - Minimum 512x512 pixels
    - Maximum 2048x2048 pixels

    Returns the updated application info.
    """
    if (
        not (app := await get_oauth_application_by_id(app_id))
        or app.owner_id != user_id
    ):
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="OAuth App not found",
        )

    # Delete the current app logo file (if any and it's in our cloud storage)
    await _delete_app_current_logo_file(app)

    updated_app = await update_oauth_application(
        app_id=app_id,
        owner_id=user_id,
        logo_url=request.logo_url,
    )

    if not updated_app:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Application not found or you don't have permission to update it",
        )

    logger.info(
        f"OAuth app {updated_app.name} (#{app_id}) logo updated by user #{user_id}"
    )

    return updated_app


# Logo upload constraints
LOGO_MIN_SIZE = 512
LOGO_MAX_SIZE = 2048
LOGO_ALLOWED_TYPES = {"image/jpeg", "image/png", "image/webp"}
LOGO_MAX_FILE_SIZE = 3 * 1024 * 1024  # 3MB


@router.post("/apps/{app_id}/logo/upload")
async def upload_app_logo(
    app_id: str,
    file: UploadFile,
    user_id: str = Security(get_user_id),
) -> OAuthApplicationInfo:
    """
    Upload a logo image for an OAuth application.

    Requirements:
    - Image must be square (1:1 aspect ratio)
    - Minimum 512x512 pixels
    - Maximum 2048x2048 pixels
    - Allowed formats: JPEG, PNG, WebP
    - Maximum file size: 3MB

    The image is uploaded to cloud storage and the app's logoUrl is updated.
    Returns the updated application info.
    """
    # Verify ownership to reduce vulnerability to DoS(torage) or DoM(oney) attacks
    if (
        not (app := await get_oauth_application_by_id(app_id))
        or app.owner_id != user_id
    ):
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="OAuth App not found",
        )

    # Check GCS configuration
    if not settings.config.media_gcs_bucket_name:
        raise HTTPException(
            status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
            detail="Media storage is not configured",
        )

    # Validate content type
    content_type = file.content_type
    if content_type not in LOGO_ALLOWED_TYPES:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Invalid file type. Allowed: JPEG, PNG, WebP. Got: {content_type}",
        )

    # Read file content
    try:
        file_bytes = await file.read()
    except Exception as e:
        logger.error(f"Error reading logo file: {e}")
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Failed to read uploaded file",
        )

    # Check file size
    if len(file_bytes) > LOGO_MAX_FILE_SIZE:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=(
                "File too large. "
                f"Maximum size is {LOGO_MAX_FILE_SIZE // 1024 // 1024}MB"
            ),
        )

    # Validate image dimensions
    try:
        image = Image.open(io.BytesIO(file_bytes))
        width, height = image.size

        if width != height:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=f"Logo must be square. Got {width}x{height}",
            )

        if width < LOGO_MIN_SIZE:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=f"Logo too small. Minimum {LOGO_MIN_SIZE}x{LOGO_MIN_SIZE}. "
                f"Got {width}x{height}",
            )

        if width > LOGO_MAX_SIZE:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=f"Logo too large. Maximum {LOGO_MAX_SIZE}x{LOGO_MAX_SIZE}. "
                f"Got {width}x{height}",
            )
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error validating logo image: {e}")
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Invalid image file",
        )

    # Scan for viruses
    filename = file.filename or "logo"
    await scan_content_safe(file_bytes, filename=filename)

    # Generate unique filename
    file_ext = os.path.splitext(filename)[1].lower() or ".png"
    unique_filename = f"{uuid.uuid4()}{file_ext}"
    storage_path = f"oauth-apps/{app_id}/logo/{unique_filename}"

    # Upload to GCS
    try:
        async with async_storage.Storage() as async_client:
            bucket_name = settings.config.media_gcs_bucket_name

            await async_client.upload(
                bucket_name, storage_path, file_bytes, content_type=content_type
            )

            logo_url = f"https://storage.googleapis.com/{bucket_name}/{storage_path}"
    except Exception as e:
        logger.error(f"Error uploading logo to GCS: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to upload logo",
        )

    # Delete the current app logo file (if any and it's in our cloud storage)
    await _delete_app_current_logo_file(app)

    # Update the app with the new logo URL
    updated_app = await update_oauth_application(
        app_id=app_id,
        owner_id=user_id,
        logo_url=logo_url,
    )

    if not updated_app:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Application not found or you don't have permission to update it",
        )

    logger.info(
        f"OAuth app {updated_app.name} (#{app_id}) logo uploaded by user #{user_id}"
    )

    return updated_app


async def _delete_app_current_logo_file(app: OAuthApplicationInfo):
    """
    Delete the current logo file for the given app, if there is one in our cloud storage
    """
    bucket_name = settings.config.media_gcs_bucket_name
    storage_base_url = f"https://storage.googleapis.com/{bucket_name}/"

    if app.logo_url and app.logo_url.startswith(storage_base_url):
        # Parse blob path from URL: https://storage.googleapis.com/{bucket}/{path}
        old_path = app.logo_url.replace(storage_base_url, "")
        try:
            async with async_storage.Storage() as async_client:
                await async_client.delete(bucket_name, old_path)
                logger.info(f"Deleted old logo for OAuth app #{app.id}: {old_path}")
        except Exception as e:
            # Log but don't fail - the new logo was uploaded successfully
            logger.warning(
                f"Failed to delete old logo for OAuth app #{app.id}: {e}", exc_info=e
            )
File diff suppressed because it is too large
Load Diff
@@ -1,910 +0,0 @@
|
|||||||
"""
|
|
||||||
User authentication router for native FastAPI auth.
|
|
||||||
|
|
||||||
This router provides endpoints that are compatible with the Supabase Auth API
|
|
||||||
structure, allowing the frontend to migrate without code changes.
|
|
||||||
|
|
||||||
Endpoints:
|
|
||||||
- POST /auth/signup - Register a new user
|
|
||||||
- POST /auth/login - Login with email/password
|
|
||||||
- POST /auth/logout - Logout (clear session)
|
|
||||||
- POST /auth/refresh - Refresh access token
|
|
||||||
- GET /auth/me - Get current user
|
|
||||||
- POST /auth/password/reset - Request password reset email
|
|
||||||
- POST /auth/password/set - Set new password from reset link
|
|
||||||
- GET /auth/verify-email - Verify email from magic link
|
|
||||||
- GET /auth/oauth/google/authorize - Get Google OAuth URL
|
|
||||||
- GET /auth/oauth/google/callback - Handle Google OAuth callback
|
|
||||||
|
|
||||||
Admin Endpoints:
|
|
||||||
- GET /auth/admin/users - List users (admin only)
|
|
||||||
- GET /auth/admin/users/{user_id} - Get user details (admin only)
|
|
||||||
- POST /auth/admin/users/{user_id}/impersonate - Get impersonation token (admin only)
|
|
||||||
"""
|
|
||||||
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import secrets
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime, timezone
|
|
||||||
from typing import List, Optional
|
|
||||||
from urllib.parse import urlencode
|
|
||||||
|
|
||||||
import httpx
|
|
||||||
from fastapi import APIRouter, Depends, HTTPException, Query, Request, Response
|
|
||||||
from pydantic import BaseModel, EmailStr
|
|
||||||
from prisma.models import User
|
|
||||||
|
|
||||||
from backend.data.auth.email_service import get_auth_email_service
|
|
||||||
from backend.data.auth.magic_links import (
|
|
||||||
create_email_verification_link,
|
|
||||||
create_password_reset_link,
|
|
||||||
verify_email_token,
|
|
||||||
verify_password_reset_token,
|
|
||||||
)
|
|
||||||
from backend.data.auth.password import hash_password, needs_rehash, verify_password
|
|
||||||
from backend.data.auth.tokens import (
|
|
||||||
ACCESS_TOKEN_TTL,
|
|
||||||
REFRESH_TOKEN_TTL,
|
|
||||||
create_access_token,
|
|
||||||
create_refresh_token_db,
|
|
||||||
decode_access_token,
|
|
||||||
revoke_all_user_refresh_tokens,
|
|
||||||
revoke_refresh_token,
|
|
||||||
validate_refresh_token,
|
|
||||||
)
|
|
||||||
from backend.util.settings import Settings
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
settings = Settings()
|
|
||||||
|
|
||||||
router = APIRouter(prefix="/auth", tags=["user-auth"])
|
|
||||||
|
|
||||||
# Cookie configuration
|
|
||||||
ACCESS_TOKEN_COOKIE = "access_token"
|
|
||||||
REFRESH_TOKEN_COOKIE = "refresh_token"
|
|
||||||
OAUTH_STATE_COOKIE = "oauth_state"
|
|
||||||
|
|
||||||
# Header for admin impersonation (matches existing autogpt_libs pattern)
|
|
||||||
IMPERSONATION_HEADER = "X-Act-As-User-Id"
|
|
||||||
|
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# Admin Role Detection
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
|
|
||||||
def _get_admin_domains() -> set[str]:
|
|
||||||
"""Get set of email domains that grant admin role."""
|
|
||||||
domains_str = settings.config.admin_email_domains
|
|
||||||
if not domains_str:
|
|
||||||
return set()
|
|
||||||
return {d.strip().lower() for d in domains_str.split(",") if d.strip()}
|
|
||||||
|
|
||||||
|
|
||||||
def _get_admin_emails() -> set[str]:
|
|
||||||
"""Get set of specific email addresses that grant admin role."""
|
|
||||||
emails_str = settings.config.admin_emails
|
|
||||||
if not emails_str:
|
|
||||||
return set()
|
|
||||||
return {e.strip().lower() for e in emails_str.split(",") if e.strip()}
|
|
||||||
|
|
||||||
|
|
||||||
def get_user_role(email: str) -> str:
|
|
||||||
"""
|
|
||||||
Determine user role based on email.
|
|
||||||
|
|
||||||
Returns "admin" if:
|
|
||||||
- Email domain is in admin_email_domains list
|
|
||||||
- Email is in admin_emails list
|
|
||||||
|
|
||||||
Otherwise returns "authenticated".
|
|
||||||
"""
|
|
||||||
email_lower = email.lower()
|
|
||||||
domain = email_lower.split("@")[-1] if "@" in email_lower else ""
|
|
||||||
|
|
||||||
# Check specific emails first
|
|
||||||
if email_lower in _get_admin_emails():
|
|
||||||
return "admin"
|
|
||||||
|
|
||||||
# Check domains
|
|
||||||
if domain in _get_admin_domains():
|
|
||||||
return "admin"
|
|
||||||
|
|
||||||
return "authenticated"
|
|
||||||
|
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# Request/Response Models
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
|
|
||||||
class SignupRequest(BaseModel):
|
|
||||||
email: EmailStr
|
|
||||||
password: str
|
|
||||||
|
|
||||||
|
|
||||||
class LoginRequest(BaseModel):
|
|
||||||
email: EmailStr
|
|
||||||
password: str
|
|
||||||
|
|
||||||
|
|
||||||
class PasswordResetRequest(BaseModel):
|
|
||||||
email: EmailStr
|
|
||||||
|
|
||||||
|
|
||||||
class PasswordSetRequest(BaseModel):
|
|
||||||
token: str
|
|
||||||
password: str
|
|
||||||
|
|
||||||
|
|
||||||
class UserResponse(BaseModel):
|
|
||||||
id: str
|
|
||||||
email: str
|
|
||||||
email_verified: bool
|
|
||||||
name: Optional[str] = None
|
|
||||||
created_at: datetime
|
|
||||||
role: Optional[str] = None
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def from_db(user: User, include_role: bool = False) -> "UserResponse":
|
|
||||||
return UserResponse(
|
|
||||||
id=user.id,
|
|
||||||
email=user.email,
|
|
||||||
email_verified=user.emailVerified,
|
|
||||||
name=user.name,
|
|
||||||
created_at=user.createdAt,
|
|
||||||
role=get_user_role(user.email) if include_role else None,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class AuthResponse(BaseModel):
|
|
||||||
"""Response matching Supabase auth response structure."""
|
|
||||||
|
|
||||||
user: UserResponse
|
|
||||||
access_token: str
|
|
||||||
refresh_token: str
|
|
||||||
expires_in: int
|
|
||||||
token_type: str = "bearer"
|
|
||||||
|
|
||||||
|
|
||||||
class MessageResponse(BaseModel):
|
|
||||||
message: str
|
|
||||||
|
|
||||||
|
|
||||||
class AdminUserListResponse(BaseModel):
|
|
||||||
users: List[UserResponse]
|
|
||||||
total: int
|
|
||||||
page: int
|
|
||||||
page_size: int
|
|
||||||
|
|
||||||
|
|
||||||
class ImpersonationResponse(BaseModel):
|
|
||||||
access_token: str
|
|
||||||
impersonated_user: UserResponse
|
|
||||||
expires_in: int
|
|
||||||
|
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# Cookie Helpers
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
|
|
||||||
def _is_production() -> bool:
|
|
||||||
return os.getenv("APP_ENV", "local").lower() in ("production", "prod")
|
|
||||||
|
|
||||||
|
|
||||||
def _set_auth_cookies(response: Response, access_token: str, refresh_token: str):
|
|
||||||
"""Set authentication cookies on the response."""
|
|
||||||
secure = _is_production()
|
|
||||||
|
|
||||||
# Access token: accessible to JavaScript for API calls
|
|
||||||
response.set_cookie(
|
|
||||||
key=ACCESS_TOKEN_COOKIE,
|
|
||||||
value=access_token,
|
|
||||||
httponly=False, # JS needs access for Authorization header
|
|
||||||
secure=secure,
|
|
||||||
samesite="lax",
|
|
||||||
max_age=int(ACCESS_TOKEN_TTL.total_seconds()),
|
|
||||||
path="/",
|
|
||||||
)
|
|
||||||
|
|
||||||
# Refresh token: httpOnly, restricted path
|
|
||||||
response.set_cookie(
|
|
||||||
key=REFRESH_TOKEN_COOKIE,
|
|
||||||
value=refresh_token,
|
|
||||||
httponly=True, # Not accessible to JavaScript
|
|
||||||
secure=secure,
|
|
||||||
samesite="strict",
|
|
||||||
max_age=int(REFRESH_TOKEN_TTL.total_seconds()),
|
|
||||||
path="/api/auth/refresh", # Only sent to refresh endpoint
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def _clear_auth_cookies(response: Response):
|
|
||||||
"""Clear authentication cookies."""
|
|
||||||
response.delete_cookie(key=ACCESS_TOKEN_COOKIE, path="/")
|
|
||||||
response.delete_cookie(key=REFRESH_TOKEN_COOKIE, path="/api/auth/refresh")
|
|
||||||
|
|
||||||
|
|
||||||
def _get_access_token(request: Request) -> Optional[str]:
|
|
||||||
"""Get access token from cookie or Authorization header."""
|
|
||||||
# Try cookie first
|
|
||||||
token = request.cookies.get(ACCESS_TOKEN_COOKIE)
|
|
||||||
if token:
|
|
||||||
return token
|
|
||||||
|
|
||||||
# Try Authorization header
|
|
||||||
auth_header = request.headers.get("Authorization")
|
|
||||||
if auth_header and auth_header.startswith("Bearer "):
|
|
||||||
return auth_header[7:]
|
|
||||||
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# Auth Dependencies
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
|
|
||||||
async def get_current_user_from_token(request: Request) -> Optional[User]:
|
|
||||||
"""Get the current user from the access token."""
|
|
||||||
access_token = _get_access_token(request)
|
|
||||||
if not access_token:
|
|
||||||
return None
|
|
||||||
|
|
||||||
payload = decode_access_token(access_token)
|
|
||||||
if not payload:
|
|
||||||
return None
|
|
||||||
|
|
||||||
return await User.prisma().find_unique(where={"id": payload.sub})
|
|
||||||
|
|
||||||
|
|
||||||
async def require_auth(request: Request) -> User:
|
|
||||||
"""Require authentication - returns user or raises 401."""
|
|
||||||
user = await get_current_user_from_token(request)
|
|
||||||
if not user:
|
|
||||||
raise HTTPException(status_code=401, detail="Not authenticated")
|
|
||||||
return user
|
|
||||||
|
|
||||||
|
|
||||||
async def require_admin(request: Request) -> User:
|
|
||||||
"""Require admin authentication - returns user or raises 401/403."""
|
|
||||||
user = await require_auth(request)
|
|
||||||
role = get_user_role(user.email)
|
|
||||||
if role != "admin":
|
|
||||||
raise HTTPException(status_code=403, detail="Admin access required")
|
|
||||||
return user
|
|
||||||
|
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# Authentication Endpoints
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/signup", response_model=MessageResponse)
|
|
||||||
async def signup(data: SignupRequest):
|
|
||||||
"""
|
|
||||||
Register a new user.
|
|
||||||
|
|
||||||
Returns a message prompting the user to verify their email.
|
|
||||||
No automatic login until email is verified.
|
|
||||||
"""
|
|
||||||
# Check if email already exists
|
|
||||||
existing = await User.prisma().find_unique(where={"email": data.email})
|
|
||||||
if existing:
|
|
||||||
raise HTTPException(status_code=400, detail="Email already registered")
|
|
||||||
|
|
||||||
# Validate password strength
|
|
||||||
if len(data.password) < 8:
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=400, detail="Password must be at least 8 characters"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Create user with hashed password
|
|
||||||
password_hash = hash_password(data.password)
|
|
||||||
user = await User.prisma().create(
|
|
||||||
data={
|
|
||||||
"id": str(uuid.uuid4()),
|
|
||||||
"email": data.email,
|
|
||||||
"passwordHash": password_hash,
|
|
||||||
"authProvider": "password",
|
|
||||||
"emailVerified": False,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
# Create verification link and send email
|
|
||||||
token = await create_email_verification_link(data.email)
|
|
||||||
email_service = get_auth_email_service()
|
|
||||||
email_sent = email_service.send_verification_email(data.email, token)
|
|
||||||
|
|
||||||
if not email_sent:
|
|
||||||
logger.warning(f"Failed to send verification email to {data.email}")
|
|
||||||
# Still log the token for development
|
|
||||||
logger.info(f"Verification token for {data.email}: {token}")
|
|
||||||
|
|
||||||
return MessageResponse(
|
|
||||||
message="Please check your email to verify your account"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/login", response_model=AuthResponse)
|
|
||||||
async def login(data: LoginRequest, response: Response):
|
|
||||||
"""
|
|
||||||
Login with email and password.
|
|
||||||
|
|
||||||
Sets httpOnly cookies for session management.
|
|
||||||
"""
|
|
||||||
user = await User.prisma().find_unique(where={"email": data.email})
|
|
||||||
|
|
||||||
if not user:
|
|
||||||
raise HTTPException(status_code=401, detail="Invalid email or password")
|
|
||||||
|
|
||||||
# Check if this is a migrated user without password
|
|
||||||
if user.passwordHash is None:
|
|
||||||
if user.migratedFromSupabase:
|
|
||||||
# Send password reset email for migrated user
|
|
||||||
token = await create_password_reset_link(data.email, user.id)
|
|
||||||
email_service = get_auth_email_service()
|
|
||||||
email_service.send_migrated_user_password_reset(data.email, token)
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=400,
|
|
||||||
detail="Please check your email to set your password",
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
# OAuth user trying to login with password
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=400,
|
|
||||||
detail=f"This account uses {user.authProvider} login",
|
|
||||||
)
|
|
||||||
|
|
||||||
# Verify password
|
|
||||||
if not verify_password(user.passwordHash, data.password):
|
|
||||||
raise HTTPException(status_code=401, detail="Invalid email or password")
|
|
||||||
|
|
||||||
# Check if email is verified
|
|
||||||
if not user.emailVerified:
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=400, detail="Please verify your email before logging in"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Rehash password if needed (transparent security upgrade)
|
|
||||||
if needs_rehash(user.passwordHash):
|
|
||||||
new_hash = hash_password(data.password)
|
|
||||||
await User.prisma().update(
|
|
||||||
where={"id": user.id}, data={"passwordHash": new_hash}
|
|
||||||
)
|
|
||||||
|
|
||||||
# Create tokens
|
|
||||||
role = get_user_role(user.email)
|
|
||||||
access_token = create_access_token(user.id, user.email, role)
|
|
||||||
refresh_token, _ = await create_refresh_token_db(user.id)
|
|
||||||
|
|
||||||
# Set cookies
|
|
||||||
_set_auth_cookies(response, access_token, refresh_token)
|
|
||||||
|
|
||||||
return AuthResponse(
|
|
||||||
user=UserResponse.from_db(user),
|
|
||||||
access_token=access_token,
|
|
||||||
refresh_token=refresh_token,
|
|
||||||
expires_in=int(ACCESS_TOKEN_TTL.total_seconds()),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/logout", response_model=MessageResponse)
|
|
||||||
async def logout(request: Request, response: Response, scope: str = Query("local")):
|
|
||||||
"""
|
|
||||||
Logout the current user.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
scope: "local" to clear current session, "global" to revoke all sessions.
|
|
||||||
"""
|
|
||||||
# Get refresh token to revoke
|
|
||||||
refresh_token = request.cookies.get(REFRESH_TOKEN_COOKIE)
|
|
||||||
|
|
||||||
if scope == "global":
|
|
||||||
# Get user from access token
|
|
||||||
access_token = _get_access_token(request)
|
|
||||||
if access_token:
|
|
||||||
payload = decode_access_token(access_token)
|
|
||||||
if payload:
|
|
||||||
await revoke_all_user_refresh_tokens(payload.sub)
|
|
||||||
elif refresh_token:
|
|
||||||
await revoke_refresh_token(refresh_token)
|
|
||||||
|
|
||||||
# Clear cookies
|
|
||||||
_clear_auth_cookies(response)
|
|
||||||
|
|
||||||
return MessageResponse(message="Logged out successfully")
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/refresh", response_model=AuthResponse)
|
|
||||||
async def refresh(request: Request, response: Response):
|
|
||||||
"""
|
|
||||||
Refresh the access token using the refresh token.
|
|
||||||
"""
|
|
||||||
refresh_token = request.cookies.get(REFRESH_TOKEN_COOKIE)
|
|
||||||
|
|
||||||
if not refresh_token:
|
|
||||||
raise HTTPException(status_code=401, detail="No refresh token")
|
|
||||||
|
|
||||||
# Validate refresh token
|
|
||||||
user_id = await validate_refresh_token(refresh_token)
|
|
||||||
if not user_id:
|
|
||||||
_clear_auth_cookies(response)
|
|
||||||
raise HTTPException(status_code=401, detail="Invalid or expired refresh token")
|
|
||||||
|
|
||||||
# Get user
|
|
||||||
user = await User.prisma().find_unique(where={"id": user_id})
|
|
||||||
if not user:
|
|
||||||
_clear_auth_cookies(response)
|
|
||||||
raise HTTPException(status_code=401, detail="User not found")
|
|
||||||
|
|
||||||
# Revoke old refresh token
|
|
||||||
await revoke_refresh_token(refresh_token)
|
|
||||||
|
|
||||||
# Create new tokens
|
|
||||||
role = get_user_role(user.email)
|
|
||||||
new_access_token = create_access_token(user.id, user.email, role)
|
|
||||||
new_refresh_token, _ = await create_refresh_token_db(user.id)
|
|
||||||
|
|
||||||
# Set new cookies
|
|
||||||
_set_auth_cookies(response, new_access_token, new_refresh_token)
|
|
||||||
|
|
||||||
return AuthResponse(
|
|
||||||
user=UserResponse.from_db(user),
|
|
||||||
access_token=new_access_token,
|
|
||||||
refresh_token=new_refresh_token,
|
|
||||||
expires_in=int(ACCESS_TOKEN_TTL.total_seconds()),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/me", response_model=UserResponse)
|
|
||||||
async def get_current_user(request: Request):
|
|
||||||
"""
|
|
||||||
Get the currently authenticated user.
|
|
||||||
|
|
||||||
Supports admin impersonation via X-Act-As-User-Id header.
|
|
||||||
"""
|
|
||||||
access_token = _get_access_token(request)
|
|
||||||
|
|
||||||
if not access_token:
|
|
||||||
raise HTTPException(status_code=401, detail="Not authenticated")
|
|
||||||
|
|
||||||
payload = decode_access_token(access_token)
|
|
||||||
if not payload:
|
|
||||||
raise HTTPException(status_code=401, detail="Invalid or expired token")
|
|
||||||
|
|
||||||
# Check for impersonation header
|
|
||||||
impersonate_user_id = request.headers.get(IMPERSONATION_HEADER, "").strip()
|
|
||||||
if impersonate_user_id:
|
|
||||||
# Verify caller is admin
|
|
||||||
if payload.role != "admin":
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=403, detail="Only admins can impersonate users"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Log impersonation for audit
|
|
||||||
logger.info(
|
|
||||||
f"Admin impersonation: {payload.sub} ({payload.email}) "
|
|
||||||
f"viewing as user {impersonate_user_id}"
|
|
||||||
)
|
|
||||||
|
|
||||||
user = await User.prisma().find_unique(where={"id": impersonate_user_id})
|
|
||||||
if not user:
|
|
||||||
raise HTTPException(status_code=404, detail="Impersonated user not found")
|
|
||||||
else:
|
|
||||||
user = await User.prisma().find_unique(where={"id": payload.sub})
|
|
||||||
if not user:
|
|
||||||
raise HTTPException(status_code=401, detail="User not found")
|
|
||||||
|
|
||||||
return UserResponse.from_db(user, include_role=True)
|
|
||||||
|
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# Password Reset Endpoints
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/password/reset", response_model=MessageResponse)
|
|
||||||
async def request_password_reset(data: PasswordResetRequest):
|
|
||||||
"""
|
|
||||||
Request a password reset email.
|
|
||||||
"""
|
|
||||||
user = await User.prisma().find_unique(where={"email": data.email})
|
|
||||||
|
|
||||||
# Always return success to prevent email enumeration
|
|
||||||
if not user:
|
|
||||||
return MessageResponse(message="If the email exists, a reset link has been sent")
|
|
||||||
|
|
||||||
# Don't allow password reset for OAuth-only users
|
|
||||||
if user.authProvider not in ("password", "supabase"):
|
|
||||||
return MessageResponse(message="If the email exists, a reset link has been sent")
|
|
||||||
|
|
||||||
# Create reset link and send email
|
|
||||||
token = await create_password_reset_link(data.email, user.id)
|
|
||||||
email_service = get_auth_email_service()
|
|
||||||
email_service.send_password_reset_email(data.email, token)
|
|
||||||
|
|
||||||
return MessageResponse(message="If the email exists, a reset link has been sent")
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/password/set", response_model=MessageResponse)
|
|
||||||
async def set_password(data: PasswordSetRequest, response: Response):
|
|
||||||
"""
|
|
||||||
Set a new password using a reset token.
|
|
||||||
"""
|
|
||||||
# Validate token
|
|
||||||
result = await verify_password_reset_token(data.token)
|
|
||||||
if not result:
|
|
||||||
raise HTTPException(status_code=400, detail="Invalid or expired reset token")
|
|
||||||
|
|
||||||
user_id, email = result
|
|
||||||
|
|
||||||
# Validate password strength
|
|
||||||
if len(data.password) < 8:
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=400, detail="Password must be at least 8 characters"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Update password and verify email (if not already)
|
|
||||||
password_hash = hash_password(data.password)
|
|
||||||
await User.prisma().update(
|
|
||||||
where={"id": user_id},
|
|
||||||
data={
|
|
||||||
"passwordHash": password_hash,
|
|
||||||
"emailVerified": True,
|
|
||||||
"emailVerifiedAt": datetime.now(timezone.utc),
|
|
||||||
"authProvider": "password",
|
|
||||||
"migratedFromSupabase": False, # Clear migration flag
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
# Send notification that password was changed
|
|
||||||
email_service = get_auth_email_service()
|
|
||||||
email_service.send_password_changed_notification(email)
|
|
||||||
|
|
||||||
# Revoke all existing sessions for security
|
|
||||||
await revoke_all_user_refresh_tokens(user_id)
|
|
||||||
|
|
||||||
# Clear any existing cookies
|
|
||||||
_clear_auth_cookies(response)
|
|
||||||
|
|
||||||
return MessageResponse(message="Password updated successfully. Please log in.")
|
|
||||||
|
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# Email Verification
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/verify-email", response_model=MessageResponse)
|
|
||||||
async def verify_email(token: str = Query(...)):
|
|
||||||
"""
|
|
||||||
Verify email address from magic link.
|
|
||||||
"""
|
|
||||||
email = await verify_email_token(token)
|
|
||||||
if not email:
|
|
||||||
raise HTTPException(status_code=400, detail="Invalid or expired verification link")
|
|
||||||
|
|
||||||
# Update user as verified
|
|
||||||
user = await User.prisma().find_unique(where={"email": email})
|
|
||||||
if user:
|
|
||||||
await User.prisma().update(
|
|
||||||
where={"id": user.id},
|
|
||||||
data={
|
|
||||||
"emailVerified": True,
|
|
||||||
"emailVerifiedAt": datetime.now(timezone.utc),
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
return MessageResponse(message="Email verified successfully. You can now log in.")
|
|
||||||
|
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# Google OAuth
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
GOOGLE_AUTH_URL = "https://accounts.google.com/o/oauth2/v2/auth"
|
|
||||||
GOOGLE_TOKEN_URL = "https://oauth2.googleapis.com/token"
|
|
||||||
GOOGLE_USERINFO_URL = "https://www.googleapis.com/oauth2/v2/userinfo"
|
|
||||||
|
|
||||||
|
|
||||||
def _get_google_config():
|
|
||||||
"""Get Google OAuth configuration from environment."""
|
|
||||||
client_id = os.getenv("GOOGLE_CLIENT_ID")
|
|
||||||
client_secret = os.getenv("GOOGLE_CLIENT_SECRET")
|
|
||||||
redirect_uri = os.getenv("GOOGLE_REDIRECT_URI", "")
|
|
||||||
|
|
||||||
if not client_id or not client_secret:
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=503, detail="Google OAuth not configured"
|
|
||||||
)
|
|
||||||
|
|
||||||
return client_id, client_secret, redirect_uri
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/oauth/google/authorize")
|
|
||||||
async def google_authorize(
|
|
||||||
response: Response,
|
|
||||||
redirect_to: str = Query("/marketplace", description="URL to redirect after auth"),
|
|
||||||
):
|
|
||||||
"""
|
|
||||||
Initiate Google OAuth flow.
|
|
||||||
|
|
||||||
Returns the authorization URL to redirect the user to.
|
|
||||||
"""
|
|
||||||
client_id, _, redirect_uri = _get_google_config()
|
|
||||||
|
|
||||||
# Generate state for CSRF protection
|
|
||||||
state = secrets.token_urlsafe(32)
|
|
||||||
|
|
||||||
# Store state and redirect_to in cookie
|
|
||||||
secure = _is_production()
|
|
||||||
response.set_cookie(
|
|
||||||
key=OAUTH_STATE_COOKIE,
|
|
||||||
value=f"{state}|{redirect_to}",
|
|
||||||
httponly=True,
|
|
||||||
secure=secure,
|
|
||||||
samesite="lax",
|
|
||||||
max_age=600, # 10 minutes
|
|
||||||
)
|
|
||||||
|
|
||||||
# Build authorization URL
|
|
||||||
params = {
|
|
||||||
"client_id": client_id,
|
|
||||||
"redirect_uri": redirect_uri,
|
|
||||||
"response_type": "code",
|
|
||||||
"scope": "openid email profile",
|
|
||||||
"state": state,
|
|
||||||
"access_type": "offline",
|
|
||||||
"prompt": "consent",
|
|
||||||
}
|
|
||||||
|
|
||||||
auth_url = f"{GOOGLE_AUTH_URL}?{urlencode(params)}"
|
|
||||||
|
|
||||||
return {"url": auth_url}
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/oauth/google/callback")
|
|
||||||
async def google_callback(
|
|
||||||
request: Request,
|
|
||||||
response: Response,
|
|
||||||
code: str = Query(...),
|
|
||||||
state: str = Query(...),
|
|
||||||
):
|
|
||||||
"""
|
|
||||||
Handle Google OAuth callback.
|
|
||||||
|
|
||||||
Exchanges the authorization code for tokens and creates/updates the user.
|
|
||||||
"""
|
|
||||||
client_id, client_secret, redirect_uri = _get_google_config()
|
|
||||||
|
|
||||||
# Verify state
|
|
||||||
stored_state_cookie = request.cookies.get(OAUTH_STATE_COOKIE)
|
|
||||||
if not stored_state_cookie:
|
|
||||||
raise HTTPException(status_code=400, detail="Missing OAuth state")
|
|
||||||
|
|
||||||
stored_state, redirect_to = stored_state_cookie.split("|", 1)
|
|
||||||
if state != stored_state:
|
|
||||||
raise HTTPException(status_code=400, detail="Invalid OAuth state")
|
|
||||||
|
|
||||||
# Clear state cookie
|
|
||||||
response.delete_cookie(key=OAUTH_STATE_COOKIE)
|
|
||||||
|
|
||||||
# Exchange code for tokens
|
|
||||||
async with httpx.AsyncClient() as client:
|
|
||||||
token_response = await client.post(
|
|
||||||
GOOGLE_TOKEN_URL,
|
|
||||||
data={
|
|
||||||
"client_id": client_id,
|
|
||||||
"client_secret": client_secret,
|
|
||||||
"code": code,
|
|
||||||
"grant_type": "authorization_code",
|
|
||||||
"redirect_uri": redirect_uri,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
if token_response.status_code != 200:
|
|
||||||
logger.error(f"Google token exchange failed: {token_response.text}")
|
|
||||||
raise HTTPException(status_code=400, detail="Failed to exchange code")
|
|
||||||
|
|
||||||
tokens = token_response.json()
|
|
||||||
google_access_token = tokens.get("access_token")
|
|
||||||
|
|
||||||
# Get user info
|
|
||||||
userinfo_response = await client.get(
|
|
||||||
GOOGLE_USERINFO_URL,
|
|
||||||
headers={"Authorization": f"Bearer {google_access_token}"},
|
|
||||||
)
|
|
||||||
|
|
||||||
if userinfo_response.status_code != 200:
|
|
||||||
raise HTTPException(status_code=400, detail="Failed to get user info")
|
|
||||||
|
|
||||||
userinfo = userinfo_response.json()
|
|
||||||
|
|
||||||
email = userinfo.get("email")
|
|
||||||
if not email:
|
|
||||||
raise HTTPException(status_code=400, detail="Email not provided by Google")
|
|
||||||
|
|
||||||
# Get or create user
|
|
||||||
user = await User.prisma().find_unique(where={"email": email})
|
|
||||||
|
|
||||||
if user:
|
|
||||||
# Update existing user if needed
|
|
||||||
if user.authProvider == "supabase":
|
|
||||||
await User.prisma().update(
|
|
||||||
where={"id": user.id},
|
|
||||||
data={"authProvider": "google"},
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
# Create new user
|
|
||||||
user = await User.prisma().create(
|
|
||||||
data={
|
|
||||||
"id": str(uuid.uuid4()),
|
|
||||||
"email": email,
|
|
||||||
"name": userinfo.get("name"),
|
|
||||||
"emailVerified": True, # Google verifies emails
|
|
||||||
"emailVerifiedAt": datetime.now(timezone.utc),
|
|
||||||
"authProvider": "google",
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
# Create tokens
|
|
||||||
role = get_user_role(email)
|
|
||||||
access_token = create_access_token(user.id, user.email, role)
|
|
||||||
refresh_token, _ = await create_refresh_token_db(user.id)
|
|
||||||
|
|
||||||
# Set cookies
|
|
||||||
_set_auth_cookies(response, access_token, refresh_token)
|
|
||||||
|
|
||||||
# Redirect to frontend
|
|
||||||
frontend_url = os.getenv("FRONTEND_BASE_URL", "http://localhost:3000")
|
|
||||||
from fastapi.responses import RedirectResponse
|
|
||||||
|
|
||||||
return RedirectResponse(url=f"{frontend_url}{redirect_to}")
|
|
||||||
|
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# Admin Routes
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/admin/users", response_model=AdminUserListResponse)
|
|
||||||
async def list_users(
|
|
||||||
request: Request,
|
|
||||||
page: int = Query(1, ge=1),
|
|
||||||
page_size: int = Query(50, ge=1, le=100),
|
|
||||||
search: Optional[str] = Query(None, description="Search by email"),
|
|
||||||
admin_user: User = Depends(require_admin),
|
|
||||||
):
|
|
||||||
"""
|
|
||||||
List all users (admin only).
|
|
||||||
"""
|
|
||||||
skip = (page - 1) * page_size
|
|
||||||
|
|
||||||
where_clause = {}
|
|
||||||
if search:
|
|
||||||
where_clause["email"] = {"contains": search, "mode": "insensitive"}
|
|
||||||
|
|
||||||
users = await User.prisma().find_many(
|
|
||||||
where=where_clause,
|
|
||||||
skip=skip,
|
|
||||||
take=page_size,
|
|
||||||
order={"createdAt": "desc"},
|
|
||||||
)
|
|
||||||
|
|
||||||
total = await User.prisma().count(where=where_clause)
|
|
||||||
|
|
||||||
return AdminUserListResponse(
|
|
||||||
users=[UserResponse.from_db(u, include_role=True) for u in users],
|
|
||||||
total=total,
|
|
||||||
page=page,
|
|
||||||
page_size=page_size,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/admin/users/{user_id}", response_model=UserResponse)
|
|
||||||
async def get_user_by_id(
|
|
||||||
user_id: str,
|
|
||||||
admin_user: User = Depends(require_admin),
|
|
||||||
):
|
|
||||||
"""
|
|
||||||
Get a specific user by ID (admin only).
|
|
||||||
"""
|
|
||||||
user = await User.prisma().find_unique(where={"id": user_id})
|
|
||||||
if not user:
|
|
||||||
raise HTTPException(status_code=404, detail="User not found")
|
|
||||||
|
|
||||||
return UserResponse.from_db(user, include_role=True)
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/admin/users/{user_id}/impersonate", response_model=ImpersonationResponse)
|
|
||||||
async def impersonate_user(
|
|
||||||
request: Request,
|
|
||||||
user_id: str,
|
|
||||||
admin_user: User = Depends(require_admin),
|
|
||||||
):
|
|
||||||
"""
|
|
||||||
Get an access token to impersonate a user (admin only).
|
|
||||||
|
|
||||||
This token can be used with the Authorization header to act as the user.
|
|
||||||
All actions are logged for audit purposes.
|
|
||||||
"""
|
|
||||||
target_user = await User.prisma().find_unique(where={"id": user_id})
|
|
||||||
if not target_user:
|
|
||||||
raise HTTPException(status_code=404, detail="User not found")
|
|
||||||
|
|
||||||
# Log the impersonation
|
|
||||||
logger.warning(
|
|
||||||
f"ADMIN IMPERSONATION: Admin {admin_user.id} ({admin_user.email}) "
|
|
||||||
f"generated impersonation token for user {target_user.id} ({target_user.email})"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Create an access token for the target user (but with original role for safety)
|
|
||||||
# The impersonation is tracked via the audit log
|
|
||||||
role = get_user_role(target_user.email)
|
|
||||||
access_token = create_access_token(target_user.id, target_user.email, role)
|
|
||||||
|
|
||||||
return ImpersonationResponse(
|
|
||||||
access_token=access_token,
|
|
||||||
impersonated_user=UserResponse.from_db(target_user, include_role=True),
|
|
||||||
expires_in=int(ACCESS_TOKEN_TTL.total_seconds()),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/admin/users/{user_id}/force-password-reset", response_model=MessageResponse)
|
|
||||||
async def force_password_reset(
|
|
||||||
user_id: str,
|
|
||||||
admin_user: User = Depends(require_admin),
|
|
||||||
):
|
|
||||||
"""
|
|
||||||
Force send a password reset email to a user (admin only).
|
|
||||||
|
|
||||||
Useful for helping users who are locked out.
|
|
||||||
"""
|
|
||||||
user = await User.prisma().find_unique(where={"id": user_id})
|
|
||||||
if not user:
|
|
||||||
raise HTTPException(status_code=404, detail="User not found")
|
|
||||||
|
|
||||||
# Create and send password reset
|
|
||||||
token = await create_password_reset_link(user.email, user.id)
|
|
||||||
email_service = get_auth_email_service()
|
|
||||||
email_sent = email_service.send_password_reset_email(user.email, token)
|
|
||||||
|
|
||||||
# Log the action
|
|
||||||
logger.info(
|
|
||||||
f"Admin {admin_user.id} ({admin_user.email}) "
|
|
||||||
f"triggered password reset for user {user.id} ({user.email})"
|
|
||||||
)
|
|
||||||
|
|
||||||
if email_sent:
|
|
||||||
return MessageResponse(message=f"Password reset email sent to {user.email}")
|
|
||||||
else:
|
|
||||||
return MessageResponse(message="Email service unavailable, reset link logged")
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/admin/users/{user_id}/revoke-sessions", response_model=MessageResponse)
|
|
||||||
async def revoke_user_sessions(
|
|
||||||
user_id: str,
|
|
||||||
admin_user: User = Depends(require_admin),
|
|
||||||
):
|
|
||||||
"""
|
|
||||||
Revoke all sessions for a user (admin only).
|
|
||||||
|
|
||||||
Useful for security incidents.
|
|
||||||
"""
|
|
||||||
user = await User.prisma().find_unique(where={"id": user_id})
|
|
||||||
if not user:
|
|
||||||
raise HTTPException(status_code=404, detail="User not found")
|
|
||||||
|
|
||||||
count = await revoke_all_user_refresh_tokens(user_id)
|
|
||||||
|
|
||||||
# Log the action
|
|
||||||
logger.warning(
|
|
||||||
f"Admin {admin_user.id} ({admin_user.email}) "
|
|
||||||
f"revoked all sessions for user {user.id} ({user.email}). "
|
|
||||||
f"Revoked {count} refresh tokens."
|
|
||||||
)
|
|
||||||
|
|
||||||
return MessageResponse(message=f"Revoked {count} sessions for user {user.email}")
|
|
||||||
@@ -31,9 +31,9 @@ from typing_extensions import Optional, TypedDict
|
|||||||
import backend.server.integrations.router
|
import backend.server.integrations.router
|
||||||
import backend.server.routers.analytics
|
import backend.server.routers.analytics
|
||||||
import backend.server.v2.library.db as library_db
|
import backend.server.v2.library.db as library_db
|
||||||
|
from backend.data import api_key as api_key_db
|
||||||
from backend.data import execution as execution_db
|
from backend.data import execution as execution_db
|
||||||
from backend.data import graph as graph_db
|
from backend.data import graph as graph_db
|
||||||
from backend.data.auth import api_key as api_key_db
|
|
||||||
from backend.data.block import BlockInput, CompletedBlockOutput, get_block, get_blocks
|
from backend.data.block import BlockInput, CompletedBlockOutput, get_block, get_blocks
|
||||||
from backend.data.credit import (
|
from backend.data.credit import (
|
||||||
AutoTopUpConfig,
|
AutoTopUpConfig,
|
||||||
|
|||||||
@@ -134,14 +134,18 @@ async def process_review_action(
|
|||||||
# Build review decisions map
|
# Build review decisions map
|
||||||
review_decisions = {}
|
review_decisions = {}
|
||||||
for review in request.reviews:
|
for review in request.reviews:
|
||||||
review_status = (
|
if review.approved:
|
||||||
ReviewStatus.APPROVED if review.approved else ReviewStatus.REJECTED
|
review_decisions[review.node_exec_id] = (
|
||||||
)
|
ReviewStatus.APPROVED,
|
||||||
review_decisions[review.node_exec_id] = (
|
review.reviewed_data,
|
||||||
review_status,
|
review.message,
|
||||||
review.reviewed_data,
|
)
|
||||||
review.message,
|
else:
|
||||||
)
|
review_decisions[review.node_exec_id] = (
|
||||||
|
ReviewStatus.REJECTED,
|
||||||
|
None,
|
||||||
|
review.message,
|
||||||
|
)
|
||||||
|
|
||||||
# Process all reviews
|
# Process all reviews
|
||||||
updated_reviews = await process_all_reviews_for_execution(
|
updated_reviews = await process_all_reviews_for_execution(
|
||||||
|
|||||||
@@ -14,47 +14,12 @@ from backend.util.virus_scanner import scan_content_safe
|
|||||||
|
|
||||||
TEMP_DIR = Path(tempfile.gettempdir()).resolve()
|
TEMP_DIR = Path(tempfile.gettempdir()).resolve()
|
||||||
|
|
||||||
# Maximum filename length (conservative limit for most filesystems)
|
|
||||||
MAX_FILENAME_LENGTH = 200
|
|
||||||
|
|
||||||
|
|
||||||
def sanitize_filename(filename: str) -> str:
|
|
||||||
"""
|
|
||||||
Sanitize and truncate filename to prevent filesystem errors.
|
|
||||||
"""
|
|
||||||
# Remove or replace invalid characters
|
|
||||||
sanitized = re.sub(r'[<>:"/\\|?*\n\r\t]', "_", filename)
|
|
||||||
|
|
||||||
# Truncate if too long
|
|
||||||
if len(sanitized) > MAX_FILENAME_LENGTH:
|
|
||||||
# Keep the extension if possible
|
|
||||||
if "." in sanitized:
|
|
||||||
name, ext = sanitized.rsplit(".", 1)
|
|
||||||
max_name_length = MAX_FILENAME_LENGTH - len(ext) - 1
|
|
||||||
sanitized = name[:max_name_length] + "." + ext
|
|
||||||
else:
|
|
||||||
sanitized = sanitized[:MAX_FILENAME_LENGTH]
|
|
||||||
|
|
||||||
# Ensure it's not empty or just dots
|
|
||||||
if not sanitized or sanitized.strip(".") == "":
|
|
||||||
sanitized = f"file_{uuid.uuid4().hex[:8]}"
|
|
||||||
|
|
||||||
return sanitized
|
|
||||||
|
|
||||||
|
|
||||||
def get_exec_file_path(graph_exec_id: str, path: str) -> str:
|
def get_exec_file_path(graph_exec_id: str, path: str) -> str:
|
||||||
"""
|
"""
|
||||||
Utility to build an absolute path in the {temp}/exec_file/{exec_id}/... folder.
|
Utility to build an absolute path in the {temp}/exec_file/{exec_id}/... folder.
|
||||||
"""
|
"""
|
||||||
try:
|
return str(TEMP_DIR / "exec_file" / graph_exec_id / path)
|
||||||
full_path = TEMP_DIR / "exec_file" / graph_exec_id / path
|
|
||||||
return str(full_path)
|
|
||||||
except OSError as e:
|
|
||||||
if "File name too long" in str(e):
|
|
||||||
raise ValueError(
|
|
||||||
f"File path too long: {len(path)} characters. Maximum path length exceeded."
|
|
||||||
) from e
|
|
||||||
raise ValueError(f"Invalid file path: {e}") from e
|
|
||||||
|
|
||||||
|
|
||||||
def clean_exec_files(graph_exec_id: str, file: str = "") -> None:
|
def clean_exec_files(graph_exec_id: str, file: str = "") -> None:
|
||||||
@@ -152,11 +117,8 @@ async def store_media_file(
|
|||||||
|
|
||||||
# Generate filename from cloud path
|
# Generate filename from cloud path
|
||||||
_, path_part = cloud_storage.parse_cloud_path(file)
|
_, path_part = cloud_storage.parse_cloud_path(file)
|
||||||
filename = sanitize_filename(Path(path_part).name or f"{uuid.uuid4()}.bin")
|
filename = Path(path_part).name or f"{uuid.uuid4()}.bin"
|
||||||
try:
|
target_path = _ensure_inside_base(base_path / filename, base_path)
|
||||||
target_path = _ensure_inside_base(base_path / filename, base_path)
|
|
||||||
except OSError as e:
|
|
||||||
raise ValueError(f"Invalid file path '{filename}': {e}") from e
|
|
||||||
|
|
||||||
# Check file size limit
|
# Check file size limit
|
||||||
if len(cloud_content) > MAX_FILE_SIZE:
|
if len(cloud_content) > MAX_FILE_SIZE:
|
||||||
@@ -182,10 +144,7 @@ async def store_media_file(
|
|||||||
# Generate filename and decode
|
# Generate filename and decode
|
||||||
extension = _extension_from_mime(mime_type)
|
extension = _extension_from_mime(mime_type)
|
||||||
filename = f"{uuid.uuid4()}{extension}"
|
filename = f"{uuid.uuid4()}{extension}"
|
||||||
try:
|
target_path = _ensure_inside_base(base_path / filename, base_path)
|
||||||
target_path = _ensure_inside_base(base_path / filename, base_path)
|
|
||||||
except OSError as e:
|
|
||||||
raise ValueError(f"Invalid file path '{filename}': {e}") from e
|
|
||||||
content = base64.b64decode(b64_content)
|
content = base64.b64decode(b64_content)
|
||||||
|
|
||||||
# Check file size limit
|
# Check file size limit
|
||||||
@@ -201,11 +160,8 @@ async def store_media_file(
|
|||||||
elif file.startswith(("http://", "https://")):
|
elif file.startswith(("http://", "https://")):
|
||||||
# URL
|
# URL
|
||||||
parsed_url = urlparse(file)
|
parsed_url = urlparse(file)
|
||||||
filename = sanitize_filename(Path(parsed_url.path).name or f"{uuid.uuid4()}")
|
filename = Path(parsed_url.path).name or f"{uuid.uuid4()}"
|
||||||
try:
|
target_path = _ensure_inside_base(base_path / filename, base_path)
|
||||||
target_path = _ensure_inside_base(base_path / filename, base_path)
|
|
||||||
except OSError as e:
|
|
||||||
raise ValueError(f"Invalid file path '{filename}': {e}") from e
|
|
||||||
|
|
||||||
# Download and save
|
# Download and save
|
||||||
resp = await Requests().get(file)
|
resp = await Requests().get(file)
|
||||||
@@ -221,12 +177,8 @@ async def store_media_file(
|
|||||||
target_path.write_bytes(resp.content)
|
target_path.write_bytes(resp.content)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
# Local path - sanitize the filename part to prevent long filename errors
|
# Local path
|
||||||
sanitized_file = sanitize_filename(file)
|
target_path = _ensure_inside_base(base_path / file, base_path)
|
||||||
try:
|
|
||||||
target_path = _ensure_inside_base(base_path / sanitized_file, base_path)
|
|
||||||
except OSError as e:
|
|
||||||
raise ValueError(f"Invalid file path '{sanitized_file}': {e}") from e
|
|
||||||
if not target_path.is_file():
|
if not target_path.is_file():
|
||||||
raise ValueError(f"Local file does not exist: {target_path}")
|
raise ValueError(f"Local file does not exist: {target_path}")
|
||||||
|
|
||||||
|
|||||||
@@ -21,26 +21,6 @@ from tenacity import (
|
|||||||
|
|
||||||
from backend.util.json import loads
|
from backend.util.json import loads
|
||||||
|
|
||||||
|
|
||||||
class HTTPClientError(Exception):
|
|
||||||
"""4xx client errors (400-499)"""
|
|
||||||
|
|
||||||
def __init__(self, message: str, status_code: int):
|
|
||||||
super().__init__(message)
|
|
||||||
self.status_code = status_code
|
|
||||||
|
|
||||||
|
|
||||||
class HTTPServerError(Exception):
|
|
||||||
"""5xx server errors (500-599)"""
|
|
||||||
|
|
||||||
def __init__(self, message: str, status_code: int):
|
|
||||||
super().__init__(message)
|
|
||||||
self.status_code = status_code
|
|
||||||
|
|
||||||
|
|
||||||
# Default User-Agent for all requests
|
|
||||||
DEFAULT_USER_AGENT = "AutoGPT-Platform/1.0 (https://github.com/Significant-Gravitas/AutoGPT; info@agpt.co) aiohttp"
|
|
||||||
|
|
||||||
# Retry status codes for which we will automatically retry the request
|
# Retry status codes for which we will automatically retry the request
|
||||||
THROTTLE_RETRY_STATUS_CODES: set[int] = {429, 500, 502, 503, 504, 408}
|
THROTTLE_RETRY_STATUS_CODES: set[int] = {429, 500, 502, 503, 504, 408}
|
||||||
|
|
||||||
@@ -470,10 +450,6 @@ class Requests:
|
|||||||
if self.extra_headers is not None:
|
if self.extra_headers is not None:
|
||||||
req_headers.update(self.extra_headers)
|
req_headers.update(self.extra_headers)
|
||||||
|
|
||||||
# Set default User-Agent if not provided
|
|
||||||
if "User-Agent" not in req_headers and "user-agent" not in req_headers:
|
|
||||||
req_headers["User-Agent"] = DEFAULT_USER_AGENT
|
|
||||||
|
|
||||||
# Override Host header if using IP connection
|
# Override Host header if using IP connection
|
||||||
if connector:
|
if connector:
|
||||||
req_headers["Host"] = hostname
|
req_headers["Host"] = hostname
|
||||||
@@ -500,16 +476,9 @@ class Requests:
|
|||||||
response.raise_for_status()
|
response.raise_for_status()
|
||||||
except ClientResponseError as e:
|
except ClientResponseError as e:
|
||||||
body = await response.read()
|
body = await response.read()
|
||||||
error_message = f"HTTP {response.status} Error: {response.reason}, Body: {body.decode(errors='replace')}"
|
raise Exception(
|
||||||
|
f"HTTP {response.status} Error: {response.reason}, Body: {body.decode(errors='replace')}"
|
||||||
# Raise specific exceptions based on status code range
|
) from e
|
||||||
if 400 <= response.status <= 499:
|
|
||||||
raise HTTPClientError(error_message, response.status) from e
|
|
||||||
elif 500 <= response.status <= 599:
|
|
||||||
raise HTTPServerError(error_message, response.status) from e
|
|
||||||
else:
|
|
||||||
# Generic fallback for other HTTP errors
|
|
||||||
raise Exception(error_message) from e
|
|
||||||
|
|
||||||
# If allowed and a redirect is received, follow the redirect manually
|
# If allowed and a redirect is received, follow the redirect manually
|
||||||
if allow_redirects and response.status in (301, 302, 303, 307, 308):
|
if allow_redirects and response.status in (301, 302, 303, 307, 308):
|
||||||
|
|||||||
@@ -308,16 +308,6 @@ class Config(UpdateTrackingModel["Config"], BaseSettings):
|
|||||||
description="The email address to use for sending emails",
|
description="The email address to use for sending emails",
|
||||||
)
|
)
|
||||||
|
|
||||||
# Admin configuration for native auth
|
|
||||||
admin_email_domains: str = Field(
|
|
||||||
default="agpt.co",
|
|
||||||
description="Comma-separated list of email domains that grant admin role (e.g., 'agpt.co,autogpt.com')",
|
|
||||||
)
|
|
||||||
admin_emails: str = Field(
|
|
||||||
default="",
|
|
||||||
description="Comma-separated list of specific email addresses that grant admin role",
|
|
||||||
)
|
|
||||||
|
|
||||||
use_agent_image_generation_v2: bool = Field(
|
use_agent_image_generation_v2: bool = Field(
|
||||||
default=True,
|
default=True,
|
||||||
description="Whether to use the new agent image generation service",
|
description="Whether to use the new agent image generation service",
|
||||||
@@ -372,13 +362,6 @@ class Config(UpdateTrackingModel["Config"], BaseSettings):
|
|||||||
description="Hours between cloud storage cleanup runs (1-24 hours)",
|
description="Hours between cloud storage cleanup runs (1-24 hours)",
|
||||||
)
|
)
|
||||||
|
|
||||||
oauth_token_cleanup_interval_hours: int = Field(
|
|
||||||
default=6,
|
|
||||||
ge=1,
|
|
||||||
le=24,
|
|
||||||
description="Hours between OAuth token cleanup runs (1-24 hours)",
|
|
||||||
)
|
|
||||||
|
|
||||||
upload_file_size_limit_mb: int = Field(
|
upload_file_size_limit_mb: int = Field(
|
||||||
default=256,
|
default=256,
|
||||||
ge=1,
|
ge=1,
|
||||||
|
|||||||
@@ -5,13 +5,6 @@ from typing import Any, Type, TypeVar, Union, cast, get_args, get_origin, overlo
|
|||||||
from prisma import Json as PrismaJson
|
from prisma import Json as PrismaJson
|
||||||
|
|
||||||
|
|
||||||
def _is_type_or_subclass(origin: Any, target_type: type) -> bool:
|
|
||||||
"""Check if origin is exactly the target type or a subclass of it."""
|
|
||||||
return origin is target_type or (
|
|
||||||
isinstance(origin, type) and issubclass(origin, target_type)
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class ConversionError(ValueError):
|
class ConversionError(ValueError):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@@ -145,11 +138,7 @@ def _try_convert(value: Any, target_type: Any, raise_on_mismatch: bool) -> Any:
|
|||||||
|
|
||||||
if origin is None:
|
if origin is None:
|
||||||
origin = target_type
|
origin = target_type
|
||||||
# Early return for unsupported types (skip subclasses of supported types)
|
if origin not in [list, dict, tuple, str, set, int, float, bool]:
|
||||||
supported_types = [list, dict, tuple, str, set, int, float, bool]
|
|
||||||
if origin not in supported_types and not (
|
|
||||||
isinstance(origin, type) and any(issubclass(origin, t) for t in supported_types)
|
|
||||||
):
|
|
||||||
return value
|
return value
|
||||||
|
|
||||||
# Handle the case when value is already of the target type
|
# Handle the case when value is already of the target type
|
||||||
@@ -179,47 +168,44 @@ def _try_convert(value: Any, target_type: Any, raise_on_mismatch: bool) -> Any:
|
|||||||
raise TypeError(f"Value {value} is not of expected type {target_type}")
|
raise TypeError(f"Value {value} is not of expected type {target_type}")
|
||||||
else:
|
else:
|
||||||
# Need to convert value to the origin type
|
# Need to convert value to the origin type
|
||||||
if _is_type_or_subclass(origin, list):
|
if origin is list:
|
||||||
converted_list = __convert_list(value)
|
value = __convert_list(value)
|
||||||
if args:
|
if args:
|
||||||
converted_list = [convert(v, args[0]) for v in converted_list]
|
return [convert(v, args[0]) for v in value]
|
||||||
return origin(converted_list) if origin is not list else converted_list
|
else:
|
||||||
elif _is_type_or_subclass(origin, dict):
|
return value
|
||||||
converted_dict = __convert_dict(value)
|
elif origin is dict:
|
||||||
|
value = __convert_dict(value)
|
||||||
if args:
|
if args:
|
||||||
key_type, val_type = args
|
key_type, val_type = args
|
||||||
converted_dict = {
|
return {
|
||||||
convert(k, key_type): convert(v, val_type)
|
convert(k, key_type): convert(v, val_type) for k, v in value.items()
|
||||||
for k, v in converted_dict.items()
|
|
||||||
}
|
}
|
||||||
return origin(converted_dict) if origin is not dict else converted_dict
|
else:
|
||||||
elif _is_type_or_subclass(origin, tuple):
|
return value
|
||||||
converted_tuple = __convert_tuple(value)
|
elif origin is tuple:
|
||||||
|
value = __convert_tuple(value)
|
||||||
if args:
|
if args:
|
||||||
if len(args) == 1:
|
if len(args) == 1:
|
||||||
converted_tuple = tuple(
|
return tuple(convert(v, args[0]) for v in value)
|
||||||
convert(v, args[0]) for v in converted_tuple
|
|
||||||
)
|
|
||||||
else:
|
else:
|
||||||
converted_tuple = tuple(
|
return tuple(convert(v, t) for v, t in zip(value, args))
|
||||||
convert(v, t) for v, t in zip(converted_tuple, args)
|
else:
|
||||||
)
|
return value
|
||||||
return origin(converted_tuple) if origin is not tuple else converted_tuple
|
elif origin is str:
|
||||||
elif _is_type_or_subclass(origin, str):
|
return __convert_str(value)
|
||||||
converted_str = __convert_str(value)
|
elif origin is set:
|
||||||
return origin(converted_str) if origin is not str else converted_str
|
|
||||||
elif _is_type_or_subclass(origin, set):
|
|
||||||
value = __convert_set(value)
|
value = __convert_set(value)
|
||||||
if args:
|
if args:
|
||||||
return {convert(v, args[0]) for v in value}
|
return {convert(v, args[0]) for v in value}
|
||||||
else:
|
else:
|
||||||
return value
|
return value
|
||||||
elif _is_type_or_subclass(origin, bool):
|
elif origin is int:
|
||||||
return __convert_bool(value)
|
|
||||||
elif _is_type_or_subclass(origin, int):
|
|
||||||
return __convert_num(value, int)
|
return __convert_num(value, int)
|
||||||
elif _is_type_or_subclass(origin, float):
|
elif origin is float:
|
||||||
return __convert_num(value, float)
|
return __convert_num(value, float)
|
||||||
|
elif origin is bool:
|
||||||
|
return __convert_bool(value)
|
||||||
else:
|
else:
|
||||||
return value
|
return value
|
||||||
|
|
||||||
|
|||||||
@@ -32,17 +32,3 @@ def test_type_conversion():
|
|||||||
assert convert("5", List[int]) == [5]
|
assert convert("5", List[int]) == [5]
|
||||||
assert convert("[5,4,2]", List[int]) == [5, 4, 2]
|
assert convert("[5,4,2]", List[int]) == [5, 4, 2]
|
||||||
assert convert([5, 4, 2], List[str]) == ["5", "4", "2"]
|
assert convert([5, 4, 2], List[str]) == ["5", "4", "2"]
|
||||||
|
|
||||||
# Test the specific case that was failing: empty list to Optional[str]
|
|
||||||
assert convert([], Optional[str]) == "[]"
|
|
||||||
assert convert([], str) == "[]"
|
|
||||||
|
|
||||||
# Test the actual failing case: empty list to ShortTextType
|
|
||||||
from backend.util.type import ShortTextType
|
|
||||||
|
|
||||||
assert convert([], Optional[ShortTextType]) == "[]"
|
|
||||||
assert convert([], ShortTextType) == "[]"
|
|
||||||
|
|
||||||
# Test other empty list conversions
|
|
||||||
assert convert([], int) == 0 # len([]) = 0
|
|
||||||
assert convert([], Optional[int]) == 0
|
|
||||||
|
|||||||
@@ -1,129 +0,0 @@
-- CreateTable
CREATE TABLE "OAuthApplication" (
    "id" TEXT NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,
    "name" TEXT NOT NULL,
    "description" TEXT,
    "clientId" TEXT NOT NULL,
    "clientSecret" TEXT NOT NULL,
    "clientSecretSalt" TEXT NOT NULL,
    "redirectUris" TEXT[],
    "grantTypes" TEXT[] DEFAULT ARRAY['authorization_code', 'refresh_token']::TEXT[],
    "scopes" "APIKeyPermission"[],
    "ownerId" TEXT NOT NULL,
    "isActive" BOOLEAN NOT NULL DEFAULT true,

    CONSTRAINT "OAuthApplication_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "OAuthAuthorizationCode" (
    "id" TEXT NOT NULL,
    "code" TEXT NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "expiresAt" TIMESTAMP(3) NOT NULL,
    "applicationId" TEXT NOT NULL,
    "userId" TEXT NOT NULL,
    "scopes" "APIKeyPermission"[],
    "redirectUri" TEXT NOT NULL,
    "codeChallenge" TEXT,
    "codeChallengeMethod" TEXT,
    "usedAt" TIMESTAMP(3),

    CONSTRAINT "OAuthAuthorizationCode_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "OAuthAccessToken" (
    "id" TEXT NOT NULL,
    "token" TEXT NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "expiresAt" TIMESTAMP(3) NOT NULL,
    "applicationId" TEXT NOT NULL,
    "userId" TEXT NOT NULL,
    "scopes" "APIKeyPermission"[],
    "revokedAt" TIMESTAMP(3),

    CONSTRAINT "OAuthAccessToken_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "OAuthRefreshToken" (
    "id" TEXT NOT NULL,
    "token" TEXT NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "expiresAt" TIMESTAMP(3) NOT NULL,
    "applicationId" TEXT NOT NULL,
    "userId" TEXT NOT NULL,
    "scopes" "APIKeyPermission"[],
    "revokedAt" TIMESTAMP(3),

    CONSTRAINT "OAuthRefreshToken_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE UNIQUE INDEX "OAuthApplication_clientId_key" ON "OAuthApplication"("clientId");

-- CreateIndex
CREATE INDEX "OAuthApplication_clientId_idx" ON "OAuthApplication"("clientId");

-- CreateIndex
CREATE INDEX "OAuthApplication_ownerId_idx" ON "OAuthApplication"("ownerId");

-- CreateIndex
CREATE UNIQUE INDEX "OAuthAuthorizationCode_code_key" ON "OAuthAuthorizationCode"("code");

-- CreateIndex
CREATE INDEX "OAuthAuthorizationCode_code_idx" ON "OAuthAuthorizationCode"("code");

-- CreateIndex
CREATE INDEX "OAuthAuthorizationCode_applicationId_userId_idx" ON "OAuthAuthorizationCode"("applicationId", "userId");

-- CreateIndex
CREATE INDEX "OAuthAuthorizationCode_expiresAt_idx" ON "OAuthAuthorizationCode"("expiresAt");

-- CreateIndex
CREATE UNIQUE INDEX "OAuthAccessToken_token_key" ON "OAuthAccessToken"("token");

-- CreateIndex
CREATE INDEX "OAuthAccessToken_token_idx" ON "OAuthAccessToken"("token");

-- CreateIndex
CREATE INDEX "OAuthAccessToken_userId_applicationId_idx" ON "OAuthAccessToken"("userId", "applicationId");

-- CreateIndex
CREATE INDEX "OAuthAccessToken_expiresAt_idx" ON "OAuthAccessToken"("expiresAt");

-- CreateIndex
CREATE UNIQUE INDEX "OAuthRefreshToken_token_key" ON "OAuthRefreshToken"("token");

-- CreateIndex
CREATE INDEX "OAuthRefreshToken_token_idx" ON "OAuthRefreshToken"("token");

-- CreateIndex
CREATE INDEX "OAuthRefreshToken_userId_applicationId_idx" ON "OAuthRefreshToken"("userId", "applicationId");

-- CreateIndex
CREATE INDEX "OAuthRefreshToken_expiresAt_idx" ON "OAuthRefreshToken"("expiresAt");

-- AddForeignKey
ALTER TABLE "OAuthApplication" ADD CONSTRAINT "OAuthApplication_ownerId_fkey" FOREIGN KEY ("ownerId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "OAuthAuthorizationCode" ADD CONSTRAINT "OAuthAuthorizationCode_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "OAuthApplication"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "OAuthAuthorizationCode" ADD CONSTRAINT "OAuthAuthorizationCode_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "OAuthAccessToken" ADD CONSTRAINT "OAuthAccessToken_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "OAuthApplication"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "OAuthAccessToken" ADD CONSTRAINT "OAuthAccessToken_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "OAuthRefreshToken" ADD CONSTRAINT "OAuthRefreshToken_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "OAuthApplication"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "OAuthRefreshToken" ADD CONSTRAINT "OAuthRefreshToken_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;
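The deleted `OAuthAuthorizationCode` table above carries `codeChallenge` and `codeChallengeMethod` columns, i.e. PKCE support for the authorization-code flow. The sketch below shows the standard S256 challenge derivation and check from RFC 7636; it is general PKCE mechanics for illustration, not code taken from this repository, and the function names are hypothetical.

```python
import base64
import hashlib
import secrets

def make_pkce_pair() -> tuple[str, str]:
    # Client side: random verifier plus its S256 challenge (base64url, no padding).
    verifier = secrets.token_urlsafe(32)
    digest = hashlib.sha256(verifier.encode("ascii")).digest()
    challenge = base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")
    return verifier, challenge

def verify_s256(verifier: str, stored_challenge: str) -> bool:
    # Server side: recompute the challenge from the verifier sent at token exchange.
    digest = hashlib.sha256(verifier.encode("ascii")).digest()
    recomputed = base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")
    return secrets.compare_digest(recomputed, stored_challenge)

verifier, challenge = make_pkce_pair()
assert verify_s256(verifier, challenge)
```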
@@ -1,5 +0,0 @@
-- AlterEnum
ALTER TYPE "APIKeyPermission" ADD VALUE 'IDENTITY';

-- AlterTable
ALTER TABLE "OAuthApplication" ADD COLUMN "logoUrl" TEXT;
@@ -115,8 +115,6 @@ format = "linter:format"
lint = "linter:lint"
test = "run_tests:test"
load-store-agents = "test.load_store_agents:run"
-export-api-schema = "backend.cli.generate_openapi_json:main"
-oauth-tool = "backend.cli.oauth_tool:cli"

[tool.isort]
profile = "black"
@@ -25,12 +25,6 @@ model User {
  stripeCustomerId    String?
  topUpConfig         Json?

-  // Native auth fields (for migration from Supabase)
-  passwordHash         String?   // NULL for OAuth users or migrated users awaiting password reset
-  authProvider         String    @default("supabase") // "password", "google", "supabase" (legacy)
-  migratedFromSupabase Boolean   @default(false)
-  emailVerifiedAt      DateTime?
-
  maxEmailsPerDay     Int     @default(3)
  notifyOnAgentRun    Boolean @default(true)
  notifyOnZeroBalance Boolean @default(true)
@@ -67,16 +61,6 @@ model User {
  IntegrationWebhooks IntegrationWebhook[]
  NotificationBatches UserNotificationBatch[]
  PendingHumanReviews PendingHumanReview[]
-
-  // OAuth Provider relations
-  OAuthApplications       OAuthApplication[]
-  OAuthAuthorizationCodes OAuthAuthorizationCode[]
-  OAuthAccessTokens       OAuthAccessToken[]
-  OAuthRefreshTokens      OAuthRefreshToken[]
-
-  // Native auth relations
-  UserAuthRefreshTokens UserAuthRefreshToken[]
-  UserAuthMagicLinks    UserAuthMagicLink[]
}

enum OnboardingStep {
@@ -940,7 +924,6 @@ enum SubmissionStatus {
}

enum APIKeyPermission {
-  IDENTITY      // Info about the authenticated user
  EXECUTE_GRAPH // Can execute agent graphs
  READ_GRAPH    // Can get graph versions and details
  EXECUTE_BLOCK // Can execute individual blocks
@@ -992,157 +975,3 @@ enum APIKeyStatus {
  REVOKED
  SUSPENDED
}
-
-////////////////////////////////////////////////////////////
-////////////////////////////////////////////////////////////
-//////////////    OAUTH PROVIDER TABLES    //////////////////
-////////////////////////////////////////////////////////////
-////////////////////////////////////////////////////////////
-
-// OAuth2 applications that can access AutoGPT on behalf of users
-model OAuthApplication {
-  id        String   @id @default(uuid())
-  createdAt DateTime @default(now())
-  updatedAt DateTime @updatedAt
-
-  // Application metadata
-  name             String
-  description      String?
-  logoUrl          String? // URL to app logo stored in GCS
-  clientId         String  @unique
-  clientSecret     String  // Hashed with Scrypt (same as API keys)
-  clientSecretSalt String  // Salt for Scrypt hashing
-
-  // OAuth configuration
-  redirectUris String[]           // Allowed callback URLs
-  grantTypes   String[]           @default(["authorization_code", "refresh_token"])
-  scopes       APIKeyPermission[] // Which permissions the app can request
-
-  // Application management
-  ownerId  String
-  Owner    User    @relation(fields: [ownerId], references: [id], onDelete: Cascade)
-  isActive Boolean @default(true)
-
-  // Relations
-  AuthorizationCodes OAuthAuthorizationCode[]
-  AccessTokens       OAuthAccessToken[]
-  RefreshTokens      OAuthRefreshToken[]
-
-  @@index([clientId])
-  @@index([ownerId])
-}
-
-// Temporary authorization codes (10 min TTL)
-model OAuthAuthorizationCode {
-  id        String   @id @default(uuid())
-  code      String   @unique
-  createdAt DateTime @default(now())
-  expiresAt DateTime // Now + 10 minutes
-
-  applicationId String
-  Application   OAuthApplication @relation(fields: [applicationId], references: [id], onDelete: Cascade)
-
-  userId String
-  User   User   @relation(fields: [userId], references: [id], onDelete: Cascade)
-
-  scopes      APIKeyPermission[]
-  redirectUri String // Must match one from application
-
-  // PKCE (Proof Key for Code Exchange) support
-  codeChallenge       String?
-  codeChallengeMethod String? // "S256" or "plain"
-
-  usedAt DateTime? // Set when code is consumed
-
-  @@index([code])
-  @@index([applicationId, userId])
-  @@index([expiresAt]) // For cleanup
-}
-
-// Access tokens (1 hour TTL)
-model OAuthAccessToken {
-  id        String   @id @default(uuid())
-  token     String   @unique // SHA256 hash of plaintext token
-  createdAt DateTime @default(now())
-  expiresAt DateTime // Now + 1 hour
-
-  applicationId String
-  Application   OAuthApplication @relation(fields: [applicationId], references: [id], onDelete: Cascade)
-
-  userId String
-  User   User   @relation(fields: [userId], references: [id], onDelete: Cascade)
-
-  scopes APIKeyPermission[]
-
-  revokedAt DateTime? // Set when token is revoked
-
-  @@index([token]) // For token lookup
-  @@index([userId, applicationId])
-  @@index([expiresAt]) // For cleanup
-}
-
-// Refresh tokens (30 days TTL)
-model OAuthRefreshToken {
-  id        String   @id @default(uuid())
-  token     String   @unique // SHA256 hash of plaintext token
-  createdAt DateTime @default(now())
-  expiresAt DateTime // Now + 30 days
-
-  applicationId String
-  Application   OAuthApplication @relation(fields: [applicationId], references: [id], onDelete: Cascade)
-
-  userId String
-  User   User   @relation(fields: [userId], references: [id], onDelete: Cascade)
-
-  scopes APIKeyPermission[]
-
-  revokedAt DateTime? // Set when token is revoked
-
-  @@index([token]) // For token lookup
-  @@index([userId, applicationId])
-  @@index([expiresAt]) // For cleanup
-}
-
-////////////////////////////////////////////////////////////
-////////////////////////////////////////////////////////////
-//////////////     NATIVE AUTH TABLES      //////////////////
-////////////////////////////////////////////////////////////
-////////////////////////////////////////////////////////////
-
-// Refresh tokens for native authentication (30 days TTL)
-model UserAuthRefreshToken {
-  id        String   @id @default(uuid())
-  tokenHash String   @unique // SHA256 hash of plaintext token
-  createdAt DateTime @default(now())
-  expiresAt DateTime // Now + 30 days
-
-  userId String
-  User   User   @relation(fields: [userId], references: [id], onDelete: Cascade)
-
-  revokedAt DateTime? // Set when token is revoked
-
-  @@index([tokenHash]) // For token lookup
-  @@index([userId])
-  @@index([expiresAt]) // For cleanup
-}
-
-// Magic links for email verification and password reset
-model UserAuthMagicLink {
-  id        String   @id @default(uuid())
-  tokenHash String   @unique // SHA256 hash of plaintext token
-  createdAt DateTime @default(now())
-  expiresAt DateTime
-
-  email   String
-  purpose String // "email_verification" or "password_reset"
-
-  // For password reset, track which user this is for
-  userId String?
-  User   User?   @relation(fields: [userId], references: [id], onDelete: Cascade)
-
-  usedAt DateTime? // Set when link is consumed (single-use)
-
-  @@index([tokenHash]) // For token lookup
-  @@index([email, purpose])
-  @@index([expiresAt]) // For cleanup
-}
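Both the removed OAuth token tables and the native-auth tables store only a SHA-256 hash of each bearer token (per the `// SHA256 hash of plaintext token` comments), so a leaked database row cannot be replayed directly. The sketch below illustrates that pattern under stated assumptions; the function names and the dict standing in for a database row are hypothetical, not the repository's actual helpers.

```python
import hashlib
import secrets
from datetime import datetime, timedelta, timezone

ACCESS_TOKEN_TTL = timedelta(hours=1)  # mirrors the "Now + 1 hour" comment above

def issue_access_token() -> tuple[str, dict]:
    # The plaintext token is returned to the client once; only its hash is persisted.
    plaintext = secrets.token_urlsafe(32)
    row = {
        "token": hashlib.sha256(plaintext.encode()).hexdigest(),
        "createdAt": datetime.now(timezone.utc),
        "expiresAt": datetime.now(timezone.utc) + ACCESS_TOKEN_TTL,
        "revokedAt": None,
    }
    return plaintext, row

def is_token_valid(presented: str, row: dict) -> bool:
    # Look the row up by hash, then check revocation and expiry.
    hashed = hashlib.sha256(presented.encode()).hexdigest()
    return (
        secrets.compare_digest(hashed, row["token"])
        and row["revokedAt"] is None
        and datetime.now(timezone.utc) < row["expiresAt"]
    )
```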
@@ -23,13 +23,13 @@ from typing import Any, Dict, List

from faker import Faker

-from backend.data.auth.api_key import create_api_key
+from backend.data.api_key import create_api_key
from backend.data.credit import get_user_credit_model
from backend.data.db import prisma
from backend.data.graph import Graph, Link, Node, create_graph
-from backend.data.user import get_or_create_user

# Import API functions from the backend
+from backend.data.user import get_or_create_user
from backend.server.v2.library.db import create_library_agent, create_preset
from backend.server.v2.library.model import LibraryAgentPresetCreatable
from backend.server.v2.store.db import create_store_submission, review_store_submission
@@ -464,7 +464,7 @@ class TestDataCreator:

        api_keys = []
        for user in self.users:
-            from backend.data.auth.api_key import APIKeyPermission
+            from backend.data.api_key import APIKeyPermission

            try:
                # Use the API function to create API key
@@ -1,798 +0,0 @@
# Migrating from Supabase Auth to Native FastAPI Auth

This guide covers the complete migration from Supabase Auth to native FastAPI authentication.

## Overview

The migration replaces Supabase Auth with a native FastAPI implementation while:
- Maintaining the same JWT format so existing sessions remain valid
- Keeping the frontend interface identical (no component changes needed)
- Supporting both password and Google OAuth authentication
- Providing admin impersonation and user management

## Prerequisites

- Access to the production database
- Postmark API credentials configured
- Google OAuth credentials (if using Google sign-in)
- Ability to deploy backend and frontend changes

---

## Phase 1: Backend Setup

### 1.1 Install Dependencies

```bash
cd autogpt_platform/backend
poetry add argon2-cffi
```

### 1.2 Run Database Migration

Create and apply the Prisma migration:

```bash
cd autogpt_platform/backend
poetry run prisma migrate dev --name add_native_auth
```

This adds:
- `passwordHash`, `authProvider`, `migratedFromSupabase`, `emailVerifiedAt` fields to `User`
- `UserAuthRefreshToken` table for session management
- `UserAuthMagicLink` table for email verification and password reset

### 1.3 Configure Environment Variables

Add to your `.env` file:

```bash
# Admin Configuration
ADMIN_EMAIL_DOMAINS=agpt.co,autogpt.com
ADMIN_EMAILS=specific-admin@example.com

# Google OAuth (if using)
GOOGLE_CLIENT_ID=your-google-client-id
GOOGLE_CLIENT_SECRET=your-google-client-secret
GOOGLE_REDIRECT_URI=https://your-domain.com/api/auth/oauth/google/callback

# Frontend URL for redirects
FRONTEND_BASE_URL=https://your-domain.com

# Postmark (should already be configured)
POSTMARK_SERVER_API_TOKEN=your-postmark-token
POSTMARK_SENDER_EMAIL=noreply@your-domain.com
```

### 1.4 Deploy Backend

Deploy the backend with the new auth endpoints. The new endpoints are:

| Endpoint | Method | Description |
|----------|--------|-------------|
| `/api/auth/signup` | POST | Register new user |
| `/api/auth/login` | POST | Login with email/password |
| `/api/auth/logout` | POST | Logout |
| `/api/auth/refresh` | POST | Refresh access token |
| `/api/auth/me` | GET | Get current user |
| `/api/auth/password/reset` | POST | Request password reset |
| `/api/auth/password/set` | POST | Set new password |
| `/api/auth/verify-email` | GET | Verify email from link |
| `/api/auth/oauth/google/authorize` | GET | Start Google OAuth |
| `/api/auth/oauth/google/callback` | GET | Google OAuth callback |

---

## Phase 2: User Migration

### 2.1 Check Migration Status

```bash
cd autogpt_platform/backend
poetry run python -m backend.data.auth.migration --status
```

This shows:
```
Migration Status:
----------------------------------------
Total users: 10000
Already using native auth: 0
OAuth users (Google): 1500
Migrated, pending password: 0
Not yet migrated: 8500
```

### 2.2 Generate Pre-Migration Report

```bash
poetry run python -m backend.data.auth.migration --report
```

This creates a CSV file with all users and their current status.

### 2.3 Dry Run

Test the migration without making changes:

```bash
poetry run python -m backend.data.auth.migration --dry-run --full-migration
```

### 2.4 Run Migration (Mark Users)

Mark all existing Supabase users as migrated:

```bash
poetry run python -m backend.data.auth.migration --mark-migrated --batch-size 500
```

### 2.5 Send Password Reset Emails

Send emails to users who need to set their password:

```bash
# Start with a small batch to verify emails work
poetry run python -m backend.data.auth.migration --send-emails --batch-size 10

# Then send to everyone
poetry run python -m backend.data.auth.migration --send-emails --batch-size 100 --email-delay 0.5
```

**Note:** OAuth users (Google) are automatically skipped - they continue using Google sign-in.

---

## Phase 3: Frontend Migration

The frontend uses a Supabase client abstraction layer. We need to replace the internals while keeping the interface identical.

### 3.1 Understanding the Architecture

The frontend has these Supabase-related files:

```
src/lib/supabase/
├── actions.ts               # Server actions (validateSession, logout, etc.)
├── middleware.ts            # Next.js middleware for session validation
├── helpers.ts               # Utility functions
├── server/
│   └── getServerSupabase.ts # Server-side Supabase client
└── hooks/
    ├── helpers.ts           # Client-side helpers
    ├── useSupabase.ts       # Main auth hook
    └── useSupabaseStore.ts  # Zustand store for auth state
```

### 3.2 Option A: Gradual Migration (Recommended)

Keep Supabase running during migration and gradually switch endpoints.

#### Step 1: Create Native Auth Client

The native auth client is already created at `src/lib/auth/native-auth.ts`. It provides:

```typescript
// Client-side functions
getAccessToken()           // Get token from cookie
isAuthenticated()          // Check if user is authenticated
getCurrentUserFromToken()  // Parse user from JWT

// Server-side functions (for server actions)
serverLogin(email, password)
serverSignup(email, password)
serverLogout(scope)
serverRefreshToken()
serverGetCurrentUser()
serverRequestPasswordReset(email)
serverSetPassword(token, password)
serverGetGoogleAuthUrl(redirectTo)
```

#### Step 2: Update Login Action

Edit `src/app/(platform)/login/actions.ts`:

```typescript
"use server";

import { serverLogin } from "@/lib/auth/native-auth";
import { loginFormSchema } from "@/types/auth";
import * as Sentry from "@sentry/nextjs";
import BackendAPI from "@/lib/autogpt-server-api";
import { shouldShowOnboarding } from "../../api/helpers";

export async function login(email: string, password: string) {
  try {
    const parsed = loginFormSchema.safeParse({ email, password });

    if (!parsed.success) {
      return {
        success: false,
        error: "Invalid email or password",
      };
    }

    const result = await serverLogin(parsed.data.email, parsed.data.password);

    if (!result.success) {
      return {
        success: false,
        error: result.error || "Login failed",
      };
    }

    // Create user in backend if needed
    const api = new BackendAPI();
    await api.createUser();

    const onboarding = await shouldShowOnboarding();

    return {
      success: true,
      onboarding,
    };
  } catch (err) {
    Sentry.captureException(err);
    return {
      success: false,
      error: "Failed to login. Please try again.",
    };
  }
}
```

#### Step 3: Update Signup Action

Edit `src/app/(platform)/signup/actions.ts`:

```typescript
"use server";

import { serverSignup } from "@/lib/auth/native-auth";
import { signupFormSchema } from "@/types/auth";
import * as Sentry from "@sentry/nextjs";

export async function signup(
  email: string,
  password: string,
  confirmPassword: string,
  agreeToTerms: boolean,
) {
  try {
    const parsed = signupFormSchema.safeParse({
      email,
      password,
      confirmPassword,
      agreeToTerms,
    });

    if (!parsed.success) {
      return {
        success: false,
        error: "Invalid signup payload",
      };
    }

    const result = await serverSignup(parsed.data.email, parsed.data.password);

    if (!result.success) {
      if (result.error === "Email already registered") {
        return { success: false, error: "user_already_exists" };
      }
      return {
        success: false,
        error: result.error || "Signup failed",
      };
    }

    // User needs to verify email before logging in
    return {
      success: true,
      message: result.message,
      requiresVerification: true,
    };
  } catch (err) {
    Sentry.captureException(err);
    return {
      success: false,
      error: "Failed to sign up. Please try again.",
    };
  }
}
```

#### Step 4: Update Server Actions

Edit `src/lib/supabase/actions.ts`:

```typescript
"use server";

import * as Sentry from "@sentry/nextjs";
import { revalidatePath } from "next/cache";
import { cookies } from "next/headers";
import { getRedirectPath } from "./helpers";
import {
  serverGetCurrentUser,
  serverLogout as nativeLogout,
  serverRefreshToken,
} from "@/lib/auth/native-auth";

// User type compatible with existing code
interface User {
  id: string;
  email: string;
  role?: string;
}

export interface SessionValidationResult {
  user: User | null;
  isValid: boolean;
  redirectPath?: string;
}

export async function validateSession(
  currentPath: string,
): Promise<SessionValidationResult> {
  return await Sentry.withServerActionInstrumentation(
    "validateSession",
    {},
    async () => {
      try {
        const { user, error } = await serverGetCurrentUser();

        if (error || !user) {
          const redirectPath = getRedirectPath(currentPath);
          return {
            user: null,
            isValid: false,
            redirectPath: redirectPath || undefined,
          };
        }

        return {
          user: {
            id: user.id,
            email: user.email,
            role: user.role,
          },
          isValid: true,
        };
      } catch (error) {
        console.error("Session validation error:", error);
        const redirectPath = getRedirectPath(currentPath);
        return {
          user: null,
          isValid: false,
          redirectPath: redirectPath || undefined,
        };
      }
    },
  );
}

export async function getCurrentUser(): Promise<{
  user: User | null;
  error?: string;
}> {
  return await Sentry.withServerActionInstrumentation(
    "getCurrentUser",
    {},
    async () => {
      try {
        const { user, error } = await serverGetCurrentUser();

        if (error) {
          return { user: null, error };
        }

        if (!user) {
          return { user: null };
        }

        return {
          user: {
            id: user.id,
            email: user.email,
            role: user.role,
          },
        };
      } catch (error) {
        console.error("Get current user error:", error);
        return {
          user: null,
          error: error instanceof Error ? error.message : "Unknown error",
        };
      }
    },
  );
}

export async function getWebSocketToken(): Promise<{
  token: string | null;
  error?: string;
}> {
  return await Sentry.withServerActionInstrumentation(
    "getWebSocketToken",
    {},
    async () => {
      try {
        // Get access token from cookie
        const cookieStore = await cookies();
        const token = cookieStore.get("access_token")?.value;
        return { token: token || null };
      } catch (error) {
        console.error("Get WebSocket token error:", error);
        return {
          token: null,
          error: error instanceof Error ? error.message : "Unknown error",
        };
      }
    },
  );
}

export type ServerLogoutOptions = {
  globalLogout?: boolean;
};

export async function serverLogout(options: ServerLogoutOptions = {}) {
  return await Sentry.withServerActionInstrumentation(
    "serverLogout",
    {},
    async () => {
      try {
        const scope = options.globalLogout ? "global" : "local";
        const result = await nativeLogout(scope);

        revalidatePath("/");

        if (!result.success) {
          console.error("Error logging out:", result.error);
          return { success: false, error: result.error };
        }

        revalidatePath("/", "layout");
        return { success: true };
      } catch (error) {
        console.error("Logout error:", error);
        return {
          success: false,
          error: error instanceof Error ? error.message : "Unknown error",
        };
      }
    },
  );
}

export async function refreshSession() {
  return await Sentry.withServerActionInstrumentation(
    "refreshSession",
    {},
    async () => {
      try {
        const result = await serverRefreshToken();

        if (!result.success || !result.user) {
          return {
            user: null,
            error: result.error,
          };
        }

        revalidatePath("/", "layout");

        return {
          user: {
            id: result.user.id,
            email: result.user.email,
            role: result.user.role,
          },
        };
      } catch (error) {
        console.error("Refresh session error:", error);
        return {
          user: null,
          error: error instanceof Error ? error.message : "Unknown error",
        };
      }
    },
  );
}
```

#### Step 5: Update Middleware

Edit `src/lib/supabase/middleware.ts`:

```typescript
import { NextResponse, type NextRequest } from "next/server";
import { isAdminPage, isProtectedPage } from "./helpers";

export async function updateSession(request: NextRequest) {
  let response = NextResponse.next({ request });

  const accessToken = request.cookies.get("access_token")?.value;

  // Parse JWT to get user info (without verification - backend will verify)
  let user = null;
  let userRole = null;

  if (accessToken) {
    try {
      const payload = JSON.parse(
        Buffer.from(accessToken.split(".")[1], "base64").toString(),
      );

      // Check if token is expired
      if (payload.exp && Date.now() / 1000 < payload.exp) {
        user = { id: payload.sub, email: payload.email };
        userRole = payload.role;
      }
    } catch (e) {
      // Invalid token format
      console.error("Failed to parse access token:", e);
    }
  }

  const url = request.nextUrl.clone();
  const pathname = request.nextUrl.pathname;

  // AUTH REDIRECTS
  // 1. Check if user is not authenticated but trying to access protected content
  if (!user) {
    const attemptingProtectedPage = isProtectedPage(pathname);
    const attemptingAdminPage = isAdminPage(pathname);

    if (attemptingProtectedPage || attemptingAdminPage) {
      const currentDest = url.pathname + url.search;
      url.pathname = "/login";
      url.search = `?next=${encodeURIComponent(currentDest)}`;
      return NextResponse.redirect(url);
    }
  }

  // 2. Check if user is authenticated but lacks admin role when accessing admin pages
  if (user && userRole !== "admin" && isAdminPage(pathname)) {
    url.pathname = "/marketplace";
    return NextResponse.redirect(url);
  }

  return response;
}
```

#### Step 6: Update OAuth Callback

Edit `src/app/(platform)/auth/callback/route.ts`:

```typescript
import BackendAPI from "@/lib/autogpt-server-api";
import { NextResponse } from "next/server";
import { revalidatePath } from "next/cache";
import { shouldShowOnboarding } from "@/app/api/helpers";

// This route now just handles the redirect after OAuth
// The actual OAuth callback is handled by the backend at /api/auth/oauth/google/callback
export async function GET(request: Request) {
  const { searchParams, origin } = new URL(request.url);

  // Check if user is now authenticated (cookie should be set by backend)
  const cookies = request.headers.get("cookie") || "";
  const hasAccessToken = cookies.includes("access_token=");

  if (!hasAccessToken) {
    return NextResponse.redirect(`${origin}/auth/auth-code-error`);
  }

  let next = "/marketplace";

  try {
    const api = new BackendAPI();
    await api.createUser();

    if (await shouldShowOnboarding()) {
      next = "/onboarding";
      revalidatePath("/onboarding", "layout");
    } else {
      revalidatePath("/", "layout");
    }
  } catch (createUserError) {
    console.error("Error creating user:", createUserError);
    return NextResponse.redirect(`${origin}/error?message=user-creation-failed`);
  }

  // Get redirect destination from 'next' query parameter
  next = searchParams.get("next") || next;

  const forwardedHost = request.headers.get("x-forwarded-host");
  const isLocalEnv = process.env.NODE_ENV === "development";

  if (isLocalEnv) {
    return NextResponse.redirect(`${origin}${next}`);
  } else if (forwardedHost) {
    return NextResponse.redirect(`https://${forwardedHost}${next}`);
  } else {
    return NextResponse.redirect(`${origin}${next}`);
  }
}
```

#### Step 7: Update Google OAuth Provider Route

Edit `src/app/api/auth/provider/route.ts`:

```typescript
import { NextResponse } from "next/server";
import { serverGetGoogleAuthUrl } from "@/lib/auth/native-auth";

export async function POST(request: Request) {
  try {
    const body = await request.json();
    const { provider, redirectTo } = body;

    if (provider !== "google") {
      return NextResponse.json(
        { error: "Unsupported provider" },
        { status: 400 },
      );
    }

    const result = await serverGetGoogleAuthUrl(redirectTo || "/marketplace");

    if (result.error) {
      return NextResponse.json(
        { error: result.error },
        { status: 500 },
      );
    }

    return NextResponse.json({ url: result.url });
  } catch (error) {
    console.error("OAuth provider error:", error);
    return NextResponse.json(
      { error: "Failed to initialize OAuth" },
      { status: 500 },
    );
  }
}
```

### 3.3 Option B: Big Bang Migration

Replace all Supabase references at once. Higher risk but faster.

1. Apply all the changes from Option A simultaneously
2. Remove `@supabase/ssr` and `@supabase/supabase-js` dependencies
3. Delete old Supabase configuration

---

## Phase 4: Cutover

### 4.1 Pre-Cutover Checklist

- [ ] Backend deployed with new auth endpoints
- [ ] Database migration applied
- [ ] Environment variables configured
- [ ] Postmark email templates verified
- [ ] Google OAuth redirect URIs updated
- [ ] Frontend changes tested in staging

### 4.2 Cutover Steps

1. **Deploy frontend changes**
2. **Verify login/signup works**
3. **Verify Google OAuth works**
4. **Verify password reset works**
5. **Run user migration script** (if not already done)

### 4.3 Rollback Plan

If issues occur:

1. Revert frontend to use Supabase client
2. Supabase Auth remains functional (keep it running for 30 days)
3. Users can still login via Supabase during rollback

---

## Phase 5: Cleanup (After 30 Days)

Once migration is stable:

1. **Remove Supabase dependencies from frontend**
   ```bash
   cd autogpt_platform/frontend
   pnpm remove @supabase/ssr @supabase/supabase-js
   ```

2. **Remove Supabase environment variables**
   - `NEXT_PUBLIC_SUPABASE_URL`
   - `NEXT_PUBLIC_SUPABASE_ANON_KEY`
   - `SUPABASE_URL`
   - `SUPABASE_JWT_SECRET` (keep if using same key)

3. **Delete old Supabase files**
   - `src/lib/supabase/server/getServerSupabase.ts`
   - Any remaining Supabase-specific code

4. **Cancel Supabase subscription** (if applicable)

---

## Troubleshooting

### Users Can't Login

1. Check if user is marked as migrated: `migratedFromSupabase = true`
2. Check if password reset email was sent
3. Verify Postmark is configured correctly

### OAuth Not Working

1. Verify Google OAuth credentials in environment
2. Check redirect URI matches exactly
3. Look for errors in backend logs

### Token Issues

1. Ensure `JWT_VERIFY_KEY` matches the old `SUPABASE_JWT_SECRET`
2. Check token expiration
3. Verify audience claim is "authenticated"

### Admin Access Issues

1. Verify email is in `ADMIN_EMAIL_DOMAINS` or `ADMIN_EMAILS`
2. Check JWT role claim is "admin"
3. User may need to re-login to get new token with updated role

---

## API Reference

### Authentication Endpoints

| Endpoint | Method | Auth | Description |
|----------|--------|------|-------------|
| `/api/auth/signup` | POST | - | Register new user |
| `/api/auth/login` | POST | - | Login, returns tokens |
| `/api/auth/logout` | POST | Token | Logout |
| `/api/auth/refresh` | POST | Cookie | Refresh access token |
| `/api/auth/me` | GET | Token | Get current user |
| `/api/auth/password/reset` | POST | - | Request reset email |
| `/api/auth/password/set` | POST | - | Set new password |
| `/api/auth/verify-email` | GET | - | Verify email |
| `/api/auth/oauth/google/authorize` | GET | - | Get Google OAuth URL |
| `/api/auth/oauth/google/callback` | GET | - | OAuth callback |

### Admin Endpoints

| Endpoint | Method | Auth | Description |
|----------|--------|------|-------------|
| `/api/auth/admin/users` | GET | Admin | List users |
| `/api/auth/admin/users/{id}` | GET | Admin | Get user details |
| `/api/auth/admin/users/{id}/impersonate` | POST | Admin | Get impersonation token |
| `/api/auth/admin/users/{id}/force-password-reset` | POST | Admin | Force password reset |
| `/api/auth/admin/users/{id}/revoke-sessions` | POST | Admin | Revoke all sessions |

### Cookie Structure

| Cookie | HttpOnly | Path | Purpose |
|--------|----------|------|---------|
| `access_token` | No | `/` | JWT for API auth |
| `refresh_token` | Yes | `/api/auth/refresh` | Session refresh |

### JWT Claims

```json
{
  "sub": "user-uuid",
  "email": "user@example.com",
  "role": "authenticated",
  "aud": "authenticated",
  "iat": 1234567890,
  "exp": 1234571490
}
```
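The Troubleshooting section of this guide says to check that the audience claim is "authenticated" and that `JWT_VERIFY_KEY` matches the old `SUPABASE_JWT_SECRET`. The snippet below is a minimal, hedged sketch of that check using the PyJWT package; it assumes an HS256 shared-secret key as described above, is for illustration only, and is not code from this repository.

```python
import os

import jwt  # PyJWT

def decode_access_token(token: str) -> dict:
    """Decode and validate a session JWT the way the guide describes.

    Raises a jwt.InvalidTokenError subclass (ExpiredSignatureError,
    InvalidAudienceError, ...) if the token is not acceptable.
    """
    return jwt.decode(
        token,
        key=os.environ["JWT_VERIFY_KEY"],  # must match the old SUPABASE_JWT_SECRET
        algorithms=["HS256"],              # assumed signing algorithm for the shared secret
        audience="authenticated",          # the "aud" claim shown above
    )

# Example: admin routes additionally expect the "role" claim to be "admin".
# claims = decode_access_token(raw_token)
# is_admin = claims.get("role") == "admin"
```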
@@ -3,14 +3,6 @@ import { withSentryConfig } from "@sentry/nextjs";
/** @type {import('next').NextConfig} */
const nextConfig = {
  productionBrowserSourceMaps: true,
-  experimental: {
-    serverActions: {
-      bodySizeLimit: "256mb",
-    },
-    // Increase body size limit for API routes (file uploads) - 256MB to match backend limit
-    proxyClientMaxBodySize: "256mb",
-    middlewareClientMaxBodySize: "256mb",
-  },
  images: {
    domains: [
      // We dont need to maintain alphabetical order here
@@ -137,8 +137,9 @@
    "concurrently": "9.2.1",
    "cross-env": "10.1.0",
    "eslint": "8.57.1",
-    "eslint-config-next": "15.5.7",
+    "eslint-config-next": "15.5.2",
    "eslint-plugin-storybook": "9.1.5",
+    "import-in-the-middle": "1.14.2",
    "msw": "2.11.6",
    "msw-storybook-addon": "2.0.6",
    "orval": "7.13.0",

282 changes: autogpt_platform/frontend/pnpm-lock.yaml (generated)
@@ -331,11 +331,14 @@ importers:
        specifier: 8.57.1
        version: 8.57.1
      eslint-config-next:
-        specifier: 15.5.7
-        version: 15.5.7(eslint@8.57.1)(typescript@5.9.3)
+        specifier: 15.5.2
+        version: 15.5.2(eslint@8.57.1)(typescript@5.9.3)
      eslint-plugin-storybook:
        specifier: 9.1.5
        version: 9.1.5(eslint@8.57.1)(storybook@9.1.5(@testing-library/dom@10.4.1)(msw@2.11.6(@types/node@24.10.0)(typescript@5.9.3))(prettier@3.6.2))(typescript@5.9.3)
+      import-in-the-middle:
+        specifier: 1.14.2
+        version: 1.14.2
      msw:
        specifier: 2.11.6
        version: 2.11.6(@types/node@24.10.0)(typescript@5.9.3)
@@ -983,15 +986,12 @@ packages:
  '@date-fns/tz@1.4.1':
    resolution: {integrity: sha512-P5LUNhtbj6YfI3iJjw5EL9eUAG6OitD0W3fWQcpQjDRc/QIsL0tRNuO1PcDvPccWL1fSTXXdE1ds+l95DV/OFA==}

-  '@emnapi/core@1.7.1':
-    resolution: {integrity: sha512-o1uhUASyo921r2XtHYOHy7gdkGLge8ghBEQHMWmyJFoXlpU58kIrhhN3w26lpQb6dspetweapMn2CSNwQ8I4wg==}
+  '@emnapi/core@1.5.0':
+    resolution: {integrity: sha512-sbP8GzB1WDzacS8fgNPpHlp6C9VZe+SJP3F90W9rLemaQj2PzIuTEl1qDOYQf58YIpyjViI24y9aPWCjEzY2cg==}

  '@emnapi/runtime@1.5.0':
    resolution: {integrity: sha512-97/BJ3iXHww3djw6hYIfErCZFee7qCtrneuLa20UXFCOTCfBM2cvQHjWJ2EG0s0MtdNwInarqCTz35i4wWXHsQ==}

-  '@emnapi/runtime@1.7.1':
-    resolution: {integrity: sha512-PVtJr5CmLwYAU9PZDMITZoR5iAOShYREoR45EyyLrbntV50mdePTgUn4AmOw90Ifcj+x2kRjdzr1HP3RrNiHGA==}
-
  '@emnapi/wasi-threads@1.1.0':
    resolution: {integrity: sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ==}
@@ -1329,10 +1329,6 @@ packages:
    resolution: {integrity: sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==}
    engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0}

-  '@eslint-community/regexpp@4.12.2':
-    resolution: {integrity: sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==}
-    engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0}
-
  '@eslint/eslintrc@2.1.4':
    resolution: {integrity: sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==}
    engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
@@ -1609,8 +1605,8 @@ packages:
  '@next/env@15.4.10':
    resolution: {integrity: sha512-knhmoJ0Vv7VRf6pZEPSnciUG1S4bIhWx+qTYBW/AjxEtlzsiNORPk8sFDCEvqLfmKuey56UB9FL1UdHEV3uBrg==}

-  '@next/eslint-plugin-next@15.5.7':
-    resolution: {integrity: sha512-DtRU2N7BkGr8r+pExfuWHwMEPX5SD57FeA6pxdgCHODo+b/UgIgjE+rgWKtJAbEbGhVZ2jtHn4g3wNhWFoNBQQ==}
+  '@next/eslint-plugin-next@15.5.2':
+    resolution: {integrity: sha512-lkLrRVxcftuOsJNhWatf1P2hNVfh98k/omQHrCEPPriUypR6RcS13IvLdIrEvkm9AH2Nu2YpR5vLqBuy6twH3Q==}

  '@next/swc-darwin-arm64@15.4.8':
    resolution: {integrity: sha512-Pf6zXp7yyQEn7sqMxur6+kYcywx5up1J849psyET7/8pG2gQTVMjU3NzgIt8SeEP5to3If/SaWmaA6H6ysBr1A==}
@@ -2626,8 +2622,8 @@ packages:
  '@rtsao/scc@1.1.0':
    resolution: {integrity: sha512-zt6OdqaDoOnJ1ZYsCYGt9YmWzDXl4vQdKTyJev62gFhRGKdx7mcT54V9KIjg+d2wi9EXsPvAPKe7i7WjfVWB8g==}

-  '@rushstack/eslint-patch@1.15.0':
-    resolution: {integrity: sha512-ojSshQPKwVvSMR8yT2L/QtUkV5SXi/IfDiJ4/8d6UbTPjiHVmxZzUAzGD8Tzks1b9+qQkZa0isUOvYObedITaw==}
+  '@rushstack/eslint-patch@1.12.0':
+    resolution: {integrity: sha512-5EwMtOqvJMMa3HbmxLlF74e+3/HhwBTMcvt3nqVJgGCozO6hzIPOBlwm8mGVNR9SN2IJpxSnlxczyDjcn7qIyw==}

  '@scarf/scarf@1.4.0':
    resolution: {integrity: sha512-xxeapPiUXdZAE3che6f3xogoJPeZgig6omHEy1rIY5WVsB3H2BHNnZH+gHG6x91SCWyQCzWGsuL2Hh3ClO5/qQ==}
@@ -3101,8 +3097,8 @@ packages:
    peerDependencies:
      '@testing-library/dom': '>=7.21.4'

-  '@tybys/wasm-util@0.10.1':
-    resolution: {integrity: sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==}
+  '@tybys/wasm-util@0.10.0':
+    resolution: {integrity: sha512-VyyPYFlOMNylG45GoAe0xDoLwWuowvf92F9kySqzYh8vmYm7D2u4iUJKa1tOUpS70Ku13ASrOkS4ScXFsTaCNQ==}

  '@types/aria-query@5.0.4':
    resolution: {integrity: sha512-rfT93uj5s0PRL7EzccGMs3brplhcrghnDoV26NqKhCAS1hVo+WdNsPvE/yb6ilfr5hi2MEk6d5EWJTKdxg8jVw==}
@@ -3292,16 +3288,16 @@ packages:
  '@types/ws@8.18.1':
    resolution: {integrity: sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==}

-  '@typescript-eslint/eslint-plugin@8.48.1':
-    resolution: {integrity: sha512-X63hI1bxl5ohelzr0LY5coufyl0LJNthld+abwxpCoo6Gq+hSqhKwci7MUWkXo67mzgUK6YFByhmaHmUcuBJmA==}
+  '@typescript-eslint/eslint-plugin@8.43.0':
+    resolution: {integrity: sha512-8tg+gt7ENL7KewsKMKDHXR1vm8tt9eMxjJBYINf6swonlWgkYn5NwyIgXpbbDxTNU5DgpDFfj95prcTq2clIQQ==}
    engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
    peerDependencies:
-      '@typescript-eslint/parser': ^8.48.1
+      '@typescript-eslint/parser': ^8.43.0
      eslint: ^8.57.0 || ^9.0.0
      typescript: '>=4.8.4 <6.0.0'

-  '@typescript-eslint/parser@8.48.1':
-    resolution: {integrity: sha512-PC0PDZfJg8sP7cmKe6L3QIL8GZwU5aRvUFedqSIpw3B+QjRSUZeeITC2M5XKeMXEzL6wccN196iy3JLwKNvDVA==}
+  '@typescript-eslint/parser@8.43.0':
+    resolution: {integrity: sha512-B7RIQiTsCBBmY+yW4+ILd6mF5h1FUwJsVvpqkrgpszYifetQ2Ke+Z4u6aZh0CblkUGIdR59iYVyXqqZGkZ3aBw==}
    engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
    peerDependencies:
      eslint: ^8.57.0 || ^9.0.0
@@ -3319,12 +3315,6 @@ packages:
    peerDependencies:
      typescript: '>=4.8.4 <6.0.0'

-  '@typescript-eslint/project-service@8.48.1':
-    resolution: {integrity: sha512-HQWSicah4s9z2/HifRPQ6b6R7G+SBx64JlFQpgSSHWPKdvCZX57XCbszg/bapbRsOEv42q5tayTYcEFpACcX1w==}
-    engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
-    peerDependencies:
-      typescript: '>=4.8.4 <6.0.0'
-
  '@typescript-eslint/scope-manager@8.43.0':
    resolution: {integrity: sha512-daSWlQ87ZhsjrbMLvpuuMAt3y4ba57AuvadcR7f3nl8eS3BjRc8L9VLxFLk92RL5xdXOg6IQ+qKjjqNEimGuAg==}
    engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
@@ -3333,10 +3323,6 @@ packages:
    resolution: {integrity: sha512-LF4b/NmGvdWEHD2H4MsHD8ny6JpiVNDzrSZr3CsckEgCbAGZbYM4Cqxvi9L+WqDMT+51Ozy7lt2M+d0JLEuBqA==}
    engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}

-  '@typescript-eslint/scope-manager@8.48.1':
-    resolution: {integrity: sha512-rj4vWQsytQbLxC5Bf4XwZ0/CKd362DkWMUkviT7DCS057SK64D5lH74sSGzhI6PDD2HCEq02xAP9cX68dYyg1w==}
-    engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
-
  '@typescript-eslint/tsconfig-utils@8.43.0':
    resolution: {integrity: sha512-ALC2prjZcj2YqqL5X/bwWQmHA2em6/94GcbB/KKu5SX3EBDOsqztmmX1kMkvAJHzxk7TazKzJfFiEIagNV3qEA==}
    engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
@@ -3349,14 +3335,8 @@ packages:
    peerDependencies:
      typescript: '>=4.8.4 <6.0.0'

-  '@typescript-eslint/tsconfig-utils@8.48.1':
-    resolution: {integrity: sha512-k0Jhs4CpEffIBm6wPaCXBAD7jxBtrHjrSgtfCjUvPp9AZ78lXKdTR8fxyZO5y4vWNlOvYXRtngSZNSn+H53Jkw==}
-    engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
-    peerDependencies:
-      typescript: '>=4.8.4 <6.0.0'
-
-  '@typescript-eslint/type-utils@8.48.1':
-    resolution: {integrity: sha512-1jEop81a3LrJQLTf/1VfPQdhIY4PlGDBc/i67EVWObrtvcziysbLN3oReexHOM6N3jyXgCrkBsZpqwH0hiDOQg==}
+  '@typescript-eslint/type-utils@8.43.0':
+    resolution: {integrity: sha512-qaH1uLBpBuBBuRf8c1mLJ6swOfzCXryhKND04Igr4pckzSEW9JX5Aw9AgW00kwfjWJF0kk0ps9ExKTfvXfw4Qg==}
    engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
    peerDependencies:
      eslint: ^8.57.0 || ^9.0.0
@@ -3370,10 +3350,6 @@ packages:
    resolution: {integrity: sha512-lNCWCbq7rpg7qDsQrd3D6NyWYu+gkTENkG5IKYhUIcxSb59SQC/hEQ+MrG4sTgBVghTonNWq42bA/d4yYumldQ==}
    engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}

-  '@typescript-eslint/types@8.48.1':
-    resolution: {integrity: sha512-+fZ3LZNeiELGmimrujsDCT4CRIbq5oXdHe7chLiW8qzqyPMnn1puNstCrMNVAqwcl2FdIxkuJ4tOs/RFDBVc/Q==}
-    engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
-
  '@typescript-eslint/typescript-estree@8.43.0':
    resolution: {integrity: sha512-7Vv6zlAhPb+cvEpP06WXXy/ZByph9iL6BQRBDj4kmBsW98AqEeQHlj/13X+sZOrKSo9/rNKH4Ul4f6EICREFdw==}
    engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
@@ -3386,12 +3362,6 @@ packages:
    peerDependencies:
      typescript: '>=4.8.4 <6.0.0'

-  '@typescript-eslint/typescript-estree@8.48.1':
-    resolution: {integrity: sha512-/9wQ4PqaefTK6POVTjJaYS0bynCgzh6ClJHGSBj06XEHjkfylzB+A3qvyaXnErEZSaxhIo4YdyBgq6j4RysxDg==}
-    engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
-    peerDependencies:
-      typescript: '>=4.8.4 <6.0.0'
-
  '@typescript-eslint/utils@8.43.0':
    resolution: {integrity: sha512-S1/tEmkUeeswxd0GGcnwuVQPFWo8NzZTOMxCvw8BX7OMxnNae+i8Tm7REQen/SwUIPoPqfKn7EaZ+YLpiB3k9g==}
    engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
@@ -3406,13 +3376,6 @@ packages:
      eslint: ^8.57.0 || ^9.0.0
      typescript: '>=4.8.4 <6.0.0'

-  '@typescript-eslint/utils@8.48.1':
-    resolution: {integrity: sha512-fAnhLrDjiVfey5wwFRwrweyRlCmdz5ZxXz2G/4cLn0YDLjTapmN4gcCsTBR1N2rWnZSDeWpYtgLDsJt+FpmcwA==}
-    engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
-    peerDependencies:
-      eslint: ^8.57.0 || ^9.0.0
-      typescript: '>=4.8.4 <6.0.0'
-
  '@typescript-eslint/visitor-keys@8.43.0':
    resolution: {integrity: sha512-T+S1KqRD4sg/bHfLwrpF/K3gQLBM1n7Rp7OjjikjTEssI2YJzQpi5WXoynOaQ93ERIuq3O8RBTOUYDKszUCEHw==}
    engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
@@ -3421,10 +3384,6 @@ packages:
    resolution: {integrity: sha512-tUFMXI4gxzzMXt4xpGJEsBsTox0XbNQ1y94EwlD/CuZwFcQP79xfQqMhau9HsRc/J0cAPA/HZt1dZPtGn9V/7w==}
    engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}

-  '@typescript-eslint/visitor-keys@8.48.1':
-    resolution: {integrity: sha512-BmxxndzEWhE4TIEEMBs8lP3MBWN3jFPs/p6gPm/wkv02o41hI6cq9AuSmGAaTTHPtA1FTi2jBre4A9rm5ZmX+Q==}
-    engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
-
  '@ungap/structured-clone@1.3.0':
    resolution: {integrity: sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==}
@@ -4626,8 +4585,8 @@ packages:
    resolution: {integrity: sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==}
    engines: {node: '>=12'}

-  eslint-config-next@15.5.7:
-    resolution: {integrity: sha512-nU/TRGHHeG81NeLW5DeQT5t6BDUqbpsNQTvef1ld/tqHT+/zTx60/TIhKnmPISTTe++DVo+DLxDmk4rnwHaZVw==}
+  eslint-config-next@15.5.2:
+    resolution: {integrity: sha512-3hPZghsLupMxxZ2ggjIIrat/bPniM2yRpsVPVM40rp8ZMzKWOJp2CGWn7+EzoV2ddkUr5fxNfHpF+wU1hGt/3g==}
    peerDependencies:
      eslint: ^7.23.0 || ^8.0.0 || ^9.0.0
      typescript: '>=3.3.1'
@@ -4959,10 +4918,6 @@ packages:
    peerDependencies:
      next: '>=13.2.0'

  generator-function@2.0.1:
|
|
||||||
resolution: {integrity: sha512-SFdFmIJi+ybC0vjlHN0ZGVGHc3lgE0DxPAT0djjVg+kjOnSqclqmj0KQ7ykTOLP6YxoqOvuAODGdcHJn+43q3g==}
|
|
||||||
engines: {node: '>= 0.4'}
|
|
||||||
|
|
||||||
gensync@1.0.0-beta.2:
|
gensync@1.0.0-beta.2:
|
||||||
resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==}
|
resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==}
|
||||||
engines: {node: '>=6.9.0'}
|
engines: {node: '>=6.9.0'}
|
||||||
@@ -4991,8 +4946,8 @@ packages:
|
|||||||
resolution: {integrity: sha512-w9UMqWwJxHNOvoNzSJ2oPF5wvYcvP7jUvYzhp67yEhTi17ZDBBC1z9pTdGuzjD+EFIqLSYRweZjqfiPzQ06Ebg==}
|
resolution: {integrity: sha512-w9UMqWwJxHNOvoNzSJ2oPF5wvYcvP7jUvYzhp67yEhTi17ZDBBC1z9pTdGuzjD+EFIqLSYRweZjqfiPzQ06Ebg==}
|
||||||
engines: {node: '>= 0.4'}
|
engines: {node: '>= 0.4'}
|
||||||
|
|
||||||
get-tsconfig@4.13.0:
|
get-tsconfig@4.10.1:
|
||||||
resolution: {integrity: sha512-1VKTZJCwBrvbd+Wn3AOgQP/2Av+TfTCOlE4AcRJE72W1ksZXbAx8PPBR9RzgTeSPzlPMHrbANMH3LbltH73wxQ==}
|
resolution: {integrity: sha512-auHyJ4AgMz7vgS8Hp3N6HXSmlMdUyhSUrfBF16w153rxtLIEOE+HGqaBppczZvnHLqQJfiHotCYpNhl0lUROFQ==}
|
||||||
|
|
||||||
github-slugger@2.0.0:
|
github-slugger@2.0.0:
|
||||||
resolution: {integrity: sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw==}
|
resolution: {integrity: sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw==}
|
||||||
@@ -5213,6 +5168,9 @@ packages:
|
|||||||
resolution: {integrity: sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==}
|
resolution: {integrity: sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==}
|
||||||
engines: {node: '>=6'}
|
engines: {node: '>=6'}
|
||||||
|
|
||||||
|
import-in-the-middle@1.14.2:
|
||||||
|
resolution: {integrity: sha512-5tCuY9BV8ujfOpwtAGgsTx9CGUapcFMEEyByLv1B+v2+6DhAcw+Zr0nhQT7uwaZ7DiourxFEscghOR8e1aPLQw==}
|
||||||
|
|
||||||
import-in-the-middle@2.0.0:
|
import-in-the-middle@2.0.0:
|
||||||
resolution: {integrity: sha512-yNZhyQYqXpkT0AKq3F3KLasUSK4fHvebNH5hOsKQw2dhGSALvQ4U0BqUc5suziKvydO5u5hgN2hy1RJaho8U5A==}
|
resolution: {integrity: sha512-yNZhyQYqXpkT0AKq3F3KLasUSK4fHvebNH5hOsKQw2dhGSALvQ4U0BqUc5suziKvydO5u5hgN2hy1RJaho8U5A==}
|
||||||
|
|
||||||
@@ -5324,10 +5282,6 @@ packages:
|
|||||||
resolution: {integrity: sha512-nPUB5km40q9e8UfN/Zc24eLlzdSf9OfKByBw9CIdw4H1giPMeA0OIJvbchsCu4npfI2QcMVBsGEBHKZ7wLTWmQ==}
|
resolution: {integrity: sha512-nPUB5km40q9e8UfN/Zc24eLlzdSf9OfKByBw9CIdw4H1giPMeA0OIJvbchsCu4npfI2QcMVBsGEBHKZ7wLTWmQ==}
|
||||||
engines: {node: '>= 0.4'}
|
engines: {node: '>= 0.4'}
|
||||||
|
|
||||||
is-generator-function@1.1.2:
|
|
||||||
resolution: {integrity: sha512-upqt1SkGkODW9tsGNG5mtXTXtECizwtS2kA161M+gJPc1xdb/Ax629af6YrTwcOeQHbewrPNlE5Dx7kzvXTizA==}
|
|
||||||
engines: {node: '>= 0.4'}
|
|
||||||
|
|
||||||
is-glob@4.0.3:
|
is-glob@4.0.3:
|
||||||
resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==}
|
resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==}
|
||||||
engines: {node: '>=0.10.0'}
|
engines: {node: '>=0.10.0'}
|
||||||
@@ -5949,8 +5903,8 @@ packages:
|
|||||||
engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1}
|
engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1}
|
||||||
hasBin: true
|
hasBin: true
|
||||||
|
|
||||||
napi-postinstall@0.3.4:
|
napi-postinstall@0.3.3:
|
||||||
resolution: {integrity: sha512-PHI5f1O0EP5xJ9gQmFGMS6IZcrVvTjpXjz7Na41gTE7eE2hK11lg04CECCYEEjdc17EV4DO+fkGEtt7TpTaTiQ==}
|
resolution: {integrity: sha512-uTp172LLXSxuSYHv/kou+f6KW3SMppU9ivthaVTXian9sOt3XM/zHYHpRZiLgQoxeWfYUnslNWQHF1+G71xcow==}
|
||||||
engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0}
|
engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0}
|
||||||
hasBin: true
|
hasBin: true
|
||||||
|
|
||||||
@@ -6815,11 +6769,6 @@ packages:
|
|||||||
engines: {node: '>= 0.4'}
|
engines: {node: '>= 0.4'}
|
||||||
hasBin: true
|
hasBin: true
|
||||||
|
|
||||||
resolve@1.22.11:
|
|
||||||
resolution: {integrity: sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==}
|
|
||||||
engines: {node: '>= 0.4'}
|
|
||||||
hasBin: true
|
|
||||||
|
|
||||||
resolve@1.22.8:
|
resolve@1.22.8:
|
||||||
resolution: {integrity: sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==}
|
resolution: {integrity: sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==}
|
||||||
hasBin: true
|
hasBin: true
|
||||||
@@ -7909,7 +7858,7 @@ snapshots:
|
|||||||
'@babel/helper-plugin-utils': 7.27.1
|
'@babel/helper-plugin-utils': 7.27.1
|
||||||
debug: 4.4.3
|
debug: 4.4.3
|
||||||
lodash.debounce: 4.0.8
|
lodash.debounce: 4.0.8
|
||||||
resolve: 1.22.11
|
resolve: 1.22.10
|
||||||
transitivePeerDependencies:
|
transitivePeerDependencies:
|
||||||
- supports-color
|
- supports-color
|
||||||
|
|
||||||
@@ -8601,7 +8550,7 @@ snapshots:
|
|||||||
|
|
||||||
'@date-fns/tz@1.4.1': {}
|
'@date-fns/tz@1.4.1': {}
|
||||||
|
|
||||||
'@emnapi/core@1.7.1':
|
'@emnapi/core@1.5.0':
|
||||||
dependencies:
|
dependencies:
|
||||||
'@emnapi/wasi-threads': 1.1.0
|
'@emnapi/wasi-threads': 1.1.0
|
||||||
tslib: 2.8.1
|
tslib: 2.8.1
|
||||||
@@ -8612,11 +8561,6 @@ snapshots:
|
|||||||
tslib: 2.8.1
|
tslib: 2.8.1
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
'@emnapi/runtime@1.7.1':
|
|
||||||
dependencies:
|
|
||||||
tslib: 2.8.1
|
|
||||||
optional: true
|
|
||||||
|
|
||||||
'@emnapi/wasi-threads@1.1.0':
|
'@emnapi/wasi-threads@1.1.0':
|
||||||
dependencies:
|
dependencies:
|
||||||
tslib: 2.8.1
|
tslib: 2.8.1
|
||||||
@@ -8795,8 +8739,6 @@ snapshots:
|
|||||||
|
|
||||||
'@eslint-community/regexpp@4.12.1': {}
|
'@eslint-community/regexpp@4.12.1': {}
|
||||||
|
|
||||||
'@eslint-community/regexpp@4.12.2': {}
|
|
||||||
|
|
||||||
'@eslint/eslintrc@2.1.4':
|
'@eslint/eslintrc@2.1.4':
|
||||||
dependencies:
|
dependencies:
|
||||||
ajv: 6.12.6
|
ajv: 6.12.6
|
||||||
@@ -9054,16 +8996,16 @@ snapshots:
|
|||||||
|
|
||||||
'@napi-rs/wasm-runtime@0.2.12':
|
'@napi-rs/wasm-runtime@0.2.12':
|
||||||
dependencies:
|
dependencies:
|
||||||
'@emnapi/core': 1.7.1
|
'@emnapi/core': 1.5.0
|
||||||
'@emnapi/runtime': 1.7.1
|
'@emnapi/runtime': 1.5.0
|
||||||
'@tybys/wasm-util': 0.10.1
|
'@tybys/wasm-util': 0.10.0
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
'@neoconfetti/react@1.0.0': {}
|
'@neoconfetti/react@1.0.0': {}
|
||||||
|
|
||||||
'@next/env@15.4.10': {}
|
'@next/env@15.4.10': {}
|
||||||
|
|
||||||
'@next/eslint-plugin-next@15.5.7':
|
'@next/eslint-plugin-next@15.5.2':
|
||||||
dependencies:
|
dependencies:
|
||||||
fast-glob: 3.3.1
|
fast-glob: 3.3.1
|
||||||
|
|
||||||
@@ -10173,7 +10115,7 @@ snapshots:
|
|||||||
|
|
||||||
'@rtsao/scc@1.1.0': {}
|
'@rtsao/scc@1.1.0': {}
|
||||||
|
|
||||||
'@rushstack/eslint-patch@1.15.0': {}
|
'@rushstack/eslint-patch@1.12.0': {}
|
||||||
|
|
||||||
'@scarf/scarf@1.4.0': {}
|
'@scarf/scarf@1.4.0': {}
|
||||||
|
|
||||||
@@ -10925,7 +10867,7 @@ snapshots:
|
|||||||
dependencies:
|
dependencies:
|
||||||
'@testing-library/dom': 10.4.1
|
'@testing-library/dom': 10.4.1
|
||||||
|
|
||||||
'@tybys/wasm-util@0.10.1':
|
'@tybys/wasm-util@0.10.0':
|
||||||
dependencies:
|
dependencies:
|
||||||
tslib: 2.8.1
|
tslib: 2.8.1
|
||||||
optional: true
|
optional: true
|
||||||
@@ -11123,14 +11065,14 @@ snapshots:
|
|||||||
dependencies:
|
dependencies:
|
||||||
'@types/node': 24.10.0
|
'@types/node': 24.10.0
|
||||||
|
|
||||||
'@typescript-eslint/eslint-plugin@8.48.1(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint@8.57.1)(typescript@5.9.3)':
|
'@typescript-eslint/eslint-plugin@8.43.0(@typescript-eslint/parser@8.43.0(eslint@8.57.1)(typescript@5.9.3))(eslint@8.57.1)(typescript@5.9.3)':
|
||||||
dependencies:
|
dependencies:
|
||||||
'@eslint-community/regexpp': 4.12.2
|
'@eslint-community/regexpp': 4.12.1
|
||||||
'@typescript-eslint/parser': 8.48.1(eslint@8.57.1)(typescript@5.9.3)
|
'@typescript-eslint/parser': 8.43.0(eslint@8.57.1)(typescript@5.9.3)
|
||||||
'@typescript-eslint/scope-manager': 8.48.1
|
'@typescript-eslint/scope-manager': 8.43.0
|
||||||
'@typescript-eslint/type-utils': 8.48.1(eslint@8.57.1)(typescript@5.9.3)
|
'@typescript-eslint/type-utils': 8.43.0(eslint@8.57.1)(typescript@5.9.3)
|
||||||
'@typescript-eslint/utils': 8.48.1(eslint@8.57.1)(typescript@5.9.3)
|
'@typescript-eslint/utils': 8.43.0(eslint@8.57.1)(typescript@5.9.3)
|
||||||
'@typescript-eslint/visitor-keys': 8.48.1
|
'@typescript-eslint/visitor-keys': 8.43.0
|
||||||
eslint: 8.57.1
|
eslint: 8.57.1
|
||||||
graphemer: 1.4.0
|
graphemer: 1.4.0
|
||||||
ignore: 7.0.5
|
ignore: 7.0.5
|
||||||
@@ -11140,12 +11082,12 @@ snapshots:
|
|||||||
transitivePeerDependencies:
|
transitivePeerDependencies:
|
||||||
- supports-color
|
- supports-color
|
||||||
|
|
||||||
'@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3)':
|
'@typescript-eslint/parser@8.43.0(eslint@8.57.1)(typescript@5.9.3)':
|
||||||
dependencies:
|
dependencies:
|
||||||
'@typescript-eslint/scope-manager': 8.48.1
|
'@typescript-eslint/scope-manager': 8.43.0
|
||||||
'@typescript-eslint/types': 8.48.1
|
'@typescript-eslint/types': 8.43.0
|
||||||
'@typescript-eslint/typescript-estree': 8.48.1(typescript@5.9.3)
|
'@typescript-eslint/typescript-estree': 8.43.0(typescript@5.9.3)
|
||||||
'@typescript-eslint/visitor-keys': 8.48.1
|
'@typescript-eslint/visitor-keys': 8.43.0
|
||||||
debug: 4.4.3
|
debug: 4.4.3
|
||||||
eslint: 8.57.1
|
eslint: 8.57.1
|
||||||
typescript: 5.9.3
|
typescript: 5.9.3
|
||||||
@@ -11155,7 +11097,7 @@ snapshots:
|
|||||||
'@typescript-eslint/project-service@8.43.0(typescript@5.9.3)':
|
'@typescript-eslint/project-service@8.43.0(typescript@5.9.3)':
|
||||||
dependencies:
|
dependencies:
|
||||||
'@typescript-eslint/tsconfig-utils': 8.43.0(typescript@5.9.3)
|
'@typescript-eslint/tsconfig-utils': 8.43.0(typescript@5.9.3)
|
||||||
'@typescript-eslint/types': 8.48.1
|
'@typescript-eslint/types': 8.43.0
|
||||||
debug: 4.4.3
|
debug: 4.4.3
|
||||||
typescript: 5.9.3
|
typescript: 5.9.3
|
||||||
transitivePeerDependencies:
|
transitivePeerDependencies:
|
||||||
@@ -11164,16 +11106,7 @@ snapshots:
|
|||||||
'@typescript-eslint/project-service@8.46.2(typescript@5.9.3)':
|
'@typescript-eslint/project-service@8.46.2(typescript@5.9.3)':
|
||||||
dependencies:
|
dependencies:
|
||||||
'@typescript-eslint/tsconfig-utils': 8.46.2(typescript@5.9.3)
|
'@typescript-eslint/tsconfig-utils': 8.46.2(typescript@5.9.3)
|
||||||
'@typescript-eslint/types': 8.48.1
|
'@typescript-eslint/types': 8.46.2
|
||||||
debug: 4.4.3
|
|
||||||
typescript: 5.9.3
|
|
||||||
transitivePeerDependencies:
|
|
||||||
- supports-color
|
|
||||||
|
|
||||||
'@typescript-eslint/project-service@8.48.1(typescript@5.9.3)':
|
|
||||||
dependencies:
|
|
||||||
'@typescript-eslint/tsconfig-utils': 8.48.1(typescript@5.9.3)
|
|
||||||
'@typescript-eslint/types': 8.48.1
|
|
||||||
debug: 4.4.3
|
debug: 4.4.3
|
||||||
typescript: 5.9.3
|
typescript: 5.9.3
|
||||||
transitivePeerDependencies:
|
transitivePeerDependencies:
|
||||||
@@ -11189,11 +11122,6 @@ snapshots:
|
|||||||
'@typescript-eslint/types': 8.46.2
|
'@typescript-eslint/types': 8.46.2
|
||||||
'@typescript-eslint/visitor-keys': 8.46.2
|
'@typescript-eslint/visitor-keys': 8.46.2
|
||||||
|
|
||||||
'@typescript-eslint/scope-manager@8.48.1':
|
|
||||||
dependencies:
|
|
||||||
'@typescript-eslint/types': 8.48.1
|
|
||||||
'@typescript-eslint/visitor-keys': 8.48.1
|
|
||||||
|
|
||||||
'@typescript-eslint/tsconfig-utils@8.43.0(typescript@5.9.3)':
|
'@typescript-eslint/tsconfig-utils@8.43.0(typescript@5.9.3)':
|
||||||
dependencies:
|
dependencies:
|
||||||
typescript: 5.9.3
|
typescript: 5.9.3
|
||||||
@@ -11202,15 +11130,11 @@ snapshots:
|
|||||||
dependencies:
|
dependencies:
|
||||||
typescript: 5.9.3
|
typescript: 5.9.3
|
||||||
|
|
||||||
'@typescript-eslint/tsconfig-utils@8.48.1(typescript@5.9.3)':
|
'@typescript-eslint/type-utils@8.43.0(eslint@8.57.1)(typescript@5.9.3)':
|
||||||
dependencies:
|
dependencies:
|
||||||
typescript: 5.9.3
|
'@typescript-eslint/types': 8.43.0
|
||||||
|
'@typescript-eslint/typescript-estree': 8.43.0(typescript@5.9.3)
|
||||||
'@typescript-eslint/type-utils@8.48.1(eslint@8.57.1)(typescript@5.9.3)':
|
'@typescript-eslint/utils': 8.43.0(eslint@8.57.1)(typescript@5.9.3)
|
||||||
dependencies:
|
|
||||||
'@typescript-eslint/types': 8.48.1
|
|
||||||
'@typescript-eslint/typescript-estree': 8.48.1(typescript@5.9.3)
|
|
||||||
'@typescript-eslint/utils': 8.48.1(eslint@8.57.1)(typescript@5.9.3)
|
|
||||||
debug: 4.4.3
|
debug: 4.4.3
|
||||||
eslint: 8.57.1
|
eslint: 8.57.1
|
||||||
ts-api-utils: 2.1.0(typescript@5.9.3)
|
ts-api-utils: 2.1.0(typescript@5.9.3)
|
||||||
@@ -11222,8 +11146,6 @@ snapshots:
|
|||||||
|
|
||||||
'@typescript-eslint/types@8.46.2': {}
|
'@typescript-eslint/types@8.46.2': {}
|
||||||
|
|
||||||
'@typescript-eslint/types@8.48.1': {}
|
|
||||||
|
|
||||||
'@typescript-eslint/typescript-estree@8.43.0(typescript@5.9.3)':
|
'@typescript-eslint/typescript-estree@8.43.0(typescript@5.9.3)':
|
||||||
dependencies:
|
dependencies:
|
||||||
'@typescript-eslint/project-service': 8.43.0(typescript@5.9.3)
|
'@typescript-eslint/project-service': 8.43.0(typescript@5.9.3)
|
||||||
@@ -11234,7 +11156,7 @@ snapshots:
|
|||||||
fast-glob: 3.3.3
|
fast-glob: 3.3.3
|
||||||
is-glob: 4.0.3
|
is-glob: 4.0.3
|
||||||
minimatch: 9.0.5
|
minimatch: 9.0.5
|
||||||
semver: 7.7.3
|
semver: 7.7.2
|
||||||
ts-api-utils: 2.1.0(typescript@5.9.3)
|
ts-api-utils: 2.1.0(typescript@5.9.3)
|
||||||
typescript: 5.9.3
|
typescript: 5.9.3
|
||||||
transitivePeerDependencies:
|
transitivePeerDependencies:
|
||||||
@@ -11256,21 +11178,6 @@ snapshots:
|
|||||||
transitivePeerDependencies:
|
transitivePeerDependencies:
|
||||||
- supports-color
|
- supports-color
|
||||||
|
|
||||||
'@typescript-eslint/typescript-estree@8.48.1(typescript@5.9.3)':
|
|
||||||
dependencies:
|
|
||||||
'@typescript-eslint/project-service': 8.48.1(typescript@5.9.3)
|
|
||||||
'@typescript-eslint/tsconfig-utils': 8.48.1(typescript@5.9.3)
|
|
||||||
'@typescript-eslint/types': 8.48.1
|
|
||||||
'@typescript-eslint/visitor-keys': 8.48.1
|
|
||||||
debug: 4.4.3
|
|
||||||
minimatch: 9.0.5
|
|
||||||
semver: 7.7.3
|
|
||||||
tinyglobby: 0.2.15
|
|
||||||
ts-api-utils: 2.1.0(typescript@5.9.3)
|
|
||||||
typescript: 5.9.3
|
|
||||||
transitivePeerDependencies:
|
|
||||||
- supports-color
|
|
||||||
|
|
||||||
'@typescript-eslint/utils@8.43.0(eslint@8.57.1)(typescript@5.9.3)':
|
'@typescript-eslint/utils@8.43.0(eslint@8.57.1)(typescript@5.9.3)':
|
||||||
dependencies:
|
dependencies:
|
||||||
'@eslint-community/eslint-utils': 4.9.0(eslint@8.57.1)
|
'@eslint-community/eslint-utils': 4.9.0(eslint@8.57.1)
|
||||||
@@ -11293,17 +11200,6 @@ snapshots:
|
|||||||
transitivePeerDependencies:
|
transitivePeerDependencies:
|
||||||
- supports-color
|
- supports-color
|
||||||
|
|
||||||
'@typescript-eslint/utils@8.48.1(eslint@8.57.1)(typescript@5.9.3)':
|
|
||||||
dependencies:
|
|
||||||
'@eslint-community/eslint-utils': 4.9.0(eslint@8.57.1)
|
|
||||||
'@typescript-eslint/scope-manager': 8.48.1
|
|
||||||
'@typescript-eslint/types': 8.48.1
|
|
||||||
'@typescript-eslint/typescript-estree': 8.48.1(typescript@5.9.3)
|
|
||||||
eslint: 8.57.1
|
|
||||||
typescript: 5.9.3
|
|
||||||
transitivePeerDependencies:
|
|
||||||
- supports-color
|
|
||||||
|
|
||||||
'@typescript-eslint/visitor-keys@8.43.0':
|
'@typescript-eslint/visitor-keys@8.43.0':
|
||||||
dependencies:
|
dependencies:
|
||||||
'@typescript-eslint/types': 8.43.0
|
'@typescript-eslint/types': 8.43.0
|
||||||
@@ -11314,11 +11210,6 @@ snapshots:
|
|||||||
'@typescript-eslint/types': 8.46.2
|
'@typescript-eslint/types': 8.46.2
|
||||||
eslint-visitor-keys: 4.2.1
|
eslint-visitor-keys: 4.2.1
|
||||||
|
|
||||||
'@typescript-eslint/visitor-keys@8.48.1':
|
|
||||||
dependencies:
|
|
||||||
'@typescript-eslint/types': 8.48.1
|
|
||||||
eslint-visitor-keys: 4.2.1
|
|
||||||
|
|
||||||
'@ungap/structured-clone@1.3.0': {}
|
'@ungap/structured-clone@1.3.0': {}
|
||||||
|
|
||||||
'@unrs/resolver-binding-android-arm-eabi@1.11.1':
|
'@unrs/resolver-binding-android-arm-eabi@1.11.1':
|
||||||
@@ -12641,16 +12532,16 @@ snapshots:
|
|||||||
|
|
||||||
escape-string-regexp@5.0.0: {}
|
escape-string-regexp@5.0.0: {}
|
||||||
|
|
||||||
eslint-config-next@15.5.7(eslint@8.57.1)(typescript@5.9.3):
|
eslint-config-next@15.5.2(eslint@8.57.1)(typescript@5.9.3):
|
||||||
dependencies:
|
dependencies:
|
||||||
'@next/eslint-plugin-next': 15.5.7
|
'@next/eslint-plugin-next': 15.5.2
|
||||||
'@rushstack/eslint-patch': 1.15.0
|
'@rushstack/eslint-patch': 1.12.0
|
||||||
'@typescript-eslint/eslint-plugin': 8.48.1(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint@8.57.1)(typescript@5.9.3)
|
'@typescript-eslint/eslint-plugin': 8.43.0(@typescript-eslint/parser@8.43.0(eslint@8.57.1)(typescript@5.9.3))(eslint@8.57.1)(typescript@5.9.3)
|
||||||
'@typescript-eslint/parser': 8.48.1(eslint@8.57.1)(typescript@5.9.3)
|
'@typescript-eslint/parser': 8.43.0(eslint@8.57.1)(typescript@5.9.3)
|
||||||
eslint: 8.57.1
|
eslint: 8.57.1
|
||||||
eslint-import-resolver-node: 0.3.9
|
eslint-import-resolver-node: 0.3.9
|
||||||
eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.32.0)(eslint@8.57.1)
|
eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.32.0)(eslint@8.57.1)
|
||||||
eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1)
|
eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.43.0(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1)
|
||||||
eslint-plugin-jsx-a11y: 6.10.2(eslint@8.57.1)
|
eslint-plugin-jsx-a11y: 6.10.2(eslint@8.57.1)
|
||||||
eslint-plugin-react: 7.37.5(eslint@8.57.1)
|
eslint-plugin-react: 7.37.5(eslint@8.57.1)
|
||||||
eslint-plugin-react-hooks: 5.2.0(eslint@8.57.1)
|
eslint-plugin-react-hooks: 5.2.0(eslint@8.57.1)
|
||||||
@@ -12665,7 +12556,7 @@ snapshots:
|
|||||||
dependencies:
|
dependencies:
|
||||||
debug: 3.2.7
|
debug: 3.2.7
|
||||||
is-core-module: 2.16.1
|
is-core-module: 2.16.1
|
||||||
resolve: 1.22.11
|
resolve: 1.22.10
|
||||||
transitivePeerDependencies:
|
transitivePeerDependencies:
|
||||||
- supports-color
|
- supports-color
|
||||||
|
|
||||||
@@ -12674,28 +12565,28 @@ snapshots:
|
|||||||
'@nolyfill/is-core-module': 1.0.39
|
'@nolyfill/is-core-module': 1.0.39
|
||||||
debug: 4.4.3
|
debug: 4.4.3
|
||||||
eslint: 8.57.1
|
eslint: 8.57.1
|
||||||
get-tsconfig: 4.13.0
|
get-tsconfig: 4.10.1
|
||||||
is-bun-module: 2.0.0
|
is-bun-module: 2.0.0
|
||||||
stable-hash: 0.0.5
|
stable-hash: 0.0.5
|
||||||
tinyglobby: 0.2.15
|
tinyglobby: 0.2.15
|
||||||
unrs-resolver: 1.11.1
|
unrs-resolver: 1.11.1
|
||||||
optionalDependencies:
|
optionalDependencies:
|
||||||
eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1)
|
eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.43.0(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1)
|
||||||
transitivePeerDependencies:
|
transitivePeerDependencies:
|
||||||
- supports-color
|
- supports-color
|
||||||
|
|
||||||
eslint-module-utils@2.12.1(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1):
|
eslint-module-utils@2.12.1(@typescript-eslint/parser@8.43.0(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1):
|
||||||
dependencies:
|
dependencies:
|
||||||
debug: 3.2.7
|
debug: 3.2.7
|
||||||
optionalDependencies:
|
optionalDependencies:
|
||||||
'@typescript-eslint/parser': 8.48.1(eslint@8.57.1)(typescript@5.9.3)
|
'@typescript-eslint/parser': 8.43.0(eslint@8.57.1)(typescript@5.9.3)
|
||||||
eslint: 8.57.1
|
eslint: 8.57.1
|
||||||
eslint-import-resolver-node: 0.3.9
|
eslint-import-resolver-node: 0.3.9
|
||||||
eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.32.0)(eslint@8.57.1)
|
eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.32.0)(eslint@8.57.1)
|
||||||
transitivePeerDependencies:
|
transitivePeerDependencies:
|
||||||
- supports-color
|
- supports-color
|
||||||
|
|
||||||
eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1):
|
eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.43.0(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1):
|
||||||
dependencies:
|
dependencies:
|
||||||
'@rtsao/scc': 1.1.0
|
'@rtsao/scc': 1.1.0
|
||||||
array-includes: 3.1.9
|
array-includes: 3.1.9
|
||||||
@@ -12706,7 +12597,7 @@ snapshots:
|
|||||||
doctrine: 2.1.0
|
doctrine: 2.1.0
|
||||||
eslint: 8.57.1
|
eslint: 8.57.1
|
||||||
eslint-import-resolver-node: 0.3.9
|
eslint-import-resolver-node: 0.3.9
|
||||||
eslint-module-utils: 2.12.1(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1)
|
eslint-module-utils: 2.12.1(@typescript-eslint/parser@8.43.0(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1)
|
||||||
hasown: 2.0.2
|
hasown: 2.0.2
|
||||||
is-core-module: 2.16.1
|
is-core-module: 2.16.1
|
||||||
is-glob: 4.0.3
|
is-glob: 4.0.3
|
||||||
@@ -12718,7 +12609,7 @@ snapshots:
|
|||||||
string.prototype.trimend: 1.0.9
|
string.prototype.trimend: 1.0.9
|
||||||
tsconfig-paths: 3.15.0
|
tsconfig-paths: 3.15.0
|
||||||
optionalDependencies:
|
optionalDependencies:
|
||||||
'@typescript-eslint/parser': 8.48.1(eslint@8.57.1)(typescript@5.9.3)
|
'@typescript-eslint/parser': 8.43.0(eslint@8.57.1)(typescript@5.9.3)
|
||||||
transitivePeerDependencies:
|
transitivePeerDependencies:
|
||||||
- eslint-import-resolver-typescript
|
- eslint-import-resolver-typescript
|
||||||
- eslint-import-resolver-webpack
|
- eslint-import-resolver-webpack
|
||||||
@@ -13067,8 +12958,6 @@ snapshots:
|
|||||||
dependencies:
|
dependencies:
|
||||||
next: 15.4.10(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
|
next: 15.4.10(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
|
||||||
|
|
||||||
generator-function@2.0.1: {}
|
|
||||||
|
|
||||||
gensync@1.0.0-beta.2: {}
|
gensync@1.0.0-beta.2: {}
|
||||||
|
|
||||||
get-caller-file@2.0.5: {}
|
get-caller-file@2.0.5: {}
|
||||||
@@ -13101,7 +12990,7 @@ snapshots:
|
|||||||
es-errors: 1.3.0
|
es-errors: 1.3.0
|
||||||
get-intrinsic: 1.3.0
|
get-intrinsic: 1.3.0
|
||||||
|
|
||||||
get-tsconfig@4.13.0:
|
get-tsconfig@4.10.1:
|
||||||
dependencies:
|
dependencies:
|
||||||
resolve-pkg-maps: 1.0.0
|
resolve-pkg-maps: 1.0.0
|
||||||
|
|
||||||
@@ -13385,6 +13274,13 @@ snapshots:
|
|||||||
parent-module: 1.0.1
|
parent-module: 1.0.1
|
||||||
resolve-from: 4.0.0
|
resolve-from: 4.0.0
|
||||||
|
|
||||||
|
import-in-the-middle@1.14.2:
|
||||||
|
dependencies:
|
||||||
|
acorn: 8.15.0
|
||||||
|
acorn-import-attributes: 1.9.5(acorn@8.15.0)
|
||||||
|
cjs-module-lexer: 1.4.3
|
||||||
|
module-details-from-path: 1.0.4
|
||||||
|
|
||||||
import-in-the-middle@2.0.0:
|
import-in-the-middle@2.0.0:
|
||||||
dependencies:
|
dependencies:
|
||||||
acorn: 8.15.0
|
acorn: 8.15.0
|
||||||
@@ -13461,7 +13357,7 @@ snapshots:
|
|||||||
|
|
||||||
is-bun-module@2.0.0:
|
is-bun-module@2.0.0:
|
||||||
dependencies:
|
dependencies:
|
||||||
semver: 7.7.3
|
semver: 7.7.2
|
||||||
|
|
||||||
is-callable@1.2.7: {}
|
is-callable@1.2.7: {}
|
||||||
|
|
||||||
@@ -13499,14 +13395,6 @@ snapshots:
|
|||||||
has-tostringtag: 1.0.2
|
has-tostringtag: 1.0.2
|
||||||
safe-regex-test: 1.1.0
|
safe-regex-test: 1.1.0
|
||||||
|
|
||||||
is-generator-function@1.1.2:
|
|
||||||
dependencies:
|
|
||||||
call-bound: 1.0.4
|
|
||||||
generator-function: 2.0.1
|
|
||||||
get-proto: 1.0.1
|
|
||||||
has-tostringtag: 1.0.2
|
|
||||||
safe-regex-test: 1.1.0
|
|
||||||
|
|
||||||
is-glob@4.0.3:
|
is-glob@4.0.3:
|
||||||
dependencies:
|
dependencies:
|
||||||
is-extglob: 2.1.1
|
is-extglob: 2.1.1
|
||||||
@@ -14327,7 +14215,7 @@ snapshots:
|
|||||||
|
|
||||||
nanoid@3.3.11: {}
|
nanoid@3.3.11: {}
|
||||||
|
|
||||||
napi-postinstall@0.3.4: {}
|
napi-postinstall@0.3.3: {}
|
||||||
|
|
||||||
natural-compare@1.4.0: {}
|
natural-compare@1.4.0: {}
|
||||||
|
|
||||||
@@ -15297,12 +15185,6 @@ snapshots:
|
|||||||
path-parse: 1.0.7
|
path-parse: 1.0.7
|
||||||
supports-preserve-symlinks-flag: 1.0.0
|
supports-preserve-symlinks-flag: 1.0.0
|
||||||
|
|
||||||
resolve@1.22.11:
|
|
||||||
dependencies:
|
|
||||||
is-core-module: 2.16.1
|
|
||||||
path-parse: 1.0.7
|
|
||||||
supports-preserve-symlinks-flag: 1.0.0
|
|
||||||
|
|
||||||
resolve@1.22.8:
|
resolve@1.22.8:
|
||||||
dependencies:
|
dependencies:
|
||||||
is-core-module: 2.16.1
|
is-core-module: 2.16.1
|
||||||
@@ -16114,7 +15996,7 @@ snapshots:
|
|||||||
|
|
||||||
unrs-resolver@1.11.1:
|
unrs-resolver@1.11.1:
|
||||||
dependencies:
|
dependencies:
|
||||||
napi-postinstall: 0.3.4
|
napi-postinstall: 0.3.3
|
||||||
optionalDependencies:
|
optionalDependencies:
|
||||||
'@unrs/resolver-binding-android-arm-eabi': 1.11.1
|
'@unrs/resolver-binding-android-arm-eabi': 1.11.1
|
||||||
'@unrs/resolver-binding-android-arm64': 1.11.1
|
'@unrs/resolver-binding-android-arm64': 1.11.1
|
||||||
@@ -16342,7 +16224,7 @@ snapshots:
|
|||||||
is-async-function: 2.1.1
|
is-async-function: 2.1.1
|
||||||
is-date-object: 1.1.0
|
is-date-object: 1.1.0
|
||||||
is-finalizationregistry: 1.1.1
|
is-finalizationregistry: 1.1.1
|
||||||
is-generator-function: 1.1.2
|
is-generator-function: 1.1.0
|
||||||
is-regex: 1.2.1
|
is-regex: 1.2.1
|
||||||
is-weakref: 1.1.1
|
is-weakref: 1.1.1
|
||||||
isarray: 2.0.5
|
isarray: 2.0.5
|
||||||
|
|||||||
@@ -8,6 +8,7 @@ import {
  CardTitle,
} from "@/components/__legacy__/ui/card";
import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard";
import { InformationTooltip } from "@/components/molecules/InformationTooltip/InformationTooltip";
import { CircleNotchIcon } from "@phosphor-icons/react/dist/ssr";
import { Play } from "lucide-react";
import OnboardingButton from "../components/OnboardingButton";
@@ -78,13 +79,20 @@ export default function Page() {
        <CardContent className="flex flex-col gap-4">
          {Object.entries(agent?.input_schema.properties || {}).map(
            ([key, inputSubSchema]) => (
              <RunAgentInputs
                key={key}
                schema={inputSubSchema}
                value={onboarding.state?.agentInput?.[key]}
                placeholder={inputSubSchema.description}
                onChange={(value) => handleSetAgentInput(key, value)}
              />
              <div key={key} className="flex flex-col space-y-2">
                <label className="flex items-center gap-1 text-sm font-medium">
                  {inputSubSchema.title || key}
                  <InformationTooltip
                    description={inputSubSchema.description}
                  />
                </label>
                <RunAgentInputs
                  schema={inputSubSchema}
                  value={onboarding.state?.agentInput?.[key]}
                  placeholder={inputSubSchema.description}
                  onChange={(value) => handleSetAgentInput(key, value)}
                />
              </div>
            ),
          )}
          <AgentOnboardingCredentials
@@ -1,296 +0,0 @@
"use client";

import { useState } from "react";
import { useSearchParams } from "next/navigation";
import { AuthCard } from "@/components/auth/AuthCard";
import { Text } from "@/components/atoms/Text/Text";
import { Button } from "@/components/atoms/Button/Button";
import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner";
import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard";
import { ImageIcon, SealCheckIcon } from "@phosphor-icons/react";
import {
  postOauthAuthorize,
  useGetOauthGetOauthAppInfo,
} from "@/app/api/__generated__/endpoints/oauth/oauth";
import type { APIKeyPermission } from "@/app/api/__generated__/models/aPIKeyPermission";

// Human-readable scope descriptions
const SCOPE_DESCRIPTIONS: { [key in APIKeyPermission]: string } = {
  IDENTITY: "Read user ID, name, e-mail, and timezone",
  EXECUTE_GRAPH: "Run your agents",
  READ_GRAPH: "View your agents and their configurations",
  EXECUTE_BLOCK: "Execute individual blocks",
  READ_BLOCK: "View available blocks",
  READ_STORE: "Access the Marketplace",
  USE_TOOLS: "Use tools on your behalf",
  MANAGE_INTEGRATIONS: "Set up new integrations",
  READ_INTEGRATIONS: "View your connected integrations",
  DELETE_INTEGRATIONS: "Remove connected integrations",
};

export default function AuthorizePage() {
  const searchParams = useSearchParams();

  // Extract OAuth parameters from URL
  const clientID = searchParams.get("client_id");
  const redirectURI = searchParams.get("redirect_uri");
  const scope = searchParams.get("scope");
  const state = searchParams.get("state");
  const codeChallenge = searchParams.get("code_challenge");
  const codeChallengeMethod =
    searchParams.get("code_challenge_method") || "S256";
  const responseType = searchParams.get("response_type") || "code";

  // Parse requested scopes
  const requestedScopes = scope?.split(" ").filter(Boolean) || [];

  // Fetch application info using generated hook
  const {
    data: appInfoResponse,
    isLoading,
    error,
    refetch,
  } = useGetOauthGetOauthAppInfo(clientID || "", {
    query: {
      enabled: !!clientID,
      staleTime: Infinity,
      refetchOnMount: false,
      refetchOnWindowFocus: false,
      refetchOnReconnect: false,
    },
  });

  const appInfo = appInfoResponse?.status === 200 ? appInfoResponse.data : null;

  // Validate required parameters
  const missingParams: string[] = [];
  if (!clientID) missingParams.push("client_id");
  if (!redirectURI) missingParams.push("redirect_uri");
  if (!scope) missingParams.push("scope");
  if (!state) missingParams.push("state");
  if (!codeChallenge) missingParams.push("code_challenge");

  const [isAuthorizing, setIsAuthorizing] = useState(false);
  const [authorizeError, setAuthorizeError] = useState<string | null>(null);

  async function handleApprove() {
    setIsAuthorizing(true);
    setAuthorizeError(null);

    try {
      // Call the backend /oauth/authorize POST endpoint
      // Returns JSON with redirect_url that we use to redirect the user
      const response = await postOauthAuthorize({
        client_id: clientID!,
        redirect_uri: redirectURI!,
        scopes: requestedScopes,
        state: state!,
        response_type: responseType,
        code_challenge: codeChallenge!,
        code_challenge_method: codeChallengeMethod as "S256" | "plain",
      });

      if (response.status === 200 && response.data.redirect_url) {
        window.location.href = response.data.redirect_url;
      } else {
        setAuthorizeError("Authorization failed: no redirect URL received");
        setIsAuthorizing(false);
      }
    } catch (err) {
      console.error("Authorization error:", err);
      setAuthorizeError(
        err instanceof Error ? err.message : "Authorization failed",
      );
      setIsAuthorizing(false);
    }
  }

  function handleDeny() {
    // Redirect back to client with access_denied error
    const params = new URLSearchParams({
      error: "access_denied",
      error_description: "User denied access",
      state: state || "",
    });
    window.location.href = `${redirectURI}?${params.toString()}`;
  }

  // Show error if missing required parameters
  if (missingParams.length > 0) {
    return (
      <div className="flex h-full min-h-[85vh] flex-col items-center justify-center py-10">
        <AuthCard title="Invalid Request">
          <ErrorCard
            context="request parameters"
            responseError={{
              message: `Missing required parameters: ${missingParams.join(", ")}`,
            }}
            hint="Please contact the administrator of the app that sent you here."
            isOurProblem={false}
          />
        </AuthCard>
      </div>
    );
  }

  // Show loading state
  if (isLoading) {
    return (
      <div className="flex h-full min-h-[85vh] flex-col items-center justify-center py-10">
        <AuthCard title="Loading...">
          <div className="flex flex-col items-center gap-4 py-8">
            <LoadingSpinner size="large" />
            <Text variant="body" className="text-center text-slate-500">
              Loading application information...
            </Text>
          </div>
        </AuthCard>
      </div>
    );
  }

  // Show error if app not found
  if (error || !appInfo) {
    return (
      <div className="flex h-full min-h-[85vh] flex-col items-center justify-center py-10">
        <AuthCard title="Application Not Found">
          <ErrorCard
            context="application"
            responseError={
              error
                ? error
                : {
                    message:
                      "The application you're trying to authorize could not be found or is disabled.",
                  }
            }
            onRetry={refetch}
          />
          {redirectURI && (
            <Button
              variant="secondary"
              onClick={handleDeny}
              className="mt-4 w-full"
            >
              Return to Application
            </Button>
          )}
        </AuthCard>
      </div>
    );
  }

  // Validate that requested scopes are allowed by the app
  const invalidScopes = requestedScopes.filter(
    (s) => !appInfo.scopes.includes(s),
  );

  if (invalidScopes.length > 0) {
    return (
      <div className="flex h-full min-h-[85vh] flex-col items-center justify-center py-10">
        <AuthCard title="Invalid Scopes">
          <ErrorCard
            context="scopes"
            responseError={{
              message: `The application is requesting scopes it is not authorized for: ${invalidScopes.join(", ")}`,
            }}
            hint="Please contact the administrator of the app that sent you here."
            isOurProblem={false}
          />
          <Button
            variant="secondary"
            onClick={handleDeny}
            className="mt-4 w-full"
          >
            Return to Application
          </Button>
        </AuthCard>
      </div>
    );
  }

  return (
    <div className="flex h-full min-h-[85vh] flex-col items-center justify-center py-10">
      <AuthCard title="Authorize Application">
        <div className="flex w-full flex-col gap-6">
          {/* App info */}
          <div className="flex flex-col items-center text-center">
            {/* App logo */}
            <div className="mb-4 flex size-16 items-center justify-center overflow-hidden rounded-xl border bg-slate-100">
              {appInfo.logo_url ? (
                // eslint-disable-next-line @next/next/no-img-element
                <img
                  src={appInfo.logo_url}
                  alt={`${appInfo.name} logo`}
                  className="h-full w-full object-cover"
                />
              ) : (
                <ImageIcon className="h-8 w-8 text-slate-400" />
              )}
            </div>
            <Text variant="h4" className="mb-2">
              {appInfo.name}
            </Text>
            {appInfo.description && (
              <Text variant="body" className="text-slate-600">
                {appInfo.description}
              </Text>
            )}
          </div>

          {/* Permissions */}
          <div>
            <Text variant="body-medium" className="mb-3">
              This application is requesting permission to:
            </Text>
            <ul className="space-y-2">
              {requestedScopes.map((scopeKey) => (
                <li key={scopeKey} className="flex items-start gap-3">
                  <SealCheckIcon className="mt-0.5 text-green-600" />
                  <Text variant="body">
                    {SCOPE_DESCRIPTIONS[scopeKey as APIKeyPermission] ||
                      scopeKey}
                  </Text>
                </li>
              ))}
            </ul>
          </div>

          {/* Error message */}
          {authorizeError && (
            <ErrorCard
              context="authorization"
              responseError={{ message: authorizeError }}
            />
          )}

          {/* Action buttons */}
          <div className="flex flex-col gap-3">
            <Button
              variant="primary"
              onClick={handleApprove}
              disabled={isAuthorizing}
              className="w-full text-lg"
            >
              {isAuthorizing ? "Authorizing..." : "Authorize"}
            </Button>
            <Button
              variant="secondary"
              onClick={handleDeny}
              disabled={isAuthorizing}
              className="w-full text-lg"
            >
              Deny
            </Button>
          </div>

          {/* Warning */}
          <Text variant="small" className="text-center text-slate-500">
            By authorizing, you allow this application to access your AutoGPT
            account with the permissions listed above.
          </Text>
        </div>
      </AuthCard>
    </div>
  );
}
@@ -74,9 +74,6 @@ export async function GET(request: Request) {
      );
    }

    // Get redirect destination from 'next' query parameter
    next = searchParams.get("next") || next;

    const forwardedHost = request.headers.get("x-forwarded-host"); // original origin before load balancer
    const isLocalEnv = process.env.NODE_ENV === "development";
    if (isLocalEnv) {
@@ -1,331 +0,0 @@
"use client";

import Image from "next/image";
import Link from "next/link";
import { useSearchParams } from "next/navigation";
import { useState, useMemo, useRef } from "react";
import { AuthCard } from "@/components/auth/AuthCard";
import { Text } from "@/components/atoms/Text/Text";
import { Button } from "@/components/atoms/Button/Button";
import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard";
import { CredentialsInput } from "@/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/CredentialsInputs";
import type {
  BlockIOCredentialsSubSchema,
  CredentialsMetaInput,
  CredentialsType,
} from "@/lib/autogpt-server-api";
import { CheckIcon, CircleIcon } from "@phosphor-icons/react";
import { useGetOauthGetOauthAppInfo } from "@/app/api/__generated__/endpoints/oauth/oauth";
import { okData } from "@/app/api/helpers";
import { OAuthApplicationPublicInfo } from "@/app/api/__generated__/models/oAuthApplicationPublicInfo";

// All credential types - we accept any type of credential
const ALL_CREDENTIAL_TYPES: CredentialsType[] = [
  "api_key",
  "oauth2",
  "user_password",
  "host_scoped",
];

/**
 * Provider configuration for the setup wizard.
 *
 * Query parameters:
 * - `providers`: base64-encoded JSON array of { provider, scopes? } objects
 * - `app_name`: (optional) Name of the requesting application
 * - `redirect_uri`: Where to redirect after completion
 * - `state`: Anti-CSRF token
 *
 * Example `providers` JSON:
 * [
 *   { "provider": "google", "scopes": ["https://www.googleapis.com/auth/gmail.readonly"] },
 *   { "provider": "github", "scopes": ["repo"] }
 * ]
 *
 * Example URL:
 * /auth/integrations/setup-wizard?app_name=My%20App&providers=W3sicHJvdmlkZXIiOiJnb29nbGUifV0=&redirect_uri=...
 */
interface ProviderConfig {
  provider: string;
  scopes?: string[];
}

function createSchemaFromProviderConfig(
  config: ProviderConfig,
): BlockIOCredentialsSubSchema {
  return {
    type: "object",
    properties: {},
    credentials_provider: [config.provider],
    credentials_types: ALL_CREDENTIAL_TYPES,
    credentials_scopes: config.scopes,
    discriminator: undefined,
    discriminator_mapping: undefined,
    discriminator_values: undefined,
  };
}

function toDisplayName(provider: string): string {
  // Convert snake_case or kebab-case to Title Case
  return provider
    .split(/[_-]/)
    .map((word) => word.charAt(0).toUpperCase() + word.slice(1))
    .join(" ");
}

function parseProvidersParam(providersParam: string): ProviderConfig[] {
  try {
    // Decode base64 and parse JSON
    const decoded = atob(providersParam);
    const parsed = JSON.parse(decoded);

    if (!Array.isArray(parsed)) {
      console.warn("providers parameter is not an array");
      return [];
    }

    return parsed.filter(
      (item): item is ProviderConfig =>
        typeof item === "object" &&
        item !== null &&
        typeof item.provider === "string",
    );
  } catch (error) {
    console.warn("Failed to parse providers parameter:", error);
    return [];
  }
}

export default function IntegrationSetupWizardPage() {
  const searchParams = useSearchParams();

  // Extract query parameters
  // `providers` is a base64-encoded JSON array of { provider, scopes?: string[] } objects
  const clientID = searchParams.get("client_id");
  const providersParam = searchParams.get("providers");
  const redirectURI = searchParams.get("redirect_uri");
  const state = searchParams.get("state");

  const { data: appInfo } = useGetOauthGetOauthAppInfo(clientID || "", {
    query: { enabled: !!clientID, select: okData<OAuthApplicationPublicInfo> },
  });

  // Parse providers from base64-encoded JSON
  const providerConfigs = useMemo<ProviderConfig[]>(() => {
    if (!providersParam) return [];
    return parseProvidersParam(providersParam);
  }, [providersParam]);

  // Track selected credentials for each provider
  const [selectedCredentials, setSelectedCredentials] = useState<
    Record<string, CredentialsMetaInput | undefined>
  >({});

  // Track if we've already redirected
  const hasRedirectedRef = useRef(false);

  // Check if all providers have credentials
  const isAllComplete = useMemo(() => {
    if (providerConfigs.length === 0) return false;
    return providerConfigs.every(
      (config) => selectedCredentials[config.provider],
    );
  }, [providerConfigs, selectedCredentials]);

  // Handle credential selection
  const handleCredentialSelect = (
    provider: string,
    credential?: CredentialsMetaInput,
  ) => {
    setSelectedCredentials((prev) => ({
      ...prev,
      [provider]: credential,
    }));
  };

  // Handle completion - redirect back to client
  const handleComplete = () => {
    if (!redirectURI || hasRedirectedRef.current) return;
    hasRedirectedRef.current = true;

    const params = new URLSearchParams({
      success: "true",
    });
    if (state) {
      params.set("state", state);
    }

    window.location.href = `${redirectURI}?${params.toString()}`;
  };

  // Handle cancel - redirect back to client with error
  const handleCancel = () => {
    if (!redirectURI || hasRedirectedRef.current) return;
    hasRedirectedRef.current = true;

    const params = new URLSearchParams({
      error: "user_cancelled",
      error_description: "User cancelled the integration setup",
    });
    if (state) {
      params.set("state", state);
    }

    window.location.href = `${redirectURI}?${params.toString()}`;
  };

  // Validate required parameters
  const missingParams: string[] = [];
  if (!providersParam) missingParams.push("providers");
  if (!redirectURI) missingParams.push("redirect_uri");

  if (missingParams.length > 0) {
    return (
      <div className="flex h-full min-h-[85vh] flex-col items-center justify-center py-10">
        <AuthCard title="Invalid Request">
          <ErrorCard
            context="request details"
            responseError={{
              message: `Missing required parameters: ${missingParams.join(", ")}`,
            }}
            hint="Please contact the administrator of the app that sent you here."
            isOurProblem={false}
          />
        </AuthCard>
      </div>
    );
  }

  if (providerConfigs.length === 0) {
    return (
      <div className="flex h-full min-h-[85vh] flex-col items-center justify-center py-10">
        <AuthCard title="Invalid Request">
          <ErrorCard
            context="providers"
            responseError={{ message: "No providers specified" }}
            hint="Please contact the administrator of the app that sent you here."
            isOurProblem={false}
          />
          <Button
            variant="secondary"
            onClick={handleCancel}
            className="mt-4 w-full"
          >
            Cancel
          </Button>
        </AuthCard>
      </div>
    );
  }

  return (
    <div className="flex h-full min-h-[85vh] flex-col items-center justify-center py-10">
      <AuthCard title="Connect Your Accounts">
        <div className="flex w-full flex-col gap-6">
          <Text variant="body" className="text-center text-slate-600">
            {appInfo ? (
              <>
                <strong>{appInfo.name}</strong> is requesting you to connect the
                following integrations to your AutoGPT account.
              </>
            ) : (
              "Please connect the following integrations to continue."
            )}
          </Text>

          {/* Provider credentials list */}
          <div className="space-y-4">
            {providerConfigs.map((config) => {
              const schema = createSchemaFromProviderConfig(config);
              const isSelected = !!selectedCredentials[config.provider];

              return (
                <div
                  key={config.provider}
                  className="relative rounded-xl border border-slate-200 bg-white p-4"
                >
                  <div className="mb-4 flex items-center gap-2">
                    <div className="relative size-8">
                      <Image
                        src={`/integrations/${config.provider}.png`}
                        alt={`${config.provider} icon`}
                        fill
                        className="object-contain group-disabled:opacity-50"
                      />
                    </div>
                    <Text className="mx-1" variant="large-medium">
                      {toDisplayName(config.provider)}
                    </Text>
                    <div className="grow"></div>
                    {isSelected ? (
                      <CheckIcon
                        size={20}
                        className="text-green-500"
                        weight="bold"
                      />
                    ) : (
                      <CircleIcon
                        size={20}
                        className="text-slate-300"
                        weight="bold"
                      />
                    )}
                    {isSelected && (
                      <Text variant="small" className="text-green-600">
                        Connected
                      </Text>
                    )}
                  </div>

                  <CredentialsInput
                    schema={schema}
                    selectedCredentials={selectedCredentials[config.provider]}
                    onSelectCredentials={(credMeta) =>
                      handleCredentialSelect(config.provider, credMeta)
                    }
                    showTitle={false}
                    className="mb-0"
                  />
                </div>
              );
            })}
          </div>

          {/* Action buttons */}
          <div className="flex flex-col gap-3">
            <Button
              variant="primary"
              onClick={handleComplete}
              disabled={!isAllComplete}
              className="w-full text-lg"
            >
              {isAllComplete
                ? "Continue"
                : `Connect ${providerConfigs.length - Object.values(selectedCredentials).filter(Boolean).length} more`}
            </Button>
            <Button
              variant="secondary"
              onClick={handleCancel}
              className="w-full text-lg"
            >
              Cancel
            </Button>
          </div>

          {/* Link to integrations settings */}
          <Text variant="small" className="text-center text-slate-500">
            You can view and manage all your integrations in your{" "}
            <Link
              href="/profile/integrations"
              target="_blank"
              className="text-purple-600 underline hover:text-purple-800"
            >
              integration settings
            </Link>
            .
          </Text>
        </div>
      </AuthCard>
    </div>
  );
}
@@ -1,86 +0,0 @@
import { GraphModel } from "@/app/api/__generated__/models/graphModel";
import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
import { Button } from "@/components/atoms/Button/Button";
import { Graph } from "@/lib/autogpt-server-api/types";
import { cn } from "@/lib/utils";
import { ShieldCheckIcon, ShieldIcon } from "@phosphor-icons/react";
import { Text } from "@/components/atoms/Text/Text";
import { useAgentSafeMode } from "@/hooks/useAgentSafeMode";
import {
  Tooltip,
  TooltipContent,
  TooltipTrigger,
} from "@/components/atoms/Tooltip/BaseTooltip";

interface Props {
  graph: GraphModel | LibraryAgent | Graph;
  className?: string;
  fullWidth?: boolean;
}

export function FloatingSafeModeToggle({
  graph,
  className,
  fullWidth = false,
}: Props) {
  const {
    currentSafeMode,
    isPending,
    shouldShowToggle,
    isStateUndetermined,
    handleToggle,
  } = useAgentSafeMode(graph);

  if (!shouldShowToggle || isStateUndetermined || isPending) {
    return null;
  }

  return (
    <div className={cn("fixed z-50", className)}>
      <Tooltip delayDuration={100}>
        <TooltipTrigger asChild>
          <Button
            variant={currentSafeMode! ? "primary" : "outline"}
            key={graph.id}
            size="small"
            title={
              currentSafeMode!
                ? "Safe Mode: ON. Human in the loop blocks require manual review"
                : "Safe Mode: OFF. Human in the loop blocks proceed automatically"
            }
            onClick={handleToggle}
            className={cn(fullWidth ? "w-full" : "")}
          >
            {currentSafeMode! ? (
              <>
                <ShieldCheckIcon weight="bold" size={16} />
                <Text variant="body" className="text-zinc-200">
                  Safe Mode: ON
                </Text>
              </>
            ) : (
              <>
                <ShieldIcon weight="bold" size={16} />
                <Text variant="body" className="text-zinc-600">
                  Safe Mode: OFF
                </Text>
              </>
            )}
          </Button>
        </TooltipTrigger>
        <TooltipContent>
          <div className="text-center">
            <div className="font-medium">
              Safe Mode: {currentSafeMode! ? "ON" : "OFF"}
            </div>
            <div className="mt-1 text-xs text-muted-foreground">
              {currentSafeMode!
                ? "Human in the loop blocks require manual review"
                : "Human in the loop blocks proceed automatically"}
            </div>
          </div>
        </TooltipContent>
      </Tooltip>
    </div>
  );
}
@@ -16,12 +16,12 @@ import { useCopyPaste } from "./useCopyPaste";
 import { FloatingReviewsPanel } from "@/components/organisms/FloatingReviewsPanel/FloatingReviewsPanel";
 import { parseAsString, useQueryStates } from "nuqs";
 import { CustomControls } from "./components/CustomControl";
+import { FloatingSafeModeToggle } from "@/components/molecules/FloatingSafeModeToggle/FloatingSafeModeToggle";
 import { useGetV1GetSpecificGraph } from "@/app/api/__generated__/endpoints/graphs/graphs";
 import { GraphModel } from "@/app/api/__generated__/models/graphModel";
 import { okData } from "@/app/api/helpers";
 import { TriggerAgentBanner } from "./components/TriggerAgentBanner";
 import { resolveCollisions } from "./helpers/resolve-collision";
-import { FloatingSafeModeToggle } from "../../FloatingSafeModeToogle";

 export const Flow = () => {
 const [{ flowID, flowExecutionID }] = useQueryStates({
@@ -113,7 +113,8 @@ export const Flow = () => {
 {graph && (
 <FloatingSafeModeToggle
 graph={graph}
-className="right-2 top-32 p-2"
+className="right-4 top-32 p-2"
+variant="black"
 />
 )}
 </ReactFlow>
@@ -20,7 +20,6 @@ import { AgentExecutionStatus } from "@/app/api/__generated__/models/agentExecut

 export const useFlow = () => {
 const [isLocked, setIsLocked] = useState(false);
-const [hasAutoFramed, setHasAutoFramed] = useState(false);
 const addNodes = useNodeStore(useShallow((state) => state.addNodes));
 const addLinks = useEdgeStore(useShallow((state) => state.addLinks));
 const updateNodeStatus = useNodeStore(
@@ -188,36 +187,9 @@ export const useFlow = () => {
 };
 }, []);

-const linkCount = graph?.links?.length ?? 0;
-
 useEffect(() => {
-if (isGraphLoading || isBlocksLoading) {
-setHasAutoFramed(false);
-return;
-}
-
-if (hasAutoFramed) {
-return;
-}
-
-const rafId = requestAnimationFrame(() => {
-fitView({ padding: 0.2, duration: 800, maxZoom: 1 });
-setHasAutoFramed(true);
-});
-
-return () => cancelAnimationFrame(rafId);
-}, [
-fitView,
-hasAutoFramed,
-customNodes.length,
-isBlocksLoading,
-isGraphLoading,
-linkCount,
-]);
-
-useEffect(() => {
-setHasAutoFramed(false);
-}, [flowID, flowVersion]);
+fitView({ padding: 0.2, duration: 800, maxZoom: 2 });
+}, [fitView]);

 // Drag and drop block from block menu
 const onDragOver = useCallback((event: React.DragEvent) => {
@@ -106,11 +106,7 @@ export const CustomNode: React.FC<NodeProps<CustomNode>> = React.memo(
 />
 <NodeAdvancedToggle nodeId={nodeId} />
 {data.uiType != BlockUIType.OUTPUT && (
-<OutputHandler
-uiType={data.uiType}
-outputSchema={outputSchema}
-nodeId={nodeId}
-/>
+<OutputHandler outputSchema={outputSchema} nodeId={nodeId} />
 )}
 <NodeDataRenderer nodeId={nodeId} />
 </div>
@@ -9,7 +9,7 @@ const statusStyles: Record<AgentExecutionStatus, string> = {
 INCOMPLETE: "text-slate-700 border-slate-400",
 QUEUED: "text-blue-700 border-blue-400",
 RUNNING: "text-amber-700 border-amber-400",
-REVIEW: "text-yellow-700 border-yellow-400 bg-yellow-50",
+REVIEW: "text-orange-700 border-orange-400 bg-orange-50",
 COMPLETED: "text-green-700 border-green-400",
 TERMINATED: "text-orange-700 border-orange-400",
 FAILED: "text-red-700 border-red-400",
@@ -4,7 +4,7 @@ export const nodeStyleBasedOnStatus: Record<AgentExecutionStatus, string> = {
 INCOMPLETE: "ring-slate-300 bg-slate-300",
 QUEUED: " ring-blue-300 bg-blue-300",
 RUNNING: "ring-amber-300 bg-amber-300",
-REVIEW: "ring-yellow-300 bg-yellow-300",
+REVIEW: "ring-orange-300 bg-orange-300",
 COMPLETED: "ring-green-300 bg-green-300",
 TERMINATED: "ring-orange-300 bg-orange-300 ",
 FAILED: "ring-red-300 bg-red-300",
@@ -20,32 +20,17 @@ export const FormCreator = React.memo(
 className?: string;
 }) => {
 const updateNodeData = useNodeStore((state) => state.updateNodeData);

 const getHardCodedValues = useNodeStore(
 (state) => state.getHardCodedValues,
 );

 const handleChange = ({ formData }: any) => {
 if ("credentials" in formData && !formData.credentials?.id) {
 delete formData.credentials;
 }
-const updatedValues =
-uiType === BlockUIType.AGENT
-? {
-...getHardCodedValues(nodeId),
-inputs: formData,
-}
-: formData;
-
-updateNodeData(nodeId, { hardcodedValues: updatedValues });
+updateNodeData(nodeId, { hardcodedValues: formData });
 };

-const hardcodedValues = getHardCodedValues(nodeId);
-const initialValues =
-uiType === BlockUIType.AGENT
-? (hardcodedValues.inputs ?? {})
-: hardcodedValues;
+const initialValues = getHardCodedValues(nodeId);

 return (
 <div className={className}>
@@ -14,16 +14,13 @@ import {
 import { useEdgeStore } from "@/app/(platform)/build/stores/edgeStore";
 import { getTypeDisplayInfo } from "./helpers";
 import { generateHandleId } from "../handlers/helpers";
-import { BlockUIType } from "../../types";

 export const OutputHandler = ({
 outputSchema,
 nodeId,
-uiType,
 }: {
 outputSchema: RJSFSchema;
 nodeId: string;
-uiType: BlockUIType;
 }) => {
 const { isOutputConnected } = useEdgeStore();
 const properties = outputSchema?.properties || {};
@@ -82,9 +79,7 @@ export const OutputHandler = ({
 </Text>

 <NodeHandle
-handleId={
-uiType === BlockUIType.AGENT ? key : generateHandleId(key)
-}
+handleId={generateHandleId(key)}
 isConnected={isConnected}
 side="right"
 />
@@ -7,7 +7,6 @@ import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
 import { getV2GetSpecificAgent } from "@/app/api/__generated__/endpoints/store/store";
 import {
 getGetV2ListLibraryAgentsQueryKey,
-getV2GetLibraryAgent,
 usePostV2AddMarketplaceAgent,
 } from "@/app/api/__generated__/endpoints/library/library";
 import {
@@ -152,12 +151,7 @@ export const useBlockMenuSearch = () => {
 });

 const libraryAgent = response.data as LibraryAgent;
-const { data: libraryAgentDetails } = await getV2GetLibraryAgent(
-libraryAgent.id,
-);
-
-addAgentToBuilder(libraryAgentDetails as LibraryAgent);
+addAgentToBuilder(libraryAgent);

 toast({
 title: "Agent Added",
@@ -1,7 +1,6 @@
 import { getGetV2GetBuilderItemCountsQueryKey } from "@/app/api/__generated__/endpoints/default/default";
 import {
 getGetV2ListLibraryAgentsQueryKey,
-getV2GetLibraryAgent,
 usePostV2AddMarketplaceAgent,
 } from "@/app/api/__generated__/endpoints/library/library";
 import {
@@ -106,16 +105,8 @@ export const useMarketplaceAgentsContent = () => {
 },
 });

-// Here, libraryAgent has empty input and output schemas.
-// Not updating the endpoint because this endpoint is used elsewhere.
-// TODO: Create a new endpoint for builder specific to marketplace agents.
 const libraryAgent = response.data as LibraryAgent;
-const { data: libraryAgentDetails } = await getV2GetLibraryAgent(
-libraryAgent.id,
-);
-
-addAgentToBuilder(libraryAgentDetails as LibraryAgent);
+addAgentToBuilder(libraryAgent);

 toast({
 title: "Agent Added",
@@ -65,8 +65,7 @@ import NewControlPanel from "@/app/(platform)/build/components/NewControlPanel/N
 import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag";
 import { BuildActionBar } from "../BuildActionBar";
 import { FloatingReviewsPanel } from "@/components/organisms/FloatingReviewsPanel/FloatingReviewsPanel";
-import { useFlowRealtime } from "@/app/(platform)/build/components/FlowEditor/Flow/useFlowRealtime";
-import { FloatingSafeModeToggle } from "../../FloatingSafeModeToogle";
+import { FloatingSafeModeToggle } from "@/components/molecules/FloatingSafeModeToggle/FloatingSafeModeToggle";

 // This is for the history, this is the minimum distance a block must move before it is logged
 // It helps to prevent spamming the history with small movements especially when pressing on a input in a block
@@ -104,7 +103,6 @@ const FlowEditor: React.FC<{
 updateNode,
 getViewport,
 setViewport,
-fitView,
 screenToFlowPosition,
 } = useReactFlow<CustomNode, CustomEdge>();
 const [nodeId, setNodeId] = useState<number>(1);
@@ -117,7 +115,6 @@ const FlowEditor: React.FC<{
 const [pinBlocksPopover, setPinBlocksPopover] = useState(false);
 // State to control if save popover should be pinned open
 const [pinSavePopover, setPinSavePopover] = useState(false);
-const [hasAutoFramed, setHasAutoFramed] = useState(false);

 const {
 agentName,
@@ -154,9 +151,6 @@ const FlowEditor: React.FC<{
 Record<string, { x: number; y: number }>
 >(Object.fromEntries(nodes.map((node) => [node.id, node.position])));
-
-// Add realtime execution status tracking for FloatingReviewsPanel
-useFlowRealtime();

 const router = useRouter();
 const pathname = usePathname();
 const params = useSearchParams();
@@ -488,26 +482,35 @@ const FlowEditor: React.FC<{
 return uuidv4();
 }, []);

+// Set the initial view port to center the canvas.
 useEffect(() => {
-if (nodes.length === 0) {
+const { x, y } = getViewport();
+if (nodes.length <= 0 || x !== 0 || y !== 0) {
 return;
 }

-if (hasAutoFramed) {
-return;
-}
-
-const rafId = requestAnimationFrame(() => {
-fitView({ padding: 0.2, duration: 800, maxZoom: 1 });
-setHasAutoFramed(true);
+const topLeft = { x: Infinity, y: Infinity };
+const bottomRight = { x: -Infinity, y: -Infinity };
+
+nodes.forEach((node) => {
+const { x, y } = node.position;
+topLeft.x = Math.min(topLeft.x, x);
+topLeft.y = Math.min(topLeft.y, y);
+// Rough estimate of the width and height of the node: 500x400.
+bottomRight.x = Math.max(bottomRight.x, x + 500);
+bottomRight.y = Math.max(bottomRight.y, y + 400);
 });

-return () => cancelAnimationFrame(rafId);
-}, [fitView, hasAutoFramed, nodes.length]);
-
-useEffect(() => {
-setHasAutoFramed(false);
-}, [flowID, flowVersion]);
+const centerX = (topLeft.x + bottomRight.x) / 2;
+const centerY = (topLeft.y + bottomRight.y) / 2;
+const zoom = 0.8;
+
+setViewport({
+x: window.innerWidth / 2 - centerX * zoom,
+y: window.innerHeight / 2 - centerY * zoom,
+zoom: zoom,
+});
+}, [nodes, getViewport, setViewport]);

 const navigateToNode = useCallback(
 (nodeId: string) => {
@@ -928,7 +931,8 @@ const FlowEditor: React.FC<{
 {savedAgent && (
 <FloatingSafeModeToggle
 graph={savedAgent}
-className="right-2 top-32 p-2"
+className="right-4 top-32 p-2"
+variant="black"
 />
 )}
 {isNewBlockEnabled ? (
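Note on the viewport hunk above: the new effect drops the `fitView`-based auto-framing and instead estimates a bounding box from the raw node positions, then centers that box on screen at a fixed zoom. Below is a minimal standalone sketch of the same math, factored into a pure helper for clarity; the `NodeLike` type and `centerViewport` name are illustrative and not part of the codebase.

// Sketch only: mirrors the centering logic added in the diff above.
type NodeLike = { position: { x: number; y: number } };

function centerViewport(
  nodes: NodeLike[],
  windowWidth: number,
  windowHeight: number,
  zoom = 0.8, // same fixed zoom as in the diff
): { x: number; y: number; zoom: number } | null {
  if (nodes.length === 0) return null;

  const topLeft = { x: Infinity, y: Infinity };
  const bottomRight = { x: -Infinity, y: -Infinity };

  for (const node of nodes) {
    const { x, y } = node.position;
    topLeft.x = Math.min(topLeft.x, x);
    topLeft.y = Math.min(topLeft.y, y);
    // Rough per-node footprint of 500x400, as the diff's comment assumes.
    bottomRight.x = Math.max(bottomRight.x, x + 500);
    bottomRight.y = Math.max(bottomRight.y, y + 400);
  }

  const centerX = (topLeft.x + bottomRight.x) / 2;
  const centerY = (topLeft.y + bottomRight.y) / 2;

  // Translate so the flow-space center lands in the middle of the window at the given zoom.
  return {
    x: windowWidth / 2 - centerX * zoom,
    y: windowHeight / 2 - centerY * zoom,
    zoom,
  };
}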
@@ -5,7 +5,6 @@ import { Breadcrumbs } from "@/components/molecules/Breadcrumbs/Breadcrumbs";
 import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard";
 import { cn } from "@/lib/utils";
 import { PlusIcon } from "@phosphor-icons/react";
-import { useEffect } from "react";
 import { RunAgentModal } from "./components/modals/RunAgentModal/RunAgentModal";
 import { AgentRunsLoading } from "./components/other/AgentRunsLoading";
 import { EmptySchedules } from "./components/other/EmptySchedules";
@@ -18,7 +17,6 @@ import { SelectedRunView } from "./components/selected-views/SelectedRunView/Sel
 import { SelectedScheduleView } from "./components/selected-views/SelectedScheduleView/SelectedScheduleView";
 import { SelectedTemplateView } from "./components/selected-views/SelectedTemplateView/SelectedTemplateView";
 import { SelectedTriggerView } from "./components/selected-views/SelectedTriggerView/SelectedTriggerView";
-import { SelectedSettingsView } from "./components/selected-views/SelectedSettingsView/SelectedSettingsView";
 import { SelectedViewLayout } from "./components/selected-views/SelectedViewLayout";
 import { SidebarRunsList } from "./components/sidebar/SidebarRunsList/SidebarRunsList";
 import { AGENT_LIBRARY_SECTION_PADDING_X } from "./helpers";
@@ -26,6 +24,7 @@ import { useNewAgentLibraryView } from "./useNewAgentLibraryView";

 export function NewAgentLibraryView() {
 const {
+agentId,
 agent,
 ready,
 activeTemplate,
@@ -40,17 +39,10 @@ export function NewAgentLibraryView() {
 handleCountsChange,
 handleClearSelectedRun,
 onRunInitiated,
-handleSelectSettings,
 onTriggerSetup,
 onScheduleCreated,
 } = useNewAgentLibraryView();

-useEffect(() => {
-if (agent) {
-document.title = `${agent.name} - Library - AutoGPT Platform`;
-}
-}, [agent]);
-
 if (error) {
 return (
 <ErrorCard
@@ -70,14 +62,12 @@ export function NewAgentLibraryView() {
 return (
 <div className="flex h-full flex-col">
 <div className="mx-6 pt-4">
-<div className="relative flex items-center gap-2">
 <Breadcrumbs
 items={[
 { name: "My Library", link: "/library" },
-{ name: agent.name },
+{ name: agent.name, link: `/library/agents/${agentId}` },
 ]}
 />
-</div>
 </div>
 <div className="flex min-h-0 flex-1">
 <EmptyTasks
@@ -131,12 +121,7 @@ export function NewAgentLibraryView() {
 </SectionWrap>

 {activeItem ? (
-activeItem === "settings" ? (
-<SelectedSettingsView
-agent={agent}
-onClearSelectedRun={handleClearSelectedRun}
-/>
-) : activeTab === "scheduled" ? (
+activeTab === "scheduled" ? (
 <SelectedScheduleView
 agent={agent}
 scheduleId={activeItem}
@@ -163,40 +148,24 @@ export function NewAgentLibraryView() {
 runId={activeItem}
 onSelectRun={handleSelectRun}
 onClearSelectedRun={handleClearSelectedRun}
-onSelectSettings={handleSelectSettings}
 />
 )
 ) : sidebarLoading ? (
-<LoadingSelectedContent
-agent={agent}
-onSelectSettings={handleSelectSettings}
-/>
+<LoadingSelectedContent agentName={agent.name} agentId={agent.id} />
 ) : activeTab === "scheduled" ? (
-<SelectedViewLayout
-agent={agent}
-onSelectSettings={handleSelectSettings}
->
+<SelectedViewLayout agentName={agent.name} agentId={agent.id}>
 <EmptySchedules />
 </SelectedViewLayout>
 ) : activeTab === "templates" ? (
-<SelectedViewLayout
-agent={agent}
-onSelectSettings={handleSelectSettings}
->
+<SelectedViewLayout agentName={agent.name} agentId={agent.id}>
 <EmptyTemplates />
 </SelectedViewLayout>
 ) : activeTab === "triggers" ? (
-<SelectedViewLayout
-agent={agent}
-onSelectSettings={handleSelectSettings}
->
+<SelectedViewLayout agentName={agent.name} agentId={agent.id}>
 <EmptyTriggers />
 </SelectedViewLayout>
 ) : (
-<SelectedViewLayout
-agent={agent}
-onSelectSettings={handleSelectSettings}
->
+<SelectedViewLayout agentName={agent.name} agentId={agent.id}>
 <EmptyTasks
 agent={agent}
 onRun={onRunInitiated}
@@ -1,11 +1,16 @@
 "use client";

 import type { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
-import { Text } from "@/components/atoms/Text/Text";
-import type { CredentialsMetaInput } from "@/lib/autogpt-server-api/types";
+import type {
+BlockIOSubSchema,
+CredentialsMetaInput,
+} from "@/lib/autogpt-server-api/types";
 import { CredentialsInput } from "../CredentialsInputs/CredentialsInputs";
-import { RunAgentInputs } from "../RunAgentInputs/RunAgentInputs";
-import { getAgentCredentialsFields, getAgentInputFields } from "./helpers";
+import {
+getAgentCredentialsFields,
+getAgentInputFields,
+renderValue,
+} from "./helpers";

 type Props = {
 agent: LibraryAgent;
@@ -23,23 +28,19 @@ export function AgentInputsReadOnly({
 getAgentCredentialsFields(agent),
 );

+// Take actual input entries as leading; augment with schema from input fields.
+// TODO: ensure consistent ordering.
 const inputEntries =
 inputs &&
-Object.entries(inputs).map(([key, value]) => ({
-key,
-schema: inputFields[key],
-value,
-}));
+Object.entries(inputs).map<[string, [BlockIOSubSchema | undefined, any]]>(
+([k, v]) => [k, [inputFields[k], v]],
+);

 const hasInputs = inputEntries && inputEntries.length > 0;
 const hasCredentials = credentialInputs && credentialFieldEntries.length > 0;

 if (!hasInputs && !hasCredentials) {
-return (
-<Text variant="body" className="text-zinc-700">
-No input for this run.
-</Text>
-);
+return <div className="text-neutral-600">No input for this run.</div>;
 }

 return (
@@ -47,20 +48,16 @@ export function AgentInputsReadOnly({
 {/* Regular inputs */}
 {hasInputs && (
 <div className="flex flex-col gap-4">
-{inputEntries.map(({ key, schema, value }) => {
-if (!schema) return null;
-
-return (
-<RunAgentInputs
-key={key}
-schema={schema}
-value={value}
-placeholder={schema.description}
-onChange={() => {}}
-readOnly={true}
-/>
-);
-})}
+{inputEntries.map(([key, [schema, value]]) => (
+<div key={key} className="flex flex-col gap-1.5">
+<label className="text-sm font-medium">
+{schema?.title || key}
+</label>
+<p className="whitespace-pre-wrap break-words text-sm text-neutral-700">
+{renderValue(value)}
+</p>
+</div>
+))}
 </div>
 )}

@@ -15,14 +15,13 @@ import { HostScopedCredentialsModal } from "./components/HotScopedCredentialsMod
 import { OAuthFlowWaitingModal } from "./components/OAuthWaitingModal/OAuthWaitingModal";
 import { PasswordCredentialsModal } from "./components/PasswordCredentialsModal/PasswordCredentialsModal";
 import { getCredentialDisplayName } from "./helpers";
-import {
-CredentialsInputState,
-useCredentialsInput,
-} from "./useCredentialsInput";
+import { useCredentialsInputs } from "./useCredentialsInputs";
+
+type UseCredentialsInputsReturn = ReturnType<typeof useCredentialsInputs>;

 function isLoaded(
-data: CredentialsInputState,
-): data is Extract<CredentialsInputState, { isLoading: false }> {
+data: UseCredentialsInputsReturn,
+): data is Extract<UseCredentialsInputsReturn, { isLoading: false }> {
 return data.isLoading === false;
 }

@@ -34,23 +33,21 @@ type Props = {
 onSelectCredentials: (newValue?: CredentialsMetaInput) => void;
 onLoaded?: (loaded: boolean) => void;
 readOnly?: boolean;
-showTitle?: boolean;
 };

 export function CredentialsInput({
 schema,
 className,
-selectedCredentials: selectedCredential,
-onSelectCredentials: onSelectCredential,
+selectedCredentials,
+onSelectCredentials,
 siblingInputs,
 onLoaded,
 readOnly = false,
-showTitle = true,
 }: Props) {
-const hookData = useCredentialsInput({
+const hookData = useCredentialsInputs({
 schema,
-selectedCredential,
-onSelectCredential,
+selectedCredentials,
+onSelectCredentials,
 siblingInputs,
 onLoaded,
 readOnly,
@@ -92,14 +89,12 @@ export function CredentialsInput({

 return (
 <div className={cn("mb-6", className)}>
-{showTitle && (
 <div className="mb-2 flex items-center gap-2">
 <Text variant="large-medium">{displayName} credentials</Text>
 {schema.description && (
 <InformationTooltip description={schema.description} />
 )}
 </div>
-)}

 {hasCredentialsToShow ? (
 <>
@@ -108,7 +103,7 @@ export function CredentialsInput({
 credentials={credentialsToShow}
 provider={provider}
 displayName={displayName}
-selectedCredentials={selectedCredential}
+selectedCredentials={selectedCredentials}
 onSelectCredential={handleCredentialSelect}
 readOnly={readOnly}
 />
@@ -169,7 +164,7 @@ export function CredentialsInput({
 open={isAPICredentialsModalOpen}
 onClose={() => setAPICredentialsModalOpen(false)}
 onCredentialsCreate={(credsMeta) => {
-onSelectCredential(credsMeta);
+onSelectCredentials(credsMeta);
 setAPICredentialsModalOpen(false);
 }}
 siblingInputs={siblingInputs}
@@ -188,7 +183,7 @@ export function CredentialsInput({
 open={isUserPasswordCredentialsModalOpen}
 onClose={() => setUserPasswordCredentialsModalOpen(false)}
 onCredentialsCreate={(creds) => {
-onSelectCredential(creds);
+onSelectCredentials(creds);
 setUserPasswordCredentialsModalOpen(false);
 }}
 siblingInputs={siblingInputs}
@@ -200,7 +195,7 @@ export function CredentialsInput({
 open={isHostScopedCredentialsModalOpen}
 onClose={() => setHostScopedCredentialsModalOpen(false)}
 onCredentialsCreate={(creds) => {
-onSelectCredential(creds);
+onSelectCredentials(creds);
 setHostScopedCredentialsModalOpen(false);
 }}
 siblingInputs={siblingInputs}
@@ -62,15 +62,12 @@ export function CredentialRow({
 </div>
 <IconKey className="h-5 w-5 shrink-0 text-zinc-800" />
 <div className="flex min-w-0 flex-1 flex-nowrap items-center gap-4">
-<Text
-variant="body"
-className="line-clamp-1 flex-[0_0_50%] text-ellipsis tracking-tight"
->
+<Text variant="body" className="tracking-tight">
 {getCredentialDisplayName(credential, displayName)}
 </Text>
 <Text
 variant="large"
-className="lex-[0_0_40%] relative top-1 hidden overflow-hidden whitespace-nowrap font-mono tracking-tight md:block"
+className="relative top-1 font-mono tracking-tight"
 >
 {"*".repeat(MASKED_KEY_LENGTH)}
 </Text>
@@ -5,33 +5,32 @@ import {
 BlockIOCredentialsSubSchema,
 CredentialsMetaInput,
 } from "@/lib/autogpt-server-api/types";
+import { CredentialsProvidersContext } from "@/providers/agent-credentials/credentials-provider";
 import { useQueryClient } from "@tanstack/react-query";
-import { useEffect, useMemo, useState } from "react";
+import { useContext, useEffect, useMemo, useState } from "react";
 import {
 getActionButtonText,
 OAUTH_TIMEOUT_MS,
 OAuthPopupResultMessage,
 } from "./helpers";

-export type CredentialsInputState = ReturnType<typeof useCredentialsInput>;
-
-type Params = {
+type Args = {
 schema: BlockIOCredentialsSubSchema;
-selectedCredential?: CredentialsMetaInput;
-onSelectCredential: (newValue?: CredentialsMetaInput) => void;
+selectedCredentials?: CredentialsMetaInput;
+onSelectCredentials: (newValue?: CredentialsMetaInput) => void;
 siblingInputs?: Record<string, any>;
 onLoaded?: (loaded: boolean) => void;
 readOnly?: boolean;
 };

-export function useCredentialsInput({
+export function useCredentialsInputs({
 schema,
-selectedCredential,
-onSelectCredential,
+selectedCredentials,
+onSelectCredentials,
 siblingInputs,
 onLoaded,
 readOnly = false,
-}: Params) {
+}: Args) {
 const [isAPICredentialsModalOpen, setAPICredentialsModalOpen] =
 useState(false);
 const [
@@ -52,6 +51,7 @@ export function useCredentialsInput({
 const api = useBackendAPI();
 const queryClient = useQueryClient();
 const credentials = useCredentials(schema, siblingInputs);
+const allProviders = useContext(CredentialsProvidersContext);

 const deleteCredentialsMutation = useDeleteV1DeleteCredentials({
 mutation: {
@@ -63,49 +63,57 @@ export function useCredentialsInput({
 queryKey: [`/api/integrations/${credentials?.provider}/credentials`],
 });
 setCredentialToDelete(null);
-if (selectedCredential?.id === credentialToDelete?.id) {
-onSelectCredential(undefined);
+if (selectedCredentials?.id === credentialToDelete?.id) {
+onSelectCredentials(undefined);
 }
 },
 },
 });

+const rawProvider = credentials
+? allProviders?.[credentials.provider as keyof typeof allProviders]
+: null;
+
 useEffect(() => {
 if (onLoaded) {
 onLoaded(Boolean(credentials && credentials.isLoading === false));
 }
 }, [credentials, onLoaded]);

-// Unselect credential if not available
 useEffect(() => {
 if (readOnly) return;
 if (!credentials || !("savedCredentials" in credentials)) return;
 if (
-selectedCredential &&
-!credentials.savedCredentials.some((c) => c.id === selectedCredential.id)
+selectedCredentials &&
+!credentials.savedCredentials.some((c) => c.id === selectedCredentials.id)
 ) {
-onSelectCredential(undefined);
+onSelectCredentials(undefined);
 }
-}, [credentials, selectedCredential, onSelectCredential, readOnly]);
+}, [credentials, selectedCredentials, onSelectCredentials, readOnly]);

-// The available credential, if there is only one
-const singleCredential = useMemo(() => {
+const { singleCredential } = useMemo(() => {
 if (!credentials || !("savedCredentials" in credentials)) {
-return null;
+return {
+singleCredential: null,
+};
 }

-return credentials.savedCredentials.length === 1
-? credentials.savedCredentials[0]
-: null;
+const single =
+credentials.savedCredentials.length === 1
+? credentials.savedCredentials[0]
+: null;
+
+return {
+singleCredential: single,
+};
 }, [credentials]);

-// Auto-select the one available credential
 useEffect(() => {
 if (readOnly) return;
-if (singleCredential && !selectedCredential) {
-onSelectCredential(singleCredential);
+if (singleCredential && !selectedCredentials) {
+onSelectCredentials(singleCredential);
 }
-}, [singleCredential, selectedCredential, onSelectCredential, readOnly]);
+}, [singleCredential, selectedCredentials, onSelectCredentials, readOnly]);

 if (
 !credentials ||
@@ -128,6 +136,25 @@ export function useCredentialsInput({
 oAuthCallback,
 } = credentials;

+const allSavedCredentials = rawProvider?.savedCredentials || savedCredentials;
+
+const credentialsToShow = (() => {
+const creds = [...allSavedCredentials];
+if (
+!readOnly &&
+selectedCredentials &&
+!creds.some((c) => c.id === selectedCredentials.id)
+) {
+creds.push({
+id: selectedCredentials.id,
+type: selectedCredentials.type,
+title: selectedCredentials.title || "Selected credential",
+provider: provider,
+} as any);
+}
+return creds;
+})();
+
 async function handleOAuthLogin() {
 setOAuthError(null);
 const { login_url, state_token } = await api.oAuthLogin(
@@ -180,31 +207,7 @@ export function useCredentialsInput({
 console.debug("Processing OAuth callback");
 const credentials = await oAuthCallback(e.data.code, e.data.state);
 console.debug("OAuth callback processed successfully");
-
-// Check if the credential's scopes match the required scopes
-const requiredScopes = schema.credentials_scopes;
-if (requiredScopes && requiredScopes.length > 0) {
-const grantedScopes = new Set(credentials.scopes || []);
-const hasAllRequiredScopes = new Set(requiredScopes).isSubsetOf(
-grantedScopes,
-);
-
-if (!hasAllRequiredScopes) {
-console.error(
-`Newly created OAuth credential for ${providerName} has insufficient scopes. Required:`,
-requiredScopes,
-"Granted:",
-credentials.scopes,
-);
-setOAuthError(
-"Connection failed: the granted permissions don't match what's required. " +
-"Please contact the application administrator.",
-);
-return;
-}
-}
-
-onSelectCredential({
+onSelectCredentials({
 id: credentials.id,
 type: "oauth2",
 title: credentials.title,
@@ -250,9 +253,9 @@ export function useCredentialsInput({
 }

 function handleCredentialSelect(credentialId: string) {
-const selectedCreds = savedCredentials.find((c) => c.id === credentialId);
+const selectedCreds = credentialsToShow.find((c) => c.id === credentialId);
 if (selectedCreds) {
-onSelectCredential({
+onSelectCredentials({
 id: selectedCreds.id,
 type: selectedCreds.type,
 provider: provider,
@@ -282,8 +285,8 @@ export function useCredentialsInput({
 supportsOAuth2,
 supportsUserPassword,
 supportsHostScoped,
-credentialsToShow: savedCredentials,
-selectedCredential,
+credentialsToShow,
+selectedCredentials,
 oAuthError,
 isAPICredentialsModalOpen,
 isUserPasswordCredentialsModalOpen,
@@ -297,7 +300,7 @@ export function useCredentialsInput({
 supportsApiKey,
 supportsUserPassword,
 supportsHostScoped,
-savedCredentials.length > 0,
+credentialsToShow.length > 0,
 ),
 setAPICredentialsModalOpen,
 setUserPasswordCredentialsModalOpen,
@@ -308,7 +311,7 @@ export function useCredentialsInput({
 handleDeleteCredential,
 handleDeleteConfirm,
 handleOAuthLogin,
-onSelectCredential,
+onSelectCredentials,
 schema,
 siblingInputs,
 };
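Note on the `credentialsToShow` block added above: it starts from the provider's saved credentials and, outside read-only mode, appends the currently selected credential when it is missing from that list, so a selection made elsewhere still renders in the picker. A small sketch of that merge with illustrative names (`CredentialMeta` and `mergeCredentials` are not names from the codebase):

// Sketch only: same merge rule as the hook's credentialsToShow IIFE.
type CredentialMeta = { id: string; type: string; title?: string; provider: string };

function mergeCredentials(
  saved: CredentialMeta[],
  selected: CredentialMeta | undefined,
  readOnly: boolean,
): CredentialMeta[] {
  const creds = [...saved];
  // Keep the selected credential visible even if it is not among the saved ones.
  if (!readOnly && selected && !creds.some((c) => c.id === selected.id)) {
    creds.push({ ...selected, title: selected.title || "Selected credential" });
  }
  return creds;
}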
@@ -9,7 +9,6 @@ import { Button } from "@/components/atoms/Button/Button";
 import { FileInput } from "@/components/atoms/FileInput/FileInput";
 import { Switch } from "@/components/atoms/Switch/Switch";
 import { GoogleDrivePickerInput } from "@/components/contextual/GoogleDrivePicker/GoogleDrivePickerInput";
-import { InformationTooltip } from "@/components/molecules/InformationTooltip/InformationTooltip";
 import { TimePicker } from "@/components/molecules/TimePicker/TimePicker";
 import {
 BlockIOObjectSubSchema,
@@ -33,7 +32,6 @@ interface Props {
 value?: any;
 placeholder?: string;
 onChange: (value: any) => void;
-readOnly?: boolean;
 }

 /**
@@ -46,7 +44,6 @@ export function RunAgentInputs({
 value,
 placeholder,
 onChange,
-readOnly = false,
 ...props
 }: Props & React.HTMLAttributes<HTMLElement>) {
 const { handleUploadFile, uploadProgress } = useRunAgentInputs();
@@ -65,6 +62,7 @@ export function RunAgentInputs({
 id={`${baseId}-number`}
 label={schema.title ?? placeholder ?? "Number"}
 hideLabel
+size="small"
 type="number"
 value={value ?? ""}
 placeholder={placeholder || "Enter number"}
@@ -82,6 +80,7 @@ export function RunAgentInputs({
 id={`${baseId}-textarea`}
 label={schema.title ?? placeholder ?? "Text"}
 hideLabel
+size="small"
 type="textarea"
 rows={3}
 value={value ?? ""}
@@ -103,7 +102,7 @@ export function RunAgentInputs({
 value={value}
 onChange={onChange}
 className="w-full"
-showRemoveButton={!readOnly}
+showRemoveButton={false}
 />
 );
 break;
@@ -131,6 +130,7 @@ export function RunAgentInputs({
 id={`${baseId}-date`}
 label={schema.title ?? placeholder ?? "Date"}
 hideLabel
+size="small"
 type="date"
 value={value ? format(value as Date, "yyyy-MM-dd") : ""}
 onChange={(e) => {
@@ -159,6 +159,7 @@ export function RunAgentInputs({
 id={`${baseId}-datetime`}
 label={schema.title ?? placeholder ?? "Date time"}
 hideLabel
+size="small"
 type="datetime-local"
 value={value ?? ""}
 onChange={(e) => onChange((e.target as HTMLInputElement).value)}
@@ -193,6 +194,7 @@ export function RunAgentInputs({
 label={schema.title ?? placeholder ?? "Select"}
 hideLabel
 value={value ?? ""}
+size="small"
 onValueChange={(val: string) => onChange(val)}
 placeholder={placeholder || "Select an option"}
 options={schema.enum
@@ -215,6 +217,7 @@ export function RunAgentInputs({
 items={allKeys.map((key) => ({
 value: key,
 label: _schema.properties[key]?.title ?? key,
+size: "small",
 }))}
 selectedValues={selectedValues}
 onChange={(values: string[]) =>
@@ -333,6 +336,7 @@ export function RunAgentInputs({
 id={`${baseId}-text`}
 label={schema.title ?? placeholder ?? "Text"}
 hideLabel
+size="small"
 type="text"
 value={value ?? ""}
 onChange={(e) => onChange((e.target as HTMLInputElement).value)}
@@ -343,17 +347,6 @@ export function RunAgentInputs({
 }

 return (
-<div className="flex w-full flex-col gap-0 space-y-2">
-<label className="large-medium flex items-center gap-1 font-medium">
-{schema.title || placeholder}
-<InformationTooltip description={schema.description} />
-</label>
-<div
-className="no-drag relative flex w-full"
-style={readOnly ? { pointerEvents: "none", opacity: 0.7 } : undefined}
->
-{innerInputElement}
-</div>
-</div>
+<div className="no-drag relative flex w-full">{innerInputElement}</div>
 );
 }
@@ -73,15 +73,22 @@ export function ModalRunSection() {
 title="Task Inputs"
 subtitle="Enter the information you want to provide to the agent for this task"
 >
+{/* Regular inputs */}
 {inputFields.map(([key, inputSubSchema]) => (
-<RunAgentInputs
-key={key}
-schema={inputSubSchema}
-value={inputValues[key] ?? inputSubSchema.default}
-placeholder={inputSubSchema.description}
-onChange={(value) => setInputValue(key, value)}
-data-testid={`agent-input-${key}`}
-/>
+<div key={key} className="flex w-full flex-col gap-0 space-y-2">
+<label className="flex items-center gap-1 text-sm font-medium">
+{inputSubSchema.title || key}
+<InformationTooltip description={inputSubSchema.description} />
+</label>
+
+<RunAgentInputs
+schema={inputSubSchema}
+value={inputValues[key] ?? inputSubSchema.default}
+placeholder={inputSubSchema.description}
+onChange={(value) => setInputValue(key, value)}
+data-testid={`agent-input-${key}`}
+/>
+</div>
 ))}
 </ModalSection>
 ) : null}
@@ -1,29 +0,0 @@
-import { Button } from "@/components/atoms/Button/Button";
-import { GearIcon } from "@phosphor-icons/react";
-import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
-import { useAgentSafeMode } from "@/hooks/useAgentSafeMode";
-
-interface Props {
-agent: LibraryAgent;
-onSelectSettings: () => void;
-}
-
-export function AgentSettingsButton({ agent, onSelectSettings }: Props) {
-const { hasHITLBlocks } = useAgentSafeMode(agent);
-
-if (!hasHITLBlocks) {
-return null;
-}
-
-return (
-<Button
-variant="ghost"
-size="small"
-className="m-0 min-w-0 rounded-full p-0 px-1"
-onClick={onSelectSettings}
-aria-label="Agent Settings"
->
-<GearIcon size={18} className="text-zinc-600" />
-</Button>
-);
-}
@@ -0,0 +1,14 @@
+import { cn } from "@/lib/utils";
+import { AGENT_LIBRARY_SECTION_PADDING_X } from "../../helpers";
+
+type Props = {
+children: React.ReactNode;
+};
+
+export function AnchorLinksWrap({ children }: Props) {
+return (
+<div className={cn(AGENT_LIBRARY_SECTION_PADDING_X, "hidden lg:block")}>
+<nav className="flex gap-8 px-3 pb-1">{children}</nav>
+</div>
+);
+}
@@ -1,22 +1,16 @@
 import { Skeleton } from "@/components/__legacy__/ui/skeleton";
 import { cn } from "@/lib/utils";
-import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
 import { AGENT_LIBRARY_SECTION_PADDING_X } from "../../helpers";
 import { SelectedViewLayout } from "./SelectedViewLayout";

 interface Props {
-agent: LibraryAgent;
-onSelectSettings?: () => void;
-selectedSettings?: boolean;
+agentName: string;
+agentId: string;
 }

 export function LoadingSelectedContent(props: Props) {
 return (
-<SelectedViewLayout
-agent={props.agent}
-onSelectSettings={props.onSelectSettings}
-selectedSettings={props.selectedSettings}
->
+<SelectedViewLayout agentName={props.agentName} agentId={props.agentId}>
 <div
 className={cn("flex flex-col gap-4", AGENT_LIBRARY_SECTION_PADDING_X)}
 >
@@ -4,19 +4,20 @@ import { AgentExecutionStatus } from "@/app/api/__generated__/models/agentExecut
 import type { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
 import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner";
 import { Text } from "@/components/atoms/Text/Text";
-import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard";
-import { InformationTooltip } from "@/components/molecules/InformationTooltip/InformationTooltip";
 import {
-  ScrollableTabs,
-  ScrollableTabsContent,
-  ScrollableTabsList,
-  ScrollableTabsTrigger,
-} from "@/components/molecules/ScrollableTabs/ScrollableTabs";
+  Tooltip,
+  TooltipContent,
+  TooltipProvider,
+  TooltipTrigger,
+} from "@/components/atoms/Tooltip/BaseTooltip";
+import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard";
 import { PendingReviewsList } from "@/components/organisms/PendingReviewsList/PendingReviewsList";
 import { usePendingReviewsForExecution } from "@/hooks/usePendingReviews";
 import { isLargeScreen, useBreakpoint } from "@/lib/hooks/useBreakpoint";
+import { InfoIcon } from "@phosphor-icons/react";
 import { useEffect } from "react";
 import { AgentInputsReadOnly } from "../../modals/AgentInputsReadOnly/AgentInputsReadOnly";
+import { AnchorLinksWrap } from "../AnchorLinksWrap";
 import { LoadingSelectedContent } from "../LoadingSelectedContent";
 import { RunDetailCard } from "../RunDetailCard/RunDetailCard";
 import { RunDetailHeader } from "../RunDetailHeader/RunDetailHeader";
@@ -27,13 +28,14 @@ import { SelectedRunActions } from "./components/SelectedRunActions/SelectedRunA
 import { WebhookTriggerSection } from "./components/WebhookTriggerSection";
 import { useSelectedRunView } from "./useSelectedRunView";
 
+const anchorStyles =
+  "border-b-2 border-transparent pb-1 text-sm font-medium text-slate-600 transition-colors hover:text-slate-900 hover:border-slate-900";
+
 interface Props {
   agent: LibraryAgent;
   runId: string;
   onSelectRun?: (id: string) => void;
   onClearSelectedRun?: () => void;
-  onSelectSettings?: () => void;
-  selectedSettings?: boolean;
 }
 
 export function SelectedRunView({
@@ -41,7 +43,6 @@ export function SelectedRunView({
   runId,
   onSelectRun,
   onClearSelectedRun,
-  onSelectSettings,
 }: Props) {
   const { run, preset, isLoading, responseError, httpError } =
     useSelectedRunView(agent.graph_id, runId);
@@ -64,6 +65,13 @@ export function SelectedRunView({
   const withSummary = run?.stats?.activity_status;
   const withReviews = run?.status === AgentExecutionStatus.REVIEW;
 
+  function scrollToSection(id: string) {
+    const element = document.getElementById(id);
+    if (element) {
+      element.scrollIntoView({ behavior: "smooth", block: "start" });
+    }
+  }
+
   if (responseError || httpError) {
     return (
       <ErrorCard
@@ -75,13 +83,13 @@ export function SelectedRunView({
   }
 
   if (isLoading && !run) {
-    return <LoadingSelectedContent agent={agent} />;
+    return <LoadingSelectedContent agentName={agent.name} agentId={agent.id} />;
   }
 
   return (
     <div className="flex h-full w-full gap-4">
       <div className="flex min-h-0 min-w-0 flex-1 flex-col">
-        <SelectedViewLayout agent={agent} onSelectSettings={onSelectSettings}>
+        <SelectedViewLayout agentName={agent.name} agentId={agent.id}>
          <div className="flex flex-col gap-4">
            <RunDetailHeader agent={agent} run={run} />
 
@@ -104,114 +112,118 @@ export function SelectedRunView({
                 />
               )}
 
-              <ScrollableTabs
-                defaultValue="output"
-                className="-mt-2 flex flex-col"
-              >
-                <ScrollableTabsList className="px-4">
-                  {withReviews && (
-                    <ScrollableTabsTrigger value="reviews">
-                      Reviews ({pendingReviews.length})
-                    </ScrollableTabsTrigger>
+              {/* Navigation Links */}
+              <AnchorLinksWrap>
+                {withSummary && (
+                  <button
+                    onClick={() => scrollToSection("summary")}
+                    className={anchorStyles}
+                  >
+                    Summary
+                  </button>
                 )}
-                  {withSummary && (
-                    <ScrollableTabsTrigger value="summary">
-                      Summary
-                    </ScrollableTabsTrigger>
-                  )}
-                  <ScrollableTabsTrigger value="output">
-                    Output
-                  </ScrollableTabsTrigger>
-                  <ScrollableTabsTrigger value="input">
-                    Your input
-                  </ScrollableTabsTrigger>
-                </ScrollableTabsList>
-                <div className="my-6 flex flex-col gap-6">
-                  {/* Human-in-the-Loop Reviews Section */}
-                  {withReviews && (
-                    <ScrollableTabsContent value="reviews">
-                      <div id="reviews" className="scroll-mt-4 px-4">
-                        {reviewsLoading ? (
-                          <LoadingSpinner size="small" />
-                        ) : pendingReviews.length > 0 ? (
-                          <PendingReviewsList
-                            reviews={pendingReviews}
-                            onReviewComplete={refetchReviews}
-                            emptyMessage="No pending reviews for this execution"
-                          />
-                        ) : (
-                          <Text variant="body" className="text-zinc-600">
-                            No pending reviews for this execution
-                          </Text>
-                        )}
-                      </div>
-                    </ScrollableTabsContent>
-                  )}
+                <button
+                  onClick={() => scrollToSection("output")}
+                  className={anchorStyles}
+                >
+                  Output
+                </button>
+                <button
+                  onClick={() => scrollToSection("input")}
+                  className={anchorStyles}
+                >
+                  Your input
+                </button>
+                {withReviews && (
+                  <button
+                    onClick={() => scrollToSection("reviews")}
+                    className={anchorStyles}
+                  >
+                    Reviews ({pendingReviews.length})
+                  </button>
+                )}
+              </AnchorLinksWrap>
 
               {/* Summary Section */}
               {withSummary && (
-                <ScrollableTabsContent value="summary">
-                  <div className="scroll-mt-4">
-                    <RunDetailCard
-                      title={
-                        <div className="flex items-center gap-1">
-                          <Text variant="lead-semibold">Summary</Text>
-                          <InformationTooltip
-                            iconSize={20}
-                            description="This AI-generated summary describes how the agent handled your task. It's an experimental feature and may occasionally be inaccurate."
+                <div id="summary" className="scroll-mt-4">
+                  <RunDetailCard
+                    title={
+                      <div className="flex items-center gap-2">
+                        <Text variant="lead-semibold">Summary</Text>
+                        <TooltipProvider>
+                          <Tooltip>
+                            <TooltipTrigger asChild>
+                              <InfoIcon
+                                size={16}
+                                className="cursor-help text-neutral-500 hover:text-neutral-700"
                           />
-                        </div>
-                      }
-                    >
-                      <RunSummary run={run} />
-                    </RunDetailCard>
+                            </TooltipTrigger>
+                            <TooltipContent>
+                              <p className="max-w-xs">
+                                This AI-generated summary describes how the agent
+                                handled your task. It's an experimental
+                                feature and may occasionally be inaccurate.
+                              </p>
+                            </TooltipContent>
+                          </Tooltip>
+                        </TooltipProvider>
                       </div>
-                  </ScrollableTabsContent>
-                )}
-
-                  {/* Output Section */}
-                  <ScrollableTabsContent value="output">
-                    <div className="scroll-mt-4">
-                      <RunDetailCard title="Output">
-                        {isLoading ? (
-                          <div className="text-neutral-500">
-                            <LoadingSpinner />
-                          </div>
-                        ) : run && "outputs" in run ? (
-                          <RunOutputs outputs={run.outputs as any} />
-                        ) : (
-                          <Text variant="body" className="text-neutral-600">
-                            No output from this run.
-                          </Text>
-                        )}
-                      </RunDetailCard>
-                    </div>
-                  </ScrollableTabsContent>
-
-                  {/* Input Section */}
-                  <ScrollableTabsContent value="input">
-                    <div id="input" className="scroll-mt-4">
-                      <RunDetailCard
-                        title={
-                          <div className="flex items-center gap-1">
-                            <Text variant="lead-semibold">Your input</Text>
-                            <InformationTooltip
-                              iconSize={20}
-                              description="This is the input that was provided to the agent for running this task."
-                            />
-                          </div>
-                        }
-                      >
-                        <AgentInputsReadOnly
-                          agent={agent}
-                          inputs={run?.inputs}
-                          credentialInputs={run?.credential_inputs}
-                        />
-                      </RunDetailCard>
-                    </div>
-                  </ScrollableTabsContent>
+                    }
+                  >
+                    <RunSummary run={run} />
+                  </RunDetailCard>
                 </div>
-              </ScrollableTabs>
+              )}
 
+              {/* Output Section */}
+              <div id="output" className="scroll-mt-4">
+                <RunDetailCard title="Output">
+                  {isLoading ? (
+                    <div className="text-neutral-500">
+                      <LoadingSpinner />
+                    </div>
+                  ) : run && "outputs" in run ? (
+                    <RunOutputs outputs={run.outputs as any} />
+                  ) : (
+                    <Text variant="body" className="text-neutral-600">
+                      No output from this run.
+                    </Text>
+                  )}
+                </RunDetailCard>
+              </div>
+
+              {/* Input Section */}
+              <div id="input" className="scroll-mt-4">
+                <RunDetailCard title="Your input">
+                  <AgentInputsReadOnly
+                    agent={agent}
+                    inputs={run?.inputs}
+                    credentialInputs={run?.credential_inputs}
+                  />
+                </RunDetailCard>
+              </div>
+
+              {/* Reviews Section */}
+              {withReviews && (
+                <div id="reviews" className="scroll-mt-4">
+                  <RunDetailCard>
+                    {reviewsLoading ? (
+                      <div className="text-neutral-500">Loading reviews…</div>
+                    ) : pendingReviews.length > 0 ? (
+                      <PendingReviewsList
+                        reviews={pendingReviews}
+                        onReviewComplete={refetchReviews}
+                        emptyMessage="No pending reviews for this execution"
+                      />
+                    ) : (
+                      <div className="text-neutral-600">
+                        No pending reviews for this execution
+                      </div>
+                    )}
+                  </RunDetailCard>
+                </div>
+              )}
             </div>
         </SelectedViewLayout>
       </div>
@@ -2,10 +2,10 @@ import { AgentExecutionStatus } from "@/app/api/__generated__/models/agentExecut
 import {
   CheckCircleIcon,
   ClockIcon,
+  EyeIcon,
   PauseCircleIcon,
   StopCircleIcon,
   WarningCircleIcon,
-  WarningIcon,
   XCircleIcon,
 } from "@phosphor-icons/react";
 import { Text } from "@/components/atoms/Text/Text";
@@ -38,9 +38,9 @@ const statusIconMap: Record<AgentExecutionStatus, StatusIconMap> = {
     textColor: "!text-yellow-700",
   },
   REVIEW: {
-    icon: <WarningIcon size={16} className="text-yellow-700" weight="bold" />,
-    bgColor: "bg-yellow-50",
-    textColor: "!text-yellow-700",
+    icon: <EyeIcon size={16} className="text-orange-700" weight="bold" />,
+    bgColor: "bg-orange-50",
+    textColor: "!text-orange-700",
   },
   COMPLETED: {
     icon: (
@@ -25,7 +25,7 @@ export function RunSummary({ run }: Props) {
       </p>
 
       {typeof correctnessScore === "number" && (
-        <div className="flex items-center gap-3">
+        <div className="flex items-center gap-3 rounded-lg bg-neutral-50 p-3">
           <div className="flex items-center gap-2">
             <span className="text-sm font-medium text-neutral-600">
               Success Estimate:
@@ -1,52 +0,0 @@
-import { GraphModel } from "@/app/api/__generated__/models/graphModel";
-import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
-import { Button } from "@/components/atoms/Button/Button";
-import { Graph } from "@/lib/autogpt-server-api/types";
-import { cn } from "@/lib/utils";
-import { ShieldCheckIcon, ShieldIcon } from "@phosphor-icons/react";
-import { useAgentSafeMode } from "@/hooks/useAgentSafeMode";
-
-interface Props {
-  graph: GraphModel | LibraryAgent | Graph;
-  className?: string;
-  fullWidth?: boolean;
-}
-
-export function SafeModeToggle({ graph }: Props) {
-  const {
-    currentSafeMode,
-    isPending,
-    shouldShowToggle,
-    isStateUndetermined,
-    handleToggle,
-  } = useAgentSafeMode(graph);
-
-  if (!shouldShowToggle || isStateUndetermined) {
-    return null;
-  }
-
-  return (
-    <Button
-      variant="icon"
-      key={graph.id}
-      size="icon"
-      aria-label={
-        currentSafeMode!
-          ? "Safe Mode: ON. Human in the loop blocks require manual review"
-          : "Safe Mode: OFF. Human in the loop blocks proceed automatically"
-      }
-      onClick={handleToggle}
-      className={cn(isPending ? "opacity-0" : "opacity-100")}
-    >
-      {currentSafeMode! ? (
-        <>
-          <ShieldCheckIcon weight="bold" size={16} />
-        </>
-      ) : (
-        <>
-          <ShieldIcon weight="bold" size={16} />
-        </>
-      )}
-    </Button>
-  );
-}
@@ -2,6 +2,7 @@ import { GraphExecution } from "@/app/api/__generated__/models/graphExecution";
 import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
 import { Button } from "@/components/atoms/Button/Button";
 import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner";
+import { FloatingSafeModeToggle } from "@/components/molecules/FloatingSafeModeToggle/FloatingSafeModeToggle";
 import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag";
 import {
   ArrowBendLeftUpIcon,
@@ -15,7 +16,6 @@ import { SelectedActionsWrap } from "../../../SelectedActionsWrap";
 import { ShareRunButton } from "../../../ShareRunButton/ShareRunButton";
 import { CreateTemplateModal } from "../CreateTemplateModal/CreateTemplateModal";
 import { useSelectedRunActions } from "./useSelectedRunActions";
-import { SafeModeToggle } from "../SafeModeToggle";
 
 type Props = {
   agent: LibraryAgent;
@@ -113,7 +113,7 @@ export function SelectedRunActions({
           shareToken={run.share_token}
         />
       )}
-      <SafeModeToggle graph={agent} fullWidth={false} />
+      <FloatingSafeModeToggle graph={agent} variant="white" fullWidth={false} />
       {canRunManually && (
         <>
           <Button
@@ -9,6 +9,7 @@ import { humanizeCronExpression } from "@/lib/cron-expression-utils";
 import { isLargeScreen, useBreakpoint } from "@/lib/hooks/useBreakpoint";
 import { formatInTimezone, getTimezoneDisplayName } from "@/lib/timezone-utils";
 import { AgentInputsReadOnly } from "../../modals/AgentInputsReadOnly/AgentInputsReadOnly";
+import { AnchorLinksWrap } from "../AnchorLinksWrap";
 import { LoadingSelectedContent } from "../LoadingSelectedContent";
 import { RunDetailCard } from "../RunDetailCard/RunDetailCard";
 import { RunDetailHeader } from "../RunDetailHeader/RunDetailHeader";
@@ -16,20 +17,19 @@ import { SelectedViewLayout } from "../SelectedViewLayout";
 import { SelectedScheduleActions } from "./components/SelectedScheduleActions";
 import { useSelectedScheduleView } from "./useSelectedScheduleView";
 
+const anchorStyles =
+  "border-b-2 border-transparent pb-1 text-sm font-medium text-slate-600 transition-colors hover:text-slate-900 hover:border-slate-900";
+
 interface Props {
   agent: LibraryAgent;
   scheduleId: string;
   onClearSelectedRun?: () => void;
-  onSelectSettings?: () => void;
-  selectedSettings?: boolean;
 }
 
 export function SelectedScheduleView({
   agent,
   scheduleId,
   onClearSelectedRun,
-  onSelectSettings,
-  selectedSettings,
 }: Props) {
   const { schedule, isLoading, error } = useSelectedScheduleView(
     agent.graph_id,
@@ -45,6 +45,13 @@ export function SelectedScheduleView({
   const breakpoint = useBreakpoint();
   const isLgScreenUp = isLargeScreen(breakpoint);
 
+  function scrollToSection(id: string) {
+    const element = document.getElementById(id);
+    if (element) {
+      element.scrollIntoView({ behavior: "smooth", block: "start" });
+    }
+  }
+
   if (error) {
     return (
       <ErrorCard
@@ -72,17 +79,13 @@ export function SelectedScheduleView({
   }
 
   if (isLoading && !schedule) {
-    return <LoadingSelectedContent agent={agent} />;
+    return <LoadingSelectedContent agentName={agent.name} agentId={agent.id} />;
   }
 
   return (
     <div className="flex h-full w-full gap-4">
       <div className="flex min-h-0 min-w-0 flex-1 flex-col">
-        <SelectedViewLayout
-          agent={agent}
-          onSelectSettings={onSelectSettings}
-          selectedSettings={selectedSettings}
-        >
+        <SelectedViewLayout agentName={agent.name} agentId={agent.id}>
           <div className="flex flex-col gap-4">
             <div className="flex w-full flex-col gap-0">
               <RunDetailHeader
@@ -105,6 +108,22 @@ export function SelectedScheduleView({
               ) : null}
             </div>
 
+            {/* Navigation Links */}
+            <AnchorLinksWrap>
+              <button
+                onClick={() => scrollToSection("schedule")}
+                className={anchorStyles}
+              >
+                Schedule
+              </button>
+              <button
+                onClick={() => scrollToSection("input")}
+                className={anchorStyles}
+              >
+                Your input
+              </button>
+            </AnchorLinksWrap>
+
             {/* Schedule Section */}
             <div id="schedule" className="scroll-mt-4">
               <RunDetailCard title="Schedule">
@@ -0,0 +1,84 @@
+"use client";
+
+import type { GraphExecutionJobInfo } from "@/app/api/__generated__/models/graphExecutionJobInfo";
+import type { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
+import { Button } from "@/components/atoms/Button/Button";
+import { Text } from "@/components/atoms/Text/Text";
+import { Dialog } from "@/components/molecules/Dialog/Dialog";
+import { PencilSimpleIcon } from "@phosphor-icons/react";
+import { RunAgentInputs } from "../../../../modals/RunAgentInputs/RunAgentInputs";
+import { useEditInputsModal } from "./useEditInputsModal";
+
+type Props = {
+  agent: LibraryAgent;
+  schedule: GraphExecutionJobInfo;
+};
+
+export function EditInputsModal({ agent, schedule }: Props) {
+  const {
+    isOpen,
+    setIsOpen,
+    inputFields,
+    values,
+    setValues,
+    handleSave,
+    isSaving,
+  } = useEditInputsModal(agent, schedule);
+
+  return (
+    <Dialog
+      controlled={{ isOpen, set: setIsOpen }}
+      styling={{ maxWidth: "32rem" }}
+    >
+      <Dialog.Trigger>
+        <Button
+          variant="ghost"
+          size="small"
+          className="absolute -right-2 -top-2"
+        >
+          <PencilSimpleIcon className="size-4" /> Edit inputs
+        </Button>
+      </Dialog.Trigger>
+      <Dialog.Content>
+        <div className="flex flex-col gap-4">
+          <Text variant="h3">Edit inputs</Text>
+          <div className="flex flex-col gap-4">
+            {Object.entries(inputFields).map(([key, fieldSchema]) => (
+              <div key={key} className="flex flex-col gap-1.5">
+                <label className="text-sm font-medium">
+                  {fieldSchema?.title || key}
+                </label>
+                <RunAgentInputs
+                  schema={fieldSchema as any}
+                  value={values[key]}
+                  onChange={(v) => setValues((prev) => ({ ...prev, [key]: v }))}
+                />
+              </div>
+            ))}
+          </div>
+        </div>
+        <Dialog.Footer>
+          <div className="flex w-full justify-end gap-2">
+            <Button
+              variant="secondary"
+              size="small"
+              onClick={() => setIsOpen(false)}
+              className="min-w-32"
+            >
+              Cancel
+            </Button>
+            <Button
+              variant="primary"
+              size="small"
+              onClick={handleSave}
+              loading={isSaving}
+              className="min-w-32"
+            >
+              {isSaving ? "Saving…" : "Save"}
+            </Button>
+          </div>
+        </Dialog.Footer>
+      </Dialog.Content>
+    </Dialog>
+  );
+}
@@ -0,0 +1,78 @@
+"use client";
+
+import { useMemo, useState } from "react";
+import { useQueryClient } from "@tanstack/react-query";
+import { getGetV1ListExecutionSchedulesForAGraphQueryKey } from "@/app/api/__generated__/endpoints/schedules/schedules";
+import type { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
+import type { GraphExecutionJobInfo } from "@/app/api/__generated__/models/graphExecutionJobInfo";
+import { useToast } from "@/components/molecules/Toast/use-toast";
+
+function getAgentInputFields(agent: LibraryAgent): Record<string, any> {
+  const schema = agent.input_schema as unknown as {
+    properties?: Record<string, any>;
+  } | null;
+  if (!schema || !schema.properties) return {};
+  const properties = schema.properties as Record<string, any>;
+  const visibleEntries = Object.entries(properties).filter(
+    ([, sub]) => !sub?.hidden,
+  );
+  return Object.fromEntries(visibleEntries);
+}
+
+export function useEditInputsModal(
+  agent: LibraryAgent,
+  schedule: GraphExecutionJobInfo,
+) {
+  const queryClient = useQueryClient();
+  const { toast } = useToast();
+  const [isOpen, setIsOpen] = useState(false);
+  const [isSaving, setIsSaving] = useState(false);
+  const inputFields = useMemo(() => getAgentInputFields(agent), [agent]);
+  const [values, setValues] = useState<Record<string, any>>({
+    ...(schedule.input_data as Record<string, any>),
+  });
+
+  async function handleSave() {
+    setIsSaving(true);
+    try {
+      const res = await fetch(`/api/schedules/${schedule.id}`, {
+        method: "PATCH",
+        headers: { "Content-Type": "application/json" },
+        body: JSON.stringify({ inputs: values }),
+      });
+      if (!res.ok) {
+        let message = "Failed to update schedule inputs";
+        const data = await res.json();
+        message = data?.message || data?.detail || message;
+        throw new Error(message);
+      }
+
+      await queryClient.invalidateQueries({
+        queryKey: getGetV1ListExecutionSchedulesForAGraphQueryKey(
+          schedule.graph_id,
+        ),
+      });
+      toast({
+        title: "Schedule inputs updated",
+      });
+      setIsOpen(false);
+    } catch (error: any) {
+      toast({
+        title: "Failed to update schedule inputs",
+        description: error?.message || "An unexpected error occurred.",
+        variant: "destructive",
+      });
+    }
+    setIsSaving(false);
+  }
+
+  return {
+    isOpen,
+    setIsOpen,
+    inputFields,
+    values,
+    setValues,
+    handleSave,
+    isSaving,
+  } as const;
+}
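A brief note on the two new files above: useEditInputsModal owns the dialog state, PATCHes the schedule's inputs, and invalidates the schedules query, while EditInputsModal renders the trigger button and the form. A minimal sketch of how they would be wired into a schedule detail view (the surrounding component and the agent/schedule variables are illustrative assumptions):

    // Hypothetical call site inside a schedule detail card.
    <EditInputsModal agent={agent} schedule={schedule} />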
@@ -25,10 +25,9 @@ export function SelectedScheduleActions({ agent, scheduleId }: Props) {
       <Button
         variant="icon"
         size="icon"
+        aria-label="Open in builder"
         as="NextLink"
         href={openInBuilderHref}
-        target="_blank"
-        aria-label="View scheduled task details"
       >
         <EyeIcon weight="bold" size={18} className="text-zinc-700" />
       </Button>
@@ -1,67 +0,0 @@
-import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
-import { Text } from "@/components/atoms/Text/Text";
-import { Switch } from "@/components/atoms/Switch/Switch";
-import { Button } from "@/components/atoms/Button/Button";
-import { ArrowLeftIcon } from "@phosphor-icons/react";
-import { useAgentSafeMode } from "@/hooks/useAgentSafeMode";
-import { SelectedViewLayout } from "../SelectedViewLayout";
-import { AGENT_LIBRARY_SECTION_PADDING_X } from "../../../helpers";
-
-interface Props {
-  agent: LibraryAgent;
-  onClearSelectedRun: () => void;
-}
-
-export function SelectedSettingsView({ agent, onClearSelectedRun }: Props) {
-  const { currentSafeMode, isPending, hasHITLBlocks, handleToggle } =
-    useAgentSafeMode(agent);
-
-  return (
-    <SelectedViewLayout agent={agent} onSelectSettings={() => {}}>
-      <div className="flex flex-col gap-4">
-        <div
-          className={`${AGENT_LIBRARY_SECTION_PADDING_X} mb-8 flex items-center gap-3`}
-        >
-          <Button
-            variant="icon"
-            size="small"
-            onClick={onClearSelectedRun}
-            className="w-[2.375rem]"
-          >
-            <ArrowLeftIcon />
-          </Button>
-          <Text variant="h2">Agent Settings</Text>
-        </div>
-
-        <div className={AGENT_LIBRARY_SECTION_PADDING_X}>
-          {!hasHITLBlocks ? (
-            <div className="rounded-xl border border-zinc-100 bg-white p-6">
-              <Text variant="body" className="text-muted-foreground">
-                This agent doesn't have any human-in-the-loop blocks, so
-                there are no settings to configure.
-              </Text>
-            </div>
-          ) : (
-            <div className="flex w-full max-w-2xl flex-col items-start gap-4 rounded-xl border border-zinc-100 bg-white p-6">
-              <div className="flex w-full items-start justify-between gap-4">
-                <div className="flex-1">
-                  <Text variant="large-semibold">Require human approval</Text>
-                  <Text variant="large" className="mt-1 text-zinc-900">
-                    The agent will pause and wait for your review before
-                    continuing
-                  </Text>
-                </div>
-                <Switch
-                  checked={currentSafeMode || false}
-                  onCheckedChange={handleToggle}
-                  disabled={isPending}
-                  className="mt-1"
-                />
-              </div>
-            </div>
-          )}
-        </div>
-      </div>
-    </SelectedViewLayout>
-  );
-}
@@ -4,6 +4,7 @@ import type { GraphExecutionMeta } from "@/app/api/__generated__/models/graphExe
 import type { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
 import { Input } from "@/components/atoms/Input/Input";
 import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard";
+import { InformationTooltip } from "@/components/molecules/InformationTooltip/InformationTooltip";
 import {
   getAgentCredentialsFields,
   getAgentInputFields,
@@ -87,7 +88,7 @@ export function SelectedTemplateView({
   }
 
   if (isLoading && !template) {
-    return <LoadingSelectedContent agent={agent} />;
+    return <LoadingSelectedContent agentName={agent.name} agentId={agent.id} />;
   }
 
   if (!template) {
@@ -100,7 +101,7 @@ export function SelectedTemplateView({
   return (
     <div className="flex h-full w-full gap-4">
       <div className="flex min-h-0 min-w-0 flex-1 flex-col">
-        <SelectedViewLayout agent={agent}>
+        <SelectedViewLayout agentName={agent.name} agentId={agent.id}>
           <div className="flex flex-col gap-4">
             <RunDetailHeader agent={agent} run={undefined} />
 
@@ -137,13 +138,25 @@ export function SelectedTemplateView({
             <RunDetailCard title="Your Input">
               <div className="flex flex-col gap-4">
                 {inputFields.map(([key, inputSubSchema]) => (
-                  <RunAgentInputs
+                  <div
                     key={key}
-                    schema={inputSubSchema}
-                    value={inputs[key] ?? inputSubSchema.default}
-                    placeholder={inputSubSchema.description}
-                    onChange={(value) => setInputValue(key, value)}
-                  />
+                    className="flex w-full flex-col gap-0 space-y-2"
+                  >
+                    <label className="flex items-center gap-1 text-sm font-medium">
+                      {inputSubSchema.title || key}
+                      {inputSubSchema.description && (
+                        <InformationTooltip
+                          description={inputSubSchema.description}
+                        />
+                      )}
+                    </label>
+                    <RunAgentInputs
+                      schema={inputSubSchema}
+                      value={inputs[key] ?? inputSubSchema.default}
+                      placeholder={inputSubSchema.description}
+                      onChange={(value) => setInputValue(key, value)}
+                    />
+                  </div>
                 ))}
               </div>
             </RunDetailCard>
@@ -3,6 +3,7 @@
 import type { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
 import { Input } from "@/components/atoms/Input/Input";
 import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard";
+import { InformationTooltip } from "@/components/molecules/InformationTooltip/InformationTooltip";
 import {
   getAgentCredentialsFields,
   getAgentInputFields,
@@ -81,7 +82,7 @@ export function SelectedTriggerView({
   }
 
   if (isLoading && !trigger) {
-    return <LoadingSelectedContent agent={agent} />;
+    return <LoadingSelectedContent agentName={agent.name} agentId={agent.id} />;
   }
 
   if (!trigger) {
@@ -93,7 +94,7 @@ export function SelectedTriggerView({
   return (
     <div className="flex h-full w-full gap-4">
       <div className="flex min-h-0 min-w-0 flex-1 flex-col">
-        <SelectedViewLayout agent={agent}>
+        <SelectedViewLayout agentName={agent.name} agentId={agent.id}>
           <div className="flex flex-col gap-4">
             <RunDetailHeader agent={agent} run={undefined} />
 
@@ -130,13 +131,25 @@ export function SelectedTriggerView({
             <RunDetailCard title="Your Input">
              <div className="flex flex-col gap-4">
                 {inputFields.map(([key, inputSubSchema]) => (
-                  <RunAgentInputs
+                  <div
                     key={key}
-                    schema={inputSubSchema}
-                    value={inputs[key] ?? inputSubSchema.default}
-                    placeholder={inputSubSchema.description}
-                    onChange={(value) => setInputValue(key, value)}
-                  />
+                    className="flex w-full flex-col gap-0 space-y-2"
+                  >
+                    <label className="flex items-center gap-1 text-sm font-medium">
+                      {inputSubSchema.title || key}
+                      {inputSubSchema.description && (
+                        <InformationTooltip
+                          description={inputSubSchema.description}
+                        />
+                      )}
+                    </label>
+                    <RunAgentInputs
+                      schema={inputSubSchema}
+                      value={inputs[key] ?? inputSubSchema.default}
+                      placeholder={inputSubSchema.description}
+                      onChange={(value) => setInputValue(key, value)}
+                    />
+                  </div>
                 ))}
               </div>
             </RunDetailCard>
@@ -1,15 +1,11 @@
 import { Breadcrumbs } from "@/components/molecules/Breadcrumbs/Breadcrumbs";
-import { AgentSettingsButton } from "@/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/other/AgentSettingsButton";
-import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
 import { AGENT_LIBRARY_SECTION_PADDING_X } from "../../helpers";
 import { SectionWrap } from "../other/SectionWrap";
 
 interface Props {
-  agent: LibraryAgent;
+  agentName: string;
+  agentId: string;
   children: React.ReactNode;
-  additionalBreadcrumb?: { name: string; link?: string };
-  onSelectSettings?: () => void;
-  selectedSettings?: boolean;
 }
 
 export function SelectedViewLayout(props: Props) {
@@ -18,24 +14,12 @@ export function SelectedViewLayout(props: Props) {
       <div
         className={`${AGENT_LIBRARY_SECTION_PADDING_X} flex-shrink-0 border-b border-zinc-100 pb-0 lg:pb-4`}
       >
-        <div className="relative flex w-fit items-center gap-2">
-          <Breadcrumbs
-            items={[
-              { name: "My Library", link: "/library" },
-              {
-                name: props.agent.name,
-              },
-            ]}
-          />
-          {props.agent && props.onSelectSettings && (
-            <div className="absolute -right-8">
-              <AgentSettingsButton
-                agent={props.agent}
-                onSelectSettings={props.onSelectSettings}
-              />
-            </div>
-          )}
-        </div>
+        <Breadcrumbs
+          items={[
+            { name: "My Library", link: "/library" },
+            { name: props.agentName, link: `/library/agents/${props.agentId}` },
+          ]}
+        />
       </div>
       <div className="flex min-h-0 flex-1 flex-col overflow-y-auto overflow-x-visible">
         {props.children}
|||||||
@@ -34,8 +34,8 @@ const statusIconMap: Record<AgentExecutionStatus, React.ReactNode> = {
|
|||||||
</IconWrapper>
|
</IconWrapper>
|
||||||
),
|
),
|
||||||
REVIEW: (
|
REVIEW: (
|
||||||
<IconWrapper className="border-yellow-50 bg-yellow-50">
|
<IconWrapper className="border-orange-50 bg-orange-50">
|
||||||
<PauseCircleIcon size={16} className="text-yellow-700" weight="bold" />
|
<PauseCircleIcon size={16} className="text-orange-700" weight="bold" />
|
||||||
</IconWrapper>
|
</IconWrapper>
|
||||||
),
|
),
|
||||||
COMPLETED: (
|
COMPLETED: (
|
||||||
|
|||||||
@@ -89,8 +89,10 @@ export function useNewAgentLibraryView() {
|
|||||||
[sidebarCounts],
|
[sidebarCounts],
|
||||||
);
|
);
|
||||||
|
|
||||||
// Show sidebar layout while loading or when there are items or settings is selected
|
// Show sidebar layout while loading or when there are items
|
||||||
const showSidebarLayout = useEffect(() => {
|
const showSidebarLayout = sidebarLoading || hasAnyItems;
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
if (agent) {
|
if (agent) {
|
||||||
document.title = `${agent.name} - Library - AutoGPT Platform`;
|
document.title = `${agent.name} - Library - AutoGPT Platform`;
|
||||||
}
|
}
|
||||||
@@ -132,13 +134,6 @@ export function useNewAgentLibraryView() {
     });
   }
 
-  function handleSelectSettings() {
-    setQueryStates({
-      activeItem: "settings",
-      activeTab: "runs", // Reset to runs tab when going to settings
-    });
-  }
-
   const handleCountsChange = useCallback(
     (counts: {
       runsCount: number;
@@ -210,7 +205,6 @@ export function useNewAgentLibraryView() {
     handleCountsChange,
     handleSelectRun,
     onRunInitiated,
-    handleSelectSettings,
     onTriggerSetup,
     onScheduleCreated,
   };
|||||||
@@ -680,20 +680,28 @@ export function AgentRunDraftView({
|
|||||||
|
|
||||||
{/* Regular inputs */}
|
{/* Regular inputs */}
|
||||||
{Object.entries(agentInputFields).map(([key, inputSubSchema]) => (
|
{Object.entries(agentInputFields).map(([key, inputSubSchema]) => (
|
||||||
<RunAgentInputs
|
<div key={key} className="flex flex-col space-y-2">
|
||||||
key={key}
|
<label className="flex items-center gap-1 text-sm font-medium">
|
||||||
schema={inputSubSchema}
|
{inputSubSchema.title || key}
|
||||||
value={inputValues[key] ?? inputSubSchema.default}
|
<InformationTooltip
|
||||||
placeholder={inputSubSchema.description}
|
description={inputSubSchema.description}
|
||||||
onChange={(value) => {
|
/>
|
||||||
setInputValues((obj) => ({
|
</label>
|
||||||
...obj,
|
|
||||||
[key]: value,
|
<RunAgentInputs
|
||||||
}));
|
schema={inputSubSchema}
|
||||||
setChangedPresetAttributes((prev) => prev.add("inputs"));
|
value={inputValues[key] ?? inputSubSchema.default}
|
||||||
}}
|
placeholder={inputSubSchema.description}
|
||||||
data-testid={`agent-input-${key}`}
|
onChange={(value) => {
|
||||||
/>
|
setInputValues((obj) => ({
|
||||||
|
...obj,
|
||||||
|
[key]: value,
|
||||||
|
}));
|
||||||
|
setChangedPresetAttributes((prev) => prev.add("inputs"));
|
||||||
|
}}
|
||||||
|
data-testid={`agent-input-${key}`}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
))}
|
))}
|
||||||
</CardContent>
|
</CardContent>
|
||||||
</Card>
|
</Card>
|
||||||
|
|||||||
@@ -38,7 +38,7 @@ const statusData: Record<
   draft: { label: "Draft", variant: "secondary" },
   stopped: { label: "Stopped", variant: "secondary" },
   scheduled: { label: "Scheduled", variant: "secondary" },
-  review: { label: "In Review", variant: "warning" },
+  review: { label: "In Review", variant: "orange" },
 };
 
 const statusStyles = {
@@ -47,6 +47,8 @@ const statusStyles = {
   destructive: "bg-red-100 text-red-800 hover:bg-red-100 hover:text-red-800",
   warning:
     "bg-yellow-100 text-yellow-800 hover:bg-yellow-100 hover:text-yellow-800",
+  orange:
+    "bg-orange-100 text-orange-800 hover:bg-orange-100 hover:text-orange-800",
   info: "bg-blue-100 text-blue-800 hover:bg-blue-100 hover:text-blue-800",
   secondary:
     "bg-slate-100 text-slate-800 hover:bg-slate-100 hover:text-slate-800",
@@ -11,16 +11,8 @@ import { environment } from "@/services/environment";
 import { LoadingLogin } from "./components/LoadingLogin";
 import { useLoginPage } from "./useLoginPage";
 import { MobileWarningBanner } from "@/components/auth/MobileWarningBanner";
-import { useSearchParams } from "next/navigation";
 
 export default function LoginPage() {
-  const searchParams = useSearchParams();
-  const nextUrl = searchParams.get("next");
-  // Preserve next parameter when switching between login/signup
-  const signupHref = nextUrl
-    ? `/signup?next=${encodeURIComponent(nextUrl)}`
-    : "/signup";
-
   const {
     user,
     form,
@@ -116,7 +108,7 @@ export default function LoginPage() {
         </Form>
         <AuthCard.BottomText
           text="Don't have an account?"
-          link={{ text: "Sign up", href: signupHref }}
+          link={{ text: "Sign up", href: "/signup" }}
         />
       </AuthCard>
       <MobileWarningBanner />
@@ -3,7 +3,7 @@ import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
 import { environment } from "@/services/environment";
 import { loginFormSchema, LoginProvider } from "@/types/auth";
 import { zodResolver } from "@hookform/resolvers/zod";
-import { useRouter, useSearchParams } from "next/navigation";
+import { useRouter } from "next/navigation";
 import { useEffect, useState } from "react";
 import { useForm } from "react-hook-form";
 import z from "zod";
@@ -13,7 +13,6 @@ export function useLoginPage() {
   const { supabase, user, isUserLoading, isLoggedIn } = useSupabase();
   const [feedback, setFeedback] = useState<string | null>(null);
   const router = useRouter();
-  const searchParams = useSearchParams();
   const { toast } = useToast();
   const [isLoading, setIsLoading] = useState(false);
   const [isLoggingIn, setIsLoggingIn] = useState(false);
@@ -21,14 +20,11 @@ export function useLoginPage() {
   const [showNotAllowedModal, setShowNotAllowedModal] = useState(false);
   const isCloudEnv = environment.isCloud();
 
-  // Get redirect destination from 'next' query parameter
-  const nextUrl = searchParams.get("next");
-
   useEffect(() => {
     if (isLoggedIn && !isLoggingIn) {
-      router.push(nextUrl || "/marketplace");
+      router.push("/marketplace");
     }
-  }, [isLoggedIn, isLoggingIn, nextUrl, router]);
+  }, [isLoggedIn, isLoggingIn]);
 
   const form = useForm<z.infer<typeof loginFormSchema>>({
     resolver: zodResolver(loginFormSchema),
@@ -43,16 +39,10 @@ export function useLoginPage() {
     setIsLoggingIn(true);
 
     try {
-      // Include next URL in OAuth flow if present
-      const callbackUrl = nextUrl
-        ? `/auth/callback?next=${encodeURIComponent(nextUrl)}`
-        : `/auth/callback`;
-      const fullCallbackUrl = `${window.location.origin}${callbackUrl}`;
-
       const response = await fetch("/api/auth/provider", {
         method: "POST",
         headers: { "Content-Type": "application/json" },
-        body: JSON.stringify({ provider, redirectTo: fullCallbackUrl }),
+        body: JSON.stringify({ provider }),
       });
 
       if (!response.ok) {
@@ -93,9 +83,7 @@ export function useLoginPage() {
         throw new Error(result.error || "Login failed");
       }
 
-      if (nextUrl) {
-        router.replace(nextUrl);
-      } else if (result.onboarding) {
+      if (result.onboarding) {
         router.replace("/onboarding");
       } else {
         router.replace("/marketplace");
|||||||
@@ -54,7 +54,7 @@ export const AgentFlowList = ({
|
|||||||
|
|
||||||
<div className="flex items-center">
|
<div className="flex items-center">
|
||||||
{/* Split "Create" button */}
|
{/* Split "Create" button */}
|
||||||
<Button variant="outline" className="rounded-r-none">
|
<Button variant="outline" className="rounded-r-none" asChild>
|
||||||
<Link href="/build">Create</Link>
|
<Link href="/build">Create</Link>
|
||||||
</Button>
|
</Button>
|
||||||
<Dialog>
|
<Dialog>
|
||||||
|
|||||||
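A quick note on the hunk above: adding `asChild` makes the Button render its child `Link` as the actual element instead of nesting an anchor inside a `<button>`, which keeps the markup valid while preserving the button styling. A sketch of the resulting pattern, assuming the Button component forwards props through a Radix-style Slot (as shadcn-style buttons typically do):

    <Button variant="outline" asChild>
      <Link href="/build">Create</Link>
    </Button>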
Some files were not shown because too many files have changed in this diff.