diff --git a/.github/workflows/platform-backend-ci.yml b/.github/workflows/platform-backend-ci.yml
index 7671919dba..938cb7b6f9 100644
--- a/.github/workflows/platform-backend-ci.yml
+++ b/.github/workflows/platform-backend-ci.yml
@@ -137,9 +137,9 @@ jobs:
           SUPABASE_URL: ${{ steps.supabase.outputs.API_URL }}
           SUPABASE_SERVICE_ROLE_KEY: ${{ steps.supabase.outputs.SERVICE_ROLE_KEY }}
           SUPABASE_JWT_SECRET: ${{ steps.supabase.outputs.JWT_SECRET }}
-          REDIS_HOST: "localhost"
-          REDIS_PORT: "6379"
-          REDIS_PASSWORD: "testpassword"
+          REDIS_HOST: 'localhost'
+          REDIS_PORT: '6379'
+          REDIS_PASSWORD: 'testpassword'
 
     env:
       CI: true
@@ -152,8 +152,8 @@ jobs:
       # If you want to replace this, you can do so by making our entire system generate
       # new credentials for each local user and update the environment variables in
       # the backend service, docker composes, and examples
-      RABBITMQ_DEFAULT_USER: rabbitmq_user_default
-      RABBITMQ_DEFAULT_PASS: k0VMxyIJF9S35f3x2uaw5IWAl6Y536O7
+      RABBITMQ_DEFAULT_USER: 'rabbitmq_user_default'
+      RABBITMQ_DEFAULT_PASS: 'k0VMxyIJF9S35f3x2uaw5IWAl6Y536O7'
 
       # - name: Upload coverage reports to Codecov
       #   uses: codecov/codecov-action@v4
diff --git a/autogpt_platform/backend/backend/data/graph.py b/autogpt_platform/backend/backend/data/graph.py
index 2dde243bc2..3d9a107676 100644
--- a/autogpt_platform/backend/backend/data/graph.py
+++ b/autogpt_platform/backend/backend/data/graph.py
@@ -716,11 +716,9 @@ async def fix_llm_provider_credentials():
 
     store = IntegrationCredentialsStore()
 
-    broken_nodes = []
-    try:
-        broken_nodes = await prisma.get_client().query_raw(
-            """
-            SELECT graph."userId" user_id,
+    broken_nodes = await prisma.get_client().query_raw(
+        """
+        SELECT graph."userId" user_id,
                node.id node_id,
                node."constantInput" node_preset_input
         FROM platform."AgentNode" node
@@ -729,10 +727,8 @@ async def fix_llm_provider_credentials():
         WHERE node."constantInput"::jsonb->'credentials'->>'provider' = 'llm'
         ORDER BY graph."userId";
         """
-        )
-        logger.info(f"Fixing LLM credential inputs on {len(broken_nodes)} nodes")
-    except Exception as e:
-        logger.error(f"Error fixing LLM credential inputs: {e}")
+    )
+    logger.info(f"Fixing LLM credential inputs on {len(broken_nodes)} nodes")
 
     user_id: str = ""
     user_integrations = None
diff --git a/autogpt_platform/backend/backend/notifications/notifications.py b/autogpt_platform/backend/backend/notifications/notifications.py
index ebb190ce86..be201ce2d7 100644
--- a/autogpt_platform/backend/backend/notifications/notifications.py
+++ b/autogpt_platform/backend/backend/notifications/notifications.py
@@ -99,8 +99,7 @@ class NotificationManager(AppService):
     def __init__(self):
         super().__init__()
         self.use_db = True
-        self.use_async = False  # Use async RabbitMQ client
-        self.use_rabbitmq = create_notification_config()
+        self.rabbitmq_config = create_notification_config()
         self.running = True
         self.email_sender = EmailSender()
 
diff --git a/autogpt_platform/backend/backend/util/service.py b/autogpt_platform/backend/backend/util/service.py
index 338f9fdb29..387f4758e0 100644
--- a/autogpt_platform/backend/backend/util/service.py
+++ b/autogpt_platform/backend/backend/util/service.py
@@ -117,7 +117,7 @@ class AppService(AppProcess, ABC):
     shared_event_loop: asyncio.AbstractEventLoop
     use_db: bool = False
     use_redis: bool = False
-    use_rabbitmq: Optional[rabbitmq.RabbitMQConfig] = None
+    rabbitmq_config: Optional[rabbitmq.RabbitMQConfig] = None
     rabbitmq_service: Optional[rabbitmq.AsyncRabbitMQ] = None
     use_supabase: bool = False
 
@@ -143,9 +143,9 @@ class AppService(AppProcess, ABC):
     @property
    def rabbit_config(self) -> rabbitmq.RabbitMQConfig:
         """Access the RabbitMQ config. Will raise if not configured."""
-        if not self.use_rabbitmq:
+        if not self.rabbitmq_config:
             raise RuntimeError("RabbitMQ not configured for this service")
-        return self.use_rabbitmq
+        return self.rabbitmq_config
 
     def run_service(self) -> None:
         while True:
@@ -164,13 +164,13 @@ class AppService(AppProcess, ABC):
             self.shared_event_loop.run_until_complete(db.connect())
         if self.use_redis:
             redis.connect()
-        if self.use_rabbitmq:
+        if self.rabbitmq_config:
             logger.info(f"[{self.__class__.__name__}] ⏳ Configuring RabbitMQ...")
             # if self.use_async:
-            self.rabbitmq_service = rabbitmq.AsyncRabbitMQ(self.use_rabbitmq)
+            self.rabbitmq_service = rabbitmq.AsyncRabbitMQ(self.rabbitmq_config)
             self.shared_event_loop.run_until_complete(self.rabbitmq_service.connect())
             # else:
-            #     self.rabbitmq_service = rabbitmq.SyncRabbitMQ(self.use_rabbitmq)
+            #     self.rabbitmq_service = rabbitmq.SyncRabbitMQ(self.rabbitmq_config)
             #     self.rabbitmq_service.connect()
         if self.use_supabase:
             from supabase import create_client
@@ -200,7 +200,7 @@ class AppService(AppProcess, ABC):
         if self.use_redis:
             logger.info(f"[{self.__class__.__name__}] ⏳ Disconnecting Redis...")
             redis.disconnect()
-        if self.use_rabbitmq:
+        if self.rabbitmq_config:
             logger.info(f"[{self.__class__.__name__}] ⏳ Disconnecting RabbitMQ...")
 
     @conn_retry("Pyro", "Starting Pyro Service")
diff --git a/autogpt_platform/backend/backend/util/test.py b/autogpt_platform/backend/backend/util/test.py
index bc60ce2d26..29b2cfdbc5 100644
--- a/autogpt_platform/backend/backend/util/test.py
+++ b/autogpt_platform/backend/backend/util/test.py
@@ -22,7 +22,7 @@ class SpinTestServer:
         self.exec_manager = ExecutionManager()
         self.agent_server = AgentServer()
         self.scheduler = ExecutionScheduler()
-        self.notifications = NotificationManager()
+        self.notif_manager = NotificationManager()
 
     @staticmethod
     def test_get_user_id():
@@ -34,7 +34,7 @@ class SpinTestServer:
         self.agent_server.__enter__()
         self.exec_manager.__enter__()
         self.scheduler.__enter__()
-        self.notifications.__enter__()
+        self.notif_manager.__enter__()
 
         await db.connect()
         await initialize_blocks()
@@ -49,7 +49,7 @@ class SpinTestServer:
         self.exec_manager.__exit__(exc_type, exc_val, exc_tb)
         self.agent_server.__exit__(exc_type, exc_val, exc_tb)
         self.db_api.__exit__(exc_type, exc_val, exc_tb)
-        self.notifications.__exit__(exc_type, exc_val, exc_tb)
+        self.notif_manager.__exit__(exc_type, exc_val, exc_tb)
 
     def setup_dependency_overrides(self):
         # Override get_user_id for testing
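
Not part of the patch: a minimal sketch of how a service opts into RabbitMQ after this rename, assuming only the `AppService` attributes and the `rabbit_config` property shown in the `service.py` hunks above, and that the RabbitMQ helpers live at `backend.util.rabbitmq`. `MyWorker` and its `queue_config` parameter are hypothetical placeholders, not code from this PR.

```python
from typing import Optional

from backend.util import rabbitmq  # assumed module path for the RabbitMQ helpers
from backend.util.service import AppService


class MyWorker(AppService):
    """Hypothetical service, for illustration only."""

    def __init__(self, queue_config: Optional[rabbitmq.RabbitMQConfig] = None):
        super().__init__()
        # Formerly `self.use_rabbitmq = ...`: the renamed attribute makes it clear
        # that it carries an actual RabbitMQConfig rather than a boolean toggle.
        self.rabbitmq_config = queue_config
        # AppService only connects `rabbitmq_service` during startup when
        # `rabbitmq_config` is set; `self.rabbit_config` raises RuntimeError
        # if it was never configured.
```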